PyFoxFile 0.26.0__py3-none-any.whl → 0.27.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pyfoxfile-0.26.0.data → pyfoxfile-0.27.0.data}/scripts/foxfile.py +1 -1
- {pyfoxfile-0.26.0.dist-info → pyfoxfile-0.27.0.dist-info}/METADATA +1 -1
- pyfoxfile-0.27.0.dist-info/RECORD +10 -0
- pyfoxfile.py +755 -194
- pyfoxfile-0.26.0.dist-info/RECORD +0 -10
- {pyfoxfile-0.26.0.data → pyfoxfile-0.27.0.data}/scripts/foxneofile.py +0 -0
- {pyfoxfile-0.26.0.data → pyfoxfile-0.27.0.data}/scripts/neofoxfile.py +0 -0
- {pyfoxfile-0.26.0.dist-info → pyfoxfile-0.27.0.dist-info}/WHEEL +0 -0
- {pyfoxfile-0.26.0.dist-info → pyfoxfile-0.27.0.dist-info}/licenses/LICENSE +0 -0
- {pyfoxfile-0.26.0.dist-info → pyfoxfile-0.27.0.dist-info}/top_level.txt +0 -0
- {pyfoxfile-0.26.0.dist-info → pyfoxfile-0.27.0.dist-info}/zip-safe +0 -0
pyfoxfile.py
CHANGED
@@ -14,7 +14,7 @@
 Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
 Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski
 
-    $FileInfo: pyfoxfile.py - Last Update: 11/
+    $FileInfo: pyfoxfile.py - Last Update: 11/14/2025 Ver. 0.27.0 RC 1 - Author: cooldude2k $
 '''
 
 from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -24,7 +24,6 @@ import re
 import sys
 import time
 import stat
-import zlib
 import mmap
 import hmac
 import base64
@@ -38,8 +37,8 @@ import zipfile
 import binascii
 import datetime
 import platform
+import collections
 from io import StringIO, BytesIO
-from collections import namedtuple
 import posixpath  # POSIX-safe joins/normpaths
 try:
     from backports import tempfile
@@ -50,12 +49,16 @@ try:
     from http.server import BaseHTTPRequestHandler, HTTPServer
     from socketserver import TCPServer
     from urllib.parse import urlparse, parse_qs
-    import base64
 except ImportError:
     from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
     from SocketServer import TCPServer
     from urlparse import urlparse, parse_qs
-
+
+try:
+    # Python 3.8+ only
+    from multiprocessing import shared_memory
+except ImportError:
+    shared_memory = None
 
 # FTP Support
 ftpssl = True
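The import guard above leaves shared_memory set to None on interpreters without multiprocessing.shared_memory (pre-3.8). A minimal sketch of code written against that convention (the buffer contents are illustrative, not from the package):

# Minimal sketch of using the guard above; values are illustrative.
if shared_memory is not None:
    shm = shared_memory.SharedMemory(create=True, size=1024)
    try:
        shm.buf[:5] = b"hello"
    finally:
        shm.close()
        shm.unlink()
else:
    buf = bytearray(1024)  # pre-3.8 fallback: plain in-process buffer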
@@ -146,6 +149,15 @@ try:
 except Exception:
     PATH_TYPES = (basestring,)
 
+def running_interactively():
+    main = sys.modules.get("__main__")
+    no_main_file = not hasattr(main, "__file__")
+    interactive_flag = bool(getattr(sys.flags, "interactive", 0))
+    return no_main_file or interactive_flag
+
+if running_interactively():
+    logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
+
 def _ensure_text(s, encoding="utf-8", errors="replace", allow_none=False):
     """
     Normalize any input to text_type (unicode on Py2, str on Py3).
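running_interactively() treats two signals as "interactive": __main__ lacking a __file__ attribute (a bare REPL, some embedded interpreters) and the -i interpreter flag. A quick sketch of inspecting those same signals directly:

# Sketch: the two signals checked by running_interactively() above.
import sys
main = sys.modules.get("__main__")
print(not hasattr(main, "__file__"))               # True in a bare REPL
print(bool(getattr(sys.flags, "interactive", 0)))  # True under `python -i`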
@@ -166,7 +178,6 @@ def _ensure_text(s, encoding="utf-8", errors="replace", allow_none=False):
 
     # Handle pathlib.Path & other path-like objects
     try:
-        import os
         if hasattr(os, "fspath"):
             fs = os.fspath(s)
             if isinstance(fs, text_type):
@@ -207,7 +218,6 @@ except ImportError:
 
 # Windows-specific setup
 if os.name == "nt":
-    import io
     def _wrap(stream):
         buf = getattr(stream, "buffer", None)
         is_tty = getattr(stream, "isatty", lambda: False)()
@@ -444,7 +454,13 @@ if('PYFOXFILE_CONFIG_FILE' in os.environ and os.path.exists(os.environ['PYFOXFIL
 else:
     prescriptpath = get_importing_script_path()
     if(prescriptpath is not None):
-
+        if(__use_ini_file__ and not __use_json_file__):
+            scriptconf = os.path.join(os.path.dirname(prescriptpath), __use_ini_name__)
+        elif(__use_json_file__ and not __use_ini_file__):
+            scriptconf = os.path.join(os.path.dirname(prescriptpath), __use_json_name__)
+        else:
+            scriptconf = ""
+            prescriptpath = None
     else:
         scriptconf = ""
     if os.path.exists(scriptconf):
@@ -648,12 +664,12 @@ __project__ = __program_name__
 __program_alt_name__ = __program_name__
 __project_url__ = "https://github.com/GameMaker2k/PyFoxFile"
 __project_release_url__ = __project_url__+"/releases/latest"
-__version_info__ = (0,
-__version_date_info__ = (2025, 11,
+__version_info__ = (0, 27, 0, "RC 1", 1)
+__version_date_info__ = (2025, 11, 14, "RC 1", 1)
 __version_date__ = str(__version_date_info__[0]) + "." + str(
     __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
 __revision__ = __version_info__[3]
-__revision_id__ = "$Id:
+__revision_id__ = "$Id: 922ba385fbf9784e38aef32e660e695e81aee4bf $"
 if(__version_info__[4] is not None):
     __version_date_plusrc__ = __version_date__ + \
         "-" + str(__version_date_info__[4])
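Working through the new tuples, the derived date string comes out as "2025.11.14" (a worked example of the expressions kept in context above):

# Worked example of the __version_date__ derivation shown above.
__version_date_info__ = (2025, 11, 14, "RC 1", 1)
__version_date__ = str(__version_date_info__[0]) + "." + str(
    __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
print(__version_date__)  # 2025.11.14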
@@ -805,9 +821,9 @@ except Exception:
 geturls_ua_pyfile_python = "Mozilla/5.0 (compatible; {proname}/{prover}; +{prourl})".format(
     proname=__project__, prover=__version__, prourl=__project_url__)
 if(platform.python_implementation() != ""):
-    py_implementation = platform.python_implementation()
+    py_implementation = platform.python_implementation()+str(platform.python_version_tuple()[0])
 if(platform.python_implementation() == ""):
-    py_implementation = "CPython"
+    py_implementation = "CPython"+str(platform.python_version_tuple()[0])
 geturls_ua_pyfile_python_alt = "Mozilla/5.0 ({osver}; {archtype}; +{prourl}) {pyimp}/{pyver} (KHTML, like Gecko) {proname}/{prover}".format(osver=platform.system(
 )+" "+platform.release(), archtype=platform.machine(), prourl=__project_url__, pyimp=py_implementation, pyver=platform.python_version(), proname=__project__, prover=__version__)
 geturls_ua_googlebot_google = "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"
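The change suffixes the implementation name with the major Python version, so the user-agent token reads like "CPython3/3.11.4" rather than "CPython/3.11.4". An illustrative check (actual output depends on the host interpreter):

# Illustrative result of the py_implementation change above.
import platform
py_implementation = platform.python_implementation() + str(platform.python_version_tuple()[0])
print(py_implementation)  # e.g. CPython3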
@@ -823,13 +839,19 @@ geturls_headers_googlebot_google_old = {'Referer': "http://google.com/", 'User-A
 
 compressionsupport = []
 try:
-
+    try:
+        import compression.gzip as gzip
+    except ImportError:
+        import gzip
     compressionsupport.append("gz")
     compressionsupport.append("gzip")
 except ImportError:
     pass
 try:
-
+    try:
+        import compression.bz2 as bz2
+    except ImportError:
+        import bz2
     compressionsupport.append("bz2")
     compressionsupport.append("bzip2")
 except ImportError:
@@ -850,20 +872,20 @@ except ImportError:
     pass
 '''
 try:
-
+    try:
+        import compression.zstd as zstd
+    except ImportError:
+        import pyzstd.zstdfile as zstd
     compressionsupport.append("zst")
     compressionsupport.append("zstd")
     compressionsupport.append("zstandard")
 except ImportError:
+    pass
+try:
     try:
-        import
-        compressionsupport.append("zst")
-        compressionsupport.append("zstd")
-        compressionsupport.append("zstandard")
+        import compression.lzma as lzma
     except ImportError:
-
-    try:
-        import lzma
+        import lzma
     compressionsupport.append("lzma")
     compressionsupport.append("xz")
 except ImportError:
@@ -873,12 +895,18 @@ except ImportError:
     compressionsupport.append("xz")
 except ImportError:
     pass
-
-
-
-
-
-
+try:
+    try:
+        import compression.zlib as zlib
+    except ImportError:
+        import zlib
+    compressionsupport.append("zlib")
+    compressionsupport.append("zl")
+    compressionsupport.append("zz")
+    compressionsupport.append("Z")
+    compressionsupport.append("z")
+except ImportError:
+    pass
 compressionlist = ['auto']
 compressionlistalt = []
 outextlist = []
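Every block here follows the same shape: try the compression.* module first, fall back to the stdlib module, and register extensions only if either import succeeded. A generic sketch of that shape (the try_import helper is illustrative, not part of the package):

# Illustrative generalization of the fallback-import pattern above.
import importlib

def try_import(preferred, fallback):
    for modname in (preferred, fallback):
        try:
            return importlib.import_module(modname)
        except ImportError:
            continue
    return None

bz2 = try_import("compression.bz2", "bz2")
if bz2 is not None:
    compressionsupport.extend(["bz2", "bzip2"])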
@@ -1052,6 +1080,14 @@ def to_ns(timestamp):
     # Multiply by 1e9 to get nanoseconds, then cast to int
     return int(seconds * 1000000000)
 
+def format_ns_utc(ts_ns, fmt='%Y-%m-%d %H:%M:%S'):
+    ts_ns = int(ts_ns)
+    sec, ns = divmod(ts_ns, 10**9)
+    dt = datetime.datetime.utcfromtimestamp(sec).replace(microsecond=ns // 1000)
+    base = dt.strftime(fmt)
+    ns_str = "%09d" % ns
+    return base + "." + ns_str
+
 def _split_posix(name):
     """
     Return a list of path parts without collapsing '..'.
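format_ns_utc() splits a nanosecond timestamp into whole seconds and a zero-padded 9-digit fraction, so no precision is lost to float conversion. A worked example (the input value is chosen for illustration):

# Worked example for format_ns_utc(); the timestamp is illustrative.
# 1700000000 s after the epoch is 2023-11-14 22:13:20 UTC.
ts_ns = 1700000000 * 10**9 + 123456789
print(format_ns_utc(ts_ns))  # 2023-11-14 22:13:20.123456789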
@@ -2088,7 +2124,7 @@ def MkTempFile(data=None,
                spool_max=__spoolfile_size__,
                spool_dir=__use_spooldir__,
                reset_to_start=True,
-               memfd_name=
+               memfd_name=__program_name__,
                memfd_allow_sealing=False,
                memfd_flags_extra=0,
                on_create=None):
@@ -2580,6 +2616,384 @@ def _is_valid_zlib_header(cmf, flg):
         return False
     return True
 
+class SharedMemoryFile(object):
+    """
+    File-like wrapper around multiprocessing.shared_memory.SharedMemory.
+
+    Binary-only API, intended to behave similarly to a regular file opened in
+    'rb', 'wb', or 'r+b' modes (but backed by a fixed-size shared memory block).
+
+    Notes:
+      - Requires Python 3.8+ at runtime to actually use SharedMemory.
+      - On Python 2, importing is fine but constructing will raise RuntimeError.
+      - There is no automatic resizing; buffer size is fixed by SharedMemory.
+      - No real fileno(); this does not represent an OS-level file descriptor.
+      - For text mode, wrap this with io.TextIOWrapper on Python 3:
+            f = SharedMemoryFile(...)
+            tf = io.TextIOWrapper(f, encoding="utf-8")
+    """
+
+    def __init__(self, shm=None, name=None, create=False, size=0,
+                 mode='r+b', offset=0, unlink_on_close=False):
+        """
+        Parameters:
+          shm   : existing SharedMemory object (preferred).
+          name  : name of shared memory block (for attach or create).
+          create: if True, create new SharedMemory; else attach existing.
+          size  : size in bytes (required when create=True).
+          mode  : like 'rb', 'wb', 'r+b', 'ab' (binary only; 't' not supported).
+          offset: starting offset within the shared memory buffer.
+          unlink_on_close: if True, call shm.unlink() when close() is called.
+
+        Usage examples:
+
+            # Create new block and file-like wrapper
+            f = SharedMemoryFile(name=None, create=True, size=4096, mode='r+b')
+
+            # Attach to existing shared memory by name
+            f = SharedMemoryFile(name="xyz", create=False, mode='r+b')
+
+            # Wrap an existing SharedMemory object
+            shm = shared_memory.SharedMemory(create=True, size=1024)
+            f = SharedMemoryFile(shm=shm, mode='r+b')
+        """
+        if shared_memory is None:
+            # No SharedMemory available on this interpreter
+            raise RuntimeError("multiprocessing.shared_memory.SharedMemory "
+                               "is not available on this Python version")
+
+        if 't' in mode:
+            raise ValueError("SharedMemoryFile is binary-only; "
+                             "wrap it with io.TextIOWrapper for text")
+
+        self.mode = mode
+        self._closed = False
+        self._unlinked = False
+        self._unlink_on_close = bool(unlink_on_close)
+
+        if shm is not None:
+            self._shm = shm
+        else:
+            # name may be None when create=True
+            self._shm = shared_memory.SharedMemory(name=name, create=create, size=size)
+
+        self._buf = self._shm.buf
+        self._base_offset = int(offset)
+        if self._base_offset < 0 or self._base_offset > len(self._buf):
+            raise ValueError("offset out of range")
+
+        # We treat the accessible region as [base_offset, len(buf))
+        self._size = len(self._buf) - self._base_offset
+        self._pos = 0  # logical file position within that region
+
+    # ---------- basic properties ----------
+
+    @property
+    def name(self):
+        # SharedMemory name (may be None for anonymous)
+        return getattr(self._shm, "name", None)
+
+    @property
+    def closed(self):
+        return self._closed
+
+    def readable(self):
+        return ('r' in self.mode) or ('+' in self.mode)
+
+    def writable(self):
+        return any(ch in self.mode for ch in ('w', 'a', '+'))
+
+    def seekable(self):
+        return True
+
+    # ---------- core helpers ----------
+
+    def _check_closed(self):
+        if self._closed:
+            raise ValueError("I/O operation on closed SharedMemoryFile")
+
+    def _clamp_pos(self, pos):
+        if pos < 0:
+            return 0
+        if pos > self._size:
+            return self._size
+        return pos
+
+    def _region_bounds(self):
+        """Return (start, end) absolute indices into the SharedMemory buffer."""
+        start = self._base_offset + self._pos
+        end = self._base_offset + self._size
+        return start, end
+
+    # ---------- positioning ----------
+
+    def seek(self, offset, whence=0):
+        """
+        Seek to a new file position.
+
+        whence: 0 = from start, 1 = from current, 2 = from end.
+        """
+        self._check_closed()
+        offset = int(offset)
+        whence = int(whence)
+
+        if whence == 0:  # from start
+            new_pos = offset
+        elif whence == 1:  # from current
+            new_pos = self._pos + offset
+        elif whence == 2:  # from end
+            new_pos = self._size + offset
+        else:
+            raise ValueError("invalid whence (expected 0, 1, or 2)")
+
+        self._pos = self._clamp_pos(new_pos)
+        return self._pos
+
+    def tell(self):
+        return self._pos
+
+    # ---------- reading ----------
+
+    def read(self, size=-1):
+        """
+        Read up to 'size' bytes (or to EOF if size<0 or None).
+        Returns bytes (py3) or str (py2).
+        """
+        self._check_closed()
+        if not self.readable():
+            raise IOError("SharedMemoryFile not opened for reading")
+
+        if size is None or size < 0:
+            size = self._size - self._pos
+        else:
+            size = int(size)
+            if size < 0:
+                size = 0
+
+        if size == 0:
+            return b'' if not PY2 else ''
+
+        start, end_abs = self._region_bounds()
+        available = end_abs - (self._base_offset + self._pos)
+        if available <= 0:
+            return b'' if not PY2 else ''
+
+        size = min(size, available)
+
+        abs_start = self._base_offset + self._pos
+        abs_end = abs_start + size
+
+        chunk = self._buf[abs_start:abs_end]
+        if PY2:
+            data = bytes(chunk)  # bytes() -> str in py2
+        else:
+            data = bytes(chunk)
+
+        self._pos += len(data)
+        return data
+
+    def readline(self, size=-1):
+        """
+        Read a single line (ending with '\\n' or EOF).
+        If size >= 0, at most that many bytes are returned.
+        """
+        self._check_closed()
+        if not self.readable():
+            raise IOError("SharedMemoryFile not opened for reading")
+
+        # Determine maximum bytes we can scan
+        start, end_abs = self._region_bounds()
+        remaining = end_abs - (self._base_offset + self._pos)
+        if remaining <= 0:
+            return b'' if not PY2 else ''
+
+        if size is not None and size >= 0:
+            size = int(size)
+            max_len = min(size, remaining)
+        else:
+            max_len = remaining
+
+        abs_start = self._base_offset + self._pos
+        abs_max = abs_start + max_len
+
+        # Work on a local bytes slice for easy .find()
+        if PY2:
+            buf_bytes = bytes(self._buf[abs_start:abs_max])
+        else:
+            buf_bytes = bytes(self._buf[abs_start:abs_max])
+
+        idx = buf_bytes.find(b'\n')
+        if idx == -1:
+            # No newline; read entire chunk
+            line_bytes = buf_bytes
+        else:
+            line_bytes = buf_bytes[:idx + 1]
+
+        self._pos += len(line_bytes)
+
+        if PY2:
+            return line_bytes  # already str
+        return line_bytes
+
+    def readinto(self, b):
+        """
+        Read bytes into a pre-allocated writable buffer (bytearray/memoryview).
+        Returns number of bytes read.
+        """
+        self._check_closed()
+        if not self.readable():
+            raise IOError("SharedMemoryFile not opened for reading")
+
+        # Normalize target buffer
+        if isinstance(b, memoryview):
+            mv = b
+        else:
+            mv = memoryview(b)
+
+        size = len(mv)
+        if size <= 0:
+            return 0
+
+        start, end_abs = self._region_bounds()
+        remaining = end_abs - (self._base_offset + self._pos)
+        if remaining <= 0:
+            return 0
+
+        size = min(size, remaining)
+
+        abs_start = self._base_offset + self._pos
+        abs_end = abs_start + size
+
+        mv[:size] = self._buf[abs_start:abs_end]
+        self._pos += size
+        return size
+
+    # ---------- writing ----------
+
+    def write(self, data):
+        """
+        Write bytes-like object to the shared memory region.
+
+        Returns number of bytes written. Will raise if not opened writable
+        or if writing would overflow the fixed-size region.
+        """
+        self._check_closed()
+        if not self.writable():
+            raise IOError("SharedMemoryFile not opened for writing")
+
+        if isinstance(data, memoryview):
+            data = bytes(data)
+        elif isinstance(data, bytearray):
+            data = bytes(data)
+
+        if not isinstance(data, binary_types):
+            raise TypeError("write() expects a bytes-like object")
+
+        data_len = len(data)
+        if data_len == 0:
+            return 0
+
+        # Handle "append" semantics roughly: start from end on first write
+        if 'a' in self.mode and self._pos == 0:
+            # Move to logical end of region
+            self._pos = self._size
+
+        start, end_abs = self._region_bounds()
+        remaining = end_abs - (self._base_offset + self._pos)
+        if data_len > remaining:
+            raise IOError("write would overflow SharedMemory region (need %d, have %d)"
+                          % (data_len, remaining))
+
+        abs_start = self._base_offset + self._pos
+        abs_end = abs_start + data_len
+
+        self._buf[abs_start:abs_end] = data
+        self._pos += data_len
+        return data_len
+
+    def flush(self):
+        """
+        No-op for shared memory; provided for file-like compatibility.
+        """
+        self._check_closed()
+        # nothing to flush
+
+    # ---------- unlink / close / context manager ----------
+
+    def unlink(self):
+        """
+        Unlink (destroy) the underlying shared memory block.
+
+        After unlink(), new processes cannot attach via name.
+        Existing attachments (including this one) can continue to use
+        the memory until they close() it.
+
+        This is idempotent: calling it more than once is safe.
+        """
+        if self._unlinked:
+            return
+
+        try:
+            self._shm.unlink()
+        except AttributeError:
+            # Should not happen on normal Python 3.8+,
+            # but keep a clear error if it does.
+            raise RuntimeError("Underlying SharedMemory object "
+                               "does not support unlink()")
+
+        self._unlinked = True
+
+    def close(self):
+        if self._closed:
+            return
+        self._closed = True
+
+        # Optionally unlink on close if requested
+        if self._unlink_on_close and not self._unlinked:
+            try:
+                self.unlink()
+            except Exception:
+                # best-effort; close anyway
+                pass
+
+        try:
+            self._shm.close()
+        except Exception:
+            pass
+
+    def __enter__(self):
+        self._check_closed()
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.close()
+
+    # ---------- iteration ----------
+
+    def __iter__(self):
+        return self
+
+    def __next__(self):
+        line = self.readline()
+        if (not line) or len(line) == 0:
+            raise StopIteration
+        return line
+
+    if PY2:
+        next = __next__
+
+    # ---------- misc helpers ----------
+
+    def fileno(self):
+        """
+        There is no real OS-level file descriptor; raise OSError for APIs
+        that require a fileno().
+        """
+        raise OSError("SharedMemoryFile does not have a real fileno()")
+
+    def isatty(self):
+        return False
+
 # ---------- Main class ----------
 class ZlibFile(object):
     """
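Taken together, the class reads and writes like a fixed-size binary file. A usage sketch built from the docstring above (Python 3.8+ required; all names come from this diff):

# Usage sketch for the SharedMemoryFile class added above (Python 3.8+).
f = SharedMemoryFile(create=True, size=64, mode='r+b', unlink_on_close=True)
f.write(b"first line\nsecond line\n")
f.seek(0)
print(f.readline())  # b'first line\n'
f.close()            # also unlinks, because unlink_on_close=True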
@@ -4471,7 +4885,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
         extrastart = extrastart + 1
     fvendorfieldslist = []
     fvendorfields = 0;
-    if(len(HeaderOut)>extraend):
+    if((len(HeaderOut) - 4)>extraend):
         extrastart = extraend
         extraend = len(HeaderOut) - 4
         while(extrastart < extraend):
@@ -4691,6 +5105,15 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
     while(extrastart < extraend):
         fextrafieldslist.append(HeaderOut[extrastart])
         extrastart = extrastart + 1
+    fvendorfieldslist = []
+    fvendorfields = 0;
+    if((len(HeaderOut) - 4)>extraend):
+        extrastart = extraend
+        extraend = len(HeaderOut) - 4
+        while(extrastart < extraend):
+            fvendorfieldslist.append(HeaderOut[extrastart])
+            extrastart = extrastart + 1
+            fvendorfields = fvendorfields + 1
     if(fextrafields==1):
         try:
             fextrafieldslist = json.loads(base64.b64decode(fextrafieldslist[0]).decode("UTF-8"))
@@ -4700,6 +5123,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
             fextrafieldslist = json.loads(fextrafieldslist[0])
         except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
             pass
+    fjstart = fp.tell()
     if(fjsontype=="json"):
         fjsoncontent = {}
         fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
@@ -4766,6 +5190,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
         except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
             pass
     fp.seek(len(delimiter), 1)
+    fjend = fp.tell() - 1
     jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
     if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
         VerbosePrintOut("File JSON Data Checksum Error with file " +
@@ -4798,6 +5223,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
         pyhascontents = False
     fcontents.seek(0, 0)
     newfccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
+    fcontents.seek(0, 0)
     if(not CheckChecksums(fccs, newfccs) and not skipchecksum and not listonly):
         VerbosePrintOut("File Content Checksum Error with file " +
                         fname + " at offset " + str(fcontentstart))
@@ -4837,8 +5263,8 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
     fcontents.seek(0, 0)
     if(not contentasfile):
         fcontents = fcontents.read()
-    outlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
-               finode, flinkcount, fdev, frdev, fseeknextfile, fjsoncontent,
+    outlist = {'fheaders': [ftype, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+                            fcsize, fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev, frdev, fseeknextfile], 'fextradata': fextrafieldslist, 'fjsoncontent': fjsoncontent, 'fcontents': fcontents, 'fjsonchecksumtype': fjsonchecksumtype, 'fheaderchecksumtype': HeaderOut[-4].lower(), 'fcontentchecksumtype': HeaderOut[-3].lower()}
     return outlist
 
 
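The return value changes from a positional list to a dict keyed by section, so callers can pick out pieces by name. A hypothetical consumer (the record variable is illustrative; the keys come from this diff):

# Hypothetical consumer of the dict now returned above.
record = ReadFileHeaderDataWithContentToList(fp)
fname = record['fheaders'][3]   # name field, positional within 'fheaders'
payload = record['fcontents']
print(fname, record['fheaderchecksumtype'])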
@@ -4855,6 +5281,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
         CatSizeEnd = CatSize
     fp.seek(curloc, 0)
     inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+    headeroffset = fp.tell()
     formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
     formdelszie = len(formatspecs['format_delimiter'])
     formdel = fp.read(formdelszie).decode("UTF-8")
@@ -4874,7 +5301,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
     newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
     if(not headercheck and not skipchecksum):
         VerbosePrintOut(
-            "File Header Checksum Error with file at offset " + str(
+            "File Header Checksum Error with file at offset " + str(headeroffset))
         VerbosePrintOut("'" + fprechecksum + "' != " +
                         "'" + newfcs + "'")
         return False
@@ -4926,6 +5353,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
         CatSizeEnd = CatSize
     fp.seek(curloc, 0)
     inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+    headeroffset = fp.tell()
     formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
     formdelszie = len(formatspecs['format_delimiter'])
     formdel = fp.read(formdelszie).decode("UTF-8")
@@ -4958,7 +5386,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
         pass
     fvendorfieldslist = []
     fvendorfields = 0;
-    if(len(inheader)>extraend):
+    if((len(inheader) - 2)>extraend):
         extrastart = extraend
         extraend = len(inheader) - 2
         while(extrastart < extraend):
@@ -4968,8 +5396,8 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
     formversion = re.findall("([\\d]+)", formstring)
     fheadsize = int(inheader[0], 16)
     fnumfields = int(inheader[1], 16)
-    fheadctime = int(inheader[
-    fheadmtime = int(inheader[
+    fheadctime = int(inheader[2], 16)
+    fheadmtime = int(inheader[3], 16)
     fhencoding = inheader[4]
     fostype = inheader[5]
     fpythontype = inheader[6]
@@ -5078,7 +5506,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
     newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
     if(not headercheck and not skipchecksum):
         VerbosePrintOut(
-            "File Header Checksum Error with file at offset " + str(
+            "File Header Checksum Error with file at offset " + str(headeroffset))
         VerbosePrintOut("'" + fprechecksum + "' != " +
                         "'" + newfcs + "'")
         return False
@@ -5188,6 +5616,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
         CatSizeEnd = CatSize
     fp.seek(curloc, 0)
     inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+    headeroffset = fp.tell()
     formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
     formdelszie = len(formatspecs['format_delimiter'])
     formdel = fp.read(formdelszie).decode("UTF-8")
@@ -5218,19 +5647,98 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
             fextrafieldslist = json.loads(fextrafieldslist[0])
         except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
             pass
+    fvendorfieldslist = []
+    fvendorfields = 0;
+    if((len(inheader) - 2)>extraend):
+        extrastart = extraend
+        extraend = len(inheader) - 2
+        while(extrastart < extraend):
+            fvendorfieldslist.append(HeaderOut[extrastart])
+            extrastart = extrastart + 1
+            fvendorfields = fvendorfields + 1
     formversion = re.findall("([\\d]+)", formstring)
     fheadsize = int(inheader[0], 16)
     fnumfields = int(inheader[1], 16)
+    fheadctime = int(inheader[2], 16)
+    fheadmtime = int(inheader[3], 16)
+    fhencoding = inheader[4]
+    fostype = inheader[5]
+    fpythontype = inheader[6]
+    fprojectname = inheader[7]
     fnumfiles = int(inheader[8], 16)
-    fseeknextfile =
-    fjsontype =
+    fseeknextfile = inheader[9]
+    fjsontype = inheader[10]
     fjsonlen = int(inheader[11], 16)
     fjsonsize = int(inheader[12], 16)
     fjsonchecksumtype = inheader[13]
     fjsonchecksum = inheader[14]
     fjsoncontent = {}
     fjstart = fp.tell()
-
+    if(fjsontype=="json"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        if(fjsonsize > 0):
+            try:
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
+            except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                try:
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = json.loads(fprejsoncontent)
+                except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                    fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = {}
+        else:
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
+            fjsoncontent = {}
+    elif(testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        if (fjsonsize > 0):
+            try:
+                # try base64 → utf-8 → YAML
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+            except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
+                try:
+                    # fall back to treating the bytes as plain text YAML
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+                except (UnicodeDecodeError, yaml.YAMLError):
+                    # final fallback: empty
+                    fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = {}
+        else:
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
+            fjsoncontent = {}
+    elif(not testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        fprejsoncontent = ""
+        fjsonrawcontent = fprejsoncontent
+    elif(fjsontype=="list"):
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        flisttmp = MkTempFile()
+        flisttmp.write(fprejsoncontent.encode())
+        flisttmp.seek(0)
+        fjsoncontent = ReadFileHeaderData(flisttmp, fjsonlen, delimiter)
+        flisttmp.close()
+        fjsonrawcontent = fjsoncontent
+        if(fjsonlen==1):
+            try:
+                fjsonrawcontent = base64.b64decode(fjsoncontent[0]).decode("UTF-8")
+                fjsoncontent = json.loads(base64.b64decode(fjsoncontent[0]).decode("UTF-8"))
+                fjsonlen = len(fjsoncontent)
+            except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                try:
+                    fjsonrawcontent = fjsoncontent[0]
+                    fjsoncontent = json.loads(fjsoncontent[0])
+                except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                    pass
     fjend = fp.tell()
     if(re.findall("^\\+([0-9]+)", fseeknextfile)):
         fseeknextasnum = int(fseeknextfile.replace("+", ""))
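Both the json and list branches attempt a base64 decode of the stored blob before falling back to parsing it as plain text, and end with an empty dict if everything fails. A compact sketch of that chain (the helper name is illustrative):

# Sketch of the base64-then-plain JSON decode chain used above;
# the helper name decode_json_blob is illustrative.
import base64, binascii, json

def decode_json_blob(raw):
    try:
        return json.loads(base64.b64decode(raw.encode("UTF-8")).decode("UTF-8"))
    except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
        try:
            return json.loads(raw)
        except (json.decoder.JSONDecodeError, UnicodeDecodeError):
            return {}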
@@ -5261,7 +5769,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
     newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
     if(not headercheck and not skipchecksum):
         VerbosePrintOut(
-            "File Header Checksum Error with file at offset " + str(
+            "File Header Checksum Error with file at offset " + str(headeroffset))
         VerbosePrintOut("'" + fprechecksum + "' != " +
                         "'" + newfcs + "'")
         return False
@@ -5765,7 +6273,7 @@ def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksu
     else:
         fctime = format(int(to_ns(time.time())), 'x').lower()
     # Serialize the first group
-    fnumfilesa = AppendNullBytes([tmpoutlenhex, fctime, fctime, fencoding, platform.system(), py_implementation, __program_name__, fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
+    fnumfilesa = AppendNullBytes([tmpoutlenhex, fctime, fctime, fencoding, platform.system(), py_implementation, __program_name__+str(__version_info__[0]), fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
     # Append tmpoutlist
     fnumfilesa += AppendNullBytes(tmpoutlist, delimiter)
     # Append extradata items if any
@@ -5991,9 +6499,7 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
         pass
     return fp
 
-def
-    if(not hasattr(fp, "write")):
-        return False
+def AppendFilesWithContentToList(infiles, dirlistfromtxt=False, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
     advancedlist = __use_advanced_list__
     altinode = __use_alt_inode__
     infilelist = []
@@ -6035,16 +6541,8 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
     inodetofile = {}
     filetoinode = {}
     inodetoforminode = {}
-    numfiles = int(len(GetDirList))
-    fnumfiles = format(numfiles, 'x').lower()
-    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
-    try:
-        fp.flush()
-        if(hasattr(os, "sync")):
-            os.fsync(fp.fileno())
-    except (io.UnsupportedOperation, AttributeError, OSError):
-        pass
     FullSizeFilesAlt = 0
+    tmpoutlist = []
    for curfname in GetDirList:
        fencoding = "UTF-8"
        if(re.findall("^[.|/]", curfname)):
@@ -6219,7 +6717,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
         typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
         fcontents.seek(0, 0)
         if(typechecktest is not False):
-            typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=
+            typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
         fcontents.seek(0, 0)
         fcencoding = GetFileEncoding(fcontents, 0, False)[0]
         if(typechecktest is False and not compresswholefile):
@@ -6268,7 +6766,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
         typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
         fcontents.seek(0, 0)
         if(typechecktest is not False):
-            typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=
+            typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
         fcontents.seek(0, 0)
         fcencoding = GetFileEncoding(fcontents, 0, False)[0]
         if(typechecktest is False and not compresswholefile):
@@ -6312,10 +6810,29 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
         if(fcompression == "none"):
             fcompression = ""
         fcontents.seek(0, 0)
+        if(not contentasfile):
+            fcontents = fcontents.read()
         ftypehex = format(ftype, 'x').lower()
-        tmpoutlist
-
-
+        tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+                           fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+    return tmpoutlist
+
+def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+    GetDirList = AppendFilesWithContentToList(infiles, dirlistfromtxt, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, followlink, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+    if(not hasattr(fp, "write")):
+        return False
+    numfiles = int(len(GetDirList))
+    fnumfiles = format(numfiles, 'x').lower()
+    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+    try:
+        fp.flush()
+        if(hasattr(os, "sync")):
+            os.fsync(fp.fileno())
+    except (io.UnsupportedOperation, AttributeError, OSError):
+        pass
+    for curfname in GetDirList:
+        tmpoutlist = curfname['fheaders']
+        AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
         try:
             fp.flush()
             if(hasattr(os, "sync")):
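The refactor separates collection from serialization: the *ToList function builds one dict per file, and the writer replays each dict through AppendFileHeaderWithContent. A sketch of driving the two stages by hand (input paths and output name are illustrative):

# Sketch of the two-stage flow introduced above; paths are illustrative.
entries = AppendFilesWithContentToList(["./docs", "./src"])
with open("out.fox", "wb") as fp:
    AppendFileHeader(fp, len(entries), "UTF-8", [], {}, ["md5", "md5"], __file_format_dict__, None)
    for entry in entries:
        AppendFileHeaderWithContent(
            fp, entry['fheaders'], entry['fextradata'], entry['fjsoncontent'],
            entry['fcontents'],
            [entry['fheaderchecksumtype'], entry['fcontentchecksumtype'], entry['fjsonchecksumtype']],
            __file_format_dict__, None)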
@@ -6324,9 +6841,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
         pass
     return fp
 
-def
-    if(not hasattr(fp, "write")):
-        return False
+def AppendFilesWithContentFromTarFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
     curinode = 0
     curfid = 0
     inodelist = []
@@ -6368,10 +6883,8 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
     if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
         formatspecs = formatspecs[compresscheck]
     if(compresscheck=="zstd"):
-        if '
-            infile = ZstdFile(
-        elif 'pyzstd' in sys.modules:
-            infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+        if 'zstd' in compressionsupport:
+            infile = zstd.ZstdFile(infile, mode="rb")
         tarfp = tarfile.open(fileobj=infile, mode="r")
     else:
         tarfp = tarfile.open(fileobj=infile, mode="r")
@@ -6380,23 +6893,14 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
     if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
         formatspecs = formatspecs[compresscheck]
     if(compresscheck=="zstd"):
-        if '
-            infile = ZstdFile(
-        elif 'pyzstd' in sys.modules:
-            infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+        if 'zstd' in compressionsupport:
+            infile = zstd.ZstdFile(infile, mode="rb")
         tarfp = tarfile.open(fileobj=infile, mode="r")
     else:
         tarfp = tarfile.open(infile, "r")
     except FileNotFoundError:
         return False
-
-    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
-    try:
-        fp.flush()
-        if(hasattr(os, "sync")):
-            os.fsync(fp.fileno())
-    except (io.UnsupportedOperation, AttributeError, OSError):
-        pass
+    tmpoutlist = []
     for member in sorted(tarfp.getmembers(), key=lambda x: x.name):
         fencoding = "UTF-8"
         if(re.findall("^[.|/]", member.name)):
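Rather than probing sys.modules for a zstd binding, the code now keys off the compressionsupport list populated by the import fallbacks earlier in the module. A sketch of that gate (the archive name is illustrative):

# Sketch of the compressionsupport gate for zstd tarballs; the
# archive name is illustrative.
infile = open("backup.tar.zst", "rb")
if 'zstd' in compressionsupport:
    infile = zstd.ZstdFile(infile, mode="rb")
tarfp = tarfile.open(fileobj=infile, mode="r")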
@@ -6409,14 +6913,8 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
         ffullmode = member.mode
         flinkcount = 0
         fblksize = 0
-        if(hasattr(fstatinfo, "st_blksize")):
-            fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
         fblocks = 0
-        if(hasattr(fstatinfo, "st_blocks")):
-            fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
         fflags = 0
-        if(hasattr(fstatinfo, "st_flags")):
-            fflags = format(int(fstatinfo.st_flags), 'x').lower()
         ftype = 0
         if(member.isreg()):
             ffullmode = member.mode + stat.S_IFREG
@@ -6494,7 +6992,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
         typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
         fcontents.seek(0, 0)
         if(typechecktest is not False):
-            typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=
+            typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
         fcontents.seek(0, 0)
         fcencoding = GetFileEncoding(fcontents, 0, False)[0]
         if(typechecktest is False and not compresswholefile):
@@ -6538,22 +7036,38 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
         if(fcompression == "none"):
             fcompression = ""
         fcontents.seek(0, 0)
+        if(not contentasfile):
+            fcontents = fcontents.read()
         ftypehex = format(ftype, 'x').lower()
-        tmpoutlist
-
-
+        tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+                           fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+    return tmpoutlist
+
+def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+    if(not hasattr(fp, "write")):
+        return False
+    GetDirList = AppendFilesWithContentFromTarFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+    numfiles = int(len(GetDirList))
+    fnumfiles = format(numfiles, 'x').lower()
+    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+    try:
+        fp.flush()
+        if(hasattr(os, "sync")):
+            os.fsync(fp.fileno())
+    except (io.UnsupportedOperation, AttributeError, OSError):
+        pass
+    for curfname in GetDirList:
+        tmpoutlist = curfname['fheaders']
+        AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
         try:
             fp.flush()
             if(hasattr(os, "sync")):
                 os.fsync(fp.fileno())
         except (io.UnsupportedOperation, AttributeError, OSError):
             pass
-        fcontents.close()
     return fp
 
-def
-    if(not hasattr(fp, "write")):
-        return False
+def AppendFilesWithContentFromZipFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
     curinode = 0
     curfid = 0
     inodelist = []
@@ -6586,14 +7100,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
     ziptest = zipfp.testzip()
     if(ziptest):
         VerbosePrintOut("Bad file found!")
-
-    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
-    try:
-        fp.flush()
-        if(hasattr(os, "sync")):
-            os.fsync(fp.fileno())
-    except (io.UnsupportedOperation, AttributeError, OSError):
-        pass
+    tmpoutlist = []
     for member in sorted(zipfp.infolist(), key=lambda x: x.filename):
         fencoding = "UTF-8"
         if(re.findall("^[.|/]", member.filename)):
@@ -6609,14 +7116,8 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
         fpremode = int(stat.S_IFREG | 0x1b6)
         flinkcount = 0
         fblksize = 0
-        if(hasattr(fstatinfo, "st_blksize")):
-            fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
         fblocks = 0
-        if(hasattr(fstatinfo, "st_blocks")):
-            fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
         fflags = 0
-        if(hasattr(fstatinfo, "st_flags")):
-            fflags = format(int(fstatinfo.st_flags), 'x').lower()
         ftype = 0
         if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
             ftype = 5
@@ -6756,26 +7257,44 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
         if(fcompression == "none"):
             fcompression = ""
         fcontents.seek(0, 0)
+        if(not contentasfile):
+            fcontents = fcontents.read()
         ftypehex = format(ftype, 'x').lower()
-        tmpoutlist
-
-
+        tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+                           fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+    return tmpoutlist
+
+def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+    if(not hasattr(fp, "write")):
+        return False
+    GetDirList = AppendFilesWithContentFromZipFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+    numfiles = int(len(GetDirList))
+    fnumfiles = format(numfiles, 'x').lower()
+    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+    try:
+        fp.flush()
+        if(hasattr(os, "sync")):
+            os.fsync(fp.fileno())
+    except (io.UnsupportedOperation, AttributeError, OSError):
+        pass
+    for curfname in GetDirList:
+        tmpoutlist = curfname['fheaders']
+        AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
         try:
             fp.flush()
             if(hasattr(os, "sync")):
                 os.fsync(fp.fileno())
         except (io.UnsupportedOperation, AttributeError, OSError):
             pass
-        fcontents.close()
     return fp
 
 if(not rarfile_support):
-    def
+    def AppendFilesWithContentFromRarFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
         return False
-else:
     def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
-
-
+        return False
+else:
+    def AppendFilesWithContentFromRarFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
         curinode = 0
         curfid = 0
         inodelist = []
@@ -6790,20 +7309,7 @@ else:
         rartest = rarfp.testrar()
         if(rartest):
             VerbosePrintOut("Bad file found!")
-
-        AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
-        try:
-            fp.flush()
-            if(hasattr(os, "sync")):
-                os.fsync(fp.fileno())
-        except (io.UnsupportedOperation, AttributeError, OSError):
-            pass
-        try:
-            fp.flush()
-            if(hasattr(os, "sync")):
-                os.fsync(fp.fileno())
-        except (io.UnsupportedOperation, AttributeError, OSError):
-            pass
+        tmpoutlist = []
         for member in sorted(rarfp.infolist(), key=lambda x: x.filename):
             is_unix = False
             is_windows = False
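
The duplicated AppendFileHeader and flush blocks move out of the scanner; the writer alone now emits the container header and forces it to disk. The flush idiom itself is unchanged, shown here as a standalone sketch (fp is any writable file object; fileno() failures on objects with no real descriptor are swallowed by the except clause):

    try:
        fp.flush()
        if hasattr(os, "sync"):
            os.fsync(fp.fileno())
    except (io.UnsupportedOperation, AttributeError, OSError):
        pass
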
@@ -6848,14 +7354,8 @@ else:
             fcsize = format(int(0), 'x').lower()
             flinkcount = 0
             fblksize = 0
-            if(hasattr(fstatinfo, "st_blksize")):
-                fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
             fblocks = 0
-            if(hasattr(fstatinfo, "st_blocks")):
-                fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
             fflags = 0
-            if(hasattr(fstatinfo, "st_flags")):
-                fflags = format(int(fstatinfo.st_flags), 'x').lower()
             ftype = 0
             if(member.is_file()):
                 ftype = 0
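
The scanner also stops probing fstatinfo for the optional POSIX fields st_blksize, st_blocks, and st_flags and simply records 0 for archive members, presumably because entries read out of an archive carry no such stat data. For reference, each removed probe followed the same hasattr-guarded shape (illustrative sketch of one field):

    fblksize = 0
    if hasattr(fstatinfo, "st_blksize"):
        fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
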
@@ -6993,26 +7493,84 @@ else:
             if(fcompression == "none"):
                 fcompression = ""
             fcontents.seek(0, 0)
+            if(not contentasfile):
+                fcontents = fcontents.read()
             ftypehex = format(ftype, 'x').lower()
-            tmpoutlist
-
-
+            tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+                                            fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+        return tmpoutlist
+    def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+        if(not hasattr(fp, "write")):
+            return False
+        GetDirList = AppendFilesWithContentFromRarFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+        numfiles = int(len(GetDirList))
+        fnumfiles = format(numfiles, 'x').lower()
+        AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+        try:
+            fp.flush()
+            if(hasattr(os, "sync")):
+                os.fsync(fp.fileno())
+        except (io.UnsupportedOperation, AttributeError, OSError):
+            pass
+        for curfname in GetDirList:
+            tmpoutlist = curfname['fheaders']
+            AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
             try:
                 fp.flush()
                 if(hasattr(os, "sync")):
                     os.fsync(fp.fileno())
             except (io.UnsupportedOperation, AttributeError, OSError):
                 pass
-            fcontents.close()
         return fp
 
 if(not py7zr_support):
-    def
+    def sevenzip_readall(infile, **kwargs):
         return False
 else:
+    class _MemoryIO(py7zr.Py7zIO):
+        """In-memory file object used by py7zr's factory API."""
+        def __init__(self):
+            self._buf = bytearray()
+        def write(self, data):
+            # py7zr will call this repeatedly with chunks
+            self._buf.extend(data)
+        def read(self, size=None):
+            if size is None:
+                return bytes(self._buf)
+            return bytes(self._buf[:size])
+        def seek(self, offset, whence=0):
+            # seeking is not needed for this use case
+            return 0
+        def flush(self):
+            pass
+        def size(self):
+            return len(self._buf)
+    class _MemoryFactory(py7zr.WriterFactory):
+        """Factory that creates _MemoryIO objects and keeps them by filename."""
+        def __init__(self):
+            self.files = {}
+        def create(self, filename: str) -> py7zr.Py7zIO:
+            io_obj = _MemoryIO()
+            self.files[filename] = io_obj
+            return io_obj
+    def sevenzip_readall(infile, **kwargs):
+        """
+        Replacement for SevenZipFile.readall() using the new py7zr API.
+
+        Returns: dict[filename -> _MemoryIO]
+        """
+        factory = _MemoryFactory()
+        with py7zr.SevenZipFile(infile, mode="r", **kwargs) as archive:
+            archive.extractall(factory=factory)
+        return factory.files
+
+if(not py7zr_support):
+    def AppendFilesWithContentFromSevenZipFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+        return False
     def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
-
-
+        return False
+else:
+    def AppendFilesWithContentFromSevenZipFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
         formver = formatspecs['format_ver']
         fileheaderver = str(int(formver.replace(".", "")))
         curinode = 0
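
The _MemoryIO/_MemoryFactory pair papers over py7zr releases whose SevenZipFile no longer offers readall(), collecting every extracted member into an in-memory buffer keyed by filename. A hedged usage sketch, assuming py7zr is installed ("example.7z" is a placeholder archive name, not part of the package):

    contents = sevenzip_readall("example.7z")   # dict: filename -> _MemoryIO
    for name, buf in contents.items():
        print(name, buf.size())                 # size() is defined on _MemoryIO above
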
@@ -7024,19 +7582,15 @@ else:
         if(not os.path.exists(infile) or not os.path.isfile(infile)):
             return False
         szpfp = py7zr.SevenZipFile(infile, mode="r")
-
+        try:
+            file_content = szpfp.readall()
+        except AttributeError:
+            file_content = sevenzip_readall(infile)
         #sztest = szpfp.testzip()
         sztestalt = szpfp.test()
         if(sztestalt):
             VerbosePrintOut("Bad file found!")
-
-        AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
-        try:
-            fp.flush()
-            if(hasattr(os, "sync")):
-                os.fsync(fp.fileno())
-        except (io.UnsupportedOperation, AttributeError, OSError):
-            pass
+        tmpoutlist = []
         for member in sorted(szpfp.list(), key=lambda x: x.filename):
             fencoding = "UTF-8"
             if(re.findall("^[.|/]", member.filename)):
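
Rather than pinning or version-sniffing py7zr, the code now tries the classic API first and falls back on AttributeError, so either py7zr generation works:

    try:
        file_content = szpfp.readall()              # older py7zr API
    except AttributeError:
        file_content = sevenzip_readall(infile)     # factory-based fallback defined above
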
@@ -7054,14 +7608,8 @@ else:
             fcsize = format(int(0), 'x').lower()
             flinkcount = 0
             fblksize = 0
-            if(hasattr(fstatinfo, "st_blksize")):
-                fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
             fblocks = 0
-            if(hasattr(fstatinfo, "st_blocks")):
-                fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
             fflags = 0
-            if(hasattr(fstatinfo, "st_flags")):
-                fflags = format(int(fstatinfo.st_flags), 'x').lower()
             ftype = 0
             if(member.is_directory):
                 ftype = 5
@@ -7128,7 +7676,10 @@ else:
             typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
             fcontents.seek(0, 0)
             fcencoding = GetFileEncoding(fcontents, 0, False)[0]
-
+            try:
+                file_content[member.filename].close()
+            except AttributeError:
+                pass
             if(typechecktest is False and not compresswholefile):
                 fcontents.seek(0, 2)
                 ucfsize = fcontents.tell()
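
The values in file_content may be real file objects (classic readall()) or the plain _MemoryIO buffers from the factory fallback, which define no close(); the AttributeError guard treats a missing close() as nothing to release. It is equivalent to a hypothetical duck-typed helper such as:

    def _close_quietly(obj):
        # _MemoryIO buffers have no close(); real file objects do
        close = getattr(obj, "close", None)
        if callable(close):
            close()
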
@@ -7170,17 +7721,34 @@ else:
             if(fcompression == "none"):
                 fcompression = ""
             fcontents.seek(0, 0)
+            if(not contentasfile):
+                fcontents = fcontents.read()
             ftypehex = format(ftype, 'x').lower()
-            tmpoutlist
-
-
+            tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+                                            fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+        return tmpoutlist
+    def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+        if(not hasattr(fp, "write")):
+            return False
+        GetDirList = AppendFilesWithContentFromSevenZipFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+        numfiles = int(len(GetDirList))
+        fnumfiles = format(numfiles, 'x').lower()
+        AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+        try:
+            fp.flush()
+            if(hasattr(os, "sync")):
+                os.fsync(fp.fileno())
+        except (io.UnsupportedOperation, AttributeError, OSError):
+            pass
+        for curfname in GetDirList:
+            tmpoutlist = curfname['fheaders']
+            AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
             try:
                 fp.flush()
                 if(hasattr(os, "sync")):
                     os.fsync(fp.fileno())
             except (io.UnsupportedOperation, AttributeError, OSError):
                 pass
-            fcontents.close()
         return fp
 
 def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
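
All three scanners honor the new contentasfile flag: the writer wrappers pass False and receive raw bytes, while a caller that wants a seekable stream can pass True and keep the BytesIO-like object. The switch reduces to:

    fcontents.seek(0, 0)
    if not contentasfile:
        fcontents = fcontents.read()   # hand back bytes instead of the stream
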
@@ -8450,10 +9018,8 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, file
     elif(compresscheck == "lz4" and compresscheck in compressionsupport):
         fp = lz4.frame.open(infile, "rb")
     elif(compresscheck == "zstd" and compresscheck in compressionsupport):
-        if '
-            fp = ZstdFile(infile, mode="rb")
-        elif 'pyzstd' in sys.modules:
-            fp = pyzstd.zstdfile.ZstdFile(infile, mode="rb")
+        if 'zstd' in compressionsupport:
+            fp = zstd.ZstdFile(infile, mode="rb")
         else:
             return False
     elif((compresscheck == "lzma" or compresscheck == "xz") and compresscheck in compressionsupport):
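
This hunk, and its repeats in UncompressFileAlt, UncompressFile, CompressOpenFile, and TarFileListFiles below, collapses the old two-backend branch (ZstdFile or pyzstd.zstdfile.ZstdFile, chosen via sys.modules) into a single zstd.ZstdFile call gated on compressionsupport. A sketch of the consolidated shape, assuming the zstd backend registered itself in compressionsupport at import time:

    if 'zstd' in compressionsupport:
        fp = zstd.ZstdFile(infile, mode="rb")
    else:
        return False   # callers treat False as "compression unsupported"
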
@@ -8570,10 +9136,8 @@ def UncompressFileAlt(fp, formatspecs=__file_format_multi_dict__, filestart=0,
     elif kind in ("lzma","xz") and (("lzma" in compressionsupport) or ("xz" in compressionsupport)):
         wrapped = lzma.LZMAFile(src)
     elif kind == "zstd" and ("zstd" in compressionsupport or "zstandard" in compressionsupport):
-        if '
-            wrapped = ZstdFile(
-        elif 'pyzstd' in sys.modules:
-            wrapped = pyzstd.zstdfile.ZstdFile(fileobj=src, mode="rb")
+        if 'zstd' in compressionsupport:
+            wrapped = zstd.ZstdFile(src, mode="rb")
         else:
             return False
     elif kind == "lz4" and "lz4" in compressionsupport:
@@ -8641,10 +9205,8 @@ def UncompressFile(infile, formatspecs=__file_format_multi_dict__, mode="rb",
     elif (compresscheck == "bzip2" and "bzip2" in compressionsupport):
         fp = bz2.open(infile, mode)
     elif (compresscheck == "zstd" and "zstandard" in compressionsupport):
-        if '
-            fp = ZstdFile(infile, mode=mode)
-        elif 'pyzstd' in sys.modules:
-            fp = pyzstd.zstdfile.ZstdFile(infile, mode=mode)
+        if 'zstd' in compressionsupport:
+            fp = zstd.ZstdFile(infile, mode=mode)
         else:
             return False
     elif (compresscheck == "lz4" and "lz4" in compressionsupport):
@@ -9413,10 +9975,8 @@ def CompressOpenFile(outfile, compressionenable=True, compressionlevel=None,
         outfp = FileLikeAdapter(bz2.open(outfile, mode, compressionlevel), mode="wb")
 
     elif (fextname == ".zst" and "zstandard" in compressionsupport):
-        if '
-            outfp = FileLikeAdapter(ZstdFile(outfile, mode=mode, level=compressionlevel), mode="wb")
-        elif 'pyzstd' in sys.modules:
-            outfp = FileLikeAdapter(pyzstd.zstdfile.ZstdFile(outfile, mode=mode, level=compressionlevel), mode="wb")
+        if 'zstd' in compressionsupport:
+            outfp = FileLikeAdapter(zstd.ZstdFile(outfile, mode=mode, level=compressionlevel), mode="wb")
         else:
             return False
 
@@ -9733,6 +10293,7 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_form
     formatspecs = formatspecs[compresschecking]
     fp.seek(filestart, 0)
     inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+    headeroffset = fp.tell()
     formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
     formdelsize = len(formatspecs['format_delimiter'])
     formdel = fp.read(formdelsize).decode("UTF-8")
@@ -9740,7 +10301,6 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_form
         return False
     if(formdel != formatspecs['format_delimiter']):
         return False
-    headeroffset = fp.tell()
     if(__use_new_style__):
         inheader = ReadFileHeaderDataBySize(fp, formatspecs['format_delimiter'])
     else:
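
Taken together, the two hunks above move headeroffset = fp.tell() from after the delimiter check to before the magic/version read, so the recorded offset marks the true start of the header rather than the byte after the delimiter. Illustrative ordering:

    headeroffset = fp.tell()    # start of the header, not the delimiter's end
    formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
    formdel = fp.read(len(formatspecs['format_delimiter'])).decode("UTF-8")
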
@@ -11056,10 +11616,8 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
     if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
         formatspecs = formatspecs[compresscheck]
     if(compresscheck=="zstd"):
-        if '
-            infile = ZstdFile(
-        elif 'pyzstd' in sys.modules:
-            infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+        if 'zstd' in compressionsupport:
+            infile = zstd.ZstdFile(infile, mode="rb")
         tarfp = tarfile.open(fileobj=infile, mode="r")
     else:
         tarfp = tarfile.open(fileobj=infile, mode="r")
@@ -11068,10 +11626,8 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
     if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
         formatspecs = formatspecs[compresscheck]
     if(compresscheck=="zstd"):
-        if '
-            infile = ZstdFile(
-        elif 'pyzstd' in sys.modules:
-            infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+        if 'zstd' in compressionsupport:
+            infile = zstd.ZstdFile(infile, mode="rb")
         tarfp = tarfile.open(fileobj=infile, mode="r")
     else:
         tarfp = tarfile.open(infile, "r")
@@ -11398,7 +11954,10 @@ if(py7zr_support):
     lcfi = 0
     returnval = {}
     szpfp = py7zr.SevenZipFile(infile, mode="r")
-
+    try:
+        file_content = szpfp.readall()
+    except AttributeError:
+        file_content = sevenzip_readall(infile)
     #sztest = szpfp.testzip()
     sztestalt = szpfp.test()
     if(sztestalt):
@@ -11442,7 +12001,10 @@ if(py7zr_support):
         printfname = member.filename
         if(ftype == 0):
             fsize = len(file_content[member.filename].read())
-
+        try:
+            file_content[member.filename].close()
+        except AttributeError:
+            pass
         try:
             fuid = int(os.getuid())
         except (KeyError, AttributeError):
@@ -13115,7 +13677,7 @@ def run_tcp_file_server(fileobj, url, on_progress=None):
     Ends after serving exactly one client or wait window elapses.
 
     URL example:
-      tcp://user:pass@0.0.0.0:5000/path/my.
+      tcp://user:pass@0.0.0.0:5000/path/my.fox?
           auth=1&enforce_path=1&rate=200000&timeout=5&wait=30&ssl=0
     """
     parts, o = _parse_net_url(url)  # already returns proto/host/port/timeout/ssl/etc.
@@ -13317,7 +13879,7 @@ def run_udp_file_server(fileobj, url, on_progress=None):
     Ends after serving exactly one client or wait window elapses.
 
     URL example:
-      udp://user:pass@0.0.0.0:5001/path/my.
+      udp://user:pass@0.0.0.0:5001/path/my.fox?
           auth=1&enforce_path=1&rate=250000&timeout=5&wait=30
     """
     parts, o = _parse_net_url(url)
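
These docstring fixes (repeated for the second tcp/udp server pair further down) complete a previously truncated example URL with the .fox extension and its query string. A quick sketch of how such a URL decomposes with the stdlib parser this module already imports:

    from urllib.parse import urlparse, parse_qs
    u = urlparse("udp://user:pass@0.0.0.0:5001/path/my.fox?auth=1&rate=250000&wait=30")
    opts = parse_qs(u.query)     # {'auth': ['1'], 'rate': ['250000'], 'wait': ['30']}
    u.hostname, u.port, u.path   # ('0.0.0.0', 5001, '/path/my.fox')
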
@@ -13577,7 +14139,6 @@ def run_http_file_server(fileobj, url, on_progress=None, backlog=5):
         if not ah or not ah.strip().lower().startswith("basic "):
             return False
         try:
-            import base64
             b64 = ah.strip().split(" ", 1)[1]
             raw = base64.b64decode(_to_bytes(b64))
             try: raw_txt = raw.decode("utf-8")
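
The function-local import base64 is dropped as redundant: base64 is already imported at module scope, so the decode below keeps working unchanged. For reference, the Basic-Auth decode around it reduces to:

    b64 = ah.strip().split(" ", 1)[1]          # text after the "Basic " prefix
    raw = base64.b64decode(_to_bytes(b64))     # yields b"user:pass"
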
@@ -13741,7 +14302,7 @@ def run_tcp_file_server(fileobj, url, on_progress=None):
     Ends after serving exactly one client or wait window elapses.
 
     URL example:
-      tcp://user:pass@0.0.0.0:5000/path/my.
+      tcp://user:pass@0.0.0.0:5000/path/my.fox?
           auth=1&enforce_path=1&rate=200000&timeout=5&wait=30&ssl=0
     """
     parts, o = _parse_net_url(url)  # already returns proto/host/port/timeout/ssl/etc.
@@ -14293,7 +14854,7 @@ def run_udp_file_server(fileobj, url, on_progress=None):
     Ends after serving exactly one client or wait window elapses.
 
     URL example:
-      udp://user:pass@0.0.0.0:5001/path/my.
+      udp://user:pass@0.0.0.0:5001/path/my.fox?
           auth=1&enforce_path=1&rate=250000&timeout=5&wait=30
     """
     parts, o = _parse_net_url(url)