PyArchiveFile 0.26.0__py3-none-any.whl → 0.27.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pyarchivefile-0.26.0.data → pyarchivefile-0.27.2.data}/scripts/archivefile.py +6 -6
- {pyarchivefile-0.26.0.dist-info → pyarchivefile-0.27.2.dist-info}/METADATA +1 -1
- pyarchivefile-0.27.2.dist-info/RECORD +8 -0
- pyarchivefile.py +907 -244
- pyarchivefile-0.26.0.data/scripts/archiveneofile.py +0 -130
- pyarchivefile-0.26.0.data/scripts/neoarchivefile.py +0 -136
- pyarchivefile-0.26.0.dist-info/RECORD +0 -10
- {pyarchivefile-0.26.0.dist-info → pyarchivefile-0.27.2.dist-info}/WHEEL +0 -0
- {pyarchivefile-0.26.0.dist-info → pyarchivefile-0.27.2.dist-info}/licenses/LICENSE +0 -0
- {pyarchivefile-0.26.0.dist-info → pyarchivefile-0.27.2.dist-info}/top_level.txt +0 -0
- {pyarchivefile-0.26.0.dist-info → pyarchivefile-0.27.2.dist-info}/zip-safe +0 -0
pyarchivefile.py
CHANGED
@@ -14,7 +14,7 @@
 Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
 Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski

-$FileInfo: pyarchivefile.py - Last Update: 11/
+$FileInfo: pyarchivefile.py - Last Update: 11/15/2025 Ver. 0.27.2 RC 1 - Author: cooldude2k $
 '''

 from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -24,7 +24,6 @@ import re
 import sys
 import time
 import stat
-import zlib
 import mmap
 import hmac
 import base64
@@ -38,8 +37,8 @@ import zipfile
 import binascii
 import datetime
 import platform
+import collections
 from io import StringIO, BytesIO
-from collections import namedtuple
 import posixpath # POSIX-safe joins/normpaths
 try:
     from backports import tempfile
@@ -48,14 +47,16 @@ except ImportError:

 try:
     from http.server import BaseHTTPRequestHandler, HTTPServer
-    from socketserver import TCPServer
     from urllib.parse import urlparse, parse_qs
-    import base64
 except ImportError:
     from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
-    from SocketServer import TCPServer
     from urlparse import urlparse, parse_qs
-
+
+try:
+    # Python 3.8+ only
+    from multiprocessing import shared_memory
+except ImportError:
+    shared_memory = None

 # FTP Support
 ftpssl = True
@@ -146,6 +147,15 @@ try:
 except Exception:
     PATH_TYPES = (basestring,)

+def running_interactively():
+    main = sys.modules.get("__main__")
+    no_main_file = not hasattr(main, "__file__")
+    interactive_flag = bool(getattr(sys.flags, "interactive", 0))
+    return no_main_file or interactive_flag
+
+if running_interactively():
+    logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
+
 def _ensure_text(s, encoding="utf-8", errors="replace", allow_none=False):
     """
     Normalize any input to text_type (unicode on Py2, str on Py3).
@@ -166,7 +176,6 @@ def _ensure_text(s, encoding="utf-8", errors="replace", allow_none=False):

     # Handle pathlib.Path & other path-like objects
     try:
-        import os
         if hasattr(os, "fspath"):
             fs = os.fspath(s)
             if isinstance(fs, text_type):
@@ -207,7 +216,6 @@ except ImportError:

 # Windows-specific setup
 if os.name == "nt":
-    import io
     def _wrap(stream):
         buf = getattr(stream, "buffer", None)
         is_tty = getattr(stream, "isatty", lambda: False)()
@@ -444,7 +452,13 @@ if('PYARCHIVEFILE_CONFIG_FILE' in os.environ and os.path.exists(os.environ['PYAR
 else:
     prescriptpath = get_importing_script_path()
     if(prescriptpath is not None):
-
+        if(__use_ini_file__ and not __use_json_file__):
+            scriptconf = os.path.join(os.path.dirname(prescriptpath), __use_ini_name__)
+        elif(__use_json_file__ and not __use_ini_file__):
+            scriptconf = os.path.join(os.path.dirname(prescriptpath), __use_json_name__)
+        else:
+            scriptconf = ""
+            prescriptpath = None
     else:
         scriptconf = ""
     if os.path.exists(scriptconf):
@@ -641,12 +655,12 @@ __project__ = __program_name__
 __program_alt_name__ = __program_name__
 __project_url__ = "https://github.com/GameMaker2k/PyArchiveFile"
 __project_release_url__ = __project_url__+"/releases/latest"
-__version_info__ = (0,
-__version_date_info__ = (2025, 11,
+__version_info__ = (0, 27, 2, "RC 1", 1)
+__version_date_info__ = (2025, 11, 15, "RC 1", 1)
 __version_date__ = str(__version_date_info__[0]) + "." + str(
     __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
 __revision__ = __version_info__[3]
-__revision_id__ = "$Id:
+__revision_id__ = "$Id: 8fdff8949e2d0bce935d7cea2b59865dbf4911af $"
 if(__version_info__[4] is not None):
     __version_date_plusrc__ = __version_date__ + \
         "-" + str(__version_date_info__[4])
@@ -798,9 +812,9 @@ except Exception:
 geturls_ua_pyfile_python = "Mozilla/5.0 (compatible; {proname}/{prover}; +{prourl})".format(
     proname=__project__, prover=__version__, prourl=__project_url__)
 if(platform.python_implementation() != ""):
-    py_implementation = platform.python_implementation()
+    py_implementation = platform.python_implementation()+str(platform.python_version_tuple()[0])
 if(platform.python_implementation() == ""):
-    py_implementation = "CPython"
+    py_implementation = "CPython"+str(platform.python_version_tuple()[0])
 geturls_ua_pyfile_python_alt = "Mozilla/5.0 ({osver}; {archtype}; +{prourl}) {pyimp}/{pyver} (KHTML, like Gecko) {proname}/{prover}".format(osver=platform.system(
 )+" "+platform.release(), archtype=platform.machine(), prourl=__project_url__, pyimp=py_implementation, pyver=platform.python_version(), proname=__project__, prover=__version__)
 geturls_ua_googlebot_google = "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"
@@ -816,13 +830,19 @@ geturls_headers_googlebot_google_old = {'Referer': "http://google.com/", 'User-A

 compressionsupport = []
 try:
-
+    try:
+        import compression.gzip as gzip
+    except ImportError:
+        import gzip
     compressionsupport.append("gz")
     compressionsupport.append("gzip")
 except ImportError:
     pass
 try:
-
+    try:
+        import compression.bz2 as bz2
+    except ImportError:
+        import bz2
     compressionsupport.append("bz2")
     compressionsupport.append("bzip2")
 except ImportError:
@@ -843,35 +863,39 @@ except ImportError:
     pass
 '''
 try:
-
+    try:
+        import compression.zstd as zstd
+    except ImportError:
+        import pyzstd.zstdfile as zstd
     compressionsupport.append("zst")
     compressionsupport.append("zstd")
     compressionsupport.append("zstandard")
 except ImportError:
+    pass
+try:
     try:
-        import
-        compressionsupport.append("zst")
-        compressionsupport.append("zstd")
-        compressionsupport.append("zstandard")
+        import compression.lzma as lzma
     except ImportError:
-
-
-
+        try:
+            import lzma
+        except ImportError:
+            from backports import lzma
     compressionsupport.append("lzma")
     compressionsupport.append("xz")
 except ImportError:
+    pass
+try:
     try:
-
-        compressionsupport.append("lzma")
-        compressionsupport.append("xz")
+        import compression.zlib as zlib
     except ImportError:
-
-        compressionsupport.append("zlib")
-        compressionsupport.append("zl")
-        compressionsupport.append("zz")
-        compressionsupport.append("Z")
-        compressionsupport.append("z")
-
+        import zlib
+    compressionsupport.append("zlib")
+    compressionsupport.append("zl")
+    compressionsupport.append("zz")
+    compressionsupport.append("Z")
+    compressionsupport.append("z")
+except ImportError:
+    pass
 compressionlist = ['auto']
 compressionlistalt = []
 outextlist = []
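Note on the restructured imports above: each codec now gets its own try/except with an inner fallback that prefers the stdlib `compression.*` namespace (CPython 3.14 added `compression.zstd`, with `compression.*` aliases for the older modules) before the classic module name or a third-party backend, so a missing optional dependency only drops that codec's entries from `compressionsupport`. A hedged sketch of probing support at runtime, assuming `pyarchivefile` exposes `compressionsupport` as shown:

    import pyarchivefile

    # 'zstd' is registered only if compression.zstd or pyzstd imported above.
    if "zstd" in pyarchivefile.compressionsupport:
        print("zstd-compressed archives supported")
    else:
        print("install pyzstd (or use Python 3.14+) for zstd support")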
@@ -1045,6 +1069,14 @@ def to_ns(timestamp):
     # Multiply by 1e9 to get nanoseconds, then cast to int
     return int(seconds * 1000000000)

+def format_ns_utc(ts_ns, fmt='%Y-%m-%d %H:%M:%S'):
+    ts_ns = int(ts_ns)
+    sec, ns = divmod(ts_ns, 10**9)
+    dt = datetime.datetime.utcfromtimestamp(sec).replace(microsecond=ns // 1000)
+    base = dt.strftime(fmt)
+    ns_str = "%09d" % ns
+    return base + "." + ns_str
+
 def _split_posix(name):
     """
     Return a list of path parts without collapsing '..'.
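The new `format_ns_utc()` pairs with the existing `to_ns()`: one converts seconds to integer nanoseconds, the other renders integer nanoseconds as a UTC string with nine fractional digits. A hedged usage sketch (import path assumed; note that `int(seconds * 1000000000)` in `to_ns` can lose a few nanoseconds to float rounding, and `datetime.datetime.utcfromtimestamp` is deprecated since Python 3.12):

    from pyarchivefile import to_ns, format_ns_utc

    ns = to_ns(1731628800.5)  # 2024-11-15 00:00:00.5 UTC, as nanoseconds
    print(format_ns_utc(ns))  # ~'2024-11-15 00:00:00.500000000' (last digits may drift)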
@@ -2081,7 +2113,7 @@ def MkTempFile(data=None,
                spool_max=__spoolfile_size__,
                spool_dir=__use_spooldir__,
                reset_to_start=True,
-               memfd_name=
+               memfd_name=__program_name__,
                memfd_allow_sealing=False,
                memfd_flags_extra=0,
                on_create=None):
@@ -2573,6 +2605,384 @@ def _is_valid_zlib_header(cmf, flg):
         return False
     return True

+class SharedMemoryFile(object):
+    """
+    File-like wrapper around multiprocessing.shared_memory.SharedMemory.
+
+    Binary-only API, intended to behave similarly to a regular file opened in
+    'rb', 'wb', or 'r+b' modes (but backed by a fixed-size shared memory block).
+
+    Notes:
+      - Requires Python 3.8+ at runtime to actually use SharedMemory.
+      - On Python 2, importing is fine but constructing will raise RuntimeError.
+      - There is no automatic resizing; buffer size is fixed by SharedMemory.
+      - No real fileno(); this does not represent an OS-level file descriptor.
+      - For text mode, wrap this with io.TextIOWrapper on Python 3:
+            f = SharedMemoryFile(...)
+            tf = io.TextIOWrapper(f, encoding="utf-8")
+    """
+
+    def __init__(self, shm=None, name=None, create=False, size=0,
+                 mode='r+b', offset=0, unlink_on_close=False):
+        """
+        Parameters:
+          shm : existing SharedMemory object (preferred).
+          name : name of shared memory block (for attach or create).
+          create: if True, create new SharedMemory; else attach existing.
+          size : size in bytes (required when create=True).
+          mode : like 'rb', 'wb', 'r+b', 'ab' (binary only; 't' not supported).
+          offset: starting offset within the shared memory buffer.
+          unlink_on_close: if True, call shm.unlink() when close() is called.
+
+        Usage examples:
+
+            # Create new block and file-like wrapper
+            f = SharedMemoryFile(name=None, create=True, size=4096, mode='r+b')
+
+            # Attach to existing shared memory by name
+            f = SharedMemoryFile(name="xyz", create=False, mode='r+b')
+
+            # Wrap an existing SharedMemory object
+            shm = shared_memory.SharedMemory(create=True, size=1024)
+            f = SharedMemoryFile(shm=shm, mode='r+b')
+        """
+        if shared_memory is None:
+            # No SharedMemory available on this interpreter
+            raise RuntimeError("multiprocessing.shared_memory.SharedMemory "
+                               "is not available on this Python version")
+
+        if 't' in mode:
+            raise ValueError("SharedMemoryFile is binary-only; "
+                             "wrap it with io.TextIOWrapper for text")
+
+        self.mode = mode
+        self._closed = False
+        self._unlinked = False
+        self._unlink_on_close = bool(unlink_on_close)
+
+        if shm is not None:
+            self._shm = shm
+        else:
+            # name may be None when create=True
+            self._shm = shared_memory.SharedMemory(name=name, create=create, size=size)
+
+        self._buf = self._shm.buf
+        self._base_offset = int(offset)
+        if self._base_offset < 0 or self._base_offset > len(self._buf):
+            raise ValueError("offset out of range")
+
+        # We treat the accessible region as [base_offset, len(buf))
+        self._size = len(self._buf) - self._base_offset
+        self._pos = 0 # logical file position within that region
+
+    # ---------- basic properties ----------
+
+    @property
+    def name(self):
+        # SharedMemory name (may be None for anonymous)
+        return getattr(self._shm, "name", None)
+
+    @property
+    def closed(self):
+        return self._closed
+
+    def readable(self):
+        return ('r' in self.mode) or ('+' in self.mode)
+
+    def writable(self):
+        return any(ch in self.mode for ch in ('w', 'a', '+'))
+
+    def seekable(self):
+        return True
+
+    # ---------- core helpers ----------
+
+    def _check_closed(self):
+        if self._closed:
+            raise ValueError("I/O operation on closed SharedMemoryFile")
+
+    def _clamp_pos(self, pos):
+        if pos < 0:
+            return 0
+        if pos > self._size:
+            return self._size
+        return pos
+
+    def _region_bounds(self):
+        """Return (start, end) absolute indices into the SharedMemory buffer."""
+        start = self._base_offset + self._pos
+        end = self._base_offset + self._size
+        return start, end
+
+    # ---------- positioning ----------
+
+    def seek(self, offset, whence=0):
+        """
+        Seek to a new file position.
+
+        whence: 0 = from start, 1 = from current, 2 = from end.
+        """
+        self._check_closed()
+        offset = int(offset)
+        whence = int(whence)
+
+        if whence == 0: # from start
+            new_pos = offset
+        elif whence == 1: # from current
+            new_pos = self._pos + offset
+        elif whence == 2: # from end
+            new_pos = self._size + offset
+        else:
+            raise ValueError("invalid whence (expected 0, 1, or 2)")
+
+        self._pos = self._clamp_pos(new_pos)
+        return self._pos
+
+    def tell(self):
+        return self._pos
+
+    # ---------- reading ----------
+
+    def read(self, size=-1):
+        """
+        Read up to 'size' bytes (or to EOF if size<0 or None).
+        Returns bytes (py3) or str (py2).
+        """
+        self._check_closed()
+        if not self.readable():
+            raise IOError("SharedMemoryFile not opened for reading")
+
+        if size is None or size < 0:
+            size = self._size - self._pos
+        else:
+            size = int(size)
+            if size < 0:
+                size = 0
+
+        if size == 0:
+            return b'' if not PY2 else ''
+
+        start, end_abs = self._region_bounds()
+        available = end_abs - (self._base_offset + self._pos)
+        if available <= 0:
+            return b'' if not PY2 else ''
+
+        size = min(size, available)
+
+        abs_start = self._base_offset + self._pos
+        abs_end = abs_start + size
+
+        chunk = self._buf[abs_start:abs_end]
+        if PY2:
+            data = bytes(chunk) # bytes() -> str in py2
+        else:
+            data = bytes(chunk)
+
+        self._pos += len(data)
+        return data
+
+    def readline(self, size=-1):
+        """
+        Read a single line (ending with '\\n' or EOF).
+        If size >= 0, at most that many bytes are returned.
+        """
+        self._check_closed()
+        if not self.readable():
+            raise IOError("SharedMemoryFile not opened for reading")
+
+        # Determine maximum bytes we can scan
+        start, end_abs = self._region_bounds()
+        remaining = end_abs - (self._base_offset + self._pos)
+        if remaining <= 0:
+            return b'' if not PY2 else ''
+
+        if size is not None and size >= 0:
+            size = int(size)
+            max_len = min(size, remaining)
+        else:
+            max_len = remaining
+
+        abs_start = self._base_offset + self._pos
+        abs_max = abs_start + max_len
+
+        # Work on a local bytes slice for easy .find()
+        if PY2:
+            buf_bytes = bytes(self._buf[abs_start:abs_max])
+        else:
+            buf_bytes = bytes(self._buf[abs_start:abs_max])
+
+        idx = buf_bytes.find(b'\n')
+        if idx == -1:
+            # No newline; read entire chunk
+            line_bytes = buf_bytes
+        else:
+            line_bytes = buf_bytes[:idx + 1]
+
+        self._pos += len(line_bytes)
+
+        if PY2:
+            return line_bytes # already str
+        return line_bytes
+
+    def readinto(self, b):
+        """
+        Read bytes into a pre-allocated writable buffer (bytearray/memoryview).
+        Returns number of bytes read.
+        """
+        self._check_closed()
+        if not self.readable():
+            raise IOError("SharedMemoryFile not opened for reading")
+
+        # Normalize target buffer
+        if isinstance(b, memoryview):
+            mv = b
+        else:
+            mv = memoryview(b)
+
+        size = len(mv)
+        if size <= 0:
+            return 0
+
+        start, end_abs = self._region_bounds()
+        remaining = end_abs - (self._base_offset + self._pos)
+        if remaining <= 0:
+            return 0
+
+        size = min(size, remaining)
+
+        abs_start = self._base_offset + self._pos
+        abs_end = abs_start + size
+
+        mv[:size] = self._buf[abs_start:abs_end]
+        self._pos += size
+        return size
+
+    # ---------- writing ----------
+
+    def write(self, data):
+        """
+        Write bytes-like object to the shared memory region.
+
+        Returns number of bytes written. Will raise if not opened writable
+        or if writing would overflow the fixed-size region.
+        """
+        self._check_closed()
+        if not self.writable():
+            raise IOError("SharedMemoryFile not opened for writing")
+
+        if isinstance(data, memoryview):
+            data = bytes(data)
+        elif isinstance(data, bytearray):
+            data = bytes(data)
+
+        if not isinstance(data, binary_types):
+            raise TypeError("write() expects a bytes-like object")
+
+        data_len = len(data)
+        if data_len == 0:
+            return 0
+
+        # Handle "append" semantics roughly: start from end on first write
+        if 'a' in self.mode and self._pos == 0:
+            # Move to logical end of region
+            self._pos = self._size
+
+        start, end_abs = self._region_bounds()
+        remaining = end_abs - (self._base_offset + self._pos)
+        if data_len > remaining:
+            raise IOError("write would overflow SharedMemory region (need %d, have %d)"
+                          % (data_len, remaining))
+
+        abs_start = self._base_offset + self._pos
+        abs_end = abs_start + data_len
+
+        self._buf[abs_start:abs_end] = data
+        self._pos += data_len
+        return data_len
+
+    def flush(self):
+        """
+        No-op for shared memory; provided for file-like compatibility.
+        """
+        self._check_closed()
+        # nothing to flush
+
+    # ---------- unlink / close / context manager ----------
+
+    def unlink(self):
+        """
+        Unlink (destroy) the underlying shared memory block.
+
+        After unlink(), new processes cannot attach via name.
+        Existing attachments (including this one) can continue to use
+        the memory until they close() it.
+
+        This is idempotent: calling it more than once is safe.
+        """
+        if self._unlinked:
+            return
+
+        try:
+            self._shm.unlink()
+        except AttributeError:
+            # Should not happen on normal Python 3.8+,
+            # but keep a clear error if it does.
+            raise RuntimeError("Underlying SharedMemory object "
+                               "does not support unlink()")
+
+        self._unlinked = True
+
+    def close(self):
+        if self._closed:
+            return
+        self._closed = True
+
+        # Optionally unlink on close if requested
+        if self._unlink_on_close and not self._unlinked:
+            try:
+                self.unlink()
+            except Exception:
+                # best-effort; close anyway
+                pass
+
+        try:
+            self._shm.close()
+        except Exception:
+            pass
+
+    def __enter__(self):
+        self._check_closed()
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.close()
+
+    # ---------- iteration ----------
+
+    def __iter__(self):
+        return self
+
+    def __next__(self):
+        line = self.readline()
+        if (not line) or len(line) == 0:
+            raise StopIteration
+        return line
+
+    if PY2:
+        next = __next__
+
+    # ---------- misc helpers ----------
+
+    def fileno(self):
+        """
+        There is no real OS-level file descriptor; raise OSError for APIs
+        that require a fileno().
+        """
+        raise OSError("SharedMemoryFile does not have a real fileno()")
+
+    def isatty(self):
+        return False
+
 # ---------- Main class ----------
 class ZlibFile(object):
     """
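A hedged usage sketch of the new SharedMemoryFile, following its own docstring (Python 3.8+ only; the `from pyarchivefile import ...` path is an assumption):

    from pyarchivefile import SharedMemoryFile

    # Create a 4 KiB block and use it through the file-like API.
    f = SharedMemoryFile(create=True, size=4096, mode='r+b')
    f.write(b"hello\n")
    f.seek(0)
    print(f.readline())  # b'hello\n'

    # A second attachment by name sees the same bytes.
    g = SharedMemoryFile(name=f.name, create=False, mode='rb')
    print(g.read(6))     # b'hello\n'
    g.close()

    f.unlink()  # destroy the block; existing attachments keep working
    f.close()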
@@ -4464,7 +4874,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
         extrastart = extrastart + 1
     fvendorfieldslist = []
     fvendorfields = 0;
-    if(len(HeaderOut)>extraend):
+    if((len(HeaderOut) - 4)>extraend):
         extrastart = extraend
         extraend = len(HeaderOut) - 4
         while(extrastart < extraend):
@@ -4684,6 +5094,15 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
     while(extrastart < extraend):
         fextrafieldslist.append(HeaderOut[extrastart])
         extrastart = extrastart + 1
+    fvendorfieldslist = []
+    fvendorfields = 0;
+    if((len(HeaderOut) - 4)>extraend):
+        extrastart = extraend
+        extraend = len(HeaderOut) - 4
+        while(extrastart < extraend):
+            fvendorfieldslist.append(HeaderOut[extrastart])
+            extrastart = extrastart + 1
+            fvendorfields = fvendorfields + 1
     if(fextrafields==1):
         try:
             fextrafieldslist = json.loads(base64.b64decode(fextrafieldslist[0]).decode("UTF-8"))
@@ -4693,6 +5112,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
             fextrafieldslist = json.loads(fextrafieldslist[0])
         except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
             pass
+    fjstart = fp.tell()
     if(fjsontype=="json"):
         fjsoncontent = {}
         fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
@@ -4759,6 +5179,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
         except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
             pass
     fp.seek(len(delimiter), 1)
+    fjend = fp.tell() - 1
     jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
     if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
         VerbosePrintOut("File JSON Data Checksum Error with file " +
@@ -4791,6 +5212,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
         pyhascontents = False
     fcontents.seek(0, 0)
     newfccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
+    fcontents.seek(0, 0)
     if(not CheckChecksums(fccs, newfccs) and not skipchecksum and not listonly):
         VerbosePrintOut("File Content Checksum Error with file " +
                         fname + " at offset " + str(fcontentstart))
@@ -4830,8 +5252,8 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
     fcontents.seek(0, 0)
     if(not contentasfile):
         fcontents = fcontents.read()
-    outlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
-               finode, flinkcount, fdev, frdev, fseeknextfile, fjsoncontent,
+    outlist = {'fheaders': [ftype, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+               fcsize, fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev, frdev, fseeknextfile], 'fextradata': fextrafieldslist, 'fjsoncontent': fjsoncontent, 'fcontents': fcontents, 'fjsonchecksumtype': fjsonchecksumtype, 'fheaderchecksumtype': HeaderOut[-4].lower(), 'fcontentchecksumtype': HeaderOut[-3].lower()}
     return outlist


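Callers should note that ReadFileHeaderDataWithContentToList() now returns a dict rather than a flat list. A hedged sketch of adapting to the new shape, using only the keys visible in the hunk above (fp is assumed to be an open archive positioned at a file record):

    entry = ReadFileHeaderDataWithContentToList(fp, contentasfile=True)
    headers = entry['fheaders']    # the former positional list
    fname = headers[3]             # field order as listed above
    payload = entry['fcontents']   # file-like unless contentasfile=False
    extras = entry['fextradata']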
@@ -4848,6 +5270,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
         CatSizeEnd = CatSize
     fp.seek(curloc, 0)
     inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+    headeroffset = fp.tell()
     formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
     formdelszie = len(formatspecs['format_delimiter'])
     formdel = fp.read(formdelszie).decode("UTF-8")
@@ -4867,7 +5290,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
     newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
     if(not headercheck and not skipchecksum):
         VerbosePrintOut(
-            "File Header Checksum Error with file at offset " + str(
+            "File Header Checksum Error with file at offset " + str(headeroffset))
         VerbosePrintOut("'" + fprechecksum + "' != " +
                         "'" + newfcs + "'")
         return False
@@ -4919,6 +5342,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
         CatSizeEnd = CatSize
     fp.seek(curloc, 0)
     inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+    headeroffset = fp.tell()
     formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
     formdelszie = len(formatspecs['format_delimiter'])
     formdel = fp.read(formdelszie).decode("UTF-8")
@@ -4951,7 +5375,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
         pass
     fvendorfieldslist = []
     fvendorfields = 0;
-    if(len(inheader)>extraend):
+    if((len(inheader) - 2)>extraend):
         extrastart = extraend
         extraend = len(inheader) - 2
         while(extrastart < extraend):
@@ -4961,8 +5385,8 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
     formversion = re.findall("([\\d]+)", formstring)
     fheadsize = int(inheader[0], 16)
     fnumfields = int(inheader[1], 16)
-    fheadctime = int(inheader[
-    fheadmtime = int(inheader[
+    fheadctime = int(inheader[2], 16)
+    fheadmtime = int(inheader[3], 16)
     fhencoding = inheader[4]
     fostype = inheader[5]
     fpythontype = inheader[6]
@@ -5071,7 +5495,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
     newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
     if(not headercheck and not skipchecksum):
         VerbosePrintOut(
-            "File Header Checksum Error with file at offset " + str(
+            "File Header Checksum Error with file at offset " + str(headeroffset))
         VerbosePrintOut("'" + fprechecksum + "' != " +
                         "'" + newfcs + "'")
         return False
@@ -5181,6 +5605,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
         CatSizeEnd = CatSize
     fp.seek(curloc, 0)
     inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+    headeroffset = fp.tell()
     formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
     formdelszie = len(formatspecs['format_delimiter'])
     formdel = fp.read(formdelszie).decode("UTF-8")
@@ -5208,22 +5633,101 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
             fnumextrafields = len(fextrafieldslist)
         except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
             try:
-                fextrafieldslist = json.loads(fextrafieldslist[0])
+                fextrafieldslist = json.loads(fextrafieldslist[0])
+            except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                pass
+    fvendorfieldslist = []
+    fvendorfields = 0;
+    if((len(inheader) - 2)>extraend):
+        extrastart = extraend
+        extraend = len(inheader) - 2
+        while(extrastart < extraend):
+            fvendorfieldslist.append(HeaderOut[extrastart])
+            extrastart = extrastart + 1
+            fvendorfields = fvendorfields + 1
+    formversion = re.findall("([\\d]+)", formstring)
+    fheadsize = int(inheader[0], 16)
+    fnumfields = int(inheader[1], 16)
+    fheadctime = int(inheader[2], 16)
+    fheadmtime = int(inheader[3], 16)
+    fhencoding = inheader[4]
+    fostype = inheader[5]
+    fpythontype = inheader[6]
+    fprojectname = inheader[7]
+    fnumfiles = int(inheader[8], 16)
+    fseeknextfile = inheader[9]
+    fjsontype = inheader[10]
+    fjsonlen = int(inheader[11], 16)
+    fjsonsize = int(inheader[12], 16)
+    fjsonchecksumtype = inheader[13]
+    fjsonchecksum = inheader[14]
+    fjsoncontent = {}
+    fjstart = fp.tell()
+    if(fjsontype=="json"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        if(fjsonsize > 0):
+            try:
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
+            except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                try:
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = json.loads(fprejsoncontent)
+                except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                    fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = {}
+        else:
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
+            fjsoncontent = {}
+    elif(testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        if (fjsonsize > 0):
+            try:
+                # try base64 → utf-8 → YAML
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+            except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
+                try:
+                    # fall back to treating the bytes as plain text YAML
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+                except (UnicodeDecodeError, yaml.YAMLError):
+                    # final fallback: empty
+                    fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = {}
+        else:
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
+            fjsoncontent = {}
+    elif(not testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        fprejsoncontent = ""
+        fjsonrawcontent = fprejsoncontent
+    elif(fjsontype=="list"):
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        flisttmp = MkTempFile()
+        flisttmp.write(fprejsoncontent.encode())
+        flisttmp.seek(0)
+        fjsoncontent = ReadFileHeaderData(flisttmp, fjsonlen, delimiter)
+        flisttmp.close()
+        fjsonrawcontent = fjsoncontent
+        if(fjsonlen==1):
+            try:
+                fjsonrawcontent = base64.b64decode(fjsoncontent[0]).decode("UTF-8")
+                fjsoncontent = json.loads(base64.b64decode(fjsoncontent[0]).decode("UTF-8"))
+                fjsonlen = len(fjsoncontent)
             except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
-
-
-
-
-
-    fseeknextfile = inheaderdata[9]
-    fjsontype = int(inheader[10], 16)
-    fjsonlen = int(inheader[11], 16)
-    fjsonsize = int(inheader[12], 16)
-    fjsonchecksumtype = inheader[13]
-    fjsonchecksum = inheader[14]
-    fjsoncontent = {}
-    fjstart = fp.tell()
-    fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+                try:
+                    fjsonrawcontent = fjsoncontent[0]
+                    fjsoncontent = json.loads(fjsoncontent[0])
+                except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                    pass
     fjend = fp.tell()
     if(re.findall("^\\+([0-9]+)", fseeknextfile)):
         fseeknextasnum = int(fseeknextfile.replace("+", ""))
@@ -5254,7 +5758,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
     newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
     if(not headercheck and not skipchecksum):
         VerbosePrintOut(
-            "File Header Checksum Error with file at offset " + str(
+            "File Header Checksum Error with file at offset " + str(headeroffset))
         VerbosePrintOut("'" + fprechecksum + "' != " +
                         "'" + newfcs + "'")
         return False
@@ -5758,7 +6262,7 @@ def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksu
     else:
         fctime = format(int(to_ns(time.time())), 'x').lower()
     # Serialize the first group
-    fnumfilesa = AppendNullBytes([tmpoutlenhex, fctime, fctime, fencoding, platform.system(), py_implementation, __program_name__, fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
+    fnumfilesa = AppendNullBytes([tmpoutlenhex, fctime, fctime, fencoding, platform.system(), py_implementation, __program_name__+str(__version_info__[0]), fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
     # Append tmpoutlist
     fnumfilesa += AppendNullBytes(tmpoutlist, delimiter)
     # Append extradata items if any
@@ -5984,22 +6488,33 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
         pass
     return fp

-def
-    if(not hasattr(fp, "write")):
-        return False
+def AppendFilesWithContentToList(infiles, dirlistfromtxt=False, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
     advancedlist = __use_advanced_list__
     altinode = __use_alt_inode__
     infilelist = []
-    if(infiles == "-"):
+    if(not dirlistfromtxt and not isinstance(infiles, (list, tuple, )) and infiles == "-"):
         for line in PY_STDIN_TEXT:
             infilelist.append(line.strip())
         infilelist = list(filter(None, infilelist))
-
-
-
-
-
-
+    if(not dirlistfromtxt and isinstance(infiles, (list, tuple, )) and len(infiles)==1 and infiles[0] == "-"):
+        for line in PY_STDIN_TEXT:
+            infilelist.append(line.strip())
+        infilelist = list(filter(None, infilelist))
+    elif(dirlistfromtxt):
+        if(not isinstance(infiles, (list, tuple, ))):
+            infiles = [infiles]
+        if(isinstance(infiles, (list, tuple, ))):
+            for fileloc in infiles:
+                if(fileloc == "-"):
+                    for line in PY_STDIN_TEXT:
+                        infilelist.append(line.strip())
+                else:
+                    if(not os.path.exists(fileloc) or not os.path.isfile(fileloc)):
+                        return False
+                    else:
+                        with UncompressFile(fileloc, formatspecs, "r") as finfile:
+                            for line in finfile:
+                                infilelist.append(line.strip())
         infilelist = list(filter(None, infilelist))
     else:
         if(isinstance(infiles, (list, tuple, ))):
@@ -6028,16 +6543,8 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
     inodetofile = {}
     filetoinode = {}
     inodetoforminode = {}
-    numfiles = int(len(GetDirList))
-    fnumfiles = format(numfiles, 'x').lower()
-    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
-    try:
-        fp.flush()
-        if(hasattr(os, "sync")):
-            os.fsync(fp.fileno())
-    except (io.UnsupportedOperation, AttributeError, OSError):
-        pass
     FullSizeFilesAlt = 0
+    tmpoutlist = []
     for curfname in GetDirList:
         fencoding = "UTF-8"
         if(re.findall("^[.|/]", curfname)):
@@ -6212,7 +6719,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
         typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
         fcontents.seek(0, 0)
         if(typechecktest is not False):
-            typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=
+            typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
         fcontents.seek(0, 0)
         fcencoding = GetFileEncoding(fcontents, 0, False)[0]
         if(typechecktest is False and not compresswholefile):
@@ -6261,7 +6768,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
         typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
         fcontents.seek(0, 0)
         if(typechecktest is not False):
-            typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=
+            typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
         fcontents.seek(0, 0)
         fcencoding = GetFileEncoding(fcontents, 0, False)[0]
         if(typechecktest is False and not compresswholefile):
@@ -6305,10 +6812,29 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
         if(fcompression == "none"):
             fcompression = ""
         fcontents.seek(0, 0)
+        if(not contentasfile):
+            fcontents = fcontents.read()
         ftypehex = format(ftype, 'x').lower()
-        tmpoutlist
-
-
+        tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+                           fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+    return tmpoutlist
+
+def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+    GetDirList = AppendFilesWithContentToList(infiles, dirlistfromtxt, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, followlink, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+    if(not hasattr(fp, "write")):
+        return False
+    numfiles = int(len(GetDirList))
+    fnumfiles = format(numfiles, 'x').lower()
+    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+    try:
+        fp.flush()
+        if(hasattr(os, "sync")):
+            os.fsync(fp.fileno())
+    except (io.UnsupportedOperation, AttributeError, OSError):
+        pass
+    for curfname in GetDirList:
+        tmpoutlist = curfname['fheaders']
+        AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
         try:
             fp.flush()
             if(hasattr(os, "sync")):
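The append path is now split in two: AppendFilesWithContentToList() builds per-file dicts ('fheaders', 'fextradata', 'fjsoncontent', 'fcontents', plus the three checksum-type keys), and AppendFilesWithContent() consumes that list, which lets it know numfiles before AppendFileHeader() is written. A hedged sketch of the two entry points (file names and output path are illustrative):

    with open("out.arc", "wb") as fp:
        AppendFilesWithContent(["a.txt", "b.txt"], fp)

    # Or inspect the intermediate list without writing anything:
    entries = AppendFilesWithContentToList(["a.txt", "b.txt"])
    print(len(entries), entries[0]['fheaders'][3])  # file count, first name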
@@ -6317,16 +6843,14 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
         pass
     return fp

-def
-    if(not hasattr(fp, "write")):
-        return False
+def AppendFilesWithContentFromTarFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
     curinode = 0
     curfid = 0
     inodelist = []
     inodetofile = {}
     filetoinode = {}
     inodetoforminode = {}
-    if(infile == "-"):
+    if(not isinstance(infile, (list, tuple, )) and infile == "-"):
         infile = MkTempFile()
         shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
         infile.seek(0, 0)
@@ -6361,10 +6885,8 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
     if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
         formatspecs = formatspecs[compresscheck]
     if(compresscheck=="zstd"):
-        if '
-            infile = ZstdFile(
-        elif 'pyzstd' in sys.modules:
-            infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+        if 'zstd' in compressionsupport:
+            infile = zstd.ZstdFile(infile, mode="rb")
         tarfp = tarfile.open(fileobj=infile, mode="r")
     else:
         tarfp = tarfile.open(fileobj=infile, mode="r")
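The zstd branch now keys off the `compressionsupport` list and the unified `zstd` alias bound during import (stdlib `compression.zstd` or `pyzstd.zstdfile`), instead of probing `sys.modules`. A hedged sketch of the same selection logic in isolation (`infile` is any binary file object containing a zstd-compressed tar):

    import tarfile

    # zstd here is whichever backend imported successfully earlier.
    if 'zstd' in compressionsupport:
        infile = zstd.ZstdFile(infile, mode="rb")
    tarfp = tarfile.open(fileobj=infile, mode="r")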
@@ -6373,23 +6895,14 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
     if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
         formatspecs = formatspecs[compresscheck]
     if(compresscheck=="zstd"):
-        if '
-            infile = ZstdFile(
-        elif 'pyzstd' in sys.modules:
-            infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+        if 'zstd' in compressionsupport:
+            infile = zstd.ZstdFile(infile, mode="rb")
         tarfp = tarfile.open(fileobj=infile, mode="r")
     else:
         tarfp = tarfile.open(infile, "r")
     except FileNotFoundError:
         return False
-
-    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
-    try:
-        fp.flush()
-        if(hasattr(os, "sync")):
-            os.fsync(fp.fileno())
-    except (io.UnsupportedOperation, AttributeError, OSError):
-        pass
+    tmpoutlist = []
     for member in sorted(tarfp.getmembers(), key=lambda x: x.name):
         fencoding = "UTF-8"
         if(re.findall("^[.|/]", member.name)):
@@ -6402,16 +6915,10 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
         ffullmode = member.mode
         flinkcount = 0
         fblksize = 0
-        if(hasattr(fstatinfo, "st_blksize")):
-            fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
         fblocks = 0
-        if(hasattr(fstatinfo, "st_blocks")):
-            fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
         fflags = 0
-        if(hasattr(fstatinfo, "st_flags")):
-            fflags = format(int(fstatinfo.st_flags), 'x').lower()
         ftype = 0
-        if(member.isreg()):
+        if(member.isreg() or member.isfile()):
             ffullmode = member.mode + stat.S_IFREG
             ftype = 0
         elif(member.islnk()):
@@ -6487,7 +6994,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
         typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
         fcontents.seek(0, 0)
         if(typechecktest is not False):
-            typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=
+            typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
         fcontents.seek(0, 0)
         fcencoding = GetFileEncoding(fcontents, 0, False)[0]
         if(typechecktest is False and not compresswholefile):
@@ -6531,29 +7038,45 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
         if(fcompression == "none"):
             fcompression = ""
         fcontents.seek(0, 0)
+        if(not contentasfile):
+            fcontents = fcontents.read()
         ftypehex = format(ftype, 'x').lower()
-        tmpoutlist
-
-
+        tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+                           fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+    return tmpoutlist
+
+def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+    if(not hasattr(fp, "write")):
+        return False
+    GetDirList = AppendFilesWithContentFromTarFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+    numfiles = int(len(GetDirList))
+    fnumfiles = format(numfiles, 'x').lower()
+    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+    try:
+        fp.flush()
+        if(hasattr(os, "sync")):
+            os.fsync(fp.fileno())
+    except (io.UnsupportedOperation, AttributeError, OSError):
+        pass
+    for curfname in GetDirList:
+        tmpoutlist = curfname['fheaders']
+        AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
         try:
             fp.flush()
             if(hasattr(os, "sync")):
                 os.fsync(fp.fileno())
         except (io.UnsupportedOperation, AttributeError, OSError):
             pass
-        fcontents.close()
     return fp

-def
-    if(not hasattr(fp, "write")):
-        return False
+def AppendFilesWithContentFromZipFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
     curinode = 0
     curfid = 0
     inodelist = []
     inodetofile = {}
     filetoinode = {}
     inodetoforminode = {}
-    if(infile == "-"):
+    if(not isinstance(infile, (list, tuple, )) and infile == "-"):
         infile = MkTempFile()
         shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
         infile.seek(0, 0)
@@ -6579,14 +7102,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
     ziptest = zipfp.testzip()
     if(ziptest):
         VerbosePrintOut("Bad file found!")
-
-    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
-    try:
-        fp.flush()
-        if(hasattr(os, "sync")):
-            os.fsync(fp.fileno())
-    except (io.UnsupportedOperation, AttributeError, OSError):
-        pass
+    tmpoutlist = []
     for member in sorted(zipfp.infolist(), key=lambda x: x.filename):
         fencoding = "UTF-8"
         if(re.findall("^[.|/]", member.filename)):
@@ -6602,20 +7118,18 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
         fpremode = int(stat.S_IFREG | 0x1b6)
         flinkcount = 0
         fblksize = 0
-        if(hasattr(fstatinfo, "st_blksize")):
-            fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
         fblocks = 0
-        if(hasattr(fstatinfo, "st_blocks")):
-            fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
         fflags = 0
-        if(hasattr(fstatinfo, "st_flags")):
-            fflags = format(int(fstatinfo.st_flags), 'x').lower()
         ftype = 0
         if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
             ftype = 5
+        elif ((hasattr(member, "symlink") and member.symlink())):
+            ftype = 2
         else:
             ftype = 0
         flinkname = ""
+        if(ftype==2):
+            flinkname = zipfp.read(member.filename).decode("UTF-8")
         fcurfid = format(int(curfid), 'x').lower()
         fcurinode = format(int(curfid), 'x').lower()
         curfid = curfid + 1
@@ -6641,6 +7155,10 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
             fmode = format(int(stat.S_IFDIR | 0x1ff), 'x').lower()
             fchmode = stat.S_IMODE(int(stat.S_IFDIR | 0x1ff))
             ftypemod = stat.S_IFMT(int(stat.S_IFDIR | 0x1ff))
+        elif ((hasattr(member, "symlink") and member.symlink()) or member.filename.endswith('/')):
+            fmode = format(int(stat.S_IFREG | 0x1b6), 'x').lower()
+            fchmode = stat.S_IMODE(int(stat.S_IFREG | 0x1b6))
+            ftypemod = stat.S_IFMT(int(stat.S_IFREG | 0x1b6))
         else:
             fmode = format(int(stat.S_IFREG | 0x1b6), 'x').lower()
             fchmode = stat.S_IMODE(int(stat.S_IFREG | 0x1b6))
@@ -6649,6 +7167,17 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
            fwinattributes = format(int(zipinfo.external_attr & 0xFFFF), 'x').lower()
            fmode = format(int((zipinfo.external_attr >> 16) & 0xFFFF), 'x').lower()
            prefmode = int((zipinfo.external_attr >> 16) & 0xFFFF)
+           if(prefmode==0):
+               fmode = 0
+               prefmode = 0
+           else:
+               file_type = prefmode & 0xF000
+               if(file_type not in (stat.S_IFREG, stat.S_IFDIR, stat.S_IFLNK)):
+                   fmode = 0
+                   prefmode = 0
+               if((mode & 0x1FF) == 0):
+                   fmode = 0
+                   prefmode = 0
            if (prefmode == 0):
                if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
                    fmode = format(int(stat.S_IFDIR | 0x1ff), 'x').lower()
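
A note on the sanitizing branch added above: its final check reads `(mode & 0x1FF)`, but no name `mode` is bound at that point in the function; from context it appears to be meant as `prefmode`. A standalone sketch of the apparent intent (function and variable names here are mine, not the library's):

    import stat

    def sanitize_zip_unix_mode(external_attr):
        # The high 16 bits of a ZIP entry's external_attr hold the Unix mode.
        mode = (external_attr >> 16) & 0xFFFF
        if mode == 0:
            return 0
        # Discard modes whose file-type bits are not regular file, directory,
        # or symlink, and modes whose permission bits are all zero.
        if stat.S_IFMT(mode) not in (stat.S_IFREG, stat.S_IFDIR, stat.S_IFLNK):
            return 0
        if (mode & 0o777) == 0:
            return 0
        return mode
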
@@ -6749,26 +7278,44 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
         if(fcompression == "none"):
             fcompression = ""
         fcontents.seek(0, 0)
+        if(not contentasfile):
+            fcontents = fcontents.read()
         ftypehex = format(ftype, 'x').lower()
-        tmpoutlist
-
-
+        tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+                                        fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+    return tmpoutlist
+
+
+def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+    if(not hasattr(fp, "write")):
+        return False
+    GetDirList = AppendFilesWithContentFromZipFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+    numfiles = int(len(GetDirList))
+    fnumfiles = format(numfiles, 'x').lower()
+    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+    try:
+        fp.flush()
+        if(hasattr(os, "sync")):
+            os.fsync(fp.fileno())
+    except (io.UnsupportedOperation, AttributeError, OSError):
+        pass
+    for curfname in GetDirList:
+        tmpoutlist = curfname['fheaders']
+        AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
     try:
         fp.flush()
         if(hasattr(os, "sync")):
             os.fsync(fp.fileno())
     except (io.UnsupportedOperation, AttributeError, OSError):
         pass
-    fcontents.close()
     return fp


 if(not rarfile_support):
-    def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+    def AppendFilesWithContentFromRarFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
         return False
-else:
     def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
-
-
+        return False
+else:
+    def AppendFilesWithContentFromRarFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
         curinode = 0
         curfid = 0
         inodelist = []
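
The refactor above splits header collection from archive writing: `AppendFilesWithContentFromZipFileToList` gathers one record dict per ZIP member, and `AppendFilesWithContentFromZipFile` replays those records through `AppendFileHeaderWithContent`. A minimal usage sketch, assuming the module-level defaults shown in the diff (the archive name is illustrative):

    import io
    import pyarchivefile

    # Collect per-member header/content records without writing anything yet.
    records = pyarchivefile.AppendFilesWithContentFromZipFileToList("input.zip")

    # Or convert the ZIP straight into an archive stream in one call.
    out = io.BytesIO()
    pyarchivefile.AppendFilesWithContentFromZipFile("input.zip", out)
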
@@ -6783,20 +7330,7 @@ else:
         rartest = rarfp.testrar()
         if(rartest):
             VerbosePrintOut("Bad file found!")
-
-        AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
-        try:
-            fp.flush()
-            if(hasattr(os, "sync")):
-                os.fsync(fp.fileno())
-        except (io.UnsupportedOperation, AttributeError, OSError):
-            pass
-        try:
-            fp.flush()
-            if(hasattr(os, "sync")):
-                os.fsync(fp.fileno())
-        except (io.UnsupportedOperation, AttributeError, OSError):
-            pass
+        tmpoutlist = []
         for member in sorted(rarfp.infolist(), key=lambda x: x.filename):
             is_unix = False
             is_windows = False
@@ -6841,14 +7375,8 @@ else:
             fcsize = format(int(0), 'x').lower()
             flinkcount = 0
             fblksize = 0
-            if(hasattr(fstatinfo, "st_blksize")):
-                fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
             fblocks = 0
-            if(hasattr(fstatinfo, "st_blocks")):
-                fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
             fflags = 0
-            if(hasattr(fstatinfo, "st_flags")):
-                fflags = format(int(fstatinfo.st_flags), 'x').lower()
             ftype = 0
             if(member.is_file()):
                 ftype = 0
@@ -6986,26 +7514,84 @@ else:
             if(fcompression == "none"):
                 fcompression = ""
             fcontents.seek(0, 0)
+            if(not contentasfile):
+                fcontents = fcontents.read()
             ftypehex = format(ftype, 'x').lower()
-            tmpoutlist
-
-
+            tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+                                            fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+        return tmpoutlist
+    def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+        if(not hasattr(fp, "write")):
+            return False
+        GetDirList = AppendFilesWithContentFromRarFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+        numfiles = int(len(GetDirList))
+        fnumfiles = format(numfiles, 'x').lower()
+        AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+        try:
+            fp.flush()
+            if(hasattr(os, "sync")):
+                os.fsync(fp.fileno())
+        except (io.UnsupportedOperation, AttributeError, OSError):
+            pass
+        for curfname in GetDirList:
+            tmpoutlist = curfname['fheaders']
+            AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
         try:
             fp.flush()
             if(hasattr(os, "sync")):
                 os.fsync(fp.fileno())
         except (io.UnsupportedOperation, AttributeError, OSError):
             pass
-        fcontents.close()
         return fp


 if(not py7zr_support):
-    def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+    def sevenzip_readall(infile, **kwargs):
        return False
 else:
+    class _MemoryIO(py7zr.Py7zIO):
+        """In-memory file object used by py7zr's factory API."""
+        def __init__(self):
+            self._buf = bytearray()
+        def write(self, data):
+            # py7zr will call this repeatedly with chunks
+            self._buf.extend(data)
+        def read(self, size=None):
+            if size is None:
+                return bytes(self._buf)
+            return bytes(self._buf[:size])
+        def seek(self, offset, whence=0):
+            # we don't really need seeking for your use case
+            return 0
+        def flush(self):
+            pass
+        def size(self):
+            return len(self._buf)
+    class _MemoryFactory(py7zr.WriterFactory):
+        """Factory that creates _MemoryIO objects and keeps them by filename."""
+        def __init__(self):
+            self.files = {}
+        def create(self, filename: str) -> py7zr.Py7zIO:
+            io_obj = _MemoryIO()
+            self.files[filename] = io_obj
+            return io_obj
+    def sevenzip_readall(infile, **kwargs):
+        """
+        Replacement for SevenZipFile.readall() using the new py7zr API.

+        Returns: dict[filename -> _MemoryIO]
+        """
+        factory = _MemoryFactory()
+        with py7zr.SevenZipFile(infile, mode="r", **kwargs) as archive:
+            archive.extractall(factory=factory)
+        return factory.files
+
+if(not py7zr_support):
+    def AppendFilesWithContentFromSevenZipFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+        return False
     def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
-
-
+        return False
+else:
+    def AppendFilesWithContentFromSevenZipFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
         formver = formatspecs['format_ver']
         fileheaderver = str(int(formver.replace(".", "")))
         curinode = 0
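
The `_MemoryIO`/`_MemoryFactory` pair above backfills `SevenZipFile.readall()` for py7zr releases that dropped it in favor of the factory-based `extractall(factory=...)` API. A sketch of how the fallback is meant to be driven, mirroring the try/except used later in the diff (the archive name is illustrative):

    import py7zr
    import pyarchivefile

    try:
        with py7zr.SevenZipFile("input.7z", mode="r") as archive:
            contents = archive.readall()   # older py7zr: dict of file-like objects
    except AttributeError:
        # newer py7zr: fall back to the factory-based helper added above
        contents = pyarchivefile.sevenzip_readall("input.7z")

    for name, fileobj in contents.items():
        data = fileobj.read()
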
@@ -7017,19 +7603,15 @@ else:
         if(not os.path.exists(infile) or not os.path.isfile(infile)):
             return False
         szpfp = py7zr.SevenZipFile(infile, mode="r")
-        file_content = szpfp.readall()
+        try:
+            file_content = szpfp.readall()
+        except AttributeError:
+            file_content = sevenzip_readall(infile)
         #sztest = szpfp.testzip()
         sztestalt = szpfp.test()
         if(sztestalt):
             VerbosePrintOut("Bad file found!")
-
-        AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
-        try:
-            fp.flush()
-            if(hasattr(os, "sync")):
-                os.fsync(fp.fileno())
-        except (io.UnsupportedOperation, AttributeError, OSError):
-            pass
+        tmpoutlist = []
         for member in sorted(szpfp.list(), key=lambda x: x.filename):
             fencoding = "UTF-8"
             if(re.findall("^[.|/]", member.filename)):
@@ -7042,19 +7624,16 @@ else:
                 fpremode = int(stat.S_IFREG | 0x1b6)
             elif(member.is_directory):
                 fpremode = int(stat.S_IFDIR | 0x1ff)
-
+            try:
+                fwinattributes = format(int(member.attributes & 0xFFFF), 'x').lower()
+            except AttributeError:
+                fwinattributes = format(int(0), 'x').lower()
             fcompression = ""
             fcsize = format(int(0), 'x').lower()
             flinkcount = 0
             fblksize = 0
-            if(hasattr(fstatinfo, "st_blksize")):
-                fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
             fblocks = 0
-            if(hasattr(fstatinfo, "st_blocks")):
-                fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
             fflags = 0
-            if(hasattr(fstatinfo, "st_flags")):
-                fflags = format(int(fstatinfo.st_flags), 'x').lower()
             ftype = 0
             if(member.is_directory):
                 ftype = 5
@@ -7084,6 +7663,13 @@ else:
                     int(stat.S_IMODE(int(stat.S_IFREG | 0x1b6))), 'x').lower()
                 ftypemod = format(
                     int(stat.S_IFMT(int(stat.S_IFREG | 0x1b6))), 'x').lower()
+            try:
+                ffullmode = member.posix_mode
+                fmode = format(int(ffullmode), 'x').lower()
+                fchmode = format(int(stat.S_IMODE(ffullmode)), 'x').lower()
+                ftypemod = format(int(stat.S_IFMT(ffullmode)), 'x').lower()
+            except AttributeError:
+                pass
             try:
                 fuid = format(int(os.getuid()), 'x').lower()
             except (KeyError, AttributeError):
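
`posix_mode` only exists on reasonably recent py7zr `FileInfo` objects, hence the `try/except AttributeError` above. A sketch of the same probe in isolation (archive name illustrative):

    import stat
    import py7zr

    with py7zr.SevenZipFile("input.7z", mode="r") as archive:
        for member in archive.list():
            mode = getattr(member, "posix_mode", None)  # None on older py7zr
            if mode is not None:
                print(member.filename, oct(stat.S_IMODE(mode)))
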
@@ -7121,7 +7707,10 @@ else:
             typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
             fcontents.seek(0, 0)
             fcencoding = GetFileEncoding(fcontents, 0, False)[0]
-            file_content[member.filename].close()
+            try:
+                file_content[member.filename].close()
+            except AttributeError:
+                pass
             if(typechecktest is False and not compresswholefile):
                 fcontents.seek(0, 2)
                 ucfsize = fcontents.tell()
@@ -7163,17 +7752,34 @@ else:
             if(fcompression == "none"):
                 fcompression = ""
             fcontents.seek(0, 0)
+            if(not contentasfile):
+                fcontents = fcontents.read()
             ftypehex = format(ftype, 'x').lower()
-            tmpoutlist
-
-
+            tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+                                            fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+        return tmpoutlist
+    def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+        if(not hasattr(fp, "write")):
+            return False
+        GetDirList = AppendFilesWithContentFromSevenZipFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+        numfiles = int(len(GetDirList))
+        fnumfiles = format(numfiles, 'x').lower()
+        AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+        try:
+            fp.flush()
+            if(hasattr(os, "sync")):
+                os.fsync(fp.fileno())
+        except (io.UnsupportedOperation, AttributeError, OSError):
+            pass
+        for curfname in GetDirList:
+            tmpoutlist = curfname['fheaders']
+            AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
         try:
             fp.flush()
             if(hasattr(os, "sync")):
                 os.fsync(fp.fileno())
         except (io.UnsupportedOperation, AttributeError, OSError):
             pass
-        fcontents.close()
         return fp


 def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
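
All three converter wrappers (`...FromZipFile`, `...FromRarFile`, `...FromSevenZipFile`) now follow the same template: build the record list, emit the archive header once, then stream each record through `AppendFileHeaderWithContent`. A condensed sketch of dispatching over them; the function names are real, the extension mapping is mine:

    import io
    import pyarchivefile

    CONVERTERS = {
        ".zip": pyarchivefile.AppendFilesWithContentFromZipFile,
        ".rar": pyarchivefile.AppendFilesWithContentFromRarFile,
        ".7z": pyarchivefile.AppendFilesWithContentFromSevenZipFile,
    }

    def convert_to_archive(path):
        out = io.BytesIO()
        for ext, func in CONVERTERS.items():
            if path.endswith(ext):
                func(path, out)   # returns fp on success, False on bad input
                out.seek(0)
                return out
        return None
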
@@ -8443,10 +9049,8 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, file
     elif(compresscheck == "lz4" and compresscheck in compressionsupport):
         fp = lz4.frame.open(infile, "rb")
     elif(compresscheck == "zstd" and compresscheck in compressionsupport):
-        if 'zstandard' in sys.modules:
-            fp = ZstdFile(infile, mode="rb")
-        elif 'pyzstd' in sys.modules:
-            fp = pyzstd.zstdfile.ZstdFile(infile, mode="rb")
+        if 'zstd' in compressionsupport:
+            fp = zstd.ZstdFile(infile, mode="rb")
         else:
             return Flase
     elif((compresscheck == "lzma" or compresscheck == "xz") and compresscheck in compressionsupport):
@@ -8563,10 +9167,8 @@ def UncompressFileAlt(fp, formatspecs=__file_format_multi_dict__, filestart=0,
     elif kind in ("lzma","xz") and (("lzma" in compressionsupport) or ("xz" in compressionsupport)):
         wrapped = lzma.LZMAFile(src)
     elif kind == "zstd" and ("zstd" in compressionsupport or "zstandard" in compressionsupport):
-        if 'zstandard' in sys.modules:
-            wrapped = ZstdFile(fileobj=src, mode="rb")
-        elif 'pyzstd' in sys.modules:
-            wrapped = pyzstd.zstdfile.ZstdFile(fileobj=src, mode="rb")
+        if 'zstd' in compressionsupport:
+            wrapped = zstd.ZstdFile(src, mode="rb")
         else:
             return False
     elif kind == "lz4" and "lz4" in compressionsupport:
@@ -8634,10 +9236,8 @@ def UncompressFile(infile, formatspecs=__file_format_multi_dict__, mode="rb",
     elif (compresscheck == "bzip2" and "bzip2" in compressionsupport):
         fp = bz2.open(infile, mode)
     elif (compresscheck == "zstd" and "zstandard" in compressionsupport):
-        if 'zstandard' in sys.modules:
-            fp = ZstdFile(infile, mode=mode)
-        elif 'pyzstd' in sys.modules:
-            fp = pyzstd.zstdfile.ZstdFile(infile, mode=mode)
+        if 'zstd' in compressionsupport:
+            fp = zstd.ZstdFile(infile, mode=mode)
         else:
             return False
     elif (compresscheck == "lz4" and "lz4" in compressionsupport):
@@ -9406,10 +10006,8 @@ def CompressOpenFile(outfile, compressionenable=True, compressionlevel=None,
         outfp = FileLikeAdapter(bz2.open(outfile, mode, compressionlevel), mode="wb")

     elif (fextname == ".zst" and "zstandard" in compressionsupport):
-        if 'zstandard' in sys.modules:
-            outfp = FileLikeAdapter(ZstdFile(outfile, mode=mode, level=compressionlevel), mode="wb")
-        elif 'pyzstd' in sys.modules:
-            outfp = FileLikeAdapter(pyzstd.zstdfile.ZstdFile(outfile, mode=mode, level=compressionlevel), mode="wb")
+        if 'zstd' in compressionsupport:
+            outfp = FileLikeAdapter(zstd.ZstdFile(outfile, mode=mode, level=compressionlevel), mode="wb")
         else:
             return False  # fix: 'Flase' -> False

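
The four zstd hunks above collapse the old `sys.modules` probing (separate 'zstandard' and 'pyzstd' branches) into a single membership test against `compressionsupport` and one `zstd.ZstdFile` entry point. A sketch of the unified pattern, assuming `zstd` and `compressionsupport` are the module globals bound at import time as the diff implies:

    def open_zstd(path_or_fileobj, mode="rb"):
        # One code path regardless of which zstd backend is installed;
        # mirrors the shape of the replacement branches above.
        if 'zstd' not in compressionsupport:
            return False
        return zstd.ZstdFile(path_or_fileobj, mode=mode)

Note that one surviving context line still reads `return Flase`; that misspelling is in the released code itself (the `.zst` branch carries the upstream comment "# fix: 'Flase' -> False").
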
@@ -9637,7 +10235,7 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_
         if(not fp):
             return False
         fp.seek(filestart, 0)
-    elif(infile == "-"):
+    elif(not isinstance(infile, (list, tuple, )) and infile == "-"):
         fp = MkTempFile()
         shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
         fp.seek(filestart, 0)
@@ -9726,6 +10324,7 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_
         formatspecs = formatspecs[compresschecking]
     fp.seek(filestart, 0)
     inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+    headeroffset = fp.tell()
     formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
     formdelsize = len(formatspecs['format_delimiter'])
     formdel = fp.read(formdelsize).decode("UTF-8")
@@ -9733,7 +10332,6 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_
         return False
     if(formdel != formatspecs['format_delimiter']):
         return False
-    headeroffset = fp.tell()
     if(__use_new_style__):
         inheader = ReadFileHeaderDataBySize(fp, formatspecs['format_delimiter'])
     else:
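
Moving `headeroffset = fp.tell()` above the `fp.read(...)` calls is the substance of these two hunks: `tell()` must be sampled before the format string and delimiter are consumed, otherwise the recorded offset points past the header magic rather than at it. A minimal illustration of the ordering:

    import io

    fp = io.BytesIO(b"MAGIC1\x00payload")
    headeroffset = fp.tell()   # 0: start of the header, taken before any read
    magic = fp.read(6)         # consumes "MAGIC1"
    assert headeroffset == 0 and fp.tell() == 6
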
@@ -11013,8 +11611,8 @@ def ArchiveFileStringListFiles(instr, filestart=0, seekstart=0, seekend=0, skipc
     return listarrayfiles


-def TarFileListFiles(infile, verbose=False, returnfp=False):
-    if(infile == "-"):
+def TarFileListFiles(infile, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+    if(not isinstance(infile, (list, tuple, )) and infile == "-"):
         infile = MkTempFile()
         shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
         infile.seek(0, 0)
@@ -11049,10 +11647,8 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         if(compresscheck=="zstd"):
-            if 'zstandard' in sys.modules:
-                infile = ZstdFile(fileobj=infile, mode="rb")
-            elif 'pyzstd' in sys.modules:
-                infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+            if 'zstd' in compressionsupport:
+                infile = zstd.ZstdFile(infile, mode="rb")
             tarfp = tarfile.open(fileobj=infile, mode="r")
         else:
             tarfp = tarfile.open(fileobj=infile, mode="r")
@@ -11061,10 +11657,8 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         if(compresscheck=="zstd"):
-            if 'zstandard' in sys.modules:
-                infile = ZstdFile(fileobj=infile, mode="rb")
-            elif 'pyzstd' in sys.modules:
-                infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+            if 'zstd' in compressionsupport:
+                infile = zstd.ZstdFile(infile, mode="rb")
             tarfp = tarfile.open(fileobj=infile, mode="r")
         else:
             tarfp = tarfile.open(infile, "r")
@@ -11078,7 +11672,7 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
         ffullmode = member.mode
         flinkcount = 0
         ftype = 0
-        if(member.isreg()):
+        if(member.isreg() or member.isfile()):
             ffullmode = member.mode + stat.S_IFREG
             ftype = 0
         elif(member.islnk()):
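
`TarFileListFiles` now takes `formatspecs` (defaulting to `__file_format_multi_dict__`) so its zstd probe can consult `compressionsupport`, and it accepts `member.isfile()` alongside `isreg()` (in the standard library's tarfile these are aliases, so the `or` is a compatibility belt rather than a behavior change). Calling it is unchanged apart from the new optional argument (archive name illustrative):

    import pyarchivefile

    # formatspecs can usually be left at its default.
    pyarchivefile.TarFileListFiles("backup.tar", verbose=True)
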
@@ -11133,8 +11727,12 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
         return True


+def TarFileListFile(infile, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+    return TarFileListFiles(infile, formatspecs, verbose, returnfp)
+
+
 def ZipFileListFiles(infile, verbose=False, returnfp=False):
-    if(infile == "-"):
+    if(not isinstance(infile, (list, tuple, )) and infile == "-"):
         infile = MkTempFile()
         shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
         infile.seek(0, 0)
@@ -11165,35 +11763,59 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
     for member in sorted(zipfp.infolist(), key=lambda x: x.filename):
         zipinfo = zipfp.getinfo(member.filename)
         if(zipinfo.create_system == 0 or zipinfo.create_system == 10):
-            fwinattributes = int(zipinfo.external_attr)
+            fwinattributes = int(zipinfo.external_attr & 0xFFFF)
             if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
                 fmode = int(stat.S_IFDIR | 0x1ff)
-                fchmode = int(stat.S_IMODE(int(stat.S_IFDIR | 0x1ff)))
-                ftypemod = int(stat.S_IFMT(int(stat.S_IFDIR | 0x1ff)))
+                fchmode = stat.S_IMODE(int(stat.S_IFDIR | 0x1ff))
+                ftypemod = stat.S_IFMT(int(stat.S_IFDIR | 0x1ff))
+            elif ((hasattr(member, "symlink") and member.symlink()) or member.filename.endswith('/')):
+                fmode = int(stat.S_IFREG | 0x1b6)
+                fchmode = stat.S_IMODE(int(stat.S_IFREG | 0x1b6))
+                ftypemod = stat.S_IFMT(int(stat.S_IFREG | 0x1b6))
             else:
                 fmode = int(stat.S_IFREG | 0x1b6)
-                fchmode = int(stat.S_IMODE(int(stat.S_IFREG | 0x1b6)))
-                ftypemod = int(stat.S_IFMT(int(stat.S_IFREG | 0x1b6)))
+                fchmode = stat.S_IMODE(int(stat.S_IFREG | 0x1b6))
+                ftypemod = stat.S_IFMT(int(stat.S_IFREG | 0x1b6))
         elif(zipinfo.create_system == 3):
-            fwinattributes = int(zipinfo.external_attr)
-
-
-
-
-
-
-
-
+            fwinattributes = int(zipinfo.external_attr & 0xFFFF)
+            fmode = int((zipinfo.external_attr >> 16) & 0xFFFF)
+            prefmode = int((zipinfo.external_attr >> 16) & 0xFFFF)
+            if(prefmode==0):
+                fmode = 0
+                prefmode = 0
+            else:
+                file_type = prefmode & 0xF000
+                if(file_type not in (stat.S_IFREG, stat.S_IFDIR, stat.S_IFLNK)):
+                    fmode = 0
+                    prefmode = 0
+                if((mode & 0x1FF) == 0):
+                    fmode = 0
+                    prefmode = 0
+            if (prefmode == 0):
+                if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
+                    fmode = int(stat.S_IFDIR | 0x1ff)
+                    prefmode = int(stat.S_IFDIR | 0x1ff)
+                    fchmode = stat.S_IMODE(prefmode)
+                    ftypemod = stat.S_IFMT(prefmode)
+                else:
+                    fmode = int(stat.S_IFREG | 0x1b6)
+                    prefmode = int(stat.S_IFREG | 0x1b6)
+                    fchmode = stat.S_IMODE(prefmode)
+                    ftypemod = stat.S_IFMT(prefmode)
+            fchmode = stat.S_IMODE(prefmode)
+            ftypemod = stat.S_IFMT(prefmode)
         else:
-            fwinattributes = int(zipinfo.external_attr)
+            fwinattributes = int(zipinfo.external_attr & 0xFFFF)
             if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
                 fmode = int(stat.S_IFDIR | 0x1ff)
-
-
+                prefmode = int(stat.S_IFDIR | 0x1ff)
+                fchmode = stat.S_IMODE(prefmode)
+                ftypemod = stat.S_IFMT(prefmode)
             else:
                 fmode = int(stat.S_IFREG | 0x1b6)
-
-
+                prefmode = int(stat.S_IFREG | 0x1b6)
+                fchmode = stat.S_IMODE(prefmode)
+                ftypemod = stat.S_IFMT(prefmode)
         returnval.update({lcfi: member.filename})
         if(not verbose):
             VerbosePrintOut(member.filename)
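
The rewritten branch keys off `ZipInfo.create_system` (0/10 for DOS/Windows, 3 for Unix) and only trusts the high 16 bits of `external_attr` when they look like a plausible Unix mode; note it uses the same `(mode & 0x1FF)` spelling as the earlier hunk, where `prefmode` appears to be the intended name. A short probe you can run against any ZIP to see what the two halves of `external_attr` carry (archive name illustrative):

    import stat
    import zipfile

    with zipfile.ZipFile("input.zip") as zf:
        for info in zf.infolist():
            dos_attrs = info.external_attr & 0xFFFF        # low 16 bits: DOS attributes
            unix_mode = (info.external_attr >> 16) & 0xFFFF  # high 16 bits: Unix mode
            print(info.filename, info.create_system,
                  hex(dos_attrs), stat.filemode(unix_mode) if unix_mode else "-")
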
@@ -11207,10 +11829,17 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
         if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
             ftype = 5
             permissionstr = "d" + permissionstr
+        elif ((hasattr(member, "symlink") and member.symlink())):
+            ftype = 2
+            permissionstr = "l" + permissionstr
         else:
             ftype = 0
             permissionstr = "-" + permissionstr
         printfname = member.filename
+        if(ftype==2):
+            flinkname = zipfp.read(member.filename).decode("UTF-8")
+        if(ftype==2):
+            printfname = member.filename + " -> " + flinkname
         try:
             fuid = int(os.getuid())
         except (KeyError, AttributeError):
@@ -11253,6 +11882,10 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
         return True


+def ZipFileListFile(infile, verbose=False, returnfp=False):
+    return ZipFileListFiles(infile, verbose, returnfp)
+
+
 if(not rarfile_support):
     def RarFileListFiles(infile, verbose=False, returnfp=False):
         return False
@@ -11380,6 +12013,11 @@ if(rarfile_support):
         else:
             return True

+
+    def RarFileListFile(infile, verbose=False, returnfp=False):
+        return RarFileListFiles(infile, verbose, returnfp)
+
+
 if(not py7zr_support):
     def SevenZipFileListFiles(infile, verbose=False, returnfp=False):
         return False
@@ -11391,7 +12029,10 @@ if(py7zr_support):
         lcfi = 0
         returnval = {}
         szpfp = py7zr.SevenZipFile(infile, mode="r")
-        file_content = szpfp.readall()
+        try:
+            file_content = szpfp.readall()
+        except AttributeError:
+            file_content = sevenzip_readall(infile)
         #sztest = szpfp.testzip()
         sztestalt = szpfp.test()
         if(sztestalt):
@@ -11414,6 +12055,13 @@ if(py7zr_support):
                 fmode = int(stat.S_IFLNK | 0x1b6)
                 fchmode = int(stat.S_IMODE(int(stat.S_IFLNK | 0x1b6)))
                 ftypemod = int(stat.S_IFMT(int(stat.S_IFLNK | 0x1b6)))
+            try:
+                ffullmode = member.posix_mode
+                fmode = format(int(ffullmode), 'x').lower()
+                fchmode = format(int(stat.S_IMODE(ffullmode)), 'x').lower()
+                ftypemod = format(int(stat.S_IFMT(ffullmode)), 'x').lower()
+            except AttributeError:
+                pass
             returnval.update({lcfi: member.filename})
             if(not verbose):
                 VerbosePrintOut(member.filename)
@@ -11435,7 +12083,10 @@ if(py7zr_support):
             printfname = member.filename
             if(ftype == 0):
                 fsize = len(file_content[member.filename].read())
-                file_content[member.filename].close()
+                try:
+                    file_content[member.filename].close()
+                except AttributeError:
+                    pass
             try:
                 fuid = int(os.getuid())
             except (KeyError, AttributeError):
@@ -11478,12 +12129,16 @@ if(py7zr_support):
            return True


+    def SevenZipFileListFile(infile, verbose=False, returnfp=False):
+        return SevenZipFileListFiles(infile, verbose, returnfp)
+
+
 def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict__, seektoend=False, newstyle=False, returnfp=False):
     checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
     if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
         formatspecs = formatspecs[checkcompressfile]
     if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
-        return TarFileListFiles(infile, verbose, returnfp)
+        return TarFileListFiles(infile, formatspecs, verbose, returnfp)
     elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
         return ZipFileListFiles(infile, verbose, returnfp)
     elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
@@ -11497,6 +12152,10 @@ def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict_
     return False


+def InFileListFile(infile, verbose=False, formatspecs=__file_format_multi_dict__, seektoend=False, newstyle=False, returnfp=False):
+    return InFileListFiles(infile, verbose, formatspecs, seektoend, newstyle, returnfp)
+
+
 def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, seekstart=0, seekend=0, skipchecksum=False, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
     outarray = MkTempFile()
     packform = PackArchiveFile(infiles, outarray, dirlistfromtxt, compression, compresswholefile,
@@ -11505,6 +12164,11 @@ def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compress
         outarray, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
     return listarrayfiles

+
+def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, seekstart=0, seekend=0, skipchecksum=False, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
+    return ListDirListFiles(infiles, dirlistfromtxt, compression, compresswholefile, compressionlevel, followlink, seekstart, seekend, skipchecksum, checksumtype, formatspecs, seektoend, verbose, returnfp)
+
+
 def detect_cwd(ftp, file_dir):
     """
     Test whether cwd into file_dir works. Returns True if it does,
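
Note that this hunk adds a second `def ListDirListFiles(...)` with the same name directly below the original; as shipped it rebinds the name to a wrapper that calls itself, which recurses without bound on any call. Judging by the `TarFileListFile`/`ZipFileListFile`/`RarFileListFile` aliases added elsewhere in this release, the intended name was presumably the singular `ListDirListFile`. A hypothetical corrected alias (not what 0.27.2 ships):

    def ListDirListFile(infiles, dirlistfromtxt=False, compression="auto", compresswholefile=True,
                        compressionlevel=None, followlink=False, seekstart=0, seekend=0,
                        skipchecksum=False, checksumtype=["md5", "md5", "md5"],
                        formatspecs=__file_format_dict__, seektoend=False, verbose=False,
                        returnfp=False):
        return ListDirListFiles(infiles, dirlistfromtxt, compression, compresswholefile,
                                compressionlevel, followlink, seekstart, seekend, skipchecksum,
                                checksumtype, formatspecs, seektoend, verbose, returnfp)
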
@@ -13570,7 +14234,6 @@ def run_http_file_server(fileobj, url, on_progress=None, backlog=5):
         if not ah or not ah.strip().lower().startswith("basic "):
             return False
         try:
-            import base64
             b64 = ah.strip().split(" ", 1)[1]
             raw = base64.b64decode(_to_bytes(b64))
             try: raw_txt = raw.decode("utf-8")