PyCatFile 0.26.0__py3-none-any.whl → 0.27.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pycatfile.py CHANGED
@@ -14,7 +14,7 @@
     Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
     Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski
 
-    $FileInfo: pycatfile.py - Last Update: 11/12/2025 Ver. 0.26.0 RC 1 - Author: cooldude2k $
+    $FileInfo: pycatfile.py - Last Update: 11/14/2025 Ver. 0.27.0 RC 1 - Author: cooldude2k $
 '''
 
 from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -24,7 +24,6 @@ import re
 import sys
 import time
 import stat
-import zlib
 import mmap
 import hmac
 import base64
@@ -38,8 +37,8 @@ import zipfile
 import binascii
 import datetime
 import platform
+import collections
 from io import StringIO, BytesIO
-from collections import namedtuple
 import posixpath # POSIX-safe joins/normpaths
 try:
     from backports import tempfile
@@ -50,12 +49,16 @@ try:
     from http.server import BaseHTTPRequestHandler, HTTPServer
     from socketserver import TCPServer
     from urllib.parse import urlparse, parse_qs
-    import base64
 except ImportError:
     from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
     from SocketServer import TCPServer
     from urlparse import urlparse, parse_qs
-    import base64
+
+try:
+    # Python 3.8+ only
+    from multiprocessing import shared_memory
+except ImportError:
+    shared_memory = None
 
 # FTP Support
 ftpssl = True
@@ -146,6 +149,15 @@ try:
 except Exception:
     PATH_TYPES = (basestring,)
 
+def running_interactively():
+    main = sys.modules.get("__main__")
+    no_main_file = not hasattr(main, "__file__")
+    interactive_flag = bool(getattr(sys.flags, "interactive", 0))
+    return no_main_file or interactive_flag
+
+if running_interactively():
+    logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
+
 def _ensure_text(s, encoding="utf-8", errors="replace", allow_none=False):
     """
     Normalize any input to text_type (unicode on Py2, str on Py3).
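Note: the new running_interactively() heuristic relies on two stdlib signals. A standalone sketch of the same check (plain Python, nothing PyCatFile-specific assumed):

    import sys
    main = sys.modules.get("__main__")
    in_repl = not hasattr(main, "__file__")               # True in a bare REPL
    forced = bool(getattr(sys.flags, "interactive", 0))   # True under `python -i`
    print(in_repl or forced)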
@@ -166,7 +178,6 @@ def _ensure_text(s, encoding="utf-8", errors="replace", allow_none=False):
 
     # Handle pathlib.Path & other path-like objects
     try:
-        import os
         if hasattr(os, "fspath"):
             fs = os.fspath(s)
             if isinstance(fs, text_type):
@@ -207,7 +218,6 @@ except ImportError:
 
 # Windows-specific setup
 if os.name == "nt":
-    import io
     def _wrap(stream):
         buf = getattr(stream, "buffer", None)
         is_tty = getattr(stream, "isatty", lambda: False)()
@@ -444,7 +454,13 @@ if('PYCATFILE_CONFIG_FILE' in os.environ and os.path.exists(os.environ['PYCATFIL
 else:
     prescriptpath = get_importing_script_path()
     if(prescriptpath is not None):
-        scriptconf = os.path.join(os.path.dirname(prescriptpath), __use_ini_name__)
+        if(__use_ini_file__ and not __use_json_file__):
+            scriptconf = os.path.join(os.path.dirname(prescriptpath), __use_ini_name__)
+        elif(__use_json_file__ and not __use_ini_file__):
+            scriptconf = os.path.join(os.path.dirname(prescriptpath), __use_json_name__)
+        else:
+            scriptconf = ""
+            prescriptpath = None
     else:
         scriptconf = ""
 if os.path.exists(scriptconf):
@@ -647,12 +663,12 @@ __project__ = __program_name__
 __program_alt_name__ = __program_name__
 __project_url__ = "https://github.com/GameMaker2k/PyCatFile"
 __project_release_url__ = __project_url__+"/releases/latest"
-__version_info__ = (0, 26, 0, "RC 1", 1)
-__version_date_info__ = (2025, 11, 12, "RC 1", 1)
+__version_info__ = (0, 27, 0, "RC 1", 1)
+__version_date_info__ = (2025, 11, 14, "RC 1", 1)
 __version_date__ = str(__version_date_info__[0]) + "." + str(
     __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
 __revision__ = __version_info__[3]
-__revision_id__ = "$Id: 1f4434bfb0e0cb5e732daced1add124d7b880a31 $"
+__revision_id__ = "$Id: a0f8681f37b2a5e4682ca33c86dc2fe3ec56a903 $"
 if(__version_info__[4] is not None):
     __version_date_plusrc__ = __version_date__ + \
         "-" + str(__version_date_info__[4])
@@ -804,9 +820,9 @@ except Exception:
 geturls_ua_pyfile_python = "Mozilla/5.0 (compatible; {proname}/{prover}; +{prourl})".format(
     proname=__project__, prover=__version__, prourl=__project_url__)
 if(platform.python_implementation() != ""):
-    py_implementation = platform.python_implementation()
+    py_implementation = platform.python_implementation()+str(platform.python_version_tuple()[0])
 if(platform.python_implementation() == ""):
-    py_implementation = "CPython"
+    py_implementation = "CPython"+str(platform.python_version_tuple()[0])
 geturls_ua_pyfile_python_alt = "Mozilla/5.0 ({osver}; {archtype}; +{prourl}) {pyimp}/{pyver} (KHTML, like Gecko) {proname}/{prover}".format(osver=platform.system(
 )+" "+platform.release(), archtype=platform.machine(), prourl=__project_url__, pyimp=py_implementation, pyver=platform.python_version(), proname=__project__, prover=__version__)
 geturls_ua_googlebot_google = "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"
@@ -822,13 +838,19 @@ geturls_headers_googlebot_google_old = {'Referer': "http://google.com/", 'User-A
 
 compressionsupport = []
 try:
-    import gzip
+    try:
+        import compression.gzip as gzip
+    except ImportError:
+        import gzip
     compressionsupport.append("gz")
     compressionsupport.append("gzip")
 except ImportError:
     pass
 try:
-    import bz2
+    try:
+        import compression.bz2 as bz2
+    except ImportError:
+        import bz2
     compressionsupport.append("bz2")
     compressionsupport.append("bzip2")
 except ImportError:
@@ -849,20 +871,20 @@ except ImportError:
     pass
 '''
 try:
-    import zstandard
+    try:
+        import compression.zstd as zstd
+    except ImportError:
+        import pyzstd.zstdfile as zstd
     compressionsupport.append("zst")
     compressionsupport.append("zstd")
     compressionsupport.append("zstandard")
 except ImportError:
+    pass
+try:
     try:
-        import pyzstd.zstdfile
-        compressionsupport.append("zst")
-        compressionsupport.append("zstd")
-        compressionsupport.append("zstandard")
+        import compression.lzma as lzma
     except ImportError:
-        pass
-    try:
-        import lzma
+        import lzma
     compressionsupport.append("lzma")
     compressionsupport.append("xz")
 except ImportError:
@@ -872,12 +894,18 @@ except ImportError:
         compressionsupport.append("xz")
     except ImportError:
         pass
-compressionsupport.append("zlib")
-compressionsupport.append("zl")
-compressionsupport.append("zz")
-compressionsupport.append("Z")
-compressionsupport.append("z")
-
+try:
+    try:
+        import compression.zlib as zlib
+    except ImportError:
+        import zlib
+    compressionsupport.append("zlib")
+    compressionsupport.append("zl")
+    compressionsupport.append("zz")
+    compressionsupport.append("Z")
+    compressionsupport.append("z")
+except ImportError:
+    pass
 compressionlist = ['auto']
 compressionlistalt = []
 outextlist = []
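Note: all of these import blocks now follow one pattern: prefer the compression.* namespace added in Python 3.14 (PEP 784), fall back to the classic top-level module on older interpreters. Distilled to a single case (a sketch; only module names already used in this diff are assumed):

    try:
        import compression.bz2 as bz2   # Python 3.14+
    except ImportError:
        import bz2                      # older interpreters
    data = bz2.compress(b"hello world")
    assert bz2.decompress(data) == b"hello world"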
@@ -1051,6 +1079,14 @@ def to_ns(timestamp):
     # Multiply by 1e9 to get nanoseconds, then cast to int
     return int(seconds * 1000000000)
 
+def format_ns_utc(ts_ns, fmt='%Y-%m-%d %H:%M:%S'):
+    ts_ns = int(ts_ns)
+    sec, ns = divmod(ts_ns, 10**9)
+    dt = datetime.datetime.utcfromtimestamp(sec).replace(microsecond=ns // 1000)
+    base = dt.strftime(fmt)
+    ns_str = "%09d" % ns
+    return base + "." + ns_str
+
 def _split_posix(name):
     """
     Return a list of path parts without collapsing '..'.
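Note: the new format_ns_utc() renders a nanosecond timestamp (as produced by to_ns()) with full nine-digit precision, for example:

    ns = to_ns(0)                              # Unix epoch in nanoseconds
    print(format_ns_utc(ns))                   # '1970-01-01 00:00:00.000000000'
    print(format_ns_utc(1500000000123456789))  # '2017-07-14 02:40:00.123456789'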
@@ -2087,7 +2123,7 @@ def MkTempFile(data=None,
                spool_max=__spoolfile_size__,
                spool_dir=__use_spooldir__,
                reset_to_start=True,
-               memfd_name=None,
+               memfd_name=__program_name__,
                memfd_allow_sealing=False,
                memfd_flags_extra=0,
                on_create=None):
@@ -2579,6 +2615,384 @@ def _is_valid_zlib_header(cmf, flg):
         return False
     return True
 
+class SharedMemoryFile(object):
+    """
+    File-like wrapper around multiprocessing.shared_memory.SharedMemory.
+
+    Binary-only API, intended to behave similarly to a regular file opened in
+    'rb', 'wb', or 'r+b' modes (but backed by a fixed-size shared memory block).
+
+    Notes:
+      - Requires Python 3.8+ at runtime to actually use SharedMemory.
+      - On Python 2, importing is fine but constructing will raise RuntimeError.
+      - There is no automatic resizing; buffer size is fixed by SharedMemory.
+      - No real fileno(); this does not represent an OS-level file descriptor.
+      - For text mode, wrap this with io.TextIOWrapper on Python 3:
+            f = SharedMemoryFile(...)
+            tf = io.TextIOWrapper(f, encoding="utf-8")
+    """
+
+    def __init__(self, shm=None, name=None, create=False, size=0,
+                 mode='r+b', offset=0, unlink_on_close=False):
+        """
+        Parameters:
+          shm   : existing SharedMemory object (preferred).
+          name  : name of shared memory block (for attach or create).
+          create: if True, create new SharedMemory; else attach existing.
+          size  : size in bytes (required when create=True).
+          mode  : like 'rb', 'wb', 'r+b', 'ab' (binary only; 't' not supported).
+          offset: starting offset within the shared memory buffer.
+          unlink_on_close: if True, call shm.unlink() when close() is called.
+
+        Usage examples:
+
+            # Create new block and file-like wrapper
+            f = SharedMemoryFile(name=None, create=True, size=4096, mode='r+b')
+
+            # Attach to existing shared memory by name
+            f = SharedMemoryFile(name="xyz", create=False, mode='r+b')
+
+            # Wrap an existing SharedMemory object
+            shm = shared_memory.SharedMemory(create=True, size=1024)
+            f = SharedMemoryFile(shm=shm, mode='r+b')
+        """
+        if shared_memory is None:
+            # No SharedMemory available on this interpreter
+            raise RuntimeError("multiprocessing.shared_memory.SharedMemory "
+                               "is not available on this Python version")
+
+        if 't' in mode:
+            raise ValueError("SharedMemoryFile is binary-only; "
+                             "wrap it with io.TextIOWrapper for text")
+
+        self.mode = mode
+        self._closed = False
+        self._unlinked = False
+        self._unlink_on_close = bool(unlink_on_close)
+
+        if shm is not None:
+            self._shm = shm
+        else:
+            # name may be None when create=True
+            self._shm = shared_memory.SharedMemory(name=name, create=create, size=size)
+
+        self._buf = self._shm.buf
+        self._base_offset = int(offset)
+        if self._base_offset < 0 or self._base_offset > len(self._buf):
+            raise ValueError("offset out of range")
+
+        # We treat the accessible region as [base_offset, len(buf))
+        self._size = len(self._buf) - self._base_offset
+        self._pos = 0  # logical file position within that region
+
+    # ---------- basic properties ----------
+
+    @property
+    def name(self):
+        # SharedMemory name (may be None for anonymous)
+        return getattr(self._shm, "name", None)
+
+    @property
+    def closed(self):
+        return self._closed
+
+    def readable(self):
+        return ('r' in self.mode) or ('+' in self.mode)
+
+    def writable(self):
+        return any(ch in self.mode for ch in ('w', 'a', '+'))
+
+    def seekable(self):
+        return True
+
+    # ---------- core helpers ----------
+
+    def _check_closed(self):
+        if self._closed:
+            raise ValueError("I/O operation on closed SharedMemoryFile")
+
+    def _clamp_pos(self, pos):
+        if pos < 0:
+            return 0
+        if pos > self._size:
+            return self._size
+        return pos
+
+    def _region_bounds(self):
+        """Return (start, end) absolute indices into the SharedMemory buffer."""
+        start = self._base_offset + self._pos
+        end = self._base_offset + self._size
+        return start, end
+
+    # ---------- positioning ----------
+
+    def seek(self, offset, whence=0):
+        """
+        Seek to a new file position.
+
+        whence: 0 = from start, 1 = from current, 2 = from end.
+        """
+        self._check_closed()
+        offset = int(offset)
+        whence = int(whence)
+
+        if whence == 0:    # from start
+            new_pos = offset
+        elif whence == 1:  # from current
+            new_pos = self._pos + offset
+        elif whence == 2:  # from end
+            new_pos = self._size + offset
+        else:
+            raise ValueError("invalid whence (expected 0, 1, or 2)")
+
+        self._pos = self._clamp_pos(new_pos)
+        return self._pos
+
+    def tell(self):
+        return self._pos
+
+    # ---------- reading ----------
+
+    def read(self, size=-1):
+        """
+        Read up to 'size' bytes (or to EOF if size<0 or None).
+        Returns bytes (py3) or str (py2).
+        """
+        self._check_closed()
+        if not self.readable():
+            raise IOError("SharedMemoryFile not opened for reading")
+
+        if size is None or size < 0:
+            size = self._size - self._pos
+        else:
+            size = int(size)
+            if size < 0:
+                size = 0
+
+        if size == 0:
+            return b'' if not PY2 else ''
+
+        start, end_abs = self._region_bounds()
+        available = end_abs - (self._base_offset + self._pos)
+        if available <= 0:
+            return b'' if not PY2 else ''
+
+        size = min(size, available)
+
+        abs_start = self._base_offset + self._pos
+        abs_end = abs_start + size
+
+        chunk = self._buf[abs_start:abs_end]
+        if PY2:
+            data = bytes(chunk)  # bytes() -> str in py2
+        else:
+            data = bytes(chunk)
+
+        self._pos += len(data)
+        return data
+
+    def readline(self, size=-1):
+        """
+        Read a single line (ending with '\\n' or EOF).
+        If size >= 0, at most that many bytes are returned.
+        """
+        self._check_closed()
+        if not self.readable():
+            raise IOError("SharedMemoryFile not opened for reading")
+
+        # Determine maximum bytes we can scan
+        start, end_abs = self._region_bounds()
+        remaining = end_abs - (self._base_offset + self._pos)
+        if remaining <= 0:
+            return b'' if not PY2 else ''
+
+        if size is not None and size >= 0:
+            size = int(size)
+            max_len = min(size, remaining)
+        else:
+            max_len = remaining
+
+        abs_start = self._base_offset + self._pos
+        abs_max = abs_start + max_len
+
+        # Work on a local bytes slice for easy .find()
+        if PY2:
+            buf_bytes = bytes(self._buf[abs_start:abs_max])
+        else:
+            buf_bytes = bytes(self._buf[abs_start:abs_max])
+
+        idx = buf_bytes.find(b'\n')
+        if idx == -1:
+            # No newline; read entire chunk
+            line_bytes = buf_bytes
+        else:
+            line_bytes = buf_bytes[:idx + 1]
+
+        self._pos += len(line_bytes)
+
+        if PY2:
+            return line_bytes  # already str
+        return line_bytes
+
+    def readinto(self, b):
+        """
+        Read bytes into a pre-allocated writable buffer (bytearray/memoryview).
+        Returns number of bytes read.
+        """
+        self._check_closed()
+        if not self.readable():
+            raise IOError("SharedMemoryFile not opened for reading")
+
+        # Normalize target buffer
+        if isinstance(b, memoryview):
+            mv = b
+        else:
+            mv = memoryview(b)
+
+        size = len(mv)
+        if size <= 0:
+            return 0
+
+        start, end_abs = self._region_bounds()
+        remaining = end_abs - (self._base_offset + self._pos)
+        if remaining <= 0:
+            return 0
+
+        size = min(size, remaining)
+
+        abs_start = self._base_offset + self._pos
+        abs_end = abs_start + size
+
+        mv[:size] = self._buf[abs_start:abs_end]
+        self._pos += size
+        return size
+
+    # ---------- writing ----------
+
+    def write(self, data):
+        """
+        Write bytes-like object to the shared memory region.
+
+        Returns number of bytes written. Will raise if not opened writable
+        or if writing would overflow the fixed-size region.
+        """
+        self._check_closed()
+        if not self.writable():
+            raise IOError("SharedMemoryFile not opened for writing")
+
+        if isinstance(data, memoryview):
+            data = bytes(data)
+        elif isinstance(data, bytearray):
+            data = bytes(data)
+
+        if not isinstance(data, binary_types):
+            raise TypeError("write() expects a bytes-like object")
+
+        data_len = len(data)
+        if data_len == 0:
+            return 0
+
+        # Handle "append" semantics roughly: start from end on first write
+        if 'a' in self.mode and self._pos == 0:
+            # Move to logical end of region
+            self._pos = self._size
+
+        start, end_abs = self._region_bounds()
+        remaining = end_abs - (self._base_offset + self._pos)
+        if data_len > remaining:
+            raise IOError("write would overflow SharedMemory region (need %d, have %d)"
+                          % (data_len, remaining))
+
+        abs_start = self._base_offset + self._pos
+        abs_end = abs_start + data_len
+
+        self._buf[abs_start:abs_end] = data
+        self._pos += data_len
+        return data_len
+
+    def flush(self):
+        """
+        No-op for shared memory; provided for file-like compatibility.
+        """
+        self._check_closed()
+        # nothing to flush
+
+    # ---------- unlink / close / context manager ----------
+
+    def unlink(self):
+        """
+        Unlink (destroy) the underlying shared memory block.
+
+        After unlink(), new processes cannot attach via name.
+        Existing attachments (including this one) can continue to use
+        the memory until they close() it.
+
+        This is idempotent: calling it more than once is safe.
+        """
+        if self._unlinked:
+            return
+
+        try:
+            self._shm.unlink()
+        except AttributeError:
+            # Should not happen on normal Python 3.8+,
+            # but keep a clear error if it does.
+            raise RuntimeError("Underlying SharedMemory object "
+                               "does not support unlink()")
+
+        self._unlinked = True
+
+    def close(self):
+        if self._closed:
+            return
+        self._closed = True
+
+        # Optionally unlink on close if requested
+        if self._unlink_on_close and not self._unlinked:
+            try:
+                self.unlink()
+            except Exception:
+                # best-effort; close anyway
+                pass
+
+        try:
+            self._shm.close()
+        except Exception:
+            pass
+
+    def __enter__(self):
+        self._check_closed()
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.close()
+
+    # ---------- iteration ----------
+
+    def __iter__(self):
+        return self
+
+    def __next__(self):
+        line = self.readline()
+        if (not line) or len(line) == 0:
+            raise StopIteration
+        return line
+
+    if PY2:
+        next = __next__
+
+    # ---------- misc helpers ----------
+
+    def fileno(self):
+        """
+        There is no real OS-level file descriptor; raise OSError for APIs
+        that require a fileno().
+        """
+        raise OSError("SharedMemoryFile does not have a real fileno()")
+
+    def isatty(self):
+        return False
+
 # ---------- Main class ----------
 class ZlibFile(object):
     """
@@ -4470,7 +4884,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
             extrastart = extrastart + 1
     fvendorfieldslist = []
     fvendorfields = 0;
-    if(len(HeaderOut)>extraend):
+    if((len(HeaderOut) - 4)>extraend):
         extrastart = extraend
         extraend = len(HeaderOut) - 4
         while(extrastart < extraend):
@@ -4690,6 +5104,15 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
         while(extrastart < extraend):
             fextrafieldslist.append(HeaderOut[extrastart])
             extrastart = extrastart + 1
+    fvendorfieldslist = []
+    fvendorfields = 0;
+    if((len(HeaderOut) - 4)>extraend):
+        extrastart = extraend
+        extraend = len(HeaderOut) - 4
+        while(extrastart < extraend):
+            fvendorfieldslist.append(HeaderOut[extrastart])
+            extrastart = extrastart + 1
+            fvendorfields = fvendorfields + 1
     if(fextrafields==1):
         try:
             fextrafieldslist = json.loads(base64.b64decode(fextrafieldslist[0]).decode("UTF-8"))
@@ -4699,6 +5122,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
             fextrafieldslist = json.loads(fextrafieldslist[0])
         except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
             pass
+    fjstart = fp.tell()
     if(fjsontype=="json"):
         fjsoncontent = {}
         fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
@@ -4765,6 +5189,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
         except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
             pass
     fp.seek(len(delimiter), 1)
+    fjend = fp.tell() - 1
     jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
     if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
         VerbosePrintOut("File JSON Data Checksum Error with file " +
@@ -4797,6 +5222,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
         pyhascontents = False
     fcontents.seek(0, 0)
     newfccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
+    fcontents.seek(0, 0)
     if(not CheckChecksums(fccs, newfccs) and not skipchecksum and not listonly):
         VerbosePrintOut("File Content Checksum Error with file " +
                         fname + " at offset " + str(fcontentstart))
@@ -4836,8 +5262,8 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
         fcontents.seek(0, 0)
     if(not contentasfile):
         fcontents = fcontents.read()
-    outlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize, fuid, funame, fgid, fgname, fid,
-               finode, flinkcount, fdev, frdev, fseeknextfile, fjsoncontent, fextrafieldslist, HeaderOut[-4], HeaderOut[-3], fcontents]
+    outlist = {'fheaders': [ftype, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+                            fcsize, fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev, frdev, fseeknextfile], 'fextradata': fextrafieldslist, 'fjsoncontent': fjsoncontent, 'fcontents': fcontents, 'fjsonchecksumtype': fjsonchecksumtype, 'fheaderchecksumtype': HeaderOut[-4].lower(), 'fcontentchecksumtype': HeaderOut[-3].lower()}
     return outlist
 
 
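Note the return-shape change: ReadFileHeaderDataWithContentToList() now returns a dict keyed by section instead of the 0.26.0 flat list; field order inside 'fheaders' is unchanged, so callers migrate like this:

    entry = ReadFileHeaderDataWithContentToList(fp)
    fname = entry['fheaders'][3]   # was outlist[3] in 0.26.0
    data = entry['fcontents']
    jsonmeta = entry['fjsoncontent']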
@@ -4854,6 +5280,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
         CatSizeEnd = CatSize
     fp.seek(curloc, 0)
     inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+    headeroffset = fp.tell()
     formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
     formdelszie = len(formatspecs['format_delimiter'])
     formdel = fp.read(formdelszie).decode("UTF-8")
@@ -4873,7 +5300,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
     newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
     if(not headercheck and not skipchecksum):
         VerbosePrintOut(
-            "File Header Checksum Error with file at offset " + str(0))
+            "File Header Checksum Error with file at offset " + str(headeroffset))
         VerbosePrintOut("'" + fprechecksum + "' != " +
                         "'" + newfcs + "'")
         return False
@@ -4925,6 +5352,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
         CatSizeEnd = CatSize
     fp.seek(curloc, 0)
     inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+    headeroffset = fp.tell()
     formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
     formdelszie = len(formatspecs['format_delimiter'])
     formdel = fp.read(formdelszie).decode("UTF-8")
@@ -4957,7 +5385,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
             pass
     fvendorfieldslist = []
     fvendorfields = 0;
-    if(len(inheader)>extraend):
+    if((len(inheader) - 2)>extraend):
         extrastart = extraend
         extraend = len(inheader) - 2
         while(extrastart < extraend):
@@ -4967,8 +5395,8 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
     formversion = re.findall("([\\d]+)", formstring)
     fheadsize = int(inheader[0], 16)
     fnumfields = int(inheader[1], 16)
-    fheadctime = int(inheader[1], 16)
-    fheadmtime = int(inheader[1], 16)
+    fheadctime = int(inheader[2], 16)
+    fheadmtime = int(inheader[3], 16)
     fhencoding = inheader[4]
     fostype = inheader[5]
     fpythontype = inheader[6]
@@ -5077,7 +5505,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
     newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
     if(not headercheck and not skipchecksum):
         VerbosePrintOut(
-            "File Header Checksum Error with file at offset " + str(0))
+            "File Header Checksum Error with file at offset " + str(headeroffset))
         VerbosePrintOut("'" + fprechecksum + "' != " +
                         "'" + newfcs + "'")
         return False
@@ -5187,6 +5615,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
         CatSizeEnd = CatSize
     fp.seek(curloc, 0)
     inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+    headeroffset = fp.tell()
     formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
     formdelszie = len(formatspecs['format_delimiter'])
     formdel = fp.read(formdelszie).decode("UTF-8")
@@ -5217,19 +5646,98 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
             fextrafieldslist = json.loads(fextrafieldslist[0])
         except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
             pass
+    fvendorfieldslist = []
+    fvendorfields = 0;
+    if((len(inheader) - 2)>extraend):
+        extrastart = extraend
+        extraend = len(inheader) - 2
+        while(extrastart < extraend):
+            fvendorfieldslist.append(HeaderOut[extrastart])
+            extrastart = extrastart + 1
+            fvendorfields = fvendorfields + 1
     formversion = re.findall("([\\d]+)", formstring)
     fheadsize = int(inheader[0], 16)
     fnumfields = int(inheader[1], 16)
+    fheadctime = int(inheader[2], 16)
+    fheadmtime = int(inheader[3], 16)
+    fhencoding = inheader[4]
+    fostype = inheader[5]
+    fpythontype = inheader[6]
+    fprojectname = inheader[7]
     fnumfiles = int(inheader[8], 16)
-    fseeknextfile = inheaderdata[9]
-    fjsontype = int(inheader[10], 16)
+    fseeknextfile = inheader[9]
+    fjsontype = inheader[10]
     fjsonlen = int(inheader[11], 16)
     fjsonsize = int(inheader[12], 16)
     fjsonchecksumtype = inheader[13]
     fjsonchecksum = inheader[14]
     fjsoncontent = {}
     fjstart = fp.tell()
-    fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+    if(fjsontype=="json"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        if(fjsonsize > 0):
+            try:
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
+            except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                try:
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = json.loads(fprejsoncontent)
+                except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                    fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = {}
+        else:
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
+            fjsoncontent = {}
+    elif(testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        if (fjsonsize > 0):
+            try:
+                # try base64 → utf-8 → YAML
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+            except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
+                try:
+                    # fall back to treating the bytes as plain text YAML
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+                except (UnicodeDecodeError, yaml.YAMLError):
+                    # final fallback: empty
+                    fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = {}
+        else:
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
+            fjsoncontent = {}
+    elif(not testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        fprejsoncontent = ""
+        fjsonrawcontent = fprejsoncontent
+    elif(fjsontype=="list"):
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        flisttmp = MkTempFile()
+        flisttmp.write(fprejsoncontent.encode())
+        flisttmp.seek(0)
+        fjsoncontent = ReadFileHeaderData(flisttmp, fjsonlen, delimiter)
+        flisttmp.close()
+        fjsonrawcontent = fjsoncontent
+        if(fjsonlen==1):
+            try:
+                fjsonrawcontent = base64.b64decode(fjsoncontent[0]).decode("UTF-8")
+                fjsoncontent = json.loads(base64.b64decode(fjsoncontent[0]).decode("UTF-8"))
+                fjsonlen = len(fjsoncontent)
+            except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                try:
+                    fjsonrawcontent = fjsoncontent[0]
+                    fjsoncontent = json.loads(fjsoncontent[0])
+                except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                    pass
     fjend = fp.tell()
     if(re.findall("^\\+([0-9]+)", fseeknextfile)):
         fseeknextasnum = int(fseeknextfile.replace("+", ""))
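Note: each branch above tries base64-wrapped content first, then the raw text, then falls back to an empty value. The JSON branch reduces to this helper (a hypothetical distillation for illustration, not part of the module):

    import base64, binascii, json

    def decode_json_blob(raw):
        try:
            return json.loads(base64.b64decode(raw.encode("UTF-8")).decode("UTF-8"))
        except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
            try:
                return json.loads(raw)
            except (json.decoder.JSONDecodeError, UnicodeDecodeError):
                return {}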
@@ -5260,7 +5768,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
     newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
     if(not headercheck and not skipchecksum):
         VerbosePrintOut(
-            "File Header Checksum Error with file at offset " + str(0))
+            "File Header Checksum Error with file at offset " + str(headeroffset))
         VerbosePrintOut("'" + fprechecksum + "' != " +
                         "'" + newfcs + "'")
         return False
@@ -5764,7 +6272,7 @@ def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksu
     else:
         fctime = format(int(to_ns(time.time())), 'x').lower()
     # Serialize the first group
-    fnumfilesa = AppendNullBytes([tmpoutlenhex, fctime, fctime, fencoding, platform.system(), py_implementation, __program_name__, fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
+    fnumfilesa = AppendNullBytes([tmpoutlenhex, fctime, fctime, fencoding, platform.system(), py_implementation, __program_name__+str(__version_info__[0]), fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
     # Append tmpoutlist
     fnumfilesa += AppendNullBytes(tmpoutlist, delimiter)
     # Append extradata items if any
@@ -5990,9 +6498,7 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
         pass
     return fp
 
-def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
-    if(not hasattr(fp, "write")):
-        return False
+def AppendFilesWithContentToList(infiles, dirlistfromtxt=False, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
     advancedlist = __use_advanced_list__
     altinode = __use_alt_inode__
     infilelist = []
@@ -6034,16 +6540,8 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
     inodetofile = {}
     filetoinode = {}
     inodetoforminode = {}
-    numfiles = int(len(GetDirList))
-    fnumfiles = format(numfiles, 'x').lower()
-    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
-    try:
-        fp.flush()
-        if(hasattr(os, "sync")):
-            os.fsync(fp.fileno())
-    except (io.UnsupportedOperation, AttributeError, OSError):
-        pass
     FullSizeFilesAlt = 0
+    tmpoutlist = []
     for curfname in GetDirList:
         fencoding = "UTF-8"
         if(re.findall("^[.|/]", curfname)):
@@ -6218,7 +6716,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
             typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
             fcontents.seek(0, 0)
             if(typechecktest is not False):
-                typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=True)
+                typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
             fcontents.seek(0, 0)
             fcencoding = GetFileEncoding(fcontents, 0, False)[0]
             if(typechecktest is False and not compresswholefile):
@@ -6267,7 +6765,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
             typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
             fcontents.seek(0, 0)
             if(typechecktest is not False):
-                typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=True)
+                typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
             fcontents.seek(0, 0)
             fcencoding = GetFileEncoding(fcontents, 0, False)[0]
             if(typechecktest is False and not compresswholefile):
@@ -6311,10 +6809,29 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
         if(fcompression == "none"):
             fcompression = ""
         fcontents.seek(0, 0)
+        if(not contentasfile):
+            fcontents = fcontents.read()
         ftypehex = format(ftype, 'x').lower()
-        tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
-                      fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
-        AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
+        tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+                                        fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+    return tmpoutlist
+
+def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+    GetDirList = AppendFilesWithContentToList(infiles, dirlistfromtxt, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, followlink, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+    if(not hasattr(fp, "write")):
+        return False
+    numfiles = int(len(GetDirList))
+    fnumfiles = format(numfiles, 'x').lower()
+    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+    try:
+        fp.flush()
+        if(hasattr(os, "sync")):
+            os.fsync(fp.fileno())
+    except (io.UnsupportedOperation, AttributeError, OSError):
+        pass
+    for curfname in GetDirList:
+        tmpoutlist = curfname['fheaders']
+        AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
     try:
         fp.flush()
         if(hasattr(os, "sync")):
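Note: the write path is now two composable steps: the *ToList functions gather one dict per file, and the writer functions emit them. A usage sketch (the directory and output paths are hypothetical):

    entries = AppendFilesWithContentToList(["./docs"], contentasfile=False)
    for e in entries:
        print(e['fheaders'][3], len(e['fcontents']))   # name and raw size per entry

    with open("out.cat", "wb") as fp:
        AppendFilesWithContent(["./docs"], fp)         # wrapper: builds the list, then writes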
@@ -6323,9 +6840,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
         pass
     return fp
 
-def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
-    if(not hasattr(fp, "write")):
-        return False
+def AppendFilesWithContentFromTarFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
     curinode = 0
     curfid = 0
     inodelist = []
@@ -6367,10 +6882,8 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         if(compresscheck=="zstd"):
-            if 'zstandard' in sys.modules:
-                infile = ZstdFile(fileobj=infile, mode="rb")
-            elif 'pyzstd' in sys.modules:
-                infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+            if 'zstd' in compressionsupport:
+                infile = zstd.ZstdFile(infile, mode="rb")
             tarfp = tarfile.open(fileobj=infile, mode="r")
         else:
             tarfp = tarfile.open(fileobj=infile, mode="r")
@@ -6379,23 +6892,14 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         if(compresscheck=="zstd"):
-            if 'zstandard' in sys.modules:
-                infile = ZstdFile(fileobj=infile, mode="rb")
-            elif 'pyzstd' in sys.modules:
-                infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+            if 'zstd' in compressionsupport:
+                infile = zstd.ZstdFile(infile, mode="rb")
             tarfp = tarfile.open(fileobj=infile, mode="r")
         else:
             tarfp = tarfile.open(infile, "r")
     except FileNotFoundError:
         return False
-    numfiles = int(len(tarfp.getmembers()))
-    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
-    try:
-        fp.flush()
-        if(hasattr(os, "sync")):
-            os.fsync(fp.fileno())
-    except (io.UnsupportedOperation, AttributeError, OSError):
-        pass
+    tmpoutlist = []
     for member in sorted(tarfp.getmembers(), key=lambda x: x.name):
         fencoding = "UTF-8"
         if(re.findall("^[.|/]", member.name)):
@@ -6408,14 +6912,8 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
         ffullmode = member.mode
         flinkcount = 0
         fblksize = 0
-        if(hasattr(fstatinfo, "st_blksize")):
-            fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
         fblocks = 0
-        if(hasattr(fstatinfo, "st_blocks")):
-            fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
         fflags = 0
-        if(hasattr(fstatinfo, "st_flags")):
-            fflags = format(int(fstatinfo.st_flags), 'x').lower()
         ftype = 0
         if(member.isreg()):
             ffullmode = member.mode + stat.S_IFREG
@@ -6493,7 +6991,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
             typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
             fcontents.seek(0, 0)
             if(typechecktest is not False):
-                typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=True)
+                typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
             fcontents.seek(0, 0)
             fcencoding = GetFileEncoding(fcontents, 0, False)[0]
             if(typechecktest is False and not compresswholefile):
@@ -6537,22 +7035,38 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
         if(fcompression == "none"):
             fcompression = ""
         fcontents.seek(0, 0)
+        if(not contentasfile):
+            fcontents = fcontents.read()
         ftypehex = format(ftype, 'x').lower()
-        tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
-                      fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
-        AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
+        tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+                                        fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+    return tmpoutlist
+
+def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+    if(not hasattr(fp, "write")):
+        return False
+    GetDirList = AppendFilesWithContentFromTarFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+    numfiles = int(len(GetDirList))
+    fnumfiles = format(numfiles, 'x').lower()
+    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+    try:
+        fp.flush()
+        if(hasattr(os, "sync")):
+            os.fsync(fp.fileno())
+    except (io.UnsupportedOperation, AttributeError, OSError):
+        pass
+    for curfname in GetDirList:
+        tmpoutlist = curfname['fheaders']
+        AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
     try:
         fp.flush()
         if(hasattr(os, "sync")):
             os.fsync(fp.fileno())
     except (io.UnsupportedOperation, AttributeError, OSError):
         pass
-    fcontents.close()
     return fp
 
-def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
-    if(not hasattr(fp, "write")):
-        return False
+def AppendFilesWithContentFromZipFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
     curinode = 0
     curfid = 0
     inodelist = []
@@ -6585,14 +7099,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
     ziptest = zipfp.testzip()
     if(ziptest):
         VerbosePrintOut("Bad file found!")
-    numfiles = int(len(zipfp.infolist()))
-    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
-    try:
-        fp.flush()
-        if(hasattr(os, "sync")):
-            os.fsync(fp.fileno())
-    except (io.UnsupportedOperation, AttributeError, OSError):
-        pass
+    tmpoutlist = []
     for member in sorted(zipfp.infolist(), key=lambda x: x.filename):
         fencoding = "UTF-8"
         if(re.findall("^[.|/]", member.filename)):
@@ -6608,14 +7115,8 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
         fpremode = int(stat.S_IFREG | 0x1b6)
         flinkcount = 0
         fblksize = 0
-        if(hasattr(fstatinfo, "st_blksize")):
-            fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
         fblocks = 0
-        if(hasattr(fstatinfo, "st_blocks")):
-            fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
         fflags = 0
-        if(hasattr(fstatinfo, "st_flags")):
-            fflags = format(int(fstatinfo.st_flags), 'x').lower()
         ftype = 0
         if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
             ftype = 5
@@ -6755,26 +7256,44 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
         if(fcompression == "none"):
             fcompression = ""
         fcontents.seek(0, 0)
+        if(not contentasfile):
+            fcontents = fcontents.read()
         ftypehex = format(ftype, 'x').lower()
-        tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
-                      fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
-        AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
+        tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+                                        fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+    return tmpoutlist
+
+def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+    if(not hasattr(fp, "write")):
+        return False
+    GetDirList = AppendFilesWithContentFromZipFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+    numfiles = int(len(GetDirList))
+    fnumfiles = format(numfiles, 'x').lower()
+    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+    try:
+        fp.flush()
+        if(hasattr(os, "sync")):
+            os.fsync(fp.fileno())
+    except (io.UnsupportedOperation, AttributeError, OSError):
+        pass
+    for curfname in GetDirList:
+        tmpoutlist = curfname['fheaders']
+        AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
     try:
         fp.flush()
         if(hasattr(os, "sync")):
             os.fsync(fp.fileno())
     except (io.UnsupportedOperation, AttributeError, OSError):
         pass
-    fcontents.close()
     return fp
 
 if(not rarfile_support):
-    def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+    def AppendFilesWithContentFromRarFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
         return False
-else:
     def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
-        if(not hasattr(fp, "write")):
-            return False
+        return False
+else:
+    def AppendFilesWithContentFromRarFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
         curinode = 0
         curfid = 0
         inodelist = []
@@ -6789,20 +7308,7 @@ else:
         rartest = rarfp.testrar()
         if(rartest):
             VerbosePrintOut("Bad file found!")
-        numfiles = int(len(rarfp.infolist()))
-        AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
-        try:
-            fp.flush()
-            if(hasattr(os, "sync")):
-                os.fsync(fp.fileno())
-        except (io.UnsupportedOperation, AttributeError, OSError):
-            pass
-        try:
-            fp.flush()
-            if(hasattr(os, "sync")):
-                os.fsync(fp.fileno())
-        except (io.UnsupportedOperation, AttributeError, OSError):
-            pass
+        tmpoutlist = []
         for member in sorted(rarfp.infolist(), key=lambda x: x.filename):
             is_unix = False
             is_windows = False
@@ -6847,14 +7353,8 @@ else:
             fcsize = format(int(0), 'x').lower()
             flinkcount = 0
             fblksize = 0
-            if(hasattr(fstatinfo, "st_blksize")):
-                fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
             fblocks = 0
-            if(hasattr(fstatinfo, "st_blocks")):
-                fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
             fflags = 0
-            if(hasattr(fstatinfo, "st_flags")):
-                fflags = format(int(fstatinfo.st_flags), 'x').lower()
             ftype = 0
             if(member.is_file()):
                 ftype = 0
@@ -6992,26 +7492,84 @@ else:
             if(fcompression == "none"):
                 fcompression = ""
             fcontents.seek(0, 0)
+            if(not contentasfile):
+                fcontents = fcontents.read()
             ftypehex = format(ftype, 'x').lower()
-            tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
-                          fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
-            AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
+            tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+                               fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+        return tmpoutlist
+    def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+        if(not hasattr(fp, "write")):
+            return False
+        GetDirList = AppendFilesWithContentFromRarFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+        numfiles = int(len(GetDirList))
+        fnumfiles = format(numfiles, 'x').lower()
+        AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+        try:
+            fp.flush()
+            if(hasattr(os, "sync")):
+                os.fsync(fp.fileno())
+        except (io.UnsupportedOperation, AttributeError, OSError):
+            pass
+        for curfname in GetDirList:
+            tmpoutlist = curfname['fheaders']
+            AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
         try:
             fp.flush()
             if(hasattr(os, "sync")):
                 os.fsync(fp.fileno())
         except (io.UnsupportedOperation, AttributeError, OSError):
             pass
-        fcontents.close()
         return fp
 
 if(not py7zr_support):
-    def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+    def sevenzip_readall(infile, **kwargs):
         return False
 else:
+    class _MemoryIO(py7zr.Py7zIO):
+        """In-memory file object used by py7zr's factory API."""
+        def __init__(self):
+            self._buf = bytearray()
+        def write(self, data):
+            # py7zr will call this repeatedly with chunks
+            self._buf.extend(data)
+        def read(self, size=None):
+            if size is None:
+                return bytes(self._buf)
+            return bytes(self._buf[:size])
+        def seek(self, offset, whence=0):
+            # we don't really need seeking for this use case
+            return 0
+        def flush(self):
+            pass
+        def size(self):
+            return len(self._buf)
+    class _MemoryFactory(py7zr.WriterFactory):
+        """Factory that creates _MemoryIO objects and keeps them by filename."""
+        def __init__(self):
+            self.files = {}
+        def create(self, filename: str) -> py7zr.Py7zIO:
+            io_obj = _MemoryIO()
+            self.files[filename] = io_obj
+            return io_obj
+    def sevenzip_readall(infile, **kwargs):
+        """
+        Replacement for SevenZipFile.readall() using the new py7zr API.
+
+        Returns: dict[filename -> _MemoryIO]
+        """
+        factory = _MemoryFactory()
+        with py7zr.SevenZipFile(infile, mode="r", **kwargs) as archive:
+            archive.extractall(factory=factory)
+        return factory.files
+
+if(not py7zr_support):
+    def AppendFilesWithContentFromSevenZipFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+        return False
     def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
-        if(not hasattr(fp, "write")):
-            return False
+        return False
+else:
+    def AppendFilesWithContentFromSevenZipFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
         formver = formatspecs['format_ver']
         fileheaderver = str(int(formver.replace(".", "")))
         curinode = 0
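
The _MemoryIO/_MemoryFactory pair above adapts py7zr's factory-based extraction API: py7zr requests one writable object per archive member, streams decompressed chunks into it, and sevenzip_readall() then returns the same filename-to-file-object mapping that the removed SevenZipFile.readall() call used to provide. A minimal usage sketch (the archive name is hypothetical):

    files = sevenzip_readall("example.7z")   # dict: filename -> _MemoryIO
    for name, bio in files.items():
        data = bio.read()                    # whole member as bytes
        print(name, len(data))
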
@@ -7023,19 +7581,15 @@ else:
         if(not os.path.exists(infile) or not os.path.isfile(infile)):
             return False
         szpfp = py7zr.SevenZipFile(infile, mode="r")
-        file_content = szpfp.readall()
+        try:
+            file_content = szpfp.readall()
+        except AttributeError:
+            file_content = sevenzip_readall(infile)
         #sztest = szpfp.testzip()
         sztestalt = szpfp.test()
         if(sztestalt):
             VerbosePrintOut("Bad file found!")
-        numfiles = int(len(szpfp.list()))
-        AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
-        try:
-            fp.flush()
-            if(hasattr(os, "sync")):
-                os.fsync(fp.fileno())
-        except (io.UnsupportedOperation, AttributeError, OSError):
-            pass
+        tmpoutlist = []
         for member in sorted(szpfp.list(), key=lambda x: x.filename):
             fencoding = "UTF-8"
             if(re.findall("^[.|/]", member.filename)):
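
The try/except above keeps both py7zr generations working: releases that still expose SevenZipFile.readall() use it directly, while releases that dropped it raise AttributeError and fall through to the sevenzip_readall() shim. The same compatibility pattern in isolation (the path is hypothetical):

    import py7zr

    szpfp = py7zr.SevenZipFile("example.7z", mode="r")
    try:
        file_content = szpfp.readall()                   # older py7zr API
    except AttributeError:
        file_content = sevenzip_readall("example.7z")    # newer py7zr, via the shim
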
@@ -7053,14 +7607,8 @@ else:
             fcsize = format(int(0), 'x').lower()
             flinkcount = 0
             fblksize = 0
-            if(hasattr(fstatinfo, "st_blksize")):
-                fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
             fblocks = 0
-            if(hasattr(fstatinfo, "st_blocks")):
-                fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
             fflags = 0
-            if(hasattr(fstatinfo, "st_flags")):
-                fflags = format(int(fstatinfo.st_flags), 'x').lower()
             ftype = 0
             if(member.is_directory):
                 ftype = 5
@@ -7127,7 +7675,10 @@ else:
             typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
             fcontents.seek(0, 0)
             fcencoding = GetFileEncoding(fcontents, 0, False)[0]
-            file_content[member.filename].close()
+            try:
+                file_content[member.filename].close()
+            except AttributeError:
+                pass
             if(typechecktest is False and not compresswholefile):
                 fcontents.seek(0, 2)
                 ucfsize = fcontents.tell()
@@ -7169,17 +7720,34 @@ else:
             if(fcompression == "none"):
                 fcompression = ""
             fcontents.seek(0, 0)
+            if(not contentasfile):
+                fcontents = fcontents.read()
             ftypehex = format(ftype, 'x').lower()
-            tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
-                          fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
-            AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
+            tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+                               fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+        return tmpoutlist
+    def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+        if(not hasattr(fp, "write")):
+            return False
+        GetDirList = AppendFilesWithContentFromSevenZipFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+        numfiles = int(len(GetDirList))
+        fnumfiles = format(numfiles, 'x').lower()
+        AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+        try:
+            fp.flush()
+            if(hasattr(os, "sync")):
+                os.fsync(fp.fileno())
+        except (io.UnsupportedOperation, AttributeError, OSError):
+            pass
+        for curfname in GetDirList:
+            tmpoutlist = curfname['fheaders']
+            AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
         try:
             fp.flush()
             if(hasattr(os, "sync")):
                 os.fsync(fp.fileno())
         except (io.UnsupportedOperation, AttributeError, OSError):
             pass
-        fcontents.close()
         return fp
 
 def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
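
Both archive importers now share a two-pass shape: the *ToList() function gathers one dict per member (header fields under 'fheaders', plus extra data, JSON content, contents, and the per-part checksum types), and the writer emits AppendFileHeader() once with the real member count before streaming every entry through AppendFileHeaderWithContent(). A sketch of the calling convention (file names hypothetical):

    with open("out.cat", "wb") as fp:
        AppendFilesWithContentFromSevenZipFile("example.7z", fp)

    # Or collect the entries first and inspect them before writing:
    entries = AppendFilesWithContentFromSevenZipFileToList("example.7z")
    for entry in entries:
        print(entry['fheaders'][3])   # index 3 is fname in the header list built above
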
@@ -8449,10 +9017,8 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, file
     elif(compresscheck == "lz4" and compresscheck in compressionsupport):
         fp = lz4.frame.open(infile, "rb")
     elif(compresscheck == "zstd" and compresscheck in compressionsupport):
-        if 'zstandard' in sys.modules:
-            fp = ZstdFile(infile, mode="rb")
-        elif 'pyzstd' in sys.modules:
-            fp = pyzstd.zstdfile.ZstdFile(infile, mode="rb")
+        if 'zstd' in compressionsupport:
+            fp = zstd.ZstdFile(infile, mode="rb")
         else:
             return Flase
     elif((compresscheck == "lzma" or compresscheck == "xz") and compresscheck in compressionsupport):
@@ -8569,10 +9135,8 @@ def UncompressFileAlt(fp, formatspecs=__file_format_multi_dict__, filestart=0,
     elif kind in ("lzma","xz") and (("lzma" in compressionsupport) or ("xz" in compressionsupport)):
         wrapped = lzma.LZMAFile(src)
     elif kind == "zstd" and ("zstd" in compressionsupport or "zstandard" in compressionsupport):
-        if 'zstandard' in sys.modules:
-            wrapped = ZstdFile(fileobj=src, mode="rb")
-        elif 'pyzstd' in sys.modules:
-            wrapped = pyzstd.zstdfile.ZstdFile(fileobj=src, mode="rb")
+        if 'zstd' in compressionsupport:
+            wrapped = zstd.ZstdFile(src, mode="rb")
         else:
             return False
     elif kind == "lz4" and "lz4" in compressionsupport:
@@ -8640,10 +9204,8 @@ def UncompressFile(infile, formatspecs=__file_format_multi_dict__, mode="rb",
     elif (compresscheck == "bzip2" and "bzip2" in compressionsupport):
         fp = bz2.open(infile, mode)
     elif (compresscheck == "zstd" and "zstandard" in compressionsupport):
-        if 'zstandard' in sys.modules:
-            fp = ZstdFile(infile, mode=mode)
-        elif 'pyzstd' in sys.modules:
-            fp = pyzstd.zstdfile.ZstdFile(infile, mode=mode)
+        if 'zstd' in compressionsupport:
+            fp = zstd.ZstdFile(infile, mode=mode)
         else:
             return False
     elif (compresscheck == "lz4" and "lz4" in compressionsupport):
@@ -9412,10 +9974,8 @@ def CompressOpenFile(outfile, compressionenable=True, compressionlevel=None,
         outfp = FileLikeAdapter(bz2.open(outfile, mode, compressionlevel), mode="wb")
 
     elif (fextname == ".zst" and "zstandard" in compressionsupport):
-        if 'zstandard' in sys.modules:
-            outfp = FileLikeAdapter(ZstdFile(outfile, mode=mode, level=compressionlevel), mode="wb")
-        elif 'pyzstd' in sys.modules:
-            outfp = FileLikeAdapter(pyzstd.zstdfile.ZstdFile(outfile, mode=mode, level=compressionlevel), mode="wb")
+        if 'zstd' in compressionsupport:
+            outfp = FileLikeAdapter(zstd.ZstdFile(outfile, mode=mode, level=compressionlevel), mode="wb")
         else:
             return False  # fix: 'Flase' -> False
 
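
Every zstd branch now funnels through a single zstd.ZstdFile call guarded by one 'zstd' in compressionsupport check, instead of probing sys.modules for zstandard or pyzstd at each call site. A sketch of the kind of import-time alias this pattern presumably relies on (an assumption, not shown in this diff; pyzstd is one binding that ships a stdlib-style ZstdFile):

    try:
        import pyzstd as zstd                 # exposes zstd.ZstdFile
        compressionsupport.append("zstd")     # hypothetical registration
    except ImportError:
        zstd = None                           # zstd branches then return False
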
@@ -9732,6 +10292,7 @@ def CatFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_form
         formatspecs = formatspecs[compresschecking]
     fp.seek(filestart, 0)
     inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+    headeroffset = fp.tell()
     formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
     formdelsize = len(formatspecs['format_delimiter'])
     formdel = fp.read(formdelsize).decode("UTF-8")
@@ -9739,7 +10300,6 @@ def CatFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_form
         return False
     if(formdel != formatspecs['format_delimiter']):
         return False
-    headeroffset = fp.tell()
     if(__use_new_style__):
         inheader = ReadFileHeaderDataBySize(fp, formatspecs['format_delimiter'])
     else:
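
The net effect of these two hunks is that headeroffset is captured with fp.tell() before the format magic and delimiter are read rather than after, so any later logic that seeks back to headeroffset now covers the magic/version string as well. Schematically:

    fp.seek(filestart, 0)
    headeroffset = fp.tell()      # now points at the format magic itself
    formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
    formdel = fp.read(len(formatspecs['format_delimiter'])).decode("UTF-8")
    # previously headeroffset was only recorded here, past the magic
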
@@ -11055,10 +11615,8 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         if(compresscheck=="zstd"):
-            if 'zstandard' in sys.modules:
-                infile = ZstdFile(fileobj=infile, mode="rb")
-            elif 'pyzstd' in sys.modules:
-                infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+            if 'zstd' in compressionsupport:
+                infile = zstd.ZstdFile(infile, mode="rb")
             tarfp = tarfile.open(fileobj=infile, mode="r")
         else:
             tarfp = tarfile.open(fileobj=infile, mode="r")
@@ -11067,10 +11625,8 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         if(compresscheck=="zstd"):
-            if 'zstandard' in sys.modules:
-                infile = ZstdFile(fileobj=infile, mode="rb")
-            elif 'pyzstd' in sys.modules:
-                infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+            if 'zstd' in compressionsupport:
+                infile = zstd.ZstdFile(infile, mode="rb")
             tarfp = tarfile.open(fileobj=infile, mode="r")
         else:
             tarfp = tarfile.open(infile, "r")
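
Because tarfile has no native zstd support on the Python versions this module targets, the stream is wrapped in zstd.ZstdFile first and handed to tarfile via fileobj=. The same wrapping in isolation (the file name is hypothetical, assuming a pyzstd-style ZstdFile as above):

    import tarfile

    with zstd.ZstdFile("backup.tar.zst", mode="rb") as zfp:
        with tarfile.open(fileobj=zfp, mode="r") as tarfp:
            for member in tarfp.getmembers():
                print(member.name)
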
@@ -11397,7 +11953,10 @@ if(py7zr_support):
         lcfi = 0
         returnval = {}
         szpfp = py7zr.SevenZipFile(infile, mode="r")
-        file_content = szpfp.readall()
+        try:
+            file_content = szpfp.readall()
+        except AttributeError:
+            file_content = sevenzip_readall(infile)
         #sztest = szpfp.testzip()
         sztestalt = szpfp.test()
         if(sztestalt):
@@ -11441,7 +12000,10 @@ if(py7zr_support):
             printfname = member.filename
             if(ftype == 0):
                 fsize = len(file_content[member.filename].read())
-                file_content[member.filename].close()
+                try:
+                    file_content[member.filename].close()
+                except AttributeError:
+                    pass
             try:
                 fuid = int(os.getuid())
             except (KeyError, AttributeError):
@@ -13576,7 +14138,6 @@ def run_http_file_server(fileobj, url, on_progress=None, backlog=5):
         if not ah or not ah.strip().lower().startswith("basic "):
             return False
         try:
-            import base64
             b64 = ah.strip().split(" ", 1)[1]
             raw = base64.b64decode(_to_bytes(b64))
             try: raw_txt = raw.decode("utf-8")
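
The function-local import base64 disappears here because the handler can rely on a single module-level import. The surrounding code decodes a standard HTTP Basic authorization header; a self-contained sketch of that decode (the credentials are hypothetical):

    import base64

    ah = "Basic dXNlcjpwYXNz"                # header value for user:pass
    b64 = ah.strip().split(" ", 1)[1]
    raw_txt = base64.b64decode(b64).decode("utf-8")
    username, _, password = raw_txt.partition(":")
    assert (username, password) == ("user", "pass")
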