PyCatFile 0.26.0__py3-none-any.whl → 0.27.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pycatfile.py CHANGED
@@ -14,7 +14,7 @@
  Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
  Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski
 
- $FileInfo: pycatfile.py - Last Update: 11/12/2025 Ver. 0.26.0 RC 1 - Author: cooldude2k $
+ $FileInfo: pycatfile.py - Last Update: 11/15/2025 Ver. 0.27.2 RC 1 - Author: cooldude2k $
  '''
 
  from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -24,7 +24,6 @@ import re
  import sys
  import time
  import stat
- import zlib
  import mmap
  import hmac
  import base64
@@ -38,8 +37,8 @@ import zipfile
  import binascii
  import datetime
  import platform
+ import collections
  from io import StringIO, BytesIO
- from collections import namedtuple
  import posixpath # POSIX-safe joins/normpaths
  try:
  from backports import tempfile
@@ -48,14 +47,16 @@ except ImportError:
 
  try:
  from http.server import BaseHTTPRequestHandler, HTTPServer
- from socketserver import TCPServer
  from urllib.parse import urlparse, parse_qs
- import base64
  except ImportError:
  from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
- from SocketServer import TCPServer
  from urlparse import urlparse, parse_qs
- import base64
+
+ try:
+ # Python 3.8+ only
+ from multiprocessing import shared_memory
+ except ImportError:
+ shared_memory = None
 
  # FTP Support
  ftpssl = True
@@ -146,6 +147,15 @@ try:
  except Exception:
  PATH_TYPES = (basestring,)
 
+ def running_interactively():
+ main = sys.modules.get("__main__")
+ no_main_file = not hasattr(main, "__file__")
+ interactive_flag = bool(getattr(sys.flags, "interactive", 0))
+ return no_main_file or interactive_flag
+
+ if running_interactively():
+ logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
+
  def _ensure_text(s, encoding="utf-8", errors="replace", allow_none=False):
  """
  Normalize any input to text_type (unicode on Py2, str on Py3).
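For context: the new running_interactively() heuristic treats a __main__ module without __file__, or the -i interpreter flag, as a REPL session and switches on DEBUG-level logging to stdout in that case. A minimal standalone sketch of the same check, using only the standard library:

import sys

def running_interactively():
    # A REPL (python, python -i, IDLE) has no __main__.__file__
    main = sys.modules.get("__main__")
    return not hasattr(main, "__file__") or bool(getattr(sys.flags, "interactive", 0))

print(running_interactively())  # False from a script, True from a REPL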
@@ -166,7 +176,6 @@ def _ensure_text(s, encoding="utf-8", errors="replace", allow_none=False):
 
  # Handle pathlib.Path & other path-like objects
  try:
- import os
  if hasattr(os, "fspath"):
  fs = os.fspath(s)
  if isinstance(fs, text_type):
@@ -207,7 +216,6 @@ except ImportError:
 
  # Windows-specific setup
  if os.name == "nt":
- import io
  def _wrap(stream):
  buf = getattr(stream, "buffer", None)
  is_tty = getattr(stream, "isatty", lambda: False)()
@@ -444,7 +452,13 @@ if('PYCATFILE_CONFIG_FILE' in os.environ and os.path.exists(os.environ['PYCATFIL
  else:
  prescriptpath = get_importing_script_path()
  if(prescriptpath is not None):
- scriptconf = os.path.join(os.path.dirname(prescriptpath), __use_ini_name__)
+ if(__use_ini_file__ and not __use_json_file__):
+ scriptconf = os.path.join(os.path.dirname(prescriptpath), __use_ini_name__)
+ elif(__use_json_file__ and not __use_ini_file__):
+ scriptconf = os.path.join(os.path.dirname(prescriptpath), __use_json_name__)
+ else:
+ scriptconf = ""
+ prescriptpath = None
  else:
  scriptconf = ""
  if os.path.exists(scriptconf):
@@ -647,12 +661,12 @@ __project__ = __program_name__
  __program_alt_name__ = __program_name__
  __project_url__ = "https://github.com/GameMaker2k/PyCatFile"
  __project_release_url__ = __project_url__+"/releases/latest"
- __version_info__ = (0, 26, 0, "RC 1", 1)
- __version_date_info__ = (2025, 11, 12, "RC 1", 1)
+ __version_info__ = (0, 27, 2, "RC 1", 1)
+ __version_date_info__ = (2025, 11, 15, "RC 1", 1)
  __version_date__ = str(__version_date_info__[0]) + "." + str(
  __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
  __revision__ = __version_info__[3]
- __revision_id__ = "$Id: 1f4434bfb0e0cb5e732daced1add124d7b880a31 $"
+ __revision_id__ = "$Id: 44c63222877d771eec434744489e580eea061ad6 $"
  if(__version_info__[4] is not None):
  __version_date_plusrc__ = __version_date__ + \
  "-" + str(__version_date_info__[4])
@@ -804,9 +818,9 @@ except Exception:
  geturls_ua_pyfile_python = "Mozilla/5.0 (compatible; {proname}/{prover}; +{prourl})".format(
  proname=__project__, prover=__version__, prourl=__project_url__)
  if(platform.python_implementation() != ""):
- py_implementation = platform.python_implementation()
+ py_implementation = platform.python_implementation()+str(platform.python_version_tuple()[0])
  if(platform.python_implementation() == ""):
- py_implementation = "CPython"
+ py_implementation = "CPython"+str(platform.python_version_tuple()[0])
  geturls_ua_pyfile_python_alt = "Mozilla/5.0 ({osver}; {archtype}; +{prourl}) {pyimp}/{pyver} (KHTML, like Gecko) {proname}/{prover}".format(osver=platform.system(
  )+" "+platform.release(), archtype=platform.machine(), prourl=__project_url__, pyimp=py_implementation, pyver=platform.python_version(), proname=__project__, prover=__version__)
  geturls_ua_googlebot_google = "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"
@@ -822,13 +836,19 @@ geturls_headers_googlebot_google_old = {'Referer': "http://google.com/", 'User-A
 
  compressionsupport = []
  try:
- import gzip
+ try:
+ import compression.gzip as gzip
+ except ImportError:
+ import gzip
  compressionsupport.append("gz")
  compressionsupport.append("gzip")
  except ImportError:
  pass
  try:
- import bz2
+ try:
+ import compression.bz2 as bz2
+ except ImportError:
+ import bz2
  compressionsupport.append("bz2")
  compressionsupport.append("bzip2")
  except ImportError:
@@ -849,35 +869,39 @@ except ImportError:
  pass
  '''
  try:
- import zstandard
+ try:
+ import compression.zstd as zstd
+ except ImportError:
+ import pyzstd.zstdfile as zstd
  compressionsupport.append("zst")
  compressionsupport.append("zstd")
  compressionsupport.append("zstandard")
  except ImportError:
+ pass
+ try:
  try:
- import pyzstd.zstdfile
- compressionsupport.append("zst")
- compressionsupport.append("zstd")
- compressionsupport.append("zstandard")
+ import compression.lzma as lzma
  except ImportError:
- pass
- try:
- import lzma
+ try:
+ import lzma
+ except ImportError:
+ from backports import lzma
  compressionsupport.append("lzma")
  compressionsupport.append("xz")
  except ImportError:
+ pass
+ try:
  try:
- from backports import lzma
- compressionsupport.append("lzma")
- compressionsupport.append("xz")
+ import compression.zlib as zlib
  except ImportError:
- pass
- compressionsupport.append("zlib")
- compressionsupport.append("zl")
- compressionsupport.append("zz")
- compressionsupport.append("Z")
- compressionsupport.append("z")
-
+ import zlib
+ compressionsupport.append("zlib")
+ compressionsupport.append("zl")
+ compressionsupport.append("zz")
+ compressionsupport.append("Z")
+ compressionsupport.append("z")
+ except ImportError:
+ pass
  compressionlist = ['auto']
  compressionlistalt = []
  outextlist = []
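For context: the reworked import ladders above prefer the compression.* namespace that Python 3.14 introduces (PEP 784 adds compression.zstd and re-exports the existing codecs as compression.gzip, compression.bz2, compression.lzma, and compression.zlib), falling back to the legacy top-level modules, or to pyzstd for Zstandard, on older interpreters. Callers can stay version-agnostic by checking compressionsupport; a small sketch, assuming pycatfile is importable:

import pycatfile

# "zstd" is registered only if compression.zstd (3.14+) or pyzstd imported cleanly
if "zstd" in pycatfile.compressionsupport:
    print("Zstandard compression available")
else:
    print("limited to:", sorted(set(pycatfile.compressionsupport)))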
@@ -1051,6 +1075,14 @@ def to_ns(timestamp):
  # Multiply by 1e9 to get nanoseconds, then cast to int
  return int(seconds * 1000000000)
 
+ def format_ns_utc(ts_ns, fmt='%Y-%m-%d %H:%M:%S'):
+ ts_ns = int(ts_ns)
+ sec, ns = divmod(ts_ns, 10**9)
+ dt = datetime.datetime.utcfromtimestamp(sec).replace(microsecond=ns // 1000)
+ base = dt.strftime(fmt)
+ ns_str = "%09d" % ns
+ return base + "." + ns_str
+
  def _split_posix(name):
  """
  Return a list of path parts without collapsing '..'.
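For context: format_ns_utc() is the display-side counterpart of to_ns(), splitting a nanosecond timestamp into whole seconds plus a zero-padded nine-digit fraction (datetime has no native nanosecond field). A usage sketch, assuming pycatfile is importable; note that datetime.datetime.utcfromtimestamp() is deprecated as of Python 3.12 but still works:

import time
from pycatfile import to_ns, format_ns_utc

ns = to_ns(time.time())        # float seconds -> int nanoseconds
print(format_ns_utc(ns))       # e.g. 2025-11-15 12:34:56.123456789
print(format_ns_utc(0))        # 1970-01-01 00:00:00.000000000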
@@ -2087,7 +2119,7 @@ def MkTempFile(data=None,
  spool_max=__spoolfile_size__,
  spool_dir=__use_spooldir__,
  reset_to_start=True,
- memfd_name=None,
+ memfd_name=__program_name__,
  memfd_allow_sealing=False,
  memfd_flags_extra=0,
  on_create=None):
@@ -2579,6 +2611,384 @@ def _is_valid_zlib_header(cmf, flg):
  return False
  return True
 
+ class SharedMemoryFile(object):
+ """
+ File-like wrapper around multiprocessing.shared_memory.SharedMemory.
+
+ Binary-only API, intended to behave similarly to a regular file opened in
+ 'rb', 'wb', or 'r+b' modes (but backed by a fixed-size shared memory block).
+
+ Notes:
+ - Requires Python 3.8+ at runtime to actually use SharedMemory.
+ - On Python 2, importing is fine but constructing will raise RuntimeError.
+ - There is no automatic resizing; buffer size is fixed by SharedMemory.
+ - No real fileno(); this does not represent an OS-level file descriptor.
+ - For text mode, wrap this with io.TextIOWrapper on Python 3:
+ f = SharedMemoryFile(...)
+ tf = io.TextIOWrapper(f, encoding="utf-8")
+ """
+
+ def __init__(self, shm=None, name=None, create=False, size=0,
+ mode='r+b', offset=0, unlink_on_close=False):
+ """
+ Parameters:
+ shm : existing SharedMemory object (preferred).
+ name : name of shared memory block (for attach or create).
+ create: if True, create new SharedMemory; else attach existing.
+ size : size in bytes (required when create=True).
+ mode : like 'rb', 'wb', 'r+b', 'ab' (binary only; 't' not supported).
+ offset: starting offset within the shared memory buffer.
+ unlink_on_close: if True, call shm.unlink() when close() is called.
+
+ Usage examples:
+
+ # Create new block and file-like wrapper
+ f = SharedMemoryFile(name=None, create=True, size=4096, mode='r+b')
+
+ # Attach to existing shared memory by name
+ f = SharedMemoryFile(name="xyz", create=False, mode='r+b')
+
+ # Wrap an existing SharedMemory object
+ shm = shared_memory.SharedMemory(create=True, size=1024)
+ f = SharedMemoryFile(shm=shm, mode='r+b')
+ """
+ if shared_memory is None:
+ # No SharedMemory available on this interpreter
+ raise RuntimeError("multiprocessing.shared_memory.SharedMemory "
+ "is not available on this Python version")
+
+ if 't' in mode:
+ raise ValueError("SharedMemoryFile is binary-only; "
+ "wrap it with io.TextIOWrapper for text")
+
+ self.mode = mode
+ self._closed = False
+ self._unlinked = False
+ self._unlink_on_close = bool(unlink_on_close)
+
+ if shm is not None:
+ self._shm = shm
+ else:
+ # name may be None when create=True
+ self._shm = shared_memory.SharedMemory(name=name, create=create, size=size)
+
+ self._buf = self._shm.buf
+ self._base_offset = int(offset)
+ if self._base_offset < 0 or self._base_offset > len(self._buf):
+ raise ValueError("offset out of range")
+
+ # We treat the accessible region as [base_offset, len(buf))
+ self._size = len(self._buf) - self._base_offset
+ self._pos = 0 # logical file position within that region
+
+ # ---------- basic properties ----------
+
+ @property
+ def name(self):
+ # SharedMemory name (may be None for anonymous)
+ return getattr(self._shm, "name", None)
+
+ @property
+ def closed(self):
+ return self._closed
+
+ def readable(self):
+ return ('r' in self.mode) or ('+' in self.mode)
+
+ def writable(self):
+ return any(ch in self.mode for ch in ('w', 'a', '+'))
+
+ def seekable(self):
+ return True
+
+ # ---------- core helpers ----------
+
+ def _check_closed(self):
+ if self._closed:
+ raise ValueError("I/O operation on closed SharedMemoryFile")
+
+ def _clamp_pos(self, pos):
+ if pos < 0:
+ return 0
+ if pos > self._size:
+ return self._size
+ return pos
+
+ def _region_bounds(self):
+ """Return (start, end) absolute indices into the SharedMemory buffer."""
+ start = self._base_offset + self._pos
+ end = self._base_offset + self._size
+ return start, end
+
+ # ---------- positioning ----------
+
+ def seek(self, offset, whence=0):
+ """
+ Seek to a new file position.
+
+ whence: 0 = from start, 1 = from current, 2 = from end.
+ """
+ self._check_closed()
+ offset = int(offset)
+ whence = int(whence)
+
+ if whence == 0: # from start
+ new_pos = offset
+ elif whence == 1: # from current
+ new_pos = self._pos + offset
+ elif whence == 2: # from end
+ new_pos = self._size + offset
+ else:
+ raise ValueError("invalid whence (expected 0, 1, or 2)")
+
+ self._pos = self._clamp_pos(new_pos)
+ return self._pos
+
+ def tell(self):
+ return self._pos
+
+ # ---------- reading ----------
+
+ def read(self, size=-1):
+ """
+ Read up to 'size' bytes (or to EOF if size<0 or None).
+ Returns bytes (py3) or str (py2).
+ """
+ self._check_closed()
+ if not self.readable():
+ raise IOError("SharedMemoryFile not opened for reading")
+
+ if size is None or size < 0:
+ size = self._size - self._pos
+ else:
+ size = int(size)
+ if size < 0:
+ size = 0
+
+ if size == 0:
+ return b'' if not PY2 else ''
+
+ start, end_abs = self._region_bounds()
+ available = end_abs - (self._base_offset + self._pos)
+ if available <= 0:
+ return b'' if not PY2 else ''
+
+ size = min(size, available)
+
+ abs_start = self._base_offset + self._pos
+ abs_end = abs_start + size
+
+ chunk = self._buf[abs_start:abs_end]
+ if PY2:
+ data = bytes(chunk) # bytes() -> str in py2
+ else:
+ data = bytes(chunk)
+
+ self._pos += len(data)
+ return data
+
+ def readline(self, size=-1):
+ """
+ Read a single line (ending with '\\n' or EOF).
+ If size >= 0, at most that many bytes are returned.
+ """
+ self._check_closed()
+ if not self.readable():
+ raise IOError("SharedMemoryFile not opened for reading")
+
+ # Determine maximum bytes we can scan
+ start, end_abs = self._region_bounds()
+ remaining = end_abs - (self._base_offset + self._pos)
+ if remaining <= 0:
+ return b'' if not PY2 else ''
+
+ if size is not None and size >= 0:
+ size = int(size)
+ max_len = min(size, remaining)
+ else:
+ max_len = remaining
+
+ abs_start = self._base_offset + self._pos
+ abs_max = abs_start + max_len
+
+ # Work on a local bytes slice for easy .find()
+ if PY2:
+ buf_bytes = bytes(self._buf[abs_start:abs_max])
+ else:
+ buf_bytes = bytes(self._buf[abs_start:abs_max])
+
+ idx = buf_bytes.find(b'\n')
+ if idx == -1:
+ # No newline; read entire chunk
+ line_bytes = buf_bytes
+ else:
+ line_bytes = buf_bytes[:idx + 1]
+
+ self._pos += len(line_bytes)
+
+ if PY2:
+ return line_bytes # already str
+ return line_bytes
+
+ def readinto(self, b):
+ """
+ Read bytes into a pre-allocated writable buffer (bytearray/memoryview).
+ Returns number of bytes read.
+ """
+ self._check_closed()
+ if not self.readable():
+ raise IOError("SharedMemoryFile not opened for reading")
+
+ # Normalize target buffer
+ if isinstance(b, memoryview):
+ mv = b
+ else:
+ mv = memoryview(b)
+
+ size = len(mv)
+ if size <= 0:
+ return 0
+
+ start, end_abs = self._region_bounds()
+ remaining = end_abs - (self._base_offset + self._pos)
+ if remaining <= 0:
+ return 0
+
+ size = min(size, remaining)
+
+ abs_start = self._base_offset + self._pos
+ abs_end = abs_start + size
+
+ mv[:size] = self._buf[abs_start:abs_end]
+ self._pos += size
+ return size
+
+ # ---------- writing ----------
+
+ def write(self, data):
+ """
+ Write bytes-like object to the shared memory region.
+
+ Returns number of bytes written. Will raise if not opened writable
+ or if writing would overflow the fixed-size region.
+ """
+ self._check_closed()
+ if not self.writable():
+ raise IOError("SharedMemoryFile not opened for writing")
+
+ if isinstance(data, memoryview):
+ data = bytes(data)
+ elif isinstance(data, bytearray):
+ data = bytes(data)
+
+ if not isinstance(data, binary_types):
+ raise TypeError("write() expects a bytes-like object")
+
+ data_len = len(data)
+ if data_len == 0:
+ return 0
+
+ # Handle "append" semantics roughly: start from end on first write
+ if 'a' in self.mode and self._pos == 0:
+ # Move to logical end of region
+ self._pos = self._size
+
+ start, end_abs = self._region_bounds()
+ remaining = end_abs - (self._base_offset + self._pos)
+ if data_len > remaining:
+ raise IOError("write would overflow SharedMemory region (need %d, have %d)"
+ % (data_len, remaining))
+
+ abs_start = self._base_offset + self._pos
+ abs_end = abs_start + data_len
+
+ self._buf[abs_start:abs_end] = data
+ self._pos += data_len
+ return data_len
+
+ def flush(self):
+ """
+ No-op for shared memory; provided for file-like compatibility.
+ """
+ self._check_closed()
+ # nothing to flush
+
+ # ---------- unlink / close / context manager ----------
+
+ def unlink(self):
+ """
+ Unlink (destroy) the underlying shared memory block.
+
+ After unlink(), new processes cannot attach via name.
+ Existing attachments (including this one) can continue to use
+ the memory until they close() it.
+
+ This is idempotent: calling it more than once is safe.
+ """
+ if self._unlinked:
+ return
+
+ try:
+ self._shm.unlink()
+ except AttributeError:
+ # Should not happen on normal Python 3.8+,
+ # but keep a clear error if it does.
+ raise RuntimeError("Underlying SharedMemory object "
+ "does not support unlink()")
+
+ self._unlinked = True
+
+ def close(self):
+ if self._closed:
+ return
+ self._closed = True
+
+ # Optionally unlink on close if requested
+ if self._unlink_on_close and not self._unlinked:
+ try:
+ self.unlink()
+ except Exception:
+ # best-effort; close anyway
+ pass
+
+ try:
+ self._shm.close()
+ except Exception:
+ pass
+
+ def __enter__(self):
+ self._check_closed()
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.close()
+
+ # ---------- iteration ----------
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ line = self.readline()
+ if (not line) or len(line) == 0:
+ raise StopIteration
+ return line
+
+ if PY2:
+ next = __next__
+
+ # ---------- misc helpers ----------
+
+ def fileno(self):
+ """
+ There is no real OS-level file descriptor; raise OSError for APIs
+ that require a fileno().
+ """
+ raise OSError("SharedMemoryFile does not have a real fileno()")
+
+ def isatty(self):
+ return False
+
  # ---------- Main class ----------
  class ZlibFile(object):
  """
@@ -4470,7 +4880,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
  extrastart = extrastart + 1
  fvendorfieldslist = []
  fvendorfields = 0;
- if(len(HeaderOut)>extraend):
+ if((len(HeaderOut) - 4)>extraend):
  extrastart = extraend
  extraend = len(HeaderOut) - 4
  while(extrastart < extraend):
@@ -4690,6 +5100,15 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
  while(extrastart < extraend):
  fextrafieldslist.append(HeaderOut[extrastart])
  extrastart = extrastart + 1
+ fvendorfieldslist = []
+ fvendorfields = 0;
+ if((len(HeaderOut) - 4)>extraend):
+ extrastart = extraend
+ extraend = len(HeaderOut) - 4
+ while(extrastart < extraend):
+ fvendorfieldslist.append(HeaderOut[extrastart])
+ extrastart = extrastart + 1
+ fvendorfields = fvendorfields + 1
  if(fextrafields==1):
  try:
  fextrafieldslist = json.loads(base64.b64decode(fextrafieldslist[0]).decode("UTF-8"))
@@ -4699,6 +5118,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
  fextrafieldslist = json.loads(fextrafieldslist[0])
  except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
  pass
+ fjstart = fp.tell()
  if(fjsontype=="json"):
  fjsoncontent = {}
  fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
@@ -4765,6 +5185,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
  except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
  pass
  fp.seek(len(delimiter), 1)
+ fjend = fp.tell() - 1
  jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
  if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
  VerbosePrintOut("File JSON Data Checksum Error with file " +
@@ -4797,6 +5218,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
  pyhascontents = False
  fcontents.seek(0, 0)
  newfccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
+ fcontents.seek(0, 0)
  if(not CheckChecksums(fccs, newfccs) and not skipchecksum and not listonly):
  VerbosePrintOut("File Content Checksum Error with file " +
  fname + " at offset " + str(fcontentstart))
@@ -4836,8 +5258,8 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
  fcontents.seek(0, 0)
  if(not contentasfile):
  fcontents = fcontents.read()
- outlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize, fuid, funame, fgid, fgname, fid,
- finode, flinkcount, fdev, frdev, fseeknextfile, fjsoncontent, fextrafieldslist, HeaderOut[-4], HeaderOut[-3], fcontents]
+ outlist = {'fheaders': [ftype, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+ fcsize, fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev, frdev, fseeknextfile], 'fextradata': fextrafieldslist, 'fjsoncontent': fjsoncontent, 'fcontents': fcontents, 'fjsonchecksumtype': fjsonchecksumtype, 'fheaderchecksumtype': HeaderOut[-4].lower(), 'fcontentchecksumtype': HeaderOut[-3].lower()}
  return outlist
 
 
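For context: ReadFileHeaderDataWithContentToList() now returns a dict keyed by section instead of a positional flat list, so callers that previously indexed outlist[3] for the file name now go through 'fheaders'. A consumption sketch under that assumption:

entry = ReadFileHeaderDataWithContentToList(fp, contentasfile=True)

fname = entry['fheaders'][3]         # header fields keep their previous order
body  = entry['fcontents'].read()    # file-like when contentasfile=True
print(fname, entry['fcontentchecksumtype'], len(body))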
@@ -4854,6 +5276,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
  CatSizeEnd = CatSize
  fp.seek(curloc, 0)
  inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+ headeroffset = fp.tell()
  formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
  formdelszie = len(formatspecs['format_delimiter'])
  formdel = fp.read(formdelszie).decode("UTF-8")
@@ -4873,7 +5296,7 @@
  newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
  if(not headercheck and not skipchecksum):
  VerbosePrintOut(
- "File Header Checksum Error with file at offset " + str(0))
+ "File Header Checksum Error with file at offset " + str(headeroffset))
  VerbosePrintOut("'" + fprechecksum + "' != " +
  "'" + newfcs + "'")
  return False
@@ -4925,6 +5348,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
  CatSizeEnd = CatSize
  fp.seek(curloc, 0)
  inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+ headeroffset = fp.tell()
  formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
  formdelszie = len(formatspecs['format_delimiter'])
  formdel = fp.read(formdelszie).decode("UTF-8")
@@ -4957,7 +5381,7 @@
  pass
  fvendorfieldslist = []
  fvendorfields = 0;
- if(len(inheader)>extraend):
+ if((len(inheader) - 2)>extraend):
  extrastart = extraend
  extraend = len(inheader) - 2
  while(extrastart < extraend):
@@ -4967,8 +5391,8 @@
  formversion = re.findall("([\\d]+)", formstring)
  fheadsize = int(inheader[0], 16)
  fnumfields = int(inheader[1], 16)
- fheadctime = int(inheader[1], 16)
- fheadmtime = int(inheader[1], 16)
+ fheadctime = int(inheader[2], 16)
+ fheadmtime = int(inheader[3], 16)
  fhencoding = inheader[4]
  fostype = inheader[5]
  fpythontype = inheader[6]
@@ -5077,7 +5501,7 @@
  newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
  if(not headercheck and not skipchecksum):
  VerbosePrintOut(
- "File Header Checksum Error with file at offset " + str(0))
+ "File Header Checksum Error with file at offset " + str(headeroffset))
  VerbosePrintOut("'" + fprechecksum + "' != " +
  "'" + newfcs + "'")
  return False
@@ -5187,6 +5611,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
  CatSizeEnd = CatSize
  fp.seek(curloc, 0)
  inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+ headeroffset = fp.tell()
  formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
  formdelszie = len(formatspecs['format_delimiter'])
  formdel = fp.read(formdelszie).decode("UTF-8")
@@ -5214,22 +5639,101 @@
  fnumextrafields = len(fextrafieldslist)
  except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
  try:
- fextrafieldslist = json.loads(fextrafieldslist[0])
+ fextrafieldslist = json.loads(fextrafieldslist[0])
+ except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+ pass
+ fvendorfieldslist = []
+ fvendorfields = 0;
+ if((len(inheader) - 2)>extraend):
+ extrastart = extraend
+ extraend = len(inheader) - 2
+ while(extrastart < extraend):
+ fvendorfieldslist.append(HeaderOut[extrastart])
+ extrastart = extrastart + 1
+ fvendorfields = fvendorfields + 1
+ formversion = re.findall("([\\d]+)", formstring)
+ fheadsize = int(inheader[0], 16)
+ fnumfields = int(inheader[1], 16)
+ fheadctime = int(inheader[2], 16)
+ fheadmtime = int(inheader[3], 16)
+ fhencoding = inheader[4]
+ fostype = inheader[5]
+ fpythontype = inheader[6]
+ fprojectname = inheader[7]
+ fnumfiles = int(inheader[8], 16)
+ fseeknextfile = inheader[9]
+ fjsontype = inheader[10]
+ fjsonlen = int(inheader[11], 16)
+ fjsonsize = int(inheader[12], 16)
+ fjsonchecksumtype = inheader[13]
+ fjsonchecksum = inheader[14]
+ fjsoncontent = {}
+ fjstart = fp.tell()
+ if(fjsontype=="json"):
+ fjsoncontent = {}
+ fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+ if(fjsonsize > 0):
+ try:
+ fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+ fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
+ except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+ try:
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = json.loads(fprejsoncontent)
+ except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+ fprejsoncontent = ""
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = {}
+ else:
+ fprejsoncontent = ""
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = {}
+ elif(testyaml and fjsontype == "yaml"):
+ fjsoncontent = {}
+ fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+ if (fjsonsize > 0):
+ try:
+ # try base64 → utf-8 → YAML
+ fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+ fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+ except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
+ try:
+ # fall back to treating the bytes as plain text YAML
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+ except (UnicodeDecodeError, yaml.YAMLError):
+ # final fallback: empty
+ fprejsoncontent = ""
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = {}
+ else:
+ fprejsoncontent = ""
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = {}
+ elif(not testyaml and fjsontype == "yaml"):
+ fjsoncontent = {}
+ fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+ fprejsoncontent = ""
+ fjsonrawcontent = fprejsoncontent
+ elif(fjsontype=="list"):
+ fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+ flisttmp = MkTempFile()
+ flisttmp.write(fprejsoncontent.encode())
+ flisttmp.seek(0)
+ fjsoncontent = ReadFileHeaderData(flisttmp, fjsonlen, delimiter)
+ flisttmp.close()
+ fjsonrawcontent = fjsoncontent
+ if(fjsonlen==1):
+ try:
+ fjsonrawcontent = base64.b64decode(fjsoncontent[0]).decode("UTF-8")
+ fjsoncontent = json.loads(base64.b64decode(fjsoncontent[0]).decode("UTF-8"))
+ fjsonlen = len(fjsoncontent)
  except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
- pass
- formversion = re.findall("([\\d]+)", formstring)
- fheadsize = int(inheader[0], 16)
- fnumfields = int(inheader[1], 16)
- fnumfiles = int(inheader[8], 16)
- fseeknextfile = inheaderdata[9]
- fjsontype = int(inheader[10], 16)
- fjsonlen = int(inheader[11], 16)
- fjsonsize = int(inheader[12], 16)
- fjsonchecksumtype = inheader[13]
- fjsonchecksum = inheader[14]
- fjsoncontent = {}
- fjstart = fp.tell()
- fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+ try:
+ fjsonrawcontent = fjsoncontent[0]
+ fjsoncontent = json.loads(fjsoncontent[0])
+ except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+ pass
  fjend = fp.tell()
  if(re.findall("^\\+([0-9]+)", fseeknextfile)):
  fseeknextasnum = int(fseeknextfile.replace("+", ""))
@@ -5260,7 +5764,7 @@
  newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
  if(not headercheck and not skipchecksum):
  VerbosePrintOut(
- "File Header Checksum Error with file at offset " + str(0))
+ "File Header Checksum Error with file at offset " + str(headeroffset))
  VerbosePrintOut("'" + fprechecksum + "' != " +
  "'" + newfcs + "'")
  return False
@@ -5764,7 +6268,7 @@ def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksu
  else:
  fctime = format(int(to_ns(time.time())), 'x').lower()
  # Serialize the first group
- fnumfilesa = AppendNullBytes([tmpoutlenhex, fctime, fctime, fencoding, platform.system(), py_implementation, __program_name__, fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
+ fnumfilesa = AppendNullBytes([tmpoutlenhex, fctime, fctime, fencoding, platform.system(), py_implementation, __program_name__+str(__version_info__[0]), fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
  # Append tmpoutlist
  fnumfilesa += AppendNullBytes(tmpoutlist, delimiter)
  # Append extradata items if any
@@ -5990,22 +6494,33 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
  pass
  return fp
 
- def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
- if(not hasattr(fp, "write")):
- return False
+ def AppendFilesWithContentToList(infiles, dirlistfromtxt=False, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  advancedlist = __use_advanced_list__
  altinode = __use_alt_inode__
  infilelist = []
- if(infiles == "-"):
+ if(not dirlistfromtxt and not isinstance(infiles, (list, tuple, )) and infiles == "-"):
  for line in PY_STDIN_TEXT:
  infilelist.append(line.strip())
  infilelist = list(filter(None, infilelist))
- elif(infiles != "-" and dirlistfromtxt and os.path.exists(infiles) and (os.path.isfile(infiles) or infiles == os.devnull)):
- if(not os.path.exists(infiles) or not os.path.isfile(infiles)):
- return False
- with UncompressFile(infiles, formatspecs, "r") as finfile:
- for line in finfile:
- infilelist.append(line.strip())
+ if(not dirlistfromtxt and isinstance(infiles, (list, tuple, )) and len(infiles)==1 and infiles[0] == "-"):
+ for line in PY_STDIN_TEXT:
+ infilelist.append(line.strip())
+ infilelist = list(filter(None, infilelist))
+ elif(dirlistfromtxt):
+ if(not isinstance(infiles, (list, tuple, ))):
+ infiles = [infiles]
+ if(isinstance(infiles, (list, tuple, ))):
+ for fileloc in infiles:
+ if(fileloc == "-"):
+ for line in PY_STDIN_TEXT:
+ infilelist.append(line.strip())
+ else:
+ if(not os.path.exists(fileloc) or not os.path.isfile(fileloc)):
+ return False
+ else:
+ with UncompressFile(fileloc, formatspecs, "r") as finfile:
+ for line in finfile:
+ infilelist.append(line.strip())
  infilelist = list(filter(None, infilelist))
  else:
  if(isinstance(infiles, (list, tuple, ))):
@@ -6034,16 +6549,8 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  inodetofile = {}
  filetoinode = {}
  inodetoforminode = {}
- numfiles = int(len(GetDirList))
- fnumfiles = format(numfiles, 'x').lower()
- AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
- try:
- fp.flush()
- if(hasattr(os, "sync")):
- os.fsync(fp.fileno())
- except (io.UnsupportedOperation, AttributeError, OSError):
- pass
  FullSizeFilesAlt = 0
+ tmpoutlist = []
  for curfname in GetDirList:
  fencoding = "UTF-8"
  if(re.findall("^[.|/]", curfname)):
@@ -6218,7 +6725,7 @@
  typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
  fcontents.seek(0, 0)
  if(typechecktest is not False):
- typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=True)
+ typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
  fcontents.seek(0, 0)
  fcencoding = GetFileEncoding(fcontents, 0, False)[0]
  if(typechecktest is False and not compresswholefile):
@@ -6267,7 +6774,7 @@
  typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
  fcontents.seek(0, 0)
  if(typechecktest is not False):
- typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=True)
+ typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
  fcontents.seek(0, 0)
  fcencoding = GetFileEncoding(fcontents, 0, False)[0]
  if(typechecktest is False and not compresswholefile):
@@ -6311,10 +6818,29 @@
  if(fcompression == "none"):
  fcompression = ""
  fcontents.seek(0, 0)
+ if(not contentasfile):
+ fcontents = fcontents.read()
  ftypehex = format(ftype, 'x').lower()
- tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
- fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
- AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
+ tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+ return tmpoutlist
+
+ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+ GetDirList = AppendFilesWithContentToList(infiles, dirlistfromtxt, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, followlink, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+ if(not hasattr(fp, "write")):
+ return False
+ numfiles = int(len(GetDirList))
+ fnumfiles = format(numfiles, 'x').lower()
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+ try:
+ fp.flush()
+ if(hasattr(os, "sync")):
+ os.fsync(fp.fileno())
+ except (io.UnsupportedOperation, AttributeError, OSError):
+ pass
+ for curfname in GetDirList:
+ tmpoutlist = curfname['fheaders']
+ AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
@@ -6323,16 +6849,14 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  pass
  return fp
 
- def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
- if(not hasattr(fp, "write")):
- return False
+ def AppendFilesWithContentFromTarFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  curinode = 0
  curfid = 0
  inodelist = []
  inodetofile = {}
  filetoinode = {}
  inodetoforminode = {}
- if(infile == "-"):
+ if(not isinstance(infile, (list, tuple, )) and infile == "-"):
  infile = MkTempFile()
  shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
  infile.seek(0, 0)
@@ -6367,10 +6891,8 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
  formatspecs = formatspecs[compresscheck]
  if(compresscheck=="zstd"):
- if 'zstandard' in sys.modules:
- infile = ZstdFile(fileobj=infile, mode="rb")
- elif 'pyzstd' in sys.modules:
- infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+ if 'zstd' in compressionsupport:
+ infile = zstd.ZstdFile(infile, mode="rb")
  tarfp = tarfile.open(fileobj=infile, mode="r")
  else:
  tarfp = tarfile.open(fileobj=infile, mode="r")
@@ -6379,23 +6901,14 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
  formatspecs = formatspecs[compresscheck]
  if(compresscheck=="zstd"):
- if 'zstandard' in sys.modules:
- infile = ZstdFile(fileobj=infile, mode="rb")
- elif 'pyzstd' in sys.modules:
- infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+ if 'zstd' in compressionsupport:
+ infile = zstd.ZstdFile(infile, mode="rb")
  tarfp = tarfile.open(fileobj=infile, mode="r")
  else:
  tarfp = tarfile.open(infile, "r")
  except FileNotFoundError:
  return False
- numfiles = int(len(tarfp.getmembers()))
- AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
- try:
- fp.flush()
- if(hasattr(os, "sync")):
- os.fsync(fp.fileno())
- except (io.UnsupportedOperation, AttributeError, OSError):
- pass
+ tmpoutlist = []
  for member in sorted(tarfp.getmembers(), key=lambda x: x.name):
  fencoding = "UTF-8"
  if(re.findall("^[.|/]", member.name)):
@@ -6408,16 +6921,10 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  ffullmode = member.mode
  flinkcount = 0
  fblksize = 0
- if(hasattr(fstatinfo, "st_blksize")):
- fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
  fblocks = 0
- if(hasattr(fstatinfo, "st_blocks")):
- fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
  fflags = 0
- if(hasattr(fstatinfo, "st_flags")):
- fflags = format(int(fstatinfo.st_flags), 'x').lower()
  ftype = 0
- if(member.isreg()):
+ if(member.isreg() or member.isfile()):
  ffullmode = member.mode + stat.S_IFREG
  ftype = 0
  elif(member.islnk()):
@@ -6493,7 +7000,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
  fcontents.seek(0, 0)
  if(typechecktest is not False):
- typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=True)
+ typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
  fcontents.seek(0, 0)
  fcencoding = GetFileEncoding(fcontents, 0, False)[0]
  if(typechecktest is False and not compresswholefile):
@@ -6537,29 +7044,45 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  if(fcompression == "none"):
  fcompression = ""
  fcontents.seek(0, 0)
+ if(not contentasfile):
+ fcontents = fcontents.read()
  ftypehex = format(ftype, 'x').lower()
- tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
- fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
- AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
+ tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+ return tmpoutlist
+
+ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+ if(not hasattr(fp, "write")):
+ return False
+ GetDirList = AppendFilesWithContentFromTarFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+ numfiles = int(len(GetDirList))
+ fnumfiles = format(numfiles, 'x').lower()
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+ try:
+ fp.flush()
+ if(hasattr(os, "sync")):
+ os.fsync(fp.fileno())
+ except (io.UnsupportedOperation, AttributeError, OSError):
+ pass
+ for curfname in GetDirList:
+ tmpoutlist = curfname['fheaders']
+ AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
  except (io.UnsupportedOperation, AttributeError, OSError):
  pass
- fcontents.close()
  return fp
 
- def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
- if(not hasattr(fp, "write")):
- return False
+ def AppendFilesWithContentFromZipFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  curinode = 0
  curfid = 0
  inodelist = []
  inodetofile = {}
  filetoinode = {}
  inodetoforminode = {}
- if(infile == "-"):
+ if(not isinstance(infile, (list, tuple, )) and infile == "-"):
  infile = MkTempFile()
  shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
  infile.seek(0, 0)
@@ -6585,14 +7108,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  ziptest = zipfp.testzip()
  if(ziptest):
  VerbosePrintOut("Bad file found!")
- numfiles = int(len(zipfp.infolist()))
- AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
- try:
- fp.flush()
- if(hasattr(os, "sync")):
- os.fsync(fp.fileno())
- except (io.UnsupportedOperation, AttributeError, OSError):
- pass
+ tmpoutlist = []
  for member in sorted(zipfp.infolist(), key=lambda x: x.filename):
  fencoding = "UTF-8"
  if(re.findall("^[.|/]", member.filename)):
@@ -6608,20 +7124,18 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  fpremode = int(stat.S_IFREG | 0x1b6)
  flinkcount = 0
  fblksize = 0
- if(hasattr(fstatinfo, "st_blksize")):
- fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
  fblocks = 0
- if(hasattr(fstatinfo, "st_blocks")):
- fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
  fflags = 0
- if(hasattr(fstatinfo, "st_flags")):
- fflags = format(int(fstatinfo.st_flags), 'x').lower()
  ftype = 0
  if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
  ftype = 5
+ elif ((hasattr(member, "symlink") and member.symlink())):
+ ftype = 2
  else:
  ftype = 0
  flinkname = ""
+ if(ftype==2):
+ flinkname = zipfp.read(member.filename).decode("UTF-8")
  fcurfid = format(int(curfid), 'x').lower()
  fcurinode = format(int(curfid), 'x').lower()
  curfid = curfid + 1
@@ -6647,6 +7161,10 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  fmode = format(int(stat.S_IFDIR | 0x1ff), 'x').lower()
  fchmode = stat.S_IMODE(int(stat.S_IFDIR | 0x1ff))
  ftypemod = stat.S_IFMT(int(stat.S_IFDIR | 0x1ff))
+ elif ((hasattr(member, "symlink") and member.symlink()) or member.filename.endswith('/')):
+ fmode = format(int(stat.S_IFREG | 0x1b6), 'x').lower()
+ fchmode = stat.S_IMODE(int(stat.S_IFREG | 0x1b6))
+ ftypemod = stat.S_IFMT(int(stat.S_IFREG | 0x1b6))
  else:
  fmode = format(int(stat.S_IFREG | 0x1b6), 'x').lower()
  fchmode = stat.S_IMODE(int(stat.S_IFREG | 0x1b6))
@@ -6655,6 +7173,17 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  fwinattributes = format(int(zipinfo.external_attr & 0xFFFF), 'x').lower()
  fmode = format(int((zipinfo.external_attr >> 16) & 0xFFFF), 'x').lower()
  prefmode = int((zipinfo.external_attr >> 16) & 0xFFFF)
+ if(prefmode==0):
+ fmode = 0
+ prefmode = 0
+ else:
+ file_type = prefmode & 0xF000
+ if(file_type not in (stat.S_IFREG, stat.S_IFDIR, stat.S_IFLNK)):
+ fmode = 0
+ prefmode = 0
+ if((mode & 0x1FF) == 0):
+ fmode = 0
+ prefmode = 0
  if (prefmode == 0):
  if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
  fmode = format(int(stat.S_IFDIR | 0x1ff), 'x').lower()
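For context: for zip entries written on Unix, the POSIX mode lives in the upper 16 bits of ZipInfo.external_attr, and the new guard zeroes out modes whose file-type bits are not regular file, directory, or symlink, or whose permission bits are all empty (note the added check reads `mode` where the surrounding code suggests `prefmode` was intended). A standalone sketch of the extraction, assuming a hypothetical archive named example.zip:

import stat
import zipfile

with zipfile.ZipFile("example.zip") as zf:
    for info in zf.infolist():
        # Unix mode is stored in the high 16 bits of external_attr
        unix_mode = (info.external_attr >> 16) & 0xFFFF
        kind = {stat.S_IFLNK: "symlink", stat.S_IFDIR: "dir"}.get(
            stat.S_IFMT(unix_mode), "file")
        print(info.filename, oct(unix_mode & 0x1FF), kind)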
@@ -6755,26 +7284,44 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  if(fcompression == "none"):
  fcompression = ""
  fcontents.seek(0, 0)
+ if(not contentasfile):
+ fcontents = fcontents.read()
  ftypehex = format(ftype, 'x').lower()
- tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
- fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
- AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
+ tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+ return tmpoutlist
+
+ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+ if(not hasattr(fp, "write")):
+ return False
+ GetDirList = AppendFilesWithContentFromZipFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+ numfiles = int(len(GetDirList))
+ fnumfiles = format(numfiles, 'x').lower()
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+ try:
+ fp.flush()
+ if(hasattr(os, "sync")):
+ os.fsync(fp.fileno())
+ except (io.UnsupportedOperation, AttributeError, OSError):
+ pass
+ for curfname in GetDirList:
+ tmpoutlist = curfname['fheaders']
+ AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
  except (io.UnsupportedOperation, AttributeError, OSError):
  pass
- fcontents.close()
  return fp
 
  if(not rarfile_support):
- def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+ def AppendFilesWithContentFromRarFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  return False
- else:
  def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
- if(not hasattr(fp, "write")):
- return False
+ return False
+ else:
+ def AppendFilesWithContentFromRarFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  curinode = 0
  curfid = 0
  inodelist = []
@@ -6789,20 +7336,7 @@ else:
  rartest = rarfp.testrar()
  if(rartest):
  VerbosePrintOut("Bad file found!")
- numfiles = int(len(rarfp.infolist()))
- AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
- try:
- fp.flush()
- if(hasattr(os, "sync")):
- os.fsync(fp.fileno())
- except (io.UnsupportedOperation, AttributeError, OSError):
- pass
- try:
- fp.flush()
- if(hasattr(os, "sync")):
- os.fsync(fp.fileno())
- except (io.UnsupportedOperation, AttributeError, OSError):
- pass
+ tmpoutlist = []
  for member in sorted(rarfp.infolist(), key=lambda x: x.filename):
  is_unix = False
  is_windows = False
@@ -6847,14 +7381,8 @@ else:
  fcsize = format(int(0), 'x').lower()
  flinkcount = 0
  fblksize = 0
- if(hasattr(fstatinfo, "st_blksize")):
- fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
  fblocks = 0
- if(hasattr(fstatinfo, "st_blocks")):
- fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
  fflags = 0
- if(hasattr(fstatinfo, "st_flags")):
- fflags = format(int(fstatinfo.st_flags), 'x').lower()
  ftype = 0
  if(member.is_file()):
  ftype = 0
@@ -6992,26 +7520,84 @@ else:
  if(fcompression == "none"):
  fcompression = ""
  fcontents.seek(0, 0)
+ if(not contentasfile):
+ fcontents = fcontents.read()
  ftypehex = format(ftype, 'x').lower()
- tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
- fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
- AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
+ tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+ return tmpoutlist
+ def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+ if(not hasattr(fp, "write")):
+ return False
+ GetDirList = AppendFilesWithContentFromRarFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey, verbose)
+ numfiles = int(len(GetDirList))
+ fnumfiles = format(numfiles, 'x').lower()
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+ try:
+ fp.flush()
+ if(hasattr(os, "sync")):
+ os.fsync(fp.fileno())
+ except (io.UnsupportedOperation, AttributeError, OSError):
+ pass
+ for curfname in GetDirList:
+ tmpoutlist = curfname['fheaders']
+ AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
  except (io.UnsupportedOperation, AttributeError, OSError):
  pass
- fcontents.close()
  return fp
 
  if(not py7zr_support):
- def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+ def sevenzip_readall(infile, **kwargs):
  return False
  else:
+ class _MemoryIO(py7zr.Py7zIO):
+ """In-memory file object used by py7zr's factory API."""
+ def __init__(self):
+ self._buf = bytearray()
+ def write(self, data):
+ # py7zr will call this repeatedly with chunks
+ self._buf.extend(data)
+ def read(self, size=None):
+ if size is None:
+ return bytes(self._buf)
+ return bytes(self._buf[:size])
+ def seek(self, offset, whence=0):
+ # seeking is not needed for this use case
+ return 0
+ def flush(self):
+ pass
+ def size(self):
+ return len(self._buf)
+ class _MemoryFactory(py7zr.WriterFactory):
+ """Factory that creates _MemoryIO objects and keeps them by filename."""
+ def __init__(self):
+ self.files = {}
+ def create(self, filename: str) -> py7zr.Py7zIO:
+ io_obj = _MemoryIO()
+ self.files[filename] = io_obj
+ return io_obj
+ def sevenzip_readall(infile, **kwargs):
+ """
+ Replacement for SevenZipFile.readall() using the new py7zr API.
+
+ Returns: dict[filename -> _MemoryIO]
+ """
+ factory = _MemoryFactory()
+ with py7zr.SevenZipFile(infile, mode="r", **kwargs) as archive:
+ archive.extractall(factory=factory)
+ return factory.files
+
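A quick sketch of exercising the fallback on its own (assumes a py7zr release that provides the WriterFactory/Py7zIO factory API; the archive name is illustrative):

    contents = sevenzip_readall("archive.7z")   # dict: filename -> _MemoryIO
    for name, buf in contents.items():
        print(name, buf.size(), "bytes")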
+ if(not py7zr_support):
+ def AppendFilesWithContentFromSevenZipFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+ return False
  def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
- if(not hasattr(fp, "write")):
- return False
+ return False
+ else:
+ def AppendFilesWithContentFromSevenZipFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  formver = formatspecs['format_ver']
  fileheaderver = str(int(formver.replace(".", "")))
  curinode = 0
@@ -7023,19 +7609,15 @@ else:
  if(not os.path.exists(infile) or not os.path.isfile(infile)):
  return False
  szpfp = py7zr.SevenZipFile(infile, mode="r")
- file_content = szpfp.readall()
+ try:
+ file_content = szpfp.readall()
+ except AttributeError:
+ file_content = sevenzip_readall(infile)
  #sztest = szpfp.testzip()
  sztestalt = szpfp.test()
  if(sztestalt):
  VerbosePrintOut("Bad file found!")
- numfiles = int(len(szpfp.list()))
- AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
- try:
- fp.flush()
- if(hasattr(os, "sync")):
- os.fsync(fp.fileno())
- except (io.UnsupportedOperation, AttributeError, OSError):
- pass
+ tmpoutlist = []
  for member in sorted(szpfp.list(), key=lambda x: x.filename):
  fencoding = "UTF-8"
  if(re.findall("^[.|/]", member.filename)):
@@ -7048,19 +7630,16 @@ else:
  fpremode = int(stat.S_IFREG | 0x1b6)
  elif(member.is_directory):
  fpremode = int(stat.S_IFDIR | 0x1ff)
- fwinattributes = format(int(0), 'x').lower()
+ try:
+ fwinattributes = format(int(member.attributes & 0xFFFF), 'x').lower()
+ except AttributeError:
+ fwinattributes = format(int(0), 'x').lower()
  fcompression = ""
  fcsize = format(int(0), 'x').lower()
  flinkcount = 0
  fblksize = 0
- if(hasattr(fstatinfo, "st_blksize")):
- fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
  fblocks = 0
- if(hasattr(fstatinfo, "st_blocks")):
- fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
  fflags = 0
- if(hasattr(fstatinfo, "st_flags")):
- fflags = format(int(fstatinfo.st_flags), 'x').lower()
  ftype = 0
  if(member.is_directory):
  ftype = 5
@@ -7090,6 +7669,13 @@ else:
  int(stat.S_IMODE(int(stat.S_IFREG | 0x1b6))), 'x').lower()
  ftypemod = format(
  int(stat.S_IFMT(int(stat.S_IFREG | 0x1b6))), 'x').lower()
+ try:
+ ffullmode = member.posix_mode
+ fmode = format(int(ffullmode), 'x').lower()
+ fchmode = format(int(stat.S_IMODE(ffullmode)), 'x').lower()
+ ftypemod = format(int(stat.S_IFMT(ffullmode)), 'x').lower()
+ except AttributeError:
+ pass
  try:
  fuid = format(int(os.getuid()), 'x').lower()
  except (KeyError, AttributeError):
@@ -7127,7 +7713,10 @@ else:
  typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
  fcontents.seek(0, 0)
  fcencoding = GetFileEncoding(fcontents, 0, False)[0]
- file_content[member.filename].close()
+ try:
+ file_content[member.filename].close()
+ except AttributeError:
+ pass
  if(typechecktest is False and not compresswholefile):
  fcontents.seek(0, 2)
  ucfsize = fcontents.tell()
@@ -7169,17 +7758,34 @@ else:
  if(fcompression == "none"):
  fcompression = ""
  fcontents.seek(0, 0)
+ if(not contentasfile):
+ fcontents = fcontents.read()
  ftypehex = format(ftype, 'x').lower()
- tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
- fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
- AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
+ tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+ return tmpoutlist
+ def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+ if(not hasattr(fp, "write")):
+ return False
+ GetDirList = AppendFilesWithContentFromSevenZipFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey, verbose)
+ numfiles = int(len(GetDirList))
+ fnumfiles = format(numfiles, 'x').lower()
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+ try:
+ fp.flush()
+ if(hasattr(os, "sync")):
+ os.fsync(fp.fileno())
+ except (io.UnsupportedOperation, AttributeError, OSError):
+ pass
+ for curfname in GetDirList:
+ tmpoutlist = curfname['fheaders']
+ AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
  except (io.UnsupportedOperation, AttributeError, OSError):
  pass
- fcontents.close()
  return fp
 
  def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
@@ -8449,10 +9055,8 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, file
  elif(compresscheck == "lz4" and compresscheck in compressionsupport):
  fp = lz4.frame.open(infile, "rb")
  elif(compresscheck == "zstd" and compresscheck in compressionsupport):
- if 'zstandard' in sys.modules:
- fp = ZstdFile(infile, mode="rb")
- elif 'pyzstd' in sys.modules:
- fp = pyzstd.zstdfile.ZstdFile(infile, mode="rb")
+ if 'zstd' in compressionsupport:
+ fp = zstd.ZstdFile(infile, mode="rb")
  else:
  return False # fix: 'Flase' -> False
  elif((compresscheck == "lzma" or compresscheck == "xz") and compresscheck in compressionsupport):
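This and the following hunks collapse the old zstandard/pyzstd branching into a single zstd.ZstdFile wrapper gated on compressionsupport. A standalone approximation of the same guard, assuming the zstandard package as the backend (pycatfile resolves its actual backend at import time; file name is illustrative):

    try:
        import zstandard  # backend probe, mirroring the import-time detection
        compressionsupport = ["zstd"]
    except ImportError:
        compressionsupport = []

    if 'zstd' in compressionsupport:
        with open("archive.cat.zst", "rb") as raw:
            data = zstandard.ZstdDecompressor().stream_reader(raw).read()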
@@ -8569,10 +9173,8 @@ def UncompressFileAlt(fp, formatspecs=__file_format_multi_dict__, filestart=0,
  elif kind in ("lzma","xz") and (("lzma" in compressionsupport) or ("xz" in compressionsupport)):
  wrapped = lzma.LZMAFile(src)
  elif kind == "zstd" and ("zstd" in compressionsupport or "zstandard" in compressionsupport):
- if 'zstandard' in sys.modules:
- wrapped = ZstdFile(fileobj=src, mode="rb")
- elif 'pyzstd' in sys.modules:
- wrapped = pyzstd.zstdfile.ZstdFile(fileobj=src, mode="rb")
+ if 'zstd' in compressionsupport:
+ wrapped = zstd.ZstdFile(src, mode="rb")
  else:
  return False
  elif kind == "lz4" and "lz4" in compressionsupport:
@@ -8640,10 +9242,8 @@ def UncompressFile(infile, formatspecs=__file_format_multi_dict__, mode="rb",
  elif (compresscheck == "bzip2" and "bzip2" in compressionsupport):
  fp = bz2.open(infile, mode)
  elif (compresscheck == "zstd" and "zstandard" in compressionsupport):
- if 'zstandard' in sys.modules:
- fp = ZstdFile(infile, mode=mode)
- elif 'pyzstd' in sys.modules:
- fp = pyzstd.zstdfile.ZstdFile(infile, mode=mode)
+ if 'zstd' in compressionsupport:
+ fp = zstd.ZstdFile(infile, mode=mode)
  else:
  return False
  elif (compresscheck == "lz4" and "lz4" in compressionsupport):
@@ -9412,10 +10012,8 @@ def CompressOpenFile(outfile, compressionenable=True, compressionlevel=None,
  outfp = FileLikeAdapter(bz2.open(outfile, mode, compressionlevel), mode="wb")
 
  elif (fextname == ".zst" and "zstandard" in compressionsupport):
- if 'zstandard' in sys.modules:
- outfp = FileLikeAdapter(ZstdFile(outfile, mode=mode, level=compressionlevel), mode="wb")
- elif 'pyzstd' in sys.modules:
- outfp = FileLikeAdapter(pyzstd.zstdfile.ZstdFile(outfile, mode=mode, level=compressionlevel), mode="wb")
+ if 'zstd' in compressionsupport:
+ outfp = FileLikeAdapter(zstd.ZstdFile(outfile, mode=mode, level=compressionlevel), mode="wb")
  else:
  return False # fix: 'Flase' -> False
 
@@ -9643,7 +10241,7 @@ def CatFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_form
  if(not fp):
  return False
  fp.seek(filestart, 0)
- elif(infile == "-"):
+ elif(not isinstance(infile, (list, tuple, )) and infile == "-"):
  fp = MkTempFile()
  shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
  fp.seek(filestart, 0)
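The "-" convention spools stdin into a temporary buffer before any seeking. A standalone sketch of the same pattern (MkTempFile and PY_STDIN_BUF are this module's helpers, approximated here with stdlib equivalents):

    import io
    import shutil
    import sys

    def spool_stdin(bufsize=8192):
        tmp = io.BytesIO()  # stand-in for MkTempFile()
        shutil.copyfileobj(sys.stdin.buffer, tmp, length=bufsize)
        tmp.seek(0, 0)      # rewind so the caller can seek/read from the start
        return tmp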
@@ -9732,6 +10330,7 @@ def CatFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_form
  formatspecs = formatspecs[compresschecking]
  fp.seek(filestart, 0)
  inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+ headeroffset = fp.tell()
  formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
  formdelsize = len(formatspecs['format_delimiter'])
  formdel = fp.read(formdelsize).decode("UTF-8")
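Together with the removal in the next hunk, this moves the headeroffset capture to before the magic and delimiter reads, so it records the true start of the header rather than a position past it. In miniature (illustrative buffer):

    import io

    fp = io.BytesIO(b"CatFile27\x00payload")
    headeroffset = fp.tell()    # captured before any reads: the true header start
    magic = fp.read(9)          # reading advances the position
    fp.seek(headeroffset, 0)    # later validation can return to the header start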
@@ -9739,7 +10338,6 @@ def CatFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_form
  return False
  if(formdel != formatspecs['format_delimiter']):
  return False
- headeroffset = fp.tell()
  if(__use_new_style__):
  inheader = ReadFileHeaderDataBySize(fp, formatspecs['format_delimiter'])
  else:
@@ -11019,8 +11617,8 @@ def CatFileStringListFiles(instr, filestart=0, seekstart=0, seekend=0, skipcheck
  return listarrayfiles
 
 
- def TarFileListFiles(infile, verbose=False, returnfp=False):
- if(infile == "-"):
+ def TarFileListFiles(infile, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ if(not isinstance(infile, (list, tuple, )) and infile == "-"):
  infile = MkTempFile()
  shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
  infile.seek(0, 0)
@@ -11055,10 +11653,8 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
  formatspecs = formatspecs[compresscheck]
  if(compresscheck=="zstd"):
- if 'zstandard' in sys.modules:
- infile = ZstdFile(fileobj=infile, mode="rb")
- elif 'pyzstd' in sys.modules:
- infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+ if 'zstd' in compressionsupport:
+ infile = zstd.ZstdFile(infile, mode="rb")
  tarfp = tarfile.open(fileobj=infile, mode="r")
  else:
  tarfp = tarfile.open(fileobj=infile, mode="r")
@@ -11067,10 +11663,8 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
  formatspecs = formatspecs[compresscheck]
  if(compresscheck=="zstd"):
- if 'zstandard' in sys.modules:
- infile = ZstdFile(fileobj=infile, mode="rb")
- elif 'pyzstd' in sys.modules:
- infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+ if 'zstd' in compressionsupport:
+ infile = zstd.ZstdFile(infile, mode="rb")
  tarfp = tarfile.open(fileobj=infile, mode="r")
  else:
  tarfp = tarfile.open(infile, "r")
@@ -11084,7 +11678,7 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
  ffullmode = member.mode
  flinkcount = 0
  ftype = 0
- if(member.isreg()):
+ if(member.isreg() or member.isfile()):
  ffullmode = member.mode + stat.S_IFREG
  ftype = 0
  elif(member.islnk()):
@@ -11139,8 +11733,12 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
  return True
 
 
+ def TarFileListFile(infile, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ return TarFileListFiles(infile, formatspecs, verbose, returnfp)
+
+
  def ZipFileListFiles(infile, verbose=False, returnfp=False):
- if(infile == "-"):
+ if(not isinstance(infile, (list, tuple, )) and infile == "-"):
  infile = MkTempFile()
  shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
  infile.seek(0, 0)
@@ -11171,35 +11769,59 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
  for member in sorted(zipfp.infolist(), key=lambda x: x.filename):
  zipinfo = zipfp.getinfo(member.filename)
  if(zipinfo.create_system == 0 or zipinfo.create_system == 10):
- fwinattributes = int(zipinfo.external_attr)
+ fwinattributes = int(zipinfo.external_attr & 0xFFFF)
  if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
  fmode = int(stat.S_IFDIR | 0x1ff)
- fchmode = int(stat.S_IMODE(int(stat.S_IFDIR | 0x1ff)))
- ftypemod = int(stat.S_IFMT(int(stat.S_IFDIR | 0x1ff)))
+ fchmode = stat.S_IMODE(int(stat.S_IFDIR | 0x1ff))
+ ftypemod = stat.S_IFMT(int(stat.S_IFDIR | 0x1ff))
+ elif (hasattr(member, "symlink") and member.symlink()):
+ fmode = int(stat.S_IFLNK | 0x1b6)
+ fchmode = stat.S_IMODE(int(stat.S_IFLNK | 0x1b6))
+ ftypemod = stat.S_IFMT(int(stat.S_IFLNK | 0x1b6))
  else:
  fmode = int(stat.S_IFREG | 0x1b6)
- fchmode = int(stat.S_IMODE(fmode))
- ftypemod = int(stat.S_IFMT(fmode))
+ fchmode = stat.S_IMODE(int(stat.S_IFREG | 0x1b6))
+ ftypemod = stat.S_IFMT(int(stat.S_IFREG | 0x1b6))
  elif(zipinfo.create_system == 3):
- fwinattributes = int(0)
- try:
- fmode = int(zipinfo.external_attr)
- fchmode = stat.S_IMODE(fmode)
- ftypemod = stat.S_IFMT(fmode)
- except OverflowError:
- fmode = int(zipinfo.external_attr >> 16)
- fchmode = stat.S_IMODE(fmode)
- ftypemod = stat.S_IFMT(fmode)
+ fwinattributes = int(zipinfo.external_attr & 0xFFFF)
+ fmode = int((zipinfo.external_attr >> 16) & 0xFFFF)
+ prefmode = int((zipinfo.external_attr >> 16) & 0xFFFF)
+ if(prefmode==0):
+ fmode = 0
+ prefmode = 0
+ else:
+ file_type = prefmode & 0xF000
+ if(file_type not in (stat.S_IFREG, stat.S_IFDIR, stat.S_IFLNK)):
+ fmode = 0
+ prefmode = 0
+ if((prefmode & 0x1FF) == 0):
+ fmode = 0
+ prefmode = 0
+ if (prefmode == 0):
+ if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
+ fmode = int(stat.S_IFDIR | 0x1ff)
+ prefmode = int(stat.S_IFDIR | 0x1ff)
+ else:
+ fmode = int(stat.S_IFREG | 0x1b6)
+ prefmode = int(stat.S_IFREG | 0x1b6)
+ fchmode = stat.S_IMODE(prefmode)
+ ftypemod = stat.S_IFMT(prefmode)
  else:
- fwinattributes = int(0)
+ fwinattributes = int(zipinfo.external_attr & 0xFFFF)
  if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
  fmode = int(stat.S_IFDIR | 0x1ff)
- fchmode = int(stat.S_IMODE(int(stat.S_IFDIR | 0x1ff)))
- ftypemod = int(stat.S_IFMT(int(stat.S_IFDIR | 0x1ff)))
+ prefmode = int(stat.S_IFDIR | 0x1ff)
+ fchmode = stat.S_IMODE(prefmode)
+ ftypemod = stat.S_IFMT(prefmode)
  else:
  fmode = int(stat.S_IFREG | 0x1b6)
- fchmode = int(stat.S_IMODE(fmode))
- ftypemod = int(stat.S_IFMT(fmode))
+ prefmode = int(stat.S_IFREG | 0x1b6)
+ fchmode = stat.S_IMODE(prefmode)
+ ftypemod = stat.S_IFMT(prefmode)
  returnval.update({lcfi: member.filename})
  if(not verbose):
  VerbosePrintOut(member.filename)
@@ -11213,10 +11835,17 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
  if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
  ftype = 5
  permissionstr = "d" + permissionstr
+ elif (hasattr(member, "symlink") and member.symlink()):
+ ftype = 2
+ permissionstr = "l" + permissionstr
  else:
  ftype = 0
  permissionstr = "-" + permissionstr
  printfname = member.filename
+ if(ftype==2):
+ flinkname = zipfp.read(member.filename).decode("UTF-8")
+ printfname = member.filename + " -> " + flinkname
  try:
  fuid = int(os.getuid())
  except (KeyError, AttributeError):
@@ -11259,6 +11888,10 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
  return True
 
 
+ def ZipFileListFile(infile, verbose=False, returnfp=False):
+ return ZipFileListFiles(infile, verbose, returnfp)
+
+
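The new ftype 2 branch relies on the zip convention that a symlink member's payload is its target path, with the link bit in the Unix half of external_attr. A standalone sketch of the same decode (the archive name is illustrative):

    import stat
    import zipfile

    with zipfile.ZipFile("example.zip") as zf:
        for info in zf.infolist():
            unix_mode = (info.external_attr >> 16) & 0xFFFF
            if stat.S_ISLNK(unix_mode):
                target = zf.read(info.filename).decode("UTF-8")
                print(info.filename, "->", target)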
  if(not rarfile_support):
  def RarFileListFiles(infile, verbose=False, returnfp=False):
  return False
@@ -11386,6 +12019,11 @@ if(rarfile_support):
  else:
  return True
 
+
+ def RarFileListFile(infile, verbose=False, returnfp=False):
+ return RarFileListFiles(infile, verbose, returnfp)
+
+
  if(not py7zr_support):
  def SevenZipFileListFiles(infile, verbose=False, returnfp=False):
  return False
@@ -11397,7 +12035,10 @@ if(py7zr_support):
  lcfi = 0
  returnval = {}
  szpfp = py7zr.SevenZipFile(infile, mode="r")
- file_content = szpfp.readall()
+ try:
+ file_content = szpfp.readall()
+ except AttributeError:
+ file_content = sevenzip_readall(infile)
  #sztest = szpfp.testzip()
  sztestalt = szpfp.test()
  if(sztestalt):
@@ -11420,6 +12061,13 @@ if(py7zr_support):
  fmode = int(stat.S_IFLNK | 0x1b6)
  fchmode = int(stat.S_IMODE(int(stat.S_IFLNK | 0x1b6)))
  ftypemod = int(stat.S_IFMT(int(stat.S_IFLNK | 0x1b6)))
+ try:
+ ffullmode = member.posix_mode
+ fmode = int(ffullmode)
+ fchmode = int(stat.S_IMODE(ffullmode))
+ ftypemod = int(stat.S_IFMT(ffullmode))
+ except AttributeError:
+ pass
  returnval.update({lcfi: member.filename})
  if(not verbose):
  VerbosePrintOut(member.filename)
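The try/except here probes for a posix_mode attribute on py7zr's list() entries, falling back silently where the installed py7zr does not provide it. The probe in isolation (archive name is illustrative):

    import stat

    import py7zr

    with py7zr.SevenZipFile("archive.7z", mode="r") as z:
        for member in z.list():
            mode = getattr(member, "posix_mode", None)  # None/absent on older releases
            if mode is not None:
                print(member.filename, oct(stat.S_IMODE(mode)))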
@@ -11441,7 +12089,10 @@ if(py7zr_support):
  printfname = member.filename
  if(ftype == 0):
  fsize = len(file_content[member.filename].read())
- file_content[member.filename].close()
+ try:
+ file_content[member.filename].close()
+ except AttributeError:
+ pass
  try:
  fuid = int(os.getuid())
  except (KeyError, AttributeError):
@@ -11484,12 +12135,16 @@ if(py7zr_support):
  return True
 
 
+ def SevenZipFileListFile(infile, verbose=False, returnfp=False):
+ return SevenZipFileListFiles(infile, verbose, returnfp)
+
+
  def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict__, seektoend=False, newstyle=False, returnfp=False):
  checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
  formatspecs = formatspecs[checkcompressfile]
  if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
- return TarFileListFiles(infile, verbose, returnfp)
+ return TarFileListFiles(infile, formatspecs, verbose, returnfp)
  elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
  return ZipFileListFiles(infile, verbose, returnfp)
  elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
@@ -11503,6 +12158,10 @@ def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict_
  return False
 
 
+ def InFileListFile(infile, verbose=False, formatspecs=__file_format_multi_dict__, seektoend=False, newstyle=False, returnfp=False):
+ return InFileListFiles(infile, verbose, formatspecs, seektoend, newstyle, returnfp)
+
+
  def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, seekstart=0, seekend=0, skipchecksum=False, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
  outarray = MkTempFile()
  packform = PackCatFile(infiles, outarray, dirlistfromtxt, compression, compresswholefile,
@@ -11511,6 +12170,11 @@ def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compress
  outarray, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
  return listarrayfiles
 
+
+ def ListDirListFile(infiles, dirlistfromtxt=False, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, seekstart=0, seekend=0, skipchecksum=False, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
+ return ListDirListFiles(infiles, dirlistfromtxt, compression, compresswholefile, compressionlevel, followlink, seekstart, seekend, skipchecksum, checksumtype, formatspecs, seektoend, verbose, returnfp)
+
+
  def detect_cwd(ftp, file_dir):
  """
  Test whether cwd into file_dir works. Returns True if it does,
@@ -13576,7 +14240,6 @@ def run_http_file_server(fileobj, url, on_progress=None, backlog=5):
  if not ah or not ah.strip().lower().startswith("basic "):
  return False
  try:
- import base64
  b64 = ah.strip().split(" ", 1)[1]
  raw = base64.b64decode(_to_bytes(b64))
  try: raw_txt = raw.decode("utf-8")