PyFoxFile 0.25.2__py3-none-any.whl → 0.27.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pyfoxfile.py CHANGED
@@ -14,7 +14,7 @@
  Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
  Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski
 
- $FileInfo: pyfoxfile.py - Last Update: 11/6/2025 Ver. 0.25.2 RC 1 - Author: cooldude2k $
+ $FileInfo: pyfoxfile.py - Last Update: 11/14/2025 Ver. 0.27.0 RC 1 - Author: cooldude2k $
 
  '''
 
  from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -24,7 +24,6 @@ import re
  import sys
  import time
  import stat
- import zlib
  import mmap
  import hmac
  import base64
@@ -38,8 +37,8 @@ import zipfile
  import binascii
  import datetime
  import platform
+ import collections
  from io import StringIO, BytesIO
- from collections import namedtuple
  import posixpath # POSIX-safe joins/normpaths
  try:
  from backports import tempfile
@@ -50,12 +49,16 @@ try:
  from http.server import BaseHTTPRequestHandler, HTTPServer
  from socketserver import TCPServer
  from urllib.parse import urlparse, parse_qs
- import base64
  except ImportError:
  from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
  from SocketServer import TCPServer
  from urlparse import urlparse, parse_qs
- import base64
+
+ try:
+ # Python 3.8+ only
+ from multiprocessing import shared_memory
+ except ImportError:
+ shared_memory = None
 
  # FTP Support
  ftpssl = True
@@ -146,6 +149,15 @@ try:
  except Exception:
  PATH_TYPES = (basestring,)
 
+ def running_interactively():
+ main = sys.modules.get("__main__")
+ no_main_file = not hasattr(main, "__file__")
+ interactive_flag = bool(getattr(sys.flags, "interactive", 0))
+ return no_main_file or interactive_flag
+
+ if running_interactively():
+ logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
+
  def _ensure_text(s, encoding="utf-8", errors="replace", allow_none=False):
  """
  Normalize any input to text_type (unicode on Py2, str on Py3).
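New in 0.27.0: a `running_interactively()` probe that turns on DEBUG logging when the module is imported from an interactive session. A minimal sketch of the two signals it checks (standard CPython behavior; the `print` calls are illustrative only):

```python
import sys

# Inside a REPL the __main__ module has no __file__ attribute,
# and `python -i script.py` sets sys.flags.interactive to 1.
main = sys.modules.get("__main__")
print(not hasattr(main, "__file__"))                # True in a bare REPL
print(bool(getattr(sys.flags, "interactive", 0)))   # True under `python -i`
```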
@@ -166,7 +178,6 @@ def _ensure_text(s, encoding="utf-8", errors="replace", allow_none=False):
 
  # Handle pathlib.Path & other path-like objects
  try:
- import os
  if hasattr(os, "fspath"):
  fs = os.fspath(s)
  if isinstance(fs, text_type):
@@ -207,7 +218,6 @@ except ImportError:
 
  # Windows-specific setup
  if os.name == "nt":
- import io
  def _wrap(stream):
  buf = getattr(stream, "buffer", None)
  is_tty = getattr(stream, "isatty", lambda: False)()
@@ -416,9 +426,13 @@ def is_only_nonprintable(var):
  __file_format_multi_dict__ = {}
  __file_format_default__ = "FoxFile"
  __include_defaults__ = True
- __use_inmemfile__ = True
+ __use_inmem__ = True
+ __use_memfd__ = True
  __use_spoolfile__ = False
  __use_spooldir__ = tempfile.gettempdir()
+ __use_new_style__ = True
+ __use_advanced_list__ = True
+ __use_alt_inode__ = False
  BYTES_PER_KiB = 1024
  BYTES_PER_MiB = 1024 * BYTES_PER_KiB
  # Spool: not tiny, but won’t blow up RAM if many are in use
@@ -440,7 +454,13 @@ if('PYFOXFILE_CONFIG_FILE' in os.environ and os.path.exists(os.environ['PYFOXFIL
  else:
  prescriptpath = get_importing_script_path()
  if(prescriptpath is not None):
- scriptconf = os.path.join(os.path.dirname(prescriptpath), __use_ini_name__)
+ if(__use_ini_file__ and not __use_json_file__):
+ scriptconf = os.path.join(os.path.dirname(prescriptpath), __use_ini_name__)
+ elif(__use_json_file__ and not __use_ini_file__):
+ scriptconf = os.path.join(os.path.dirname(prescriptpath), __use_json_name__)
+ else:
+ scriptconf = ""
+ prescriptpath = None
  else:
  scriptconf = ""
  if os.path.exists(scriptconf):
@@ -462,9 +482,13 @@ if __use_ini_file__ and os.path.exists(__config_file__):
  __file_format_default__ = decode_unicode_escape(config.get('config', 'default'))
  __program_name__ = decode_unicode_escape(config.get('config', 'proname'))
  __include_defaults__ = config.getboolean('config', 'includedef')
- __use_inmemfile__ = config.getboolean('config', 'inmemfile')
+ __use_inmem__ = config.getboolean('config', 'useinmem')
+ __use_memfd__ = config.getboolean('config', 'usememfd')
  __use_spoolfile__ = config.getboolean('config', 'usespoolfile')
  __spoolfile_size__ = config.getint('config', 'spoolfilesize')
+ __use_new_style__ = config.getboolean('config', 'newstyle')
+ __use_advanced_list__ = config.getboolean('config', 'advancedlist')
+ __use_alt_inode__ = config.getboolean('config', 'altinode')
  # Loop through all sections
  for section in config.sections():
  if section == "config":
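For reference, a hypothetical `[config]` section matching the reader above: `inmemfile` is replaced by the `useinmem`/`usememfd` pair, and `newstyle`/`advancedlist`/`altinode` now live globally instead of in each format section (key names are taken from the `config.get*` calls; all values here are illustrative):

```ini
[config]
default = FoxFile
proname = PyFoxFile
includedef = true
useinmem = true
usememfd = true
usespoolfile = false
spoolfilesize = 1048576
newstyle = true
advancedlist = true
altinode = false
```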
@@ -472,8 +496,7 @@ if __use_ini_file__ and os.path.exists(__config_file__):
 
  required_keys = [
  "len", "hex", "ver", "name",
- "magic", "delimiter", "extension",
- "newstyle", "advancedlist", "altinode"
+ "magic", "delimiter", "extension"
  ]
 
  # Py2+Py3 compatible key presence check
@@ -493,9 +516,6 @@ if __use_ini_file__ and os.path.exists(__config_file__):
  'format_hex': config.get(section, 'hex'),
  'format_delimiter': delim,
  'format_ver': config.get(section, 'ver'),
- 'new_style': config.getboolean(section, 'newstyle'),
- 'use_advanced_list': config.getboolean(section, 'advancedlist'),
- 'use_alt_inode': config.getboolean(section, 'altinode'),
  'format_extension': decode_unicode_escape(config.get(section, 'extension')),
  }
  })
@@ -556,16 +576,19 @@ elif __use_json_file__ and os.path.exists(__config_file__):
  cfg_config = cfg.get('config', {}) or {}
  __file_format_default__ = decode_unicode_escape(_get(cfg_config, 'default', ''))
  __program_name__ = decode_unicode_escape(_get(cfg_config, 'proname', ''))
- __include_defaults__ = _to_bool(_get(cfg_config, 'includedef', False))
- __use_inmemfile__ = _to_bool(_get(cfg_config, 'inmemfile', False))
+ __include_defaults__ = _to_bool(_get(cfg_config, 'includedef', True))
+ __use_inmem__ = _to_bool(_get(cfg_config, 'useinmem', True))
+ __use_memfd__ = _to_bool(_get(cfg_config, 'usememfd', True))
  __use_spoolfile__ = _to_bool(_get(cfg_config, 'usespoolfile', False))
  __spoolfile_size__ = _to_int(_get(cfg_config, 'spoolfilesize', DEFAULT_SPOOL_MAX))
+ __use_new_style__ = _to_bool(_get(cfg_config, 'newstyle', True))
+ __use_advanced_list__ = _to_bool(_get(cfg_config, 'advancedlist', True))
+ __use_alt_inode__ = _to_bool(_get(cfg_config, 'altinode', False))
 
  # --- iterate format sections (everything except "config") ---
  required_keys = [
  "len", "hex", "ver", "name",
- "magic", "delimiter", "extension",
- "newstyle", "advancedlist", "altinode"
+ "magic", "delimiter", "extension"
  ]
 
  for section_name, section in cfg.items():
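The JSON loader mirrors the INI path key for key. A hypothetical JSON `config` block equivalent of the INI example above (same key names as the `_get` calls; values illustrative):

```json
{
  "config": {
    "default": "FoxFile",
    "proname": "PyFoxFile",
    "includedef": true,
    "useinmem": true,
    "usememfd": true,
    "usespoolfile": false,
    "spoolfilesize": 1048576,
    "newstyle": true,
    "advancedlist": true,
    "altinode": false
  }
}
```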
@@ -583,9 +606,6 @@ elif __use_json_file__ and os.path.exists(__config_file__):
  fmt_hex = decode_unicode_escape(_get(section, 'hex', ''))
  fmt_ver = decode_unicode_escape(_get(section, 'ver', ''))
  delim = decode_unicode_escape(_get(section, 'delimiter', ''))
- new_style = _to_bool(_get(section, 'newstyle', False))
- adv_list = _to_bool(_get(section, 'advancedlist', False))
- alt_inode = _to_bool(_get(section, 'altinode', False))
  extension = decode_unicode_escape(_get(section, 'extension', ''))
 
  # keep your delimiter validation semantics
@@ -600,9 +620,6 @@ elif __use_json_file__ and os.path.exists(__config_file__):
  'format_hex': fmt_hex,
  'format_delimiter': delim,
  'format_ver': fmt_ver,
- 'new_style': new_style,
- 'use_advanced_list': adv_list,
- 'use_alt_inode': alt_inode,
  'format_extension': extension,
  }
  })
@@ -641,21 +658,18 @@ __file_format_len__ = __file_format_multi_dict__[__file_format_default__]['forma
  __file_format_hex__ = __file_format_multi_dict__[__file_format_default__]['format_hex']
  __file_format_delimiter__ = __file_format_multi_dict__[__file_format_default__]['format_delimiter']
  __file_format_ver__ = __file_format_multi_dict__[__file_format_default__]['format_ver']
- __use_new_style__ = __file_format_multi_dict__[__file_format_default__]['new_style']
- __use_advanced_list__ = __file_format_multi_dict__[__file_format_default__]['use_advanced_list']
- __use_alt_inode__ = __file_format_multi_dict__[__file_format_default__]['use_alt_inode']
  __file_format_extension__ = __file_format_multi_dict__[__file_format_default__]['format_extension']
  __file_format_dict__ = __file_format_multi_dict__[__file_format_default__]
  __project__ = __program_name__
  __program_alt_name__ = __program_name__
  __project_url__ = "https://github.com/GameMaker2k/PyFoxFile"
  __project_release_url__ = __project_url__+"/releases/latest"
- __version_info__ = (0, 25, 2, "RC 1", 1)
- __version_date_info__ = (2025, 11, 6, "RC 1", 1)
+ __version_info__ = (0, 27, 0, "RC 1", 1)
+ __version_date_info__ = (2025, 11, 14, "RC 1", 1)
  __version_date__ = str(__version_date_info__[0]) + "." + str(
  __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
  __revision__ = __version_info__[3]
- __revision_id__ = "$Id: 30b50b3fe5848bbe7a8ffa021b798be5dd67425e $"
+ __revision_id__ = "$Id: 922ba385fbf9784e38aef32e660e695e81aee4bf $"
  if(__version_info__[4] is not None):
  __version_date_plusrc__ = __version_date__ + \
  "-" + str(__version_date_info__[4])
@@ -667,6 +681,9 @@ if(__version_info__[3] is not None):
  if(__version_info__[3] is None):
  __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])
 
+ _logger = logging.getLogger(__project__) # library-style logger
+ _logger.addHandler(logging.NullHandler()) # don't emit logs unless app configures logging
+
  # From: https://stackoverflow.com/a/28568003
  # By Phaxmohdem
 
@@ -804,9 +821,9 @@ except Exception:
  geturls_ua_pyfile_python = "Mozilla/5.0 (compatible; {proname}/{prover}; +{prourl})".format(
  proname=__project__, prover=__version__, prourl=__project_url__)
  if(platform.python_implementation() != ""):
- py_implementation = platform.python_implementation()
+ py_implementation = platform.python_implementation()+str(platform.python_version_tuple()[0])
  if(platform.python_implementation() == ""):
- py_implementation = "CPython"
+ py_implementation = "CPython"+str(platform.python_version_tuple()[0])
  geturls_ua_pyfile_python_alt = "Mozilla/5.0 ({osver}; {archtype}; +{prourl}) {pyimp}/{pyver} (KHTML, like Gecko) {proname}/{prover}".format(osver=platform.system(
  )+" "+platform.release(), archtype=platform.machine(), prourl=__project_url__, pyimp=py_implementation, pyver=platform.python_version(), proname=__project__, prover=__version__)
  geturls_ua_googlebot_google = "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"
@@ -822,13 +839,19 @@ geturls_headers_googlebot_google_old = {'Referer': "http://google.com/", 'User-A
 
  compressionsupport = []
  try:
- import gzip
+ try:
+ import compression.gzip as gzip
+ except ImportError:
+ import gzip
  compressionsupport.append("gz")
  compressionsupport.append("gzip")
  except ImportError:
  pass
  try:
- import bz2
+ try:
+ import compression.bz2 as bz2
+ except ImportError:
+ import bz2
  compressionsupport.append("bz2")
  compressionsupport.append("bzip2")
  except ImportError:
@@ -849,20 +872,20 @@ except ImportError:
  pass
  '''
  try:
- import zstandard
+ try:
+ import compression.zstd as zstd
+ except ImportError:
+ import pyzstd.zstdfile as zstd
  compressionsupport.append("zst")
  compressionsupport.append("zstd")
  compressionsupport.append("zstandard")
  except ImportError:
+ pass
+ try:
  try:
- import pyzstd.zstdfile
- compressionsupport.append("zst")
- compressionsupport.append("zstd")
- compressionsupport.append("zstandard")
+ import compression.lzma as lzma
  except ImportError:
- pass
- try:
- import lzma
+ import lzma
  compressionsupport.append("lzma")
  compressionsupport.append("xz")
  except ImportError:
@@ -872,12 +895,18 @@ except ImportError:
  compressionsupport.append("xz")
  except ImportError:
  pass
- compressionsupport.append("zlib")
- compressionsupport.append("zl")
- compressionsupport.append("zz")
- compressionsupport.append("Z")
- compressionsupport.append("z")
-
+ try:
+ try:
+ import compression.zlib as zlib
+ except ImportError:
+ import zlib
+ compressionsupport.append("zlib")
+ compressionsupport.append("zl")
+ compressionsupport.append("zz")
+ compressionsupport.append("Z")
+ compressionsupport.append("z")
+ except ImportError:
+ pass
  compressionlist = ['auto']
  compressionlistalt = []
  outextlist = []
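Every import block above now follows the same two-step pattern: try the `compression.*` namespace first (added to the standard library in Python 3.14 by PEP 784, which also introduced `compression.zstd`), then fall back to the long-standing top-level module. A minimal sketch of the pattern in isolation, using gzip (the file name is hypothetical):

```python
try:
    # Python 3.14+: the compression package re-exports gzip here
    import compression.gzip as gzip
except ImportError:
    # Older interpreters: classic top-level module
    import gzip

with gzip.open("example.txt.gz", "wb") as fp:
    fp.write(b"hello")
```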
@@ -1036,6 +1065,28 @@ def VerbosePrintOutReturn(dbgtxt, outtype="log", dbgenable=True, dgblevel=20, **
  VerbosePrintOut(dbgtxt, outtype, dbgenable, dgblevel, **kwargs)
  return dbgtxt
 
+ def to_ns(timestamp):
+ """
+ Convert a second-resolution timestamp (int or float)
+ into a nanosecond timestamp (int) by zero-padding.
+ Works in Python 2 and Python 3.
+ """
+ try:
+ # Convert incoming timestamp to float so it works for int or float
+ seconds = float(timestamp)
+ except (TypeError, ValueError):
+ raise ValueError("Timestamp must be int or float")
+
+ # Multiply by 1e9 to get nanoseconds, then cast to int
+ return int(seconds * 1000000000)
+
+ def format_ns_utc(ts_ns, fmt='%Y-%m-%d %H:%M:%S'):
+ ts_ns = int(ts_ns)
+ sec, ns = divmod(ts_ns, 10**9)
+ dt = datetime.datetime.utcfromtimestamp(sec).replace(microsecond=ns // 1000)
+ base = dt.strftime(fmt)
+ ns_str = "%09d" % ns
+ return base + "." + ns_str
 
  def _split_posix(name):
  """
@@ -2059,34 +2110,53 @@ def _normalize_initial_data(data, isbytes, encoding, errors=None):
 
 
  def MkTempFile(data=None,
- inmem=__use_inmemfile__,
+ inmem=__use_inmem__, usememfd=__use_memfd__,
  isbytes=True,
- prefix="",
+ prefix=__program_name__,
  delete=True,
  encoding="utf-8",
- newline=None, # text mode only; in-memory objects ignore newline semantics
+ newline=None,
+ text_errors="strict",
  dir=None,
  suffix="",
  use_spool=__use_spoolfile__,
+ autoswitch_spool=False,
  spool_max=__spoolfile_size__,
- spool_dir=__use_spooldir__):
+ spool_dir=__use_spooldir__,
+ reset_to_start=True,
+ memfd_name=__program_name__,
+ memfd_allow_sealing=False,
+ memfd_flags_extra=0,
+ on_create=None):
  """
  Return a file-like handle with consistent behavior on Py2.7 and Py3.x.
 
  Storage:
- - inmem=True -> BytesIO (bytes) or StringIO (text)
- - inmem=False, use_spool=True -> SpooledTemporaryFile (binary), optionally TextIOWrapper for text
- - inmem=False, use_spool=False -> NamedTemporaryFile (binary), optionally TextIOWrapper for text
+ - inmem=True, usememfd=True, isbytes=True and memfd available
+ -> memfd-backed anonymous file (binary)
+ - inmem=True, otherwise
+ -> BytesIO (bytes) or StringIO (text)
+ - inmem=False, use_spool=True
+ -> SpooledTemporaryFile (binary), optionally TextIOWrapper for text
+ - inmem=False, use_spool=False
+ -> NamedTemporaryFile (binary), optionally TextIOWrapper for text
 
  Text vs bytes:
  - isbytes=True -> file expects bytes; 'data' must be bytes-like
- - isbytes=False -> file expects text; 'data' must be text (unicode/str). Newline translation and encoding
- apply only for spooled/named files (not BytesIO/StringIO).
+ - isbytes=False -> file expects text; 'data' must be text (unicode/str). Newline translation and
+ encoding apply only for spooled/named files (not BytesIO/StringIO).
 
  Notes:
- - On Windows, NamedTemporaryFile(delete=True) keeps the file open and cannot be reopened by other processes.
- Use delete=False if you need to pass the path elsewhere.
- - For text: in-memory StringIO ignores 'newline' (as usual).
+ - On Windows, NamedTemporaryFile(delete=True) keeps the file open and cannot be reopened by
+ other processes. Use delete=False if you need to pass the path elsewhere.
+ - For text: in-memory StringIO ignores 'newline' and 'text_errors' (as usual).
+ - When available, and if usememfd=True, memfd is used only for inmem=True and isbytes=True,
+ providing an anonymous in-memory file descriptor (Linux-only). Text in-memory still uses
+ StringIO to preserve newline semantics.
+ - If autoswitch_spool=True and initial data size exceeds spool_max, in-memory storage is
+ skipped and a spooled file is used instead (if use_spool=True).
+ - If on_create is not None, it is called as on_create(fp, kind) where kind is one of:
+ "memfd", "bytesio", "stringio", "spool", "disk".
  """
 
  # -- sanitize simple params (avoid None surprises) --
@@ -2118,23 +2188,65 @@
  else:
  init = None
 
+ # Size of init for autoswitch; only meaningful for bytes
+ init_len = len(init) if (init is not None and isbytes) else None
+
  # -------- In-memory --------
  if inmem:
- if isbytes:
- f = io.BytesIO(init if init is not None else b"")
- else:
- # newline not enforced for StringIO; matches stdlib semantics
- f = io.StringIO(init if init is not None else "")
- # already positioned at 0 with provided init; ensure rewind for symmetry
- f.seek(0)
- return f
+ # If autoswitch is enabled and data is larger than spool_max, and
+ # spooling is allowed, skip the in-memory branch and fall through
+ # to the spool/disk logic below.
+ if autoswitch_spool and use_spool and init_len is not None and init_len > spool_max:
+ pass # fall through to spool/disk sections
+ else:
+ # Use memfd only for bytes, and only where available (Linux, Python 3.8+)
+ if usememfd and isbytes and hasattr(os, "memfd_create"):
+ name = memfd_name or prefix or "MkTempFile"
+ flags = 0
+ # Close-on-exec is almost always what you want for temps
+ if hasattr(os, "MFD_CLOEXEC"):
+ flags |= os.MFD_CLOEXEC
+ # Optional sealing support if requested and available
+ if memfd_allow_sealing and hasattr(os, "MFD_ALLOW_SEALING"):
+ flags |= os.MFD_ALLOW_SEALING
+ # Extra custom flags (e.g. hugepage flags) if caller wants them
+ if memfd_flags_extra:
+ flags |= memfd_flags_extra
+
+ fd = os.memfd_create(name, flags)
+ # Binary read/write file-like object backed by RAM
+ f = os.fdopen(fd, "w+b")
+
+ if init is not None:
+ f.write(init)
+ if reset_to_start:
+ f.seek(0)
+
+ if on_create is not None:
+ on_create(f, "memfd")
+ return f
+
+ # Fallback: pure Python in-memory objects
+ if isbytes:
+ f = io.BytesIO(init if init is not None else b"")
+ kind = "bytesio"
+ else:
+ # newline/text_errors not enforced for StringIO; matches stdlib semantics
+ f = io.StringIO(init if init is not None else "")
+ kind = "stringio"
+
+ if reset_to_start:
+ f.seek(0)
+
+ if on_create is not None:
+ on_create(f, kind)
+ return f
 
  # Helper: wrap a binary file into a text file with encoding/newline
  def _wrap_text(handle):
  # For both Py2 & Py3, TextIOWrapper gives consistent newline/encoding behavior
- tw = io.TextIOWrapper(handle, encoding=encoding, newline=newline)
- # Position at start; if we wrote initial data below, we will rewind after writing
- return tw
+ return io.TextIOWrapper(handle, encoding=encoding,
+ newline=newline, errors=text_errors)
 
  # -------- Spooled (RAM then disk) --------
  if use_spool:
@@ -2142,19 +2254,33 @@
  bin_mode = "w+b" # read/write, binary
  b = tempfile.SpooledTemporaryFile(max_size=spool_max, mode=bin_mode, dir=spool_dir)
  f = b if isbytes else _wrap_text(b)
+
  if init is not None:
  f.write(init)
+ if reset_to_start:
+ f.seek(0)
+ elif reset_to_start:
  f.seek(0)
+
+ if on_create is not None:
+ on_create(f, "spool")
  return f
 
  # -------- On-disk temp (NamedTemporaryFile) --------
  # Always create binary file; wrap for text if needed for uniform Py2/3 behavior
- b = tempfile.NamedTemporaryFile(mode="w+b", prefix=prefix, suffix=suffix, dir=dir, delete=delete)
+ b = tempfile.NamedTemporaryFile(mode="w+b", prefix=prefix, suffix=suffix,
+ dir=dir, delete=delete)
  f = b if isbytes else _wrap_text(b)
 
  if init is not None:
  f.write(init)
+ if reset_to_start:
+ f.seek(0)
+ elif reset_to_start:
  f.seek(0)
+
+ if on_create is not None:
+ on_create(f, "disk")
  return f
 
 
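A sketch of how the reworked factory might be driven (the `kind` strings come from the docstring above; the memfd branch only fires on Linux with Python 3.8+, otherwise the call falls back to BytesIO):

```python
def report(fp, kind):
    # on_create callback: receives the new handle and its storage kind
    print("backed by:", kind)  # "memfd", "bytesio", "stringio", "spool", or "disk"

f = MkTempFile(b"hello world", inmem=True, on_create=report)
print(f.read())  # b'hello world' -- reset_to_start=True rewound the handle
f.close()

# With autoswitch_spool, oversized payloads skip RAM and go to a spooled file:
f = MkTempFile(b"x" * (1024 * 1024), inmem=True, use_spool=True,
               autoswitch_spool=True, spool_max=65536,
               on_create=report)  # prints: backed by: spool
f.close()
```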
@@ -2490,6 +2616,384 @@ def _is_valid_zlib_header(cmf, flg):
  return False
  return True
 
+ class SharedMemoryFile(object):
+ """
+ File-like wrapper around multiprocessing.shared_memory.SharedMemory.
+
+ Binary-only API, intended to behave similarly to a regular file opened in
+ 'rb', 'wb', or 'r+b' modes (but backed by a fixed-size shared memory block).
+
+ Notes:
+ - Requires Python 3.8+ at runtime to actually use SharedMemory.
+ - On Python 2, importing is fine but constructing will raise RuntimeError.
+ - There is no automatic resizing; buffer size is fixed by SharedMemory.
+ - No real fileno(); this does not represent an OS-level file descriptor.
+ - For text mode, wrap this with io.TextIOWrapper on Python 3:
+ f = SharedMemoryFile(...)
+ tf = io.TextIOWrapper(f, encoding="utf-8")
+ """
+
+ def __init__(self, shm=None, name=None, create=False, size=0,
+ mode='r+b', offset=0, unlink_on_close=False):
+ """
+ Parameters:
+ shm : existing SharedMemory object (preferred).
+ name : name of shared memory block (for attach or create).
+ create: if True, create new SharedMemory; else attach existing.
+ size : size in bytes (required when create=True).
+ mode : like 'rb', 'wb', 'r+b', 'ab' (binary only; 't' not supported).
+ offset: starting offset within the shared memory buffer.
+ unlink_on_close: if True, call shm.unlink() when close() is called.
+
+ Usage examples:
+
+ # Create new block and file-like wrapper
+ f = SharedMemoryFile(name=None, create=True, size=4096, mode='r+b')
+
+ # Attach to existing shared memory by name
+ f = SharedMemoryFile(name="xyz", create=False, mode='r+b')
+
+ # Wrap an existing SharedMemory object
+ shm = shared_memory.SharedMemory(create=True, size=1024)
+ f = SharedMemoryFile(shm=shm, mode='r+b')
+ """
+ if shared_memory is None:
+ # No SharedMemory available on this interpreter
+ raise RuntimeError("multiprocessing.shared_memory.SharedMemory "
+ "is not available on this Python version")
+
+ if 't' in mode:
+ raise ValueError("SharedMemoryFile is binary-only; "
+ "wrap it with io.TextIOWrapper for text")
+
+ self.mode = mode
+ self._closed = False
+ self._unlinked = False
+ self._unlink_on_close = bool(unlink_on_close)
+
+ if shm is not None:
+ self._shm = shm
+ else:
+ # name may be None when create=True
+ self._shm = shared_memory.SharedMemory(name=name, create=create, size=size)
+
+ self._buf = self._shm.buf
+ self._base_offset = int(offset)
+ if self._base_offset < 0 or self._base_offset > len(self._buf):
+ raise ValueError("offset out of range")
+
+ # We treat the accessible region as [base_offset, len(buf))
+ self._size = len(self._buf) - self._base_offset
+ self._pos = 0 # logical file position within that region
+
+ # ---------- basic properties ----------
+
+ @property
+ def name(self):
+ # SharedMemory name (may be None for anonymous)
+ return getattr(self._shm, "name", None)
+
+ @property
+ def closed(self):
+ return self._closed
+
+ def readable(self):
+ return ('r' in self.mode) or ('+' in self.mode)
+
+ def writable(self):
+ return any(ch in self.mode for ch in ('w', 'a', '+'))
+
+ def seekable(self):
+ return True
+
+ # ---------- core helpers ----------
+
+ def _check_closed(self):
+ if self._closed:
+ raise ValueError("I/O operation on closed SharedMemoryFile")
+
+ def _clamp_pos(self, pos):
+ if pos < 0:
+ return 0
+ if pos > self._size:
+ return self._size
+ return pos
+
+ def _region_bounds(self):
+ """Return (start, end) absolute indices into the SharedMemory buffer."""
+ start = self._base_offset + self._pos
+ end = self._base_offset + self._size
+ return start, end
+
+ # ---------- positioning ----------
+
+ def seek(self, offset, whence=0):
+ """
+ Seek to a new file position.
+
+ whence: 0 = from start, 1 = from current, 2 = from end.
+ """
+ self._check_closed()
+ offset = int(offset)
+ whence = int(whence)
+
+ if whence == 0: # from start
+ new_pos = offset
+ elif whence == 1: # from current
+ new_pos = self._pos + offset
+ elif whence == 2: # from end
+ new_pos = self._size + offset
+ else:
+ raise ValueError("invalid whence (expected 0, 1, or 2)")
+
+ self._pos = self._clamp_pos(new_pos)
+ return self._pos
+
+ def tell(self):
+ return self._pos
+
+ # ---------- reading ----------
+
+ def read(self, size=-1):
+ """
+ Read up to 'size' bytes (or to EOF if size<0 or None).
+ Returns bytes (py3) or str (py2).
+ """
+ self._check_closed()
+ if not self.readable():
+ raise IOError("SharedMemoryFile not opened for reading")
+
+ if size is None or size < 0:
+ size = self._size - self._pos
+ else:
+ size = int(size)
+ if size < 0:
+ size = 0
+
+ if size == 0:
+ return b'' if not PY2 else ''
+
+ start, end_abs = self._region_bounds()
+ available = end_abs - (self._base_offset + self._pos)
+ if available <= 0:
+ return b'' if not PY2 else ''
+
+ size = min(size, available)
+
+ abs_start = self._base_offset + self._pos
+ abs_end = abs_start + size
+
+ chunk = self._buf[abs_start:abs_end]
+ if PY2:
+ data = bytes(chunk) # bytes() -> str in py2
+ else:
+ data = bytes(chunk)
+
+ self._pos += len(data)
+ return data
+
+ def readline(self, size=-1):
+ """
+ Read a single line (ending with '\\n' or EOF).
+ If size >= 0, at most that many bytes are returned.
+ """
+ self._check_closed()
+ if not self.readable():
+ raise IOError("SharedMemoryFile not opened for reading")
+
+ # Determine maximum bytes we can scan
+ start, end_abs = self._region_bounds()
+ remaining = end_abs - (self._base_offset + self._pos)
+ if remaining <= 0:
+ return b'' if not PY2 else ''
+
+ if size is not None and size >= 0:
+ size = int(size)
+ max_len = min(size, remaining)
+ else:
+ max_len = remaining
+
+ abs_start = self._base_offset + self._pos
+ abs_max = abs_start + max_len
+
+ # Work on a local bytes slice for easy .find()
+ if PY2:
+ buf_bytes = bytes(self._buf[abs_start:abs_max])
+ else:
+ buf_bytes = bytes(self._buf[abs_start:abs_max])
+
+ idx = buf_bytes.find(b'\n')
+ if idx == -1:
+ # No newline; read entire chunk
+ line_bytes = buf_bytes
+ else:
+ line_bytes = buf_bytes[:idx + 1]
+
+ self._pos += len(line_bytes)
+
+ if PY2:
+ return line_bytes # already str
+ return line_bytes
+
+ def readinto(self, b):
+ """
+ Read bytes into a pre-allocated writable buffer (bytearray/memoryview).
+ Returns number of bytes read.
+ """
+ self._check_closed()
+ if not self.readable():
+ raise IOError("SharedMemoryFile not opened for reading")
+
+ # Normalize target buffer
+ if isinstance(b, memoryview):
+ mv = b
+ else:
+ mv = memoryview(b)
+
+ size = len(mv)
+ if size <= 0:
+ return 0
+
+ start, end_abs = self._region_bounds()
+ remaining = end_abs - (self._base_offset + self._pos)
+ if remaining <= 0:
+ return 0
+
+ size = min(size, remaining)
+
+ abs_start = self._base_offset + self._pos
+ abs_end = abs_start + size
+
+ mv[:size] = self._buf[abs_start:abs_end]
+ self._pos += size
+ return size
+
+ # ---------- writing ----------
+
+ def write(self, data):
+ """
+ Write bytes-like object to the shared memory region.
+
+ Returns number of bytes written. Will raise if not opened writable
+ or if writing would overflow the fixed-size region.
+ """
+ self._check_closed()
+ if not self.writable():
+ raise IOError("SharedMemoryFile not opened for writing")
+
+ if isinstance(data, memoryview):
+ data = bytes(data)
+ elif isinstance(data, bytearray):
+ data = bytes(data)
+
+ if not isinstance(data, binary_types):
+ raise TypeError("write() expects a bytes-like object")
+
+ data_len = len(data)
+ if data_len == 0:
+ return 0
+
+ # Handle "append" semantics roughly: start from end on first write
+ if 'a' in self.mode and self._pos == 0:
+ # Move to logical end of region
+ self._pos = self._size
+
+ start, end_abs = self._region_bounds()
+ remaining = end_abs - (self._base_offset + self._pos)
+ if data_len > remaining:
+ raise IOError("write would overflow SharedMemory region (need %d, have %d)"
+ % (data_len, remaining))
+
+ abs_start = self._base_offset + self._pos
+ abs_end = abs_start + data_len
+
+ self._buf[abs_start:abs_end] = data
+ self._pos += data_len
+ return data_len
+
+ def flush(self):
+ """
+ No-op for shared memory; provided for file-like compatibility.
+ """
+ self._check_closed()
+ # nothing to flush
+
+ # ---------- unlink / close / context manager ----------
+
+ def unlink(self):
+ """
+ Unlink (destroy) the underlying shared memory block.
+
+ After unlink(), new processes cannot attach via name.
+ Existing attachments (including this one) can continue to use
+ the memory until they close() it.
+
+ This is idempotent: calling it more than once is safe.
+ """
+ if self._unlinked:
+ return
+
+ try:
+ self._shm.unlink()
+ except AttributeError:
+ # Should not happen on normal Python 3.8+,
+ # but keep a clear error if it does.
+ raise RuntimeError("Underlying SharedMemory object "
+ "does not support unlink()")
+
+ self._unlinked = True
+
+ def close(self):
+ if self._closed:
+ return
+ self._closed = True
+
+ # Optionally unlink on close if requested
+ if self._unlink_on_close and not self._unlinked:
+ try:
+ self.unlink()
+ except Exception:
+ # best-effort; close anyway
+ pass
+
+ try:
+ self._shm.close()
+ except Exception:
+ pass
+
+ def __enter__(self):
+ self._check_closed()
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.close()
+
+ # ---------- iteration ----------
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ line = self.readline()
+ if (not line) or len(line) == 0:
+ raise StopIteration
+ return line
+
+ if PY2:
+ next = __next__
+
+ # ---------- misc helpers ----------
+
+ def fileno(self):
+ """
+ There is no real OS-level file descriptor; raise OSError for APIs
+ that require a fileno().
+ """
+ raise OSError("SharedMemoryFile does not have a real fileno()")
+
+ def isatty(self):
+ return False
+
  # ---------- Main class ----------
  class ZlibFile(object):
  """
@@ -3674,7 +4178,7 @@ def _bytes_to_int(b):
  # =========================
  # Public checksum API
  # =========================
- def GetHeaderChecksum(inlist=None, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
+ def GetHeaderChecksum(inlist=None, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__, saltkey=None):
  """
  Serialize header fields (list/tuple => joined with delimiter + trailing delimiter;
  or a single field) and compute the requested checksum. Returns lowercase hex.
@@ -3686,15 +4190,30 @@ def GetHeaderChecksum(inlist=None, checksumtype="md5", encodedata=True, formatsp
  if encodedata and not isinstance(hdr_bytes, (bytes, bytearray, memoryview)):
  hdr_bytes = _to_bytes(hdr_bytes)
  hdr_bytes = bytes(hdr_bytes)
-
+ saltkeyval = None
+ if(hasattr(saltkey, "read")):
+ saltkeyval = saltkey.read()
+ if(not isinstance(saltkeyval, bytes) and sys.version_info[0] >= 3):
+ saltkeyval = saltkeyval.encode("UTF-8")
+ elif(isinstance(saltkey, bytes) and sys.version_info[0] >= 3):
+ saltkeyval = saltkey
+ elif(saltkey is not None and os.path.exists(saltkey)):
+ with open(saltkey, "rb") as skfp:
+ saltkeyval = skfp.read()
+ else:
+ saltkey = None
+ if(saltkeyval is None):
+ saltkey = None
  if CheckSumSupport(algo_key, hashlib_guaranteed):
- h = hashlib.new(algo_key)
- h.update(hdr_bytes)
- return h.hexdigest().lower()
+ if(saltkey is None or saltkeyval is None):
+ h = hashlib.new(algo_key, hdr_bytes)
+ else:
+ h = hmac.new(saltkeyval, hdr_bytes, digestmod=algo_key)
+ return h.hexdigest().lower()
 
  return "0"
 
- def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
+ def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__, saltkey=None):
  """
  Accepts bytes/str/file-like.
  - Hashlib algos: streamed in 1 MiB chunks.
@@ -3702,13 +4221,29 @@ def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__
  - Falls back to one-shot for non-file-like inputs.
  """
  algo_key = (checksumtype or "md5").lower()
-
+ saltkeyval = None
+ if(hasattr(saltkey, "read")):
+ saltkeyval = saltkey.read()
+ if(not isinstance(saltkeyval, bytes) and sys.version_info[0] >= 3):
+ saltkeyval = saltkeyval.encode("UTF-8")
+ elif(isinstance(saltkey, bytes) and sys.version_info[0] >= 3):
+ saltkeyval = saltkey
+ elif(saltkey is not None and os.path.exists(saltkey)):
+ with open(saltkey, "rb") as skfp:
+ saltkeyval = skfp.read()
+ else:
+ saltkey = None
+ if(saltkeyval is None):
+ saltkey = None
  # file-like streaming
  if hasattr(inbytes, "read"):
  # hashlib
 
  if CheckSumSupport(algo_key, hashlib_guaranteed):
- h = hashlib.new(algo_key)
+ if(saltkey is None or saltkeyval is None):
+ h = hashlib.new(algo_key)
+ else:
+ h = hmac.new(saltkeyval, digestmod=algo_key)
  while True:
  chunk = inbytes.read(__filebuff_size__)
  if not chunk:
@@ -3729,26 +4264,41 @@ def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__
  # one-shot
 
  if CheckSumSupport(algo_key, hashlib_guaranteed):
- h = hashlib.new(algo_key)
- h.update(data)
+ if(saltkey is None or saltkeyval is None):
+ h = hashlib.new(algo_key, data)
+ else:
+ h = hmac.new(saltkeyval, data, digestmod=algo_key)
  return h.hexdigest().lower()
 
  return "0"
 
- def ValidateHeaderChecksum(inlist=None, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__):
- calc = GetHeaderChecksum(inlist, checksumtype, True, formatspecs)
+ def ValidateHeaderChecksum(inlist=None, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__, saltkey=None):
+ calc = GetHeaderChecksum(inlist, checksumtype, True, formatspecs, saltkey)
  want = (inchecksum or "0").strip().lower()
  if want.startswith("0x"):
  want = want[2:]
- return hmac.compare_digest(want, calc)
+ return CheckChecksums(want, calc)
 
- def ValidateFileChecksum(infile, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__):
- calc = GetFileChecksum(infile, checksumtype, True, formatspecs)
+ def ValidateFileChecksum(infile, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__, saltkey=None):
+ calc = GetFileChecksum(infile, checksumtype, True, formatspecs, saltkey)
  want = (inchecksum or "0").strip().lower()
  if want.startswith("0x"):
  want = want[2:]
- return hmac.compare_digest(want, calc)
+ return CheckChecksums(want, calc)
+
+ def CheckChecksums(inchecksum, outchecksum):
+ # Normalize as text first
+ calc = (inchecksum or "0").strip().lower()
+ want = (outchecksum or "0").strip().lower()
 
+ if want.startswith("0x"):
+ want = want[2:]
+
+ # Now force both to bytes
+ calc_b = _to_bytes(calc) # defaults to utf-8, strict
+ want_b = _to_bytes(want)
+
+ return hmac.compare_digest(want_b, calc_b)
 
  def MajorMinorToDev(major, minor):
  """
@@ -4117,11 +4667,11 @@ def ReadFileHeaderDataWoSize(fp, delimiter=_default_delim(None)):
  return first_two + headerdata
 
 
- def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
  if(not hasattr(fp, "read")):
  return False
  delimiter = formatspecs['format_delimiter']
- if(formatspecs['new_style']):
+ if(__use_new_style__):
  HeaderOut = ReadFileHeaderDataBySize(fp, delimiter)
  else:
  HeaderOut = ReadFileHeaderDataWoSize(fp, delimiter)
@@ -4209,15 +4759,14 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
  except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
  pass
  fp.seek(len(delimiter), 1)
- jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
- if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
+ jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+ if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
  VerbosePrintOut("File JSON Data Checksum Error with file " +
  fname + " at offset " + str(fheaderstart))
  VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
  return False
  fp.seek(len(delimiter), 1)
- newfcs = GetHeaderChecksum(
- HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs)
+ newfcs = GetHeaderChecksum(HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs, saltkey)
  HeaderOut.append(fjsoncontent)
  if(fcs != newfcs and not skipchecksum):
  VerbosePrintOut("File Header Checksum Error with file " +
@@ -4236,10 +4785,9 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
  else:
  fp.seek(fcsize, 1)
  fcontents.seek(0, 0)
- newfccs = GetFileChecksum(
- fcontents, HeaderOut[-3].lower(), False, formatspecs)
+ newfccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
  fcontents.seek(0, 0)
- if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
+ if(not CheckChecksums(fccs, newfccs) and not skipchecksum and not listonly):
  VerbosePrintOut("File Content Checksum Error with file " +
  fname + " at offset " + str(fcontentstart))
  VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4276,12 +4824,12 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
  return HeaderOut
 
 
- def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
  if(not hasattr(fp, "read")):
  return False
  delimiter = formatspecs['format_delimiter']
  fheaderstart = fp.tell()
- if(formatspecs['new_style']):
+ if(__use_new_style__):
  HeaderOut = ReadFileHeaderDataBySize(fp, delimiter)
  else:
  HeaderOut = ReadFileHeaderDataWoSize(fp, delimiter)
@@ -4299,40 +4847,51 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
  fbasedir = os.path.dirname(fname)
  flinkname = HeaderOut[6]
  fsize = int(HeaderOut[7], 16)
- fatime = int(HeaderOut[8], 16)
- fmtime = int(HeaderOut[9], 16)
- fctime = int(HeaderOut[10], 16)
- fbtime = int(HeaderOut[11], 16)
- fmode = int(HeaderOut[12], 16)
+ fblksize = int(HeaderOut[8], 16)
+ fblocks = int(HeaderOut[9], 16)
+ fflags = int(HeaderOut[10], 16)
+ fatime = int(HeaderOut[11], 16)
+ fmtime = int(HeaderOut[12], 16)
+ fctime = int(HeaderOut[13], 16)
+ fbtime = int(HeaderOut[14], 16)
+ fmode = int(HeaderOut[15], 16)
  fchmode = stat.S_IMODE(fmode)
  ftypemod = stat.S_IFMT(fmode)
- fwinattributes = int(HeaderOut[13], 16)
- fcompression = HeaderOut[14]
- fcsize = int(HeaderOut[15], 16)
- fuid = int(HeaderOut[16], 16)
- funame = HeaderOut[17]
- fgid = int(HeaderOut[18], 16)
- fgname = HeaderOut[19]
- fid = int(HeaderOut[20], 16)
- finode = int(HeaderOut[21], 16)
- flinkcount = int(HeaderOut[22], 16)
- fdev = int(HeaderOut[23], 16)
- fdev_minor = int(HeaderOut[24], 16)
- fdev_major = int(HeaderOut[25], 16)
- fseeknextfile = HeaderOut[26]
- fjsontype = HeaderOut[27]
- fjsonlen = int(HeaderOut[28], 16)
- fjsonsize = int(HeaderOut[29], 16)
- fjsonchecksumtype = HeaderOut[30]
- fjsonchecksum = HeaderOut[31]
- fextrasize = int(HeaderOut[32], 16)
- fextrafields = int(HeaderOut[33], 16)
+ fwinattributes = int(HeaderOut[16], 16)
+ fcompression = HeaderOut[17]
+ fcsize = int(HeaderOut[18], 16)
+ fuid = int(HeaderOut[19], 16)
+ funame = HeaderOut[20]
+ fgid = int(HeaderOut[21], 16)
+ fgname = HeaderOut[22]
+ fid = int(HeaderOut[23], 16)
+ finode = int(HeaderOut[24], 16)
+ flinkcount = int(HeaderOut[25], 16)
+ fdev = int(HeaderOut[26], 16)
+ frdev = int(HeaderOut[27], 16)
+ fseeknextfile = HeaderOut[28]
+ fjsontype = HeaderOut[29]
+ fjsonlen = int(HeaderOut[30], 16)
+ fjsonsize = int(HeaderOut[31], 16)
+ fjsonchecksumtype = HeaderOut[32]
+ fjsonchecksum = HeaderOut[33]
+ fextrasize = int(HeaderOut[34], 16)
+ fextrafields = int(HeaderOut[35], 16)
  fextrafieldslist = []
- extrastart = 34
+ extrastart = 36
  extraend = extrastart + fextrafields
  while(extrastart < extraend):
  fextrafieldslist.append(HeaderOut[extrastart])
  extrastart = extrastart + 1
+ fvendorfieldslist = []
+ fvendorfields = 0
+ if((len(HeaderOut) - 4) > extraend):
+ extrastart = extraend
+ extraend = len(HeaderOut) - 4
+ while(extrastart < extraend):
+ fvendorfieldslist.append(HeaderOut[extrastart])
+ extrastart = extrastart + 1
+ fvendorfields = fvendorfields + 1
  if(fextrafields==1):
  try:
  fextrafieldslist = json.loads(base64.b64decode(fextrafieldslist[0]).decode("UTF-8"))
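The fixed header grew three fields (`blksize`, `blocks`, `flags`) ahead of the timestamps and collapsed the old `dev_minor`/`dev_major` pair into a single `rdev`, so every later index shifts and the extra fields now begin at offset 36 rather than 34. A hypothetical reference table of the new positions, matching the parsing above:

```python
# Meaning of HeaderOut[7] .. HeaderOut[35] in the 0.27.0 layout
# (names mirror the local variables; fields after the extra-field
# block and before the trailing checksums are vendor data).
NEW_HEADER_FIELDS = [
    "size", "blksize", "blocks", "flags",            # 7-10
    "atime", "mtime", "ctime", "btime", "mode",      # 11-15
    "winattributes", "compression", "csize",         # 16-18
    "uid", "uname", "gid", "gname",                  # 19-22
    "id", "inode", "linkcount", "dev", "rdev",       # 23-27
    "seeknextfile", "jsontype", "jsonlen",           # 28-30
    "jsonsize", "jsonchecksumtype", "jsonchecksum",  # 31-33
    "extrasize", "extrafields",                      # 34-35
]
```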
@@ -4410,16 +4969,15 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
  pass
  fp.seek(len(delimiter), 1)
  fjend = fp.tell() - 1
- jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
- if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
+ jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+ if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
  VerbosePrintOut("File JSON Data Checksum Error with file " +
  fname + " at offset " + str(fheaderstart))
  VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
  return False
  fcs = HeaderOut[-2].lower()
  fccs = HeaderOut[-1].lower()
- newfcs = GetHeaderChecksum(
- HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs)
+ newfcs = GetHeaderChecksum(HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs, saltkey)
  if(fcs != newfcs and not skipchecksum):
  VerbosePrintOut("File Header Checksum Error with file " +
  fname + " at offset " + str(fheaderstart))
@@ -4442,10 +5000,9 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
  fp.seek(fcsize, 1)
  pyhascontents = False
  fcontents.seek(0, 0)
- newfccs = GetFileChecksum(
- fcontents, HeaderOut[-3].lower(), False, formatspecs)
+ newfccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
  fcontents.seek(0, 0)
- if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
+ if(not CheckChecksums(fccs, newfccs) and not skipchecksum and not listonly):
  VerbosePrintOut("File Content Checksum Error with file " +
  fname + " at offset " + str(fcontentstart))
  VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4462,8 +5019,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
  shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
  cfcontents.close()
  fcontents.seek(0, 0)
- fccs = GetFileChecksum(
- fcontents, HeaderOut[-3].lower(), False, formatspecs)
+ fccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
  fcontentend = fp.tell()
  if(re.findall("^\\+([0-9]+)", fseeknextfile)):
  fseeknextasnum = int(fseeknextfile.replace("+", ""))
@@ -4485,17 +5041,17 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
  fcontents.seek(0, 0)
  if(not contentasfile):
  fcontents = fcontents.read()
- outlist = {'fheadersize': fheadsize, 'fhstart': fheaderstart, 'fhend': fhend, 'ftype': ftype, 'fencoding': fencoding, 'fcencoding': fcencoding, 'fname': fname, 'fbasedir': fbasedir, 'flinkname': flinkname, 'fsize': fsize, 'fatime': fatime, 'fmtime': fmtime, 'fctime': fctime, 'fbtime': fbtime, 'fmode': fmode, 'fchmode': fchmode, 'ftypemod': ftypemod, 'fwinattributes': fwinattributes, 'fcompression': fcompression, 'fcsize': fcsize, 'fuid': fuid, 'funame': funame, 'fgid': fgid, 'fgname': fgname, 'finode': finode, 'flinkcount': flinkcount,
- 'fdev': fdev, 'fminor': fdev_minor, 'fmajor': fdev_major, 'fseeknextfile': fseeknextfile, 'fheaderchecksumtype': HeaderOut[-4], 'fjsonchecksumtype': fjsonchecksumtype, 'fcontentchecksumtype': HeaderOut[-3], 'fnumfields': fnumfields + 2, 'frawheader': HeaderOut, 'fextrafields': fextrafields, 'fextrafieldsize': fextrasize, 'fextradata': fextrafieldslist, 'fjsontype': fjsontype, 'fjsonlen': fjsonlen, 'fjsonsize': fjsonsize, 'fjsonrawdata': fjsonrawcontent, 'fjsondata': fjsoncontent, 'fjstart': fjstart, 'fjend': fjend, 'fheaderchecksum': fcs, 'fjsonchecksum': fjsonchecksum, 'fcontentchecksum': fccs, 'fhascontents': pyhascontents, 'fcontentstart': fcontentstart, 'fcontentend': fcontentend, 'fcontentasfile': contentasfile, 'fcontents': fcontents}
+ outlist = {'fheadersize': fheadsize, 'fhstart': fheaderstart, 'fhend': fhend, 'ftype': ftype, 'fencoding': fencoding, 'fcencoding': fcencoding, 'fname': fname, 'fbasedir': fbasedir, 'flinkname': flinkname, 'fsize': fsize, 'fblksize': fblksize, 'fblocks': fblocks, 'fflags': fflags, 'fatime': fatime, 'fmtime': fmtime, 'fctime': fctime, 'fbtime': fbtime, 'fmode': fmode, 'fchmode': fchmode, 'ftypemod': ftypemod, 'fwinattributes': fwinattributes, 'fcompression': fcompression, 'fcsize': fcsize, 'fuid': fuid, 'funame': funame, 'fgid': fgid, 'fgname': fgname, 'finode': finode, 'flinkcount': flinkcount,
+ 'fdev': fdev, 'frdev': frdev, 'fseeknextfile': fseeknextfile, 'fheaderchecksumtype': HeaderOut[-4], 'fjsonchecksumtype': fjsonchecksumtype, 'fcontentchecksumtype': HeaderOut[-3], 'fnumfields': fnumfields + 2, 'frawheader': HeaderOut, 'fvendorfields': fvendorfields, 'fvendordata': fvendorfieldslist, 'fextrafields': fextrafields, 'fextrafieldsize': fextrasize, 'fextradata': fextrafieldslist, 'fjsontype': fjsontype, 'fjsonlen': fjsonlen, 'fjsonsize': fjsonsize, 'fjsonrawdata': fjsonrawcontent, 'fjsondata': fjsoncontent, 'fjstart': fjstart, 'fjend': fjend, 'fheaderchecksum': fcs, 'fjsonchecksum': fjsonchecksum, 'fcontentchecksum': fccs, 'fhascontents': pyhascontents, 'fcontentstart': fcontentstart, 'fcontentend': fcontentend, 'fcontentasfile': contentasfile, 'fcontents': fcontents}
  return outlist
 
 
- def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
  if(not hasattr(fp, "read")):
  return False
  delimiter = formatspecs['format_delimiter']
  fheaderstart = fp.tell()
- if(formatspecs['new_style']):
+ if(__use_new_style__):
  HeaderOut = ReadFileHeaderDataBySize(fp, delimiter)
  else:
  HeaderOut = ReadFileHeaderDataWoSize(fp, delimiter)
@@ -4513,40 +5069,51 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
  fbasedir = os.path.dirname(fname)
  flinkname = HeaderOut[6]
  fsize = int(HeaderOut[7], 16)
- fatime = int(HeaderOut[8], 16)
- fmtime = int(HeaderOut[9], 16)
- fctime = int(HeaderOut[10], 16)
- fbtime = int(HeaderOut[11], 16)
- fmode = int(HeaderOut[12], 16)
+ fblksize = int(HeaderOut[8], 16)
+ fblocks = int(HeaderOut[9], 16)
+ fflags = int(HeaderOut[10], 16)
+ fatime = int(HeaderOut[11], 16)
+ fmtime = int(HeaderOut[12], 16)
+ fctime = int(HeaderOut[13], 16)
+ fbtime = int(HeaderOut[14], 16)
+ fmode = int(HeaderOut[15], 16)
  fchmode = stat.S_IMODE(fmode)
  ftypemod = stat.S_IFMT(fmode)
- fwinattributes = int(HeaderOut[13], 16)
- fcompression = HeaderOut[14]
- fcsize = int(HeaderOut[15], 16)
- fuid = int(HeaderOut[16], 16)
- funame = HeaderOut[17]
- fgid = int(HeaderOut[18], 16)
- fgname = HeaderOut[19]
- fid = int(HeaderOut[20], 16)
- finode = int(HeaderOut[21], 16)
- flinkcount = int(HeaderOut[22], 16)
- fdev = int(HeaderOut[23], 16)
- fdev_minor = int(HeaderOut[24], 16)
- fdev_major = int(HeaderOut[25], 16)
- fseeknextfile = HeaderOut[26]
- fjsontype = HeaderOut[27]
- fjsonlen = int(HeaderOut[28], 16)
- fjsonsize = int(HeaderOut[29], 16)
- fjsonchecksumtype = HeaderOut[30]
- fjsonchecksum = HeaderOut[31]
- fextrasize = int(HeaderOut[32], 16)
- fextrafields = int(HeaderOut[33], 16)
+ fwinattributes = int(HeaderOut[16], 16)
+ fcompression = HeaderOut[17]
+ fcsize = int(HeaderOut[18], 16)
+ fuid = int(HeaderOut[19], 16)
+ funame = HeaderOut[20]
+ fgid = int(HeaderOut[21], 16)
+ fgname = HeaderOut[22]
+ fid = int(HeaderOut[23], 16)
+ finode = int(HeaderOut[24], 16)
+ flinkcount = int(HeaderOut[25], 16)
+ fdev = int(HeaderOut[26], 16)
+ frdev = int(HeaderOut[27], 16)
+ fseeknextfile = HeaderOut[28]
+ fjsontype = HeaderOut[29]
+ fjsonlen = int(HeaderOut[30], 16)
+ fjsonsize = int(HeaderOut[31], 16)
+ fjsonchecksumtype = HeaderOut[32]
+ fjsonchecksum = HeaderOut[33]
+ fextrasize = int(HeaderOut[34], 16)
+ fextrafields = int(HeaderOut[35], 16)
  fextrafieldslist = []
- extrastart = 34
+ extrastart = 36
  extraend = extrastart + fextrafields
  while(extrastart < extraend):
  fextrafieldslist.append(HeaderOut[extrastart])
  extrastart = extrastart + 1
+ fvendorfieldslist = []
+ fvendorfields = 0
+ if((len(HeaderOut) - 4) > extraend):
+ extrastart = extraend
+ extraend = len(HeaderOut) - 4
+ while(extrastart < extraend):
+ fvendorfieldslist.append(HeaderOut[extrastart])
+ extrastart = extrastart + 1
+ fvendorfields = fvendorfields + 1
  if(fextrafields==1):
  try:
  fextrafieldslist = json.loads(base64.b64decode(fextrafieldslist[0]).decode("UTF-8"))
@@ -4556,6 +5123,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
  fextrafieldslist = json.loads(fextrafieldslist[0])
  except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
  pass
+ fjstart = fp.tell()
  if(fjsontype=="json"):
  fjsoncontent = {}
  fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
@@ -4622,16 +5190,16 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
  except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
  pass
  fp.seek(len(delimiter), 1)
- jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
- if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
+ fjend = fp.tell() - 1
+ jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+ if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
  VerbosePrintOut("File JSON Data Checksum Error with file " +
  fname + " at offset " + str(fheaderstart))
  VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
  return False
  fcs = HeaderOut[-2].lower()
  fccs = HeaderOut[-1].lower()
- newfcs = GetHeaderChecksum(
- HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs)
+ newfcs = GetHeaderChecksum(HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs, saltkey)
  if(fcs != newfcs and not skipchecksum):
  VerbosePrintOut("File Header Checksum Error with file " +
  fname + " at offset " + str(fheaderstart))
@@ -4654,9 +5222,9 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
  fp.seek(fcsize, 1)
  pyhascontents = False
  fcontents.seek(0, 0)
- newfccs = GetFileChecksum(
- fcontents, HeaderOut[-3].lower(), False, formatspecs)
- if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
+ newfccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
+ fcontents.seek(0, 0)
+ if(not CheckChecksums(fccs, newfccs) and not skipchecksum and not listonly):
  VerbosePrintOut("File Content Checksum Error with file " +
  fname + " at offset " + str(fcontentstart))
  VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4673,8 +5241,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
  shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
  cfcontents.close()
  fcontents.seek(0, 0)
- fccs = GetFileChecksum(
- fcontents, HeaderOut[-3].lower(), False, formatspecs)
+ fccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
  fcontentend = fp.tell()
  if(re.findall("^\\+([0-9]+)", fseeknextfile)):
  fseeknextasnum = int(fseeknextfile.replace("+", ""))
@@ -4696,12 +5263,12 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
  fcontents.seek(0, 0)
  if(not contentasfile):
  fcontents = fcontents.read()
- outlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize, fuid, funame, fgid, fgname, fid,
- finode, flinkcount, fdev, fdev_minor, fdev_major, fseeknextfile, fjsoncontent, fextrafieldslist, HeaderOut[-4], HeaderOut[-3], fcontents]
+ outlist = {'fheaders': [ftype, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+ fcsize, fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev, frdev, fseeknextfile], 'fextradata': fextrafieldslist, 'fjsoncontent': fjsoncontent, 'fcontents': fcontents, 'fjsonchecksumtype': fjsonchecksumtype, 'fheaderchecksumtype': HeaderOut[-4].lower(), 'fcontentchecksumtype': HeaderOut[-3].lower()}
  return outlist


- def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
  if(not hasattr(fp, "read")):
  return False
  delimiter = formatspecs['format_delimiter']
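Since ReadFileHeaderDataWithContentToList now returns a keyed dict rather than a positional list, callers index by name. A hedged usage sketch (the call site and fmtspec value are hypothetical):

    entry = ReadFileHeaderDataWithContentToList(fp, formatspecs=fmtspec, saltkey=None)
    if entry is not False:
        headers = entry['fheaders']    # [ftype, fencoding, fcencoding, fname, ...]
        name = headers[3]              # fname, per the list order above
        data = entry['fcontents']      # bytes, or a file object when contentasfile=True
        meta = entry['fjsoncontent']   # decoded per-file JSON metadata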
@@ -4714,6 +5281,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
  CatSizeEnd = CatSize
  fp.seek(curloc, 0)
  inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+ headeroffset = fp.tell()
  formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
  formdelszie = len(formatspecs['format_delimiter'])
  formdel = fp.read(formdelszie).decode("UTF-8")
@@ -4721,7 +5289,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
  return False
  if(formdel != formatspecs['format_delimiter']):
  return False
- if(formatspecs['new_style']):
+ if(__use_new_style__):
  inheader = ReadFileHeaderDataBySize(
  fp, formatspecs['format_delimiter'])
  else:
@@ -4729,19 +5297,19 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
  fp, formatspecs['format_delimiter'])
  fprechecksumtype = inheader[-2]
  fprechecksum = inheader[-1]
- headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
- newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
+ headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs, saltkey)
+ newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
  if(not headercheck and not skipchecksum):
  VerbosePrintOut(
- "File Header Checksum Error with file at offset " + str(0))
+ "File Header Checksum Error with file at offset " + str(headeroffset))
  VerbosePrintOut("'" + fprechecksum + "' != " +
  "'" + newfcs + "'")
  return False
- fnumfiles = int(inheader[6], 16)
- outfseeknextfile = inheaderdata[7]
- fjsonsize = int(inheaderdata[10], 16)
- fjsonchecksumtype = inheader[11]
- fjsonchecksum = inheader[12]
+ fnumfiles = int(inheader[8], 16)
+ outfseeknextfile = inheader[9]
+ fjsonsize = int(inheader[12], 16)
+ fjsonchecksumtype = inheader[13]
+ fjsonchecksum = inheader[14]
  fp.read(fjsonsize)
  # Next seek directive
  if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
@@ -4764,8 +5332,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
  countnum = 0
  flist = []
  while(countnum < fnumfiles):
- HeaderOut = ReadFileHeaderDataWithContent(
- fp, listonly, uncompress, skipchecksum, formatspecs)
+ HeaderOut = ReadFileHeaderDataWithContent(fp, listonly, uncompress, skipchecksum, formatspecs, saltkey)
  if(len(HeaderOut) == 0):
  break
  flist.append(HeaderOut)
@@ -4773,7 +5340,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
  return flist


- def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
+ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None, seektoend=False):
  if(not hasattr(fp, "read")):
  return False
  delimiter = formatspecs['format_delimiter']
@@ -4786,6 +5353,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
  CatSizeEnd = CatSize
  fp.seek(curloc, 0)
  inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+ headeroffset = fp.tell()
  formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
  formdelszie = len(formatspecs['format_delimiter'])
  formdel = fp.read(formdelszie).decode("UTF-8")
@@ -4793,16 +5361,16 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
  return False
  if(formdel != formatspecs['format_delimiter']):
  return False
- if(formatspecs['new_style']):
+ if(__use_new_style__):
  inheader = ReadFileHeaderDataBySize(
  fp, formatspecs['format_delimiter'])
  else:
  inheader = ReadFileHeaderDataWoSize(
  fp, formatspecs['format_delimiter'])
- fnumextrafieldsize = int(inheader[13], 16)
- fnumextrafields = int(inheader[14], 16)
+ fnumextrafieldsize = int(inheader[15], 16)
+ fnumextrafields = int(inheader[16], 16)
  fextrafieldslist = []
- extrastart = 15
+ extrastart = 17
  extraend = extrastart + fnumextrafields
  while(extrastart < extraend):
  fextrafieldslist.append(inheader[extrastart])
@@ -4816,20 +5384,31 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
  fextrafieldslist = json.loads(fextrafieldslist[0])
  except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
  pass
+ fvendorfieldslist = []
+ fvendorfields = 0
+ if((len(inheader) - 2)>extraend):
+ extrastart = extraend
+ extraend = len(inheader) - 2
+ while(extrastart < extraend):
+ fvendorfieldslist.append(inheader[extrastart])
+ extrastart = extrastart + 1
+ fvendorfields = fvendorfields + 1
  formversion = re.findall("([\\d]+)", formstring)
  fheadsize = int(inheader[0], 16)
  fnumfields = int(inheader[1], 16)
- fhencoding = inheader[2]
- fostype = inheader[3]
- fpythontype = inheader[4]
- fprojectname = inheader[4]
- fnumfiles = int(inheader[6], 16)
- fseeknextfile = inheader[7]
- fjsontype = inheader[8]
- fjsonlen = int(inheader[9], 16)
- fjsonsize = int(inheader[10], 16)
- fjsonchecksumtype = inheader[11]
- fjsonchecksum = inheader[12]
+ fheadctime = int(inheader[2], 16)
+ fheadmtime = int(inheader[3], 16)
+ fhencoding = inheader[4]
+ fostype = inheader[5]
+ fpythontype = inheader[6]
+ fprojectname = inheader[7]
+ fnumfiles = int(inheader[8], 16)
+ fseeknextfile = inheader[9]
+ fjsontype = inheader[10]
+ fjsonlen = int(inheader[11], 16)
+ fjsonsize = int(inheader[12], 16)
+ fjsonchecksumtype = inheader[13]
+ fjsonchecksum = inheader[14]
  fjsoncontent = {}
  fjstart = fp.tell()
  if(fjsontype=="json"):
@@ -4915,25 +5494,25 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
  fp.seek(fseeknextasnum, 0)
  else:
  return False
- jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
- if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
+ jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+ if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
  VerbosePrintOut("File JSON Data Checksum Error with file " +
  fname + " at offset " + str(fheaderstart))
  VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
  return False
  fprechecksumtype = inheader[-2]
  fprechecksum = inheader[-1]
- headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
- newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
+ headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs, saltkey)
+ newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
  if(not headercheck and not skipchecksum):
  VerbosePrintOut(
- "File Header Checksum Error with file at offset " + str(0))
+ "File Header Checksum Error with file at offset " + str(headeroffset))
  VerbosePrintOut("'" + fprechecksum + "' != " +
  "'" + newfcs + "'")
  return False
  formversions = re.search('(.*?)(\\d+)', formstring).groups()
  fcompresstype = ""
- outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fprojectname': fprojectname, 'fimptype': fpythontype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fseeknextfile': fseeknextfile, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'fjsonchecksumtype': fjsonchecksumtype, 'fjsontype': fjsontype, 'fjsonlen': fjsonlen, 'fjsonsize': fjsonsize, 'fjsonrawdata': fjsonrawcontent, 'fjsondata': fjsoncontent, 'fjstart': fjstart, 'fjend': fjend, 'fjsonchecksum': fjsonchecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
+ outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fmtime': fheadmtime, 'fctime': fheadctime, 'fversion': formversions[1], 'fostype': fostype, 'fprojectname': fprojectname, 'fimptype': fpythontype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fseeknextfile': fseeknextfile, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'fjsonchecksumtype': fjsonchecksumtype, 'fjsontype': fjsontype, 'fjsonlen': fjsonlen, 'fjsonsize': fjsonsize, 'fjsonrawdata': fjsonrawcontent, 'fjsondata': fjsoncontent, 'fjstart': fjstart, 'fjend': fjend, 'fjsonchecksum': fjsonchecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'fvendorfields': fvendorfields, 'fvendordata': fvendorfieldslist, 'ffilelist': []}
  if (seekstart < 0) or (seekstart > fnumfiles):
  seekstart = 0
  if (seekend == 0) or (seekend > fnumfiles) or (seekend < seekstart):
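The enlarged top-level dict makes the new metadata easy to reach. A hedged reading sketch (the call site and fmtspec value are hypothetical):

    arc = ReadFileDataWithContentToArray(fp, formatspecs=fmtspec, saltkey=None)
    if arc is not False:
        print(arc['fprojectname'], arc['fostype'])
        print(arc['fctime'], arc['fmtime'])              # archive timestamps, ns since epoch
        print(arc['fvendorfields'], arc['fvendordata'])  # trailing vendor fields, if any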
@@ -4960,16 +5539,15 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
  prefjsonchecksum = preheaderdata[31]
  prejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
  fp.seek(len(delimiter), 1)
- prejsonfcs = GetFileChecksum(prejsoncontent, prefjsonchecksumtype, True, formatspecs)
- if(not hmac.compare_digest(prefjsonchecksum, prejsonfcs) and not skipchecksum):
+ prejsonfcs = GetFileChecksum(prejsoncontent, prefjsonchecksumtype, True, formatspecs, saltkey)
+ if(not CheckChecksums(prefjsonchecksum, prejsonfcs) and not skipchecksum):
  VerbosePrintOut("File JSON Data Checksum Error with file " +
  prefname + " at offset " + str(prefhstart))
  VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
  return False
- prenewfcs = GetHeaderChecksum(
- preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
+ prenewfcs = GetHeaderChecksum(preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs, saltkey)
  prefcs = preheaderdata[-2]
- if(not hmac.compare_digest(prefcs, prenewfcs) and not skipchecksum):
+ if(not CheckChecksums(prefcs, prenewfcs) and not skipchecksum):
  VerbosePrintOut("File Header Checksum Error with file " +
  prefname + " at offset " + str(prefhstart))
  VerbosePrintOut("'" + prefcs + "' != " +
@@ -4984,11 +5562,10 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
  if(prefsize > 0):
  prefcontents.write(fp.read(prefsize))
  prefcontents.seek(0, 0)
- prenewfccs = GetFileChecksum(
- prefcontents, preheaderdata[-3].lower(), False, formatspecs)
+ prenewfccs = GetFileChecksum(prefcontents, preheaderdata[-3].lower(), False, formatspecs, saltkey)
  prefccs = preheaderdata[-1]
  pyhascontents = True
- if(not hmac.compare_digest(prefccs, prenewfccs) and not skipchecksum):
+ if(not CheckChecksums(prefccs, prenewfccs) and not skipchecksum):
  VerbosePrintOut("File Content Checksum Error with file " +
  prefname + " at offset " + str(prefcontentstart))
  VerbosePrintOut("'" + prefccs +
5015
5592
  realidnum = 0
5016
5593
  countnum = seekstart
5017
5594
  while (fp.tell() < CatSizeEnd) if seektoend else (countnum < seekend):
5018
- HeaderOut = ReadFileHeaderDataWithContentToArray(
5019
- fp, listonly, contentasfile, uncompress, skipchecksum, formatspecs)
5595
+ HeaderOut = ReadFileHeaderDataWithContentToArray(fp, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey)
5020
5596
  if(len(HeaderOut) == 0):
5021
5597
  break
5022
5598
  HeaderOut.update({'fid': realidnum, 'fidalt': realidnum})
@@ -5027,7 +5603,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
  return outlist


- def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
+ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None, seektoend=False):
  if(not hasattr(fp, "read")):
  return False
  delimiter = formatspecs['format_delimiter']
5040
5616
  CatSizeEnd = CatSize
5041
5617
  fp.seek(curloc, 0)
5042
5618
  inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
5619
+ headeroffset = fp.tell()
5043
5620
  formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
5044
5621
  formdelszie = len(formatspecs['format_delimiter'])
5045
5622
  formdel = fp.read(formdelszie).decode("UTF-8")
@@ -5047,16 +5624,16 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
  return False
  if(formdel != formatspecs['format_delimiter']):
  return False
- if(formatspecs['new_style']):
+ if(__use_new_style__):
  inheader = ReadFileHeaderDataBySize(
  fp, formatspecs['format_delimiter'])
  else:
  inheader = ReadFileHeaderDataWoSize(
  fp, formatspecs['format_delimiter'])
- fnumextrafieldsize = int(inheader[13], 16)
- fnumextrafields = int(inheader[14], 16)
+ fnumextrafieldsize = int(inheader[15], 16)
+ fnumextrafields = int(inheader[16], 16)
  fextrafieldslist = []
- extrastart = 15
+ extrastart = 17
  extraend = extrastart + fnumextrafields
  while(extrastart < extraend):
  fextrafieldslist.append(inheader[extrastart])
@@ -5070,19 +5647,98 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
  fextrafieldslist = json.loads(fextrafieldslist[0])
  except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
  pass
+ fvendorfieldslist = []
+ fvendorfields = 0
+ if((len(inheader) - 2)>extraend):
+ extrastart = extraend
+ extraend = len(inheader) - 2
+ while(extrastart < extraend):
+ fvendorfieldslist.append(inheader[extrastart])
+ extrastart = extrastart + 1
+ fvendorfields = fvendorfields + 1
  formversion = re.findall("([\\d]+)", formstring)
  fheadsize = int(inheader[0], 16)
  fnumfields = int(inheader[1], 16)
- fnumfiles = int(inheader[6], 16)
- fseeknextfile = inheaderdata[7]
- fjsontype = int(inheader[8], 16)
- fjsonlen = int(inheader[9], 16)
- fjsonsize = int(inheader[10], 16)
- fjsonchecksumtype = inheader[11]
- fjsonchecksum = inheader[12]
+ fheadctime = int(inheader[2], 16)
+ fheadmtime = int(inheader[3], 16)
+ fhencoding = inheader[4]
+ fostype = inheader[5]
+ fpythontype = inheader[6]
+ fprojectname = inheader[7]
+ fnumfiles = int(inheader[8], 16)
+ fseeknextfile = inheader[9]
+ fjsontype = inheader[10]
+ fjsonlen = int(inheader[11], 16)
+ fjsonsize = int(inheader[12], 16)
+ fjsonchecksumtype = inheader[13]
+ fjsonchecksum = inheader[14]
  fjsoncontent = {}
  fjstart = fp.tell()
- fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+ if(fjsontype=="json"):
+ fjsoncontent = {}
+ fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+ if(fjsonsize > 0):
+ try:
+ fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+ fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
+ except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+ try:
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = json.loads(fprejsoncontent)
+ except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+ fprejsoncontent = ""
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = {}
+ else:
+ fprejsoncontent = ""
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = {}
+ elif(testyaml and fjsontype == "yaml"):
+ fjsoncontent = {}
+ fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+ if (fjsonsize > 0):
+ try:
+ # try base64 → utf-8 → YAML
+ fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+ fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+ except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
+ try:
+ # fall back to treating the bytes as plain text YAML
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+ except (UnicodeDecodeError, yaml.YAMLError):
+ # final fallback: empty
+ fprejsoncontent = ""
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = {}
+ else:
+ fprejsoncontent = ""
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = {}
+ elif(not testyaml and fjsontype == "yaml"):
+ fjsoncontent = {}
+ # PyYAML unavailable: consume the YAML block to stay aligned, then discard it
+ fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+ fprejsoncontent = ""
+ fjsonrawcontent = fprejsoncontent
+ elif(fjsontype=="list"):
+ fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+ flisttmp = MkTempFile()
+ flisttmp.write(fprejsoncontent.encode())
+ flisttmp.seek(0)
+ fjsoncontent = ReadFileHeaderData(flisttmp, fjsonlen, delimiter)
+ flisttmp.close()
+ fjsonrawcontent = fjsoncontent
+ if(fjsonlen==1):
+ try:
+ fjsonrawcontent = base64.b64decode(fjsoncontent[0]).decode("UTF-8")
+ fjsoncontent = json.loads(base64.b64decode(fjsoncontent[0]).decode("UTF-8"))
+ fjsonlen = len(fjsoncontent)
+ except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+ try:
+ fjsonrawcontent = fjsoncontent[0]
+ fjsoncontent = json.loads(fjsoncontent[0])
+ except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+ pass
  fjend = fp.tell()
  if(re.findall("^\\+([0-9]+)", fseeknextfile)):
  fseeknextasnum = int(fseeknextfile.replace("+", ""))
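For the "list" JSON type decoded above, metadata travels as delimiter-separated fields, with a single field additionally tried as base64-wrapped JSON. A rough writer-side sketch under those assumptions (encode_list_json is a hypothetical name, not part of this module):

    import base64, json

    def encode_list_json(jsondata, delimiter):
        # One base64-wrapped JSON document as a single delimited field,
        # mirroring the fjsonlen==1 decode branch above (sketch only).
        payload = base64.b64encode(json.dumps(jsondata).encode("UTF-8")).decode("UTF-8")
        return payload + delimiter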
@@ -5129,7 +5785,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
  il = 0
  while(il < seekstart):
  prefhstart = fp.tell()
- if(formatspecs['new_style']):
+ if(__use_new_style__):
  preheaderdata = ReadFileHeaderDataBySize(
  fp, formatspecs['format_delimiter'])
  else:
@@ -5151,16 +5807,15 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
  prefjsonchecksum = preheaderdata[31]
  prefprejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
  fp.seek(len(delimiter), 1)
- prejsonfcs = GetFileChecksum(prefprejsoncontent, prefjsonchecksumtype, True, formatspecs)
- if(not hmac.compare_digest(prefjsonchecksum, prejsonfcs) and not skipchecksum):
+ prejsonfcs = GetFileChecksum(prefprejsoncontent, prefjsonchecksumtype, True, formatspecs, saltkey)
+ if(not CheckChecksums(prefjsonchecksum, prejsonfcs) and not skipchecksum):
  VerbosePrintOut("File JSON Data Checksum Error with file " +
  prefname + " at offset " + str(prefhstart))
  VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
  return False
- prenewfcs = GetHeaderChecksum(
- preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
+ prenewfcs = GetHeaderChecksum(preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs, saltkey)
  prefcs = preheaderdata[-2]
- if(not hmac.compare_digest(prefcs, prenewfcs) and not skipchecksum):
+ if(not CheckChecksums(prefcs, prenewfcs) and not skipchecksum):
  VerbosePrintOut("File Header Checksum Error with file " +
  prefname + " at offset " + str(prefhstart))
  VerbosePrintOut("'" + prefcs + "' != " +
@@ -5177,11 +5832,10 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
  prefcontents = fp.read(prefsize)
  else:
  prefcontents = fp.read(prefcsize)
- prenewfccs = GetFileChecksum(
- prefcontents, preheaderdata[-3].lower(), False, formatspecs)
+ prenewfccs = GetFileChecksum(prefcontents, preheaderdata[-3].lower(), False, formatspecs, saltkey)
  prefccs = preheaderdata[-1]
  pyhascontents = True
- if(not hmac.compare_digest(prefccs, prenewfccs) and not skipchecksum):
+ if(not CheckChecksums(prefccs, prenewfccs) and not skipchecksum):
  VerbosePrintOut("File Content Checksum Error with file " +
  prefname + " at offset " + str(prefcontentstart))
  VerbosePrintOut("'" + prefccs +
@@ -5208,8 +5862,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
  realidnum = 0
  countnum = seekstart
  while (fp.tell() < CatSizeEnd) if seektoend else (countnum < seekend):
- HeaderOut = ReadFileHeaderDataWithContentToList(
- fp, listonly, contentasfile, uncompress, skipchecksum, formatspecs)
+ HeaderOut = ReadFileHeaderDataWithContentToList(fp, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey)
  if(len(HeaderOut) == 0):
  break
  outlist.append(HeaderOut)
@@ -5217,7 +5870,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
  realidnum = realidnum + 1
  return outlist

- def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
  if(hasattr(infile, "read") or hasattr(infile, "write")):
  fp = infile
  try:
@@ -5312,7 +5965,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
  else:
  break
  readfp.seek(oldfppos, 0)
- ArchiveList.append(ReadFileDataWithContentToArray(readfp, currentfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, seektoend))
+ ArchiveList.append(ReadFileDataWithContentToArray(readfp, currentfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, saltkey, seektoend))
  currentfilepos = readfp.tell()
  else:
  infp = UncompressFileAlt(readfp, formatspecs, currentfilepos)
@@ -5334,27 +5987,27 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
  else:
  break
  infp.seek(oldinfppos, 0)
- ArchiveList.append(ReadFileDataWithContentToArray(infp, currentinfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, seektoend))
+ ArchiveList.append(ReadFileDataWithContentToArray(infp, currentinfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, saltkey, seektoend))
  currentinfilepos = infp.tell()
  currentfilepos = readfp.tell()
  return ArchiveList


- def ReadInMultipleFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+ def ReadInMultipleFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
  if(isinstance(infile, (list, tuple, ))):
  pass
  else:
  infile = [infile]
  outretval = []
  for curfname in infile:
- outretval.append(ReadInFileWithContentToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend))
+ outretval.append(ReadInFileWithContentToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend))
  return outretval

- def ReadInMultipleFilesWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
- return ReadInMultipleFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+ def ReadInMultipleFilesWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
+ return ReadInMultipleFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)


- def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
  if(hasattr(infile, "read") or hasattr(infile, "write")):
  fp = infile
  try:
@@ -5449,7 +6102,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
  else:
  break
  readfp.seek(oldfppos, 0)
- ArchiveList.append(ReadFileDataWithContentToList(readfp, currentfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, seektoend))
+ ArchiveList.append(ReadFileDataWithContentToList(readfp, currentfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, saltkey, seektoend))
  currentfilepos = readfp.tell()
  else:
  infp = UncompressFileAlt(readfp, formatspecs, currentfilepos)
@@ -5471,24 +6124,24 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
  else:
  break
  infp.seek(oldinfppos, 0)
- ArchiveList.append(ReadFileDataWithContentToList(infp, currentinfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, seektoend))
+ ArchiveList.append(ReadFileDataWithContentToList(infp, currentinfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, saltkey, seektoend))
  currentinfilepos = infp.tell()
  currentfilepos = readfp.tell()
  return ArchiveList


- def ReadInMultipleFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+ def ReadInMultipleFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
  if(isinstance(infile, (list, tuple, ))):
  pass
  else:
  infile = [infile]
  outretval = []
  for curfname in infile:
- outretval.append(ReadInFileWithContentToList(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend))
+ outretval.append(ReadInFileWithContentToList(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend))
  return outretval

- def ReadInMultipleFilesWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
- return ReadInMultipleFileWithContentToList(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+ def ReadInMultipleFilesWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
+ return ReadInMultipleFileWithContentToList(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)


  def _field_to_bytes(x):
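With saltkey threaded through every reader entry point, a salted archive verifies only when the same key is supplied. A hedged usage sketch (file name and key are hypothetical):

    with open("backup.fox", "rb") as fp:
        arc = ReadInFileWithContentToArray(fp, "auto", saltkey=b"my-secret")
        if arc is False:
            print("format mismatch or salted-checksum failure")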
@@ -5542,7 +6195,7 @@ def AppendNullBytes(indata=None, delimiter=__file_format_dict__['format_delimite
  def _hex_lower(n):
  return format(int(n), 'x').lower()

- def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__):
+ def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__, saltkey=None):
  """
  Build and write the archive file header.
  Returns the same file-like 'fp' on success, or False on failure.
@@ -5606,18 +6259,21 @@ def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksu
  tmpoutlist.append(fjsonsize)
  if(len(jsondata) > 0):
  tmpoutlist.append(checksumtype[1])
- tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[1], True, formatspecs))
+ tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[1], True, formatspecs, saltkey))
  else:
  tmpoutlist.append("none")
- tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs))
+ tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs, saltkey))
  # Preserve your original "tmpoutlen" computation exactly
  tmpoutlist.append(extrasizelen)
  tmpoutlist.append(extrafields)
- tmpoutlen = 8 + len(tmpoutlist) + len(xlist)
+ tmpoutlen = 10 + len(tmpoutlist) + len(xlist)
  tmpoutlenhex = _hex_lower(tmpoutlen)
-
+ if(hasattr(time, "time_ns")):
+ fctime = format(int(time.time_ns()), 'x').lower()
+ else:
+ fctime = format(int(to_ns(time.time())), 'x').lower()
  # Serialize the first group
- fnumfilesa = AppendNullBytes([tmpoutlenhex, fencoding, platform.system(), py_implementation, __program_name__, fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
+ fnumfilesa = AppendNullBytes([tmpoutlenhex, fctime, fctime, fencoding, platform.system(), py_implementation, __program_name__+str(__version_info__[0]), fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
  # Append tmpoutlist
  fnumfilesa += AppendNullBytes(tmpoutlist, delimiter)
  # Append extradata items if any
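All timestamps are now written as nanoseconds: time.time_ns() (Python 3.7+) when available, otherwise a to_ns() fallback defined elsewhere in this module. A minimal equivalent of that fallback, assuming it simply scales float seconds:

    def to_ns(seconds):
        # Convert float seconds (time.time(), st_mtime, ...) to int
        # nanoseconds; assumed behavior of the module's helper.
        return int(seconds * 1000000000)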
@@ -5627,7 +6283,7 @@ def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksu
  fnumfilesa += AppendNullByte(checksumtype[0], delimiter)

  # 5) inner checksum over fnumfilesa
- outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs)
+ outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs, saltkey)
  tmpfileoutstr = fnumfilesa + AppendNullByte(outfileheadercshex, delimiter)

  # 6) size of (tmpfileoutstr) excluding one delimiter, per your original math
@@ -5640,7 +6296,7 @@ def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksu
  + fnumfilesa
  )

- outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs)
+ outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs, saltkey)
  fnumfilesa += AppendNullByte(outfileheadercshex, delimiter)

  # 8) final total size field (again per your original logic)
@@ -5673,21 +6329,21 @@ def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksu
  return fp


- def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__):
+ def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None):
  if(IsNestedDict(formatspecs) and fmttype in formatspecs):
  formatspecs = formatspecs[fmttype]
  elif(IsNestedDict(formatspecs) and fmttype not in formatspecs):
  fmttype = __file_format_default__
  formatspecs = formatspecs[fmttype]
- AppendFileHeader(fp, 0, "UTF-8", [], {}, checksumtype, formatspecs)
+ AppendFileHeader(fp, 0, "UTF-8", [], {}, checksumtype, formatspecs, saltkey)
  return fp


- def MakeEmptyFoxFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__):
- return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)
+ def MakeEmptyFoxFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None):
+ return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs, saltkey)


- def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, returnfp=False):
+ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
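Creating an empty archive is now a one-call affair on any writable file object, with optional salting. A hedged usage sketch:

    fp = MkTempFile()                                   # in-memory spool from this module
    MakeEmptyFoxFilePointer(fp, saltkey=b"my-secret")   # writes a salted zero-entry header
    fp.seek(0)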
@@ -5717,6 +6373,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
  fp = MkTempFile()
  elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
  fp = outfile
+ return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs, saltkey)
  elif(re.findall(__upload_proto_support__, outfile)):
  fp = MkTempFile()
  else:
@@ -5728,7 +6385,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
  except PermissionError:
  return False
- AppendFileHeader(fp, 0, "UTF-8", [], {}, checksumtype, formatspecs)
+ AppendFileHeader(fp, 0, "UTF-8", [], {}, checksumtype, formatspecs, saltkey)
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
  fp = CompressOpenFileAlt(
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -5759,11 +6416,11 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
  return True


- def MakeEmptyFoxFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__, returnfp=False):
- return MakeEmptyFile(outfile, "auto", compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, returnfp)
+ def MakeEmptyFoxFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, returnfp=False):
+ return MakeEmptyFile(outfile, "auto", compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, returnfp)


- def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, filecontent="", checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__):
+ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, filecontent="", checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None):
  if(not hasattr(fp, "write")):
  return False
  if (isinstance(extradata, dict) or IsNestedDictAlt(extradata)) and len(extradata) > 0:
5795
6452
  tmpoutlist.append(fjsonsize)
5796
6453
  if(len(jsondata) > 0):
5797
6454
  tmpoutlist.append(checksumtype[2])
5798
- tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[2], True, formatspecs))
6455
+ tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[2], True, formatspecs, saltkey))
5799
6456
  else:
5800
6457
  tmpoutlist.append("none")
5801
- tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs))
6458
+ tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs, saltkey))
5802
6459
  tmpoutlist.append(extrasizelen)
5803
6460
  tmpoutlist.append(extrafields)
5804
6461
  outfileoutstr = AppendNullBytes(
@@ -5813,22 +6470,18 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
  outfileoutstr = outfileoutstr + \
  AppendNullBytes(checksumlist, formatspecs['format_delimiter'])
  nullstrecd = formatspecs['format_delimiter'].encode('UTF-8')
- outfileheadercshex = GetFileChecksum(
- outfileoutstr, checksumtype[0], True, formatspecs)
+ outfileheadercshex = GetFileChecksum(outfileoutstr, checksumtype[0], True, formatspecs, saltkey)
  if(len(filecontent) == 0):
- outfilecontentcshex = GetFileChecksum(
- filecontent, "none", False, formatspecs)
+ outfilecontentcshex = GetFileChecksum(filecontent, "none", False, formatspecs, saltkey)
  else:
- outfilecontentcshex = GetFileChecksum(
- filecontent, checksumtype[1], False, formatspecs)
+ outfilecontentcshex = GetFileChecksum(filecontent, checksumtype[1], False, formatspecs, saltkey)
  tmpfileoutstr = outfileoutstr + \
  AppendNullBytes([outfileheadercshex, outfilecontentcshex],
  formatspecs['format_delimiter'])
  formheaersize = format(int(len(tmpfileoutstr) - len(formatspecs['format_delimiter'])), 'x').lower()
  outfileoutstr = AppendNullByte(
  formheaersize, formatspecs['format_delimiter']) + outfileoutstr
- outfileheadercshex = GetFileChecksum(
- outfileoutstr, checksumtype[0], True, formatspecs)
+ outfileheadercshex = GetFileChecksum(outfileoutstr, checksumtype[0], True, formatspecs, saltkey)
  outfileoutstr = outfileoutstr + \
  AppendNullBytes([outfileheadercshex, outfilecontentcshex],
  formatspecs['format_delimiter'])
@@ -5846,14 +6499,9 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
  pass
  return fp

- def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
- if(not hasattr(fp, "write")):
- return False
- advancedlist = formatspecs['use_advanced_list']
- altinode = formatspecs['use_alt_inode']
- if(verbose):
- logging.basicConfig(format="%(message)s",
- stream=PY_STDOUT_TEXT, level=logging.DEBUG)
+ def AppendFilesWithContentToList(infiles, dirlistfromtxt=False, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+ advancedlist = __use_advanced_list__
+ altinode = __use_alt_inode__
  infilelist = []
  if(infiles == "-"):
  for line in PY_STDIN_TEXT:
@@ -5893,16 +6541,8 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  inodetofile = {}
  filetoinode = {}
  inodetoforminode = {}
- numfiles = int(len(GetDirList))
- fnumfiles = format(numfiles, 'x').lower()
- AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
- try:
- fp.flush()
- if(hasattr(os, "sync")):
- os.fsync(fp.fileno())
- except (io.UnsupportedOperation, AttributeError, OSError):
- pass
  FullSizeFilesAlt = 0
+ tmpoutlist = []
  for curfname in GetDirList:
  fencoding = "UTF-8"
  if(re.findall("^[.|/]", curfname)):
@@ -5924,14 +6564,24 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  FullSizeFilesAlt += fstatinfo.st_rsize
  except AttributeError:
  FullSizeFilesAlt += fstatinfo.st_size
+ fblksize = 0
+ if(hasattr(fstatinfo, "st_blksize")):
+ fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
+ fblocks = 0
+ if(hasattr(fstatinfo, "st_blocks")):
+ fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
+ fflags = 0
+ if(hasattr(fstatinfo, "st_flags")):
+ fflags = format(int(fstatinfo.st_flags), 'x').lower()
  ftype = 0
- if(hasattr(os.path, "isjunction") and os.path.isjunction(fname)):
+ if(not followlink and hasattr(os.path, "isjunction") and os.path.isjunction(fname)):
  ftype = 13
- elif(hasattr(fstatinfo, "st_blocks") and fstatinfo.st_blocks * 512 < fstatinfo.st_size):
- ftype = 12
  elif(stat.S_ISREG(fpremode)):
- ftype = 0
- elif(stat.S_ISLNK(fpremode)):
+ if(hasattr(fstatinfo, "st_blocks") and fstatinfo.st_size > 0 and fstatinfo.st_blocks * 512 < fstatinfo.st_size):
+ ftype = 12
+ else:
+ ftype = 0
+ elif(not followlink and stat.S_ISLNK(fpremode)):
  ftype = 2
  elif(stat.S_ISCHR(fpremode)):
  ftype = 3
@@ -5953,43 +6603,42 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  ftype = 0
  flinkname = ""
  fcurfid = format(int(curfid), 'x').lower()
- if not followlink and finode != 0:
+ if(not followlink and finode != 0):
  unique_id = (fstatinfo.st_dev, finode)
- if ftype != 1:
- if unique_id in inodelist:
+ if(ftype != 1):
+ if(unique_id in inodetofile):
  # Hard link detected
  ftype = 1
  flinkname = inodetofile[unique_id]
- if altinode:
- fcurinode = format(int(unique_id[1]), 'x').lower()
- else:
- fcurinode = format(int(inodetoforminode[unique_id]), 'x').lower()
  else:
- # New inode
- inodelist.append(unique_id)
+ # First time seeing this inode
  inodetofile[unique_id] = fname
+ if(unique_id not in inodetoforminode):
  inodetoforminode[unique_id] = curinode
- if altinode:
- fcurinode = format(int(unique_id[1]), 'x').lower()
- else:
- fcurinode = format(int(curinode), 'x').lower()
- curinode += 1
+ curinode = curinode + 1
+ if(altinode):
+ # altinode == True → use real inode number
+ fcurinode = format(int(unique_id[1]), 'x').lower()
+ else:
+ # altinode == False → use synthetic inode id
+ fcurinode = format(int(inodetoforminode[unique_id]), 'x').lower()
  else:
  # Handle cases where inodes are not supported or symlinks are followed
  fcurinode = format(int(curinode), 'x').lower()
- curinode += 1
+ curinode = curinode + 1
  curfid = curfid + 1
  if(ftype == 2):
  flinkname = os.readlink(fname)
- if(not os.path.exists(flinkname)):
+ if(not os.path.exists(fname)):
  return False
  try:
  fdev = fstatinfo.st_rdev
  except AttributeError:
  fdev = 0
- getfdev = GetDevMajorMinor(fdev)
- fdev_minor = getfdev[0]
- fdev_major = getfdev[1]
+ try:
+ frdev = fstatinfo.st_rdev
+ except AttributeError:
+ frdev = 0
  # Types that should be considered zero-length in the archive context:
  zero_length_types = {1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 13}
  # Types that have actual data to read:
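The reworked hard-link bookkeeping above keys every entry on its (st_dev, st_ino) pair: the first path seen for a pair is archived normally, and any later path with the same pair becomes a type-1 entry pointing at it. A condensed sketch of that logic (helper name hypothetical):

    inodetofile = {}

    def classify(path, st):
        # First occurrence: regular entry; repeat occurrence: hard link
        # to the first-seen path (mirrors the loop above).
        uid = (st.st_dev, st.st_ino)
        if st.st_ino != 0 and uid in inodetofile:
            return 1, inodetofile[uid]
        inodetofile[uid] = path
        return 0, ""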
@@ -6000,13 +6649,28 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  fsize = format(int(fstatinfo.st_size), 'x').lower()
  else:
  fsize = format(int(fstatinfo.st_size), 'x').lower()
- fatime = format(int(fstatinfo.st_atime), 'x').lower()
- fmtime = format(int(fstatinfo.st_mtime), 'x').lower()
- fctime = format(int(fstatinfo.st_ctime), 'x').lower()
+ if(hasattr(fstatinfo, "st_atime_ns")):
+ fatime = format(int(fstatinfo.st_atime_ns), 'x').lower()
+ else:
+ fatime = format(int(to_ns(fstatinfo.st_atime)), 'x').lower()
+ if(hasattr(fstatinfo, "st_mtime_ns")):
+ fmtime = format(int(fstatinfo.st_mtime_ns), 'x').lower()
+ else:
+ fmtime = format(int(to_ns(fstatinfo.st_mtime)), 'x').lower()
+ if(hasattr(fstatinfo, "st_ctime_ns")):
+ fctime = format(int(fstatinfo.st_ctime_ns), 'x').lower()
+ else:
+ fctime = format(int(to_ns(fstatinfo.st_ctime)), 'x').lower()
  if(hasattr(fstatinfo, "st_birthtime")):
- fbtime = format(int(fstatinfo.st_birthtime), 'x').lower()
+ if(hasattr(fstatinfo, "st_birthtime_ns")):
+ fbtime = format(int(fstatinfo.st_birthtime_ns), 'x').lower()
+ else:
+ fbtime = format(int(to_ns(fstatinfo.st_birthtime)), 'x').lower()
  else:
- fbtime = format(int(fstatinfo.st_ctime), 'x').lower()
+ if(hasattr(fstatinfo, "st_ctime_ns")):
+ fbtime = format(int(fstatinfo.st_ctime_ns), 'x').lower()
+ else:
+ fbtime = format(int(to_ns(fstatinfo.st_ctime)), 'x').lower()
  fmode = format(int(fstatinfo.st_mode), 'x').lower()
  fchmode = format(int(stat.S_IMODE(fstatinfo.st_mode)), 'x').lower()
  ftypemod = format(int(stat.S_IFMT(fstatinfo.st_mode)), 'x').lower()
@@ -6033,8 +6697,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  except ImportError:
  fgname = ""
  fdev = format(int(fdev), 'x').lower()
- fdev_minor = format(int(fdev_minor), 'x').lower()
- fdev_major = format(int(fdev_major), 'x').lower()
+ frdev = format(int(frdev), 'x').lower()
  finode = format(int(finode), 'x').lower()
  flinkcount = format(int(flinkcount), 'x').lower()
  if(hasattr(fstatinfo, "st_file_attributes")):
@@ -6054,7 +6717,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
  fcontents.seek(0, 0)
  if(typechecktest is not False):
- typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=True)
+ typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
  fcontents.seek(0, 0)
  fcencoding = GetFileEncoding(fcontents, 0, False)[0]
  if(typechecktest is False and not compresswholefile):
@@ -6095,16 +6758,15 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
6095
6758
  fcompression = curcompression
6096
6759
  fcontents.close()
6097
6760
  fcontents = cfcontents
6098
- elif followlink and (ftype == 1 or ftype == 2):
6099
- if(not os.path.exists(flinkname)):
6761
+ elif followlink and (ftype == 2 or ftype in data_types):
6762
+ if(not os.path.exists(fname)):
6100
6763
  return False
6101
- flstatinfo = os.stat(flinkname)
6102
6764
  with open(flinkname, "rb") as fpc:
6103
6765
  shutil.copyfileobj(fpc, fcontents, length=__filebuff_size__)
6104
6766
  typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
6105
6767
  fcontents.seek(0, 0)
6106
6768
  if(typechecktest is not False):
6107
- typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=True)
6769
+ typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
6108
6770
  fcontents.seek(0, 0)
6109
6771
  fcencoding = GetFileEncoding(fcontents, 0, False)[0]
6110
6772
  if(typechecktest is False and not compresswholefile):
@@ -6148,11 +6810,29 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
6148
6810
  if(fcompression == "none"):
6149
6811
  fcompression = ""
6150
6812
  fcontents.seek(0, 0)
6813
+ if(not contentasfile):
6814
+ fcontents = fcontents.read()
6151
6815
  ftypehex = format(ftype, 'x').lower()
6152
- tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
6153
- fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
6154
- AppendFileHeaderWithContent(
6155
- fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
6816
+ tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
6817
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
6818
+ return tmpoutlist
6819
+
6820
+ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
6821
+ GetDirList = AppendFilesWithContentToList(infiles, dirlistfromtxt, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, followlink, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
6822
+ if(not hasattr(fp, "write")):
6823
+ return False
6824
+ numfiles = int(len(GetDirList))
6825
+ fnumfiles = format(numfiles, 'x').lower()
6826
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
6827
+ try:
6828
+ fp.flush()
6829
+ if(hasattr(os, "sync")):
6830
+ os.fsync(fp.fileno())
6831
+ except (io.UnsupportedOperation, AttributeError, OSError):
6832
+ pass
6833
+ for curfname in GetDirList:
6834
+ tmpoutlist = curfname['fheaders']
6835
+ AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
6156
6836
  try:
6157
6837
  fp.flush()
6158
6838
  if(hasattr(os, "sync")):
@@ -6161,12 +6841,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
6161
6841
  pass
6162
6842
  return fp
6163
6843
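
This hunk completes the split of AppendFilesWithContent into two phases: AppendFilesWithContentToList gathers one dict per file (header fields, extra data, JSON data, contents, and per-part checksum types), and the rewritten AppendFilesWithContent writes the archive header and replays that list through AppendFileHeaderWithContent. A rough sketch of consuming the returned list, assuming the ToList signature mirrors the From*ToList variants below (the input path is an invented example):

    entries = AppendFilesWithContentToList(["./src"])
    for entry in entries:
        headers = entry['fheaders']    # hex-encoded header field list
        payload = entry['fcontents']   # file-like object when contentasfile
                                       # is True, plain bytes otherwise
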
 
- def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
- if(not hasattr(fp, "write")):
- return False
- if(verbose):
- logging.basicConfig(format="%(message)s",
- stream=PY_STDOUT_TEXT, level=logging.DEBUG)
+ def AppendFilesWithContentFromTarFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  curinode = 0
  curfid = 0
  inodelist = []
@@ -6208,10 +6883,8 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
  formatspecs = formatspecs[compresscheck]
  if(compresscheck=="zstd"):
- if 'zstandard' in sys.modules:
- infile = ZstdFile(fileobj=infile, mode="rb")
- elif 'pyzstd' in sys.modules:
- infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+ if 'zstd' in compressionsupport:
+ infile = zstd.ZstdFile(infile, mode="rb")
  tarfp = tarfile.open(fileobj=infile, mode="r")
  else:
  tarfp = tarfile.open(fileobj=infile, mode="r")
@@ -6220,23 +6893,14 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
  formatspecs = formatspecs[compresscheck]
  if(compresscheck=="zstd"):
- if 'zstandard' in sys.modules:
- infile = ZstdFile(fileobj=infile, mode="rb")
- elif 'pyzstd' in sys.modules:
- infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+ if 'zstd' in compressionsupport:
+ infile = zstd.ZstdFile(infile, mode="rb")
  tarfp = tarfile.open(fileobj=infile, mode="r")
  else:
  tarfp = tarfile.open(infile, "r")
  except FileNotFoundError:
  return False
- numfiles = int(len(tarfp.getmembers()))
- AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
- try:
- fp.flush()
- if(hasattr(os, "sync")):
- os.fsync(fp.fileno())
- except (io.UnsupportedOperation, AttributeError, OSError):
- pass
+ tmpoutlist = []
  for member in sorted(tarfp.getmembers(), key=lambda x: x.name):
  fencoding = "UTF-8"
  if(re.findall("^[.|/]", member.name)):
@@ -6248,6 +6912,9 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  fpremode = member.mode
  ffullmode = member.mode
  flinkcount = 0
+ fblksize = 0
+ fblocks = 0
+ fflags = 0
  ftype = 0
  if(member.isreg()):
  ffullmode = member.mode + stat.S_IFREG
@@ -6285,12 +6952,11 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  curfid = curfid + 1
  if(ftype == 2):
  flinkname = member.linkname
+ fdev = format(int("0"), 'x').lower()
  try:
- fdev = format(int(os.makedev(member.devmajor, member.devminor)), 'x').lower()
+ frdev = format(int(os.makedev(member.devmajor, member.devminor)), 'x').lower()
  except AttributeError:
- fdev = format(int(MakeDevAlt(member.devmajor, member.devminor)), 'x').lower()
- fdev_minor = format(int(member.devminor), 'x').lower()
- fdev_major = format(int(member.devmajor), 'x').lower()
+ frdev = format(int(MakeDevAlt(member.devmajor, member.devminor)), 'x').lower()
  # Types that should be considered zero-length in the archive context:
  zero_length_types = {1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 13}
  # Types that have actual data to read:
@@ -6301,10 +6967,10 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  fsize = format(int(member.size), 'x').lower()
  else:
  fsize = format(int(member.size), 'x').lower()
- fatime = format(int(member.mtime), 'x').lower()
- fmtime = format(int(member.mtime), 'x').lower()
- fctime = format(int(member.mtime), 'x').lower()
- fbtime = format(int(member.mtime), 'x').lower()
+ fatime = format(int(to_ns(member.mtime)), 'x').lower()
+ fmtime = format(int(to_ns(member.mtime)), 'x').lower()
+ fctime = format(int(to_ns(member.mtime)), 'x').lower()
+ fbtime = format(int(to_ns(member.mtime)), 'x').lower()
  fmode = format(int(ffullmode), 'x').lower()
  fchmode = format(int(stat.S_IMODE(ffullmode)), 'x').lower()
  ftypemod = format(int(stat.S_IFMT(ffullmode)), 'x').lower()
@@ -6326,7 +6992,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
  fcontents.seek(0, 0)
  if(typechecktest is not False):
- typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=True)
+ typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
  fcontents.seek(0, 0)
  fcencoding = GetFileEncoding(fcontents, 0, False)[0]
  if(typechecktest is False and not compresswholefile):
@@ -6370,26 +7036,38 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  if(fcompression == "none"):
  fcompression = ""
  fcontents.seek(0, 0)
+ if(not contentasfile):
+ fcontents = fcontents.read()
  ftypehex = format(ftype, 'x').lower()
- tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
- fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
- AppendFileHeaderWithContent(
- fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
+ tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+ return tmpoutlist
+
+ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+ if(not hasattr(fp, "write")):
+ return False
+ GetDirList = AppendFilesWithContentFromTarFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+ numfiles = int(len(GetDirList))
+ fnumfiles = format(numfiles, 'x').lower()
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+ try:
+ fp.flush()
+ if(hasattr(os, "sync")):
+ os.fsync(fp.fileno())
+ except (io.UnsupportedOperation, AttributeError, OSError):
+ pass
+ for curfname in GetDirList:
+ tmpoutlist = curfname['fheaders']
+ AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
  except (io.UnsupportedOperation, AttributeError, OSError):
  pass
- fcontents.close()
  return fp
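
The tar importer now follows the same two-phase pattern: AppendFilesWithContentFromTarFileToList collects entries from the tarball (transparently unwrapping zstd input when 'zstd' is listed in compressionsupport), and AppendFilesWithContentFromTarFile writes them out. A hedged usage sketch, with invented file names:

    # Repack an existing tarball into the container format in one pass.
    with open("backup.fox", "wb") as outfp:
        AppendFilesWithContentFromTarFile("backup.tar.gz", outfp)
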
 
- def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
- if(not hasattr(fp, "write")):
- return False
- if(verbose):
- logging.basicConfig(format="%(message)s",
- stream=PY_STDOUT_TEXT, level=logging.DEBUG)
+ def AppendFilesWithContentFromZipFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  curinode = 0
  curfid = 0
  inodelist = []
@@ -6422,14 +7100,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  ziptest = zipfp.testzip()
  if(ziptest):
  VerbosePrintOut("Bad file found!")
- numfiles = int(len(zipfp.infolist()))
- AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
- try:
- fp.flush()
- if(hasattr(os, "sync")):
- os.fsync(fp.fileno())
- except (io.UnsupportedOperation, AttributeError, OSError):
- pass
+ tmpoutlist = []
  for member in sorted(zipfp.infolist(), key=lambda x: x.filename):
  fencoding = "UTF-8"
  if(re.findall("^[.|/]", member.filename)):
@@ -6444,6 +7115,9 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  else:
  fpremode = int(stat.S_IFREG | 0x1b6)
  flinkcount = 0
+ fblksize = 0
+ fblocks = 0
+ fflags = 0
  ftype = 0
  if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
  ftype = 5
@@ -6454,8 +7128,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  fcurinode = format(int(curfid), 'x').lower()
  curfid = curfid + 1
  fdev = format(int(0), 'x').lower()
- fdev_minor = format(int(0), 'x').lower()
- fdev_major = format(int(0), 'x').lower()
+ frdev = format(int(0), 'x').lower()
  if(ftype == 5):
  fsize = format(int("0"), 'x').lower()
  elif(ftype == 0):
@@ -6463,13 +7136,13 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  else:
  fsize = format(int(member.file_size), 'x').lower()
  fatime = format(
- int(time.mktime(member.date_time + (0, 0, -1))), 'x').lower()
+ int(to_ns(time.mktime(member.date_time + (0, 0, -1)))), 'x').lower()
  fmtime = format(
- int(time.mktime(member.date_time + (0, 0, -1))), 'x').lower()
+ int(to_ns(time.mktime(member.date_time + (0, 0, -1)))), 'x').lower()
  fctime = format(
- int(time.mktime(member.date_time + (0, 0, -1))), 'x').lower()
+ int(to_ns(time.mktime(member.date_time + (0, 0, -1)))), 'x').lower()
  fbtime = format(
- int(time.mktime(member.date_time + (0, 0, -1))), 'x').lower()
+ int(to_ns(time.mktime(member.date_time + (0, 0, -1)))), 'x').lower()
  if(zipinfo.create_system == 0 or zipinfo.create_system == 10):
  fwinattributes = format(int(zipinfo.external_attr & 0xFFFF), 'x').lower()
  if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
@@ -6584,30 +7257,44 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  if(fcompression == "none"):
  fcompression = ""
  fcontents.seek(0, 0)
+ if(not contentasfile):
+ fcontents = fcontents.read()
  ftypehex = format(ftype, 'x').lower()
- tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
- fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
- AppendFileHeaderWithContent(
- fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
+ tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+ return tmpoutlist
+
+ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+ if(not hasattr(fp, "write")):
+ return False
+ GetDirList = AppendFilesWithContentFromZipFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+ numfiles = int(len(GetDirList))
+ fnumfiles = format(numfiles, 'x').lower()
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+ try:
+ fp.flush()
+ if(hasattr(os, "sync")):
+ os.fsync(fp.fileno())
+ except (io.UnsupportedOperation, AttributeError, OSError):
+ pass
+ for curfname in GetDirList:
+ tmpoutlist = curfname['fheaders']
+ AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
  except (io.UnsupportedOperation, AttributeError, OSError):
  pass
- fcontents.close()
  return fp
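
ZIP headers store a six-field local time (year, month, day, hour, minute, second) rather than an epoch stamp, so the importer appends (0, 0, -1) -- placeholder weekday and year-day plus an "unknown DST" flag -- to build the nine-field tuple that time.mktime() expects, then scales the result with to_ns(). A self-contained example of that conversion:

    import time
    import zipfile

    zi = zipfile.ZipInfo("example.txt", date_time=(2025, 11, 14, 12, 30, 0))
    # isdst=-1 lets mktime decide whether DST applied at that local time.
    secs = time.mktime(zi.date_time + (0, 0, -1))
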
 
  if(not rarfile_support):
- def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+ def AppendFilesWithContentFromRarFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  return False
- else:
- def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
- if(not hasattr(fp, "write")):
- return False
- if(verbose):
- logging.basicConfig(format="%(message)s",
- stream=PY_STDOUT_TEXT, level=logging.DEBUG)
+ def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+ return False
+ else:
+ def AppendFilesWithContentFromRarFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  curinode = 0
  curfid = 0
  inodelist = []
@@ -6622,20 +7309,7 @@ else:
  rartest = rarfp.testrar()
  if(rartest):
  VerbosePrintOut("Bad file found!")
- numfiles = int(len(rarfp.infolist()))
- AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
- try:
- fp.flush()
- if(hasattr(os, "sync")):
- os.fsync(fp.fileno())
- except (io.UnsupportedOperation, AttributeError, OSError):
- pass
- try:
- fp.flush()
- if(hasattr(os, "sync")):
- os.fsync(fp.fileno())
- except (io.UnsupportedOperation, AttributeError, OSError):
- pass
+ tmpoutlist = []
  for member in sorted(rarfp.infolist(), key=lambda x: x.filename):
  is_unix = False
  is_windows = False
@@ -6679,6 +7353,9 @@ else:
  fcompression = ""
  fcsize = format(int(0), 'x').lower()
  flinkcount = 0
+ fblksize = 0
+ fblocks = 0
+ fflags = 0
  ftype = 0
  if(member.is_file()):
  ftype = 0
@@ -6693,8 +7370,7 @@ else:
  fcurinode = format(int(curfid), 'x').lower()
  curfid = curfid + 1
  fdev = format(int(0), 'x').lower()
- fdev_minor = format(int(0), 'x').lower()
- fdev_major = format(int(0), 'x').lower()
+ frdev = format(int(0), 'x').lower()
  if(ftype == 5):
  fsize = format(int("0"), 'x').lower()
  elif(ftype == 0):
@@ -6703,20 +7379,20 @@ else:
  fsize = format(int(member.file_size), 'x').lower()
  try:
  if(member.atime):
- fatime = format(int(member.atime.timestamp()), 'x').lower()
+ fatime = format(int(to_ns(member.atime.timestamp())), 'x').lower()
  else:
- fatime = format(int(member.mtime.timestamp()), 'x').lower()
+ fatime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
  except AttributeError:
- fatime = format(int(member.mtime.timestamp()), 'x').lower()
- fmtime = format(int(member.mtime.timestamp()), 'x').lower()
+ fatime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
+ fmtime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
  try:
  if(member.ctime):
- fctime = format(int(member.ctime.timestamp()), 'x').lower()
+ fctime = format(int(to_ns(member.ctime.timestamp())), 'x').lower()
  else:
- fctime = format(int(member.mtime.timestamp()), 'x').lower()
+ fctime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
  except AttributeError:
- fctime = format(int(member.mtime.timestamp()), 'x').lower()
- fbtime = format(int(member.mtime.timestamp()), 'x').lower()
+ fctime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
+ fbtime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
  if(is_unix and member.external_attr != 0):
  fmode = format(int(member.external_attr), 'x').lower()
  fchmode = format(
@@ -6817,30 +7493,84 @@ else:
  if(fcompression == "none"):
  fcompression = ""
  fcontents.seek(0, 0)
+ if(not contentasfile):
+ fcontents = fcontents.read()
  ftypehex = format(ftype, 'x').lower()
- tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
- fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
- AppendFileHeaderWithContent(
- fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
+ tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+ return tmpoutlist
+ def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+ if(not hasattr(fp, "write")):
+ return False
+ GetDirList = AppendFilesWithContentFromRarFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+ numfiles = int(len(GetDirList))
+ fnumfiles = format(numfiles, 'x').lower()
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+ try:
+ fp.flush()
+ if(hasattr(os, "sync")):
+ os.fsync(fp.fileno())
+ except (io.UnsupportedOperation, AttributeError, OSError):
+ pass
+ for curfname in GetDirList:
+ tmpoutlist = curfname['fheaders']
+ AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
  except (io.UnsupportedOperation, AttributeError, OSError):
  pass
- fcontents.close()
  return fp
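
As with the other importers, the RAR branch sits behind the module's optional-dependency guard: when rarfile is missing, stubs with matching signatures are defined that simply return False, so callers can probe for support without wrapping every call in try/except. The pattern in isolation (the function name here is illustrative, not from the module):

    try:
        import rarfile  # optional dependency
        rarfile_support = True
    except ImportError:
        rarfile_support = False

    if not rarfile_support:
        def list_rar_members(infile):
            # Same signature as the real function; callers test for False.
            return False
    else:
        def list_rar_members(infile):
            with rarfile.RarFile(infile) as rarfp:
                return rarfp.namelist()
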
 
  if(not py7zr_support):
- def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+ def sevenzip_readall(infile, **kwargs):
  return False
  else:
- def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
- if(not hasattr(fp, "write")):
- return False
- if(verbose):
- logging.basicConfig(format="%(message)s",
- stream=PY_STDOUT_TEXT, level=logging.DEBUG)
+ class _MemoryIO(py7zr.Py7zIO):
+ """In-memory file object used by py7zr's factory API."""
+ def __init__(self):
+ self._buf = bytearray()
+ def write(self, data):
+ # py7zr will call this repeatedly with chunks
+ self._buf.extend(data)
+ def read(self, size=None):
+ if size is None:
+ return bytes(self._buf)
+ return bytes(self._buf[:size])
+ def seek(self, offset, whence=0):
+ # seeking is not needed for this use case
+ return 0
+ def flush(self):
+ pass
+ def size(self):
+ return len(self._buf)
+ class _MemoryFactory(py7zr.WriterFactory):
+ """Factory that creates _MemoryIO objects and keeps them by filename."""
+ def __init__(self):
+ self.files = {}
+ def create(self, filename: str) -> py7zr.Py7zIO:
+ io_obj = _MemoryIO()
+ self.files[filename] = io_obj
+ return io_obj
+ def sevenzip_readall(infile, **kwargs):
+ """
+ Replacement for SevenZipFile.readall() using the new py7zr API.
+
+ Returns: dict[filename -> _MemoryIO]
+ """
+ factory = _MemoryFactory()
+ with py7zr.SevenZipFile(infile, mode="r", **kwargs) as archive:
+ archive.extractall(factory=factory)
+ return factory.files
+
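
sevenzip_readall() backfills SevenZipFile.readall() for py7zr releases that dropped it in favor of the WriterFactory extraction API; either path yields a mapping from member name to an in-memory buffer. A usage sketch under that assumption ("example.7z" is an invented name):

    contents = sevenzip_readall("example.7z")
    for name, buf in contents.items():
        data = buf.read()  # whole member as bytes
        print(name, len(data))
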
+ if(not py7zr_support):
+ def AppendFilesWithContentFromSevenZipFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+ return False
+ def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+ return False
+ else:
+ def AppendFilesWithContentFromSevenZipFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  formver = formatspecs['format_ver']
  fileheaderver = str(int(formver.replace(".", "")))
  curinode = 0
@@ -6852,19 +7582,15 @@ else:
  if(not os.path.exists(infile) or not os.path.isfile(infile)):
  return False
  szpfp = py7zr.SevenZipFile(infile, mode="r")
- file_content = szpfp.readall()
+ try:
+ file_content = szpfp.readall()
+ except AttributeError:
+ file_content = sevenzip_readall(infile)
  #sztest = szpfp.testzip()
  sztestalt = szpfp.test()
  if(sztestalt):
  VerbosePrintOut("Bad file found!")
- numfiles = int(len(szpfp.list()))
- AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
- try:
- fp.flush()
- if(hasattr(os, "sync")):
- os.fsync(fp.fileno())
- except (io.UnsupportedOperation, AttributeError, OSError):
- pass
+ tmpoutlist = []
  for member in sorted(szpfp.list(), key=lambda x: x.filename):
  fencoding = "UTF-8"
  if(re.findall("^[.|/]", member.filename)):
@@ -6881,6 +7607,9 @@ else:
  fcompression = ""
  fcsize = format(int(0), 'x').lower()
  flinkcount = 0
+ fblksize = 0
+ fblocks = 0
+ fflags = 0
  ftype = 0
  if(member.is_directory):
  ftype = 5
@@ -6891,14 +7620,13 @@ else:
  fcurinode = format(int(curfid), 'x').lower()
  curfid = curfid + 1
  fdev = format(int(0), 'x').lower()
- fdev_minor = format(int(0), 'x').lower()
- fdev_major = format(int(0), 'x').lower()
+ frdev = format(int(0), 'x').lower()
  if(ftype == 5):
  fsize = format(int("0"), 'x').lower()
- fatime = format(int(member.creationtime.timestamp()), 'x').lower()
- fmtime = format(int(member.creationtime.timestamp()), 'x').lower()
- fctime = format(int(member.creationtime.timestamp()), 'x').lower()
- fbtime = format(int(member.creationtime.timestamp()), 'x').lower()
+ fatime = format(int(to_ns(member.creationtime.timestamp())), 'x').lower()
+ fmtime = format(int(to_ns(member.creationtime.timestamp())), 'x').lower()
+ fctime = format(int(to_ns(member.creationtime.timestamp())), 'x').lower()
+ fbtime = format(int(to_ns(member.creationtime.timestamp())), 'x').lower()
  if(member.is_directory):
  fmode = format(int(stat.S_IFDIR | 0x1ff), 'x').lower()
  fchmode = format(
@@ -6948,7 +7676,10 @@ else:
  typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
  fcontents.seek(0, 0)
  fcencoding = GetFileEncoding(fcontents, 0, False)[0]
- file_content[member.filename].close()
+ try:
+ file_content[member.filename].close()
+ except AttributeError:
+ pass
  if(typechecktest is False and not compresswholefile):
  fcontents.seek(0, 2)
  ucfsize = fcontents.tell()
@@ -6990,25 +7721,39 @@ else:
  if(fcompression == "none"):
  fcompression = ""
  fcontents.seek(0, 0)
+ if(not contentasfile):
+ fcontents = fcontents.read()
  ftypehex = format(ftype, 'x').lower()
- tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
- fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
- AppendFileHeaderWithContent(
- fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
+ tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+ return tmpoutlist
+ def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+ if(not hasattr(fp, "write")):
+ return False
+ GetDirList = AppendFilesWithContentFromSevenZipFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+ numfiles = int(len(GetDirList))
+ fnumfiles = format(numfiles, 'x').lower()
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+ try:
+ fp.flush()
+ if(hasattr(os, "sync")):
+ os.fsync(fp.fileno())
+ except (io.UnsupportedOperation, AttributeError, OSError):
+ pass
+ for curfname in GetDirList:
+ tmpoutlist = curfname['fheaders']
+ AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
  except (io.UnsupportedOperation, AttributeError, OSError):
  pass
- fcontents.close()
  return fp
 
- def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  if(not hasattr(fp, "write")):
  return False
- if(verbose):
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  GetDirList = inlist
  if(not GetDirList):
  return False
@@ -7020,7 +7765,7 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsond
  inodetoforminode = {}
  numfiles = int(len(GetDirList))
  fnumfiles = format(numfiles, 'x').lower()
- AppendFileHeader(fp, numfiles, "UTF-8", [], [checksumtype[0], checksumtype[1]], formatspecs)
+ AppendFileHeader(fp, numfiles, "UTF-8", [], [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
  for curfname in GetDirList:
  ftype = format(curfname[0], 'x').lower()
  fencoding = curfname[1]
@@ -7034,44 +7779,45 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsond
  fbasedir = os.path.dirname(fname)
  flinkname = curfname[4]
  fsize = format(curfname[5], 'x').lower()
- fatime = format(curfname[6], 'x').lower()
- fmtime = format(curfname[7], 'x').lower()
- fctime = format(curfname[8], 'x').lower()
- fbtime = format(curfname[9], 'x').lower()
- fmode = format(curfname[10], 'x').lower()
- fwinattributes = format(curfname[11], 'x').lower()
- fcompression = curfname[12]
- fcsize = format(curfname[13], 'x').lower()
- fuid = format(curfname[14], 'x').lower()
- funame = curfname[15]
- fgid = format(curfname[16], 'x').lower()
- fgname = curfname[17]
- fid = format(curfname[18], 'x').lower()
- finode = format(curfname[19], 'x').lower()
- flinkcount = format(curfname[20], 'x').lower()
- fdev = format(curfname[21], 'x').lower()
- fdev_minor = format(curfname[22], 'x').lower()
- fdev_major = format(curfname[23], 'x').lower()
- fseeknextfile = curfname[24]
- extradata = curfname[25]
- fheaderchecksumtype = curfname[26]
- fcontentchecksumtype = curfname[27]
- fcontents = curfname[28]
+ fblksize = format(curfname[6], 'x').lower()
+ fblocks = format(curfname[7], 'x').lower()
+ fflags = format(curfname[8], 'x').lower()
+ fatime = format(curfname[9], 'x').lower()
+ fmtime = format(curfname[10], 'x').lower()
+ fctime = format(curfname[11], 'x').lower()
+ fbtime = format(curfname[12], 'x').lower()
+ fmode = format(curfname[13], 'x').lower()
+ fwinattributes = format(curfname[14], 'x').lower()
+ fcompression = curfname[15]
+ fcsize = format(curfname[16], 'x').lower()
+ fuid = format(curfname[17], 'x').lower()
+ funame = curfname[18]
+ fgid = format(curfname[19], 'x').lower()
+ fgname = curfname[20]
+ fid = format(curfname[21], 'x').lower()
+ finode = format(curfname[22], 'x').lower()
+ flinkcount = format(curfname[23], 'x').lower()
+ fdev = format(curfname[24], 'x').lower()
+ frdev = format(curfname[25], 'x').lower()
+ fseeknextfile = curfname[26]
+ extradata = curfname[27]
+ fheaderchecksumtype = curfname[28]
+ fcontentchecksumtype = curfname[29]
+ fcontents = curfname[30]
  fencoding = GetFileEncoding(fcontents, 0, False)[0]
- tmpoutlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize,
- fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev, fdev_minor, fdev_major, fseeknextfile]
+ tmpoutlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize,
+ fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev, frdev, fseeknextfile]
  fcontents.seek(0, 0)
- AppendFileHeaderWithContent(
- fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
+ AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
  return fp
 
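
The remapping above reflects the 0.27.0 record layout: three new fields (fblksize, fblocks, fflags) follow fsize at indices 6-8, and the old fdev_minor/fdev_major pair at 22-23 collapses into a single frdev at index 25, shifting every later field. Unpacking one record under the new layout, with indices read directly off the code above:

    entry = inlist[0]
    ftype, fname, fsize = entry[0], entry[3], entry[5]
    fblksize, fblocks, fflags = entry[6], entry[7], entry[8]  # new in 0.27.0
    fdev, frdev = entry[24], entry[25]  # frdev replaces the minor/major pair
    fcontents = entry[30]               # file-like payload
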
 
- def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
- inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
- return AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, followlink, checksumtype, formatspecs, verbose)
+ def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+ inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs, saltkey, False)
+ return AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose)
 
 
- def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7115,8 +7861,7 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
  except PermissionError:
  return False
- AppendFilesWithContent(infiles, fp, dirlistfromtxt, extradata, jsondata, compression,
- compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, formatspecs, verbose)
+ AppendFilesWithContent(infiles, fp, dirlistfromtxt, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, formatspecs, saltkey, verbose)
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
  fp = CompressOpenFileAlt(
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7145,12 +7890,12 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
  fp.close()
  return True
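
From here on, every writer wrapper grows a saltkey=None parameter that is threaded down to AppendFileHeader and AppendFileHeaderWithContent, presumably to key the header and content checksums (the module imports hmac); the None default preserves the old unsalted behavior. A hedged call sketch -- the paths and key value are invented:

    AppendFilesWithContentToOutFile(["./src"], "source.fox",
                                    checksumtype=["sha256"] * 5,
                                    saltkey=b"example-secret")
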
 
- def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if not isinstance(infiles, list):
  infiles = [infiles]
  returnout = False
  for infileslist in infiles:
- returnout = AppendFilesWithContentToOutFile(infileslist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, True)
+ returnout = AppendFilesWithContentToOutFile(infileslist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose, True)
  if(not returnout):
  break
  else:
@@ -7160,7 +7905,7 @@ def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=Fals
  return True
  return returnout
 
- def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, saltkey=None, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7201,8 +7946,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
  except PermissionError:
  return False
- AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, compression,
- compresswholefile, compressionlevel, followlink, checksumtype, formatspecs, verbose)
+ AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, compression, compresswholefile, compressionlevel, followlink, checksumtype, formatspecs, saltkey, verbose)
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
  fp = CompressOpenFileAlt(
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7232,7 +7976,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
  fp.close()
  return True
 
- def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7274,8 +8018,7 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
  except PermissionError:
  return False
- AppendFilesWithContentFromTarFile(infiles, fp, extradata, jsondata, compression,
- compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, verbose)
+ AppendFilesWithContentFromTarFile(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, verbose)
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
  fp = CompressOpenFileAlt(
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7305,12 +8048,12 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
  fp.close()
  return True
 
- def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if not isinstance(infiles, list):
  infiles = [infiles]
  returnout = False
  for infileslist in infiles:
- returnout = AppendFilesWithContentFromTarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
+ returnout = AppendFilesWithContentFromTarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, True)
  if(not returnout):
  break
  else:
@@ -7320,7 +8063,7 @@ def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype=
  return True
  return returnout
 
- def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7362,8 +8105,7 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
  except PermissionError:
  return False
- AppendFilesWithContentFromZipFile(infiles, fp, extradata, jsondata, compression,
- compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, verbose)
+ AppendFilesWithContentFromZipFile(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, verbose)
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
  fp = CompressOpenFileAlt(
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7393,12 +8135,12 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
  fp.close()
  return True
 
- def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if not isinstance(infiles, list):
  infiles = [infiles]
  returnout = False
  for infileslist in infiles:
- returnout = AppendFilesWithContentFromZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
+ returnout = AppendFilesWithContentFromZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, True)
  if(not returnout):
  break
  else:
@@ -7409,10 +8151,10 @@ def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype=
  return returnout
 
  if(not rarfile_support):
- def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  return False
  else:
- def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7454,8 +8196,7 @@ else:
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
  except PermissionError:
  return False
- AppendFilesWithContentFromRarFile(infiles, fp, extradata, jsondata, compression,
- compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, verbose)
+ AppendFilesWithContentFromRarFile(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, verbose)
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
  fp = CompressOpenFileAlt(
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7485,12 +8226,12 @@ else:
  fp.close()
  return True
 
- def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if not isinstance(infiles, list):
  infiles = [infiles]
  returnout = False
  for infileslist in infiles:
- returnout = AppendFilesWithContentFromRarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
+ returnout = AppendFilesWithContentFromRarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, True)
  if(not returnout):
  break
  else:
@@ -7501,10 +8242,10 @@ def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype=
  return returnout
 
  if(not py7zr_support):
- def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  return False
  else:
- def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7546,8 +8287,7 @@ else:
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
  except PermissionError:
  return False
- AppendFilesWithContentFromSevenZipFile(infiles, fp, extradata, jsondata, compression,
- compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, verbose)
8290
+ AppendFilesWithContentFromSevenZipFile(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, verbose)
7551
8291
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
7552
8292
  fp = CompressOpenFileAlt(
7553
8293
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7577,12 +8317,12 @@ else:
7577
8317
  fp.close()
7578
8318
  return True
7579
8319
 
7580
- def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
8320
+ def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
7581
8321
  if not isinstance(infiles, list):
7582
8322
  infiles = [infiles]
7583
8323
  returnout = False
7584
8324
  for infileslist in infiles:
7585
- returnout = AppendFilesWithContentFromSevenZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
8325
+ returnout = AppendFilesWithContentFromSevenZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, True)
7586
8326
  if(not returnout):
7587
8327
  break
7588
8328
  else:
@@ -7592,9 +8332,9 @@ def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmt
7592
8332
  return True
7593
8333
  return returnout
7594
8334
 
7595
- def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
7596
- inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
7597
- return AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
8335
+ def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
8336
+ inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs, saltkey, False)
8337
+ return AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose, returnfp)
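The rewritten AppendInFileWithContentToOutFile threads saltkey through both halves of its read-then-append round trip. A hypothetical call, with the archive names and salt value purely illustrative:

# Hypothetical usage sketch: re-append an existing archive's entries
# into another container, reading and re-writing under one salt key.
ok = AppendInFileWithContentToOutFile("old.fox", "merged.fox",
                                      saltkey=b"example-salt")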
7598
8338
 
7599
8339
 
7600
8340
  def PrintPermissionString(fchmode, ftype):
@@ -8278,10 +9018,8 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, file
8278
9018
  elif(compresscheck == "lz4" and compresscheck in compressionsupport):
8279
9019
  fp = lz4.frame.open(infile, "rb")
8280
9020
  elif(compresscheck == "zstd" and compresscheck in compressionsupport):
8281
- if 'zstandard' in sys.modules:
8282
- fp = ZstdFile(infile, mode="rb")
8283
- elif 'pyzstd' in sys.modules:
8284
- fp = pyzstd.zstdfile.ZstdFile(infile, mode="rb")
9021
+ if 'zstd' in compressionsupport:
9022
+ fp = zstd.ZstdFile(infile, mode="rb")
8285
9023
  else:
8286
9024
  return False
8287
9025
  elif((compresscheck == "lzma" or compresscheck == "xz") and compresscheck in compressionsupport):
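The zstd branches in this and the following hunks collapse the old zstandard/pyzstd sys.modules probing into a single zstd.ZstdFile entry point gated on compressionsupport. A minimal sketch of the consolidated pattern, assuming a zstd module that exposes ZstdFile as the new code does:

def open_zstd(infile, mode="rb", compressionsupport=("zstd",)):
    # One capability check replaces probing sys.modules for
    # 'zstandard' vs 'pyzstd'.
    if "zstd" in compressionsupport:
        import zstd  # assumption: a wrapper module providing ZstdFile
        return zstd.ZstdFile(infile, mode=mode)
    return False  # mirrors the library's False-on-unsupported convention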
@@ -8398,10 +9136,8 @@ def UncompressFileAlt(fp, formatspecs=__file_format_multi_dict__, filestart=0,
8398
9136
  elif kind in ("lzma","xz") and (("lzma" in compressionsupport) or ("xz" in compressionsupport)):
8399
9137
  wrapped = lzma.LZMAFile(src)
8400
9138
  elif kind == "zstd" and ("zstd" in compressionsupport or "zstandard" in compressionsupport):
8401
- if 'zstandard' in sys.modules:
8402
- wrapped = ZstdFile(fileobj=src, mode="rb")
8403
- elif 'pyzstd' in sys.modules:
8404
- wrapped = pyzstd.zstdfile.ZstdFile(fileobj=src, mode="rb")
9139
+ if 'zstd' in compressionsupport:
9140
+ wrapped = zstd.ZstdFile(src, mode="rb")
8405
9141
  else:
8406
9142
  return False
8407
9143
  elif kind == "lz4" and "lz4" in compressionsupport:
@@ -8469,10 +9205,8 @@ def UncompressFile(infile, formatspecs=__file_format_multi_dict__, mode="rb",
8469
9205
  elif (compresscheck == "bzip2" and "bzip2" in compressionsupport):
8470
9206
  fp = bz2.open(infile, mode)
8471
9207
  elif (compresscheck == "zstd" and "zstandard" in compressionsupport):
8472
- if 'zstandard' in sys.modules:
8473
- fp = ZstdFile(infile, mode=mode)
8474
- elif 'pyzstd' in sys.modules:
8475
- fp = pyzstd.zstdfile.ZstdFile(infile, mode=mode)
9208
+ if 'zstd' in compressionsupport:
9209
+ fp = zstd.ZstdFile(infile, mode=mode)
8476
9210
  else:
8477
9211
  return False
8478
9212
  elif (compresscheck == "lz4" and "lz4" in compressionsupport):
@@ -9241,10 +9975,8 @@ def CompressOpenFile(outfile, compressionenable=True, compressionlevel=None,
9241
9975
  outfp = FileLikeAdapter(bz2.open(outfile, mode, compressionlevel), mode="wb")
9242
9976
 
9243
9977
  elif (fextname == ".zst" and "zstandard" in compressionsupport):
9244
- if 'zstandard' in sys.modules:
9245
- outfp = FileLikeAdapter(ZstdFile(outfile, mode=mode, level=compressionlevel), mode="wb")
9246
- elif 'pyzstd' in sys.modules:
9247
- outfp = FileLikeAdapter(pyzstd.zstdfile.ZstdFile(outfile, mode=mode, level=compressionlevel), mode="wb")
9978
+ if 'zstd' in compressionsupport:
9979
+ outfp = FileLikeAdapter(zstd.ZstdFile(outfile, mode=mode, level=compressionlevel), mode="wb")
9248
9980
  else:
9249
9981
  return False # fix: 'Flase' -> False
9250
9982
 
@@ -9335,56 +10067,54 @@ def CheckSumSupport(checkfor, guaranteed=True):
9335
10067
  return False
9336
10068
 
9337
10069
 
9338
- def PackFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
9339
- return AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
10070
+ def PackFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
10071
+ return AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose, returnfp)
9340
10072
 
9341
- def PackStackedFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
9342
- return AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
10073
+ def PackStackedFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
10074
+ return AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose, returnfp)
9343
10075
 
9344
- def PackFoxFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9345
- return PackFoxFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, returnfp)
10076
+ def PackFoxFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
10077
+ return PackFoxFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, saltkey, verbose, returnfp)
9346
10078
 
9347
10079
 
9348
- def PackFoxFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9349
- return AppendFilesWithContentFromTarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
10080
+ def PackFoxFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
10081
+ return AppendFilesWithContentFromTarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, returnfp)
9350
10082
 
9351
10083
 
9352
- def PackFoxFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9353
- return AppendFilesWithContentFromZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
10084
+ def PackFoxFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
10085
+ return AppendFilesWithContentFromZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, returnfp)
9354
10086
 
9355
10087
 
9356
10088
  if(not rarfile_support):
9357
- def PackFoxFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
10089
+ def PackFoxFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
9358
10090
  return False
9359
10091
  else:
9360
- def PackFoxFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9361
- return AppendFilesWithContentFromRarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
10092
+ def PackFoxFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
10093
+ return AppendFilesWithContentFromRarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, returnfp)
9362
10094
 
9363
10095
 
9364
10096
  if(not py7zr_support):
9365
- def PackFoxFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
10097
+ def PackFoxFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
9366
10098
  return False
9367
10099
  else:
9368
- def PackFoxFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9369
- return AppendFilesWithContentFromSevenZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
10100
+ def PackFoxFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
10101
+ return AppendFilesWithContentFromSevenZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, returnfp)
9370
10102
 
9371
10103
 
9372
- def PackFoxFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
10104
+ def PackFoxFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
9373
10105
  checkcompressfile = CheckCompressionSubType(infile, formatspecs, 0, True)
9374
10106
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
9375
10107
  formatspecs = formatspecs[checkcompressfile]
9376
- if(verbose):
9377
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
9378
10108
  if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
9379
- return PackFoxFileFromTarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
10109
+ return PackFoxFileFromTarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
9380
10110
  elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
9381
- return PackFoxFileFromZipFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
10111
+ return PackFoxFileFromZipFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
9382
10112
  elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
9383
- return PackFoxFileFromRarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
10113
+ return PackFoxFileFromRarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
9384
10114
  elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
9385
- return PackFoxFileFromSevenZipFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
10115
+ return PackFoxFileFromSevenZipFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
9386
10116
  elif(IsSingleDict(formatspecs) and checkcompressfile == formatspecs['format_magic']):
9387
- return RePackFoxFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, False, 0, 0, checksumtype, False, extradata, jsondata, formatspecs, verbose, returnfp)
10117
+ return RePackFoxFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, False, 0, 0, checksumtype, False, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
9388
10118
  else:
9389
10119
  return False
9390
10120
  return False
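A hypothetical call into the updated dispatcher; the paths and salt are illustrative, and saltkey simply rides along to whichever Pack* helper the detected input type selects:

# Hypothetical usage sketch (input archive and salt are made up):
packed = PackFoxFileFromInFile("backup.tar", "backup.fox",
                               checksumtype=["sha256"] * 5,
                               saltkey=b"example-salt")
# Returns False when the input matches no supported container type.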
@@ -9453,19 +10183,12 @@ def FoxFileArrayValidate(listarrayfiles, verbose=False):
9453
10183
  ok = False
9454
10184
  return ok
9455
10185
 
9456
- def FoxFileValidate(infile, fmttype="auto", filestart=0,
9457
- formatspecs=__file_format_multi_dict__, # keep default like original
9458
- seektoend=False, verbose=False, returnfp=False):
9459
- if(verbose):
9460
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
9461
-
10186
+ def FoxFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
9462
10187
  if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
9463
10188
  formatspecs = formatspecs[fmttype]
9464
10189
  elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
9465
10190
  fmttype = "auto"
9466
-
9467
10191
  curloc = filestart
9468
-
9469
10192
  if(hasattr(infile, "read") or hasattr(infile, "write")):
9470
10193
  curloc = infile.tell()
9471
10194
  fp = infile
@@ -9481,7 +10204,6 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
9481
10204
  if(not fp):
9482
10205
  return False
9483
10206
  fp.seek(filestart, 0)
9484
-
9485
10207
  elif(infile == "-"):
9486
10208
  fp = MkTempFile()
9487
10209
  shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
@@ -9493,7 +10215,6 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
9493
10215
  if(not fp):
9494
10216
  return False
9495
10217
  fp.seek(filestart, 0)
9496
-
9497
10218
  elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
9498
10219
  fp = MkTempFile()
9499
10220
  fp.write(infile)
@@ -9505,7 +10226,6 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
9505
10226
  if(not fp):
9506
10227
  return False
9507
10228
  fp.seek(filestart, 0)
9508
-
9509
10229
  elif(re.findall(__download_proto_support__, infile)):
9510
10230
  fp = download_file_from_internet_file(infile)
9511
10231
  fp = UncompressFileAlt(fp, formatspecs, filestart)
@@ -9516,7 +10236,6 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
9516
10236
  if(not fp):
9517
10237
  return False
9518
10238
  fp.seek(filestart, 0)
9519
-
9520
10239
  else:
9521
10240
  infile = RemoveWindowsPath(infile)
9522
10241
  checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
@@ -9563,11 +10282,9 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
9563
10282
  fp.seek(0, 2)
9564
10283
  except (OSError, ValueError):
9565
10284
  SeekToEndOfFile(fp)
9566
-
9567
10285
  CatSize = fp.tell()
9568
10286
  CatSizeEnd = CatSize
9569
10287
  fp.seek(curloc, 0)
9570
-
9571
10288
  if(IsNestedDict(formatspecs)):
9572
10289
  compresschecking = CheckCompressionType(fp, formatspecs, filestart, False)
9573
10290
  if(compresschecking not in formatspecs):
@@ -9575,54 +10292,36 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
9575
10292
  else:
9576
10293
  formatspecs = formatspecs[compresschecking]
9577
10294
  fp.seek(filestart, 0)
9578
-
9579
10295
  inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
10296
+ headeroffset = fp.tell()
9580
10297
  formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
9581
10298
  formdelsize = len(formatspecs['format_delimiter'])
9582
10299
  formdel = fp.read(formdelsize).decode("UTF-8")
9583
-
9584
10300
  if(formstring != formatspecs['format_magic'] + inheaderver):
9585
10301
  return False
9586
10302
  if(formdel != formatspecs['format_delimiter']):
9587
10303
  return False
9588
-
9589
- if(formatspecs['new_style']):
10304
+ if(__use_new_style__):
9590
10305
  inheader = ReadFileHeaderDataBySize(fp, formatspecs['format_delimiter'])
9591
10306
  else:
9592
10307
  inheader = ReadFileHeaderDataWoSize(fp, formatspecs['format_delimiter'])
9593
-
9594
- fnumextrafieldsize = int(inheader[13], 16)
9595
- fnumextrafields = int(inheader[14], 16)
9596
- extrastart = 15
10308
+ fnumextrafieldsize = int(inheader[15], 16)
10309
+ fnumextrafields = int(inheader[16], 16)
10310
+ extrastart = 17
9597
10311
  extraend = extrastart + fnumextrafields
9598
10312
  formversion = re.findall("([\\d]+)", formstring)
9599
10313
  fheadsize = int(inheader[0], 16)
9600
10314
  fnumfields = int(inheader[1], 16)
9601
- fnumfiles = int(inheader[6], 16)
10315
+ fnumfiles = int(inheader[8], 16)
9602
10316
  fprechecksumtype = inheader[-2]
9603
10317
  fprechecksum = inheader[-1]
9604
- outfseeknextfile = inheader[7]
9605
- fjsonsize = int(inheader[10], 16)
9606
- fjsonchecksumtype = inheader[11]
9607
- fjsonchecksum = inheader[12]
10318
+ outfseeknextfile = inheader[9]
10319
+ fjsonsize = int(inheader[12], 16)
10320
+ fjsonchecksumtype = inheader[13]
10321
+ fjsonchecksum = inheader[14]
10322
+ headerjsonoffset = fp.tell()
9608
10323
  fprejsoncontent = fp.read(fjsonsize)
9609
- jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
9610
- if(fjsonsize > 0):
9611
- if(hmac.compare_digest(jsonfcs, fjsonchecksum)):
9612
- if(verbose):
9613
- VerbosePrintOut("File JSON Data Checksum Passed at offset " + str(outfjstart))
9614
- VerbosePrintOut("'" + outfjsonchecksum + "' == " + "'" + injsonfcs + "'")
9615
- else:
9616
- valid_archive = False
9617
- invalid_archive = True
9618
- if(verbose):
9619
- VerbosePrintOut("File JSON Data Checksum Error at offset " + str(outfjstart))
9620
- VerbosePrintOut("'" + outfjsonchecksum + "' != " + "'" + injsonfcs + "'")
9621
- if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
9622
- VerbosePrintOut("File JSON Data Checksum Error with file " +
9623
- fname + " at offset " + str(fheaderstart))
9624
- VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
9625
- return False
10324
+ jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
9626
10325
  # Next seek directive
9627
10326
  if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
9628
10327
  fseeknextasnum = int(outfseeknextfile.replace("+", ""))
@@ -9641,14 +10340,11 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
9641
10340
  fp.seek(fseeknextasnum, 0)
9642
10341
  else:
9643
10342
  return False
9644
-
9645
10343
  il = 0
9646
- headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
9647
- newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
9648
-
10344
+ headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs, saltkey)
10345
+ newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
9649
10346
  valid_archive = True
9650
10347
  invalid_archive = False
9651
-
9652
10348
  if(verbose):
9653
10349
  if(hasattr(infile, "read") or hasattr(infile, "write")):
9654
10350
  try:
@@ -9660,78 +10356,56 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
9660
10356
  else:
9661
10357
  VerbosePrintOut(infile)
9662
10358
  VerbosePrintOut("Number of Records " + str(fnumfiles))
9663
-
9664
10359
  if(headercheck):
9665
10360
  if(verbose):
9666
- VerbosePrintOut("File Header Checksum Passed at offset " + str(0))
10361
+ VerbosePrintOut("File Header Checksum Passed at offset " + str(headeroffset))
9667
10362
  VerbosePrintOut("'" + fprechecksum + "' == " + "'" + newfcs + "'")
9668
10363
  else:
9669
10364
  # always flip flags, even when not verbose
9670
10365
  valid_archive = False
9671
10366
  invalid_archive = True
9672
10367
  if(verbose):
9673
- VerbosePrintOut("File Header Checksum Failed at offset " + str(0))
10368
+ VerbosePrintOut("File Header Checksum Failed at offset " + str(headeroffset))
9674
10369
  VerbosePrintOut("'" + fprechecksum + "' != " + "'" + newfcs + "'")
9675
-
10370
+ if(fjsonsize > 0):
10371
+ if(CheckChecksums(jsonfcs, fjsonchecksum)):
10372
+ if(verbose):
10373
+ VerbosePrintOut("File JSON Data Checksum Passed at offset " + str(headerjsonoffset))
10374
+ VerbosePrintOut("'" + fjsonchecksum + "' == " + "'" + jsonfcs + "'")
10375
+ else:
10376
+ valid_archive = False
10377
+ invalid_archive = True
10378
+ if(verbose):
10379
+ VerbosePrintOut("File JSON Data Checksum Error at offset " + str(headerjsonoffset))
10380
+ VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
9676
10381
  if(verbose):
9677
10382
  VerbosePrintOut("")
9678
-
9679
10383
  # Iterate either until EOF (seektoend) or fixed count
9680
10384
  while (fp.tell() < CatSizeEnd) if seektoend else (il < fnumfiles):
9681
10385
  outfhstart = fp.tell()
9682
- if(formatspecs['new_style']):
10386
+ if(__use_new_style__):
9683
10387
  inheaderdata = ReadFileHeaderDataBySize(fp, formatspecs['format_delimiter'])
9684
10388
  else:
9685
10389
  inheaderdata = ReadFileHeaderDataWoSize(fp, formatspecs['format_delimiter'])
9686
10390
 
9687
10391
  if(len(inheaderdata) == 0):
9688
10392
  break
9689
-
9690
- outfheadsize = int(inheaderdata[0], 16)
9691
- outfnumfields = int(inheaderdata[1], 16)
9692
- outftype = int(inheaderdata[2], 16)
9693
- # FIX: these must come from inheaderdata, not inheader
9694
- outfostype = inheaderdata[3]
9695
- outfencoding = inheaderdata[4]
9696
-
9697
10393
  if(re.findall("^[.|/]", inheaderdata[5])):
9698
10394
  outfname = inheaderdata[5]
9699
10395
  else:
9700
10396
  outfname = "./" + inheaderdata[5]
9701
10397
  outfbasedir = os.path.dirname(outfname)
9702
-
9703
- outflinkname = inheaderdata[6]
9704
10398
  outfsize = int(inheaderdata[7], 16)
9705
- outfatime = int(inheaderdata[8], 16)
9706
- outfmtime = int(inheaderdata[9], 16)
9707
- outfctime = int(inheaderdata[10], 16)
9708
- outfbtime = int(inheaderdata[11], 16)
9709
- outfmode = int(inheaderdata[12], 16)
9710
- outfchmode = stat.S_IMODE(outfmode)
9711
- outftypemod = stat.S_IFMT(outfmode)
9712
- outfwinattributes = int(inheaderdata[13], 16)
9713
- outfcompression = inheaderdata[14]
9714
- outfcsize = int(inheaderdata[15], 16)
9715
- outfuid = int(inheaderdata[16], 16)
9716
- outfuname = inheaderdata[17]
9717
- outfgid = int(inheaderdata[18], 16)
9718
- outfgname = inheaderdata[19]
9719
- fid = int(inheaderdata[20], 16)
9720
- finode = int(inheaderdata[21], 16)
9721
- flinkcount = int(inheaderdata[22], 16)
9722
- outfdev = int(inheaderdata[23], 16)
9723
- outfdev_minor = int(inheaderdata[24], 16)
9724
- outfdev_major = int(inheaderdata[25], 16)
9725
- outfseeknextfile = inheaderdata[26]
9726
- outfjsontype = inheaderdata[27]
9727
- outfjsonlen = int(inheaderdata[28], 16)
9728
- outfjsonsize = int(inheaderdata[29], 16)
9729
- outfjsonchecksumtype = inheaderdata[30]
9730
- outfjsonchecksum = inheaderdata[31]
9731
-
10399
+ outfcompression = inheaderdata[17]
10400
+ outfcsize = int(inheaderdata[18], 16)
10401
+ fid = int(inheaderdata[23], 16)
10402
+ finode = int(inheaderdata[24], 16)
10403
+ outfseeknextfile = inheaderdata[28]
10404
+ outfjsonsize = int(inheaderdata[31], 16)
10405
+ outfjsonchecksumtype = inheaderdata[32]
10406
+ outfjsonchecksum = inheaderdata[33]
9732
10407
  outfhend = fp.tell() - 1 # (kept for parity; not used)
9733
10408
  outfjstart = fp.tell()
9734
-
9735
10409
  # Read JSON bytes; compute checksum on bytes for robustness
9736
10410
  outfprejsoncontent_bytes = fp.read(outfjsonsize)
9737
10411
  # Decode for any downstream text needs (not used further here)
@@ -9739,27 +10413,21 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
9739
10413
  outfprejsoncontent = outfprejsoncontent_bytes.decode("UTF-8")
9740
10414
  except Exception:
9741
10415
  outfprejsoncontent = None
9742
-
9743
10416
  outfjend = fp.tell()
9744
10417
  fp.seek(len(formatspecs['format_delimiter']), 1)
9745
-
9746
- injsonfcs = GetFileChecksum(outfprejsoncontent_bytes, outfjsonchecksumtype, True, formatspecs)
9747
-
9748
- outfextrasize = int(inheaderdata[32], 16)
9749
- outfextrafields = int(inheaderdata[33], 16)
10418
+ injsonfcs = GetFileChecksum(outfprejsoncontent_bytes, outfjsonchecksumtype, True, formatspecs, saltkey)
10419
+ outfextrafields = int(inheaderdata[35], 16)
9750
10420
  extrafieldslist = []
9751
- extrastart = 34
10421
+ extrastart = 36
9752
10422
  extraend = extrastart + outfextrafields
9753
-
9754
10423
  outfcs = inheaderdata[-2].lower()
9755
10424
  outfccs = inheaderdata[-1].lower()
9756
- infcs = GetHeaderChecksum(inheaderdata[:-2], inheaderdata[-4].lower(), True, formatspecs)
9757
-
10425
+ infcs = GetHeaderChecksum(inheaderdata[:-2], inheaderdata[-4].lower(), True, formatspecs, saltkey)
9758
10426
  if(verbose):
9759
10427
  VerbosePrintOut(outfname)
9760
10428
  VerbosePrintOut("Record Number " + str(il) + "; File ID " + str(fid) + "; iNode Number " + str(finode))
9761
10429
 
9762
- if(hmac.compare_digest(outfcs, infcs)):
10430
+ if(CheckChecksums(outfcs, infcs)):
9763
10431
  if(verbose):
9764
10432
  VerbosePrintOut("File Header Checksum Passed at offset " + str(outfhstart))
9765
10433
  VerbosePrintOut("'" + outfcs + "' == " + "'" + infcs + "'")
@@ -9769,9 +10437,8 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
9769
10437
  if(verbose):
9770
10438
  VerbosePrintOut("File Header Checksum Failed at offset " + str(outfhstart))
9771
10439
  VerbosePrintOut("'" + outfcs + "' != " + "'" + infcs + "'")
9772
-
9773
10440
  if(outfjsonsize > 0):
9774
- if(hmac.compare_digest(injsonfcs, outfjsonchecksum)):
10441
+ if(CheckChecksums(injsonfcs, outfjsonchecksum)):
9775
10442
  if(verbose):
9776
10443
  VerbosePrintOut("File JSON Data Checksum Passed at offset " + str(outfjstart))
9777
10444
  VerbosePrintOut("'" + outfjsonchecksum + "' == " + "'" + injsonfcs + "'")
@@ -9781,21 +10448,19 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
9781
10448
  if(verbose):
9782
10449
  VerbosePrintOut("File JSON Data Checksum Error at offset " + str(outfjstart))
9783
10450
  VerbosePrintOut("'" + outfjsonchecksum + "' != " + "'" + injsonfcs + "'")
9784
-
9785
10451
  outfcontentstart = fp.tell()
9786
10452
  outfcontents = b"" # FIX: bytes for Py2/3 consistency
9787
10453
  pyhascontents = False
9788
-
9789
10454
  if(outfsize > 0):
9790
10455
  if(outfcompression == "none" or outfcompression == "" or outfcompression == "auto"):
9791
10456
  outfcontents = fp.read(outfsize)
9792
10457
  else:
9793
10458
  outfcontents = fp.read(outfcsize)
9794
10459
 
9795
- infccs = GetFileChecksum(outfcontents, inheaderdata[-3].lower(), False, formatspecs)
10460
+ infccs = GetFileChecksum(outfcontents, inheaderdata[-3].lower(), False, formatspecs, saltkey)
9796
10461
  pyhascontents = True
9797
10462
 
9798
- if(hmac.compare_digest(outfccs, infccs)):
10463
+ if(CheckChecksums(outfccs, infccs)):
9799
10464
  if(verbose):
9800
10465
  VerbosePrintOut("File Content Checksum Passed at offset " + str(outfcontentstart))
9801
10466
  VerbosePrintOut("'" + outfccs + "' == " + "'" + infccs + "'")
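Every hmac.compare_digest call site in this function now routes through CheckChecksums, whose body is not shown in this diff. A plausible minimal sketch, assuming it keeps the constant-time comparison while tolerating text digests:

import hmac

def CheckChecksums(expected, computed):
    # Assumed behavior only; the actual pyfoxfile implementation may
    # differ. Encode text digests to bytes, then compare in constant
    # time to avoid timing side channels.
    if isinstance(expected, str):
        expected = expected.encode("utf-8")
    if isinstance(computed, str):
        computed = computed.encode("utf-8")
    return hmac.compare_digest(expected, computed)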
@@ -9805,10 +10470,8 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
9805
10470
  if(verbose):
9806
10471
  VerbosePrintOut("File Content Checksum Failed at offset " + str(outfcontentstart))
9807
10472
  VerbosePrintOut("'" + outfccs + "' != " + "'" + infccs + "'")
9808
-
9809
10473
  if(verbose):
9810
10474
  VerbosePrintOut("")
9811
-
9812
10475
  # Next seek directive
9813
10476
  if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
9814
10477
  fseeknextasnum = int(outfseeknextfile.replace("+", ""))
@@ -9827,9 +10490,7 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
9827
10490
  fp.seek(fseeknextasnum, 0)
9828
10491
  else:
9829
10492
  return False
9830
-
9831
10493
  il = il + 1
9832
-
9833
10494
  if(valid_archive):
9834
10495
  if(returnfp):
9835
10496
  return fp
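A hypothetical validation run against a salted archive (path and key are illustrative); any header, JSON, or content checksum mismatch yields a falsy result:

result = FoxFileValidate("archive.fox", fmttype="auto",
                         saltkey=b"example-salt",
                         seektoend=True, verbose=True)
if not result:
    print("archive failed checksum validation")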
@@ -9841,34 +10502,34 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
9841
10502
  return False
9842
10503
 
9843
10504
 
9844
- def FoxFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
9845
- return FoxFileValidate(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
10505
+ def FoxFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
10506
+ return FoxFileValidate(infile, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
9846
10507
 
9847
10508
 
9848
- def FoxFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
10509
+ def FoxFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
9849
10510
  if(isinstance(infile, (list, tuple, ))):
9850
10511
  pass
9851
10512
  else:
9852
10513
  infile = [infile]
9853
10514
  outretval = True
9854
10515
  for curfname in infile:
9855
- curretfile = FoxFileValidate(curfname, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
10516
+ curretfile = FoxFileValidate(curfname, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
9856
10517
  if(not curretfile):
9857
10518
  outretval = False
9858
10519
  return outretval
9859
10520
 
9860
- def FoxFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
9861
- return FoxFileValidateMultiple(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
10521
+ def FoxFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
10522
+ return FoxFileValidateMultiple(infile, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
9862
10523
 
9863
10524
 
9864
- def StackedFoxFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
10525
+ def StackedFoxFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
9865
10526
  outretval = []
9866
10527
  outstartfile = filestart
9867
10528
  outfsize = float('inf')
9868
10529
  while True:
9869
10530
  if outstartfile >= outfsize: # stop once the offset reaches the end of the input
9870
10531
  break
9871
- is_valid_file = FoxFileValidate(infile, fmttype, outstartfile, formatspecs, seektoend, verbose, True)
10532
+ is_valid_file = FoxFileValidate(infile, fmttype, outstartfile, formatspecs, saltkey, seektoend, verbose, True)
9872
10533
  if is_valid_file is False: # stop when function signals False
9873
10534
  outretval.append(is_valid_file)
9874
10535
  break
@@ -9885,33 +10546,36 @@ def StackedFoxFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__fi
9885
10546
  if(returnfp):
9886
10547
  return infile
9887
10548
  else:
9888
- infile.close()
10549
+ try:
10550
+ infile.close()
10551
+ except AttributeError:
10552
+ return False
9889
10553
  return outretval
9890
10554
 
9891
10555
 
9892
10556
 
9893
- def StackedFoxFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
9894
- return StackedFoxFileValidate(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
10557
+ def StackedFoxFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
10558
+ return StackedFoxFileValidate(infile, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
9895
10559
 
9896
10560
 
9897
- def StackedFoxFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
10561
+ def StackedFoxFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
9898
10562
  if(isinstance(infile, (list, tuple, ))):
9899
10563
  pass
9900
10564
  else:
9901
10565
  infile = [infile]
9902
10566
  outretval = True
9903
10567
  for curfname in infile:
9904
- curretfile = StackedFoxFileValidate(curfname, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
10568
+ curretfile = StackedFoxFileValidate(curfname, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
9905
10569
  if(not curretfile):
9906
10570
  outretval = False
9907
10571
  return outretval
9908
10572
 
9909
- def StackedFoxFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
9910
- return StackedFoxFileValidateMultiple(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
10573
+ def StackedFoxFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
10574
+ return StackedFoxFileValidateMultiple(infile, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
9911
10575
 
9912
10576
 
9913
- def FoxFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
9914
- outfp = ReadInFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
10577
+ def FoxFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
10578
+ outfp = ReadInFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
9915
10579
  if not returnfp:
9916
10580
  for item in outfp:
9917
10581
  fp = item.get('fp')
@@ -9925,26 +10589,26 @@ def FoxFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0,
9925
10589
  return outfp
9926
10590
 
9927
10591
 
9928
- def MultipleFoxFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
10592
+ def MultipleFoxFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
9929
10593
  if(isinstance(infile, (list, tuple, ))):
9930
10594
  pass
9931
10595
  else:
9932
10596
  infile = [infile]
9933
10597
  outretval = []
9934
10598
  for curfname in infile:
9935
- outretval.append(FoxFileToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp))
10599
+ outretval.append(FoxFileToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend, returnfp))
9936
10600
  return outretval
9937
10601
 
9938
- def MultipleFoxFilesToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
9939
- return MultipleFoxFileToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
10602
+ def MultipleFoxFilesToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
10603
+ return MultipleFoxFileToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
9940
10604
 
9941
10605
 
9942
- def FoxFileStringToArray(instr, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
10606
+ def FoxFileStringToArray(instr, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
9943
10607
  checkcompressfile = CheckCompressionSubType(instr, formatspecs, filestart, True)
9944
10608
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
9945
10609
  formatspecs = formatspecs[checkcompressfile]
9946
10610
  fp = MkTempFile(instr)
9947
- listarrayfiles = FoxFileToArray(fp, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
10611
+ listarrayfiles = FoxFileToArray(fp, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
9948
10612
  return listarrayfiles
9949
10613
 
9950
10614
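FoxFileStringToArray wraps an in-memory byte string in MkTempFile before parsing; a hypothetical round trip (the file name is illustrative):

# Hypothetical: parse archive bytes already held in memory.
with open("archive.fox", "rb") as fh:
    raw = fh.read()
entries = FoxFileStringToArray(raw, saltkey=b"example-salt")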
 
@@ -9953,9 +10617,8 @@ def TarFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile
9953
10617
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
9954
10618
  formatspecs = formatspecs[checkcompressfile]
9955
10619
  fp = MkTempFile()
9956
- fp = PackFoxFileFromTarFile(
9957
- infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
9958
- listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
10620
+ fp = PackFoxFileFromTarFile(infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, None, False, True)
10621
+ listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, None, seektoend, returnfp)
9959
10622
  return listarrayfiles
9960
10623
 
9961
10624
 
@@ -9964,9 +10627,8 @@ def ZipFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile
9964
10627
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
9965
10628
  formatspecs = formatspecs[checkcompressfile]
9966
10629
  fp = MkTempFile()
9967
- fp = PackFoxFileFromZipFile(
9968
- infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
9969
- listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
10630
+ fp = PackFoxFileFromZipFile(infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, None, False, True)
10631
+ listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, None, seektoend, returnfp)
9970
10632
  return listarrayfiles
9971
10633
 
9972
10634
 
@@ -9980,9 +10642,8 @@ if(rarfile_support):
9980
10642
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
9981
10643
  formatspecs = formatspecs[checkcompressfile]
9982
10644
  fp = MkTempFile()
9983
- fp = PackFoxFileFromRarFile(
9984
- infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
9985
- listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
10645
+ fp = PackFoxFileFromRarFile(infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, None, False, True)
10646
+ listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, None, seektoend, returnfp)
9986
10647
  return listarrayfiles
9987
10648
 
9988
10649
  if(not py7zr_support):
@@ -9995,13 +10656,12 @@ if(py7zr_support):
9995
10656
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
9996
10657
  formatspecs = formatspecs[checkcompressfile]
9997
10658
  fp = MkTempFile()
9998
- fp = PackFoxFileFromSevenZipFile(
9999
- infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
10000
- listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
10659
+ fp = PackFoxFileFromSevenZipFile(infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, None, False, True)
10660
+ listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, None, seektoend, returnfp)
10001
10661
  return listarrayfiles
10002
10662
 
10003
10663
 
10004
- def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
10664
+ def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
10005
10665
  checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
10006
10666
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
10007
10667
  formatspecs = formatspecs[checkcompressfile]
@@ -10014,17 +10674,16 @@ def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, c
10014
10674
  elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
10015
10675
  return SevenZipFileToArray(infile, seekstart, seekend, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
10016
10676
  elif(checkcompressfile == formatspecs['format_magic']):
10017
- return FoxFileToArray(infile, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
10677
+ return FoxFileToArray(infile, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
10018
10678
  else:
10019
10679
  return False
10020
10680
  return False
10021
10681
 
10022
10682
 
10023
- def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, skipchecksum=False, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
10683
+ def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, saltkey=None, skipchecksum=False, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
10024
10684
  outarray = MkTempFile()
10025
- packform = PackFoxFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
10026
- compressionlevel, followlink, checksumtype, extradata, formatspecs, verbose, True)
10027
- listarrayfiles = FoxFileToArray(outarray, "auto", filestart, seekstart, seekend, listonly, True, True, skipchecksum, formatspecs, seektoend, returnfp)
10685
+ packform = PackFoxFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, followlink, checksumtype, extradata, formatspecs, saltkey, verbose, True)
10686
+ listarrayfiles = FoxFileToArray(outarray, "auto", filestart, seekstart, seekend, listonly, True, True, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
10028
10687
  return listarrayfiles
10029
10688
 
10030
10689
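ListDirToArray packs into an in-memory archive and immediately re-reads it, so a single saltkey serves both directions. A hypothetical call (directory name illustrative):

entries = ListDirToArray(["./project"],
                         checksumtype=["md5", "md5", "md5"],
                         saltkey=b"example-salt",
                         listonly=True)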
 
@@ -10146,12 +10805,12 @@ def FoxFileArrayToArrayIndex(inarray, returnfp=False):
10146
10805
  return out
10147
10806
 
10148
10807
 
10149
- def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=None, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=None, skipchecksum=False, extradata=None, jsondata=None, formatspecs=None, seektoend=False, verbose=False, returnfp=False):
10808
+ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, seektoend=False, verbose=False, returnfp=False):
10150
10809
  # ---------- Safe defaults ----------
10151
10810
  if compressionuselist is None:
10152
10811
  compressionuselist = compressionlistalt
10153
10812
  if checksumtype is None:
10154
- checksumtype = ["md5", "md5", "md5", "md5"]
10813
+ checksumtype = ["md5", "md5", "md5", "md5", "md5"]
10155
10814
  if extradata is None:
10156
10815
  extradata = []
10157
10816
  if jsondata is None:
@@ -10170,7 +10829,7 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
  infile = RemoveWindowsPath(infile)
  listarrayfileslist = FoxFileToArray(
  infile, "auto", filestart, seekstart, seekend,
- False, True, True, skipchecksum, formatspecs, seektoend, False
+ False, True, True, skipchecksum, formatspecs, insaltkey, seektoend, False
  )

  # ---------- Format specs selection ----------
@@ -10237,9 +10896,6 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
  if (compression is None) or (compressionuselist and compression not in compressionuselist):
  compression = "auto"

- if verbose:
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
-
  # No files?
  if not listarrayfiles.get('ffilelist'):
  return False
@@ -10252,7 +10908,7 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
  if lenlist != fnumfiles:
  fnumfiles = lenlist

- AppendFileHeader(fp, fnumfiles, listarrayfiles.get('fencoding', 'utf-8'), [], checksumtype[0], formatspecs)
+ AppendFileHeader(fp, fnumfiles, listarrayfiles.get('fencoding', 'utf-8'), listarrayfiles['fextradata'], listarrayfiles['fjsondata'], [checksumtype[0], checksumtype[1]], formatspecs, outsaltkey)

  # loop counters
  lcfi = 0
@@ -10282,6 +10938,9 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
  # fields (hex-encoded where expected)
  fheadersize = format(int(cur_entry['fheadersize']), 'x').lower()
  fsize = format(int(cur_entry['fsize']), 'x').lower()
+ fblksize = format(int(cur_entry['fblksize']), 'x').lower()
+ fblocks = format(int(cur_entry['fblocks']), 'x').lower()
+ fflags = format(int(cur_entry['fflags']), 'x').lower()
  flinkname = cur_entry['flinkname']
  fatime = format(int(cur_entry['fatime']), 'x').lower()
  fmtime = format(int(cur_entry['fmtime']), 'x').lower()
@@ -10300,8 +10959,7 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
  fcompression = cur_entry['fcompression']
  fcsize = format(int(cur_entry['fcsize']), 'x').lower()
  fdev = format(int(cur_entry['fdev']), 'x').lower()
- fdev_minor = format(int(cur_entry['fminor']), 'x').lower()
- fdev_major = format(int(cur_entry['fmajor']), 'x').lower()
+ frdev = format(int(cur_entry['frdev']), 'x').lower()
  fseeknextfile = cur_entry['fseeknextfile']

  # extra fields sizing
@@ -10312,6 +10970,12 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
  # extradata/jsondata defaults per file
  if not followlink and len(extradata) <= 0:
  extradata = cur_entry['fextradata']
+
+ fvendorfields = cur_entry['fvendorfields']
+ ffvendorfieldslist = []
+ if(fvendorfields>0):
+ ffvendorfieldslist = cur_entry['fvendorfieldslist']
+
  if not followlink and len(jsondata) <= 0:
  jsondata = cur_entry['fjsondata']

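The vendor-field read above is deliberately guarded: the integer count is checked before fvendorfieldslist is dereferenced, so entries produced by older writers (which lack vendor fields) still repack cleanly. The same guard applied to a single parsed entry, as a standalone sketch (entry layout taken from this function):

    entry = listarrayfiles['ffilelist'][0]
    vendor_fields = []
    # Only dereference the list when the count says it is present.
    if int(entry.get('fvendorfields', 0)) > 0:
        vendor_fields = entry['fvendorfieldslist']
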
@@ -10347,7 +11011,11 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
  fcontents.seek(0, 0)
  cfcontents.seek(0, 0)
  cfcontents = CompressOpenFileAlt(
- cfcontents, compressionuselist[ilmin], compressionlevel, compressionuselist, formatspecs
+ cfcontents,
+ compressionuselist[ilmin],
+ compressionlevel,
+ compressionuselist,
+ formatspecs
  )
  if cfcontents:
  cfcontents.seek(0, 2)
@@ -10355,7 +11023,7 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
  cfcontents.close()
  else:
  ilcsize.append(float("inf"))
- ilmin += 1
+ ilmin = ilmin + 1
  ilcmin = ilcsize.index(min(ilcsize))
  curcompression = compressionuselist[ilcmin]

@@ -10364,16 +11032,24 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
  shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
  cfcontents.seek(0, 0)
  cfcontents = CompressOpenFileAlt(
- cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs
+ cfcontents,
+ curcompression,
+ compressionlevel,
+ compressionuselist,
+ formatspecs
  )
  cfcontents.seek(0, 2)
- cfsize_val = cfcontents.tell()
- if ucfsize > cfsize_val:
- fcsize = format(int(cfsize_val), 'x').lower()
+ cfsize = cfcontents.tell()
+ if ucfsize > cfsize:
+ fcsize = format(int(cfsize), 'x').lower()
  fcompression = curcompression
  fcontents.close()
  fcontents = cfcontents

+ if fcompression == "none":
+ fcompression = ""
+ fcontents.seek(0, 0)
+
  # link following (fixed: use listarrayfiles, not prelistarrayfiles)
  if followlink:
  if (cur_entry['ftype'] == 1 or cur_entry['ftype'] == 2):
@@ -10382,6 +11058,9 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
  flinkinfo = listarrayfiles['ffilelist'][flinkid]
  fheadersize = format(int(flinkinfo['fheadersize']), 'x').lower()
  fsize = format(int(flinkinfo['fsize']), 'x').lower()
+ fblksize = format(int(flinkinfo['fblksize']), 'x').lower()
+ fblocks = format(int(flinkinfo['fblocks']), 'x').lower()
+ fflags = format(int(flinkinfo['fflags']), 'x').lower()
  flinkname = flinkinfo['flinkname']
  fatime = format(int(flinkinfo['fatime']), 'x').lower()
  fmtime = format(int(flinkinfo['fmtime']), 'x').lower()
@@ -10400,14 +11079,19 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
  fcompression = flinkinfo['fcompression']
  fcsize = format(int(flinkinfo['fcsize']), 'x').lower()
  fdev = format(int(flinkinfo['fdev']), 'x').lower()
- fdev_minor = format(int(flinkinfo['fminor']), 'x').lower()
- fdev_major = format(int(flinkinfo['fmajor']), 'x').lower()
+ frdev = format(int(flinkinfo['frdev']), 'x').lower()
  fseeknextfile = flinkinfo['fseeknextfile']
  if (len(flinkinfo['fextradata']) > flinkinfo['fextrafields']
  and len(flinkinfo['fextradata']) > 0):
  flinkinfo['fextrafields'] = len(flinkinfo['fextradata'])
  if len(extradata) < 0:
  extradata = flinkinfo['fextradata']
+
+ fvendorfields = flinkinfo['fvendorfields']
+ ffvendorfieldslist = []
+ if(fvendorfields>0):
+ ffvendorfieldslist = flinkinfo['fvendorfieldslist']
+
  if len(jsondata) < 0:
  jsondata = flinkinfo['fjsondata']
  fcontents = flinkinfo['fcontents']
@@ -10436,15 +11120,15 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
  fcompression = ""

  tmpoutlist = [
- ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime,
+ ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime,
  fctime, fbtime, fmode, fwinattributes, fcompression, fcsize, fuid, funame,
- fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, fseeknextfile
+ fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, fseeknextfile
  ]

- AppendFileHeaderWithContent(
- fp, tmpoutlist, extradata, jsondata, fcontents.read(),
- [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs
- )
+ if(fvendorfields>0 and len(ffvendorfieldslist)>0):
+ extradata.extend(ffvendorfieldslist)
+
+ AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, outsaltkey)
  try:
  fcontents.close()
  except Exception:
@@ -10489,12 +11173,12 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
  pass
  return True

- def RePackMultipleFoxFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=None, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=None, skipchecksum=False, extradata=None, jsondata=None, formatspecs=None, seektoend=False, verbose=False, returnfp=False):
+ def RePackMultipleFoxFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, seektoend=False, verbose=False, returnfp=False):
  if not isinstance(infiles, list):
  infiles = [infiles]
  returnout = False
  for infileslist in infiles:
- returnout = RePackFoxFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend, checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, True)
+ returnout = RePackFoxFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend, checksumtype, skipchecksum, extradata, jsondata, formatspecs, insaltkey, outsaltkey, seektoend, verbose, True)
  if(not returnout):
  break
  else:
@@ -10504,33 +11188,28 @@ def RePackMultipleFoxFile(infiles, outfile, fmttype="auto", compression="auto",
  return True
  return returnout

- def RePackFoxFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
+ def RePackFoxFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, seektoend=False, verbose=False, returnfp=False):
  fp = MkTempFile(instr)
- listarrayfiles = RePackFoxFile(fp, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
- checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
+ listarrayfiles = RePackFoxFile(fp, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend, checksumtype, skipchecksum, extradata, jsondata, formatspecs, insaltkey, outsaltkey, seektoend, verbose, returnfp)
  return listarrayfiles


- def PackFoxFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
+ def PackFoxFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
  outarray = MkTempFile()
- packform = PackFoxFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
- compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, True)
- listarrayfiles = RePackFoxFile(outarray, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
- checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
+ packform = PackFoxFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, saltkey, verbose, True)
+ listarrayfiles = RePackFoxFile(outarray, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend, checksumtype, skipchecksum, extradata, jsondata, formatspecs, saltkey, saltkey, seektoend, verbose, returnfp)
  return listarrayfiles


- def UnPackFoxFile(infile, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, preservepermissions=True, preservetime=True, seektoend=False, verbose=False, returnfp=False):
+ def UnPackFoxFile(infile, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, preservepermissions=True, preservetime=True, seektoend=False, verbose=False, returnfp=False):
  if(outdir is not None):
  outdir = RemoveWindowsPath(outdir)
- if(verbose):
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  if(isinstance(infile, dict)):
  listarrayfiles = infile
  else:
  if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
  infile = RemoveWindowsPath(infile)
- listarrayfiles = FoxFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, True, skipchecksum, formatspecs, seektoend, returnfp)
+ listarrayfiles = FoxFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, True, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
  if(not listarrayfiles):
  return False
  lenlist = len(listarrayfiles['ffilelist'])
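UnPackFoxFile gains the same saltkey passthrough on its extraction path. A minimal usage sketch, assuming the key matches whatever the archive was packed with (None for unsalted archives):

    # Hypothetical usage; keyword arguments sidestep the long positional tail.
    UnPackFoxFile("./backup.fox", outdir="./restore", saltkey=b"secret",
                  preservepermissions=True, preservetime=True)
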
@@ -10766,9 +11445,9 @@ def UnPackFoxFile(infile, outdir=None, followlink=False, filestart=0, seekstart=
  return True


- def UnPackFoxFileString(instr, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
+ def UnPackFoxFileString(instr, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
  fp = MkTempFile(instr)
- listarrayfiles = UnPackFoxFile(fp, outdir, followlink, filestart, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
+ listarrayfiles = UnPackFoxFile(fp, outdir, followlink, filestart, seekstart, seekend, skipchecksum, formatspecs, saltkey, True, True, seektoend, verbose, returnfp)
  return listarrayfiles

  def ftype_to_str(ftype):
@@ -10786,9 +11465,7 @@ def ftype_to_str(ftype):
  # Default to "file" if unknown
  return mapping.get(ftype, "file")

- def FoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
- if(verbose):
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
+ def FoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, newstyle=False, returnfp=False):
  if(isinstance(infile, dict)):
  listarrayfileslist = [infile]
  if(isinstance(infile, list)):
@@ -10796,7 +11473,7 @@ def FoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0
  else:
  if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
  infile = RemoveWindowsPath(infile)
- listarrayfileslist = FoxFileToArray(infile, fmttype, filestart, seekstart, seekend, True, False, False, skipchecksum, formatspecs, seektoend, returnfp)
+ listarrayfileslist = FoxFileToArray(infile, fmttype, filestart, seekstart, seekend, True, False, False, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
  if(not listarrayfileslist):
  return False
  for listarrayfiles in listarrayfileslist:
@@ -10833,8 +11510,11 @@ def FoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0
  VerbosePrintOut(ftype_to_str(listarrayfiles['ffilelist'][lcfi]['ftype']) + "\t" + listarrayfiles['ffilelist'][lcfi]['fcompression'] + "\t" + str(
  listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + "\t" + printfname)
  else:
+ ts_ns = listarrayfiles['ffilelist'][lcfi]['fmtime']
+ sec, ns = divmod(int(ts_ns), 10**9)
+ dt = datetime.datetime.utcfromtimestamp(sec).replace(microsecond=ns // 1000)
  VerbosePrintOut(PrintPermissionString(listarrayfiles['ffilelist'][lcfi]['fmode'], listarrayfiles['ffilelist'][lcfi]['ftype']) + " " + str(fuprint) + "/" + str(fgprint) + " " + str(
- listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + " " + datetime.datetime.utcfromtimestamp(listarrayfiles['ffilelist'][lcfi]['fmtime']).strftime('%Y-%m-%d %H:%M') + " " + printfname)
+ listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + " " + dt.strftime('%Y-%m-%d %H:%M') + " " + printfname)
  lcfi = lcfi + 1
  if(returnfp):
  return listarrayfiles['fp']
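The listing change above treats fmtime as integer nanoseconds since the epoch rather than whole seconds, splitting it with divmod before handing it to datetime (utcfromtimestamp would otherwise raise or misreport on nanosecond-scale values). The same conversion in isolation:

    import datetime

    ts_ns = 1731542400123456789  # example: nanoseconds since the epoch
    sec, ns = divmod(int(ts_ns), 10**9)
    dt = datetime.datetime.utcfromtimestamp(sec).replace(microsecond=ns // 1000)
    print(dt.strftime('%Y-%m-%d %H:%M'))  # -> 2024-11-14 00:00
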
@@ -10842,25 +11522,25 @@ def FoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0
  return True


- def MultipleFoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+ def MultipleFoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
  if(isinstance(infile, (list, tuple, ))):
  pass
  else:
  infile = [infile]
  outretval = {}
  for curfname in infile:
- outretval[curfname] = FoxFileListFiles(infile, fmttype, filestart, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, returnfp)
+ outretval[curfname] = FoxFileListFiles(curfname, fmttype, filestart, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend, False, False, returnfp)
  return outretval


- def StackedFoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
+ def StackedFoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, newstyle=False, returnfp=False):
  outretval = []
  outstartfile = filestart
  outfsize = float('inf')
  while True:
  if outstartfile >= outfsize: # stop when function signals False
  break
- list_file_retu = FoxFileListFiles(infile, fmttype, outstartfile, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, True)
+ list_file_retu = FoxFileListFiles(infile, fmttype, outstartfile, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend, verbose, newstyle, True)
  if list_file_retu is False: # stop when function signals False
  outretval.append(list_file_retu)
  else:
@@ -10876,30 +11556,31 @@ def StackedFoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, se
  if(returnfp):
  return infile
  else:
- infile.close()
+ try:
+ infile.close()
+ except AttributeError:
+ return False
  return outretval


- def MultipleStackedFoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+ def MultipleStackedFoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
  if(isinstance(infile, (list, tuple, ))):
  pass
  else:
  infile = [infile]
  outretval = {}
  for curfname in infile:
- outretval[curfname] = StackedFoxFileListFiles(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
+ outretval[curfname] = StackedFoxFileListFiles(curfname, fmttype, filestart, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend, False, False, returnfp)
  return outretval


- def FoxFileStringListFiles(instr, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
+ def FoxFileStringListFiles(instr, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, newstyle=False, returnfp=False):
  fp = MkTempFile(instr)
- listarrayfiles = FoxFileListFiles(instr, "auto", filestart, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, returnfp)
+ listarrayfiles = FoxFileListFiles(fp, "auto", filestart, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend, verbose, newstyle, returnfp)
  return listarrayfiles


  def TarFileListFiles(infile, verbose=False, returnfp=False):
- if(verbose):
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  if(infile == "-"):
  infile = MkTempFile()
  shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
@@ -10935,10 +11616,8 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
  formatspecs = formatspecs[compresscheck]
  if(compresscheck=="zstd"):
- if 'zstandard' in sys.modules:
- infile = ZstdFile(fileobj=infile, mode="rb")
- elif 'pyzstd' in sys.modules:
- infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+ if 'zstd' in compressionsupport:
+ infile = zstd.ZstdFile(infile, mode="rb")
  tarfp = tarfile.open(fileobj=infile, mode="r")
  else:
  tarfp = tarfile.open(fileobj=infile, mode="r")
@@ -10947,10 +11626,8 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
  formatspecs = formatspecs[compresscheck]
  if(compresscheck=="zstd"):
- if 'zstandard' in sys.modules:
- infile = ZstdFile(fileobj=infile, mode="rb")
- elif 'pyzstd' in sys.modules:
- infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+ if 'zstd' in compressionsupport:
+ infile = zstd.ZstdFile(infile, mode="rb")
  tarfp = tarfile.open(fileobj=infile, mode="r")
  else:
  tarfp = tarfile.open(infile, "r")
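Both TarFileListFiles branches now funnel zstd through a single compressionsupport gate and one ZstdFile wrapper, instead of probing sys.modules for two different backends. Reading a zstd-compressed tar the same way, sketched with pyzstd (one of the backends the old code probed; which module the local zstd name binds to is an internal detail of this file):

    import tarfile
    import pyzstd  # assumption: the pyzstd backend is installed

    with open("archive.tar.zst", "rb") as raw:
        with pyzstd.ZstdFile(raw, mode="rb") as zf:
            with tarfile.open(fileobj=zf, mode="r") as tarfp:
                for member in tarfp.getmembers():
                    print(member.name)
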
@@ -11020,8 +11697,6 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):


  def ZipFileListFiles(infile, verbose=False, returnfp=False):
- if(verbose):
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  if(infile == "-"):
  infile = MkTempFile()
  shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
@@ -11147,8 +11822,6 @@ if(not rarfile_support):


  def RarFileListFiles(infile, verbose=False, returnfp=False):
- if(verbose):
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  if(not os.path.exists(infile) or not os.path.isfile(infile)):
  return False
  if(not rarfile.is_rarfile(infile) and not rarfile.is_rarfile_sfx(infile)):
@@ -11276,14 +11949,15 @@ if(not py7zr_support):


  def SevenZipFileListFiles(infile, verbose=False, returnfp=False):
- if(verbose):
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  if(not os.path.exists(infile) or not os.path.isfile(infile)):
  return False
  lcfi = 0
  returnval = {}
  szpfp = py7zr.SevenZipFile(infile, mode="r")
- file_content = szpfp.readall()
+ try:
+ file_content = szpfp.readall()
+ except AttributeError:
+ file_content = sevenzip_readall(infile)
  #sztest = szpfp.testzip()
  sztestalt = szpfp.test()
  if(sztestalt):
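The try/except above keeps SevenZipFileListFiles working on older py7zr releases whose SevenZipFile lacks readall(), falling back to the module's sevenzip_readall() helper. A standalone sketch of the same idea using only documented py7zr calls (the function name here is illustrative, not the module's actual helper):

    import py7zr

    def read_all_members(path):
        # Newer py7zr exposes readall(); older releases only have read(),
        # which extracts everything when called with no targets.
        with py7zr.SevenZipFile(path, mode="r") as szp:
            try:
                return szp.readall()
            except AttributeError:
                return szp.read()
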
@@ -11327,7 +12001,10 @@ if(py7zr_support):
  printfname = member.filename
  if(ftype == 0):
  fsize = len(file_content[member.filename].read())
- file_content[member.filename].close()
+ try:
+ file_content[member.filename].close()
+ except AttributeError:
+ pass
  try:
  fuid = int(os.getuid())
  except (KeyError, AttributeError):
@@ -11371,8 +12048,6 @@ if(py7zr_support):


  def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict__, seektoend=False, newstyle=False, returnfp=False):
- if(verbose):
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
  formatspecs = formatspecs[checkcompressfile]
@@ -11399,44 +12074,6 @@ def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compress
  outarray, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
  return listarrayfiles

- """
- PyNeoFile compatibility layer
- """
-
- def make_empty_file_pointer_neo(fp, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
- return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)
-
- def make_empty_archive_file_pointer_neo(fp, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
- return make_empty_file_pointer_neo(fp, fmttype, checksumtype, formatspecs, encoding)
-
- def make_empty_file_neo(outfile=None, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
- return MakeEmptyFile(outfile, fmttype, "auto", False, None, compressionlistalt, checksumtype, formatspecs, returnfp)
-
- def make_empty_archive_file_neo(outfile=None, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
- return make_empty_file_neo(outfile, fmttype, checksumtype, formatspecs, encoding, returnfp)
-
- def pack_neo(infiles, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["md5", "md5", "md5", "md5"], encoding="UTF-8", compression="auto", compression_level=None, returnfp=False):
- return PackFoxFile(infiles, outfile, False, "auto", compression, False, compression_level, compressionlistalt, False, checksumtypes, [], {}, formatspecs, False, returnfp)
-
- def archive_to_array_neo(infile, formatspecs=__file_format_multi_dict__, listonly=False, skipchecksum=False, uncompress=True, returnfp=False):
- return FoxFileToArray(infile, "auto", 0, 0, 0, listonly, True, uncompress, skipchecksum, formatspecs, False, returnfp)
-
- def unpack_neo(infile, outdir='.', formatspecs=__file_format_multi_dict__, skipchecksum=False, uncompress=True, returnfp=False):
- return UnPackFoxFile(infile, outdir, False, 0, 0, skipchecksum, formatspecs, True, True, False, False, returnfp)
-
- def repack_neo(infile, outfile=None, formatspecs=__file_format_dict__, checksumtypes=["md5", "md5", "md5", "md5"], compression="auto", compression_level=None, returnfp=False):
- return RePackFoxFile(infile, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)
-
- def validate_neo(infile, formatspecs=__file_format_multi_dict__, verbose=False, return_details=False, returnfp=False):
- return FoxFileValidate(infile, "auto", formatspecs, False, verbose, returnfp)
-
- def listfiles_neo(infile, formatspecs=__file_format_multi_dict__, advanced=False, include_dirs=True, returnfp=False):
- return FoxFileListFiles(infile, "auto", 0, 0, False, formatspecs, False, True, advanced, returnfp)
-
- def convert_foreign_to_neo(infile, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["md5", "md5", "md5", "md5"], compression="auto", compression_level=None, returnfp=False):
- intmp = InFileToArray(infile, 0, 0, 0, False, True, False, formatspecs, False, False)
- return RePackFoxFile(intmp, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)
-
  def detect_cwd(ftp, file_dir):
  """
  Test whether cwd into file_dir works. Returns True if it does,
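Since the PyNeoFile compatibility layer is removed outright, the deleted wrapper bodies above double as a migration map. For example, the one-line equivalent of the removed archive_to_array_neo(infile), updated for the saltkey argument that 0.27.0 inserts before seektoend:

    # Hypothetical migration sketch; defaults mirror the removed wrapper.
    FoxFileToArray(infile, "auto", 0, 0, 0, False, True, True, False,
                   __file_format_multi_dict__, None, False, False)
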
@@ -13502,7 +14139,6 @@ def run_http_file_server(fileobj, url, on_progress=None, backlog=5):
  if not ah or not ah.strip().lower().startswith("basic "):
  return False
  try:
- import base64
  b64 = ah.strip().split(" ", 1)[1]
  raw = base64.b64decode(_to_bytes(b64))
  try: raw_txt = raw.decode("utf-8")