PyCatFile 0.25.0__py3-none-any.whl → 0.26.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pycatfile.py CHANGED
@@ -14,7 +14,7 @@
  Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
  Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski

- $FileInfo: pycatfile.py - Last Update: 11/3/2025 Ver. 0.25.0 RC 1 - Author: cooldude2k $
+ $FileInfo: pycatfile.py - Last Update: 11/12/2025 Ver. 0.26.0 RC 1 - Author: cooldude2k $
  '''

  from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -416,9 +416,13 @@ def is_only_nonprintable(var):
  __file_format_multi_dict__ = {}
  __file_format_default__ = "CatFile"
  __include_defaults__ = True
- __use_inmemfile__ = True
+ __use_inmem__ = True
+ __use_memfd__ = True
  __use_spoolfile__ = False
  __use_spooldir__ = tempfile.gettempdir()
+ __use_new_style__ = True
+ __use_advanced_list__ = True
+ __use_alt_inode__ = False
  BYTES_PER_KiB = 1024
  BYTES_PER_MiB = 1024 * BYTES_PER_KiB
  # Spool: not tiny, but won’t blow up RAM if many are in use
@@ -462,9 +466,13 @@ if __use_ini_file__ and os.path.exists(__config_file__):
  __file_format_default__ = decode_unicode_escape(config.get('config', 'default'))
  __program_name__ = decode_unicode_escape(config.get('config', 'proname'))
  __include_defaults__ = config.getboolean('config', 'includedef')
- __use_inmemfile__ = config.getboolean('config', 'inmemfile')
+ __use_inmem__ = config.getboolean('config', 'useinmem')
+ __use_memfd__ = config.getboolean('config', 'usememfd')
  __use_spoolfile__ = config.getboolean('config', 'usespoolfile')
  __spoolfile_size__ = config.getint('config', 'spoolfilesize')
+ __use_new_style__ = config.getboolean('config', 'newstyle')
+ __use_advanced_list__ = config.getboolean('config', 'advancedlist')
+ __use_alt_inode__ = config.getboolean('config', 'altinode')
  # Loop through all sections
  for section in config.sections():
  if section == "config":
@@ -472,8 +480,7 @@ if __use_ini_file__ and os.path.exists(__config_file__):

  required_keys = [
  "len", "hex", "ver", "name",
- "magic", "delimiter", "extension",
- "newstyle", "advancedlist", "altinode"
+ "magic", "delimiter", "extension"
  ]

  # Py2+Py3 compatible key presence check
@@ -493,9 +500,6 @@ if __use_ini_file__ and os.path.exists(__config_file__):
  'format_hex': config.get(section, 'hex'),
  'format_delimiter': delim,
  'format_ver': config.get(section, 'ver'),
- 'new_style': config.getboolean(section, 'newstyle'),
- 'use_advanced_list': config.getboolean(section, 'advancedlist'),
- 'use_alt_inode': config.getboolean(section, 'altinode'),
  'format_extension': decode_unicode_escape(config.get(section, 'extension')),
  }
  })
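The newstyle/advancedlist/altinode flags move out of the per-format sections and into the global [config] section. A minimal sketch of an INI exercising the new keys, read back the same way the loader does (illustrative only; key names come from the hunk above, the values are not PyCatFile's shipped defaults):

```python
# Hedged sketch: parse a [config] section with the 0.26.0 key names.
try:
    import configparser  # Python 3
except ImportError:
    import ConfigParser as configparser  # Python 2

sample_ini = u"""
[config]
default = CatFile
proname = PyCatFile
includedef = true
useinmem = true
usememfd = true
usespoolfile = false
spoolfilesize = 262144
newstyle = true
advancedlist = true
altinode = false
"""

config = configparser.ConfigParser()
try:
    config.read_string(sample_ini)          # Python 3
except AttributeError:
    import io
    config.readfp(io.StringIO(sample_ini))  # Python 2 fallback

# Same accessors the diff uses for the new global toggles
print(config.getboolean('config', 'useinmem'))  # True
print(config.getboolean('config', 'usememfd'))  # True
print(config.getboolean('config', 'newstyle'))  # True
```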
@@ -556,16 +560,19 @@ elif __use_json_file__ and os.path.exists(__config_file__):
  cfg_config = cfg.get('config', {}) or {}
  __file_format_default__ = decode_unicode_escape(_get(cfg_config, 'default', ''))
  __program_name__ = decode_unicode_escape(_get(cfg_config, 'proname', ''))
- __include_defaults__ = _to_bool(_get(cfg_config, 'includedef', False))
- __use_inmemfile__ = _to_bool(_get(cfg_config, 'inmemfile', False))
+ __include_defaults__ = _to_bool(_get(cfg_config, 'includedef', True))
+ __use_inmem__ = _to_bool(_get(cfg_config, 'useinmem', True))
+ __use_memfd__ = _to_bool(_get(cfg_config, 'usememfd', True))
  __use_spoolfile__ = _to_bool(_get(cfg_config, 'usespoolfile', False))
  __spoolfile_size__ = _to_int(_get(cfg_config, 'spoolfilesize', DEFAULT_SPOOL_MAX))
+ __use_new_style__ = _to_bool(_get(cfg_config, 'newstyle', True))
+ __use_advanced_list__ = _to_bool(_get(cfg_config, 'advancedlist', True))
+ __use_alt_inode__ = _to_bool(_get(cfg_config, 'altinode', False))

  # --- iterate format sections (everything except "config") ---
  required_keys = [
  "len", "hex", "ver", "name",
- "magic", "delimiter", "extension",
- "newstyle", "advancedlist", "altinode"
+ "magic", "delimiter", "extension"
  ]

  for section_name, section in cfg.items():
@@ -583,9 +590,6 @@ elif __use_json_file__ and os.path.exists(__config_file__):
  fmt_hex = decode_unicode_escape(_get(section, 'hex', ''))
  fmt_ver = decode_unicode_escape(_get(section, 'ver', ''))
  delim = decode_unicode_escape(_get(section, 'delimiter', ''))
- new_style = _to_bool(_get(section, 'newstyle', False))
- adv_list = _to_bool(_get(section, 'advancedlist', False))
- alt_inode = _to_bool(_get(section, 'altinode', False))
  extension = decode_unicode_escape(_get(section, 'extension', ''))

  # keep your delimiter validation semantics
@@ -600,9 +604,6 @@ elif __use_json_file__ and os.path.exists(__config_file__):
  'format_hex': fmt_hex,
  'format_delimiter': delim,
  'format_ver': fmt_ver,
- 'new_style': new_style,
- 'use_advanced_list': adv_list,
- 'use_alt_inode': alt_inode,
  'format_extension': extension,
  }
  })
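Note: in the wheel as published, all three new toggles in the JSON branch are read from the 'usespoolfile' key, which reads like a copy-paste slip; the hunk above is shown with the key names the INI loader uses ('newstyle', 'advancedlist', 'altinode'). A hypothetical JSON equivalent of the same settings:

```python
# Hedged sketch: a JSON config mirroring the INI example above.
# Key names follow the INI loader; this is not a file shipped by PyCatFile.
import json

sample_json = """
{
  "config": {
    "default": "CatFile",
    "proname": "PyCatFile",
    "includedef": true,
    "useinmem": true,
    "usememfd": true,
    "usespoolfile": false,
    "spoolfilesize": 262144,
    "newstyle": true,
    "advancedlist": true,
    "altinode": false
  }
}
"""

cfg = json.loads(sample_json)
cfg_config = cfg.get('config', {}) or {}
print(cfg_config.get('newstyle', True))      # True
print(cfg_config.get('advancedlist', True))  # True
print(cfg_config.get('altinode', False))     # False
```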
@@ -640,21 +641,18 @@ __file_format_len__ = __file_format_multi_dict__[__file_format_default__]['forma
  __file_format_hex__ = __file_format_multi_dict__[__file_format_default__]['format_hex']
  __file_format_delimiter__ = __file_format_multi_dict__[__file_format_default__]['format_delimiter']
  __file_format_ver__ = __file_format_multi_dict__[__file_format_default__]['format_ver']
- __use_new_style__ = __file_format_multi_dict__[__file_format_default__]['new_style']
- __use_advanced_list__ = __file_format_multi_dict__[__file_format_default__]['use_advanced_list']
- __use_alt_inode__ = __file_format_multi_dict__[__file_format_default__]['use_alt_inode']
  __file_format_extension__ = __file_format_multi_dict__[__file_format_default__]['format_extension']
  __file_format_dict__ = __file_format_multi_dict__[__file_format_default__]
  __project__ = __program_name__
  __program_alt_name__ = __program_name__
  __project_url__ = "https://github.com/GameMaker2k/PyCatFile"
  __project_release_url__ = __project_url__+"/releases/latest"
- __version_info__ = (0, 25, 0, "RC 1", 1)
- __version_date_info__ = (2025, 11, 5, "RC 1", 1)
+ __version_info__ = (0, 26, 0, "RC 1", 1)
+ __version_date_info__ = (2025, 11, 12, "RC 1", 1)
  __version_date__ = str(__version_date_info__[0]) + "." + str(
  __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
  __revision__ = __version_info__[3]
- __revision_id__ = "$Id: 74aa70c1670492a2322dd50826f637a861fcab9e $"
+ __revision_id__ = "$Id: 1f4434bfb0e0cb5e732daced1add124d7b880a31 $"
  if(__version_info__[4] is not None):
  __version_date_plusrc__ = __version_date__ + \
  "-" + str(__version_date_info__[4])
@@ -666,6 +664,9 @@ if(__version_info__[3] is not None):
  if(__version_info__[3] is None):
  __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])

+ _logger = logging.getLogger(__project__)  # library-style logger
+ _logger.addHandler(logging.NullHandler())  # don't emit logs unless app configures logging
+
  # From: https://stackoverflow.com/a/28568003
  # By Phaxmohdem

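The NullHandler follows the standard library-logging convention: pycatfile stays silent until the embedding application configures logging. A minimal opt-in sketch (the logger name is __project__, i.e. the configured program name, assumed here to be "PyCatFile"; adjust if your config renames it):

```python
# Hedged sketch: enable the library's logs from application code.
import logging

logging.basicConfig(level=logging.DEBUG)  # app-level handler/format
logging.getLogger("PyCatFile").setLevel(logging.INFO)
```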
@@ -1035,6 +1036,20 @@ def VerbosePrintOutReturn(dbgtxt, outtype="log", dbgenable=True, dgblevel=20, **
  VerbosePrintOut(dbgtxt, outtype, dbgenable, dgblevel, **kwargs)
  return dbgtxt

+ def to_ns(timestamp):
+ """
+ Convert a second-resolution timestamp (int or float)
+ into a nanosecond timestamp (int) by multiplying by 1e9.
+ Works in Python 2 and Python 3.
+ """
+ try:
+ # Convert incoming timestamp to float so it works for int or float
+ seconds = float(timestamp)
+ except (TypeError, ValueError):
+ raise ValueError("Timestamp must be int or float")
+
+ # Multiply by 1e9 to get nanoseconds, then cast to int
+ return int(seconds * 1000000000)

  def _split_posix(name):
  """
@@ -2058,34 +2073,53 @@ def _normalize_initial_data(data, isbytes, encoding, errors=None):


  def MkTempFile(data=None,
- inmem=__use_inmemfile__,
+ inmem=__use_inmem__, usememfd=__use_memfd__,
  isbytes=True,
- prefix="",
+ prefix=__program_name__,
  delete=True,
  encoding="utf-8",
- newline=None, # text mode only; in-memory objects ignore newline semantics
+ newline=None,
+ text_errors="strict",
  dir=None,
  suffix="",
  use_spool=__use_spoolfile__,
+ autoswitch_spool=False,
  spool_max=__spoolfile_size__,
- spool_dir=__use_spooldir__):
+ spool_dir=__use_spooldir__,
+ reset_to_start=True,
+ memfd_name=None,
+ memfd_allow_sealing=False,
+ memfd_flags_extra=0,
+ on_create=None):
  """
  Return a file-like handle with consistent behavior on Py2.7 and Py3.x.

  Storage:
- - inmem=True -> BytesIO (bytes) or StringIO (text)
- - inmem=False, use_spool=True -> SpooledTemporaryFile (binary), optionally TextIOWrapper for text
- - inmem=False, use_spool=False -> NamedTemporaryFile (binary), optionally TextIOWrapper for text
+ - inmem=True, usememfd=True, isbytes=True and memfd available
+ -> memfd-backed anonymous file (binary)
+ - inmem=True, otherwise
+ -> BytesIO (bytes) or StringIO (text)
+ - inmem=False, use_spool=True
+ -> SpooledTemporaryFile (binary), optionally TextIOWrapper for text
+ - inmem=False, use_spool=False
+ -> NamedTemporaryFile (binary), optionally TextIOWrapper for text

  Text vs bytes:
  - isbytes=True -> file expects bytes; 'data' must be bytes-like
- - isbytes=False -> file expects text; 'data' must be text (unicode/str). Newline translation and encoding
- apply only for spooled/named files (not BytesIO/StringIO).
+ - isbytes=False -> file expects text; 'data' must be text (unicode/str). Newline translation and
+ encoding apply only for spooled/named files (not BytesIO/StringIO).

  Notes:
- - On Windows, NamedTemporaryFile(delete=True) keeps the file open and cannot be reopened by other processes.
- Use delete=False if you need to pass the path elsewhere.
- - For text: in-memory StringIO ignores 'newline' (as usual).
+ - On Windows, NamedTemporaryFile(delete=True) keeps the file open and cannot be reopened by
+ other processes. Use delete=False if you need to pass the path elsewhere.
+ - For text: in-memory StringIO ignores 'newline' and 'text_errors' (as usual).
+ - When available, and if usememfd=True, memfd is used only for inmem=True and isbytes=True,
+ providing an anonymous in-memory file descriptor (Linux-only). Text in-memory still uses
+ StringIO to preserve newline semantics.
+ - If autoswitch_spool=True and initial data size exceeds spool_max, in-memory storage is
+ skipped and a spooled file is used instead (if use_spool=True).
+ - If on_create is not None, it is called as on_create(fp, kind) where kind is one of:
+ "memfd", "bytesio", "stringio", "spool", "disk".
  """

  # -- sanitize simple params (avoid None surprises) --
@@ -2117,23 +2151,65 @@ def MkTempFile(data=None,
  else:
  init = None

+ # Size of init for autoswitch; only meaningful for bytes
+ init_len = len(init) if (init is not None and isbytes) else None
+
  # -------- In-memory --------
  if inmem:
- if isbytes:
- f = io.BytesIO(init if init is not None else b"")
- else:
- # newline not enforced for StringIO; matches stdlib semantics
- f = io.StringIO(init if init is not None else "")
- # already positioned at 0 with provided init; ensure rewind for symmetry
- f.seek(0)
- return f
+ # If autoswitch is enabled and data is larger than spool_max, and
+ # spooling is allowed, skip the in-memory branch and fall through
+ # to the spool/disk logic below.
+ if autoswitch_spool and use_spool and init_len is not None and init_len > spool_max:
+ pass  # fall through to spool/disk sections
+ else:
+ # Use memfd only for bytes, and only where available (Linux, Python 3.8+)
+ if usememfd and isbytes and hasattr(os, "memfd_create"):
+ name = memfd_name or prefix or "MkTempFile"
+ flags = 0
+ # Close-on-exec is almost always what you want for temps
+ if hasattr(os, "MFD_CLOEXEC"):
+ flags |= os.MFD_CLOEXEC
+ # Optional sealing support if requested and available
+ if memfd_allow_sealing and hasattr(os, "MFD_ALLOW_SEALING"):
+ flags |= os.MFD_ALLOW_SEALING
+ # Extra custom flags (e.g. hugepage flags) if caller wants them
+ if memfd_flags_extra:
+ flags |= memfd_flags_extra
+
+ fd = os.memfd_create(name, flags)
+ # Binary read/write file-like object backed by RAM
+ f = os.fdopen(fd, "w+b")
+
+ if init is not None:
+ f.write(init)
+ if reset_to_start:
+ f.seek(0)
+
+ if on_create is not None:
+ on_create(f, "memfd")
+ return f
+
+ # Fallback: pure Python in-memory objects
+ if isbytes:
+ f = io.BytesIO(init if init is not None else b"")
+ kind = "bytesio"
+ else:
+ # newline/text_errors not enforced for StringIO; matches stdlib semantics
+ f = io.StringIO(init if init is not None else "")
+ kind = "stringio"
+
+ if reset_to_start:
+ f.seek(0)
+
+ if on_create is not None:
+ on_create(f, kind)
+ return f

  # Helper: wrap a binary file into a text file with encoding/newline
  def _wrap_text(handle):
  # For both Py2 & Py3, TextIOWrapper gives consistent newline/encoding behavior
- tw = io.TextIOWrapper(handle, encoding=encoding, newline=newline)
- # Position at start; if we wrote initial data below, we will rewind after writing
- return tw
+ return io.TextIOWrapper(handle, encoding=encoding,
+ newline=newline, errors=text_errors)

  # -------- Spooled (RAM then disk) --------
  if use_spool:
@@ -2141,19 +2217,33 @@ def MkTempFile(data=None,
  bin_mode = "w+b"  # read/write, binary
  b = tempfile.SpooledTemporaryFile(max_size=spool_max, mode=bin_mode, dir=spool_dir)
  f = b if isbytes else _wrap_text(b)
+
  if init is not None:
  f.write(init)
+ if reset_to_start:
+ f.seek(0)
+ elif reset_to_start:
  f.seek(0)
+
+ if on_create is not None:
+ on_create(f, "spool")
  return f

  # -------- On-disk temp (NamedTemporaryFile) --------
  # Always create binary file; wrap for text if needed for uniform Py2/3 behavior
- b = tempfile.NamedTemporaryFile(mode="w+b", prefix=prefix, suffix=suffix, dir=dir, delete=delete)
+ b = tempfile.NamedTemporaryFile(mode="w+b", prefix=prefix, suffix=suffix,
+ dir=dir, delete=delete)
  f = b if isbytes else _wrap_text(b)

  if init is not None:
  f.write(init)
+ if reset_to_start:
+ f.seek(0)
+ elif reset_to_start:
  f.seek(0)
+
+ if on_create is not None:
+ on_create(f, "disk")
  return f

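A usage sketch for the reworked MkTempFile, using only the parameter names in the hunks above. On Linux with Python 3.8+ and usememfd=True the bytes path lands in a memfd; elsewhere it falls back to BytesIO, so the same call runs on both:

```python
# Hedged sketch, assuming pycatfile is importable as installed.
from pycatfile import MkTempFile

created = []

def observer(fp, kind):
    # on_create callback: kind is one of "memfd", "bytesio",
    # "stringio", "spool", "disk"
    created.append(kind)

f = MkTempFile(b"hello world", inmem=True, isbytes=True,
               on_create=observer)
print(f.read())   # b'hello world' (reset_to_start=True rewinds first)
print(created)    # ['memfd'] on Linux 3.8+, otherwise ['bytesio']
f.close()

# Large payloads can skip RAM entirely: with autoswitch_spool=True,
# init data bigger than spool_max falls through to a spooled file.
big = b"x" * (1024 * 1024)
f = MkTempFile(big, inmem=True, use_spool=True,
               autoswitch_spool=True, spool_max=64 * 1024)
f.close()
```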
@@ -3673,7 +3763,7 @@ def _bytes_to_int(b):
  # =========================
  # Public checksum API
  # =========================
- def GetHeaderChecksum(inlist=None, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
+ def GetHeaderChecksum(inlist=None, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__, saltkey=None):
  """
  Serialize header fields (list/tuple => joined with delimiter + trailing delimiter;
  or a single field) and compute the requested checksum. Returns lowercase hex.
@@ -3685,15 +3775,30 @@ def GetHeaderChecksum(inlist=None, checksumtype="md5", encodedata=True, formatsp
  if encodedata and not isinstance(hdr_bytes, (bytes, bytearray, memoryview)):
  hdr_bytes = _to_bytes(hdr_bytes)
  hdr_bytes = bytes(hdr_bytes)
-
+ saltkeyval = None
+ if(hasattr(saltkey, "read")):
+ saltkeyval = saltkey.read()
+ if(not isinstance(saltkeyval, bytes) and sys.version_info[0] >= 3):
+ saltkeyval = saltkeyval.encode("UTF-8")
+ elif(isinstance(saltkey, bytes) and sys.version_info[0] >= 3):
+ saltkeyval = saltkey
+ elif(saltkey is not None and os.path.exists(saltkey)):
+ with open(saltkey, "rb") as skfp:
+ saltkeyval = skfp.read()
+ else:
+ saltkey = None
+ if(saltkeyval is None):
+ saltkey = None
  if CheckSumSupport(algo_key, hashlib_guaranteed):
- h = hashlib.new(algo_key)
- h.update(hdr_bytes)
- return h.hexdigest().lower()
+ if(saltkey is None or saltkeyval is None):
+ h = hashlib.new(algo_key, hdr_bytes)
+ else:
+ h = hmac.new(saltkeyval, hdr_bytes, digestmod=algo_key)
+ return h.hexdigest().lower()

  return "0"

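The checksum API gains a saltkey parameter (key bytes, a readable file-like object, or a path to a key file) that switches digests from plain hashing to HMAC. One fix folded into the hunk above: the published wheel calls `skfp.read()` in the file-like branch before `skfp` is bound; it is shown here as `saltkey.read()`, which is clearly what was meant. A standalone sketch of the two digest paths:

```python
# Hedged sketch of plain-vs-salted digests, mirroring the logic above.
import hashlib
import hmac

hdr_bytes = b"CatFile\x001a\x00"  # stand-in for serialized header fields

plain = hashlib.new("md5", hdr_bytes).hexdigest().lower()
salted = hmac.new(b"secret-key", hdr_bytes,
                  digestmod="md5").hexdigest().lower()

# The salted digest only verifies for readers holding the same key.
print(plain != salted)  # True
```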
- def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
+ def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__, saltkey=None):
  """
  Accepts bytes/str/file-like.
  - Hashlib algos: streamed in 1 MiB chunks.
@@ -3701,13 +3806,29 @@ def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__
  - Falls back to one-shot for non-file-like inputs.
  """
  algo_key = (checksumtype or "md5").lower()
-
+ saltkeyval = None
+ if(hasattr(saltkey, "read")):
+ saltkeyval = saltkey.read()
+ if(not isinstance(saltkeyval, bytes) and sys.version_info[0] >= 3):
+ saltkeyval = saltkeyval.encode("UTF-8")
+ elif(isinstance(saltkey, bytes) and sys.version_info[0] >= 3):
+ saltkeyval = saltkey
+ elif(saltkey is not None and os.path.exists(saltkey)):
+ with open(saltkey, "rb") as skfp:
+ saltkeyval = skfp.read()
+ else:
+ saltkey = None
+ if(saltkeyval is None):
+ saltkey = None
  # file-like streaming
  if hasattr(inbytes, "read"):
  # hashlib

  if CheckSumSupport(algo_key, hashlib_guaranteed):
- h = hashlib.new(algo_key)
+ if(saltkey is None or saltkeyval is None):
+ h = hashlib.new(algo_key)
+ else:
+ h = hmac.new(saltkeyval, digestmod=algo_key)
  while True:
  chunk = inbytes.read(__filebuff_size__)
  if not chunk:
@@ -3728,26 +3849,41 @@ def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__
  # one-shot

  if CheckSumSupport(algo_key, hashlib_guaranteed):
- h = hashlib.new(algo_key)
- h.update(data)
+ if(saltkey is None or saltkeyval is None):
+ h = hashlib.new(algo_key, data)
+ else:
+ h = hmac.new(saltkeyval, data, digestmod=algo_key)
  return h.hexdigest().lower()

  return "0"

- def ValidateHeaderChecksum(inlist=None, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__):
- calc = GetHeaderChecksum(inlist, checksumtype, True, formatspecs)
+ def ValidateHeaderChecksum(inlist=None, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__, saltkey=None):
+ calc = GetHeaderChecksum(inlist, checksumtype, True, formatspecs, saltkey)
  want = (inchecksum or "0").strip().lower()
  if want.startswith("0x"):
  want = want[2:]
- return hmac.compare_digest(want, calc)
+ return CheckChecksums(want, calc)

- def ValidateFileChecksum(infile, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__):
- calc = GetFileChecksum(infile, checksumtype, True, formatspecs)
+ def ValidateFileChecksum(infile, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__, saltkey=None):
+ calc = GetFileChecksum(infile, checksumtype, True, formatspecs, saltkey)
  want = (inchecksum or "0").strip().lower()
  if want.startswith("0x"):
  want = want[2:]
- return hmac.compare_digest(want, calc)
+ return CheckChecksums(want, calc)
+
+ def CheckChecksums(inchecksum, outchecksum):
+ # Normalize as text first
+ calc = (inchecksum or "0").strip().lower()
+ want = (outchecksum or "0").strip().lower()
+
+ if want.startswith("0x"):
+ want = want[2:]

+ # Now force both to bytes
+ calc_b = _to_bytes(calc)  # defaults to utf-8, strict
+ want_b = _to_bytes(want)
+
+ return hmac.compare_digest(want_b, calc_b)

  def MajorMinorToDev(major, minor):
  """
@@ -4116,11 +4252,11 @@ def ReadFileHeaderDataWoSize(fp, delimiter=_default_delim(None)):
  return first_two + headerdata


- def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
  if(not hasattr(fp, "read")):
  return False
  delimiter = formatspecs['format_delimiter']
- if(formatspecs['new_style']):
+ if(__use_new_style__):
  HeaderOut = ReadFileHeaderDataBySize(fp, delimiter)
  else:
  HeaderOut = ReadFileHeaderDataWoSize(fp, delimiter)
@@ -4208,15 +4344,14 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
  except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
  pass
  fp.seek(len(delimiter), 1)
- jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
- if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
+ jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+ if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
  VerbosePrintOut("File JSON Data Checksum Error with file " +
  fname + " at offset " + str(fheaderstart))
  VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
  return False
  fp.seek(len(delimiter), 1)
- newfcs = GetHeaderChecksum(
- HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs)
+ newfcs = GetHeaderChecksum(HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs, saltkey)
  HeaderOut.append(fjsoncontent)
  if(fcs != newfcs and not skipchecksum):
  VerbosePrintOut("File Header Checksum Error with file " +
@@ -4235,10 +4370,9 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
  else:
  fp.seek(fcsize, 1)
  fcontents.seek(0, 0)
- newfccs = GetFileChecksum(
- fcontents, HeaderOut[-3].lower(), False, formatspecs)
+ newfccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
  fcontents.seek(0, 0)
- if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
+ if(not CheckChecksums(fccs, newfccs) and not skipchecksum and not listonly):
  VerbosePrintOut("File Content Checksum Error with file " +
  fname + " at offset " + str(fcontentstart))
  VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4275,12 +4409,12 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
  return HeaderOut


- def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
  if(not hasattr(fp, "read")):
  return False
  delimiter = formatspecs['format_delimiter']
  fheaderstart = fp.tell()
- if(formatspecs['new_style']):
+ if(__use_new_style__):
  HeaderOut = ReadFileHeaderDataBySize(fp, delimiter)
  else:
  HeaderOut = ReadFileHeaderDataWoSize(fp, delimiter)
@@ -4298,40 +4432,51 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
  fbasedir = os.path.dirname(fname)
  flinkname = HeaderOut[6]
  fsize = int(HeaderOut[7], 16)
- fatime = int(HeaderOut[8], 16)
- fmtime = int(HeaderOut[9], 16)
- fctime = int(HeaderOut[10], 16)
- fbtime = int(HeaderOut[11], 16)
- fmode = int(HeaderOut[12], 16)
+ fblksize = int(HeaderOut[8], 16)
+ fblocks = int(HeaderOut[9], 16)
+ fflags = int(HeaderOut[10], 16)
+ fatime = int(HeaderOut[11], 16)
+ fmtime = int(HeaderOut[12], 16)
+ fctime = int(HeaderOut[13], 16)
+ fbtime = int(HeaderOut[14], 16)
+ fmode = int(HeaderOut[15], 16)
  fchmode = stat.S_IMODE(fmode)
  ftypemod = stat.S_IFMT(fmode)
- fwinattributes = int(HeaderOut[13], 16)
- fcompression = HeaderOut[14]
- fcsize = int(HeaderOut[15], 16)
- fuid = int(HeaderOut[16], 16)
- funame = HeaderOut[17]
- fgid = int(HeaderOut[18], 16)
- fgname = HeaderOut[19]
- fid = int(HeaderOut[20], 16)
- finode = int(HeaderOut[21], 16)
- flinkcount = int(HeaderOut[22], 16)
- fdev = int(HeaderOut[23], 16)
- fdev_minor = int(HeaderOut[24], 16)
- fdev_major = int(HeaderOut[25], 16)
- fseeknextfile = HeaderOut[26]
- fjsontype = HeaderOut[27]
- fjsonlen = int(HeaderOut[28], 16)
- fjsonsize = int(HeaderOut[29], 16)
- fjsonchecksumtype = HeaderOut[30]
- fjsonchecksum = HeaderOut[31]
- fextrasize = int(HeaderOut[32], 16)
- fextrafields = int(HeaderOut[33], 16)
+ fwinattributes = int(HeaderOut[16], 16)
+ fcompression = HeaderOut[17]
+ fcsize = int(HeaderOut[18], 16)
+ fuid = int(HeaderOut[19], 16)
+ funame = HeaderOut[20]
+ fgid = int(HeaderOut[21], 16)
+ fgname = HeaderOut[22]
+ fid = int(HeaderOut[23], 16)
+ finode = int(HeaderOut[24], 16)
+ flinkcount = int(HeaderOut[25], 16)
+ fdev = int(HeaderOut[26], 16)
+ frdev = int(HeaderOut[27], 16)
+ fseeknextfile = HeaderOut[28]
+ fjsontype = HeaderOut[29]
+ fjsonlen = int(HeaderOut[30], 16)
+ fjsonsize = int(HeaderOut[31], 16)
+ fjsonchecksumtype = HeaderOut[32]
+ fjsonchecksum = HeaderOut[33]
+ fextrasize = int(HeaderOut[34], 16)
+ fextrafields = int(HeaderOut[35], 16)
  fextrafieldslist = []
- extrastart = 34
+ extrastart = 36
  extraend = extrastart + fextrafields
  while(extrastart < extraend):
  fextrafieldslist.append(HeaderOut[extrastart])
  extrastart = extrastart + 1
+ fvendorfieldslist = []
+ fvendorfields = 0
+ if(len(HeaderOut)>extraend):
+ extrastart = extraend
+ extraend = len(HeaderOut) - 4
+ while(extrastart < extraend):
+ fvendorfieldslist.append(HeaderOut[extrastart])
+ extrastart = extrastart + 1
+ fvendorfields = fvendorfields + 1
  if(fextrafields==1):
  try:
  fextrafieldslist = json.loads(base64.b64decode(fextrafieldslist[0]).decode("UTF-8"))
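The per-file header grows three fields after fsize and collapses the old fdev_minor/fdev_major pair into a single frdev, which shifts every later index. The layout implied by the new indices, as a reference sketch (inferred from this diff alone, not from format documentation; indices 0-5 fall outside the hunk):

```python
# Hedged sketch: per-file header field positions in 0.26.0 as implied
# by the int(HeaderOut[n], 16) reads above. Indices 0-5 are not shown
# in the hunk and are omitted here rather than guessed.
FILE_HEADER_FIELDS_0_26 = {
    6: "flinkname", 7: "fsize",
    8: "fblksize", 9: "fblocks", 10: "fflags",   # new in 0.26.0
    11: "fatime", 12: "fmtime", 13: "fctime", 14: "fbtime", 15: "fmode",
    16: "fwinattributes", 17: "fcompression", 18: "fcsize",
    19: "fuid", 20: "funame", 21: "fgid", 22: "fgname",
    23: "fid", 24: "finode", 25: "flinkcount",
    26: "fdev", 27: "frdev",                     # frdev replaces minor/major
    28: "fseeknextfile", 29: "fjsontype", 30: "fjsonlen", 31: "fjsonsize",
    32: "fjsonchecksumtype", 33: "fjsonchecksum",
    34: "fextrasize", 35: "fextrafields",
}
# Extra fields start at index 36; any remaining entries before the final
# four (checksum types and values) are treated as vendor fields.
```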
@@ -4409,16 +4554,15 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
  pass
  fp.seek(len(delimiter), 1)
  fjend = fp.tell() - 1
- jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
- if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
+ jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+ if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
  VerbosePrintOut("File JSON Data Checksum Error with file " +
  fname + " at offset " + str(fheaderstart))
  VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
  return False
  fcs = HeaderOut[-2].lower()
  fccs = HeaderOut[-1].lower()
- newfcs = GetHeaderChecksum(
- HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs)
+ newfcs = GetHeaderChecksum(HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs, saltkey)
  if(fcs != newfcs and not skipchecksum):
  VerbosePrintOut("File Header Checksum Error with file " +
  fname + " at offset " + str(fheaderstart))
@@ -4441,10 +4585,9 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
  fp.seek(fcsize, 1)
  pyhascontents = False
  fcontents.seek(0, 0)
- newfccs = GetFileChecksum(
- fcontents, HeaderOut[-3].lower(), False, formatspecs)
+ newfccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
  fcontents.seek(0, 0)
- if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
+ if(not CheckChecksums(fccs, newfccs) and not skipchecksum and not listonly):
  VerbosePrintOut("File Content Checksum Error with file " +
  fname + " at offset " + str(fcontentstart))
  VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4461,8 +4604,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
  shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
  cfcontents.close()
  fcontents.seek(0, 0)
- fccs = GetFileChecksum(
- fcontents, HeaderOut[-3].lower(), False, formatspecs)
+ fccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
  fcontentend = fp.tell()
  if(re.findall("^\\+([0-9]+)", fseeknextfile)):
  fseeknextasnum = int(fseeknextfile.replace("+", ""))
@@ -4484,17 +4626,17 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
  fcontents.seek(0, 0)
  if(not contentasfile):
  fcontents = fcontents.read()
- outlist = {'fheadersize': fheadsize, 'fhstart': fheaderstart, 'fhend': fhend, 'ftype': ftype, 'fencoding': fencoding, 'fcencoding': fcencoding, 'fname': fname, 'fbasedir': fbasedir, 'flinkname': flinkname, 'fsize': fsize, 'fatime': fatime, 'fmtime': fmtime, 'fctime': fctime, 'fbtime': fbtime, 'fmode': fmode, 'fchmode': fchmode, 'ftypemod': ftypemod, 'fwinattributes': fwinattributes, 'fcompression': fcompression, 'fcsize': fcsize, 'fuid': fuid, 'funame': funame, 'fgid': fgid, 'fgname': fgname, 'finode': finode, 'flinkcount': flinkcount,
- 'fdev': fdev, 'fminor': fdev_minor, 'fmajor': fdev_major, 'fseeknextfile': fseeknextfile, 'fheaderchecksumtype': HeaderOut[-4], 'fjsonchecksumtype': fjsonchecksumtype, 'fcontentchecksumtype': HeaderOut[-3], 'fnumfields': fnumfields + 2, 'frawheader': HeaderOut, 'fextrafields': fextrafields, 'fextrafieldsize': fextrasize, 'fextradata': fextrafieldslist, 'fjsontype': fjsontype, 'fjsonlen': fjsonlen, 'fjsonsize': fjsonsize, 'fjsonrawdata': fjsonrawcontent, 'fjsondata': fjsoncontent, 'fjstart': fjstart, 'fjend': fjend, 'fheaderchecksum': fcs, 'fjsonchecksum': fjsonchecksum, 'fcontentchecksum': fccs, 'fhascontents': pyhascontents, 'fcontentstart': fcontentstart, 'fcontentend': fcontentend, 'fcontentasfile': contentasfile, 'fcontents': fcontents}
+ outlist = {'fheadersize': fheadsize, 'fhstart': fheaderstart, 'fhend': fhend, 'ftype': ftype, 'fencoding': fencoding, 'fcencoding': fcencoding, 'fname': fname, 'fbasedir': fbasedir, 'flinkname': flinkname, 'fsize': fsize, 'fblksize': fblksize, 'fblocks': fblocks, 'fflags': fflags, 'fatime': fatime, 'fmtime': fmtime, 'fctime': fctime, 'fbtime': fbtime, 'fmode': fmode, 'fchmode': fchmode, 'ftypemod': ftypemod, 'fwinattributes': fwinattributes, 'fcompression': fcompression, 'fcsize': fcsize, 'fuid': fuid, 'funame': funame, 'fgid': fgid, 'fgname': fgname, 'finode': finode, 'flinkcount': flinkcount,
+ 'fdev': fdev, 'frdev': frdev, 'fseeknextfile': fseeknextfile, 'fheaderchecksumtype': HeaderOut[-4], 'fjsonchecksumtype': fjsonchecksumtype, 'fcontentchecksumtype': HeaderOut[-3], 'fnumfields': fnumfields + 2, 'frawheader': HeaderOut, 'fvendorfields': fvendorfields, 'fvendordata': fvendorfieldslist, 'fextrafields': fextrafields, 'fextrafieldsize': fextrasize, 'fextradata': fextrafieldslist, 'fjsontype': fjsontype, 'fjsonlen': fjsonlen, 'fjsonsize': fjsonsize, 'fjsonrawdata': fjsonrawcontent, 'fjsondata': fjsoncontent, 'fjstart': fjstart, 'fjend': fjend, 'fheaderchecksum': fcs, 'fjsonchecksum': fjsonchecksum, 'fcontentchecksum': fccs, 'fhascontents': pyhascontents, 'fcontentstart': fcontentstart, 'fcontentend': fcontentend, 'fcontentasfile': contentasfile, 'fcontents': fcontents}
  return outlist


- def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
  if(not hasattr(fp, "read")):
  return False
  delimiter = formatspecs['format_delimiter']
  fheaderstart = fp.tell()
- if(formatspecs['new_style']):
+ if(__use_new_style__):
  HeaderOut = ReadFileHeaderDataBySize(fp, delimiter)
  else:
  HeaderOut = ReadFileHeaderDataWoSize(fp, delimiter)
@@ -4512,36 +4654,38 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
  fbasedir = os.path.dirname(fname)
  flinkname = HeaderOut[6]
  fsize = int(HeaderOut[7], 16)
- fatime = int(HeaderOut[8], 16)
- fmtime = int(HeaderOut[9], 16)
- fctime = int(HeaderOut[10], 16)
- fbtime = int(HeaderOut[11], 16)
- fmode = int(HeaderOut[12], 16)
+ fblksize = int(HeaderOut[8], 16)
+ fblocks = int(HeaderOut[9], 16)
+ fflags = int(HeaderOut[10], 16)
+ fatime = int(HeaderOut[11], 16)
+ fmtime = int(HeaderOut[12], 16)
+ fctime = int(HeaderOut[13], 16)
+ fbtime = int(HeaderOut[14], 16)
+ fmode = int(HeaderOut[15], 16)
  fchmode = stat.S_IMODE(fmode)
  ftypemod = stat.S_IFMT(fmode)
- fwinattributes = int(HeaderOut[13], 16)
- fcompression = HeaderOut[14]
- fcsize = int(HeaderOut[15], 16)
- fuid = int(HeaderOut[16], 16)
- funame = HeaderOut[17]
- fgid = int(HeaderOut[18], 16)
- fgname = HeaderOut[19]
- fid = int(HeaderOut[20], 16)
- finode = int(HeaderOut[21], 16)
- flinkcount = int(HeaderOut[22], 16)
- fdev = int(HeaderOut[23], 16)
- fdev_minor = int(HeaderOut[24], 16)
- fdev_major = int(HeaderOut[25], 16)
- fseeknextfile = HeaderOut[26]
- fjsontype = HeaderOut[27]
- fjsonlen = int(HeaderOut[28], 16)
- fjsonsize = int(HeaderOut[29], 16)
- fjsonchecksumtype = HeaderOut[30]
- fjsonchecksum = HeaderOut[31]
- fextrasize = int(HeaderOut[32], 16)
- fextrafields = int(HeaderOut[33], 16)
+ fwinattributes = int(HeaderOut[16], 16)
+ fcompression = HeaderOut[17]
+ fcsize = int(HeaderOut[18], 16)
+ fuid = int(HeaderOut[19], 16)
+ funame = HeaderOut[20]
+ fgid = int(HeaderOut[21], 16)
+ fgname = HeaderOut[22]
+ fid = int(HeaderOut[23], 16)
+ finode = int(HeaderOut[24], 16)
+ flinkcount = int(HeaderOut[25], 16)
+ fdev = int(HeaderOut[26], 16)
+ frdev = int(HeaderOut[27], 16)
+ fseeknextfile = HeaderOut[28]
+ fjsontype = HeaderOut[29]
+ fjsonlen = int(HeaderOut[30], 16)
+ fjsonsize = int(HeaderOut[31], 16)
+ fjsonchecksumtype = HeaderOut[32]
+ fjsonchecksum = HeaderOut[33]
+ fextrasize = int(HeaderOut[34], 16)
+ fextrafields = int(HeaderOut[35], 16)
  fextrafieldslist = []
- extrastart = 34
+ extrastart = 36
  extraend = extrastart + fextrafields
  while(extrastart < extraend):
  fextrafieldslist.append(HeaderOut[extrastart])
@@ -4621,16 +4765,15 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
  except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
  pass
  fp.seek(len(delimiter), 1)
- jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
- if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
+ jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+ if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
  VerbosePrintOut("File JSON Data Checksum Error with file " +
  fname + " at offset " + str(fheaderstart))
  VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
  return False
  fcs = HeaderOut[-2].lower()
  fccs = HeaderOut[-1].lower()
- newfcs = GetHeaderChecksum(
- HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs)
+ newfcs = GetHeaderChecksum(HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs, saltkey)
  if(fcs != newfcs and not skipchecksum):
  VerbosePrintOut("File Header Checksum Error with file " +
  fname + " at offset " + str(fheaderstart))
@@ -4653,9 +4796,8 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
  fp.seek(fcsize, 1)
  pyhascontents = False
  fcontents.seek(0, 0)
- newfccs = GetFileChecksum(
- fcontents, HeaderOut[-3].lower(), False, formatspecs)
- if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
+ newfccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
+ if(not CheckChecksums(fccs, newfccs) and not skipchecksum and not listonly):
  VerbosePrintOut("File Content Checksum Error with file " +
  fname + " at offset " + str(fcontentstart))
  VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4672,8 +4814,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
  shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
  cfcontents.close()
  fcontents.seek(0, 0)
- fccs = GetFileChecksum(
- fcontents, HeaderOut[-3].lower(), False, formatspecs)
+ fccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
  fcontentend = fp.tell()
  if(re.findall("^\\+([0-9]+)", fseeknextfile)):
  fseeknextasnum = int(fseeknextfile.replace("+", ""))
@@ -4695,12 +4836,12 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
  fcontents.seek(0, 0)
  if(not contentasfile):
  fcontents = fcontents.read()
- outlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize, fuid, funame, fgid, fgname, fid,
- finode, flinkcount, fdev, fdev_minor, fdev_major, fseeknextfile, fjsoncontent, fextrafieldslist, HeaderOut[-4], HeaderOut[-3], fcontents]
+ outlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize, fuid, funame, fgid, fgname, fid,
+ finode, flinkcount, fdev, frdev, fseeknextfile, fjsoncontent, fextrafieldslist, HeaderOut[-4], HeaderOut[-3], fcontents]
  return outlist


- def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
  if(not hasattr(fp, "read")):
  return False
  delimiter = formatspecs['format_delimiter']
@@ -4720,7 +4861,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
  return False
  if(formdel != formatspecs['format_delimiter']):
  return False
- if(formatspecs['new_style']):
+ if(__use_new_style__):
  inheader = ReadFileHeaderDataBySize(
  fp, formatspecs['format_delimiter'])
  else:
@@ -4728,20 +4869,42 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
  fp, formatspecs['format_delimiter'])
  fprechecksumtype = inheader[-2]
  fprechecksum = inheader[-1]
- headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
- newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
+ headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs, saltkey)
+ newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
  if(not headercheck and not skipchecksum):
  VerbosePrintOut(
  "File Header Checksum Error with file at offset " + str(0))
  VerbosePrintOut("'" + fprechecksum + "' != " +
  "'" + newfcs + "'")
  return False
- fnumfiles = int(inheader[4], 16)
+ fnumfiles = int(inheader[8], 16)
+ outfseeknextfile = inheader[9]
+ fjsonsize = int(inheader[12], 16)
+ fjsonchecksumtype = inheader[13]
+ fjsonchecksum = inheader[14]
+ fp.read(fjsonsize)
+ # Next seek directive
+ if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
+ fseeknextasnum = int(outfseeknextfile.replace("+", ""))
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 1)
+ elif(re.findall(r"^\-([0-9]+)", outfseeknextfile)):
+ fseeknextasnum = int(outfseeknextfile)
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 1)
+ elif(re.findall(r"^([0-9]+)", outfseeknextfile)):
+ fseeknextasnum = int(outfseeknextfile)
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 0)
+ else:
+ return False
  countnum = 0
  flist = []
  while(countnum < fnumfiles):
- HeaderOut = ReadFileHeaderDataWithContent(
- fp, listonly, uncompress, skipchecksum, formatspecs)
+ HeaderOut = ReadFileHeaderDataWithContent(fp, listonly, uncompress, skipchecksum, formatspecs, saltkey)
  if(len(HeaderOut) == 0):
  break
  flist.append(HeaderOut)
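Two fixes are folded into the hunk above: the published wheel reads `inheaderdata[9]` and `inheaderdata[12]`, but `inheaderdata` is never bound in this function, so these are shown as `inheader[...]`. The fseeknextfile directive grammar the readers now share ("+N" relative forward, "-N" relative backward, bare "N" absolute) in isolation:

```python
# Hedged sketch of the seek-directive handling used throughout the readers.
import io
import re

def apply_seek_directive(fp, directive):
    if re.findall(r"^\+([0-9]+)", directive):
        fp.seek(int(directive.replace("+", "")), 1)  # relative, forward
    elif re.findall(r"^\-([0-9]+)", directive):
        fp.seek(int(directive), 1)                   # relative, backward
    elif re.findall(r"^([0-9]+)", directive):
        fp.seek(int(directive), 0)                   # absolute offset
    else:
        raise ValueError("bad seek directive: %r" % (directive,))

buf = io.BytesIO(b"0123456789")
apply_seek_directive(buf, "+3")
print(buf.tell())  # 3
apply_seek_directive(buf, "-2")
print(buf.tell())  # 1
apply_seek_directive(buf, "7")
print(buf.tell())  # 7
```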
@@ -4749,7 +4912,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
  return flist


- def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
+ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None, seektoend=False):
  if(not hasattr(fp, "read")):
  return False
  delimiter = formatspecs['format_delimiter']
@@ -4769,16 +4932,16 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
  return False
  if(formdel != formatspecs['format_delimiter']):
  return False
- if(formatspecs['new_style']):
+ if(__use_new_style__):
  inheader = ReadFileHeaderDataBySize(
  fp, formatspecs['format_delimiter'])
  else:
  inheader = ReadFileHeaderDataWoSize(
  fp, formatspecs['format_delimiter'])
- fnumextrafieldsize = int(inheader[6], 16)
- fnumextrafields = int(inheader[7], 16)
+ fnumextrafieldsize = int(inheader[15], 16)
+ fnumextrafields = int(inheader[16], 16)
  fextrafieldslist = []
- extrastart = 8
+ extrastart = 17
  extraend = extrastart + fnumextrafields
  while(extrastart < extraend):
  fextrafieldslist.append(inheader[extrastart])
@@ -4792,17 +4955,126 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
  fextrafieldslist = json.loads(fextrafieldslist[0])
  except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
  pass
+ fvendorfieldslist = []
+ fvendorfields = 0
+ if(len(inheader)>extraend):
+ extrastart = extraend
+ extraend = len(inheader) - 2
+ while(extrastart < extraend):
+ fvendorfieldslist.append(inheader[extrastart])
+ extrastart = extrastart + 1
+ fvendorfields = fvendorfields + 1
  formversion = re.findall("([\\d]+)", formstring)
  fheadsize = int(inheader[0], 16)
  fnumfields = int(inheader[1], 16)
- fhencoding = inheader[2]
- fostype = inheader[3]
- fpythontype = inheader[4]
- fnumfiles = int(inheader[5], 16)
+ fheadctime = int(inheader[2], 16)
+ fheadmtime = int(inheader[3], 16)
+ fhencoding = inheader[4]
+ fostype = inheader[5]
+ fpythontype = inheader[6]
+ fprojectname = inheader[7]
+ fnumfiles = int(inheader[8], 16)
+ fseeknextfile = inheader[9]
+ fjsontype = inheader[10]
+ fjsonlen = int(inheader[11], 16)
+ fjsonsize = int(inheader[12], 16)
+ fjsonchecksumtype = inheader[13]
+ fjsonchecksum = inheader[14]
+ fjsoncontent = {}
+ fjstart = fp.tell()
+ if(fjsontype=="json"):
+ fjsoncontent = {}
+ fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+ if(fjsonsize > 0):
+ try:
+ fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+ fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
+ except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+ try:
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = json.loads(fprejsoncontent)
+ except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+ fprejsoncontent = ""
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = {}
+ else:
+ fprejsoncontent = ""
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = {}
+ elif(testyaml and fjsontype == "yaml"):
+ fjsoncontent = {}
+ fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+ if (fjsonsize > 0):
+ try:
+ # try base64 → utf-8 → YAML
+ fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+ fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+ except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
+ try:
+ # fall back to treating the bytes as plain text YAML
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+ except (UnicodeDecodeError, yaml.YAMLError):
+ # final fallback: empty
+ fprejsoncontent = ""
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = {}
+ else:
+ fprejsoncontent = ""
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = {}
+ elif(not testyaml and fjsontype == "yaml"):
+ fjsoncontent = {}
+ fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+ fprejsoncontent = ""
+ fjsonrawcontent = fprejsoncontent
+ elif(fjsontype=="list"):
+ fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+ flisttmp = MkTempFile()
+ flisttmp.write(fprejsoncontent.encode())
+ flisttmp.seek(0)
+ fjsoncontent = ReadFileHeaderData(flisttmp, fjsonlen, delimiter)
+ flisttmp.close()
+ fjsonrawcontent = fjsoncontent
+ if(fjsonlen==1):
+ try:
+ fjsonrawcontent = base64.b64decode(fjsoncontent[0]).decode("UTF-8")
+ fjsoncontent = json.loads(base64.b64decode(fjsoncontent[0]).decode("UTF-8"))
+ fjsonlen = len(fjsoncontent)
+ except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+ try:
+ fjsonrawcontent = fjsoncontent[0]
+ fjsoncontent = json.loads(fjsoncontent[0])
+ except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+ pass
+ fjend = fp.tell()
+ if(re.findall("^\\+([0-9]+)", fseeknextfile)):
+ fseeknextasnum = int(fseeknextfile.replace("+", ""))
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 1)
+ elif(re.findall("^\\-([0-9]+)", fseeknextfile)):
+ fseeknextasnum = int(fseeknextfile)
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 1)
+ elif(re.findall("^([0-9]+)", fseeknextfile)):
+ fseeknextasnum = int(fseeknextfile)
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 0)
+ else:
+ return False
+ jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+ if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
+ VerbosePrintOut("File JSON Data Checksum Error at offset " + str(fjstart))
+ VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
+ return False
  fprechecksumtype = inheader[-2]
  fprechecksum = inheader[-1]
- headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
- newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
+ headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs, saltkey)
+ newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
  if(not headercheck and not skipchecksum):
  VerbosePrintOut(
  "File Header Checksum Error with file at offset " + str(0))
@@ -4811,7 +5083,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
4811
5083
  return False
4812
5084
  formversions = re.search('(.*?)(\\d+)', formstring).groups()
4813
5085
  fcompresstype = ""
4814
- outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fimptype': fpythontype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
5086
+ outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fmtime': fheadmtime, 'fctime': fheadctime, 'fversion': formversions[1], 'fostype': fostype, 'fprojectname': fprojectname, 'fimptype': fpythontype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fseeknextfile': fseeknextfile, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'fjsonchecksumtype': fjsonchecksumtype, 'fjsontype': fjsontype, 'fjsonlen': fjsonlen, 'fjsonsize': fjsonsize, 'fjsonrawdata': fjsonrawcontent, 'fjsondata': fjsoncontent, 'fjstart': fjstart, 'fjend': fjend, 'fjsonchecksum': fjsonchecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'fvendorfields': fvendorfields, 'fvendordata': fvendorfieldslist, 'ffilelist': []}
4815
5087
  if (seekstart < 0) or (seekstart > fnumfiles):
4816
5088
  seekstart = 0
4817
5089
  if (seekend == 0) or (seekend > fnumfiles) or (seekend < seekstart):
@@ -4838,16 +5110,15 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
4838
5110
  prefjsonchecksum = preheaderdata[31]
4839
5111
  prejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
4840
5112
  fp.seek(len(delimiter), 1)
4841
- prejsonfcs = GetFileChecksum(prejsoncontent, prefjsonchecksumtype, True, formatspecs)
4842
- if(not hmac.compare_digest(prefjsonchecksum, prejsonfcs) and not skipchecksum):
5113
+ prejsonfcs = GetFileChecksum(prejsoncontent, prefjsonchecksumtype, True, formatspecs, saltkey)
5114
+ if(not CheckChecksums(prefjsonchecksum, prejsonfcs) and not skipchecksum):
4843
5115
  VerbosePrintOut("File JSON Data Checksum Error with file " +
4844
5116
  prefname + " at offset " + str(prefhstart))
4845
5117
  VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
4846
5118
  return False
4847
- prenewfcs = GetHeaderChecksum(
4848
- preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
5119
+ prenewfcs = GetHeaderChecksum(preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs, saltkey)
4849
5120
  prefcs = preheaderdata[-2]
4850
- if(not hmac.compare_digest(prefcs, prenewfcs) and not skipchecksum):
5121
+ if(not CheckChecksums(prefcs, prenewfcs) and not skipchecksum):
4851
5122
  VerbosePrintOut("File Header Checksum Error with file " +
4852
5123
  prefname + " at offset " + str(prefhstart))
4853
5124
  VerbosePrintOut("'" + prefcs + "' != " +
@@ -4862,11 +5133,10 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
4862
5133
  if(prefsize > 0):
4863
5134
  prefcontents.write(fp.read(prefsize))
4864
5135
  prefcontents.seek(0, 0)
4865
- prenewfccs = GetFileChecksum(
4866
- prefcontents, preheaderdata[-3].lower(), False, formatspecs)
5136
+ prenewfccs = GetFileChecksum(prefcontents, preheaderdata[-3].lower(), False, formatspecs, saltkey)
4867
5137
  prefccs = preheaderdata[-1]
4868
5138
  pyhascontents = True
4869
- if(not hmac.compare_digest(prefccs, prenewfccs) and not skipchecksum):
5139
+ if(not CheckChecksums(prefccs, prenewfccs) and not skipchecksum):
4870
5140
  VerbosePrintOut("File Content Checksum Error with file " +
4871
5141
  prefname + " at offset " + str(prefcontentstart))
4872
5142
  VerbosePrintOut("'" + prefccs +
@@ -4893,8 +5163,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
4893
5163
  realidnum = 0
4894
5164
  countnum = seekstart
4895
5165
  while (fp.tell() < CatSizeEnd) if seektoend else (countnum < seekend):
4896
- HeaderOut = ReadFileHeaderDataWithContentToArray(
4897
- fp, listonly, contentasfile, uncompress, skipchecksum, formatspecs)
5166
+ HeaderOut = ReadFileHeaderDataWithContentToArray(fp, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey)
4898
5167
  if(len(HeaderOut) == 0):
4899
5168
  break
4900
5169
  HeaderOut.update({'fid': realidnum, 'fidalt': realidnum})
@@ -4905,7 +5174,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
  return outlist


- def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
+ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None, seektoend=False):
  if(not hasattr(fp, "read")):
  return False
  delimiter = formatspecs['format_delimiter']
@@ -4925,16 +5194,16 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
  return False
  if(formdel != formatspecs['format_delimiter']):
  return False
- if(formatspecs['new_style']):
+ if(__use_new_style__):
  inheader = ReadFileHeaderDataBySize(
  fp, formatspecs['format_delimiter'])
  else:
  inheader = ReadFileHeaderDataWoSize(
  fp, formatspecs['format_delimiter'])
- fnumextrafieldsize = int(inheader[6], 16)
- fnumextrafields = int(inheader[7], 16)
+ fnumextrafieldsize = int(inheader[15], 16)
+ fnumextrafields = int(inheader[16], 16)
  fextrafieldslist = []
- extrastart = 8
+ extrastart = 17
  extraend = extrastart + fnumextrafields
  while(extrastart < extraend):
  fextrafieldslist.append(inheader[extrastart])
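The index shifts above (6 and 7 become 15 and 16, extrastart moves from 8 to 17) follow from the wider archive-level header this release writes. Reconstructed from the read side here and the new AppendFileHeader() below, the field order appears to be as follows; this is a reading aid inferred from the diff, not an official format spec:

# Inferred v0.26.0 archive-header field order (indices into inheader)
HEADER_FIELDS = [
    "fheadersize",        # [0] header size, hex
    "fnumfields",         # [1] field count, hex
    "fmtime",             # [2] header time, ns, hex
    "fctime",             # [3] header time, ns, hex (AppendFileHeader writes fctime twice)
    "fhencoding",         # [4] e.g. "UTF-8"
    "fostype",            # [5] platform.system()
    "fimptype",           # [6] Python implementation
    "fprojectname",       # [7] program name
    "fnumfiles",          # [8] number of entries, hex
    "fseeknextfile",      # [9] seek spec such as "+1"
    "fjsontype",          # [10] "json"
    "fjsonlen",           # [11] JSON key count, hex
    "fjsonsize",          # [12] encoded JSON byte length, hex
    "fjsonchecksumtype",  # [13]
    "fjsonchecksum",      # [14]
    "fextrafieldsize",    # [15]
    "fextrafields",       # [16] extra data begins at index 17
]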
@@ -4951,14 +5220,44 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
  formversion = re.findall("([\\d]+)", formstring)
  fheadsize = int(inheader[0], 16)
  fnumfields = int(inheader[1], 16)
- fhencoding = inheader[2]
- fostype = inheader[3]
- fpythontype = inheader[4]
- fnumfiles = int(inheader[5], 16)
+ fnumfiles = int(inheader[8], 16)
+ fseeknextfile = inheader[9]
+ fjsontype = inheader[10]
+ fjsonlen = int(inheader[11], 16)
+ fjsonsize = int(inheader[12], 16)
+ fjsonchecksumtype = inheader[13]
+ fjsonchecksum = inheader[14]
+ fjsoncontent = {}
+ fjstart = fp.tell()
+ fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+ fjend = fp.tell()
+ if(re.findall("^\\+([0-9]+)", fseeknextfile)):
+ fseeknextasnum = int(fseeknextfile.replace("+", ""))
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 1)
+ elif(re.findall("^\\-([0-9]+)", fseeknextfile)):
+ fseeknextasnum = int(fseeknextfile)
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 1)
+ elif(re.findall("^([0-9]+)", fseeknextfile)):
+ fseeknextasnum = int(fseeknextfile)
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 0)
+ else:
+ return False
+ jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+ if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
+ VerbosePrintOut("File JSON Data Checksum Error at offset " + str(fjstart))
+ VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
+ return False
  fprechecksumtype = inheader[-2]
  fprechecksum = inheader[-1]
- headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
- newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
+ headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs, saltkey)
+ newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
  if(not headercheck and not skipchecksum):
  VerbosePrintOut(
  "File Header Checksum Error with file at offset " + str(0))
@@ -4977,7 +5276,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
  il = 0
  while(il < seekstart):
  prefhstart = fp.tell()
- if(formatspecs['new_style']):
+ if(__use_new_style__):
  preheaderdata = ReadFileHeaderDataBySize(
  fp, formatspecs['format_delimiter'])
  else:
@@ -4999,16 +5298,15 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
  prefjsonchecksum = preheaderdata[31]
  prefprejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
  fp.seek(len(delimiter), 1)
- prejsonfcs = GetFileChecksum(prefprejsoncontent, prefjsonchecksumtype, True, formatspecs)
- if(not hmac.compare_digest(prefjsonchecksum, prejsonfcs) and not skipchecksum):
+ prejsonfcs = GetFileChecksum(prefprejsoncontent, prefjsonchecksumtype, True, formatspecs, saltkey)
+ if(not CheckChecksums(prefjsonchecksum, prejsonfcs) and not skipchecksum):
  VerbosePrintOut("File JSON Data Checksum Error with file " +
  prefname + " at offset " + str(prefhstart))
  VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
  return False
- prenewfcs = GetHeaderChecksum(
- preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
+ prenewfcs = GetHeaderChecksum(preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs, saltkey)
  prefcs = preheaderdata[-2]
- if(prefcs != prenewfcs and not skipchecksum):
+ if(not CheckChecksums(prefcs, prenewfcs) and not skipchecksum):
  VerbosePrintOut("File Header Checksum Error with file " +
  prefname + " at offset " + str(prefhstart))
  VerbosePrintOut("'" + prefcs + "' != " +
@@ -5025,11 +5323,10 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
  prefcontents = fp.read(prefsize)
  else:
  prefcontents = fp.read(prefcsize)
- prenewfccs = GetFileChecksum(
- prefcontents, preheaderdata[-3].lower(), False, formatspecs)
+ prenewfccs = GetFileChecksum(prefcontents, preheaderdata[-3].lower(), False, formatspecs, saltkey)
  prefccs = preheaderdata[-1]
  pyhascontents = True
- if(not hmac.compare_digest(prefcs, prenewfcs) and not skipchecksum):
+ if(not CheckChecksums(prefccs, prenewfccs) and not skipchecksum):
  VerbosePrintOut("File Content Checksum Error with file " +
  prefname + " at offset " + str(prefcontentstart))
  VerbosePrintOut("'" + prefccs +
@@ -5056,8 +5353,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
  realidnum = 0
  countnum = seekstart
  while (fp.tell() < CatSizeEnd) if seektoend else (countnum < seekend):
- HeaderOut = ReadFileHeaderDataWithContentToList(
- fp, listonly, contentasfile, uncompress, skipchecksum, formatspecs)
+ HeaderOut = ReadFileHeaderDataWithContentToList(fp, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey)
  if(len(HeaderOut) == 0):
  break
  outlist.append(HeaderOut)
@@ -5065,7 +5361,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
  realidnum = realidnum + 1
  return outlist

- def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
  if(hasattr(infile, "read") or hasattr(infile, "write")):
  fp = infile
  try:
@@ -5160,7 +5456,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
  else:
  break
  readfp.seek(oldfppos, 0)
- ArchiveList.append(ReadFileDataWithContentToArray(readfp, currentfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, seektoend))
+ ArchiveList.append(ReadFileDataWithContentToArray(readfp, currentfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, saltkey, seektoend))
  currentfilepos = readfp.tell()
  else:
  infp = UncompressFileAlt(readfp, formatspecs, currentfilepos)
@@ -5182,27 +5478,27 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
  else:
  break
  infp.seek(oldinfppos, 0)
- ArchiveList.append(ReadFileDataWithContentToArray(infp, currentinfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, seektoend))
+ ArchiveList.append(ReadFileDataWithContentToArray(infp, currentinfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, saltkey, seektoend))
  currentinfilepos = infp.tell()
  currentfilepos = readfp.tell()
  return ArchiveList


- def ReadInMultipleFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+ def ReadInMultipleFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
  if(isinstance(infile, (list, tuple, ))):
  pass
  else:
  infile = [infile]
  outretval = []
  for curfname in infile:
- outretval.append(ReadInFileWithContentToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend))
+ outretval.append(ReadInFileWithContentToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend))
  return outretval

- def ReadInMultipleFilesWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
- return ReadInMultipleFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+ def ReadInMultipleFilesWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
+ return ReadInMultipleFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)


- def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
  if(hasattr(infile, "read") or hasattr(infile, "write")):
  fp = infile
  try:
@@ -5297,7 +5593,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
  else:
  break
  readfp.seek(oldfppos, 0)
- ArchiveList.append(ReadFileDataWithContentToList(readfp, currentfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, seektoend))
+ ArchiveList.append(ReadFileDataWithContentToList(readfp, currentfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, saltkey, seektoend))
  currentfilepos = readfp.tell()
  else:
  infp = UncompressFileAlt(readfp, formatspecs, currentfilepos)
@@ -5319,24 +5615,24 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
  else:
  break
  infp.seek(oldinfppos, 0)
- ArchiveList.append(ReadFileDataWithContentToList(infp, currentinfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, seektoend))
+ ArchiveList.append(ReadFileDataWithContentToList(infp, currentinfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, saltkey, seektoend))
  currentinfilepos = infp.tell()
  currentfilepos = readfp.tell()
  return ArchiveList


- def ReadInMultipleFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+ def ReadInMultipleFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
  if(isinstance(infile, (list, tuple, ))):
  pass
  else:
  infile = [infile]
  outretval = []
  for curfname in infile:
- outretval.append(ReadInFileWithContentToList(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend))
+ outretval.append(ReadInFileWithContentToList(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend))
  return outretval

- def ReadInMultipleFilesWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
- return ReadInMultipleFileWithContentToList(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+ def ReadInMultipleFilesWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
+ return ReadInMultipleFileWithContentToList(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
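Taken together, the whole reader family now threads saltkey from the public entry points down to every checksum call. A hypothetical call; the file name and salt are invented, but the keyword names match the signatures above:

archive = ReadInFileWithContentToArray(
    "backup.cat",          # assumed archive path
    fmttype="auto",
    skipchecksum=False,
    saltkey=b"my-secret",  # must match the salt used at write time
)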
  def _field_to_bytes(x):
@@ -5390,12 +5686,7 @@ def AppendNullBytes(indata=None, delimiter=__file_format_dict__['format_delimite
  def _hex_lower(n):
  return format(int(n), 'x').lower()

- def AppendFileHeader(fp,
- numfiles,
- fencoding,
- extradata=None,
- checksumtype="md5",
- formatspecs=__file_format_dict__):
+ def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__, saltkey=None):
  """
  Build and write the archive file header.
  Returns the same file-like 'fp' on success, or False on failure.
@@ -5443,24 +5734,47 @@ def AppendFileHeader(fp,
  # 4) core header fields before checksum:
  # tmpoutlenhex, fencoding, platform.system(), fnumfiles
  fnumfiles_hex = _hex_lower(numfiles)
-
+ fjsontype = "json"
+ if(len(jsondata) > 0):
+ try:
+ fjsoncontent = json.dumps(jsondata, separators=(',', ':')).encode("UTF-8")
+ except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+ fjsoncontent = "".encode("UTF-8")
+ else:
+ fjsoncontent = "".encode("UTF-8")
+ fjsonsize = format(len(fjsoncontent), 'x').lower()
+ fjsonlen = format(len(jsondata), 'x').lower()
+ tmpoutlist = []
+ tmpoutlist.append(fjsontype)
+ tmpoutlist.append(fjsonlen)
+ tmpoutlist.append(fjsonsize)
+ if(len(jsondata) > 0):
+ tmpoutlist.append(checksumtype[1])
+ tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[1], True, formatspecs, saltkey))
+ else:
+ tmpoutlist.append("none")
+ tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs, saltkey))
  # Preserve your original "tmpoutlen" computation exactly
- tmpoutlist = [extrasizelen, extrafields] # you used this as a separate list
- tmpoutlen = 4 + len(tmpoutlist) + len(xlist) + 2
+ tmpoutlist.append(extrasizelen)
+ tmpoutlist.append(extrafields)
+ tmpoutlen = 10 + len(tmpoutlist) + len(xlist)
  tmpoutlenhex = _hex_lower(tmpoutlen)
-
+ if(hasattr(time, "time_ns")):
+ fctime = format(int(time.time_ns()), 'x').lower()
+ else:
+ fctime = format(int(to_ns(time.time())), 'x').lower()
  # Serialize the first group
- fnumfilesa = AppendNullBytes([tmpoutlenhex, fencoding, platform.system(), py_implementation, fnumfiles_hex], delimiter)
+ fnumfilesa = AppendNullBytes([tmpoutlenhex, fctime, fctime, fencoding, platform.system(), py_implementation, __program_name__, fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
  # Append tmpoutlist
  fnumfilesa += AppendNullBytes(tmpoutlist, delimiter)
  # Append extradata items if any
  if xlist:
  fnumfilesa += AppendNullBytes(xlist, delimiter)
  # Append checksum type
- fnumfilesa += AppendNullByte(checksumtype, delimiter)
+ fnumfilesa += AppendNullByte(checksumtype[0], delimiter)

  # 5) inner checksum over fnumfilesa
- outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype, True, formatspecs)
+ outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs, saltkey)
  tmpfileoutstr = fnumfilesa + AppendNullByte(outfileheadercshex, delimiter)

  # 6) size of (tmpfileoutstr) excluding one delimiter, per your original math
@@ -5473,7 +5787,7 @@ def AppendFileHeader(fp,
  + fnumfilesa
  )

- outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype, True, formatspecs)
+ outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs, saltkey)
  fnumfilesa += AppendNullByte(outfileheadercshex, delimiter)

  # 8) final total size field (again per your original logic)
@@ -5481,10 +5795,11 @@ def AppendFileHeader(fp,
  formheaersizestr = AppendNullByte(formheaersize, delimiter) # computed but not appended in original
  # Note: you computed 'formheaersizestr' but didn’t append it afterward in the original either.
  # Keeping that behavior for compatibility.
-
+ nullstrecd = formatspecs['format_delimiter'].encode('UTF-8')
+ outfileout = fnumfilesa + fjsoncontent + nullstrecd
  # 9) write and try to sync
  try:
- fp.write(fnumfilesa)
+ fp.write(outfileout)
  except (OSError, io.UnsupportedOperation):
  return False
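With these changes the header writer also embeds a JSON metadata blob (fjsoncontent) directly after the serialized fields. A sketch of a call under the new signature; the in-memory target and the metadata dict are invented for illustration:

import io

fp = io.BytesIO()
AppendFileHeader(
    fp,
    0,                    # numfiles: an empty archive
    "UTF-8",              # fencoding
    [],                   # extradata
    {"creator": "example", "run": 1},  # jsondata, stored after the header
    ["md5", "md5"],       # [header checksum type, JSON checksum type]
    saltkey=None,         # optional HMAC salt
)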
@@ -5505,21 +5820,21 @@ def AppendFileHeader(fp,
  return fp


- def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype="md5", formatspecs=__file_format_multi_dict__):
+ def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None):
  if(IsNestedDict(formatspecs) and fmttype in formatspecs):
  formatspecs = formatspecs[fmttype]
  elif(IsNestedDict(formatspecs) and fmttype not in formatspecs):
  fmttype = __file_format_default__
  formatspecs = formatspecs[fmttype]
- AppendFileHeader(fp, 0, "UTF-8", [], checksumtype, formatspecs)
+ AppendFileHeader(fp, 0, "UTF-8", [], {}, checksumtype, formatspecs, saltkey)
  return fp


- def MakeEmptyCatFilePointer(fp, fmttype=__file_format_default__, checksumtype="md5", formatspecs=__file_format_multi_dict__):
- return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)
+ def MakeEmptyCatFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None):
+ return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs, saltkey)


- def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="md5", formatspecs=__file_format_multi_dict__, returnfp=False):
+ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -5549,6 +5864,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
  fp = MkTempFile()
  elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
  fp = outfile
+ return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs, saltkey)
  elif(re.findall(__upload_proto_support__, outfile)):
  fp = MkTempFile()
  else:
@@ -5560,7 +5876,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
  except PermissionError:
  return False
- AppendFileHeader(fp, 0, "UTF-8", [], checksumtype, formatspecs)
+ AppendFileHeader(fp, 0, "UTF-8", [], {}, checksumtype, formatspecs, saltkey)
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
  fp = CompressOpenFileAlt(
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -5591,11 +5907,11 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
  return True


- def MakeEmptyCatFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="md5", formatspecs=__file_format_dict__, returnfp=False):
- return MakeEmptyFile(outfile, "auto", compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, returnfp)
+ def MakeEmptyCatFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, returnfp=False):
+ return MakeEmptyFile(outfile, "auto", compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, returnfp)


- def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, filecontent="", checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__):
+ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, filecontent="", checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None):
  if(not hasattr(fp, "write")):
  return False
  if (isinstance(extradata, dict) or IsNestedDictAlt(extradata)) and len(extradata) > 0:
@@ -5627,10 +5943,10 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
  tmpoutlist.append(fjsonsize)
  if(len(jsondata) > 0):
  tmpoutlist.append(checksumtype[2])
- tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[2], True, formatspecs))
+ tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[2], True, formatspecs, saltkey))
  else:
  tmpoutlist.append("none")
- tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs))
+ tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs, saltkey))
  tmpoutlist.append(extrasizelen)
  tmpoutlist.append(extrafields)
  outfileoutstr = AppendNullBytes(
@@ -5645,22 +5961,18 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
  outfileoutstr = outfileoutstr + \
  AppendNullBytes(checksumlist, formatspecs['format_delimiter'])
  nullstrecd = formatspecs['format_delimiter'].encode('UTF-8')
- outfileheadercshex = GetFileChecksum(
- outfileoutstr, checksumtype[0], True, formatspecs)
+ outfileheadercshex = GetFileChecksum(outfileoutstr, checksumtype[0], True, formatspecs, saltkey)
  if(len(filecontent) == 0):
- outfilecontentcshex = GetFileChecksum(
- filecontent, "none", False, formatspecs)
+ outfilecontentcshex = GetFileChecksum(filecontent, "none", False, formatspecs, saltkey)
  else:
- outfilecontentcshex = GetFileChecksum(
- filecontent, checksumtype[1], False, formatspecs)
+ outfilecontentcshex = GetFileChecksum(filecontent, checksumtype[1], False, formatspecs, saltkey)
  tmpfileoutstr = outfileoutstr + \
  AppendNullBytes([outfileheadercshex, outfilecontentcshex],
  formatspecs['format_delimiter'])
  formheaersize = format(int(len(tmpfileoutstr) - len(formatspecs['format_delimiter'])), 'x').lower()
  outfileoutstr = AppendNullByte(
  formheaersize, formatspecs['format_delimiter']) + outfileoutstr
- outfileheadercshex = GetFileChecksum(
- outfileoutstr, checksumtype[0], True, formatspecs)
+ outfileheadercshex = GetFileChecksum(outfileoutstr, checksumtype[0], True, formatspecs, saltkey)
  outfileoutstr = outfileoutstr + \
  AppendNullBytes([outfileheadercshex, outfilecontentcshex],
  formatspecs['format_delimiter'])
@@ -5678,14 +5990,11 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
  pass
  return fp

- def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  if(not hasattr(fp, "write")):
  return False
- advancedlist = formatspecs['use_advanced_list']
- altinode = formatspecs['use_alt_inode']
- if(verbose):
- logging.basicConfig(format="%(message)s",
- stream=PY_STDOUT_TEXT, level=logging.DEBUG)
+ advancedlist = __use_advanced_list__
+ altinode = __use_alt_inode__
  infilelist = []
  if(infiles == "-"):
  for line in PY_STDIN_TEXT:
@@ -5727,7 +6036,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  inodetoforminode = {}
  numfiles = int(len(GetDirList))
  fnumfiles = format(numfiles, 'x').lower()
- AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
@@ -5756,14 +6065,24 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  FullSizeFilesAlt += fstatinfo.st_rsize
  except AttributeError:
  FullSizeFilesAlt += fstatinfo.st_size
+ fblksize = 0
+ if(hasattr(fstatinfo, "st_blksize")):
+ fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
+ fblocks = 0
+ if(hasattr(fstatinfo, "st_blocks")):
+ fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
+ fflags = 0
+ if(hasattr(fstatinfo, "st_flags")):
+ fflags = format(int(fstatinfo.st_flags), 'x').lower()
  ftype = 0
- if(hasattr(os.path, "isjunction") and os.path.isjunction(fname)):
+ if(not followlink and hasattr(os.path, "isjunction") and os.path.isjunction(fname)):
  ftype = 13
- elif(hasattr(fstatinfo, "st_blocks") and fstatinfo.st_blocks * 512 < fstatinfo.st_size):
- ftype = 12
  elif(stat.S_ISREG(fpremode)):
- ftype = 0
- elif(stat.S_ISLNK(fpremode)):
+ if(hasattr(fstatinfo, "st_blocks") and fstatinfo.st_size > 0 and fstatinfo.st_blocks * 512 < fstatinfo.st_size):
+ ftype = 12
+ else:
+ ftype = 0
+ elif(not followlink and stat.S_ISLNK(fpremode)):
  ftype = 2
  elif(stat.S_ISCHR(fpremode)):
  ftype = 3
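The reworked branch above only flags type 12 (sparse) for regular, non-empty files whose allocated blocks cover less than their logical size, instead of testing st_blocks before the S_ISREG check. The test in isolation:

import os
import stat

def looks_sparse(path):
    st = os.stat(path)
    if not stat.S_ISREG(st.st_mode) or st.st_size == 0:
        return False
    # st_blocks counts 512-byte units on POSIX systems
    return hasattr(st, "st_blocks") and st.st_blocks * 512 < st.st_size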
@@ -5785,43 +6104,42 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  ftype = 0
  flinkname = ""
  fcurfid = format(int(curfid), 'x').lower()
- if not followlink and finode != 0:
+ if(not followlink and finode != 0):
  unique_id = (fstatinfo.st_dev, finode)
- if ftype != 1:
- if unique_id in inodelist:
+ if(ftype != 1):
+ if(unique_id in inodetofile):
  # Hard link detected
  ftype = 1
  flinkname = inodetofile[unique_id]
- if altinode:
- fcurinode = format(int(unique_id[1]), 'x').lower()
- else:
- fcurinode = format(int(inodetoforminode[unique_id]), 'x').lower()
  else:
- # New inode
- inodelist.append(unique_id)
+ # First time seeing this inode
  inodetofile[unique_id] = fname
+ if(unique_id not in inodetoforminode):
  inodetoforminode[unique_id] = curinode
- if altinode:
- fcurinode = format(int(unique_id[1]), 'x').lower()
- else:
- fcurinode = format(int(curinode), 'x').lower()
- curinode += 1
+ curinode = curinode + 1
+ if(altinode):
+ # altinode == True → use real inode number
+ fcurinode = format(int(unique_id[1]), 'x').lower()
+ else:
+ # altinode == False → use synthetic inode id
+ fcurinode = format(int(inodetoforminode[unique_id]), 'x').lower()
  else:
  # Handle cases where inodes are not supported or symlinks are followed
  fcurinode = format(int(curinode), 'x').lower()
- curinode += 1
+ curinode = curinode + 1
  curfid = curfid + 1
  if(ftype == 2):
  flinkname = os.readlink(fname)
- if(not os.path.exists(flinkname)):
+ if(not os.path.exists(fname)):
  return False
  try:
  fdev = fstatinfo.st_rdev
  except AttributeError:
  fdev = 0
- getfdev = GetDevMajorMinor(fdev)
- fdev_minor = getfdev[0]
- fdev_major = getfdev[1]
+ try:
+ frdev = fstatinfo.st_rdev
+ except AttributeError:
+ frdev = 0
  # Types that should be considered zero-length in the archive context:
  zero_length_types = {1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 13}
  # Types that have actual data to read:
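The rewritten bookkeeping keys hard links on (st_dev, st_ino): the first path seen for a pair is remembered, and any later path with the same pair is stored as a type-1 hard link back to it, using either the real inode number (altinode) or a synthetic counter. A condensed restatement with illustrative names:

import os

def classify(path, inodetofile, inodetoforminode, next_synth_id):
    st = os.lstat(path)
    key = (st.st_dev, st.st_ino)
    if key in inodetofile:
        return ("hardlink", inodetofile[key], next_synth_id)
    inodetofile[key] = path
    inodetoforminode[key] = next_synth_id  # synthetic id used when altinode is off
    return ("regular", None, next_synth_id + 1)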
@@ -5832,13 +6150,28 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  fsize = format(int(fstatinfo.st_size), 'x').lower()
  else:
  fsize = format(int(fstatinfo.st_size), 'x').lower()
- fatime = format(int(fstatinfo.st_atime), 'x').lower()
- fmtime = format(int(fstatinfo.st_mtime), 'x').lower()
- fctime = format(int(fstatinfo.st_ctime), 'x').lower()
+ if(hasattr(fstatinfo, "st_atime_ns")):
+ fatime = format(int(fstatinfo.st_atime_ns), 'x').lower()
+ else:
+ fatime = format(int(to_ns(fstatinfo.st_atime)), 'x').lower()
+ if(hasattr(fstatinfo, "st_mtime_ns")):
+ fmtime = format(int(fstatinfo.st_mtime_ns), 'x').lower()
+ else:
+ fmtime = format(int(to_ns(fstatinfo.st_mtime)), 'x').lower()
+ if(hasattr(fstatinfo, "st_ctime_ns")):
+ fctime = format(int(fstatinfo.st_ctime_ns), 'x').lower()
+ else:
+ fctime = format(int(to_ns(fstatinfo.st_ctime)), 'x').lower()
  if(hasattr(fstatinfo, "st_birthtime")):
- fbtime = format(int(fstatinfo.st_birthtime), 'x').lower()
+ if(hasattr(fstatinfo, "st_birthtime_ns")):
+ fbtime = format(int(fstatinfo.st_birthtime_ns), 'x').lower()
+ else:
+ fbtime = format(int(to_ns(fstatinfo.st_birthtime)), 'x').lower()
  else:
- fbtime = format(int(fstatinfo.st_ctime), 'x').lower()
+ if(hasattr(fstatinfo, "st_ctime_ns")):
+ fbtime = format(int(fstatinfo.st_ctime_ns), 'x').lower()
+ else:
+ fbtime = format(int(to_ns(fstatinfo.st_ctime)), 'x').lower()
  fmode = format(int(fstatinfo.st_mode), 'x').lower()
  fchmode = format(int(stat.S_IMODE(fstatinfo.st_mode)), 'x').lower()
  ftypemod = format(int(stat.S_IFMT(fstatinfo.st_mode)), 'x').lower()
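All four timestamps are now stored in nanoseconds, preferring the *_ns stat fields and falling back to a to_ns() conversion of the float seconds. A plausible shape for that helper, assuming simple rounding; the real implementation is defined elsewhere in this module:

def to_ns(seconds):
    # Scale second-resolution timestamps up to nanoseconds.
    return int(round(float(seconds) * 1e9))

to_ns(1700000000.5)  # -> 1700000000500000000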
@@ -5865,8 +6198,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  except ImportError:
  fgname = ""
  fdev = format(int(fdev), 'x').lower()
- fdev_minor = format(int(fdev_minor), 'x').lower()
- fdev_major = format(int(fdev_major), 'x').lower()
+ frdev = format(int(frdev), 'x').lower()
  finode = format(int(finode), 'x').lower()
  flinkcount = format(int(flinkcount), 'x').lower()
  if(hasattr(fstatinfo, "st_file_attributes")):
@@ -5927,10 +6259,9 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  fcompression = curcompression
  fcontents.close()
  fcontents = cfcontents
- elif followlink and (ftype == 1 or ftype == 2):
- if(not os.path.exists(flinkname)):
+ elif followlink and (ftype == 2 or ftype in data_types):
+ if(not os.path.exists(fname)):
  return False
- flstatinfo = os.stat(flinkname)
  with open(flinkname, "rb") as fpc:
  shutil.copyfileobj(fpc, fcontents, length=__filebuff_size__)
  typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
@@ -5981,10 +6312,9 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  fcompression = ""
  fcontents.seek(0, 0)
  ftypehex = format(ftype, 'x').lower()
- tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
- fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
- AppendFileHeaderWithContent(
- fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
+ tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
+ AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
@@ -5993,12 +6323,9 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  pass
  return fp

- def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  if(not hasattr(fp, "write")):
  return False
- if(verbose):
- logging.basicConfig(format="%(message)s",
- stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  curinode = 0
  curfid = 0
  inodelist = []
@@ -6062,7 +6389,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  except FileNotFoundError:
  return False
  numfiles = int(len(tarfp.getmembers()))
- AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
@@ -6080,6 +6407,15 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  fpremode = member.mode
  ffullmode = member.mode
  flinkcount = 0
+ fblksize = 0
+ if(hasattr(member, "st_blksize")):
+ fblksize = format(int(member.st_blksize), 'x').lower()
+ fblocks = 0
+ if(hasattr(member, "st_blocks")):
+ fblocks = format(int(member.st_blocks), 'x').lower()
+ fflags = 0
+ if(hasattr(member, "st_flags")):
+ fflags = format(int(member.st_flags), 'x').lower()
  ftype = 0
  if(member.isreg()):
  ffullmode = member.mode + stat.S_IFREG
@@ -6117,12 +6453,11 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  curfid = curfid + 1
  if(ftype == 2):
  flinkname = member.linkname
+ fdev = format(int("0"), 'x').lower()
  try:
- fdev = format(int(os.makedev(member.devmajor, member.devminor)), 'x').lower()
+ frdev = format(int(os.makedev(member.devmajor, member.devminor)), 'x').lower()
  except AttributeError:
- fdev = format(int(MakeDevAlt(member.devmajor, member.devminor)), 'x').lower()
- fdev_minor = format(int(member.devminor), 'x').lower()
- fdev_major = format(int(member.devmajor), 'x').lower()
+ frdev = format(int(MakeDevAlt(member.devmajor, member.devminor)), 'x').lower()
  # Types that should be considered zero-length in the archive context:
  zero_length_types = {1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 13}
  # Types that have actual data to read:
@@ -6133,10 +6468,10 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  fsize = format(int(member.size), 'x').lower()
  else:
  fsize = format(int(member.size), 'x').lower()
- fatime = format(int(member.mtime), 'x').lower()
- fmtime = format(int(member.mtime), 'x').lower()
- fctime = format(int(member.mtime), 'x').lower()
- fbtime = format(int(member.mtime), 'x').lower()
+ fatime = format(int(to_ns(member.mtime)), 'x').lower()
+ fmtime = format(int(to_ns(member.mtime)), 'x').lower()
+ fctime = format(int(to_ns(member.mtime)), 'x').lower()
+ fbtime = format(int(to_ns(member.mtime)), 'x').lower()
  fmode = format(int(ffullmode), 'x').lower()
  fchmode = format(int(stat.S_IMODE(ffullmode)), 'x').lower()
  ftypemod = format(int(stat.S_IFMT(ffullmode)), 'x').lower()
@@ -6203,10 +6538,9 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  fcompression = ""
  fcontents.seek(0, 0)
  ftypehex = format(ftype, 'x').lower()
- tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
- fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
- AppendFileHeaderWithContent(
- fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
+ tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
+ AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
@@ -6216,12 +6550,9 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  fcontents.close()
  return fp

- def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  if(not hasattr(fp, "write")):
  return False
- if(verbose):
- logging.basicConfig(format="%(message)s",
- stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  curinode = 0
  curfid = 0
  inodelist = []
@@ -6255,7 +6586,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  if(ziptest):
  VerbosePrintOut("Bad file found!")
  numfiles = int(len(zipfp.infolist()))
- AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
@@ -6276,6 +6607,15 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  else:
  fpremode = int(stat.S_IFREG | 0x1b6)
  flinkcount = 0
+ fblksize = 0
+ if(hasattr(member, "st_blksize")):
+ fblksize = format(int(member.st_blksize), 'x').lower()
+ fblocks = 0
+ if(hasattr(member, "st_blocks")):
+ fblocks = format(int(member.st_blocks), 'x').lower()
+ fflags = 0
+ if(hasattr(member, "st_flags")):
+ fflags = format(int(member.st_flags), 'x').lower()
  ftype = 0
  if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
  ftype = 5
@@ -6286,8 +6626,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  fcurinode = format(int(curfid), 'x').lower()
  curfid = curfid + 1
  fdev = format(int(0), 'x').lower()
- fdev_minor = format(int(0), 'x').lower()
- fdev_major = format(int(0), 'x').lower()
+ frdev = format(int(0), 'x').lower()
  if(ftype == 5):
  fsize = format(int("0"), 'x').lower()
  elif(ftype == 0):
@@ -6295,13 +6634,13 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  else:
  fsize = format(int(member.file_size), 'x').lower()
  fatime = format(
- int(time.mktime(member.date_time + (0, 0, -1))), 'x').lower()
+ int(to_ns(time.mktime(member.date_time + (0, 0, -1)))), 'x').lower()
  fmtime = format(
- int(time.mktime(member.date_time + (0, 0, -1))), 'x').lower()
+ int(to_ns(time.mktime(member.date_time + (0, 0, -1)))), 'x').lower()
  fctime = format(
- int(time.mktime(member.date_time + (0, 0, -1))), 'x').lower()
+ int(to_ns(time.mktime(member.date_time + (0, 0, -1)))), 'x').lower()
  fbtime = format(
- int(time.mktime(member.date_time + (0, 0, -1))), 'x').lower()
+ int(to_ns(time.mktime(member.date_time + (0, 0, -1)))), 'x').lower()
  if(zipinfo.create_system == 0 or zipinfo.create_system == 10):
  fwinattributes = format(int(zipinfo.external_attr & 0xFFFF), 'x').lower()
  if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
@@ -6417,10 +6756,9 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  fcompression = ""
  fcontents.seek(0, 0)
  ftypehex = format(ftype, 'x').lower()
- tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
- fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
- AppendFileHeaderWithContent(
- fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
+ tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
+ AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
@@ -6431,16 +6769,12 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  return fp

  if(not rarfile_support):
- def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+ def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  return False
-
- if(rarfile_support):
- def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+ else:
+ def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  if(not hasattr(fp, "write")):
  return False
- if(verbose):
- logging.basicConfig(format="%(message)s",
- stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  curinode = 0
  curfid = 0
  inodelist = []
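The rarfile guard above is the module's optional-dependency pattern: when the library is missing, a stub with the identical signature is defined that simply returns False, so callers fail gracefully instead of hitting a NameError. In miniature; the function name here is illustrative:

try:
    import rarfile  # optional third-party extra
    rarfile_support = True
except ImportError:
    rarfile_support = False

if not rarfile_support:
    def append_from_rar(infile, fp, **kwargs):
        return False  # graceful no-op when rarfile is unavailable
else:
    def append_from_rar(infile, fp, **kwargs):
        with rarfile.RarFile(infile) as rarfp:
            return [m.filename for m in rarfp.infolist()]  # toy body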
@@ -6456,7 +6790,7 @@ if(rarfile_support):
  if(rartest):
  VerbosePrintOut("Bad file found!")
  numfiles = int(len(rarfp.infolist()))
- AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
@@ -6512,6 +6846,15 @@ if(rarfile_support):
  fcompression = ""
  fcsize = format(int(0), 'x').lower()
  flinkcount = 0
+ fblksize = 0
+ if(hasattr(member, "st_blksize")):
+ fblksize = format(int(member.st_blksize), 'x').lower()
+ fblocks = 0
+ if(hasattr(member, "st_blocks")):
+ fblocks = format(int(member.st_blocks), 'x').lower()
+ fflags = 0
+ if(hasattr(member, "st_flags")):
+ fflags = format(int(member.st_flags), 'x').lower()
  ftype = 0
  if(member.is_file()):
  ftype = 0
@@ -6526,8 +6869,7 @@ if(rarfile_support):
  fcurinode = format(int(curfid), 'x').lower()
  curfid = curfid + 1
  fdev = format(int(0), 'x').lower()
- fdev_minor = format(int(0), 'x').lower()
- fdev_major = format(int(0), 'x').lower()
+ frdev = format(int(0), 'x').lower()
  if(ftype == 5):
  fsize = format(int("0"), 'x').lower()
  elif(ftype == 0):
@@ -6536,20 +6878,20 @@ if(rarfile_support):
  fsize = format(int(member.file_size), 'x').lower()
  try:
  if(member.atime):
- fatime = format(int(member.atime.timestamp()), 'x').lower()
+ fatime = format(int(to_ns(member.atime.timestamp())), 'x').lower()
  else:
- fatime = format(int(member.mtime.timestamp()), 'x').lower()
+ fatime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
  except AttributeError:
- fatime = format(int(member.mtime.timestamp()), 'x').lower()
- fmtime = format(int(member.mtime.timestamp()), 'x').lower()
+ fatime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
+ fmtime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
  try:
  if(member.ctime):
- fctime = format(int(member.ctime.timestamp()), 'x').lower()
+ fctime = format(int(to_ns(member.ctime.timestamp())), 'x').lower()
  else:
- fctime = format(int(member.mtime.timestamp()), 'x').lower()
+ fctime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
  except AttributeError:
- fctime = format(int(member.mtime.timestamp()), 'x').lower()
- fbtime = format(int(member.mtime.timestamp()), 'x').lower()
+ fctime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
+ fbtime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
  if(is_unix and member.external_attr != 0):
  fmode = format(int(member.external_attr), 'x').lower()
  fchmode = format(
@@ -6651,10 +6993,9 @@ if(rarfile_support):
  fcompression = ""
  fcontents.seek(0, 0)
  ftypehex = format(ftype, 'x').lower()
- tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
- fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
- AppendFileHeaderWithContent(
- fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
+ tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
+ AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
@@ -6665,16 +7006,12 @@ if(rarfile_support):
  return fp

  if(not py7zr_support):
- def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+ def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  return False
-
- if(py7zr_support):
- def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+ else:
+ def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  if(not hasattr(fp, "write")):
  return False
- if(verbose):
- logging.basicConfig(format="%(message)s",
- stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  formver = formatspecs['format_ver']
  fileheaderver = str(int(formver.replace(".", "")))
  curinode = 0
@@ -6692,7 +7029,7 @@ if(py7zr_support):
  if(sztestalt):
  VerbosePrintOut("Bad file found!")
  numfiles = int(len(szpfp.list()))
- AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
@@ -6715,6 +7052,15 @@ if(py7zr_support):
  fcompression = ""
  fcsize = format(int(0), 'x').lower()
  flinkcount = 0
+ fblksize = 0
+ if(hasattr(member, "st_blksize")):
+ fblksize = format(int(member.st_blksize), 'x').lower()
+ fblocks = 0
+ if(hasattr(member, "st_blocks")):
+ fblocks = format(int(member.st_blocks), 'x').lower()
+ fflags = 0
+ if(hasattr(member, "st_flags")):
+ fflags = format(int(member.st_flags), 'x').lower()
  ftype = 0
  if(member.is_directory):
  ftype = 5
@@ -6725,14 +7071,13 @@ if(py7zr_support):
6725
7071
  fcurinode = format(int(curfid), 'x').lower()
6726
7072
  curfid = curfid + 1
6727
7073
  fdev = format(int(0), 'x').lower()
6728
- fdev_minor = format(int(0), 'x').lower()
6729
- fdev_major = format(int(0), 'x').lower()
7074
+ frdev = format(int(0), 'x').lower()
6730
7075
  if(ftype == 5):
6731
7076
  fsize = format(int("0"), 'x').lower()
6732
- fatime = format(int(member.creationtime.timestamp()), 'x').lower()
6733
- fmtime = format(int(member.creationtime.timestamp()), 'x').lower()
6734
- fctime = format(int(member.creationtime.timestamp()), 'x').lower()
6735
- fbtime = format(int(member.creationtime.timestamp()), 'x').lower()
7077
+ fatime = format(int(to_ns(member.creationtime.timestamp())), 'x').lower()
7078
+ fmtime = format(int(to_ns(member.creationtime.timestamp())), 'x').lower()
7079
+ fctime = format(int(to_ns(member.creationtime.timestamp())), 'x').lower()
7080
+ fbtime = format(int(to_ns(member.creationtime.timestamp())), 'x').lower()
6736
7081
  if(member.is_directory):
6737
7082
  fmode = format(int(stat.S_IFDIR | 0x1ff), 'x').lower()
6738
7083
  fchmode = format(
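Timestamps switch from whole seconds to nanoseconds via the to_ns helper, whose definition sits outside this hunk. A plausible equivalent, assuming it scales POSIX seconds to integer nanoseconds:

    def to_ns(seconds):
        # Assumed behavior of pycatfile's to_ns helper.
        return int(seconds * 1000000000)

    # e.g. fmtime = format(to_ns(member.creationtime.timestamp()), 'x').lower()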
@@ -6825,10 +7170,9 @@ if(py7zr_support):
6825
7170
  fcompression = ""
6826
7171
  fcontents.seek(0, 0)
6827
7172
  ftypehex = format(ftype, 'x').lower()
6828
- tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
6829
- fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
6830
- AppendFileHeaderWithContent(
6831
- fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
7173
+ tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
7174
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
7175
+ AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
6832
7176
  try:
6833
7177
  fp.flush()
6834
7178
  if(hasattr(os, "sync")):
@@ -6838,11 +7182,9 @@ if(py7zr_support):
6838
7182
  fcontents.close()
6839
7183
  return fp
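With the three stat fields added and the fdev_minor/fdev_major pair collapsed into a single frdev, the per-file header list passed to AppendFileHeaderWithContent now has this shape (reconstructed from the hunk above):

    # tmpoutlist field order in 0.26.0:
    # [ftypehex, fencoding, fcencoding, fname, flinkname, fsize,
    #  fblksize, fblocks, fflags,        # new in 0.26.0
    #  fatime, fmtime, fctime, fbtime,   # now nanosecond values
    #  fmode, fwinattributes, fcompression, fcsize,
    #  fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount,
    #  fdev, frdev,                      # frdev replaces fdev_minor/fdev_major
    #  "+" + str(len(formatspecs['format_delimiter']))]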
6840
7184
 
6841
- def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
7185
+ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
6842
7186
  if(not hasattr(fp, "write")):
6843
7187
  return False
6844
- if(verbose):
6845
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
6846
7188
  GetDirList = inlist
6847
7189
  if(not GetDirList):
6848
7190
  return False
@@ -6854,7 +7196,7 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsond
6854
7196
  inodetoforminode = {}
6855
7197
  numfiles = int(len(GetDirList))
6856
7198
  fnumfiles = format(numfiles, 'x').lower()
6857
- AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
7199
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
6858
7200
  for curfname in GetDirList:
6859
7201
  ftype = format(curfname[0], 'x').lower()
6860
7202
  fencoding = curfname[1]
@@ -6868,44 +7210,45 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsond
6868
7210
  fbasedir = os.path.dirname(fname)
6869
7211
  flinkname = curfname[4]
6870
7212
  fsize = format(curfname[5], 'x').lower()
6871
- fatime = format(curfname[6], 'x').lower()
6872
- fmtime = format(curfname[7], 'x').lower()
6873
- fctime = format(curfname[8], 'x').lower()
6874
- fbtime = format(curfname[9], 'x').lower()
6875
- fmode = format(curfname[10], 'x').lower()
6876
- fwinattributes = format(curfname[11], 'x').lower()
6877
- fcompression = curfname[12]
6878
- fcsize = format(curfname[13], 'x').lower()
6879
- fuid = format(curfname[14], 'x').lower()
6880
- funame = curfname[15]
6881
- fgid = format(curfname[16], 'x').lower()
6882
- fgname = curfname[17]
6883
- fid = format(curfname[18], 'x').lower()
6884
- finode = format(curfname[19], 'x').lower()
6885
- flinkcount = format(curfname[20], 'x').lower()
6886
- fdev = format(curfname[21], 'x').lower()
6887
- fdev_minor = format(curfname[22], 'x').lower()
6888
- fdev_major = format(curfname[23], 'x').lower()
6889
- fseeknextfile = curfname[24]
6890
- extradata = curfname[25]
6891
- fheaderchecksumtype = curfname[26]
6892
- fcontentchecksumtype = curfname[27]
6893
- fcontents = curfname[28]
7213
+ fblksize = format(curfname[6], 'x').lower()
7214
+ fblocks = format(curfname[7], 'x').lower()
7215
+ fflags = format(curfname[8], 'x').lower()
7216
+ fatime = format(curfname[9], 'x').lower()
7217
+ fmtime = format(curfname[10], 'x').lower()
7218
+ fctime = format(curfname[11], 'x').lower()
7219
+ fbtime = format(curfname[12], 'x').lower()
7220
+ fmode = format(curfname[13], 'x').lower()
7221
+ fwinattributes = format(curfname[14], 'x').lower()
7222
+ fcompression = curfname[15]
7223
+ fcsize = format(curfname[16], 'x').lower()
7224
+ fuid = format(curfname[17], 'x').lower()
7225
+ funame = curfname[18]
7226
+ fgid = format(curfname[19], 'x').lower()
7227
+ fgname = curfname[20]
7228
+ fid = format(curfname[21], 'x').lower()
7229
+ finode = format(curfname[22], 'x').lower()
7230
+ flinkcount = format(curfname[23], 'x').lower()
7231
+ fdev = format(curfname[24], 'x').lower()
7232
+ frdev = format(curfname[25], 'x').lower()
7233
+ fseeknextfile = curfname[26]
7234
+ extradata = curfname[27]
7235
+ fheaderchecksumtype = curfname[28]
7236
+ fcontentchecksumtype = curfname[29]
7237
+ fcontents = curfname[30]
6894
7238
  fencoding = GetFileEncoding(fcontents, 0, False)[0]
6895
- tmpoutlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize,
6896
- fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev, fdev_minor, fdev_major, fseeknextfile]
7239
+ tmpoutlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize,
7240
+ fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev, frdev, fseeknextfile]
6897
7241
  fcontents.seek(0, 0)
6898
- AppendFileHeaderWithContent(
6899
- fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
7242
+ AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
6900
7243
  return fp
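AppendListsWithContent now consumes 31-element records instead of 29; the index map below is reconstructed from the assignments in the hunk above:

    # Record layout for AppendListsWithContent in 0.26.0:
    #  0 ftype         8 fflags           16 fcsize       24 fdev
    #  1 fencoding     9 fatime           17 fuid         25 frdev
    #  2 fcencoding   10 fmtime           18 funame       26 fseeknextfile
    #  3 fname        11 fctime           19 fgid         27 extradata
    #  4 flinkname    12 fbtime           20 fgname       28 fheaderchecksumtype
    #  5 fsize        13 fmode            21 fid          29 fcontentchecksumtype
    #  6 fblksize     14 fwinattributes   22 finode       30 fcontents
    #  7 fblocks      15 fcompression     23 flinkcount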
6901
7244
 
6902
7245
 
6903
- def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
6904
- inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
6905
- return AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, followlink, checksumtype, formatspecs, verbose)
7246
+ def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
7247
+ inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs, saltkey, False)
7248
+ return AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, followlink=followlink, checksumtype=checksumtype, formatspecs=formatspecs, saltkey=saltkey, verbose=verbose)
6906
7249
 
6907
7250
 
6908
- def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7251
+ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
6909
7252
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
6910
7253
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
6911
7254
  get_in_ext = os.path.splitext(outfile)
@@ -6949,8 +7292,7 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
6949
7292
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
6950
7293
  except PermissionError:
6951
7294
  return False
6952
- AppendFilesWithContent(infiles, fp, dirlistfromtxt, extradata, jsondata, compression,
6953
- compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, formatspecs, verbose)
7295
+ AppendFilesWithContent(infiles, fp, dirlistfromtxt, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, formatspecs, saltkey, verbose)
6954
7296
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
6955
7297
  fp = CompressOpenFileAlt(
6956
7298
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -6979,12 +7321,12 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
6979
7321
  fp.close()
6980
7322
  return True
6981
7323
 
6982
- def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7324
+ def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
6983
7325
  if not isinstance(infiles, list):
6984
7326
  infiles = [infiles]
6985
7327
  returnout = False
6986
7328
  for infileslist in infiles:
6987
- returnout = AppendFilesWithContentToOutFile(infileslist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, True)
7329
+ returnout = AppendFilesWithContentToOutFile(infileslist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose, True)
6988
7330
  if(not returnout):
6989
7331
  break
6990
7332
  else:
@@ -6994,7 +7336,7 @@ def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=Fals
6994
7336
  return True
6995
7337
  return returnout
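The Stacked variants here and below (tar, zip, rar, 7z) all share one pattern: normalize the input to a list, run the single-archive writer once per element against the same output with returnfp=True, and stop on the first failure. A generic sketch of that loop:

    def append_stacked(groups, outfile, append_one):
        # Mirrors the loop above: append each group; bail out on failure.
        result = False
        for group in groups:
            result = append_one(group, outfile)
            if not result:
                break
        return result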
6996
7338
 
6997
- def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
7339
+ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
6998
7340
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
6999
7341
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
7000
7342
  get_in_ext = os.path.splitext(outfile)
@@ -7035,8 +7377,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
7035
7377
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
7036
7378
  except PermissionError:
7037
7379
  return False
7038
- AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, compression,
7039
- compresswholefile, compressionlevel, followlink, checksumtype, formatspecs, verbose)
7380
+ AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, compression, compresswholefile, compressionlevel, followlink, checksumtype, formatspecs, saltkey, verbose)
7040
7381
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
7041
7382
  fp = CompressOpenFileAlt(
7042
7383
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7066,7 +7407,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
7066
7407
  fp.close()
7067
7408
  return True
7068
7409
 
7069
- def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7410
+ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
7070
7411
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
7071
7412
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
7072
7413
  get_in_ext = os.path.splitext(outfile)
@@ -7108,8 +7449,7 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
7108
7449
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
7109
7450
  except PermissionError:
7110
7451
  return False
7111
- AppendFilesWithContentFromTarFile(infiles, fp, extradata, jsondata, compression,
7112
- compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, verbose)
7452
+ AppendFilesWithContentFromTarFile(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, verbose)
7113
7453
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
7114
7454
  fp = CompressOpenFileAlt(
7115
7455
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7139,12 +7479,12 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
7139
7479
  fp.close()
7140
7480
  return True
7141
7481
 
7142
- def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7482
+ def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
7143
7483
  if not isinstance(infiles, list):
7144
7484
  infiles = [infiles]
7145
7485
  returnout = False
7146
7486
  for infileslist in infiles:
7147
- returnout = AppendFilesWithContentFromTarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
7487
+ returnout = AppendFilesWithContentFromTarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, True)
7148
7488
  if(not returnout):
7149
7489
  break
7150
7490
  else:
@@ -7154,7 +7494,7 @@ def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype=
7154
7494
  return True
7155
7495
  return returnout
7156
7496
 
7157
- def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7497
+ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
7158
7498
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
7159
7499
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
7160
7500
  get_in_ext = os.path.splitext(outfile)
@@ -7196,8 +7536,7 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
7196
7536
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
7197
7537
  except PermissionError:
7198
7538
  return False
7199
- AppendFilesWithContentFromZipFile(infiles, fp, extradata, jsondata, compression,
7200
- compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, verbose)
7539
+ AppendFilesWithContentFromZipFile(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, verbose)
7201
7540
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
7202
7541
  fp = CompressOpenFileAlt(
7203
7542
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7227,12 +7566,12 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
7227
7566
  fp.close()
7228
7567
  return True
7229
7568
 
7230
- def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7569
+ def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
7231
7570
  if not isinstance(infiles, list):
7232
7571
  infiles = [infiles]
7233
7572
  returnout = False
7234
7573
  for infileslist in infiles:
7235
- returnout = AppendFilesWithContentFromZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
7574
+ returnout = AppendFilesWithContentFromZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, True)
7236
7575
  if(not returnout):
7237
7576
  break
7238
7577
  else:
@@ -7243,11 +7582,10 @@ def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype=
7243
7582
  return returnout
7244
7583
 
7245
7584
  if(not rarfile_support):
7246
- def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7585
+ def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
7247
7586
  return False
7248
-
7249
- if(rarfile_support):
7250
- def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7587
+ else:
7588
+ def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
7251
7589
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
7252
7590
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
7253
7591
  get_in_ext = os.path.splitext(outfile)
@@ -7289,8 +7627,7 @@ if(rarfile_support):
7289
7627
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
7290
7628
  except PermissionError:
7291
7629
  return False
7292
- AppendFilesWithContentFromRarFile(infiles, fp, extradata, jsondata, compression,
7293
- compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, verbose)
7630
+ AppendFilesWithContentFromRarFile(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, verbose)
7294
7631
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
7295
7632
  fp = CompressOpenFileAlt(
7296
7633
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7320,12 +7657,12 @@ if(rarfile_support):
7320
7657
  fp.close()
7321
7658
  return True
7322
7659
 
7323
- def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7660
+ def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
7324
7661
  if not isinstance(infiles, list):
7325
7662
  infiles = [infiles]
7326
7663
  returnout = False
7327
7664
  for infileslist in infiles:
7328
- returnout = AppendFilesWithContentFromRarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
7665
+ returnout = AppendFilesWithContentFromRarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, True)
7329
7666
  if(not returnout):
7330
7667
  break
7331
7668
  else:
@@ -7336,11 +7673,10 @@ def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype=
7336
7673
  return returnout
7337
7674
 
7338
7675
  if(not py7zr_support):
7339
- def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7676
+ def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
7340
7677
  return False
7341
-
7342
- if(py7zr_support):
7343
- def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7678
+ else:
7679
+ def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
7344
7680
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
7345
7681
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
7346
7682
  get_in_ext = os.path.splitext(outfile)
@@ -7382,8 +7718,7 @@ if(py7zr_support):
7382
7718
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
7383
7719
  except PermissionError:
7384
7720
  return False
7385
- AppendFilesWithContentFromSevenZipFile(infiles, fp, extradata, jsondata, compression,
7386
- compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, verbose)
7721
+ AppendFilesWithContentFromSevenZipFile(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, verbose)
7387
7722
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
7388
7723
  fp = CompressOpenFileAlt(
7389
7724
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7413,12 +7748,12 @@ if(py7zr_support):
7413
7748
  fp.close()
7414
7749
  return True
7415
7750
 
7416
- def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7751
+ def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
7417
7752
  if not isinstance(infiles, list):
7418
7753
  infiles = [infiles]
7419
7754
  returnout = False
7420
7755
  for infileslist in infiles:
7421
- returnout = AppendFilesWithContentFromSevenZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
7756
+ returnout = AppendFilesWithContentFromSevenZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, True)
7422
7757
  if(not returnout):
7423
7758
  break
7424
7759
  else:
@@ -7428,9 +7763,9 @@ def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmt
7428
7763
  return True
7429
7764
  return returnout
7430
7765
 
7431
- def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
7432
- inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
7433
- return AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
7766
+ def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
7767
+ inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs, saltkey, False)
7768
+ return AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose, returnfp)
7434
7769
 
7435
7770
 
7436
7771
  def PrintPermissionString(fchmode, ftype):
@@ -9171,58 +9506,54 @@ def CheckSumSupport(checkfor, guaranteed=True):
9171
9506
  return False
9172
9507
 
9173
9508
 
9174
- def PackCatFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
9175
- return AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
9509
+ def PackCatFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
9510
+ return AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose, returnfp)
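PackCatFile is the user-facing entry point for the signature change repeated through this hunk. A hedged usage sketch (import path and file names are illustrative; the five checksum slots and salt semantics are inferred from the diff):

    from pycatfile import PackCatFile  # assumed import path

    PackCatFile(["./docs", "./src"], "out.cat",
                compression="auto",
                checksumtype=["md5"] * 5,  # five slots in 0.26.0
                saltkey=None,              # assumed: optional checksum salt
                verbose=False)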
9176
9511
 
9177
- def PackStackedCatFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
9178
- return AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
9512
+ def PackStackedCatFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
9513
+ return AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose, returnfp)
9179
9514
 
9180
- def PackCatFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9181
- return PackCatFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, returnfp)
9515
+ def PackCatFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
9516
+ return PackCatFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, saltkey, verbose, returnfp)
9182
9517
 
9183
9518
 
9184
- def PackCatFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9185
- return AppendFilesWithContentFromTarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
9519
+ def PackCatFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
9520
+ return AppendFilesWithContentFromTarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, returnfp)
9186
9521
 
9187
9522
 
9188
- def PackCatFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9189
- return AppendFilesWithContentFromZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
9523
+ def PackCatFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
9524
+ return AppendFilesWithContentFromZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, returnfp)
9190
9525
 
9191
9526
 
9192
9527
  if(not rarfile_support):
9193
- def PackCatFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9528
+ def PackCatFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
9194
9529
  return False
9195
-
9196
- if(rarfile_support):
9197
- def PackCatFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9198
- return AppendFilesWithContentFromRarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
9530
+ else:
9531
+ def PackCatFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
9532
+ return AppendFilesWithContentFromRarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, returnfp)
9199
9533
 
9200
9534
 
9201
9535
  if(not py7zr_support):
9202
- def PackCatFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9536
+ def PackCatFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
9203
9537
  return False
9204
-
9205
- if(py7zr_support):
9206
- def PackCatFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9207
- return AppendFilesWithContentFromSevenZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
9538
+ else:
9539
+ def PackCatFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
9540
+ return AppendFilesWithContentFromSevenZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, returnfp)
9208
9541
 
9209
9542
 
9210
- def PackCatFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9543
+ def PackCatFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
9211
9544
  checkcompressfile = CheckCompressionSubType(infile, formatspecs, 0, True)
9212
9545
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
9213
9546
  formatspecs = formatspecs[checkcompressfile]
9214
- if(verbose):
9215
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
9216
9547
  if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
9217
- return PackCatFileFromTarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
9548
+ return PackCatFileFromTarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
9218
9549
  elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
9219
- return PackCatFileFromZipFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
9550
+ return PackCatFileFromZipFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
9220
9551
  elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
9221
- return PackCatFileFromRarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
9552
+ return PackCatFileFromRarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
9222
9553
  elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
9223
- return PackCatFileFromSevenZipFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
9554
+ return PackCatFileFromSevenZipFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
9224
9555
  elif(IsSingleDict(formatspecs) and checkcompressfile == formatspecs['format_magic']):
9225
- return RePackCatFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, False, 0, 0, checksumtype, False, extradata, jsondata, formatspecs, verbose, returnfp)
9556
+ return RePackCatFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, False, 0, 0, checksumtype, False, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
9226
9557
  else:
9227
9558
  return False
9228
9559
  return False
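PackCatFileFromInFile sniffs the container type with CheckCompressionSubType and dispatches to the matching converter, so a single call covers tar, zip, rar (when rarfile is installed), 7z (when py7zr is installed), or repacking an existing catfile. An illustrative call (file names assumed):

    from pycatfile import PackCatFileFromInFile  # assumed import path

    PackCatFileFromInFile("input.tar.gz", "out.cat", saltkey=None)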
@@ -9291,19 +9622,12 @@ def CatFileArrayValidate(listarrayfiles, verbose=False):
9291
9622
  ok = False
9292
9623
  return ok
9293
9624
 
9294
- def CatFileValidate(infile, fmttype="auto", filestart=0,
9295
- formatspecs=__file_format_multi_dict__, # keep default like original
9296
- seektoend=False, verbose=False, returnfp=False):
9297
- if(verbose):
9298
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
9299
-
9625
+ def CatFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
9300
9626
  if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
9301
9627
  formatspecs = formatspecs[fmttype]
9302
9628
  elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
9303
9629
  fmttype = "auto"
9304
-
9305
9630
  curloc = filestart
9306
-
9307
9631
  if(hasattr(infile, "read") or hasattr(infile, "write")):
9308
9632
  curloc = infile.tell()
9309
9633
  fp = infile
@@ -9319,7 +9643,6 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
9319
9643
  if(not fp):
9320
9644
  return False
9321
9645
  fp.seek(filestart, 0)
9322
-
9323
9646
  elif(infile == "-"):
9324
9647
  fp = MkTempFile()
9325
9648
  shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
@@ -9331,7 +9654,6 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
9331
9654
  if(not fp):
9332
9655
  return False
9333
9656
  fp.seek(filestart, 0)
9334
-
9335
9657
  elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
9336
9658
  fp = MkTempFile()
9337
9659
  fp.write(infile)
@@ -9343,7 +9665,6 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
9343
9665
  if(not fp):
9344
9666
  return False
9345
9667
  fp.seek(filestart, 0)
9346
-
9347
9668
  elif(re.findall(__download_proto_support__, infile)):
9348
9669
  fp = download_file_from_internet_file(infile)
9349
9670
  fp = UncompressFileAlt(fp, formatspecs, filestart)
@@ -9354,7 +9675,6 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
9354
9675
  if(not fp):
9355
9676
  return False
9356
9677
  fp.seek(filestart, 0)
9357
-
9358
9678
  else:
9359
9679
  infile = RemoveWindowsPath(infile)
9360
9680
  checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
@@ -9401,11 +9721,9 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
9401
9721
  fp.seek(0, 2)
9402
9722
  except (OSError, ValueError):
9403
9723
  SeekToEndOfFile(fp)
9404
-
9405
9724
  CatSize = fp.tell()
9406
9725
  CatSizeEnd = CatSize
9407
9726
  fp.seek(curloc, 0)
9408
-
9409
9727
  if(IsNestedDict(formatspecs)):
9410
9728
  compresschecking = CheckCompressionType(fp, formatspecs, filestart, False)
9411
9729
  if(compresschecking not in formatspecs):
@@ -9413,42 +9731,59 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
9413
9731
  else:
9414
9732
  formatspecs = formatspecs[compresschecking]
9415
9733
  fp.seek(filestart, 0)
9416
-
9417
9734
  inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
9418
9735
  formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
9419
9736
  formdelsize = len(formatspecs['format_delimiter'])
9420
9737
  formdel = fp.read(formdelsize).decode("UTF-8")
9421
-
9422
9738
  if(formstring != formatspecs['format_magic'] + inheaderver):
9423
9739
  return False
9424
9740
  if(formdel != formatspecs['format_delimiter']):
9425
9741
  return False
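CatFileValidate first matches the magic string (format magic plus version digits) and the delimiter before reading any header fields. A compact sketch of that check, using the same formatspecs keys:

    def check_magic(fp, spec):
        ver = str(int(spec['format_ver'].replace(".", "")))
        magic = fp.read(spec['format_len'] + len(ver)).decode("UTF-8")
        delim = fp.read(len(spec['format_delimiter'])).decode("UTF-8")
        return (magic == spec['format_magic'] + ver
                and delim == spec['format_delimiter'])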
9426
-
9427
- if(formatspecs['new_style']):
9742
+ headeroffset = fp.tell()
9743
+ if(__use_new_style__):
9428
9744
  inheader = ReadFileHeaderDataBySize(fp, formatspecs['format_delimiter'])
9429
9745
  else:
9430
9746
  inheader = ReadFileHeaderDataWoSize(fp, formatspecs['format_delimiter'])
9431
-
9432
- fnumextrafieldsize = int(inheader[6], 16)
9433
- fnumextrafields = int(inheader[7], 16)
9434
- extrastart = 8
9747
+ fnumextrafieldsize = int(inheader[15], 16)
9748
+ fnumextrafields = int(inheader[16], 16)
9749
+ extrastart = 17
9435
9750
  extraend = extrastart + fnumextrafields
9436
9751
  formversion = re.findall("([\\d]+)", formstring)
9437
9752
  fheadsize = int(inheader[0], 16)
9438
9753
  fnumfields = int(inheader[1], 16)
9439
- fhencoding = inheader[2]
9440
- fostype = inheader[3]
9441
- fpythontype = inheader[4]
9442
- fnumfiles = int(inheader[5], 16)
9754
+ fnumfiles = int(inheader[8], 16)
9443
9755
  fprechecksumtype = inheader[-2]
9444
9756
  fprechecksum = inheader[-1]
9757
+ outfseeknextfile = inheader[9]
9758
+ fjsonsize = int(inheader[12], 16)
9759
+ fjsonchecksumtype = inheader[13]
9760
+ fjsonchecksum = inheader[14]
9761
+ headerjsonoffset = fp.tell()
9762
+ fprejsoncontent = fp.read(fjsonsize)
9763
+ jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
9764
+ # Next seek directive
9765
+ if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
9766
+ fseeknextasnum = int(outfseeknextfile.replace("+", ""))
9767
+ if(abs(fseeknextasnum) == 0):
9768
+ pass
9769
+ fp.seek(fseeknextasnum, 1)
9770
+ elif(re.findall(r"^\-([0-9]+)", outfseeknextfile)):
9771
+ fseeknextasnum = int(outfseeknextfile)
9772
+ if(abs(fseeknextasnum) == 0):
9773
+ pass
9774
+ fp.seek(fseeknextasnum, 1)
9775
+ elif(re.findall(r"^([0-9]+)", outfseeknextfile)):
9776
+ fseeknextasnum = int(outfseeknextfile)
9777
+ if(abs(fseeknextasnum) == 0):
9778
+ pass
9779
+ fp.seek(fseeknextasnum, 0)
9780
+ else:
9781
+ return False
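The seek directive grammar used above, and again per record later in this function: "+N" and "-N" seek relative to the current position, a bare "N" seeks to an absolute offset, and anything else invalidates the archive. Equivalent standalone logic:

    import re

    def apply_seek_directive(fp, directive):
        if re.match(r"^[+-][0-9]+$", directive):
            fp.seek(int(directive), 1)  # relative seek
            return True
        if re.match(r"^[0-9]+$", directive):
            fp.seek(int(directive), 0)  # absolute seek
            return True
        return False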
9445
9782
  il = 0
9446
- headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
9447
- newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
9448
-
9783
+ headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs, saltkey)
9784
+ newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
9449
9785
  valid_archive = True
9450
9786
  invalid_archive = False
9451
-
9452
9787
  if(verbose):
9453
9788
  if(hasattr(infile, "read") or hasattr(infile, "write")):
9454
9789
  try:
@@ -9460,78 +9795,56 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
9460
9795
  else:
9461
9796
  VerbosePrintOut(infile)
9462
9797
  VerbosePrintOut("Number of Records " + str(fnumfiles))
9463
-
9464
9798
  if(headercheck):
9465
9799
  if(verbose):
9466
- VerbosePrintOut("File Header Checksum Passed at offset " + str(0))
9800
+ VerbosePrintOut("File Header Checksum Passed at offset " + str(headeroffset))
9467
9801
  VerbosePrintOut("'" + fprechecksum + "' == " + "'" + newfcs + "'")
9468
9802
  else:
9469
9803
  # always flip flags, even when not verbose
9470
9804
  valid_archive = False
9471
9805
  invalid_archive = True
9472
9806
  if(verbose):
9473
- VerbosePrintOut("File Header Checksum Failed at offset " + str(0))
9807
+ VerbosePrintOut("File Header Checksum Failed at offset " + str(headeroffset))
9474
9808
  VerbosePrintOut("'" + fprechecksum + "' != " + "'" + newfcs + "'")
9475
-
9809
+ if(fjsonsize > 0):
9810
+ if(CheckChecksums(jsonfcs, fjsonchecksum)):
9811
+ if(verbose):
9812
+ VerbosePrintOut("File JSON Data Checksum Passed at offset " + str(headerjsonoffset))
9813
+ VerbosePrintOut("'" + outfjsonchecksum + "' == " + "'" + injsonfcs + "'")
9814
+ else:
9815
+ valid_archive = False
9816
+ invalid_archive = True
9817
+ if(verbose):
9818
+ VerbosePrintOut("File JSON Data Checksum Error at offset " + str(headerjsonoffset))
9819
+ VerbosePrintOut("'" + outfjsonchecksum + "' != " + "'" + injsonfcs + "'")
9476
9820
  if(verbose):
9477
9821
  VerbosePrintOut("")
9478
-
9479
9822
  # Iterate either until EOF (seektoend) or fixed count
9480
9823
  while (fp.tell() < CatSizeEnd) if seektoend else (il < fnumfiles):
9481
9824
  outfhstart = fp.tell()
9482
- if(formatspecs['new_style']):
9825
+ if(__use_new_style__):
9483
9826
  inheaderdata = ReadFileHeaderDataBySize(fp, formatspecs['format_delimiter'])
9484
9827
  else:
9485
9828
  inheaderdata = ReadFileHeaderDataWoSize(fp, formatspecs['format_delimiter'])
9486
9829
 
9487
9830
  if(len(inheaderdata) == 0):
9488
9831
  break
9489
-
9490
- outfheadsize = int(inheaderdata[0], 16)
9491
- outfnumfields = int(inheaderdata[1], 16)
9492
- outftype = int(inheaderdata[2], 16)
9493
- # FIX: these must come from inheaderdata, not inheader
9494
- outfostype = inheaderdata[3]
9495
- outfencoding = inheaderdata[4]
9496
-
9497
9832
  if(re.findall("^[.|/]", inheaderdata[5])):
9498
9833
  outfname = inheaderdata[5]
9499
9834
  else:
9500
9835
  outfname = "./" + inheaderdata[5]
9501
9836
  outfbasedir = os.path.dirname(outfname)
9502
-
9503
- outflinkname = inheaderdata[6]
9504
9837
  outfsize = int(inheaderdata[7], 16)
9505
- outfatime = int(inheaderdata[8], 16)
9506
- outfmtime = int(inheaderdata[9], 16)
9507
- outfctime = int(inheaderdata[10], 16)
9508
- outfbtime = int(inheaderdata[11], 16)
9509
- outfmode = int(inheaderdata[12], 16)
9510
- outfchmode = stat.S_IMODE(outfmode)
9511
- outftypemod = stat.S_IFMT(outfmode)
9512
- outfwinattributes = int(inheaderdata[13], 16)
9513
- outfcompression = inheaderdata[14]
9514
- outfcsize = int(inheaderdata[15], 16)
9515
- outfuid = int(inheaderdata[16], 16)
9516
- outfuname = inheaderdata[17]
9517
- outfgid = int(inheaderdata[18], 16)
9518
- outfgname = inheaderdata[19]
9519
- fid = int(inheaderdata[20], 16)
9520
- finode = int(inheaderdata[21], 16)
9521
- flinkcount = int(inheaderdata[22], 16)
9522
- outfdev = int(inheaderdata[23], 16)
9523
- outfdev_minor = int(inheaderdata[24], 16)
9524
- outfdev_major = int(inheaderdata[25], 16)
9525
- outfseeknextfile = inheaderdata[26]
9526
- outfjsontype = inheaderdata[27]
9527
- outfjsonlen = int(inheaderdata[28], 16)
9528
- outfjsonsize = int(inheaderdata[29], 16)
9529
- outfjsonchecksumtype = inheaderdata[30]
9530
- outfjsonchecksum = inheaderdata[31]
9531
-
9838
+ outfcompression = inheaderdata[17]
9839
+ outfcsize = int(inheaderdata[18], 16)
9840
+ fid = int(inheaderdata[23], 16)
9841
+ finode = int(inheaderdata[24], 16)
9842
+ outfseeknextfile = inheaderdata[28]
9843
+ outfjsonsize = int(inheaderdata[31], 16)
9844
+ outfjsonchecksumtype = inheaderdata[32]
9845
+ outfjsonchecksum = inheaderdata[33]
9532
9846
  outfhend = fp.tell() - 1 # (kept for parity; not used)
9533
9847
  outfjstart = fp.tell()
9534
-
9535
9848
  # Read JSON bytes; compute checksum on bytes for robustness
9536
9849
  outfprejsoncontent_bytes = fp.read(outfjsonsize)
9537
9850
  # Decode for any downstream text needs (not used further here)
@@ -9539,27 +9852,21 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
9539
9852
  outfprejsoncontent = outfprejsoncontent_bytes.decode("UTF-8")
9540
9853
  except Exception:
9541
9854
  outfprejsoncontent = None
9542
-
9543
9855
  outfjend = fp.tell()
9544
9856
  fp.seek(len(formatspecs['format_delimiter']), 1)
9545
-
9546
- injsonfcs = GetFileChecksum(outfprejsoncontent_bytes, outfjsonchecksumtype, True, formatspecs)
9547
-
9548
- outfextrasize = int(inheaderdata[32], 16)
9549
- outfextrafields = int(inheaderdata[33], 16)
9857
+ injsonfcs = GetFileChecksum(outfprejsoncontent_bytes, outfjsonchecksumtype, True, formatspecs, saltkey)
9858
+ outfextrafields = int(inheaderdata[35], 16)
9550
9859
  extrafieldslist = []
9551
- extrastart = 34
9860
+ extrastart = 36
9552
9861
  extraend = extrastart + outfextrafields
9553
-
9554
9862
  outfcs = inheaderdata[-2].lower()
9555
9863
  outfccs = inheaderdata[-1].lower()
9556
- infcs = GetHeaderChecksum(inheaderdata[:-2], inheaderdata[-4].lower(), True, formatspecs)
9557
-
9864
+ infcs = GetHeaderChecksum(inheaderdata[:-2], inheaderdata[-4].lower(), True, formatspecs, saltkey)
9558
9865
  if(verbose):
9559
9866
  VerbosePrintOut(outfname)
9560
9867
  VerbosePrintOut("Record Number " + str(il) + "; File ID " + str(fid) + "; iNode Number " + str(finode))
9561
9868
 
9562
- if(hmac.compare_digest(outfcs, infcs)):
9869
+ if(CheckChecksums(outfcs, infcs)):
9563
9870
  if(verbose):
9564
9871
  VerbosePrintOut("File Header Checksum Passed at offset " + str(outfhstart))
9565
9872
  VerbosePrintOut("'" + outfcs + "' == " + "'" + infcs + "'")
@@ -9569,9 +9876,8 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
9569
9876
  if(verbose):
9570
9877
  VerbosePrintOut("File Header Checksum Failed at offset " + str(outfhstart))
9571
9878
  VerbosePrintOut("'" + outfcs + "' != " + "'" + infcs + "'")
9572
-
9573
9879
  if(outfjsonsize > 0):
9574
- if(hmac.compare_digest(injsonfcs, outfjsonchecksum)):
9880
+ if(CheckChecksums(injsonfcs, outfjsonchecksum)):
9575
9881
  if(verbose):
9576
9882
  VerbosePrintOut("File JSON Data Checksum Passed at offset " + str(outfjstart))
9577
9883
  VerbosePrintOut("'" + outfjsonchecksum + "' == " + "'" + injsonfcs + "'")
@@ -9581,21 +9887,19 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
9581
9887
  if(verbose):
9582
9888
  VerbosePrintOut("File JSON Data Checksum Error at offset " + str(outfjstart))
9583
9889
  VerbosePrintOut("'" + outfjsonchecksum + "' != " + "'" + injsonfcs + "'")
9584
-
9585
9890
  outfcontentstart = fp.tell()
9586
9891
  outfcontents = b"" # FIX: bytes for Py2/3 consistency
9587
9892
  pyhascontents = False
9588
-
9589
9893
  if(outfsize > 0):
9590
9894
  if(outfcompression == "none" or outfcompression == "" or outfcompression == "auto"):
9591
9895
  outfcontents = fp.read(outfsize)
9592
9896
  else:
9593
9897
  outfcontents = fp.read(outfcsize)
9594
9898
 
9595
- infccs = GetFileChecksum(outfcontents, inheaderdata[-3].lower(), False, formatspecs)
9899
+ infccs = GetFileChecksum(outfcontents, inheaderdata[-3].lower(), False, formatspecs, saltkey)
9596
9900
  pyhascontents = True
9597
9901
 
9598
- if(hmac.compare_digest(outfccs, infccs)):
9902
+ if(CheckChecksums(outfccs, infccs)):
9599
9903
  if(verbose):
9600
9904
  VerbosePrintOut("File Content Checksum Passed at offset " + str(outfcontentstart))
9601
9905
  VerbosePrintOut("'" + outfccs + "' == " + "'" + infccs + "'")
@@ -9605,10 +9909,8 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
  if(verbose):
  VerbosePrintOut("File Content Checksum Failed at offset " + str(outfcontentstart))
  VerbosePrintOut("'" + outfccs + "' != " + "'" + infccs + "'")
-
  if(verbose):
  VerbosePrintOut("")
-
  # Next seek directive
  if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
  fseeknextasnum = int(outfseeknextfile.replace("+", ""))
@@ -9627,9 +9929,7 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
  fp.seek(fseeknextasnum, 0)
  else:
  return False
-
  il = il + 1
-
  if(valid_archive):
  if(returnfp):
  return fp
@@ -9641,34 +9941,34 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
  return False
 
 
- def CatFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
- return CatFileValidate(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
+ def CatFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
+ return CatFileValidate(infile, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
 
 
- def CatFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
+ def CatFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
  if(isinstance(infile, (list, tuple, ))):
  pass
  else:
  infile = [infile]
  outretval = True
  for curfname in infile:
- curretfile = CatFileValidate(curfname, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
+ curretfile = CatFileValidate(curfname, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
  if(not curretfile):
  outretval = False
  return outretval
 
- def CatFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
- return CatFileValidateMultiple(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
+ def CatFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
+ return CatFileValidateMultiple(infile, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
 
 
- def StackedCatFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
+ def StackedCatFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
  outretval = []
  outstartfile = filestart
  outfsize = float('inf')
  while True:
  if outstartfile >= outfsize: # stop when function signals False
  break
- is_valid_file = CatFileValidate(infile, fmttype, outstartfile, formatspecs, seektoend, verbose, True)
+ is_valid_file = CatFileValidate(infile, fmttype, outstartfile, formatspecs, saltkey, seektoend, verbose, True)
  if is_valid_file is False: # stop when function signals False
  outretval.append(is_valid_file)
  break
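
Every wrapper in the validate family gains saltkey immediately after formatspecs, so callers that passed seektoend, verbose, or returnfp positionally must shift those arguments by one slot in 0.26.0. A hedged usage sketch (the file name and key are illustrative, not taken from the diff):

    import pycatfile

    # None preserves the pre-0.26.0 unsalted behavior; a bytes key salts
    # every header, JSON, and content checksum computed during validation.
    ok = pycatfile.CatFileValidate("backup.cat", "auto", 0,
                                   saltkey=b"example-salt",
                                   verbose=True)
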
@@ -9685,33 +9985,36 @@ def StackedCatFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__fi
  if(returnfp):
  return infile
  else:
- infile.close()
+ try:
+ infile.close()
+ except AttributeError:
+ return False
  return outretval
 
 
 
- def StackedCatFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
- return StackedCatFileValidate(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
+ def StackedCatFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
+ return StackedCatFileValidate(infile, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
 
 
- def StackedCatFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
+ def StackedCatFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
  if(isinstance(infile, (list, tuple, ))):
  pass
  else:
  infile = [infile]
  outretval = True
  for curfname in infile:
- curretfile = StackedCatFileValidate(curfname, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
+ curretfile = StackedCatFileValidate(curfname, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
  if(not curretfile):
  outretval = False
  return outretval
 
- def StackedCatFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
- return StackedCatFileValidateMultiple(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
+ def StackedCatFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
+ return StackedCatFileValidateMultiple(infile, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
 
 
- def CatFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
- outfp = ReadInFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+ def CatFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
+ outfp = ReadInFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
  if not returnfp:
  for item in outfp:
  fp = item.get('fp')
@@ -9725,26 +10028,26 @@ def CatFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0,
  return outfp
 
 
- def MultipleCatFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+ def MultipleCatFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
  if(isinstance(infile, (list, tuple, ))):
  pass
  else:
  infile = [infile]
  outretval = []
  for curfname in infile:
- outretval.append(CatFileToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp))
+ outretval.append(CatFileToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend, returnfp))
  return outretval
 
- def MultipleCatFilesToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
- return MultipleCatFileToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
+ def MultipleCatFilesToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
+ return MultipleCatFileToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
 
 
- def CatFileStringToArray(instr, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+ def CatFileStringToArray(instr, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
  checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
  formatspecs = formatspecs[checkcompressfile]
  fp = MkTempFile(instr)
- listarrayfiles = CatFileToArray(fp, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
+ listarrayfiles = CatFileToArray(fp, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
  return listarrayfiles
 
 
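
The array-building family follows the same pattern: saltkey sits between formatspecs and seektoend. A hedged usage sketch for reading a salted archive into the in-memory listing, assuming the list-of-archives return shape that CatFileListFiles relies on later in this diff (file name and key are illustrative):

    import pycatfile

    archives = pycatfile.CatFileToArray("backup.cat", "auto",
                                        saltkey=b"example-salt")
    for archive in archives:
        for entry in archive['ffilelist']:
            print(entry['fname'], entry['fsize'])
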
@@ -9753,9 +10056,8 @@ def TarFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
  formatspecs = formatspecs[checkcompressfile]
  fp = MkTempFile()
- fp = PackCatFileFromTarFile(
- infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
- listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
+ fp = PackCatFileFromTarFile(infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, None, False, True)
+ listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, None, seektoend, returnfp)
  return listarrayfiles
 
 
@@ -9764,9 +10066,8 @@ def ZipFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
  formatspecs = formatspecs[checkcompressfile]
  fp = MkTempFile()
- fp = PackCatFileFromZipFile(
- infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
- listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
+ fp = PackCatFileFromZipFile(infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, None, False, True)
+ listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, None, seektoend, returnfp)
  return listarrayfiles
 
 
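
Note that the Tar and Zip conversion helpers (like the Rar and 7z ones below) pass None in the new saltkey slot of both the pack call and the read-back call, so archives produced by format conversion always carry unsalted checksums. Getting a salted archive from foreign input therefore takes a separate repack step; a hedged sketch, with illustrative paths and key:

    import pycatfile

    # First convert the foreign archive, then re-key the result with a salt
    # using the insaltkey/outsaltkey split introduced later in this diff.
    pycatfile.RePackCatFile("converted.cat", "salted.cat",
                            outsaltkey=b"example-salt")
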
@@ -9780,9 +10081,8 @@ if(rarfile_support):
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
  formatspecs = formatspecs[checkcompressfile]
  fp = MkTempFile()
- fp = PackCatFileFromRarFile(
- infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
- listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
+ fp = PackCatFileFromRarFile(infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, None, False, True)
+ listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, None, seektoend, returnfp)
  return listarrayfiles
 
  if(not py7zr_support):
@@ -9795,13 +10095,12 @@ if(py7zr_support):
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
  formatspecs = formatspecs[checkcompressfile]
  fp = MkTempFile()
- fp = PackCatFileFromSevenZipFile(
- infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
- listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
+ fp = PackCatFileFromSevenZipFile(infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, None, False, True)
+ listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, None, seektoend, returnfp)
  return listarrayfiles
 
 
- def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+ def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
  checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
  formatspecs = formatspecs[checkcompressfile]
@@ -9814,17 +10113,16 @@ def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, c
  elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
  return SevenZipFileToArray(infile, seekstart, seekend, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
  elif(checkcompressfile == formatspecs['format_magic']):
- return CatFileToArray(infile, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
+ return CatFileToArray(infile, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
  else:
  return False
  return False
 
 
- def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, skipchecksum=False, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
+ def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, saltkey=None, skipchecksum=False, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
  outarray = MkTempFile()
- packform = PackCatFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
- compressionlevel, followlink, checksumtype, extradata, formatspecs, verbose, True)
- listarrayfiles = CatFileToArray(outarray, "auto", filestart, seekstart, seekend, listonly, True, True, skipchecksum, formatspecs, seektoend, returnfp)
+ packform = PackCatFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, followlink, checksumtype, extradata, formatspecs, saltkey, verbose, True)
+ listarrayfiles = CatFileToArray(outarray, "auto", filestart, seekstart, seekend, listonly, True, True, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
  return listarrayfiles
 
 
@@ -9946,12 +10244,12 @@ def CatFileArrayToArrayIndex(inarray, returnfp=False):
  return out
 
 
- def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=None, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=None, skipchecksum=False, extradata=None, jsondata=None, formatspecs=None, seektoend=False, verbose=False, returnfp=False):
+ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, seektoend=False, verbose=False, returnfp=False):
  # ---------- Safe defaults ----------
  if compressionuselist is None:
  compressionuselist = compressionlistalt
  if checksumtype is None:
- checksumtype = ["md5", "md5", "md5", "md5"]
+ checksumtype = ["md5", "md5", "md5", "md5", "md5"]
  if extradata is None:
  extradata = []
  if jsondata is None:
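
The checksum list grows from four slots to five. Matching the calls later in this function (AppendFileHeader consumes slots 0 and 1, AppendFileHeaderWithContent consumes slots 2 through 4), the layout plausibly works out to:

    checksumtype = [
        "md5",  # [0] archive header
        "md5",  # [1] archive-level JSON data
        "md5",  # [2] per-file header
        "md5",  # [3] per-file JSON data
        "md5",  # [4] per-file content
    ]
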
@@ -9970,7 +10268,7 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
  infile = RemoveWindowsPath(infile)
  listarrayfileslist = CatFileToArray(
  infile, "auto", filestart, seekstart, seekend,
- False, True, True, skipchecksum, formatspecs, seektoend, False
+ False, True, True, skipchecksum, formatspecs, insaltkey, seektoend, False
  )
 
  # ---------- Format specs selection ----------
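
Because the input archive is read with insaltkey while the output is written with outsaltkey, a single RePackCatFile call can re-key an archive. A hedged sketch (paths and keys are illustrative):

    import pycatfile

    # Read "old.cat" verified against the old salt; write "rekeyed.cat"
    # with every checksum salted under the new key.
    pycatfile.RePackCatFile("old.cat", "rekeyed.cat",
                            insaltkey=b"old-salt",
                            outsaltkey=b"new-salt")
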
@@ -10037,9 +10335,6 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
  if (compression is None) or (compressionuselist and compression not in compressionuselist):
  compression = "auto"
 
- if verbose:
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
-
  # No files?
  if not listarrayfiles.get('ffilelist'):
  return False
@@ -10052,7 +10347,7 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
  if lenlist != fnumfiles:
  fnumfiles = lenlist
 
- AppendFileHeader(fp, fnumfiles, listarrayfiles.get('fencoding', 'utf-8'), [], checksumtype[0], formatspecs)
+ AppendFileHeader(fp, fnumfiles, listarrayfiles.get('fencoding', 'utf-8'), listarrayfiles['fextradata'], listarrayfiles['fjsondata'], [checksumtype[0], checksumtype[1]], formatspecs, outsaltkey)
 
  # loop counters
  lcfi = 0
@@ -10082,6 +10377,9 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
  # fields (hex-encoded where expected)
  fheadersize = format(int(cur_entry['fheadersize']), 'x').lower()
  fsize = format(int(cur_entry['fsize']), 'x').lower()
+ fblksize = format(int(cur_entry['fblksize']), 'x').lower()
+ fblocks = format(int(cur_entry['fblocks']), 'x').lower()
+ fflags = format(int(cur_entry['fflags']), 'x').lower()
  flinkname = cur_entry['flinkname']
  fatime = format(int(cur_entry['fatime']), 'x').lower()
  fmtime = format(int(cur_entry['fmtime']), 'x').lower()
@@ -10100,8 +10398,7 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
  fcompression = cur_entry['fcompression']
  fcsize = format(int(cur_entry['fcsize']), 'x').lower()
  fdev = format(int(cur_entry['fdev']), 'x').lower()
- fdev_minor = format(int(cur_entry['fminor']), 'x').lower()
- fdev_major = format(int(cur_entry['fmajor']), 'x').lower()
+ frdev = format(int(cur_entry['frdev']), 'x').lower()
  fseeknextfile = cur_entry['fseeknextfile']
 
  # extra fields sizing
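
The new fblksize, fblocks, and fflags fields, and the frdev field that replaces the split fdev_minor/fdev_major pair, follow the existing convention of storing integers as lowercase hex strings, and presumably mirror the os.stat() attributes of the same names. A hedged sketch of how a packer might populate them; the getattr fallbacks are an assumption, since these attributes are platform-dependent and the packing side is not shown in this diff:

    import os

    st = os.stat("somefile")
    fblksize = format(getattr(st, "st_blksize", 0), 'x').lower()
    fblocks = format(getattr(st, "st_blocks", 0), 'x').lower()
    fflags = format(getattr(st, "st_flags", 0), 'x').lower()
    # frdev stores the raw device number instead of separate major/minor parts
    frdev = format(getattr(st, "st_rdev", 0), 'x').lower()
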
@@ -10112,6 +10409,12 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
  # extradata/jsondata defaults per file
  if not followlink and len(extradata) <= 0:
  extradata = cur_entry['fextradata']
+
+ fvendorfields = cur_entry['fvendorfields']
+ ffvendorfieldslist = []
+ if(fvendorfields>0):
+ ffvendorfieldslist = cur_entry['fvendorfieldslist']
+
  if not followlink and len(jsondata) <= 0:
  jsondata = cur_entry['fjsondata']
 
@@ -10147,7 +10450,11 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
  fcontents.seek(0, 0)
  cfcontents.seek(0, 0)
  cfcontents = CompressOpenFileAlt(
- cfcontents, compressionuselist[ilmin], compressionlevel, compressionuselist, formatspecs
+ cfcontents,
+ compressionuselist[ilmin],
+ compressionlevel,
+ compressionuselist,
+ formatspecs
  )
  if cfcontents:
  cfcontents.seek(0, 2)
@@ -10155,7 +10462,7 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
  cfcontents.close()
  else:
  ilcsize.append(float("inf"))
- ilmin += 1
+ ilmin = ilmin + 1
  ilcmin = ilcsize.index(min(ilcsize))
  curcompression = compressionuselist[ilcmin]
 
@@ -10164,16 +10471,24 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
  shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
  cfcontents.seek(0, 0)
  cfcontents = CompressOpenFileAlt(
- cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs
+ cfcontents,
+ curcompression,
+ compressionlevel,
+ compressionuselist,
+ formatspecs
  )
  cfcontents.seek(0, 2)
- cfsize_val = cfcontents.tell()
- if ucfsize > cfsize_val:
- fcsize = format(int(cfsize_val), 'x').lower()
+ cfsize = cfcontents.tell()
+ if ucfsize > cfsize:
+ fcsize = format(int(cfsize), 'x').lower()
  fcompression = curcompression
  fcontents.close()
  fcontents = cfcontents
 
+ if fcompression == "none":
+ fcompression = ""
+ fcontents.seek(0, 0)
+
  # link following (fixed: use listarrayfiles, not prelistarrayfiles)
  if followlink:
  if (cur_entry['ftype'] == 1 or cur_entry['ftype'] == 2):
@@ -10182,6 +10497,9 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
  flinkinfo = listarrayfiles['ffilelist'][flinkid]
  fheadersize = format(int(flinkinfo['fheadersize']), 'x').lower()
  fsize = format(int(flinkinfo['fsize']), 'x').lower()
+ fblksize = format(int(flinkinfo['fblksize']), 'x').lower()
+ fblocks = format(int(flinkinfo['fblocks']), 'x').lower()
+ fflags = format(int(flinkinfo['fflags']), 'x').lower()
  flinkname = flinkinfo['flinkname']
  fatime = format(int(flinkinfo['fatime']), 'x').lower()
  fmtime = format(int(flinkinfo['fmtime']), 'x').lower()
@@ -10200,14 +10518,19 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
  fcompression = flinkinfo['fcompression']
  fcsize = format(int(flinkinfo['fcsize']), 'x').lower()
  fdev = format(int(flinkinfo['fdev']), 'x').lower()
- fdev_minor = format(int(flinkinfo['fminor']), 'x').lower()
- fdev_major = format(int(flinkinfo['fmajor']), 'x').lower()
+ frdev = format(int(flinkinfo['frdev']), 'x').lower()
  fseeknextfile = flinkinfo['fseeknextfile']
  if (len(flinkinfo['fextradata']) > flinkinfo['fextrafields']
  and len(flinkinfo['fextradata']) > 0):
  flinkinfo['fextrafields'] = len(flinkinfo['fextradata'])
  if len(extradata) < 0:
  extradata = flinkinfo['fextradata']
+
+ fvendorfields = flinkinfo['fvendorfields']
+ ffvendorfieldslist = []
+ if(fvendorfields>0):
+ ffvendorfieldslist = flinkinfo['fvendorfieldslist']
+
  if len(jsondata) < 0:
  jsondata = flinkinfo['fjsondata']
  fcontents = flinkinfo['fcontents']
@@ -10236,15 +10559,15 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
  fcompression = ""
 
  tmpoutlist = [
- ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime,
+ ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime,
  fctime, fbtime, fmode, fwinattributes, fcompression, fcsize, fuid, funame,
- fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, fseeknextfile
+ fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, fseeknextfile
  ]
 
- AppendFileHeaderWithContent(
- fp, tmpoutlist, extradata, jsondata, fcontents.read(),
- [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs
- )
+ if(fvendorfields>0 and len(ffvendorfieldslist)>0):
+ extradata.extend(ffvendorfieldslist)
+
+ AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, outsaltkey)
  try:
  fcontents.close()
  except Exception:
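
The field list written per entry grows from 25 to 27 values (three size/flag fields added, the two split device numbers collapsed into one), and vendor fields recovered from the source entry are folded into extradata just before the header is written, so they survive a repack even though RePackCatFile has no dedicated vendor-field argument. A hedged illustration of that pass-through (the field values are invented):

    # An entry whose reader produced two vendor fields...
    cur_entry = {'fvendorfields': 2, 'fvendorfieldslist': ['vendor-a', 'vendor-b']}
    extradata = []
    # ...has them appended to extradata before AppendFileHeaderWithContent runs.
    if cur_entry['fvendorfields'] > 0 and len(cur_entry['fvendorfieldslist']) > 0:
        extradata.extend(cur_entry['fvendorfieldslist'])
    assert extradata == ['vendor-a', 'vendor-b']
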
@@ -10289,12 +10612,12 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
  pass
  return True
 
- def RePackMultipleCatFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=None, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=None, skipchecksum=False, extradata=None, jsondata=None, formatspecs=None, seektoend=False, verbose=False, returnfp=False):
+ def RePackMultipleCatFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, seektoend=False, verbose=False, returnfp=False):
  if not isinstance(infiles, list):
  infiles = [infiles]
  returnout = False
  for infileslist in infiles:
- returnout = RePackCatFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend, checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, True)
+ returnout = RePackCatFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend, checksumtype, skipchecksum, extradata, jsondata, formatspecs, insaltkey, outsaltkey, seektoend, verbose, True)
  if(not returnout):
  break
  else:
@@ -10304,33 +10627,28 @@ def RePackMultipleCatFile(infiles, outfile, fmttype="auto", compression="auto",
  return True
  return returnout
 
- def RePackCatFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
+ def RePackCatFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, seektoend=False, verbose=False, returnfp=False):
  fp = MkTempFile(instr)
- listarrayfiles = RePackCatFile(fp, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
- checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
+ listarrayfiles = RePackCatFile(fp, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend, checksumtype, skipchecksum, extradata, jsondata, formatspecs, insaltkey, outsaltkey, seektoend, verbose, returnfp)
  return listarrayfiles
 
 
- def PackCatFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
+ def PackCatFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
  outarray = MkTempFile()
- packform = PackCatFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
- compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, True)
- listarrayfiles = RePackCatFile(outarray, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
- checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
+ packform = PackCatFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, saltkey, verbose, True)
+ listarrayfiles = RePackCatFile(outarray, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend, checksumtype, skipchecksum, extradata, jsondata, formatspecs, saltkey, saltkey, seektoend, verbose, returnfp)
  return listarrayfiles
 
 
- def UnPackCatFile(infile, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, preservepermissions=True, preservetime=True, seektoend=False, verbose=False, returnfp=False):
+ def UnPackCatFile(infile, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, preservepermissions=True, preservetime=True, seektoend=False, verbose=False, returnfp=False):
  if(outdir is not None):
  outdir = RemoveWindowsPath(outdir)
- if(verbose):
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  if(isinstance(infile, dict)):
  listarrayfiles = infile
  else:
  if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
  infile = RemoveWindowsPath(infile)
- listarrayfiles = CatFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, True, skipchecksum, formatspecs, seektoend, returnfp)
+ listarrayfiles = CatFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, True, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
  if(not listarrayfiles):
  return False
  lenlist = len(listarrayfiles['ffilelist'])
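
Extraction gains the same saltkey slot, placed between formatspecs and the two preserve flags. A hedged usage sketch (path and key are illustrative):

    import pycatfile

    pycatfile.UnPackCatFile("backup.cat", outdir="restore",
                            saltkey=b"example-salt",
                            preservepermissions=True,
                            preservetime=True)
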
@@ -10566,9 +10884,9 @@ def UnPackCatFile(infile, outdir=None, followlink=False, filestart=0, seekstart=
  return True
 
 
- def UnPackCatFileString(instr, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
+ def UnPackCatFileString(instr, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
  fp = MkTempFile(instr)
- listarrayfiles = UnPackCatFile(fp, outdir, followlink, filestart, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
+ listarrayfiles = UnPackCatFile(fp, outdir, followlink, filestart, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend=seektoend, verbose=verbose, returnfp=returnfp)
  return listarrayfiles
 
  def ftype_to_str(ftype):
@@ -10586,9 +10904,7 @@ def ftype_to_str(ftype):
  # Default to "file" if unknown
  return mapping.get(ftype, "file")
 
- def CatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
- if(verbose):
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
+ def CatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, newstyle=False, returnfp=False):
  if(isinstance(infile, dict)):
  listarrayfileslist = [infile]
  if(isinstance(infile, list)):
@@ -10596,7 +10912,7 @@ def CatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0
  else:
  if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
  infile = RemoveWindowsPath(infile)
- listarrayfileslist = CatFileToArray(infile, fmttype, filestart, seekstart, seekend, True, False, False, skipchecksum, formatspecs, seektoend, returnfp)
+ listarrayfileslist = CatFileToArray(infile, fmttype, filestart, seekstart, seekend, True, False, False, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
  if(not listarrayfileslist):
  return False
  for listarrayfiles in listarrayfileslist:
@@ -10633,8 +10949,11 @@ def CatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0
  VerbosePrintOut(ftype_to_str(listarrayfiles['ffilelist'][lcfi]['ftype']) + "\t" + listarrayfiles['ffilelist'][lcfi]['fcompression'] + "\t" + str(
  listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + "\t" + printfname)
  else:
+ ts_ns = listarrayfiles['ffilelist'][lcfi]['fmtime']
+ sec, ns = divmod(int(ts_ns), 10**9)
+ dt = datetime.datetime.utcfromtimestamp(sec).replace(microsecond=ns // 1000)
  VerbosePrintOut(PrintPermissionString(listarrayfiles['ffilelist'][lcfi]['fmode'], listarrayfiles['ffilelist'][lcfi]['ftype']) + " " + str(fuprint) + "/" + str(fgprint) + " " + str(
- listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + " " + datetime.datetime.utcfromtimestamp(listarrayfiles['ffilelist'][lcfi]['fmtime']).strftime('%Y-%m-%d %H:%M') + " " + printfname)
+ listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + " " + dt.strftime('%Y-%m-%d %H:%M') + " " + printfname)
  lcfi = lcfi + 1
  if(returnfp):
  return listarrayfiles['fp']
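
The long-listing branch now treats fmtime as a nanosecond timestamp: divmod splits it into whole seconds for utcfromtimestamp and a nanosecond remainder, which is floor-divided to microseconds, the finest resolution datetime can hold. A worked example of the arithmetic:

    ts_ns = 1731456000123456789
    sec, ns = divmod(ts_ns, 10**9)   # sec = 1731456000, ns = 123456789
    microsecond = ns // 1000         # 123456
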
@@ -10642,25 +10961,25 @@ def CatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0
  return True
 
 
- def MultipleCatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+ def MultipleCatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
  if(isinstance(infile, (list, tuple, ))):
  pass
  else:
  infile = [infile]
  outretval = {}
  for curfname in infile:
- outretval[curfname] = CatFileListFiles(infile, fmttype, filestart, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, returnfp)
+ outretval[curfname] = CatFileListFiles(curfname, fmttype, filestart, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend, returnfp=returnfp)
  return outretval
 
 
- def StackedCatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
+ def StackedCatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, newstyle=False, returnfp=False):
  outretval = []
  outstartfile = filestart
  outfsize = float('inf')
  while True:
  if outstartfile >= outfsize: # stop when function signals False
  break
- list_file_retu = CatFileListFiles(infile, fmttype, outstartfile, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, True)
+ list_file_retu = CatFileListFiles(infile, fmttype, outstartfile, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend, verbose, newstyle, True)
  if list_file_retu is False: # stop when function signals False
  outretval.append(list_file_retu)
  else:
@@ -10676,30 +10995,31 @@ def StackedCatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, se
  if(returnfp):
  return infile
  else:
- infile.close()
+ try:
+ infile.close()
+ except AttributeError:
+ return False
  return outretval
 
 
- def MultipleStackedCatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+ def MultipleStackedCatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
  if(isinstance(infile, (list, tuple, ))):
  pass
  else:
  infile = [infile]
  outretval = {}
  for curfname in infile:
- outretval[curfname] = StackedCatFileListFiles(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
+ outretval[curfname] = StackedCatFileListFiles(curfname, fmttype, filestart, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend, returnfp=returnfp)
  return outretval
 
 
- def CatFileStringListFiles(instr, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
+ def CatFileStringListFiles(instr, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, newstyle=False, returnfp=False):
  fp = MkTempFile(instr)
- listarrayfiles = CatFileListFiles(instr, "auto", filestart, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, returnfp)
+ listarrayfiles = CatFileListFiles(fp, "auto", filestart, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend, verbose, newstyle, returnfp)
  return listarrayfiles
 
 
  def TarFileListFiles(infile, verbose=False, returnfp=False):
- if(verbose):
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  if(infile == "-"):
  infile = MkTempFile()
  shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
@@ -10820,8 +11140,6 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
 
 
  def ZipFileListFiles(infile, verbose=False, returnfp=False):
- if(verbose):
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  if(infile == "-"):
  infile = MkTempFile()
  shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
@@ -10947,8 +11265,6 @@ if(not rarfile_support):
 
  if(rarfile_support):
  def RarFileListFiles(infile, verbose=False, returnfp=False):
- if(verbose):
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  if(not os.path.exists(infile) or not os.path.isfile(infile)):
  return False
  if(not rarfile.is_rarfile(infile) and not rarfile.is_rarfile_sfx(infile)):
@@ -11076,8 +11392,6 @@ if(not py7zr_support):
 
  if(py7zr_support):
  def SevenZipFileListFiles(infile, verbose=False, returnfp=False):
- if(verbose):
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  if(not os.path.exists(infile) or not os.path.isfile(infile)):
  return False
  lcfi = 0
@@ -11171,8 +11485,6 @@ if(py7zr_support):
 
 
  def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict__, seektoend=False, newstyle=False, returnfp=False):
- if(verbose):
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
  formatspecs = formatspecs[checkcompressfile]
@@ -11199,44 +11511,6 @@ def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compress
  outarray, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
  return listarrayfiles
 
- """
- PyNeoFile compatibility layer
- """
-
- def make_empty_file_pointer_neo(fp, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
- return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)
-
- def make_empty_archive_file_pointer_neo(fp, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
- return make_empty_file_pointer_neo(fp, fmttype, checksumtype, formatspecs, encoding)
-
- def make_empty_file_neo(outfile=None, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
- return MakeEmptyFile(outfile, fmttype, "auto", False, None, compressionlistalt, checksumtype, formatspecs, returnfp)
-
- def make_empty_archive_file_neo(outfile=None, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
- return make_empty_file_neo(outfile, fmttype, checksumtype, formatspecs, encoding, returnfp)
-
- def pack_neo(infiles, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["md5", "md5", "md5", "md5"], encoding="UTF-8", compression="auto", compression_level=None, returnfp=False):
- return PackCatFile(infiles, outfile, False, "auto", compression, False, compression_level, compressionlistalt, False, checksumtypes, [], {}, formatspecs, False, returnfp)
-
- def archive_to_array_neo(infile, formatspecs=__file_format_multi_dict__, listonly=False, skipchecksum=False, uncompress=True, returnfp=False):
- return CatFileToArray(infile, "auto", 0, 0, 0, listonly, True, uncompress, skipchecksum, formatspecs, False, returnfp)
-
- def unpack_neo(infile, outdir='.', formatspecs=__file_format_multi_dict__, skipchecksum=False, uncompress=True, returnfp=False):
- return UnPackCatFile(infile, outdir, False, 0, 0, skipchecksum, formatspecs, True, True, False, False, returnfp)
-
- def repack_neo(infile, outfile=None, formatspecs=__file_format_dict__, checksumtypes=["md5", "md5", "md5", "md5"], compression="auto", compression_level=None, returnfp=False):
- return RePackCatFile(infile, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)
-
- def validate_neo(infile, formatspecs=__file_format_multi_dict__, verbose=False, return_details=False, returnfp=False):
- return CatFileValidate(infile, "auto", formatspecs, False, verbose, returnfp)
-
- def listfiles_neo(infile, formatspecs=__file_format_multi_dict__, advanced=False, include_dirs=True, returnfp=False):
- return CatFileListFiles(infile, "auto", 0, 0, False, formatspecs, False, True, advanced, returnfp)
-
- def convert_foreign_to_neo(infile, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["md5", "md5", "md5", "md5"], compression="auto", compression_level=None, returnfp=False):
- intmp = InFileToArray(infile, 0, 0, 0, False, True, False, formatspecs, False, False)
- return RePackCatFile(intmp, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)
-
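
The entire PyNeoFile compatibility layer is removed in 0.26.0 with no replacement shims. A hedged migration sketch for former callers of the snake_case wrappers, based on the wrapper bodies removed above and the 0.26.0 signatures shown earlier (file names are illustrative):

    import pycatfile

    # validate_neo(infile) becomes:
    pycatfile.CatFileValidate("archive.cat", "auto")
    # archive_to_array_neo(infile) becomes:
    pycatfile.CatFileToArray("archive.cat", "auto")
    # unpack_neo(infile, outdir) becomes:
    pycatfile.UnPackCatFile("archive.cat", outdir=".")
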
  def detect_cwd(ftp, file_dir):
  """
  Test whether cwd into file_dir works. Returns True if it does,