PyArchiveFile 0.25.0__py3-none-any.whl → 0.26.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pyarchivefile.py CHANGED
@@ -14,7 +14,7 @@
  Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
  Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski

- $FileInfo: pyarchivefile.py - Last Update: 11/3/2025 Ver. 0.25.0 RC 1 - Author: cooldude2k $
+ $FileInfo: pyarchivefile.py - Last Update: 11/12/2025 Ver. 0.26.0 RC 1 - Author: cooldude2k $

  '''

  from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -416,9 +416,13 @@ def is_only_nonprintable(var):
  __file_format_multi_dict__ = {}
  __file_format_default__ = "ArchiveFile"
  __include_defaults__ = True
- __use_inmemfile__ = True
+ __use_inmem__ = True
+ __use_memfd__ = True
  __use_spoolfile__ = False
  __use_spooldir__ = tempfile.gettempdir()
+ __use_new_style__ = True
+ __use_advanced_list__ = True
+ __use_alt_inode__ = False
  BYTES_PER_KiB = 1024
  BYTES_PER_MiB = 1024 * BYTES_PER_KiB
  # Spool: not tiny, but won’t blow up RAM if many are in use
@@ -462,9 +466,13 @@ if __use_ini_file__ and os.path.exists(__config_file__):
  __file_format_default__ = decode_unicode_escape(config.get('config', 'default'))
  __program_name__ = decode_unicode_escape(config.get('config', 'proname'))
  __include_defaults__ = config.getboolean('config', 'includedef')
- __use_inmemfile__ = config.getboolean('config', 'inmemfile')
+ __use_inmem__ = config.getboolean('config', 'useinmem')
+ __use_memfd__ = config.getboolean('config', 'usememfd')
  __use_spoolfile__ = config.getboolean('config', 'usespoolfile')
  __spoolfile_size__ = config.getint('config', 'spoolfilesize')
+ __use_new_style__ = config.getboolean('config', 'newstyle')
+ __use_advanced_list__ = config.getboolean('config', 'advancedlist')
+ __use_alt_inode__ = config.getboolean('config', 'altinode')
  # Loop through all sections
  for section in config.sections():
  if section == "config":
@@ -472,8 +480,7 @@ if __use_ini_file__ and os.path.exists(__config_file__):

  required_keys = [
  "len", "hex", "ver", "name",
- "magic", "delimiter", "extension",
- "newstyle", "advancedlist", "altinode"
+ "magic", "delimiter", "extension"
  ]

  # Py2+Py3 compatible key presence check
@@ -493,9 +500,6 @@ if __use_ini_file__ and os.path.exists(__config_file__):
  'format_hex': config.get(section, 'hex'),
  'format_delimiter': delim,
  'format_ver': config.get(section, 'ver'),
- 'new_style': config.getboolean(section, 'newstyle'),
- 'use_advanced_list': config.getboolean(section, 'advancedlist'),
- 'use_alt_inode': config.getboolean(section, 'altinode'),
  'format_extension': decode_unicode_escape(config.get(section, 'extension')),
  }
  })
@@ -556,16 +560,19 @@ elif __use_json_file__ and os.path.exists(__config_file__):
  cfg_config = cfg.get('config', {}) or {}
  __file_format_default__ = decode_unicode_escape(_get(cfg_config, 'default', ''))
  __program_name__ = decode_unicode_escape(_get(cfg_config, 'proname', ''))
- __include_defaults__ = _to_bool(_get(cfg_config, 'includedef', False))
- __use_inmemfile__ = _to_bool(_get(cfg_config, 'inmemfile', False))
+ __include_defaults__ = _to_bool(_get(cfg_config, 'includedef', True))
+ __use_inmem__ = _to_bool(_get(cfg_config, 'useinmem', True))
+ __use_memfd__ = _to_bool(_get(cfg_config, 'usememfd', True))
  __use_spoolfile__ = _to_bool(_get(cfg_config, 'usespoolfile', False))
  __spoolfile_size__ = _to_int(_get(cfg_config, 'spoolfilesize', DEFAULT_SPOOL_MAX))
+ __use_new_style__ = _to_bool(_get(cfg_config, 'newstyle', True))
+ __use_advanced_list__ = _to_bool(_get(cfg_config, 'advancedlist', True))
+ __use_alt_inode__ = _to_bool(_get(cfg_config, 'altinode', False))

  # --- iterate format sections (everything except "config") ---
  required_keys = [
  "len", "hex", "ver", "name",
- "magic", "delimiter", "extension",
- "newstyle", "advancedlist", "altinode"
+ "magic", "delimiter", "extension"
  ]

  for section_name, section in cfg.items():
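
For reference, a hypothetical config.json exercising the new global keys read in the hunk above (the key names follow the `_get(cfg_config, ...)` lookups; the values are invented for illustration only):

    {
        "config": {
            "default": "ArchiveFile",
            "proname": "PyArchiveFile",
            "includedef": true,
            "useinmem": true,
            "usememfd": true,
            "usespoolfile": false,
            "spoolfilesize": 5242880,
            "newstyle": true,
            "advancedlist": true,
            "altinode": false
        }
    }

With 0.26.0 the newstyle/advancedlist/altinode switches live in this global "config" section; per-format sections now only need len, hex, ver, name, magic, delimiter and extension.
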
@@ -583,9 +590,6 @@ elif __use_json_file__ and os.path.exists(__config_file__):
  fmt_hex = decode_unicode_escape(_get(section, 'hex', ''))
  fmt_ver = decode_unicode_escape(_get(section, 'ver', ''))
  delim = decode_unicode_escape(_get(section, 'delimiter', ''))
- new_style = _to_bool(_get(section, 'newstyle', False))
- adv_list = _to_bool(_get(section, 'advancedlist', False))
- alt_inode = _to_bool(_get(section, 'altinode', False))
  extension = decode_unicode_escape(_get(section, 'extension', ''))

  # keep your delimiter validation semantics
@@ -600,9 +604,6 @@ elif __use_json_file__ and os.path.exists(__config_file__):
  'format_hex': fmt_hex,
  'format_delimiter': delim,
  'format_ver': fmt_ver,
- 'new_style': new_style,
- 'use_advanced_list': adv_list,
- 'use_alt_inode': alt_inode,
  'format_extension': extension,
  }
  })
@@ -621,6 +622,7 @@ elif __use_json_file__ and not os.path.exists(__config_file__):
  if not __use_ini_file__ and not __include_defaults__:
  __include_defaults__ = True
  if __include_defaults__:
+ # Arc / Neo
  add_format(__file_format_multi_dict__, "ArchiveFile", "ArchiveFile", ".arc", "ArchiveFile")
  add_format(__file_format_multi_dict__, "NeoFile", "NeoFile", ".neo", "NeoFile")

@@ -633,21 +635,18 @@ __file_format_len__ = __file_format_multi_dict__[__file_format_default__]['forma
  __file_format_hex__ = __file_format_multi_dict__[__file_format_default__]['format_hex']
  __file_format_delimiter__ = __file_format_multi_dict__[__file_format_default__]['format_delimiter']
  __file_format_ver__ = __file_format_multi_dict__[__file_format_default__]['format_ver']
- __use_new_style__ = __file_format_multi_dict__[__file_format_default__]['new_style']
- __use_advanced_list__ = __file_format_multi_dict__[__file_format_default__]['use_advanced_list']
- __use_alt_inode__ = __file_format_multi_dict__[__file_format_default__]['use_alt_inode']
  __file_format_extension__ = __file_format_multi_dict__[__file_format_default__]['format_extension']
  __file_format_dict__ = __file_format_multi_dict__[__file_format_default__]
  __project__ = __program_name__
  __program_alt_name__ = __program_name__
  __project_url__ = "https://github.com/GameMaker2k/PyArchiveFile"
  __project_release_url__ = __project_url__+"/releases/latest"
- __version_info__ = (0, 25, 0, "RC 1", 1)
- __version_date_info__ = (2025, 11, 5, "RC 1", 1)
+ __version_info__ = (0, 26, 0, "RC 1", 1)
+ __version_date_info__ = (2025, 11, 12, "RC 1", 1)
  __version_date__ = str(__version_date_info__[0]) + "." + str(
  __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
  __revision__ = __version_info__[3]
- __revision_id__ = "$Id: f83f2300169beab750d0c2947fc497e2a73a3e91 $"
+ __revision_id__ = "$Id: 5871acc6c09340f9594b10109029aa90a9e6c6aa $"
  if(__version_info__[4] is not None):
  __version_date_plusrc__ = __version_date__ + \
  "-" + str(__version_date_info__[4])
@@ -659,6 +658,9 @@ if(__version_info__[3] is not None):
  if(__version_info__[3] is None):
  __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])

+ _logger = logging.getLogger(__project__) # library-style logger
+ _logger.addHandler(logging.NullHandler()) # don't emit logs unless app configures logging
+
  # From: https://stackoverflow.com/a/28568003
  # By Phaxmohdem

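The NullHandler added above follows the standard library-logging convention: the package stays silent until the embedding application configures logging. A minimal opt-in sketch, assuming the default program name "PyArchiveFile":

    import logging

    logging.basicConfig(level=logging.DEBUG)  # enable logging application-wide
    # or target just this library's logger by name:
    logging.getLogger("PyArchiveFile").setLevel(logging.DEBUG)
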
@@ -1028,6 +1030,20 @@ def VerbosePrintOutReturn(dbgtxt, outtype="log", dbgenable=True, dgblevel=20, **
  VerbosePrintOut(dbgtxt, outtype, dbgenable, dgblevel, **kwargs)
  return dbgtxt

+ def to_ns(timestamp):
+ """
+ Convert a second-resolution timestamp (int or float)
+ into a nanosecond timestamp (int) by scaling by 1e9.
+ Works in Python 2 and Python 3.
+ """
+ try:
+ # Convert incoming timestamp to float so it works for int or float
+ seconds = float(timestamp)
+ except (TypeError, ValueError):
+ raise ValueError("Timestamp must be int or float")
+
+ # Multiply by 1e9 to get nanoseconds, then cast to int
+ return int(seconds * 1000000000)

  def _split_posix(name):
  """
@@ -2051,34 +2067,53 @@ def _normalize_initial_data(data, isbytes, encoding, errors=None):


  def MkTempFile(data=None,
- inmem=__use_inmemfile__,
+ inmem=__use_inmem__, usememfd=__use_memfd__,
  isbytes=True,
- prefix="",
+ prefix=__program_name__,
  delete=True,
  encoding="utf-8",
- newline=None, # text mode only; in-memory objects ignore newline semantics
+ newline=None,
+ text_errors="strict",
  dir=None,
  suffix="",
  use_spool=__use_spoolfile__,
+ autoswitch_spool=False,
  spool_max=__spoolfile_size__,
- spool_dir=__use_spooldir__):
+ spool_dir=__use_spooldir__,
+ reset_to_start=True,
+ memfd_name=None,
+ memfd_allow_sealing=False,
+ memfd_flags_extra=0,
+ on_create=None):
  """
  Return a file-like handle with consistent behavior on Py2.7 and Py3.x.

  Storage:
- - inmem=True -> BytesIO (bytes) or StringIO (text)
- - inmem=False, use_spool=True -> SpooledTemporaryFile (binary), optionally TextIOWrapper for text
- - inmem=False, use_spool=False -> NamedTemporaryFile (binary), optionally TextIOWrapper for text
+ - inmem=True, usememfd=True, isbytes=True and memfd available
+ -> memfd-backed anonymous file (binary)
+ - inmem=True, otherwise
+ -> BytesIO (bytes) or StringIO (text)
+ - inmem=False, use_spool=True
+ -> SpooledTemporaryFile (binary), optionally TextIOWrapper for text
+ - inmem=False, use_spool=False
+ -> NamedTemporaryFile (binary), optionally TextIOWrapper for text

  Text vs bytes:
  - isbytes=True -> file expects bytes; 'data' must be bytes-like
- - isbytes=False -> file expects text; 'data' must be text (unicode/str). Newline translation and encoding
- apply only for spooled/named files (not BytesIO/StringIO).
+ - isbytes=False -> file expects text; 'data' must be text (unicode/str). Newline translation and
+ encoding apply only for spooled/named files (not BytesIO/StringIO).

  Notes:
- - On Windows, NamedTemporaryFile(delete=True) keeps the file open and cannot be reopened by other processes.
- Use delete=False if you need to pass the path elsewhere.
- - For text: in-memory StringIO ignores 'newline' (as usual).
+ - On Windows, NamedTemporaryFile(delete=True) keeps the file open and cannot be reopened by
+ other processes. Use delete=False if you need to pass the path elsewhere.
+ - For text: in-memory StringIO ignores 'newline' and 'text_errors' (as usual).
+ - When available, and if usememfd=True, memfd is used only for inmem=True and isbytes=True,
+ providing an anonymous in-memory file descriptor (Linux-only). Text in-memory still uses
+ StringIO to preserve newline semantics.
+ - If autoswitch_spool=True and initial data size exceeds spool_max, in-memory storage is
+ skipped and a spooled file is used instead (if use_spool=True).
+ - If on_create is not None, it is called as on_create(fp, kind) where kind is one of:
+ "memfd", "bytesio", "stringio", "spool", "disk".
  """

  # -- sanitize simple params (avoid None surprises) --
@@ -2110,23 +2145,65 @@ def MkTempFile(data=None,
  else:
  init = None

+ # Size of init for autoswitch; only meaningful for bytes
+ init_len = len(init) if (init is not None and isbytes) else None
+
  # -------- In-memory --------
  if inmem:
- if isbytes:
- f = io.BytesIO(init if init is not None else b"")
- else:
- # newline not enforced for StringIO; matches stdlib semantics
- f = io.StringIO(init if init is not None else "")
- # already positioned at 0 with provided init; ensure rewind for symmetry
- f.seek(0)
- return f
+ # If autoswitch is enabled and data is larger than spool_max, and
+ # spooling is allowed, skip the in-memory branch and fall through
+ # to the spool/disk logic below.
+ if autoswitch_spool and use_spool and init_len is not None and init_len > spool_max:
+ pass # fall through to spool/disk sections
+ else:
+ # Use memfd only for bytes, and only where available (Linux, Python 3.8+)
+ if usememfd and isbytes and hasattr(os, "memfd_create"):
+ name = memfd_name or prefix or "MkTempFile"
+ flags = 0
+ # Close-on-exec is almost always what you want for temps
+ if hasattr(os, "MFD_CLOEXEC"):
+ flags |= os.MFD_CLOEXEC
+ # Optional sealing support if requested and available
+ if memfd_allow_sealing and hasattr(os, "MFD_ALLOW_SEALING"):
+ flags |= os.MFD_ALLOW_SEALING
+ # Extra custom flags (e.g. hugepage flags) if caller wants them
+ if memfd_flags_extra:
+ flags |= memfd_flags_extra
+
+ fd = os.memfd_create(name, flags)
+ # Binary read/write file-like object backed by RAM
+ f = os.fdopen(fd, "w+b")
+
+ if init is not None:
+ f.write(init)
+ if reset_to_start:
+ f.seek(0)
+
+ if on_create is not None:
+ on_create(f, "memfd")
+ return f
+
+ # Fallback: pure Python in-memory objects
+ if isbytes:
+ f = io.BytesIO(init if init is not None else b"")
+ kind = "bytesio"
+ else:
+ # newline/text_errors not enforced for StringIO; matches stdlib semantics
+ f = io.StringIO(init if init is not None else "")
+ kind = "stringio"
+
+ if reset_to_start:
+ f.seek(0)
+
+ if on_create is not None:
+ on_create(f, kind)
+ return f

  # Helper: wrap a binary file into a text file with encoding/newline
  def _wrap_text(handle):
  # For both Py2 & Py3, TextIOWrapper gives consistent newline/encoding behavior
- tw = io.TextIOWrapper(handle, encoding=encoding, newline=newline)
- # Position at start; if we wrote initial data below, we will rewind after writing
- return tw
+ return io.TextIOWrapper(handle, encoding=encoding,
+ newline=newline, errors=text_errors)

  # -------- Spooled (RAM then disk) --------
  if use_spool:
@@ -2134,19 +2211,33 @@ def MkTempFile(data=None,
  bin_mode = "w+b" # read/write, binary
  b = tempfile.SpooledTemporaryFile(max_size=spool_max, mode=bin_mode, dir=spool_dir)
  f = b if isbytes else _wrap_text(b)
+
  if init is not None:
  f.write(init)
+ if reset_to_start:
+ f.seek(0)
+ elif reset_to_start:
  f.seek(0)
+
+ if on_create is not None:
+ on_create(f, "spool")
  return f

  # -------- On-disk temp (NamedTemporaryFile) --------
  # Always create binary file; wrap for text if needed for uniform Py2/3 behavior
- b = tempfile.NamedTemporaryFile(mode="w+b", prefix=prefix, suffix=suffix, dir=dir, delete=delete)
+ b = tempfile.NamedTemporaryFile(mode="w+b", prefix=prefix, suffix=suffix,
+ dir=dir, delete=delete)
  f = b if isbytes else _wrap_text(b)

  if init is not None:
  f.write(init)
+ if reset_to_start:
+ f.seek(0)
+ elif reset_to_start:
  f.seek(0)
+
+ if on_create is not None:
+ on_create(f, "disk")
  return f

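A usage sketch of the reworked MkTempFile, inferred from the signature and docstring above (payload here is a stand-in for caller data; the memfd branch only engages on Linux with Python 3.8+, and other platforms silently fall back to BytesIO):

    # Bytes in memory: memfd-backed where available, BytesIO otherwise.
    fp = MkTempFile(b"hello world", inmem=True)

    # Let oversized payloads skip RAM and go straight to a spooled file.
    fp = MkTempFile(payload, inmem=True, autoswitch_spool=True,
                    use_spool=True, spool_max=16 * BYTES_PER_MiB)

    # Observe which backend was chosen, without changing behavior.
    fp = MkTempFile(b"data", on_create=lambda f, kind: print("backend:", kind))
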
@@ -3666,7 +3757,7 @@ def _bytes_to_int(b):
  # =========================
  # Public checksum API
  # =========================
- def GetHeaderChecksum(inlist=None, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
+ def GetHeaderChecksum(inlist=None, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__, saltkey=None):
  """
  Serialize header fields (list/tuple => joined with delimiter + trailing delimiter;
  or a single field) and compute the requested checksum. Returns lowercase hex.
@@ -3678,15 +3769,30 @@ def GetHeaderChecksum(inlist=None, checksumtype="md5", encodedata=True, formatsp
  if encodedata and not isinstance(hdr_bytes, (bytes, bytearray, memoryview)):
  hdr_bytes = _to_bytes(hdr_bytes)
  hdr_bytes = bytes(hdr_bytes)
-
+ saltkeyval = None
+ if(hasattr(saltkey, "read")):
+ saltkeyval = saltkey.read()
+ if(not isinstance(saltkeyval, bytes) and sys.version_info[0] >= 3):
+ saltkeyval = saltkeyval.encode("UTF-8")
+ elif(isinstance(saltkey, bytes) and sys.version_info[0] >= 3):
+ saltkeyval = saltkey
+ elif(saltkey is not None and os.path.exists(saltkey)):
+ with open(saltkey, "rb") as skfp:
+ saltkeyval = skfp.read()
+ else:
+ saltkey = None
+ if(saltkeyval is None):
+ saltkey = None
  if CheckSumSupport(algo_key, hashlib_guaranteed):
- h = hashlib.new(algo_key)
- h.update(hdr_bytes)
- return h.hexdigest().lower()
+ if(saltkey is None or saltkeyval is None):
+ h = hashlib.new(algo_key, hdr_bytes)
+ else:
+ h = hmac.new(saltkeyval, hdr_bytes, digestmod=algo_key)
+ return h.hexdigest().lower()

  return "0"

- def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
+ def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__, saltkey=None):
  """
  Accepts bytes/str/file-like.
  - Hashlib algos: streamed in 1 MiB chunks.
@@ -3694,13 +3800,29 @@ def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__
  - Falls back to one-shot for non-file-like inputs.
  """
  algo_key = (checksumtype or "md5").lower()
-
+ saltkeyval = None
+ if(hasattr(saltkey, "read")):
+ saltkeyval = saltkey.read()
+ if(not isinstance(saltkeyval, bytes) and sys.version_info[0] >= 3):
+ saltkeyval = saltkeyval.encode("UTF-8")
+ elif(isinstance(saltkey, bytes) and sys.version_info[0] >= 3):
+ saltkeyval = saltkey
+ elif(saltkey is not None and os.path.exists(saltkey)):
+ with open(saltkey, "rb") as skfp:
+ saltkeyval = skfp.read()
+ else:
+ saltkey = None
+ if(saltkeyval is None):
+ saltkey = None
  # file-like streaming
  if hasattr(inbytes, "read"):
  # hashlib

  if CheckSumSupport(algo_key, hashlib_guaranteed):
- h = hashlib.new(algo_key)
+ if(saltkey is None or saltkeyval is None):
+ h = hashlib.new(algo_key)
+ else:
+ h = hmac.new(saltkeyval, digestmod=algo_key)
  while True:
  chunk = inbytes.read(__filebuff_size__)
  if not chunk:
@@ -3721,26 +3843,41 @@ def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__
  # one-shot

  if CheckSumSupport(algo_key, hashlib_guaranteed):
- h = hashlib.new(algo_key)
- h.update(data)
+ if(saltkey is None or saltkeyval is None):
+ h = hashlib.new(algo_key, data)
+ else:
+ h = hmac.new(saltkeyval, data, digestmod=algo_key)
  return h.hexdigest().lower()

  return "0"

- def ValidateHeaderChecksum(inlist=None, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__):
- calc = GetHeaderChecksum(inlist, checksumtype, True, formatspecs)
+ def ValidateHeaderChecksum(inlist=None, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__, saltkey=None):
+ calc = GetHeaderChecksum(inlist, checksumtype, True, formatspecs, saltkey)
  want = (inchecksum or "0").strip().lower()
  if want.startswith("0x"):
  want = want[2:]
- return hmac.compare_digest(want, calc)
+ return CheckChecksums(want, calc)

- def ValidateFileChecksum(infile, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__):
- calc = GetFileChecksum(infile, checksumtype, True, formatspecs)
+ def ValidateFileChecksum(infile, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__, saltkey=None):
+ calc = GetFileChecksum(infile, checksumtype, True, formatspecs, saltkey)
  want = (inchecksum or "0").strip().lower()
  if want.startswith("0x"):
  want = want[2:]
- return hmac.compare_digest(want, calc)
+ return CheckChecksums(want, calc)
+
+ def CheckChecksums(inchecksum, outchecksum):
+ # Normalize as text first
+ want = (inchecksum or "0").strip().lower()
+ calc = (outchecksum or "0").strip().lower()
+
+ if want.startswith("0x"):
+ want = want[2:]

+ # Now force both to bytes
+ calc_b = _to_bytes(calc) # defaults to utf-8, strict
+ want_b = _to_bytes(want)
+
+ return hmac.compare_digest(want_b, calc_b)

  def MajorMinorToDev(major, minor):
  """
@@ -4109,11 +4246,11 @@ def ReadFileHeaderDataWoSize(fp, delimiter=_default_delim(None)):
  return first_two + headerdata


- def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
  if(not hasattr(fp, "read")):
  return False
  delimiter = formatspecs['format_delimiter']
- if(formatspecs['new_style']):
+ if(__use_new_style__):
  HeaderOut = ReadFileHeaderDataBySize(fp, delimiter)
  else:
  HeaderOut = ReadFileHeaderDataWoSize(fp, delimiter)
@@ -4201,15 +4338,14 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
  except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
  pass
  fp.seek(len(delimiter), 1)
- jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
- if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
+ jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+ if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
  VerbosePrintOut("File JSON Data Checksum Error with file " +
  fname + " at offset " + str(fheaderstart))
  VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
  return False
  fp.seek(len(delimiter), 1)
- newfcs = GetHeaderChecksum(
- HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs)
+ newfcs = GetHeaderChecksum(HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs, saltkey)
  HeaderOut.append(fjsoncontent)
  if(fcs != newfcs and not skipchecksum):
  VerbosePrintOut("File Header Checksum Error with file " +
@@ -4228,10 +4364,9 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
  else:
  fp.seek(fcsize, 1)
  fcontents.seek(0, 0)
- newfccs = GetFileChecksum(
- fcontents, HeaderOut[-3].lower(), False, formatspecs)
+ newfccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
  fcontents.seek(0, 0)
- if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
+ if(not CheckChecksums(fccs, newfccs) and not skipchecksum and not listonly):
  VerbosePrintOut("File Content Checksum Error with file " +
  fname + " at offset " + str(fcontentstart))
  VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4268,12 +4403,12 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
  return HeaderOut


- def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
  if(not hasattr(fp, "read")):
  return False
  delimiter = formatspecs['format_delimiter']
  fheaderstart = fp.tell()
- if(formatspecs['new_style']):
+ if(__use_new_style__):
  HeaderOut = ReadFileHeaderDataBySize(fp, delimiter)
  else:
  HeaderOut = ReadFileHeaderDataWoSize(fp, delimiter)
@@ -4291,40 +4426,51 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
  fbasedir = os.path.dirname(fname)
  flinkname = HeaderOut[6]
  fsize = int(HeaderOut[7], 16)
- fatime = int(HeaderOut[8], 16)
- fmtime = int(HeaderOut[9], 16)
- fctime = int(HeaderOut[10], 16)
- fbtime = int(HeaderOut[11], 16)
- fmode = int(HeaderOut[12], 16)
+ fblksize = int(HeaderOut[8], 16)
+ fblocks = int(HeaderOut[9], 16)
+ fflags = int(HeaderOut[10], 16)
+ fatime = int(HeaderOut[11], 16)
+ fmtime = int(HeaderOut[12], 16)
+ fctime = int(HeaderOut[13], 16)
+ fbtime = int(HeaderOut[14], 16)
+ fmode = int(HeaderOut[15], 16)
  fchmode = stat.S_IMODE(fmode)
  ftypemod = stat.S_IFMT(fmode)
- fwinattributes = int(HeaderOut[13], 16)
- fcompression = HeaderOut[14]
- fcsize = int(HeaderOut[15], 16)
- fuid = int(HeaderOut[16], 16)
- funame = HeaderOut[17]
- fgid = int(HeaderOut[18], 16)
- fgname = HeaderOut[19]
- fid = int(HeaderOut[20], 16)
- finode = int(HeaderOut[21], 16)
- flinkcount = int(HeaderOut[22], 16)
- fdev = int(HeaderOut[23], 16)
- fdev_minor = int(HeaderOut[24], 16)
- fdev_major = int(HeaderOut[25], 16)
- fseeknextfile = HeaderOut[26]
- fjsontype = HeaderOut[27]
- fjsonlen = int(HeaderOut[28], 16)
- fjsonsize = int(HeaderOut[29], 16)
- fjsonchecksumtype = HeaderOut[30]
- fjsonchecksum = HeaderOut[31]
- fextrasize = int(HeaderOut[32], 16)
- fextrafields = int(HeaderOut[33], 16)
+ fwinattributes = int(HeaderOut[16], 16)
+ fcompression = HeaderOut[17]
+ fcsize = int(HeaderOut[18], 16)
+ fuid = int(HeaderOut[19], 16)
+ funame = HeaderOut[20]
+ fgid = int(HeaderOut[21], 16)
+ fgname = HeaderOut[22]
+ fid = int(HeaderOut[23], 16)
+ finode = int(HeaderOut[24], 16)
+ flinkcount = int(HeaderOut[25], 16)
+ fdev = int(HeaderOut[26], 16)
+ frdev = int(HeaderOut[27], 16)
+ fseeknextfile = HeaderOut[28]
+ fjsontype = HeaderOut[29]
+ fjsonlen = int(HeaderOut[30], 16)
+ fjsonsize = int(HeaderOut[31], 16)
+ fjsonchecksumtype = HeaderOut[32]
+ fjsonchecksum = HeaderOut[33]
+ fextrasize = int(HeaderOut[34], 16)
+ fextrafields = int(HeaderOut[35], 16)
  fextrafieldslist = []
- extrastart = 34
+ extrastart = 36
  extraend = extrastart + fextrafields
  while(extrastart < extraend):
  fextrafieldslist.append(HeaderOut[extrastart])
  extrastart = extrastart + 1
+ fvendorfieldslist = []
+ fvendorfields = 0
+ if(len(HeaderOut)>extraend):
+ extrastart = extraend
+ extraend = len(HeaderOut) - 4
+ while(extrastart < extraend):
+ fvendorfieldslist.append(HeaderOut[extrastart])
+ extrastart = extrastart + 1
+ fvendorfields = fvendorfields + 1
  if(fextrafields==1):
  try:
@@ -4402,16 +4548,15 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
  pass
  fp.seek(len(delimiter), 1)
  fjend = fp.tell() - 1
- jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
- if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
+ jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+ if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
  VerbosePrintOut("File JSON Data Checksum Error with file " +
  fname + " at offset " + str(fheaderstart))
  VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
  return False
  fcs = HeaderOut[-2].lower()
  fccs = HeaderOut[-1].lower()
- newfcs = GetHeaderChecksum(
- HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs)
+ newfcs = GetHeaderChecksum(HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs, saltkey)
  if(fcs != newfcs and not skipchecksum):
  VerbosePrintOut("File Header Checksum Error with file " +
  fname + " at offset " + str(fheaderstart))
@@ -4434,10 +4579,9 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
  fp.seek(fcsize, 1)
  pyhascontents = False
  fcontents.seek(0, 0)
- newfccs = GetFileChecksum(
- fcontents, HeaderOut[-3].lower(), False, formatspecs)
+ newfccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
  fcontents.seek(0, 0)
- if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
+ if(not CheckChecksums(fccs, newfccs) and not skipchecksum and not listonly):
  VerbosePrintOut("File Content Checksum Error with file " +
  fname + " at offset " + str(fcontentstart))
  VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4454,8 +4598,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
  shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
  cfcontents.close()
  fcontents.seek(0, 0)
- fccs = GetFileChecksum(
- fcontents, HeaderOut[-3].lower(), False, formatspecs)
+ fccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
  fcontentend = fp.tell()
  if(re.findall("^\\+([0-9]+)", fseeknextfile)):
  fseeknextasnum = int(fseeknextfile.replace("+", ""))
@@ -4477,17 +4620,17 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
  fcontents.seek(0, 0)
  if(not contentasfile):
  fcontents = fcontents.read()
- outlist = {'fheadersize': fheadsize, 'fhstart': fheaderstart, 'fhend': fhend, 'ftype': ftype, 'fencoding': fencoding, 'fcencoding': fcencoding, 'fname': fname, 'fbasedir': fbasedir, 'flinkname': flinkname, 'fsize': fsize, 'fatime': fatime, 'fmtime': fmtime, 'fctime': fctime, 'fbtime': fbtime, 'fmode': fmode, 'fchmode': fchmode, 'ftypemod': ftypemod, 'fwinattributes': fwinattributes, 'fcompression': fcompression, 'fcsize': fcsize, 'fuid': fuid, 'funame': funame, 'fgid': fgid, 'fgname': fgname, 'finode': finode, 'flinkcount': flinkcount,
- 'fdev': fdev, 'fminor': fdev_minor, 'fmajor': fdev_major, 'fseeknextfile': fseeknextfile, 'fheaderchecksumtype': HeaderOut[-4], 'fjsonchecksumtype': fjsonchecksumtype, 'fcontentchecksumtype': HeaderOut[-3], 'fnumfields': fnumfields + 2, 'frawheader': HeaderOut, 'fextrafields': fextrafields, 'fextrafieldsize': fextrasize, 'fextradata': fextrafieldslist, 'fjsontype': fjsontype, 'fjsonlen': fjsonlen, 'fjsonsize': fjsonsize, 'fjsonrawdata': fjsonrawcontent, 'fjsondata': fjsoncontent, 'fjstart': fjstart, 'fjend': fjend, 'fheaderchecksum': fcs, 'fjsonchecksum': fjsonchecksum, 'fcontentchecksum': fccs, 'fhascontents': pyhascontents, 'fcontentstart': fcontentstart, 'fcontentend': fcontentend, 'fcontentasfile': contentasfile, 'fcontents': fcontents}
+ outlist = {'fheadersize': fheadsize, 'fhstart': fheaderstart, 'fhend': fhend, 'ftype': ftype, 'fencoding': fencoding, 'fcencoding': fcencoding, 'fname': fname, 'fbasedir': fbasedir, 'flinkname': flinkname, 'fsize': fsize, 'fblksize': fblksize, 'fblocks': fblocks, 'fflags': fflags, 'fatime': fatime, 'fmtime': fmtime, 'fctime': fctime, 'fbtime': fbtime, 'fmode': fmode, 'fchmode': fchmode, 'ftypemod': ftypemod, 'fwinattributes': fwinattributes, 'fcompression': fcompression, 'fcsize': fcsize, 'fuid': fuid, 'funame': funame, 'fgid': fgid, 'fgname': fgname, 'finode': finode, 'flinkcount': flinkcount,
+ 'fdev': fdev, 'frdev': frdev, 'fseeknextfile': fseeknextfile, 'fheaderchecksumtype': HeaderOut[-4], 'fjsonchecksumtype': fjsonchecksumtype, 'fcontentchecksumtype': HeaderOut[-3], 'fnumfields': fnumfields + 2, 'frawheader': HeaderOut, 'fvendorfields': fvendorfields, 'fvendordata': fvendorfieldslist, 'fextrafields': fextrafields, 'fextrafieldsize': fextrasize, 'fextradata': fextrafieldslist, 'fjsontype': fjsontype, 'fjsonlen': fjsonlen, 'fjsonsize': fjsonsize, 'fjsonrawdata': fjsonrawcontent, 'fjsondata': fjsoncontent, 'fjstart': fjstart, 'fjend': fjend, 'fheaderchecksum': fcs, 'fjsonchecksum': fjsonchecksum, 'fcontentchecksum': fccs, 'fhascontents': pyhascontents, 'fcontentstart': fcontentstart, 'fcontentend': fcontentend, 'fcontentasfile': contentasfile, 'fcontents': fcontents}
  return outlist


- def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
  if(not hasattr(fp, "read")):
  return False
  delimiter = formatspecs['format_delimiter']
  fheaderstart = fp.tell()
- if(formatspecs['new_style']):
+ if(__use_new_style__):
  HeaderOut = ReadFileHeaderDataBySize(fp, delimiter)
  else:
  HeaderOut = ReadFileHeaderDataWoSize(fp, delimiter)
@@ -4505,36 +4648,38 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
  fbasedir = os.path.dirname(fname)
  flinkname = HeaderOut[6]
  fsize = int(HeaderOut[7], 16)
- fatime = int(HeaderOut[8], 16)
- fmtime = int(HeaderOut[9], 16)
- fctime = int(HeaderOut[10], 16)
- fbtime = int(HeaderOut[11], 16)
- fmode = int(HeaderOut[12], 16)
+ fblksize = int(HeaderOut[8], 16)
+ fblocks = int(HeaderOut[9], 16)
+ fflags = int(HeaderOut[10], 16)
+ fatime = int(HeaderOut[11], 16)
+ fmtime = int(HeaderOut[12], 16)
+ fctime = int(HeaderOut[13], 16)
+ fbtime = int(HeaderOut[14], 16)
+ fmode = int(HeaderOut[15], 16)
  fchmode = stat.S_IMODE(fmode)
  ftypemod = stat.S_IFMT(fmode)
- fwinattributes = int(HeaderOut[13], 16)
- fcompression = HeaderOut[14]
- fcsize = int(HeaderOut[15], 16)
- fuid = int(HeaderOut[16], 16)
- funame = HeaderOut[17]
- fgid = int(HeaderOut[18], 16)
- fgname = HeaderOut[19]
- fid = int(HeaderOut[20], 16)
- finode = int(HeaderOut[21], 16)
- flinkcount = int(HeaderOut[22], 16)
- fdev = int(HeaderOut[23], 16)
- fdev_minor = int(HeaderOut[24], 16)
- fdev_major = int(HeaderOut[25], 16)
- fseeknextfile = HeaderOut[26]
- fjsontype = HeaderOut[27]
- fjsonlen = int(HeaderOut[28], 16)
- fjsonsize = int(HeaderOut[29], 16)
- fjsonchecksumtype = HeaderOut[30]
- fjsonchecksum = HeaderOut[31]
- fextrasize = int(HeaderOut[32], 16)
- fextrafields = int(HeaderOut[33], 16)
+ fwinattributes = int(HeaderOut[16], 16)
+ fcompression = HeaderOut[17]
+ fcsize = int(HeaderOut[18], 16)
+ fuid = int(HeaderOut[19], 16)
+ funame = HeaderOut[20]
+ fgid = int(HeaderOut[21], 16)
+ fgname = HeaderOut[22]
+ fid = int(HeaderOut[23], 16)
+ finode = int(HeaderOut[24], 16)
+ flinkcount = int(HeaderOut[25], 16)
+ fdev = int(HeaderOut[26], 16)
+ frdev = int(HeaderOut[27], 16)
+ fseeknextfile = HeaderOut[28]
+ fjsontype = HeaderOut[29]
+ fjsonlen = int(HeaderOut[30], 16)
+ fjsonsize = int(HeaderOut[31], 16)
+ fjsonchecksumtype = HeaderOut[32]
+ fjsonchecksum = HeaderOut[33]
+ fextrasize = int(HeaderOut[34], 16)
+ fextrafields = int(HeaderOut[35], 16)
  fextrafieldslist = []
- extrastart = 34
+ extrastart = 36
  extraend = extrastart + fextrafields
  while(extrastart < extraend):
  fextrafieldslist.append(HeaderOut[extrastart])
@@ -4614,16 +4759,15 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
  except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
  pass
  fp.seek(len(delimiter), 1)
- jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
- if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
+ jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+ if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
  VerbosePrintOut("File JSON Data Checksum Error with file " +
  fname + " at offset " + str(fheaderstart))
  VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
  return False
  fcs = HeaderOut[-2].lower()
  fccs = HeaderOut[-1].lower()
- newfcs = GetHeaderChecksum(
- HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs)
+ newfcs = GetHeaderChecksum(HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs, saltkey)
  if(fcs != newfcs and not skipchecksum):
  VerbosePrintOut("File Header Checksum Error with file " +
  fname + " at offset " + str(fheaderstart))
@@ -4646,9 +4790,8 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
  fp.seek(fcsize, 1)
  pyhascontents = False
  fcontents.seek(0, 0)
- newfccs = GetFileChecksum(
- fcontents, HeaderOut[-3].lower(), False, formatspecs)
- if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
+ newfccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
+ if(not CheckChecksums(fccs, newfccs) and not skipchecksum and not listonly):
  VerbosePrintOut("File Content Checksum Error with file " +
  fname + " at offset " + str(fcontentstart))
  VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4665,8 +4808,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
  shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
  cfcontents.close()
  fcontents.seek(0, 0)
- fccs = GetFileChecksum(
- fcontents, HeaderOut[-3].lower(), False, formatspecs)
+ fccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
  fcontentend = fp.tell()
  if(re.findall("^\\+([0-9]+)", fseeknextfile)):
  fseeknextasnum = int(fseeknextfile.replace("+", ""))
@@ -4688,12 +4830,12 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
  fcontents.seek(0, 0)
  if(not contentasfile):
  fcontents = fcontents.read()
- outlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize, fuid, funame, fgid, fgname, fid,
- finode, flinkcount, fdev, fdev_minor, fdev_major, fseeknextfile, fjsoncontent, fextrafieldslist, HeaderOut[-4], HeaderOut[-3], fcontents]
+ outlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize, fuid, funame, fgid, fgname, fid,
+ finode, flinkcount, fdev, frdev, fseeknextfile, fjsoncontent, fextrafieldslist, HeaderOut[-4], HeaderOut[-3], fcontents]
  return outlist


- def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
  if(not hasattr(fp, "read")):
  return False
  delimiter = formatspecs['format_delimiter']
@@ -4713,7 +4855,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
  return False
  if(formdel != formatspecs['format_delimiter']):
  return False
- if(formatspecs['new_style']):
+ if(__use_new_style__):
  inheader = ReadFileHeaderDataBySize(
  fp, formatspecs['format_delimiter'])
  else:
@@ -4721,20 +4863,42 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
  fp, formatspecs['format_delimiter'])
  fprechecksumtype = inheader[-2]
  fprechecksum = inheader[-1]
- headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
- newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
+ headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs, saltkey)
+ newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
  if(not headercheck and not skipchecksum):
  VerbosePrintOut(
  "File Header Checksum Error with file at offset " + str(0))
  VerbosePrintOut("'" + fprechecksum + "' != " +
  "'" + newfcs + "'")
  return False
- fnumfiles = int(inheader[4], 16)
+ fnumfiles = int(inheader[8], 16)
+ outfseeknextfile = inheader[9]
+ fjsonsize = int(inheader[12], 16)
+ fjsonchecksumtype = inheader[13]
+ fjsonchecksum = inheader[14]
+ fp.read(fjsonsize)
+ # Next seek directive
+ if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
+ fseeknextasnum = int(outfseeknextfile.replace("+", ""))
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 1)
+ elif(re.findall(r"^\-([0-9]+)", outfseeknextfile)):
+ fseeknextasnum = int(outfseeknextfile)
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 1)
+ elif(re.findall(r"^([0-9]+)", outfseeknextfile)):
+ fseeknextasnum = int(outfseeknextfile)
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 0)
+ else:
+ return False
  countnum = 0
  flist = []
  while(countnum < fnumfiles):
- HeaderOut = ReadFileHeaderDataWithContent(
- fp, listonly, uncompress, skipchecksum, formatspecs)
+ HeaderOut = ReadFileHeaderDataWithContent(fp, listonly, uncompress, skipchecksum, formatspecs, saltkey)
  if(len(HeaderOut) == 0):
  break
  flist.append(HeaderOut)
@@ -4742,7 +4906,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
  return flist


- def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
+ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None, seektoend=False):
  if(not hasattr(fp, "read")):
  return False
  delimiter = formatspecs['format_delimiter']
@@ -4762,16 +4926,16 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
  return False
  if(formdel != formatspecs['format_delimiter']):
  return False
- if(formatspecs['new_style']):
+ if(__use_new_style__):
  inheader = ReadFileHeaderDataBySize(
  fp, formatspecs['format_delimiter'])
  else:
  inheader = ReadFileHeaderDataWoSize(
  fp, formatspecs['format_delimiter'])
- fnumextrafieldsize = int(inheader[6], 16)
- fnumextrafields = int(inheader[7], 16)
+ fnumextrafieldsize = int(inheader[15], 16)
+ fnumextrafields = int(inheader[16], 16)
  fextrafieldslist = []
- extrastart = 8
+ extrastart = 17
  extraend = extrastart + fnumextrafields
  while(extrastart < extraend):
  fextrafieldslist.append(inheader[extrastart])
@@ -4785,17 +4949,126 @@ elif __use_json_file__ and os.path.exists(__config_file__):
  fextrafieldslist = json.loads(fextrafieldslist[0])
  except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
  pass
+ fvendorfieldslist = []
+ fvendorfields = 0
+ if(len(inheader)>extraend):
+ extrastart = extraend
+ extraend = len(inheader) - 2
+ while(extrastart < extraend):
+ fvendorfieldslist.append(inheader[extrastart])
+ extrastart = extrastart + 1
+ fvendorfields = fvendorfields + 1
  formversion = re.findall("([\\d]+)", formstring)
  fheadsize = int(inheader[0], 16)
  fnumfields = int(inheader[1], 16)
- fhencoding = inheader[2]
- fostype = inheader[3]
- fpythontype = inheader[4]
- fnumfiles = int(inheader[5], 16)
+ fheadctime = int(inheader[2], 16)
+ fheadmtime = int(inheader[3], 16)
+ fhencoding = inheader[4]
+ fostype = inheader[5]
+ fpythontype = inheader[6]
+ fprojectname = inheader[7]
+ fnumfiles = int(inheader[8], 16)
+ fseeknextfile = inheader[9]
+ fjsontype = inheader[10]
+ fjsonlen = int(inheader[11], 16)
+ fjsonsize = int(inheader[12], 16)
+ fjsonchecksumtype = inheader[13]
+ fjsonchecksum = inheader[14]
+ fjsoncontent = {}
+ fjstart = fp.tell()
+ if(fjsontype=="json"):
+ fjsoncontent = {}
+ fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+ if(fjsonsize > 0):
+ try:
+ fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+ fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
+ except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+ try:
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = json.loads(fprejsoncontent)
+ except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+ fprejsoncontent = ""
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = {}
+ else:
+ fprejsoncontent = ""
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = {}
+ elif(testyaml and fjsontype == "yaml"):
+ fjsoncontent = {}
+ fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+ if (fjsonsize > 0):
+ try:
+ # try base64 → utf-8 → YAML
+ fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+ fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+ except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
+ try:
+ # fall back to treating the bytes as plain text YAML
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+ except (UnicodeDecodeError, yaml.YAMLError):
+ # final fallback: empty
+ fprejsoncontent = ""
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = {}
+ else:
+ fprejsoncontent = ""
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = {}
+ elif(not testyaml and fjsontype == "yaml"):
+ fjsoncontent = {}
+ fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+ fprejsoncontent = ""
+ fjsonrawcontent = fprejsoncontent
+ elif(fjsontype=="list"):
+ fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+ flisttmp = MkTempFile()
+ flisttmp.write(fprejsoncontent.encode())
+ flisttmp.seek(0)
+ fjsoncontent = ReadFileHeaderData(flisttmp, fjsonlen, delimiter)
+ flisttmp.close()
+ fjsonrawcontent = fjsoncontent
+ if(fjsonlen==1):
+ try:
+ fjsonrawcontent = base64.b64decode(fjsoncontent[0]).decode("UTF-8")
+ fjsoncontent = json.loads(base64.b64decode(fjsoncontent[0]).decode("UTF-8"))
+ fjsonlen = len(fjsoncontent)
+ except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+ try:
+ fjsonrawcontent = fjsoncontent[0]
+ fjsoncontent = json.loads(fjsoncontent[0])
+ except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+ pass
+ fjend = fp.tell()
+ if(re.findall("^\\+([0-9]+)", fseeknextfile)):
+ fseeknextasnum = int(fseeknextfile.replace("+", ""))
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 1)
+ elif(re.findall("^\\-([0-9]+)", fseeknextfile)):
+ fseeknextasnum = int(fseeknextfile)
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 1)
+ elif(re.findall("^([0-9]+)", fseeknextfile)):
+ fseeknextasnum = int(fseeknextfile)
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 0)
+ else:
+ return False
+ jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+ if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
+ VerbosePrintOut("Archive JSON Data Checksum Error" +
+ " at offset " + str(fjstart))
+ VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
+ return False
  fprechecksumtype = inheader[-2]
  fprechecksum = inheader[-1]
- headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
- newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
+ headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs, saltkey)
+ newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
  if(not headercheck and not skipchecksum):
  VerbosePrintOut(
  "File Header Checksum Error with file at offset " + str(0))
@@ -4804,7 +5077,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
  return False
  formversions = re.search('(.*?)(\\d+)', formstring).groups()
  fcompresstype = ""
- outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fimptype': fpythontype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
+ outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fmtime': fheadmtime, 'fctime': fheadctime, 'fversion': formversions[1], 'fostype': fostype, 'fprojectname': fprojectname, 'fimptype': fpythontype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fseeknextfile': fseeknextfile, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'fjsonchecksumtype': fjsonchecksumtype, 'fjsontype': fjsontype, 'fjsonlen': fjsonlen, 'fjsonsize': fjsonsize, 'fjsonrawdata': fjsonrawcontent, 'fjsondata': fjsoncontent, 'fjstart': fjstart, 'fjend': fjend, 'fjsonchecksum': fjsonchecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'fvendorfields': fvendorfields, 'fvendordata': fvendorfieldslist, 'ffilelist': []}
  if (seekstart < 0) or (seekstart > fnumfiles):
  seekstart = 0
  if (seekend == 0) or (seekend > fnumfiles) or (seekend < seekstart):
@@ -4831,16 +5104,15 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
  prefjsonchecksum = preheaderdata[31]
  prejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
  fp.seek(len(delimiter), 1)
- prejsonfcs = GetFileChecksum(prejsoncontent, prefjsonchecksumtype, True, formatspecs)
- if(not hmac.compare_digest(prefjsonchecksum, prejsonfcs) and not skipchecksum):
+ prejsonfcs = GetFileChecksum(prejsoncontent, prefjsonchecksumtype, True, formatspecs, saltkey)
+ if(not CheckChecksums(prefjsonchecksum, prejsonfcs) and not skipchecksum):
  VerbosePrintOut("File JSON Data Checksum Error with file " +
  prefname + " at offset " + str(prefhstart))
  VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
  return False
- prenewfcs = GetHeaderChecksum(
- preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
+ prenewfcs = GetHeaderChecksum(preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs, saltkey)
  prefcs = preheaderdata[-2]
- if(not hmac.compare_digest(prefcs, prenewfcs) and not skipchecksum):
+ if(not CheckChecksums(prefcs, prenewfcs) and not skipchecksum):
  VerbosePrintOut("File Header Checksum Error with file " +
  prefname + " at offset " + str(prefhstart))
  VerbosePrintOut("'" + prefcs + "' != " +
@@ -4855,11 +5127,10 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
  if(prefsize > 0):
  prefcontents.write(fp.read(prefsize))
  prefcontents.seek(0, 0)
- prenewfccs = GetFileChecksum(
- prefcontents, preheaderdata[-3].lower(), False, formatspecs)
+ prenewfccs = GetFileChecksum(prefcontents, preheaderdata[-3].lower(), False, formatspecs, saltkey)
  prefccs = preheaderdata[-1]
  pyhascontents = True
- if(not hmac.compare_digest(prefccs, prenewfccs) and not skipchecksum):
+ if(not CheckChecksums(prefccs, prenewfccs) and not skipchecksum):
  VerbosePrintOut("File Content Checksum Error with file " +
  prefname + " at offset " + str(prefcontentstart))
  VerbosePrintOut("'" + prefccs +
@@ -4886,8 +5157,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
  realidnum = 0
  countnum = seekstart
  while (fp.tell() < CatSizeEnd) if seektoend else (countnum < seekend):
- HeaderOut = ReadFileHeaderDataWithContentToArray(
- fp, listonly, contentasfile, uncompress, skipchecksum, formatspecs)
+ HeaderOut = ReadFileHeaderDataWithContentToArray(fp, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey)
  if(len(HeaderOut) == 0):
  break
  HeaderOut.update({'fid': realidnum, 'fidalt': realidnum})
@@ -4898,7 +5168,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
  return outlist
 
 
- def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
+ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None, seektoend=False):
  if(not hasattr(fp, "read")):
  return False
  delimiter = formatspecs['format_delimiter']
@@ -4918,16 +5188,16 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
  return False
  if(formdel != formatspecs['format_delimiter']):
  return False
- if(formatspecs['new_style']):
+ if(__use_new_style__):
  inheader = ReadFileHeaderDataBySize(
  fp, formatspecs['format_delimiter'])
  else:
  inheader = ReadFileHeaderDataWoSize(
  fp, formatspecs['format_delimiter'])
- fnumextrafieldsize = int(inheader[6], 16)
- fnumextrafields = int(inheader[7], 16)
+ fnumextrafieldsize = int(inheader[15], 16)
+ fnumextrafields = int(inheader[16], 16)
  fextrafieldslist = []
- extrastart = 8
+ extrastart = 17
  extraend = extrastart + fnumextrafields
  while(extrastart < extraend):
  fextrafieldslist.append(inheader[extrastart])
@@ -4944,14 +5214,44 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
  formversion = re.findall("([\\d]+)", formstring)
  fheadsize = int(inheader[0], 16)
  fnumfields = int(inheader[1], 16)
- fhencoding = inheader[2]
- fostype = inheader[3]
- fpythontype = inheader[4]
- fnumfiles = int(inheader[5], 16)
+ fnumfiles = int(inheader[8], 16)
+ fseeknextfile = inheader[9]
+ fjsontype = int(inheader[10], 16)
+ fjsonlen = int(inheader[11], 16)
+ fjsonsize = int(inheader[12], 16)
+ fjsonchecksumtype = inheader[13]
+ fjsonchecksum = inheader[14]
+ fjsoncontent = {}
+ fjstart = fp.tell()
+ fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+ fjend = fp.tell()
+ if(re.findall("^\\+([0-9]+)", fseeknextfile)):
+ fseeknextasnum = int(fseeknextfile.replace("+", ""))
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 1)
+ elif(re.findall("^\\-([0-9]+)", fseeknextfile)):
+ fseeknextasnum = int(fseeknextfile)
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 1)
+ elif(re.findall("^([0-9]+)", fseeknextfile)):
+ fseeknextasnum = int(fseeknextfile)
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 0)
+ else:
+ return False
+ jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+ if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
+ VerbosePrintOut("File JSON Data Checksum Error with file " +
+ fname + " at offset " + str(fheaderstart))
+ VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
+ return False
  fprechecksumtype = inheader[-2]
  fprechecksum = inheader[-1]
- headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
- newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
+ headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs, saltkey)
+ newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
  if(not headercheck and not skipchecksum):
  VerbosePrintOut(
  "File Header Checksum Error with file at offset " + str(0))
@@ -4970,7 +5270,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
  il = 0
  while(il < seekstart):
  prefhstart = fp.tell()
- if(formatspecs['new_style']):
+ if(__use_new_style__):
  preheaderdata = ReadFileHeaderDataBySize(
  fp, formatspecs['format_delimiter'])
  else:
@@ -4992,16 +5292,15 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
  prefjsonchecksum = preheaderdata[31]
  prefprejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
  fp.seek(len(delimiter), 1)
- prejsonfcs = GetFileChecksum(prefprejsoncontent, prefjsonchecksumtype, True, formatspecs)
- if(not hmac.compare_digest(prefjsonchecksum, prejsonfcs) and not skipchecksum):
+ prejsonfcs = GetFileChecksum(prefprejsoncontent, prefjsonchecksumtype, True, formatspecs, saltkey)
+ if(not CheckChecksums(prefjsonchecksum, prejsonfcs) and not skipchecksum):
  VerbosePrintOut("File JSON Data Checksum Error with file " +
  prefname + " at offset " + str(prefhstart))
  VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
  return False
- prenewfcs = GetHeaderChecksum(
- preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
+ prenewfcs = GetHeaderChecksum(preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs, saltkey)
  prefcs = preheaderdata[-2]
- if(not hmac.compare_digest(prefcs, prenewfcs) and not skipchecksum):
+ if(not CheckChecksums(prefcs, prenewfcs) and not skipchecksum):
  VerbosePrintOut("File Header Checksum Error with file " +
  prefname + " at offset " + str(prefhstart))
  VerbosePrintOut("'" + prefcs + "' != " +
@@ -5018,11 +5317,10 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
  prefcontents = fp.read(prefsize)
  else:
  prefcontents = fp.read(prefcsize)
- prenewfccs = GetFileChecksum(
- prefcontents, preheaderdata[-3].lower(), False, formatspecs)
+ prenewfccs = GetFileChecksum(prefcontents, preheaderdata[-3].lower(), False, formatspecs, saltkey)
  prefccs = preheaderdata[-1]
  pyhascontents = True
- if(not hmac.compare_digest(prefccs, prenewfccs) and not skipchecksum):
+ if(not CheckChecksums(prefccs, prenewfccs) and not skipchecksum):
  VerbosePrintOut("File Content Checksum Error with file " +
  prefname + " at offset " + str(prefcontentstart))
  VerbosePrintOut("'" + prefccs +
@@ -5049,8 +5347,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
  realidnum = 0
  countnum = seekstart
  while (fp.tell() < CatSizeEnd) if seektoend else (countnum < seekend):
- HeaderOut = ReadFileHeaderDataWithContentToList(
- fp, listonly, contentasfile, uncompress, skipchecksum, formatspecs)
+ HeaderOut = ReadFileHeaderDataWithContentToList(fp, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey)
  if(len(HeaderOut) == 0):
  break
  outlist.append(HeaderOut)
@@ -5058,7 +5355,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
  realidnum = realidnum + 1
  return outlist
 
- def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
  if(hasattr(infile, "read") or hasattr(infile, "write")):
  fp = infile
  try:
@@ -5153,7 +5450,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
  else:
  break
  readfp.seek(oldfppos, 0)
- ArchiveList.append(ReadFileDataWithContentToArray(readfp, currentfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, seektoend))
+ ArchiveList.append(ReadFileDataWithContentToArray(readfp, currentfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, saltkey, seektoend))
  currentfilepos = readfp.tell()
  else:
  infp = UncompressFileAlt(readfp, formatspecs, currentfilepos)
@@ -5175,27 +5472,27 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
  else:
  break
  infp.seek(oldinfppos, 0)
- ArchiveList.append(ReadFileDataWithContentToArray(infp, currentinfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, seektoend))
+ ArchiveList.append(ReadFileDataWithContentToArray(infp, currentinfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, saltkey, seektoend))
  currentinfilepos = infp.tell()
  currentfilepos = readfp.tell()
  return ArchiveList
 
 
- def ReadInMultipleFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+ def ReadInMultipleFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
  if(isinstance(infile, (list, tuple, ))):
  pass
  else:
  infile = [infile]
  outretval = []
  for curfname in infile:
- outretval.append(ReadInFileWithContentToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend))
+ outretval.append(ReadInFileWithContentToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend))
  return outretval
 
- def ReadInMultipleFilesWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
- return ReadInMultipleFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+ def ReadInMultipleFilesWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
+ return ReadInMultipleFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
 
 
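With saltkey threaded through every reader entry point, callers that wrote archives with a salt key must pass the same key back when reading. A hypothetical usage sketch, assuming the module's functions are in scope (the file names and key below are illustrative, not from this package):

```python
# Read several archives back into arrays; checksum verification will
# fail unless saltkey matches the key used when the archives were made.
results = ReadInMultipleFilesWithContentToArray(
    ["backup1.arc", "backup2.arc"],  # hypothetical paths
    fmttype="auto",
    skipchecksum=False,
    saltkey="example-secret-key")    # hypothetical key
```
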
- def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
  if(hasattr(infile, "read") or hasattr(infile, "write")):
  fp = infile
  try:
@@ -5290,7 +5587,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
  else:
  break
  readfp.seek(oldfppos, 0)
- ArchiveList.append(ReadFileDataWithContentToList(readfp, currentfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, seektoend))
+ ArchiveList.append(ReadFileDataWithContentToList(readfp, currentfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, saltkey, seektoend))
  currentfilepos = readfp.tell()
  else:
  infp = UncompressFileAlt(readfp, formatspecs, currentfilepos)
@@ -5312,24 +5609,24 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
  else:
  break
  infp.seek(oldinfppos, 0)
- ArchiveList.append(ReadFileDataWithContentToList(infp, currentinfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, seektoend))
+ ArchiveList.append(ReadFileDataWithContentToList(infp, currentinfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, saltkey, seektoend))
  currentinfilepos = infp.tell()
  currentfilepos = readfp.tell()
  return ArchiveList
 
 
- def ReadInMultipleFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+ def ReadInMultipleFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
  if(isinstance(infile, (list, tuple, ))):
  pass
  else:
  infile = [infile]
  outretval = []
  for curfname in infile:
- outretval.append(ReadInFileWithContentToList(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend))
+ outretval.append(ReadInFileWithContentToList(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend))
  return outretval
 
- def ReadInMultipleFilesWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
- return ReadInMultipleFileWithContentToList(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+ def ReadInMultipleFilesWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
+ return ReadInMultipleFileWithContentToList(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
 
 
  def _field_to_bytes(x):
@@ -5383,12 +5680,7 @@ def AppendNullBytes(indata=None, delimiter=__file_format_dict__['format_delimite
  def _hex_lower(n):
  return format(int(n), 'x').lower()
 
- def AppendFileHeader(fp,
- numfiles,
- fencoding,
- extradata=None,
- checksumtype="md5",
- formatspecs=__file_format_dict__):
+ def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__, saltkey=None):
  """
  Build and write the archive file header.
  Returns the same file-like 'fp' on success, or False on failure.
@@ -5436,24 +5728,47 @@ def AppendFileHeader(fp,
  # 4) core header fields before checksum:
  # tmpoutlenhex, fencoding, platform.system(), fnumfiles
  fnumfiles_hex = _hex_lower(numfiles)
-
+ fjsontype = "json"
+ if(len(jsondata) > 0):
+ try:
+ fjsoncontent = json.dumps(jsondata, separators=(',', ':')).encode("UTF-8")
+ except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+ fjsoncontent = "".encode("UTF-8")
+ else:
+ fjsoncontent = "".encode("UTF-8")
+ fjsonsize = format(len(fjsoncontent), 'x').lower()
+ fjsonlen = format(len(jsondata), 'x').lower()
+ tmpoutlist = []
+ tmpoutlist.append(fjsontype)
+ tmpoutlist.append(fjsonlen)
+ tmpoutlist.append(fjsonsize)
+ if(len(jsondata) > 0):
+ tmpoutlist.append(checksumtype[1])
+ tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[1], True, formatspecs, saltkey))
+ else:
+ tmpoutlist.append("none")
+ tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs, saltkey))
  # Preserve your original "tmpoutlen" computation exactly
- tmpoutlist = [extrasizelen, extrafields] # you used this as a separate list
- tmpoutlen = 4 + len(tmpoutlist) + len(xlist) + 2
+ tmpoutlist.append(extrasizelen)
+ tmpoutlist.append(extrafields)
+ tmpoutlen = 10 + len(tmpoutlist) + len(xlist)
  tmpoutlenhex = _hex_lower(tmpoutlen)
-
+ if(hasattr(time, "time_ns")):
+ fctime = format(int(time.time_ns()), 'x').lower()
+ else:
+ fctime = format(int(to_ns(time.time())), 'x').lower()
  # Serialize the first group
- fnumfilesa = AppendNullBytes([tmpoutlenhex, fencoding, platform.system(), py_implementation, fnumfiles_hex], delimiter)
+ fnumfilesa = AppendNullBytes([tmpoutlenhex, fctime, fctime, fencoding, platform.system(), py_implementation, __program_name__, fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
  # Append tmpoutlist
  fnumfilesa += AppendNullBytes(tmpoutlist, delimiter)
  # Append extradata items if any
  if xlist:
  fnumfilesa += AppendNullBytes(xlist, delimiter)
  # Append checksum type
- fnumfilesa += AppendNullByte(checksumtype, delimiter)
+ fnumfilesa += AppendNullByte(checksumtype[0], delimiter)
 
  # 5) inner checksum over fnumfilesa
- outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype, True, formatspecs)
+ outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs, saltkey)
  tmpfileoutstr = fnumfilesa + AppendNullByte(outfileheadercshex, delimiter)
 
  # 6) size of (tmpfileoutstr) excluding one delimiter, per your original math
@@ -5466,7 +5781,7 @@ def AppendFileHeader(fp,
  + fnumfilesa
  )
 
- outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype, True, formatspecs)
+ outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs, saltkey)
  fnumfilesa += AppendNullByte(outfileheadercshex, delimiter)
 
  # 8) final total size field (again per your original logic)
@@ -5474,10 +5789,11 @@ def AppendFileHeader(fp,
  formheaersizestr = AppendNullByte(formheaersize, delimiter) # computed but not appended in original
  # Note: you computed 'formheaersizestr' but didn’t append it afterward in the original either.
  # Keeping that behavior for compatibility.
-
+ nullstrecd = formatspecs['format_delimiter'].encode('UTF-8')
+ outfileout = fnumfilesa + fjsoncontent + nullstrecd
  # 9) write and try to sync
  try:
- fp.write(fnumfilesa)
+ fp.write(outfileout)
  except (OSError, io.UnsupportedOperation):
  return False
 
@@ -5498,21 +5814,21 @@ def AppendFileHeader(fp,
  return fp
 
 
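The rewritten AppendFileHeader above now takes a jsondata mapping, a two-element checksumtype list (slot 0 covers the header fields, slot 1 the serialized JSON blob), and an optional saltkey. A hypothetical call, assuming the module's names are in scope:

```python
import io

# Write a header for an empty archive carrying a small JSON metadata
# blob; "none" is substituted internally when jsondata is empty.
fp = AppendFileHeader(io.BytesIO(), 0, "UTF-8",
                      extradata=[],
                      jsondata={"creator": "example"},  # illustrative metadata
                      checksumtype=["md5", "md5"],
                      formatspecs=__file_format_dict__,
                      saltkey=None)
```
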
- def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype="md5", formatspecs=__file_format_multi_dict__):
+ def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None):
  if(IsNestedDict(formatspecs) and fmttype in formatspecs):
  formatspecs = formatspecs[fmttype]
  elif(IsNestedDict(formatspecs) and fmttype not in formatspecs):
  fmttype = __file_format_default__
  formatspecs = formatspecs[fmttype]
- AppendFileHeader(fp, 0, "UTF-8", [], checksumtype, formatspecs)
+ AppendFileHeader(fp, 0, "UTF-8", [], {}, checksumtype, formatspecs, saltkey)
  return fp
 
 
- def MakeEmptyArchiveFilePointer(fp, fmttype=__file_format_default__, checksumtype="md5", formatspecs=__file_format_multi_dict__):
- return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)
+ def MakeEmptyArchiveFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None):
+ return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs, saltkey)
 
 
- def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="md5", formatspecs=__file_format_multi_dict__, returnfp=False):
+ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -5542,6 +5858,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
  fp = MkTempFile()
  elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
  fp = outfile
+ return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs, saltkey)
  elif(re.findall(__upload_proto_support__, outfile)):
  fp = MkTempFile()
  else:
@@ -5553,7 +5870,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
  except PermissionError:
  return False
- AppendFileHeader(fp, 0, "UTF-8", [], checksumtype, formatspecs)
+ AppendFileHeader(fp, 0, "UTF-8", ['hello', 'goodbye'], {}, checksumtype, formatspecs, saltkey)
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
  fp = CompressOpenFileAlt(
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -5584,11 +5901,11 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
  return True
 
 
- def MakeEmptyArchiveFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="md5", formatspecs=__file_format_dict__, returnfp=False):
- return MakeEmptyFile(outfile, "auto", compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, returnfp)
+ def MakeEmptyArchiveFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, returnfp=False):
+ return MakeEmptyFile(outfile, "auto", compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, returnfp)
 
 
- def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, filecontent="", checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__):
+ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, filecontent="", checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None):
  if(not hasattr(fp, "write")):
  return False
  if (isinstance(extradata, dict) or IsNestedDictAlt(extradata)) and len(extradata) > 0:
@@ -5620,10 +5937,10 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
  tmpoutlist.append(fjsonsize)
  if(len(jsondata) > 0):
  tmpoutlist.append(checksumtype[2])
- tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[2], True, formatspecs))
+ tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[2], True, formatspecs, saltkey))
  else:
  tmpoutlist.append("none")
- tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs))
+ tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs, saltkey))
  tmpoutlist.append(extrasizelen)
  tmpoutlist.append(extrafields)
  outfileoutstr = AppendNullBytes(
@@ -5638,22 +5955,18 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
  outfileoutstr = outfileoutstr + \
  AppendNullBytes(checksumlist, formatspecs['format_delimiter'])
  nullstrecd = formatspecs['format_delimiter'].encode('UTF-8')
- outfileheadercshex = GetFileChecksum(
- outfileoutstr, checksumtype[0], True, formatspecs)
+ outfileheadercshex = GetFileChecksum(outfileoutstr, checksumtype[0], True, formatspecs, saltkey)
  if(len(filecontent) == 0):
- outfilecontentcshex = GetFileChecksum(
- filecontent, "none", False, formatspecs)
+ outfilecontentcshex = GetFileChecksum(filecontent, "none", False, formatspecs, saltkey)
  else:
- outfilecontentcshex = GetFileChecksum(
- filecontent, checksumtype[1], False, formatspecs)
+ outfilecontentcshex = GetFileChecksum(filecontent, checksumtype[1], False, formatspecs, saltkey)
  tmpfileoutstr = outfileoutstr + \
  AppendNullBytes([outfileheadercshex, outfilecontentcshex],
  formatspecs['format_delimiter'])
  formheaersize = format(int(len(tmpfileoutstr) - len(formatspecs['format_delimiter'])), 'x').lower()
  outfileoutstr = AppendNullByte(
  formheaersize, formatspecs['format_delimiter']) + outfileoutstr
- outfileheadercshex = GetFileChecksum(
- outfileoutstr, checksumtype[0], True, formatspecs)
+ outfileheadercshex = GetFileChecksum(outfileoutstr, checksumtype[0], True, formatspecs, saltkey)
  outfileoutstr = outfileoutstr + \
  AppendNullBytes([outfileheadercshex, outfilecontentcshex],
  formatspecs['format_delimiter'])
@@ -5671,14 +5984,11 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
  pass
  return fp
 
- def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  if(not hasattr(fp, "write")):
  return False
- advancedlist = formatspecs['use_advanced_list']
- altinode = formatspecs['use_alt_inode']
- if(verbose):
- logging.basicConfig(format="%(message)s",
- stream=PY_STDOUT_TEXT, level=logging.DEBUG)
+ advancedlist = __use_advanced_list__
+ altinode = __use_alt_inode__
  infilelist = []
  if(infiles == "-"):
  for line in PY_STDIN_TEXT:
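
AppendFilesWithContent and the converter functions that follow now take a five-element checksumtype list instead of four. From the call sites in this diff, the slots appear to map as follows (a reading of the diff, not documented API):

```python
checksumtype = [
    "md5",  # [0] archive header fields     -> AppendFileHeader
    "md5",  # [1] archive-level JSON blob   -> AppendFileHeader
    "md5",  # [2] per-file header fields    -> AppendFileHeaderWithContent
    "md5",  # [3] per-file content          -> AppendFileHeaderWithContent
    "md5",  # [4] per-file JSON blob        -> AppendFileHeaderWithContent
]
```
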
@@ -5720,7 +6030,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  inodetoforminode = {}
  numfiles = int(len(GetDirList))
  fnumfiles = format(numfiles, 'x').lower()
- AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
@@ -5749,14 +6059,24 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  FullSizeFilesAlt += fstatinfo.st_rsize
  except AttributeError:
  FullSizeFilesAlt += fstatinfo.st_size
+ fblksize = 0
+ if(hasattr(fstatinfo, "st_blksize")):
+ fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
+ fblocks = 0
+ if(hasattr(fstatinfo, "st_blocks")):
+ fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
+ fflags = 0
+ if(hasattr(fstatinfo, "st_flags")):
+ fflags = format(int(fstatinfo.st_flags), 'x').lower()
  ftype = 0
- if(hasattr(os.path, "isjunction") and os.path.isjunction(fname)):
+ if(not followlink and hasattr(os.path, "isjunction") and os.path.isjunction(fname)):
  ftype = 13
- elif(hasattr(fstatinfo, "st_blocks") and fstatinfo.st_blocks * 512 < fstatinfo.st_size):
- ftype = 12
  elif(stat.S_ISREG(fpremode)):
- ftype = 0
- elif(stat.S_ISLNK(fpremode)):
+ if(hasattr(fstatinfo, "st_blocks") and fstatinfo.st_size > 0 and fstatinfo.st_blocks * 512 < fstatinfo.st_size):
+ ftype = 12
+ else:
+ ftype = 0
+ elif(not followlink and stat.S_ISLNK(fpremode)):
  ftype = 2
  elif(stat.S_ISCHR(fpremode)):
  ftype = 3
@@ -5778,43 +6098,42 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  ftype = 0
  flinkname = ""
  fcurfid = format(int(curfid), 'x').lower()
- if not followlink and finode != 0:
+ if(not followlink and finode != 0):
  unique_id = (fstatinfo.st_dev, finode)
- if ftype != 1:
- if unique_id in inodelist:
+ if(ftype != 1):
+ if(unique_id in inodetofile):
  # Hard link detected
  ftype = 1
  flinkname = inodetofile[unique_id]
- if altinode:
- fcurinode = format(int(unique_id[1]), 'x').lower()
- else:
- fcurinode = format(int(inodetoforminode[unique_id]), 'x').lower()
  else:
- # New inode
- inodelist.append(unique_id)
+ # First time seeing this inode
  inodetofile[unique_id] = fname
+ if(unique_id not in inodetoforminode):
  inodetoforminode[unique_id] = curinode
- if altinode:
- fcurinode = format(int(unique_id[1]), 'x').lower()
- else:
- fcurinode = format(int(curinode), 'x').lower()
- curinode += 1
+ curinode = curinode + 1
+ if(altinode):
+ # altinode == True → use real inode number
+ fcurinode = format(int(unique_id[1]), 'x').lower()
+ else:
+ # altinode == False → use synthetic inode id
+ fcurinode = format(int(inodetoforminode[unique_id]), 'x').lower()
  else:
  # Handle cases where inodes are not supported or symlinks are followed
  fcurinode = format(int(curinode), 'x').lower()
- curinode += 1
+ curinode = curinode + 1
  curfid = curfid + 1
  if(ftype == 2):
  flinkname = os.readlink(fname)
- if(not os.path.exists(flinkname)):
+ if(not os.path.exists(fname)):
  return False
  try:
  fdev = fstatinfo.st_rdev
  except AttributeError:
  fdev = 0
- getfdev = GetDevMajorMinor(fdev)
- fdev_minor = getfdev[0]
- fdev_major = getfdev[1]
+ try:
+ frdev = fstatinfo.st_rdev
+ except AttributeError:
+ frdev = 0
  # Types that should be considered zero-length in the archive context:
  zero_length_types = {1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 13}
  # Types that have actual data to read:
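
The reworked block above keys hard-link detection on the (st_dev, st_ino) pair and fixes the lookup to use inodetofile (previously inodelist). In outline, and only as a sketch of the bookkeeping, the logic reduces to:

```python
inodetofile = {}        # (device, inode) -> first path archived
inodetoforminode = {}   # (device, inode) -> synthetic inode id

def classify(path, st, nextid):
    # The first path seen for an inode is stored as a regular entry;
    # later paths with the same (device, inode) pair become hard links
    # (ftype 1) pointing back at the first path.
    key = (st.st_dev, st.st_ino)
    if key in inodetofile:
        return 1, inodetofile[key]   # hard link -> existing name
    inodetofile[key] = path
    inodetoforminode[key] = nextid
    return 0, ""                     # regular entry
```
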
@@ -5825,13 +6144,28 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  fsize = format(int(fstatinfo.st_size), 'x').lower()
  else:
  fsize = format(int(fstatinfo.st_size), 'x').lower()
- fatime = format(int(fstatinfo.st_atime), 'x').lower()
- fmtime = format(int(fstatinfo.st_mtime), 'x').lower()
- fctime = format(int(fstatinfo.st_ctime), 'x').lower()
+ if(hasattr(fstatinfo, "st_atime_ns")):
+ fatime = format(int(fstatinfo.st_atime_ns), 'x').lower()
+ else:
+ fatime = format(int(to_ns(fstatinfo.st_atime)), 'x').lower()
+ if(hasattr(fstatinfo, "st_mtime_ns")):
+ fmtime = format(int(fstatinfo.st_mtime_ns), 'x').lower()
+ else:
+ fmtime = format(int(to_ns(fstatinfo.st_mtime)), 'x').lower()
+ if(hasattr(fstatinfo, "st_ctime_ns")):
+ fctime = format(int(fstatinfo.st_ctime_ns), 'x').lower()
+ else:
+ fctime = format(int(to_ns(fstatinfo.st_ctime)), 'x').lower()
  if(hasattr(fstatinfo, "st_birthtime")):
- fbtime = format(int(fstatinfo.st_birthtime), 'x').lower()
+ if(hasattr(fstatinfo, "st_birthtime_ns")):
+ fbtime = format(int(fstatinfo.st_birthtime_ns), 'x').lower()
+ else:
+ fbtime = format(int(to_ns(fstatinfo.st_birthtime)), 'x').lower()
  else:
- fbtime = format(int(fstatinfo.st_ctime), 'x').lower()
+ if(hasattr(fstatinfo, "st_ctime_ns")):
+ fbtime = format(int(fstatinfo.st_ctime_ns), 'x').lower()
+ else:
+ fbtime = format(int(to_ns(fstatinfo.st_ctime)), 'x').lower()
  fmode = format(int(fstatinfo.st_mode), 'x').lower()
  fchmode = format(int(stat.S_IMODE(fstatinfo.st_mode)), 'x').lower()
  ftypemod = format(int(stat.S_IFMT(fstatinfo.st_mode)), 'x').lower()
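
All four timestamps are now stored in nanoseconds, preferring the st_*_ns stat fields and falling back to a to_ns() conversion. to_ns() itself is defined outside this diff; a plausible shape, assuming it simply scales seconds to nanoseconds:

```python
def to_ns(seconds):
    # Assumed helper: convert a seconds-based timestamp (int or float)
    # to integer nanoseconds, matching the st_*_ns convention.
    return int(round(float(seconds) * 1000000000))
```
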
@@ -5858,8 +6192,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  except ImportError:
  fgname = ""
  fdev = format(int(fdev), 'x').lower()
- fdev_minor = format(int(fdev_minor), 'x').lower()
- fdev_major = format(int(fdev_major), 'x').lower()
+ frdev = format(int(frdev), 'x').lower()
  finode = format(int(finode), 'x').lower()
  flinkcount = format(int(flinkcount), 'x').lower()
  if(hasattr(fstatinfo, "st_file_attributes")):
@@ -5920,10 +6253,9 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  fcompression = curcompression
  fcontents.close()
  fcontents = cfcontents
- elif followlink and (ftype == 1 or ftype == 2):
- if(not os.path.exists(flinkname)):
+ elif followlink and (ftype == 2 or ftype in data_types):
+ if(not os.path.exists(fname)):
  return False
- flstatinfo = os.stat(flinkname)
  with open(flinkname, "rb") as fpc:
  shutil.copyfileobj(fpc, fcontents, length=__filebuff_size__)
  typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
@@ -5974,10 +6306,9 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  fcompression = ""
  fcontents.seek(0, 0)
  ftypehex = format(ftype, 'x').lower()
- tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
- fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
- AppendFileHeaderWithContent(
- fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
+ tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
+ AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
@@ -5986,12 +6317,9 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  pass
  return fp
 
- def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  if(not hasattr(fp, "write")):
  return False
- if(verbose):
- logging.basicConfig(format="%(message)s",
- stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  curinode = 0
  curfid = 0
  inodelist = []
@@ -6055,7 +6383,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  except FileNotFoundError:
  return False
  numfiles = int(len(tarfp.getmembers()))
- AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
@@ -6073,6 +6401,15 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  fpremode = member.mode
  ffullmode = member.mode
  flinkcount = 0
+ fblksize = 0
+ if(hasattr(fstatinfo, "st_blksize")):
+ fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
+ fblocks = 0
+ if(hasattr(fstatinfo, "st_blocks")):
+ fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
+ fflags = 0
+ if(hasattr(fstatinfo, "st_flags")):
+ fflags = format(int(fstatinfo.st_flags), 'x').lower()
  ftype = 0
  if(member.isreg()):
  ffullmode = member.mode + stat.S_IFREG
@@ -6110,12 +6447,11 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  curfid = curfid + 1
  if(ftype == 2):
  flinkname = member.linkname
+ fdev = format(int("0"), 'x').lower()
  try:
- fdev = format(int(os.makedev(member.devmajor, member.devminor)), 'x').lower()
+ frdev = format(int(os.makedev(member.devmajor, member.devminor)), 'x').lower()
  except AttributeError:
- fdev = format(int(MakeDevAlt(member.devmajor, member.devminor)), 'x').lower()
- fdev_minor = format(int(member.devminor), 'x').lower()
- fdev_major = format(int(member.devmajor), 'x').lower()
+ frdev = format(int(MakeDevAlt(member.devmajor, member.devminor)), 'x').lower()
  # Types that should be considered zero-length in the archive context:
  zero_length_types = {1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 13}
  # Types that have actual data to read:
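
Device numbers from tar members are now stored as one combined frdev value built with os.makedev (or the MakeDevAlt fallback) instead of separate minor/major fields; the split can be recovered on the reading side with the standard library:

```python
import os

rdev = os.makedev(8, 1)   # illustrative major/minor pair
major = os.major(rdev)    # -> 8
minor = os.minor(rdev)    # -> 1
```
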
@@ -6126,10 +6462,10 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  fsize = format(int(member.size), 'x').lower()
  else:
  fsize = format(int(member.size), 'x').lower()
- fatime = format(int(member.mtime), 'x').lower()
- fmtime = format(int(member.mtime), 'x').lower()
- fctime = format(int(member.mtime), 'x').lower()
- fbtime = format(int(member.mtime), 'x').lower()
+ fatime = format(int(to_ns(member.mtime)), 'x').lower()
+ fmtime = format(int(to_ns(member.mtime)), 'x').lower()
+ fctime = format(int(to_ns(member.mtime)), 'x').lower()
+ fbtime = format(int(to_ns(member.mtime)), 'x').lower()
  fmode = format(int(ffullmode), 'x').lower()
  fchmode = format(int(stat.S_IMODE(ffullmode)), 'x').lower()
  ftypemod = format(int(stat.S_IFMT(ffullmode)), 'x').lower()
@@ -6196,10 +6532,9 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  fcompression = ""
  fcontents.seek(0, 0)
  ftypehex = format(ftype, 'x').lower()
- tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
- fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
- AppendFileHeaderWithContent(
- fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
+ tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
+ AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
@@ -6209,12 +6544,9 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  fcontents.close()
  return fp
 
- def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  if(not hasattr(fp, "write")):
  return False
- if(verbose):
- logging.basicConfig(format="%(message)s",
- stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  curinode = 0
  curfid = 0
  inodelist = []
@@ -6248,7 +6580,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  if(ziptest):
  VerbosePrintOut("Bad file found!")
  numfiles = int(len(zipfp.infolist()))
- AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
@@ -6269,6 +6601,15 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  else:
  fpremode = int(stat.S_IFREG | 0x1b6)
  flinkcount = 0
+ fblksize = 0
+ if(hasattr(fstatinfo, "st_blksize")):
+ fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
+ fblocks = 0
+ if(hasattr(fstatinfo, "st_blocks")):
+ fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
+ fflags = 0
+ if(hasattr(fstatinfo, "st_flags")):
+ fflags = format(int(fstatinfo.st_flags), 'x').lower()
  ftype = 0
  if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
  ftype = 5
@@ -6279,8 +6620,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  fcurinode = format(int(curfid), 'x').lower()
  curfid = curfid + 1
  fdev = format(int(0), 'x').lower()
- fdev_minor = format(int(0), 'x').lower()
- fdev_major = format(int(0), 'x').lower()
+ frdev = format(int(0), 'x').lower()
  if(ftype == 5):
  fsize = format(int("0"), 'x').lower()
  elif(ftype == 0):
@@ -6288,13 +6628,13 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  else:
  fsize = format(int(member.file_size), 'x').lower()
  fatime = format(
- int(time.mktime(member.date_time + (0, 0, -1))), 'x').lower()
+ int(to_ns(time.mktime(member.date_time + (0, 0, -1)))), 'x').lower()
  fmtime = format(
- int(time.mktime(member.date_time + (0, 0, -1))), 'x').lower()
+ int(to_ns(time.mktime(member.date_time + (0, 0, -1)))), 'x').lower()
  fctime = format(
- int(time.mktime(member.date_time + (0, 0, -1))), 'x').lower()
+ int(to_ns(time.mktime(member.date_time + (0, 0, -1)))), 'x').lower()
  fbtime = format(
- int(time.mktime(member.date_time + (0, 0, -1))), 'x').lower()
+ int(to_ns(time.mktime(member.date_time + (0, 0, -1)))), 'x').lower()
  if(zipinfo.create_system == 0 or zipinfo.create_system == 10):
  fwinattributes = format(int(zipinfo.external_attr & 0xFFFF), 'x').lower()
  if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
@@ -6410,10 +6750,9 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  fcompression = ""
  fcontents.seek(0, 0)
  ftypehex = format(ftype, 'x').lower()
- tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
- fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
- AppendFileHeaderWithContent(
- fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
+ tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
+ AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
@@ -6424,16 +6763,12 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  return fp
 
  if(not rarfile_support):
- def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+ def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  return False
-
- if(rarfile_support):
- def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+ else:
+ def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  if(not hasattr(fp, "write")):
  return False
- if(verbose):
- logging.basicConfig(format="%(message)s",
- stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  curinode = 0
  curfid = 0
  inodelist = []
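
The rar branch now uses a plain if/else on rarfile_support, defining a stub that returns False when the optional dependency is missing. The flag itself is set earlier in the module and is outside this diff; the usual shape of such a guard looks like this (a sketch, not the module's exact code):

```python
try:
    import rarfile            # optional third-party dependency
    rarfile_support = True
except ImportError:
    rarfile_support = False
```
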
@@ -6449,7 +6784,7 @@ if(rarfile_support):
  if(rartest):
  VerbosePrintOut("Bad file found!")
  numfiles = int(len(rarfp.infolist()))
- AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
@@ -6505,6 +6840,15 @@ if(rarfile_support):
  fcompression = ""
  fcsize = format(int(0), 'x').lower()
  flinkcount = 0
+ fblksize = 0
+ if(hasattr(fstatinfo, "st_blksize")):
+ fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
+ fblocks = 0
+ if(hasattr(fstatinfo, "st_blocks")):
+ fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
+ fflags = 0
+ if(hasattr(fstatinfo, "st_flags")):
+ fflags = format(int(fstatinfo.st_flags), 'x').lower()
  ftype = 0
  if(member.is_file()):
  ftype = 0
@@ -6519,8 +6863,7 @@ if(rarfile_support):
  fcurinode = format(int(curfid), 'x').lower()
  curfid = curfid + 1
  fdev = format(int(0), 'x').lower()
- fdev_minor = format(int(0), 'x').lower()
- fdev_major = format(int(0), 'x').lower()
+ frdev = format(int(0), 'x').lower()
  if(ftype == 5):
  fsize = format(int("0"), 'x').lower()
  elif(ftype == 0):
@@ -6529,20 +6872,20 @@ if(rarfile_support):
  fsize = format(int(member.file_size), 'x').lower()
  try:
  if(member.atime):
- fatime = format(int(member.atime.timestamp()), 'x').lower()
+ fatime = format(int(to_ns(member.atime.timestamp())), 'x').lower()
  else:
- fatime = format(int(member.mtime.timestamp()), 'x').lower()
+ fatime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
  except AttributeError:
- fatime = format(int(member.mtime.timestamp()), 'x').lower()
- fmtime = format(int(member.mtime.timestamp()), 'x').lower()
+ fatime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
+ fmtime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
  try:
  if(member.ctime):
- fctime = format(int(member.ctime.timestamp()), 'x').lower()
+ fctime = format(int(to_ns(member.ctime.timestamp())), 'x').lower()
  else:
- fctime = format(int(member.mtime.timestamp()), 'x').lower()
+ fctime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
  except AttributeError:
- fctime = format(int(member.mtime.timestamp()), 'x').lower()
- fbtime = format(int(member.mtime.timestamp()), 'x').lower()
+ fctime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
+ fbtime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
  if(is_unix and member.external_attr != 0):
  fmode = format(int(member.external_attr), 'x').lower()
  fchmode = format(
@@ -6644,10 +6987,9 @@ if(rarfile_support):
  fcompression = ""
  fcontents.seek(0, 0)
  ftypehex = format(ftype, 'x').lower()
- tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
- fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
- AppendFileHeaderWithContent(
- fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
+ tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
+ AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
@@ -6658,16 +7000,12 @@ if(rarfile_support):
  return fp
 
  if(not py7zr_support):
- def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+ def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  return False
-
- if(py7zr_support):
- def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+ else:
+ def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  if(not hasattr(fp, "write")):
  return False
- if(verbose):
- logging.basicConfig(format="%(message)s",
- stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  formver = formatspecs['format_ver']
  fileheaderver = str(int(formver.replace(".", "")))
  curinode = 0
@@ -6685,7 +7023,7 @@ if(py7zr_support):
  if(sztestalt):
  VerbosePrintOut("Bad file found!")
  numfiles = int(len(szpfp.list()))
- AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
@@ -6708,6 +7046,15 @@ if(py7zr_support):
  fcompression = ""
  fcsize = format(int(0), 'x').lower()
  flinkcount = 0
+ fblksize = 0
+ if(hasattr(fstatinfo, "st_blksize")):
+ fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
+ fblocks = 0
+ if(hasattr(fstatinfo, "st_blocks")):
+ fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
+ fflags = 0
+ if(hasattr(fstatinfo, "st_flags")):
+ fflags = format(int(fstatinfo.st_flags), 'x').lower()
  ftype = 0
  if(member.is_directory):
  ftype = 5
@@ -6718,14 +7065,13 @@ if(py7zr_support):
  fcurinode = format(int(curfid), 'x').lower()
  curfid = curfid + 1
  fdev = format(int(0), 'x').lower()
- fdev_minor = format(int(0), 'x').lower()
- fdev_major = format(int(0), 'x').lower()
+ frdev = format(int(0), 'x').lower()
  if(ftype == 5):
  fsize = format(int("0"), 'x').lower()
- fatime = format(int(member.creationtime.timestamp()), 'x').lower()
- fmtime = format(int(member.creationtime.timestamp()), 'x').lower()
- fctime = format(int(member.creationtime.timestamp()), 'x').lower()
- fbtime = format(int(member.creationtime.timestamp()), 'x').lower()
+ fatime = format(int(to_ns(member.creationtime.timestamp())), 'x').lower()
+ fmtime = format(int(to_ns(member.creationtime.timestamp())), 'x').lower()
+ fctime = format(int(to_ns(member.creationtime.timestamp())), 'x').lower()
+ fbtime = format(int(to_ns(member.creationtime.timestamp())), 'x').lower()
  if(member.is_directory):
  fmode = format(int(stat.S_IFDIR | 0x1ff), 'x').lower()
  fchmode = format(
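Timestamps are now routed through `to_ns(...)` before hex encoding, so the on-disk values change from whole seconds to nanoseconds. `to_ns` itself is defined elsewhere in pyarchivefile and is not shown in this diff; assuming it scales float seconds to integer nanoseconds, the round-trip looks like this (the helper below is a stand-in, not the package's definition):

```python
def to_ns(seconds):
    # Stand-in for pyarchivefile's to_ns(); assumed to scale (possibly
    # fractional) seconds since the epoch to integer nanoseconds.
    return int(seconds * 1000000000)

ts = 1700000000.5                        # float seconds since the epoch
fatime = format(to_ns(ts), 'x').lower()  # hex nanoseconds, as stored on disk
assert int(fatime, 16) == 1700000000500000000
print(int(fatime, 16) / 1000000000)      # back to seconds: 1700000000.5
```

Storing nanoseconds keeps sub-second precision that the old int(seconds) encoding discarded.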
@@ -6818,10 +7164,9 @@ if(py7zr_support):
  fcompression = ""
  fcontents.seek(0, 0)
  ftypehex = format(ftype, 'x').lower()
- tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
- fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
- AppendFileHeaderWithContent(
- fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
+ tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
+ AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
@@ -6831,11 +7176,9 @@ if(py7zr_support):
  fcontents.close()
  return fp

- def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  if(not hasattr(fp, "write")):
  return False
- if(verbose):
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  GetDirList = inlist
  if(not GetDirList):
  return False
@@ -6847,7 +7190,7 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsond
  inodetoforminode = {}
  numfiles = int(len(GetDirList))
  fnumfiles = format(numfiles, 'x').lower()
- AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+ AppendFileHeader(fp, numfiles, "UTF-8", [], [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
  for curfname in GetDirList:
  ftype = format(curfname[0], 'x').lower()
  fencoding = curfname[1]
@@ -6861,44 +7204,45 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsond
  fbasedir = os.path.dirname(fname)
  flinkname = curfname[4]
  fsize = format(curfname[5], 'x').lower()
- fatime = format(curfname[6], 'x').lower()
- fmtime = format(curfname[7], 'x').lower()
- fctime = format(curfname[8], 'x').lower()
- fbtime = format(curfname[9], 'x').lower()
- fmode = format(curfname[10], 'x').lower()
- fwinattributes = format(curfname[11], 'x').lower()
- fcompression = curfname[12]
- fcsize = format(curfname[13], 'x').lower()
- fuid = format(curfname[14], 'x').lower()
- funame = curfname[15]
- fgid = format(curfname[16], 'x').lower()
- fgname = curfname[17]
- fid = format(curfname[18], 'x').lower()
- finode = format(curfname[19], 'x').lower()
- flinkcount = format(curfname[20], 'x').lower()
- fdev = format(curfname[21], 'x').lower()
- fdev_minor = format(curfname[22], 'x').lower()
- fdev_major = format(curfname[23], 'x').lower()
- fseeknextfile = curfname[24]
- extradata = curfname[25]
- fheaderchecksumtype = curfname[26]
- fcontentchecksumtype = curfname[27]
- fcontents = curfname[28]
+ fblksize = format(curfname[6], 'x').lower()
+ fblocks = format(curfname[7], 'x').lower()
+ fflags = format(curfname[8], 'x').lower()
+ fatime = format(curfname[9], 'x').lower()
+ fmtime = format(curfname[10], 'x').lower()
+ fctime = format(curfname[11], 'x').lower()
+ fbtime = format(curfname[12], 'x').lower()
+ fmode = format(curfname[13], 'x').lower()
+ fwinattributes = format(curfname[14], 'x').lower()
+ fcompression = curfname[15]
+ fcsize = format(curfname[16], 'x').lower()
+ fuid = format(curfname[17], 'x').lower()
+ funame = curfname[18]
+ fgid = format(curfname[19], 'x').lower()
+ fgname = curfname[20]
+ fid = format(curfname[21], 'x').lower()
+ finode = format(curfname[22], 'x').lower()
+ flinkcount = format(curfname[23], 'x').lower()
+ fdev = format(curfname[24], 'x').lower()
+ frdev = format(curfname[25], 'x').lower()
+ fseeknextfile = curfname[26]
+ extradata = curfname[27]
+ fheaderchecksumtype = curfname[28]
+ fcontentchecksumtype = curfname[29]
+ fcontents = curfname[30]
  fencoding = GetFileEncoding(fcontents, 0, False)[0]
- tmpoutlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize,
- fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev, fdev_minor, fdev_major, fseeknextfile]
+ tmpoutlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize,
+ fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev, frdev, fseeknextfile]
  fcontents.seek(0, 0)
- AppendFileHeaderWithContent(
- fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
+ AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
  return fp


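The in-memory entry layout consumed by AppendListsWithContent grows from 29 to 31 positional fields: fblksize, fblocks, and fflags are spliced in at indices 6-8 and everything after them shifts by three. The mapping below is transcribed from the curfname[...] reads above (fcencoding at index 2 is implied by the tmpoutlist layout rather than read explicitly in this hunk):

```python
# Index layout of each entry consumed by AppendListsWithContent in 0.26.0,
# transcribed from the curfname[...] reads in the hunk above.
INLIST_FIELDS_V26 = [
    "ftype", "fencoding", "fcencoding", "fname", "flinkname", "fsize",   # 0-5
    "fblksize", "fblocks", "fflags",                                     # 6-8 (new)
    "fatime", "fmtime", "fctime", "fbtime", "fmode", "fwinattributes",   # 9-14
    "fcompression", "fcsize", "fuid", "funame", "fgid", "fgname",        # 15-20
    "fid", "finode", "flinkcount", "fdev", "frdev",                      # 21-25
    "fseeknextfile", "extradata", "fheaderchecksumtype",                 # 26-28
    "fcontentchecksumtype", "fcontents",                                 # 29-30
]
assert len(INLIST_FIELDS_V26) == 31
```

Lists built against the 0.25.0 layout must be regenerated; indexing them with the new offsets would silently misread every field after fsize.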
- def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
- inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
- return AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, followlink, checksumtype, formatspecs, verbose)
+ def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+ inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs, saltkey, False)
+ return AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose)


- def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -6942,8 +7286,7 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
  except PermissionError:
  return False
- AppendFilesWithContent(infiles, fp, dirlistfromtxt, extradata, jsondata, compression,
- compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, formatspecs, verbose)
+ AppendFilesWithContent(infiles, fp, dirlistfromtxt, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, formatspecs, saltkey, verbose)
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
  fp = CompressOpenFileAlt(
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -6972,12 +7315,12 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
  fp.close()
  return True

- def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if not isinstance(infiles, list):
  infiles = [infiles]
  returnout = False
  for infileslist in infiles:
- returnout = AppendFilesWithContentToOutFile(infileslist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, True)
+ returnout = AppendFilesWithContentToOutFile(infileslist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose, True)
  if(not returnout):
  break
  else:
@@ -6987,7 +7330,7 @@ def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=Fals
  return True
  return returnout

- def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, saltkey=None, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7028,8 +7371,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
  except PermissionError:
  return False
- AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, compression,
- compresswholefile, compressionlevel, followlink, checksumtype, formatspecs, verbose)
+ AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, compression, compresswholefile, compressionlevel, followlink, checksumtype, formatspecs, saltkey, verbose)
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
  fp = CompressOpenFileAlt(
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7059,7 +7401,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
  fp.close()
  return True

- def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7101,8 +7443,7 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
  except PermissionError:
  return False
- AppendFilesWithContentFromTarFile(infiles, fp, extradata, jsondata, compression,
- compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, verbose)
+ AppendFilesWithContentFromTarFile(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, verbose)
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
  fp = CompressOpenFileAlt(
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7132,12 +7473,12 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
  fp.close()
  return True

- def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if not isinstance(infiles, list):
  infiles = [infiles]
  returnout = False
  for infileslist in infiles:
- returnout = AppendFilesWithContentFromTarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
+ returnout = AppendFilesWithContentFromTarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, True)
  if(not returnout):
  break
  else:
@@ -7147,7 +7488,7 @@ def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype=
  return True
  return returnout

- def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7189,8 +7530,7 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
  except PermissionError:
  return False
- AppendFilesWithContentFromZipFile(infiles, fp, extradata, jsondata, compression,
- compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, verbose)
+ AppendFilesWithContentFromZipFile(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, verbose)
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
  fp = CompressOpenFileAlt(
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7220,12 +7560,12 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
  fp.close()
  return True

- def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if not isinstance(infiles, list):
  infiles = [infiles]
  returnout = False
  for infileslist in infiles:
- returnout = AppendFilesWithContentFromZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
+ returnout = AppendFilesWithContentFromZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, True)
  if(not returnout):
  break
  else:
@@ -7236,11 +7576,10 @@ def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype=
  return returnout

  if(not rarfile_support):
- def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  return False
-
- if(rarfile_support):
- def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ else:
+ def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7282,8 +7621,7 @@ if(rarfile_support):
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
  except PermissionError:
  return False
- AppendFilesWithContentFromRarFile(infiles, fp, extradata, jsondata, compression,
- compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, verbose)
+ AppendFilesWithContentFromRarFile(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, verbose)
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
  fp = CompressOpenFileAlt(
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7313,12 +7651,12 @@ if(rarfile_support):
  fp.close()
  return True

- def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if not isinstance(infiles, list):
  infiles = [infiles]
  returnout = False
  for infileslist in infiles:
- returnout = AppendFilesWithContentFromRarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
+ returnout = AppendFilesWithContentFromRarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, True)
  if(not returnout):
  break
  else:
@@ -7329,11 +7667,10 @@ def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype=
  return returnout

  if(not py7zr_support):
- def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  return False
-
- if(py7zr_support):
- def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ else:
+ def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7375,8 +7712,7 @@ if(py7zr_support):
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
  except PermissionError:
  return False
- AppendFilesWithContentFromSevenZipFile(infiles, fp, extradata, jsondata, compression,
- compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, verbose)
+ AppendFilesWithContentFromSevenZipFile(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, verbose)
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
  fp = CompressOpenFileAlt(
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7406,12 +7742,12 @@ if(py7zr_support):
  fp.close()
  return True

- def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if not isinstance(infiles, list):
  infiles = [infiles]
  returnout = False
  for infileslist in infiles:
- returnout = AppendFilesWithContentFromSevenZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
+ returnout = AppendFilesWithContentFromSevenZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, True)
  if(not returnout):
  break
  else:
@@ -7421,9 +7757,9 @@ def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmt
  return True
  return returnout

- def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
- inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
- return AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
+ def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
+ inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs, saltkey, False)
+ return AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose, returnfp)


  def PrintPermissionString(fchmode, ftype):
@@ -9164,58 +9500,54 @@ def CheckSumSupport(checkfor, guaranteed=True):
  return False


- def PackArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
- return AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
+ def PackArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
+ return AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose, returnfp)

- def PackStackedArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
- return AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
+ def PackStackedArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
+ return AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose, returnfp)

- def PackArchiveFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
- return PackArchiveFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, returnfp)
+ def PackArchiveFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
+ return PackArchiveFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, saltkey, verbose, returnfp)


- def PackArchiveFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
- return AppendFilesWithContentFromTarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
+ def PackArchiveFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
+ return AppendFilesWithContentFromTarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, returnfp)


- def PackArchiveFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
- return AppendFilesWithContentFromZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
+ def PackArchiveFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
+ return AppendFilesWithContentFromZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, returnfp)


  if(not rarfile_support):
- def PackArchiveFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+ def PackArchiveFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
  return False
-
- if(rarfile_support):
- def PackArchiveFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
- return AppendFilesWithContentFromRarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
+ else:
+ def PackArchiveFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
+ return AppendFilesWithContentFromRarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, returnfp)


  if(not py7zr_support):
- def PackArchiveFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+ def PackArchiveFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
  return False
-
- if(py7zr_support):
- def PackArchiveFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
- return AppendFilesWithContentFromSevenZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
+ else:
+ def PackArchiveFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
+ return AppendFilesWithContentFromSevenZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, returnfp)


- def PackArchiveFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+ def PackArchiveFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
  checkcompressfile = CheckCompressionSubType(infile, formatspecs, 0, True)
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
  formatspecs = formatspecs[checkcompressfile]
- if(verbose):
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
- return PackArchiveFileFromTarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
+ return PackArchiveFileFromTarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
  elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
- return PackArchiveFileFromZipFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
+ return PackArchiveFileFromZipFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
  elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
- return PackArchiveFileFromRarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
+ return PackArchiveFileFromRarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
  elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
- return PackArchiveFileFromSevenZipFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
+ return PackArchiveFileFromSevenZipFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
  elif(IsSingleDict(formatspecs) and checkcompressfile == formatspecs['format_magic']):
- return RePackArchiveFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, False, 0, 0, checksumtype, False, extradata, jsondata, formatspecs, verbose, returnfp)
+ return RePackArchiveFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, False, 0, 0, checksumtype, False, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
  else:
  return False
  return False
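Every Pack* entry point above now accepts the five-slot checksum list plus the new saltkey argument and simply threads both through to the matching Append* function. A hedged usage sketch follows; the paths and salt value are illustrative, the module is assumed to be importable as pyarchivefile, and the exact effect of saltkey on digest computation is defined by GetFileChecksum/GetHeaderChecksum, which are outside this diff:

```python
import pyarchivefile

# Pack a directory into an archive, requesting sha256 for all five checksum
# slots and salting/keying the checksums with a caller-chosen value.
pyarchivefile.PackArchiveFile(
    ["./somedir"],                 # illustrative input path
    "backup.arc",                  # illustrative output file
    checksumtype=["sha256"] * 5,
    saltkey="example-salt",        # passed through to the checksum helpers
    verbose=True,
)

# Validation must supply the same saltkey, or the salted
# checksums will not match:
pyarchivefile.ArchiveFileValidate("backup.arc", saltkey="example-salt", verbose=True)
```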
@@ -9284,19 +9616,12 @@ def ArchiveFileArrayValidate(listarrayfiles, verbose=False):
  ok = False
  return ok

- def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
- formatspecs=__file_format_multi_dict__, # keep default like original
- seektoend=False, verbose=False, returnfp=False):
- if(verbose):
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
-
+ def ArchiveFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
  formatspecs = formatspecs[fmttype]
  elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
  fmttype = "auto"
-
  curloc = filestart
-
  if(hasattr(infile, "read") or hasattr(infile, "write")):
  curloc = infile.tell()
  fp = infile
@@ -9312,7 +9637,6 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
  if(not fp):
  return False
  fp.seek(filestart, 0)
-
  elif(infile == "-"):
  fp = MkTempFile()
  shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
@@ -9324,7 +9648,6 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
  if(not fp):
  return False
  fp.seek(filestart, 0)
-
  elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
  fp = MkTempFile()
  fp.write(infile)
@@ -9336,7 +9659,6 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
  if(not fp):
  return False
  fp.seek(filestart, 0)
-
  elif(re.findall(__download_proto_support__, infile)):
  fp = download_file_from_internet_file(infile)
  fp = UncompressFileAlt(fp, formatspecs, filestart)
@@ -9347,7 +9669,6 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
  if(not fp):
  return False
  fp.seek(filestart, 0)
-
  else:
  infile = RemoveWindowsPath(infile)
  checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
@@ -9394,11 +9715,9 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
  fp.seek(0, 2)
  except (OSError, ValueError):
  SeekToEndOfFile(fp)
-
  CatSize = fp.tell()
  CatSizeEnd = CatSize
  fp.seek(curloc, 0)
-
  if(IsNestedDict(formatspecs)):
  compresschecking = CheckCompressionType(fp, formatspecs, filestart, False)
  if(compresschecking not in formatspecs):
@@ -9406,43 +9725,59 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
  else:
  formatspecs = formatspecs[compresschecking]
  fp.seek(filestart, 0)
-
  inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
  formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
  formdelsize = len(formatspecs['format_delimiter'])
  formdel = fp.read(formdelsize).decode("UTF-8")
-
  if(formstring != formatspecs['format_magic'] + inheaderver):
  return False
  if(formdel != formatspecs['format_delimiter']):
  return False
-
- if(formatspecs['new_style']):
+ headeroffset = fp.tell()
+ if(__use_new_style__):
  inheader = ReadFileHeaderDataBySize(fp, formatspecs['format_delimiter'])
  else:
  inheader = ReadFileHeaderDataWoSize(fp, formatspecs['format_delimiter'])
-
- fnumextrafieldsize = int(inheader[6], 16)
- fnumextrafields = int(inheader[7], 16)
- extrastart = 8
+ fnumextrafieldsize = int(inheader[15], 16)
+ fnumextrafields = int(inheader[16], 16)
+ extrastart = 17
  extraend = extrastart + fnumextrafields
  formversion = re.findall("([\\d]+)", formstring)
  fheadsize = int(inheader[0], 16)
  fnumfields = int(inheader[1], 16)
- fhencoding = inheader[2]
- fostype = inheader[3]
- fpythontype = inheader[4]
- fnumfiles = int(inheader[5], 16)
+ fnumfiles = int(inheader[8], 16)
  fprechecksumtype = inheader[-2]
  fprechecksum = inheader[-1]
-
+ outfseeknextfile = inheader[9]
+ fjsonsize = int(inheader[12], 16)
+ fjsonchecksumtype = inheader[13]
+ fjsonchecksum = inheader[14]
+ headerjsonoffset = fp.tell()
+ fprejsoncontent = fp.read(fjsonsize)
+ jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+ # Next seek directive
+ if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
+ fseeknextasnum = int(outfseeknextfile.replace("+", ""))
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 1)
+ elif(re.findall(r"^\-([0-9]+)", outfseeknextfile)):
+ fseeknextasnum = int(outfseeknextfile)
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 1)
+ elif(re.findall(r"^([0-9]+)", outfseeknextfile)):
+ fseeknextasnum = int(outfseeknextfile)
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 0)
+ else:
+ return False
  il = 0
- headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
- newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
-
+ headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs, saltkey)
+ newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
  valid_archive = True
  invalid_archive = False
-
  if(verbose):
  if(hasattr(infile, "read") or hasattr(infile, "write")):
  try:
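The archive-level header now carries its own seek-next directive (inheader[9]), parsed with the same three-way regex used per record further down: a leading "+" seeks relative to the current position, a leading "-" also seeks relative (int() preserves the sign), and a bare number is an absolute offset. A standalone transcription of that logic:

```python
import io
import re

def apply_seek_directive(fp, directive):
    """Apply a seek-next-file directive, mirroring the logic in this diff.

    "+N" -> relative seek forward, "-N" -> relative seek backward,
    "N"  -> absolute seek; anything else is invalid.
    """
    if re.findall(r"^\+([0-9]+)", directive):
        fp.seek(int(directive.replace("+", "")), 1)   # relative, from current
    elif re.findall(r"^\-([0-9]+)", directive):
        fp.seek(int(directive), 1)                    # int() keeps the minus sign
    elif re.findall(r"^([0-9]+)", directive):
        fp.seek(int(directive), 0)                    # absolute offset
    else:
        return False
    return True

buf = io.BytesIO(b"0123456789")
apply_seek_directive(buf, "+4")
print(buf.tell())  # 4
```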
@@ -9454,78 +9789,56 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
  else:
  VerbosePrintOut(infile)
  VerbosePrintOut("Number of Records " + str(fnumfiles))
-
  if(headercheck):
  if(verbose):
- VerbosePrintOut("File Header Checksum Passed at offset " + str(0))
+ VerbosePrintOut("File Header Checksum Passed at offset " + str(headeroffset))
  VerbosePrintOut("'" + fprechecksum + "' == " + "'" + newfcs + "'")
  else:
  # always flip flags, even when not verbose
  valid_archive = False
  invalid_archive = True
  if(verbose):
- VerbosePrintOut("File Header Checksum Failed at offset " + str(0))
+ VerbosePrintOut("File Header Checksum Failed at offset " + str(headeroffset))
  VerbosePrintOut("'" + fprechecksum + "' != " + "'" + newfcs + "'")
-
+ if(fjsonsize > 0):
+ if(CheckChecksums(jsonfcs, fjsonchecksum)):
+ if(verbose):
+ VerbosePrintOut("File JSON Data Checksum Passed at offset " + str(headerjsonoffset))
+ VerbosePrintOut("'" + outfjsonchecksum + "' == " + "'" + injsonfcs + "'")
+ else:
+ valid_archive = False
+ invalid_archive = True
+ if(verbose):
+ VerbosePrintOut("File JSON Data Checksum Error at offset " + str(headerjsonoffset))
+ VerbosePrintOut("'" + outfjsonchecksum + "' != " + "'" + injsonfcs + "'")
  if(verbose):
  VerbosePrintOut("")
-
  # Iterate either until EOF (seektoend) or fixed count
  while (fp.tell() < CatSizeEnd) if seektoend else (il < fnumfiles):
  outfhstart = fp.tell()
- if(formatspecs['new_style']):
+ if(__use_new_style__):
  inheaderdata = ReadFileHeaderDataBySize(fp, formatspecs['format_delimiter'])
  else:
  inheaderdata = ReadFileHeaderDataWoSize(fp, formatspecs['format_delimiter'])

  if(len(inheaderdata) == 0):
  break
-
- outfheadsize = int(inheaderdata[0], 16)
- outfnumfields = int(inheaderdata[1], 16)
- outftype = int(inheaderdata[2], 16)
- # FIX: these must come from inheaderdata, not inheader
- outfostype = inheaderdata[3]
- outfencoding = inheaderdata[4]
-
  if(re.findall("^[.|/]", inheaderdata[5])):
  outfname = inheaderdata[5]
  else:
  outfname = "./" + inheaderdata[5]
  outfbasedir = os.path.dirname(outfname)
-
- outflinkname = inheaderdata[6]
  outfsize = int(inheaderdata[7], 16)
- outfatime = int(inheaderdata[8], 16)
- outfmtime = int(inheaderdata[9], 16)
- outfctime = int(inheaderdata[10], 16)
- outfbtime = int(inheaderdata[11], 16)
- outfmode = int(inheaderdata[12], 16)
- outfchmode = stat.S_IMODE(outfmode)
- outftypemod = stat.S_IFMT(outfmode)
- outfwinattributes = int(inheaderdata[13], 16)
- outfcompression = inheaderdata[14]
- outfcsize = int(inheaderdata[15], 16)
- outfuid = int(inheaderdata[16], 16)
- outfuname = inheaderdata[17]
- outfgid = int(inheaderdata[18], 16)
- outfgname = inheaderdata[19]
- fid = int(inheaderdata[20], 16)
- finode = int(inheaderdata[21], 16)
- flinkcount = int(inheaderdata[22], 16)
- outfdev = int(inheaderdata[23], 16)
- outfdev_minor = int(inheaderdata[24], 16)
- outfdev_major = int(inheaderdata[25], 16)
- outfseeknextfile = inheaderdata[26]
- outfjsontype = inheaderdata[27]
- outfjsonlen = int(inheaderdata[28], 16)
- outfjsonsize = int(inheaderdata[29], 16)
- outfjsonchecksumtype = inheaderdata[30]
- outfjsonchecksum = inheaderdata[31]
-
+ outfcompression = inheaderdata[17]
+ outfcsize = int(inheaderdata[18], 16)
+ fid = int(inheaderdata[23], 16)
+ finode = int(inheaderdata[24], 16)
+ outfseeknextfile = inheaderdata[28]
+ outfjsonsize = int(inheaderdata[31], 16)
+ outfjsonchecksumtype = inheaderdata[32]
+ outfjsonchecksum = inheaderdata[33]
  outfhend = fp.tell() - 1 # (kept for parity; not used)
  outfjstart = fp.tell()
-
  # Read JSON bytes; compute checksum on bytes for robustness
  outfprejsoncontent_bytes = fp.read(outfjsonsize)
  # Decode for any downstream text needs (not used further here)
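The validator now decodes only the per-record fields it actually checks, at their shifted 0.26.0 offsets. Collecting the indices read above (plus name and size, which keep their old slots) gives this partial map; slots not touched by the validator are omitted because the diff does not reveal them:

```python
# Partial index map of a 0.26.0 per-record header, limited to the fields
# this validator reads (other slots are not visible in this diff).
RECORD_HEADER_INDEX = {
    5:  "fname",
    7:  "fsize",
    17: "fcompression",
    18: "fcsize",
    23: "fid",
    24: "finode",
    28: "fseeknextfile",
    31: "fjsonsize",
    32: "fjsonchecksumtype",
    33: "fjsonchecksum",
    35: "fextrafields",   # extra-field entries start at index 36
}
```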
@@ -9533,27 +9846,21 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
  outfprejsoncontent = outfprejsoncontent_bytes.decode("UTF-8")
  except Exception:
  outfprejsoncontent = None
-
  outfjend = fp.tell()
  fp.seek(len(formatspecs['format_delimiter']), 1)
-
- injsonfcs = GetFileChecksum(outfprejsoncontent_bytes, outfjsonchecksumtype, True, formatspecs)
-
- outfextrasize = int(inheaderdata[32], 16)
- outfextrafields = int(inheaderdata[33], 16)
+ injsonfcs = GetFileChecksum(outfprejsoncontent_bytes, outfjsonchecksumtype, True, formatspecs, saltkey)
+ outfextrafields = int(inheaderdata[35], 16)
  extrafieldslist = []
- extrastart = 34
+ extrastart = 36
  extraend = extrastart + outfextrafields
-
  outfcs = inheaderdata[-2].lower()
  outfccs = inheaderdata[-1].lower()
- infcs = GetHeaderChecksum(inheaderdata[:-2], inheaderdata[-4].lower(), True, formatspecs)
-
+ infcs = GetHeaderChecksum(inheaderdata[:-2], inheaderdata[-4].lower(), True, formatspecs, saltkey)
  if(verbose):
  VerbosePrintOut(outfname)
  VerbosePrintOut("Record Number " + str(il) + "; File ID " + str(fid) + "; iNode Number " + str(finode))

- if(hmac.compare_digest(outfcs, infcs)):
+ if(CheckChecksums(outfcs, infcs)):
  if(verbose):
  VerbosePrintOut("File Header Checksum Passed at offset " + str(outfhstart))
  VerbosePrintOut("'" + outfcs + "' == " + "'" + infcs + "'")
@@ -9563,9 +9870,8 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
9563
9870
  if(verbose):
9564
9871
  VerbosePrintOut("File Header Checksum Failed at offset " + str(outfhstart))
9565
9872
  VerbosePrintOut("'" + outfcs + "' != " + "'" + infcs + "'")
9566
-
9567
9873
  if(outfjsonsize > 0):
9568
- if(hmac.compare_digest(injsonfcs, outfjsonchecksum)):
9874
+ if(CheckChecksums(injsonfcs, outfjsonchecksum)):
9569
9875
  if(verbose):
9570
9876
  VerbosePrintOut("File JSON Data Checksum Passed at offset " + str(outfjstart))
9571
9877
  VerbosePrintOut("'" + outfjsonchecksum + "' == " + "'" + injsonfcs + "'")
@@ -9575,21 +9881,19 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
9575
9881
  if(verbose):
9576
9882
  VerbosePrintOut("File JSON Data Checksum Error at offset " + str(outfjstart))
9577
9883
  VerbosePrintOut("'" + outfjsonchecksum + "' != " + "'" + injsonfcs + "'")
9578
-
9579
9884
  outfcontentstart = fp.tell()
9580
9885
  outfcontents = b"" # FIX: bytes for Py2/3 consistency
9581
9886
  pyhascontents = False
9582
-
9583
9887
  if(outfsize > 0):
9584
9888
  if(outfcompression == "none" or outfcompression == "" or outfcompression == "auto"):
9585
9889
  outfcontents = fp.read(outfsize)
9586
9890
  else:
9587
9891
  outfcontents = fp.read(outfcsize)
9588
9892
 
9589
- infccs = GetFileChecksum(outfcontents, inheaderdata[-3].lower(), False, formatspecs)
9893
+ infccs = GetFileChecksum(outfcontents, inheaderdata[-3].lower(), False, formatspecs, saltkey)
9590
9894
  pyhascontents = True
9591
9895
 
9592
- if(hmac.compare_digest(outfccs, infccs)):
9896
+ if(CheckChecksums(outfccs, infccs)):
9593
9897
  if(verbose):
9594
9898
  VerbosePrintOut("File Content Checksum Passed at offset " + str(outfcontentstart))
9595
9899
  VerbosePrintOut("'" + outfccs + "' == " + "'" + infccs + "'")
@@ -9599,10 +9903,8 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
9599
9903
  if(verbose):
9600
9904
  VerbosePrintOut("File Content Checksum Failed at offset " + str(outfcontentstart))
9601
9905
  VerbosePrintOut("'" + outfccs + "' != " + "'" + infccs + "'")
9602
-
9603
9906
  if(verbose):
9604
9907
  VerbosePrintOut("")
9605
-
9606
9908
  # Next seek directive
9607
9909
  if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
9608
9910
  fseeknextasnum = int(outfseeknextfile.replace("+", ""))
@@ -9621,9 +9923,7 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
9621
9923
  fp.seek(fseeknextasnum, 0)
9622
9924
  else:
9623
9925
  return False
9624
-
9625
9926
  il = il + 1
9626
-
9627
9927
  if(valid_archive):
9628
9928
  if(returnfp):
9629
9929
  return fp
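The fseeknextfile directive consumed at the end of each record above is a small seek instruction: a leading "+" means a relative skip from the current position, a bare number is an absolute offset, and anything else aborts validation. As a standalone sketch (the "-N" backward form is assumed for symmetry and may not exist in the actual format):

import re

def apply_seek_directive_sketch(fp, directive):
    if re.match(r"^\+[0-9]+$", directive):
        fp.seek(int(directive[1:]), 1)   # relative: skip forward
    elif re.match(r"^-[0-9]+$", directive):
        fp.seek(-int(directive[1:]), 1)  # relative: step backward (assumed)
    elif re.match(r"^[0-9]+$", directive):
        fp.seek(int(directive), 0)       # absolute offset
    else:
        return False                     # malformed directive
    return True
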
@@ -9635,34 +9935,34 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
9635
9935
  return False
9636
9936
 
9637
9937
 
9638
- def ArchiveFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
9639
- return ArchiveFileValidate(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
9938
+ def ArchiveFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
9939
+ return ArchiveFileValidate(infile, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
9640
9940
 
9641
9941
 
9642
- def ArchiveFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
9942
+ def ArchiveFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
9643
9943
  if(isinstance(infile, (list, tuple, ))):
9644
9944
  pass
9645
9945
  else:
9646
9946
  infile = [infile]
9647
9947
  outretval = True
9648
9948
  for curfname in infile:
9649
- curretfile = ArchiveFileValidate(curfname, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
9949
+ curretfile = ArchiveFileValidate(curfname, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
9650
9950
  if(not curretfile):
9651
9951
  outretval = False
9652
9952
  return outretval
9653
9953
 
9654
- def ArchiveFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
9655
- return ArchiveFileValidateMultiple(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
9954
+ def ArchiveFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
9955
+ return ArchiveFileValidateMultiple(infile, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
9656
9956
 
9657
9957
 
9658
- def StackedArchiveFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
9958
+ def StackedArchiveFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
9659
9959
  outretval = []
9660
9960
  outstartfile = filestart
9661
9961
  outfsize = float('inf')
9662
9962
  while True:
9663
9963
  if outstartfile >= outfsize: # stop once the next start offset reaches the end of the container
9664
9964
  break
9665
- is_valid_file = ArchiveFileValidate(infile, fmttype, outstartfile, formatspecs, seektoend, verbose, True)
9965
+ is_valid_file = ArchiveFileValidate(infile, fmttype, outstartfile, formatspecs, saltkey, seektoend, verbose, True)
9666
9966
  if is_valid_file is False: # stop when function signals False
9667
9967
  outretval.append(is_valid_file)
9668
9968
  break
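StackedArchiveFileValidate walks archives concatenated back to back in one container by re-invoking the single-archive validator with returnfp=True, so the file position left after one archive becomes the start offset of the next. The same control flow in isolation (validate_one stands in for ArchiveFileValidate and is assumed to leave fp positioned just past the archive it checked, or to return False):

def validate_stacked_sketch(fp, validate_one):
    results, offset = [], 0
    fp.seek(0, 2)
    total = fp.tell()            # size of the whole container
    while offset < total:
        ok = validate_one(fp, offset)
        results.append(bool(ok))
        if not ok:
            break
        offset = fp.tell()       # next archive starts where this one ended
    return results
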
@@ -9679,33 +9979,36 @@ def StackedArchiveFileValidate(infile, fmttype="auto", filestart=0, formatspecs=
9679
9979
  if(returnfp):
9680
9980
  return infile
9681
9981
  else:
9682
- infile.close()
9982
+ try:
9983
+ infile.close()
9984
+ except AttributeError:
9985
+ return False
9683
9986
  return outretval
9684
9987
 
9685
9988
 
9686
9989
 
9687
- def StackedArchiveFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
9688
- return StackedArchiveFileValidate(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
9990
+ def StackedArchiveFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
9991
+ return StackedArchiveFileValidate(infile, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
9689
9992
 
9690
9993
 
9691
- def StackedArchiveFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
9994
+ def StackedArchiveFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
9692
9995
  if(isinstance(infile, (list, tuple, ))):
9693
9996
  pass
9694
9997
  else:
9695
9998
  infile = [infile]
9696
9999
  outretval = True
9697
10000
  for curfname in infile:
9698
- curretfile = StackedArchiveFileValidate(curfname, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
10001
+ curretfile = StackedArchiveFileValidate(curfname, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
9699
10002
  if(not curretfile):
9700
10003
  outretval = False
9701
10004
  return outretval
9702
10005
 
9703
- def StackedArchiveFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
9704
- return StackedArchiveFileValidateMultiple(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
10006
+ def StackedArchiveFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
10007
+ return StackedArchiveFileValidateMultiple(infile, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
9705
10008
 
9706
10009
 
9707
- def ArchiveFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
9708
- outfp = ReadInFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
10010
+ def ArchiveFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
10011
+ outfp = ReadInFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
9709
10012
  if not returnfp:
9710
10013
  for item in outfp:
9711
10014
  fp = item.get('fp')
@@ -9719,26 +10022,26 @@ def ArchiveFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend
9719
10022
  return outfp
9720
10023
 
9721
10024
 
9722
- def MultipleArchiveFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
10025
+ def MultipleArchiveFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
9723
10026
  if(isinstance(infile, (list, tuple, ))):
9724
10027
  pass
9725
10028
  else:
9726
10029
  infile = [infile]
9727
10030
  outretval = []
9728
10031
  for curfname in infile:
9729
- outretval.append(ArchiveFileToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp))
10032
+ outretval.append(ArchiveFileToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend, returnfp))
9730
10033
  return outretval
9731
10034
 
9732
- def MultipleArchiveFilesToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
9733
- return MultipleArchiveFileToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
10035
+ def MultipleArchiveFilesToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
10036
+ return MultipleArchiveFileToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
9734
10037
 
9735
10038
 
9736
- def ArchiveFileStringToArray(instr, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
10039
+ def ArchiveFileStringToArray(instr, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
9737
10040
  checkcompressfile = CheckCompressionSubType(instr, formatspecs, filestart, True) # FIX: the parameter here is instr, not infile
9738
10041
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
9739
10042
  formatspecs = formatspecs[checkcompressfile]
9740
10043
  fp = MkTempFile(instr)
9741
- listarrayfiles = ArchiveFileToArray(fp, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
10044
+ listarrayfiles = ArchiveFileToArray(fp, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
9742
10045
  return listarrayfiles
9743
10046
 
9744
10047
 
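ArchiveFileStringToArray only has to bridge from an in-memory archive to the file-based reader. The shape of that bridge, sketched with io.BytesIO standing in for MkTempFile and archive_to_array for ArchiveFileToArray:

import io

def string_to_array_sketch(instr, archive_to_array):
    # Wrap the raw archive bytes in a seekable file object and reuse
    # the ordinary file-based reader.
    if not isinstance(instr, bytes):
        instr = instr.encode("UTF-8")
    return archive_to_array(io.BytesIO(instr))
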
@@ -9747,9 +10050,8 @@ def TarFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile
9747
10050
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
9748
10051
  formatspecs = formatspecs[checkcompressfile]
9749
10052
  fp = MkTempFile()
9750
- fp = PackArchiveFileFromTarFile(
9751
- infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
9752
- listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
10053
+ fp = PackArchiveFileFromTarFile(infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, None, False, True)
10054
+ listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, None, seektoend, returnfp)
9753
10055
  return listarrayfiles
9754
10056
 
9755
10057
 
@@ -9758,9 +10060,8 @@ def ZipFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile
9758
10060
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
9759
10061
  formatspecs = formatspecs[checkcompressfile]
9760
10062
  fp = MkTempFile()
9761
- fp = PackArchiveFileFromZipFile(
9762
- infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
9763
- listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
10063
+ fp = PackArchiveFileFromZipFile(infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, None, False, True)
10064
+ listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, None, seektoend, returnfp)
9764
10065
  return listarrayfiles
9765
10066
 
9766
10067
 
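TarFileToArray, ZipFileToArray, and the optional RAR/7z variants below all share one pipeline: repack the foreign archive into the native format inside a temporary file, then parse that temp file with the ordinary reader. In outline (every callable here is a stand-in for the module function named in the comment):

def foreign_to_array_sketch(infile, mk_temp_file, pack_from_foreign, archive_to_array):
    fp = mk_temp_file()                  # MkTempFile()
    fp = pack_from_foreign(infile, fp)   # e.g. PackArchiveFileFromTarFile(...)
    return archive_to_array(fp)          # ArchiveFileToArray(fp, ...)
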
@@ -9774,9 +10075,8 @@ if(rarfile_support):
9774
10075
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
9775
10076
  formatspecs = formatspecs[checkcompressfile]
9776
10077
  fp = MkTempFile()
9777
- fp = PackArchiveFileFromRarFile(
9778
- infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
9779
- listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
10078
+ fp = PackArchiveFileFromRarFile(infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, None, False, True)
10079
+ listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, None, seektoend, returnfp)
9780
10080
  return listarrayfiles
9781
10081
 
9782
10082
  if(not py7zr_support):
@@ -9789,13 +10089,12 @@ if(py7zr_support):
9789
10089
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
9790
10090
  formatspecs = formatspecs[checkcompressfile]
9791
10091
  fp = MkTempFile()
9792
- fp = PackArchiveFileFromSevenZipFile(
9793
- infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
9794
- listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
10092
+ fp = PackArchiveFileFromSevenZipFile(infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, None, False, True)
10093
+ listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, None, seektoend, returnfp)
9795
10094
  return listarrayfiles
9796
10095
 
9797
10096
 
9798
- def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
10097
+ def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
9799
10098
  checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
9800
10099
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
9801
10100
  formatspecs = formatspecs[checkcompressfile]
@@ -9808,17 +10107,16 @@ def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, c
9808
10107
  elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
9809
10108
  return SevenZipFileToArray(infile, seekstart, seekend, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
9810
10109
  elif(checkcompressfile == formatspecs['format_magic']):
9811
- return ArchiveFileToArray(infile, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
10110
+ return ArchiveFileToArray(infile, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
9812
10111
  else:
9813
10112
  return False
9814
10113
  return False
9815
10114
 
9816
10115
 
9817
- def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, skipchecksum=False, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
10116
+ def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, saltkey=None, skipchecksum=False, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
9818
10117
  outarray = MkTempFile()
9819
- packform = PackArchiveFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
9820
- compressionlevel, followlink, checksumtype, extradata, formatspecs, verbose, True)
9821
- listarrayfiles = ArchiveFileToArray(outarray, "auto", filestart, seekstart, seekend, listonly, True, True, skipchecksum, formatspecs, seektoend, returnfp)
10118
+ packform = PackArchiveFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, followlink, checksumtype, extradata, formatspecs, saltkey, verbose, True)
10119
+ listarrayfiles = ArchiveFileToArray(outarray, "auto", filestart, seekstart, seekend, listonly, True, True, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
9822
10120
  return listarrayfiles
9823
10121
 
9824
10122
 
@@ -9940,12 +10238,12 @@ def ArchiveFileArrayToArrayIndex(inarray, returnfp=False):
9940
10238
  return out
9941
10239
 
9942
10240
 
9943
- def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=None, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=None, skipchecksum=False, extradata=None, jsondata=None, formatspecs=None, seektoend=False, verbose=False, returnfp=False):
10241
+ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, seektoend=False, verbose=False, returnfp=False):
9944
10242
  # ---------- Safe defaults ----------
9945
10243
  if compressionuselist is None:
9946
10244
  compressionuselist = compressionlistalt
9947
10245
  if checksumtype is None:
9948
- checksumtype = ["md5", "md5", "md5", "md5"]
10246
+ checksumtype = ["md5", "md5", "md5", "md5", "md5"]
9949
10247
  if extradata is None:
9950
10248
  extradata = []
9951
10249
  if jsondata is None:
@@ -9964,7 +10262,7 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
9964
10262
  infile = RemoveWindowsPath(infile)
9965
10263
  listarrayfileslist = ArchiveFileToArray(
9966
10264
  infile, "auto", filestart, seekstart, seekend,
9967
- False, True, True, skipchecksum, formatspecs, seektoend, False
10265
+ False, True, True, skipchecksum, formatspecs, insaltkey, seektoend, False
9968
10266
  )
9969
10267
 
9970
10268
  # ---------- Format specs selection ----------
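The "safe defaults" block above keeps working even though the signature now spells out concrete defaults: a caller passing None still gets fresh per-call containers, so one invocation cannot leak list or dict state into the next. The idiom in isolation (repack_defaults_sketch is illustrative only):

def repack_defaults_sketch(extradata=None, jsondata=None, checksumtype=None):
    # Fresh containers per call; nothing is shared between invocations.
    if extradata is None:
        extradata = []
    if jsondata is None:
        jsondata = {}
    if checksumtype is None:
        checksumtype = ["md5", "md5", "md5", "md5", "md5"]
    return extradata, jsondata, checksumtype
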
@@ -10031,9 +10329,6 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
10031
10329
  if (compression is None) or (compressionuselist and compression not in compressionuselist):
10032
10330
  compression = "auto"
10033
10331
 
10034
- if verbose:
10035
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
10036
-
10037
10332
  # No files?
10038
10333
  if not listarrayfiles.get('ffilelist'):
10039
10334
  return False
@@ -10046,7 +10341,7 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
10046
10341
  if lenlist != fnumfiles:
10047
10342
  fnumfiles = lenlist
10048
10343
 
10049
- AppendFileHeader(fp, fnumfiles, listarrayfiles.get('fencoding', 'utf-8'), [], checksumtype[0], formatspecs)
10344
+ AppendFileHeader(fp, fnumfiles, listarrayfiles.get('fencoding', 'utf-8'), listarrayfiles['fextradata'], listarrayfiles['fjsondata'], [checksumtype[0], checksumtype[1]], formatspecs, outsaltkey)
10050
10345
 
10051
10346
  # loop counters
10052
10347
  lcfi = 0
@@ -10076,6 +10371,9 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
10076
10371
  # fields (hex-encoded where expected)
10077
10372
  fheadersize = format(int(cur_entry['fheadersize']), 'x').lower()
10078
10373
  fsize = format(int(cur_entry['fsize']), 'x').lower()
10374
+ fblksize = format(int(cur_entry['fblksize']), 'x').lower()
10375
+ fblocks = format(int(cur_entry['fblocks']), 'x').lower()
10376
+ fflags = format(int(cur_entry['fflags']), 'x').lower()
10079
10377
  flinkname = cur_entry['flinkname']
10080
10378
  fatime = format(int(cur_entry['fatime']), 'x').lower()
10081
10379
  fmtime = format(int(cur_entry['fmtime']), 'x').lower()
@@ -10094,8 +10392,7 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
10094
10392
  fcompression = cur_entry['fcompression']
10095
10393
  fcsize = format(int(cur_entry['fcsize']), 'x').lower()
10096
10394
  fdev = format(int(cur_entry['fdev']), 'x').lower()
10097
- fdev_minor = format(int(cur_entry['fminor']), 'x').lower()
10098
- fdev_major = format(int(cur_entry['fmajor']), 'x').lower()
10395
+ frdev = format(int(cur_entry['frdev']), 'x').lower()
10099
10396
  fseeknextfile = cur_entry['fseeknextfile']
10100
10397
 
10101
10398
  # extra fields sizing
@@ -10106,6 +10403,12 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
10106
10403
  # extradata/jsondata defaults per file
10107
10404
  if not followlink and len(extradata) <= 0:
10108
10405
  extradata = cur_entry['fextradata']
10406
+
10407
+ fvendorfields = cur_entry['fvendorfields']
10408
+ ffvendorfieldslist = []
10409
+ if(fvendorfields>0):
10410
+ ffvendorfieldslist = cur_entry['fvendorfieldslist']
10411
+
10109
10412
  if not followlink and len(jsondata) <= 0:
10110
10413
  jsondata = cur_entry['fjsondata']
10111
10414
 
@@ -10141,7 +10444,11 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
10141
10444
  fcontents.seek(0, 0)
10142
10445
  cfcontents.seek(0, 0)
10143
10446
  cfcontents = CompressOpenFileAlt(
10144
- cfcontents, compressionuselist[ilmin], compressionlevel, compressionuselist, formatspecs
10447
+ cfcontents,
10448
+ compressionuselist[ilmin],
10449
+ compressionlevel,
10450
+ compressionuselist,
10451
+ formatspecs
10145
10452
  )
10146
10453
  if cfcontents:
10147
10454
  cfcontents.seek(0, 2)
@@ -10149,7 +10456,7 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
10149
10456
  cfcontents.close()
10150
10457
  else:
10151
10458
  ilcsize.append(float("inf"))
10152
- ilmin += 1
10459
+ ilmin = ilmin + 1
10153
10460
  ilcmin = ilcsize.index(min(ilcsize))
10154
10461
  curcompression = compressionuselist[ilcmin]
10155
10462
 
@@ -10158,16 +10465,24 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
10158
10465
  shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
10159
10466
  cfcontents.seek(0, 0)
10160
10467
  cfcontents = CompressOpenFileAlt(
10161
- cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs
10468
+ cfcontents,
10469
+ curcompression,
10470
+ compressionlevel,
10471
+ compressionuselist,
10472
+ formatspecs
10162
10473
  )
10163
10474
  cfcontents.seek(0, 2)
10164
- cfsize_val = cfcontents.tell()
10165
- if ucfsize > cfsize_val:
10166
- fcsize = format(int(cfsize_val), 'x').lower()
10475
+ cfsize = cfcontents.tell()
10476
+ if ucfsize > cfsize:
10477
+ fcsize = format(int(cfsize), 'x').lower()
10167
10478
  fcompression = curcompression
10168
10479
  fcontents.close()
10169
10480
  fcontents = cfcontents
10170
10481
 
10482
+ if fcompression == "none":
10483
+ fcompression = ""
10484
+ fcontents.seek(0, 0)
10485
+
10171
10486
  # link following (fixed: use listarrayfiles, not prelistarrayfiles)
10172
10487
  if followlink:
10173
10488
  if (cur_entry['ftype'] == 1 or cur_entry['ftype'] == 2):
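The "auto" branch above probes every codec in compressionuselist against the same payload and keeps whichever yields the smallest result, falling back to the uncompressed bytes when nothing helps. The same selection loop reduced to its essentials (codecs maps a name to a compress(bytes) -> bytes callable; the two stdlib codecs are just examples):

import bz2
import zlib

def pick_best_compression_sketch(data, codecs):
    best_name, best_blob = "none", data   # "none" mirrors the stored case
    for name, compress in codecs.items():
        blob = compress(data)
        if len(blob) < len(best_blob):
            best_name, best_blob = name, blob
    return best_name, best_blob

name, blob = pick_best_compression_sketch(
    b"example payload" * 64, {"zlib": zlib.compress, "bz2": bz2.compress})
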
@@ -10176,6 +10491,9 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
10176
10491
  flinkinfo = listarrayfiles['ffilelist'][flinkid]
10177
10492
  fheadersize = format(int(flinkinfo['fheadersize']), 'x').lower()
10178
10493
  fsize = format(int(flinkinfo['fsize']), 'x').lower()
10494
+ fblksize = format(int(flinkinfo['fblksize']), 'x').lower()
10495
+ fblocks = format(int(flinkinfo['fblocks']), 'x').lower()
10496
+ fflags = format(int(flinkinfo['fflags']), 'x').lower()
10179
10497
  flinkname = flinkinfo['flinkname']
10180
10498
  fatime = format(int(flinkinfo['fatime']), 'x').lower()
10181
10499
  fmtime = format(int(flinkinfo['fmtime']), 'x').lower()
@@ -10194,14 +10512,19 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
10194
10512
  fcompression = flinkinfo['fcompression']
10195
10513
  fcsize = format(int(flinkinfo['fcsize']), 'x').lower()
10196
10514
  fdev = format(int(flinkinfo['fdev']), 'x').lower()
10197
- fdev_minor = format(int(flinkinfo['fminor']), 'x').lower()
10198
- fdev_major = format(int(flinkinfo['fmajor']), 'x').lower()
10515
+ frdev = format(int(flinkinfo['frdev']), 'x').lower()
10199
10516
  fseeknextfile = flinkinfo['fseeknextfile']
10200
10517
  if (len(flinkinfo['fextradata']) > flinkinfo['fextrafields']
10201
10518
  and len(flinkinfo['fextradata']) > 0):
10202
10519
  flinkinfo['fextrafields'] = len(flinkinfo['fextradata'])
10203
10520
  if len(extradata) < 0:
10204
10521
  extradata = flinkinfo['fextradata']
10522
+
10523
+ fvendorfields = flinkinfo['fvendorfields']
10524
+ ffvendorfieldslist = []
10525
+ if(fvendorfields>0):
10526
+ ffvendorfieldslist = flinkinfo['fvendorfieldslist']
10527
+
10205
10528
  if len(jsondata) < 0:
10206
10529
  jsondata = flinkinfo['fjsondata']
10207
10530
  fcontents = flinkinfo['fcontents']
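When a link target is followed, its vendor fields are gathered the same way as for a regular entry: fvendorfields carries the count and fvendorfieldslist the actual entries, and it is the entry list that later gets merged into extradata before the header is written. In isolation:

def merge_vendor_fields_sketch(extradata, fvendorfields, fvendorfieldslist):
    # Append the vendor-field entries (not the integer count) to the
    # extra-data list that AppendFileHeaderWithContent will serialize.
    if fvendorfields > 0 and len(fvendorfieldslist) > 0:
        extradata.extend(fvendorfieldslist)
    return extradata
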
@@ -10230,15 +10553,15 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
10230
10553
  fcompression = ""
10231
10554
 
10232
10555
  tmpoutlist = [
10233
- ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime,
10556
+ ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime,
10234
10557
  fctime, fbtime, fmode, fwinattributes, fcompression, fcsize, fuid, funame,
10235
- fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, fseeknextfile
10558
+ fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, fseeknextfile
10236
10559
  ]
10237
10560
 
10238
- AppendFileHeaderWithContent(
10239
- fp, tmpoutlist, extradata, jsondata, fcontents.read(),
10240
- [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs
10241
- )
10561
+ if(fvendorfields>0 and len(ffvendorfieldslist)>0):
10562
+ extradata.extend(ffvendorfieldslist) # FIX: extend with the vendor-field entries, not the integer count
10563
+
10564
+ AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(),[checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, outsaltkey)
10242
10565
  try:
10243
10566
  fcontents.close()
10244
10567
  except Exception:
@@ -10283,12 +10606,12 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
10283
10606
  pass
10284
10607
  return True
10285
10608
 
10286
- def RePackMultipleArchiveFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=None, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=None, skipchecksum=False, extradata=None, jsondata=None, formatspecs=None, seektoend=False, verbose=False, returnfp=False):
10609
+ def RePackMultipleArchiveFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, seektoend=False, verbose=False, returnfp=False):
10287
10610
  if not isinstance(infiles, list):
10288
10611
  infiles = [infiles]
10289
10612
  returnout = False
10290
10613
  for infileslist in infiles:
10291
- returnout = RePackArchiveFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend, checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, True)
10614
+ returnout = RePackArchiveFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend, checksumtype, skipchecksum, extradata, jsondata, formatspecs, insaltkey, outsaltkey, seektoend, verbose, True)
10292
10615
  if(not returnout):
10293
10616
  break
10294
10617
  else:
@@ -10298,33 +10621,28 @@ def RePackMultipleArchiveFile(infiles, outfile, fmttype="auto", compression="aut
10298
10621
  return True
10299
10622
  return returnout
10300
10623
 
10301
- def RePackArchiveFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
10624
+ def RePackArchiveFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, seektoend=False, verbose=False, returnfp=False):
10302
10625
  fp = MkTempFile(instr)
10303
- listarrayfiles = RePackArchiveFile(fp, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
10304
- checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
10626
+ listarrayfiles = RePackArchiveFile(fp, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend, checksumtype, skipchecksum, extradata, jsondata, formatspecs, insaltkey, outsaltkey, seektoend, verbose, returnfp)
10305
10627
  return listarrayfiles
10306
10628
 
10307
10629
 
10308
- def PackArchiveFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
10630
+ def PackArchiveFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
10309
10631
  outarray = MkTempFile()
10310
- packform = PackArchiveFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
10311
- compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, True)
10312
- listarrayfiles = RePackArchiveFile(outarray, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
10313
- checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
10632
+ packform = PackArchiveFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, saltkey, verbose, True)
10633
+ listarrayfiles = RePackArchiveFile(outarray, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend, checksumtype, skipchecksum, extradata, jsondata, formatspecs, saltkey, seektoend, verbose, returnfp)
10314
10634
  return listarrayfiles
10315
10635
 
10316
10636
 
10317
- def UnPackArchiveFile(infile, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, preservepermissions=True, preservetime=True, seektoend=False, verbose=False, returnfp=False):
10637
+ def UnPackArchiveFile(infile, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, preservepermissions=True, preservetime=True, seektoend=False, verbose=False, returnfp=False):
10318
10638
  if(outdir is not None):
10319
10639
  outdir = RemoveWindowsPath(outdir)
10320
- if(verbose):
10321
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
10322
10640
  if(isinstance(infile, dict)):
10323
10641
  listarrayfiles = infile
10324
10642
  else:
10325
10643
  if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
10326
10644
  infile = RemoveWindowsPath(infile)
10327
- listarrayfiles = ArchiveFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, True, skipchecksum, formatspecs, seektoend, returnfp)
10645
+ listarrayfiles = ArchiveFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, True, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
10328
10646
  if(not listarrayfiles):
10329
10647
  return False
10330
10648
  lenlist = len(listarrayfiles['ffilelist'])
@@ -10560,9 +10878,9 @@ def UnPackArchiveFile(infile, outdir=None, followlink=False, filestart=0, seekst
10560
10878
  return True
10561
10879
 
10562
10880
 
10563
- def UnPackArchiveFileString(instr, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
10881
+ def UnPackArchiveFileString(instr, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
10564
10882
  fp = MkTempFile(instr)
10565
- listarrayfiles = UnPackArchiveFile(fp, outdir, followlink, filestart, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
10883
+ listarrayfiles = UnPackArchiveFile(fp, outdir, followlink, filestart, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend, verbose, returnfp)
10566
10884
  return listarrayfiles
10567
10885
 
10568
10886
  def ftype_to_str(ftype):
@@ -10580,9 +10898,7 @@ def ftype_to_str(ftype):
10580
10898
  # Default to "file" if unknown
10581
10899
  return mapping.get(ftype, "file")
10582
10900
 
10583
- def ArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
10584
- if(verbose):
10585
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
10901
+ def ArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, newstyle=False, returnfp=False):
10586
10902
  if(isinstance(infile, dict)):
10587
10903
  listarrayfileslist = [infile]
10588
10904
  if(isinstance(infile, list)):
@@ -10590,7 +10906,7 @@ def ArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seeke
10590
10906
  else:
10591
10907
  if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
10592
10908
  infile = RemoveWindowsPath(infile)
10593
- listarrayfileslist = ArchiveFileToArray(infile, fmttype, filestart, seekstart, seekend, True, False, False, skipchecksum, formatspecs, seektoend, returnfp)
10909
+ listarrayfileslist = ArchiveFileToArray(infile, fmttype, filestart, seekstart, seekend, True, False, False, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
10594
10910
  if(not listarrayfileslist):
10595
10911
  return False
10596
10912
  for listarrayfiles in listarrayfileslist:
@@ -10627,8 +10943,11 @@ def ArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seeke
10627
10943
  VerbosePrintOut(ftype_to_str(listarrayfiles['ffilelist'][lcfi]['ftype']) + "\t" + listarrayfiles['ffilelist'][lcfi]['fcompression'] + "\t" + str(
10628
10944
  listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + "\t" + printfname)
10629
10945
  else:
10946
+ ts_ns = listarrayfiles['ffilelist'][lcfi]['fmtime']
10947
+ sec, ns = divmod(int(ts_ns), 10**9)
10948
+ dt = datetime.datetime.utcfromtimestamp(sec).replace(microsecond=ns // 1000)
10630
10949
  VerbosePrintOut(PrintPermissionString(listarrayfiles['ffilelist'][lcfi]['fmode'], listarrayfiles['ffilelist'][lcfi]['ftype']) + " " + str(fuprint) + "/" + str(fgprint) + " " + str(
10631
- listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + " " + datetime.datetime.utcfromtimestamp(listarrayfiles['ffilelist'][lcfi]['fmtime']).strftime('%Y-%m-%d %H:%M') + " " + printfname)
10950
+ listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + " " + dt.strftime('%Y-%m-%d %H:%M') + " " + printfname)
10632
10951
  lcfi = lcfi + 1
10633
10952
  if(returnfp):
10634
10953
  return listarrayfiles['fp']
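The listing path now treats fmtime as a nanosecond timestamp, splitting it into whole seconds plus a microsecond remainder before formatting. The conversion on its own (the sample value is illustrative):

import datetime

def format_mtime_ns_sketch(ts_ns):
    sec, ns = divmod(int(ts_ns), 10**9)
    dt = datetime.datetime.utcfromtimestamp(sec).replace(microsecond=ns // 1000)
    return dt.strftime('%Y-%m-%d %H:%M')

print(format_mtime_ns_sketch(1731398400 * 10**9))  # -> 2024-11-12 08:00
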
@@ -10636,25 +10955,25 @@ def ArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seeke
10636
10955
  return True
10637
10956
 
10638
10957
 
10639
- def MultipleArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
10958
+ def MultipleArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
10640
10959
  if(isinstance(infile, (list, tuple, ))):
10641
10960
  pass
10642
10961
  else:
10643
10962
  infile = [infile]
10644
10963
  outretval = {}
10645
10964
  for curfname in infile:
10646
- outretval[curfname] = ArchiveFileListFiles(infile, fmttype, filestart, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, returnfp)
10965
+ outretval[curfname] = ArchiveFileListFiles(curfname, fmttype, filestart, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend, False, False, returnfp) # FIX: use curfname from the loop; verbose and newstyle are not parameters of this wrapper
10647
10966
  return outretval
10648
10967
 
10649
10968
 
10650
- def StackedArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
10969
+ def StackedArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, newstyle=False, returnfp=False):
10651
10970
  outretval = []
10652
10971
  outstartfile = filestart
10653
10972
  outfsize = float('inf')
10654
10973
  while True:
10655
10974
  if outstartfile >= outfsize: # stop once the next start offset reaches the end of the container
10656
10975
  break
10657
- list_file_retu = ArchiveFileListFiles(infile, fmttype, outstartfile, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, True)
10976
+ list_file_retu = ArchiveFileListFiles(infile, fmttype, outstartfile, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend, verbose, newstyle, True)
10658
10977
  if list_file_retu is False: # stop when function signals False
10659
10978
  outretval.append(list_file_retu)
10660
10979
  else:
@@ -10670,30 +10989,31 @@ def StackedArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0
10670
10989
  if(returnfp):
10671
10990
  return infile
10672
10991
  else:
10673
- infile.close()
10992
+ try:
10993
+ infile.close()
10994
+ except AttributeError:
10995
+ return False
10674
10996
  return outretval
10675
10997
 
10676
10998
 
10677
- def MultipleStackedArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
10999
+ def MultipleStackedArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
10678
11000
  if(isinstance(infile, (list, tuple, ))):
10679
11001
  pass
10680
11002
  else:
10681
11003
  infile = [infile]
10682
11004
  outretval = {}
10683
11005
  for curfname in infile:
10684
- outretval[curfname] = StackedArchiveFileListFiles(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
11006
+ outretval[curfname] = StackedArchiveFileListFiles(curfname, fmttype, filestart, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend, False, False, returnfp) # FIX: StackedArchiveFileListFiles takes no listonly/contentasfile/uncompress arguments
10685
11007
  return outretval
10686
11008
 
10687
11009
 
10688
- def ArchiveFileStringListFiles(instr, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
11010
+ def ArchiveFileStringListFiles(instr, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, newstyle=False, returnfp=False):
10689
11011
  fp = MkTempFile(instr)
10690
- listarrayfiles = ArchiveFileListFiles(instr, "auto", filestart, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, returnfp)
11012
+ listarrayfiles = ArchiveFileListFiles(fp, "auto", filestart, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend, verbose, newstyle, returnfp) # FIX: parse the temp file object created above, not the raw string
10691
11013
  return listarrayfiles
10692
11014
 
10693
11015
 
10694
11016
  def TarFileListFiles(infile, verbose=False, returnfp=False):
10695
- if(verbose):
10696
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
10697
11017
  if(infile == "-"):
10698
11018
  infile = MkTempFile()
10699
11019
  shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
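Reading from "-" spools stdin into a seekable temporary file first, because the archive readers need random access that a pipe cannot provide. A self-contained sketch of that step (SpooledTemporaryFile stands in for MkTempFile; the buffer size is an arbitrary example):

import shutil
import sys
import tempfile

def spool_stdin_sketch(bufsize=262144):
    fp = tempfile.SpooledTemporaryFile()
    shutil.copyfileobj(sys.stdin.buffer, fp, length=bufsize)
    fp.seek(0)   # rewind so the reader starts at the archive header
    return fp
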
@@ -10814,8 +11134,6 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
10814
11134
 
10815
11135
 
10816
11136
  def ZipFileListFiles(infile, verbose=False, returnfp=False):
10817
- if(verbose):
10818
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
10819
11137
  if(infile == "-"):
10820
11138
  infile = MkTempFile()
10821
11139
  shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
@@ -10941,8 +11259,6 @@ if(not rarfile_support):
10941
11259
 
10942
11260
  if(rarfile_support):
10943
11261
  def RarFileListFiles(infile, verbose=False, returnfp=False):
10944
- if(verbose):
10945
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
10946
11262
  if(not os.path.exists(infile) or not os.path.isfile(infile)):
10947
11263
  return False
10948
11264
  if(not rarfile.is_rarfile(infile) and not rarfile.is_rarfile_sfx(infile)):
@@ -11070,8 +11386,6 @@ if(not py7zr_support):
11070
11386
 
11071
11387
  if(py7zr_support):
11072
11388
  def SevenZipFileListFiles(infile, verbose=False, returnfp=False):
11073
- if(verbose):
11074
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
11075
11389
  if(not os.path.exists(infile) or not os.path.isfile(infile)):
11076
11390
  return False
11077
11391
  lcfi = 0
@@ -11165,8 +11479,6 @@ if(py7zr_support):
11165
11479
 
11166
11480
 
11167
11481
  def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict__, seektoend=False, newstyle=False, returnfp=False):
11168
- if(verbose):
11169
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
11170
11482
  checkcompressfile = CheckCompressionSubType(infile, formatspecs, 0, True) # FIX: InFileListFiles has no filestart parameter; start at offset 0
11171
11483
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
11172
11484
  formatspecs = formatspecs[checkcompressfile]
@@ -11193,44 +11505,6 @@ def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compress
11193
11505
  outarray, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
11194
11506
  return listarrayfiles
11195
11507
 
11196
- """
11197
- PyNeoFile compatibility layer
11198
- """
11199
-
11200
- def make_empty_file_pointer_neo(fp, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
11201
- return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)
11202
-
11203
- def make_empty_archive_file_pointer_neo(fp, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
11204
- return make_empty_file_pointer_neo(fp, fmttype, checksumtype, formatspecs, encoding)
11205
-
11206
- def make_empty_file_neo(outfile=None, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
11207
- return MakeEmptyFile(outfile, fmttype, "auto", False, None, compressionlistalt, checksumtype, formatspecs, returnfp)
11208
-
11209
- def make_empty_archive_file_neo(outfile=None, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
11210
- return make_empty_file_neo(outfile, fmttype, checksumtype, formatspecs, encoding, returnfp)
11211
-
11212
- def pack_neo(infiles, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["md5", "md5", "md5", "md5"], encoding="UTF-8", compression="auto", compression_level=None, returnfp=False):
11213
- return PackArchiveFile(infiles, outfile, False, "auto", compression, False, compression_level, compressionlistalt, False, checksumtypes, [], {}, formatspecs, False, returnfp)
11214
-
11215
- def archive_to_array_neo(infile, formatspecs=__file_format_multi_dict__, listonly=False, skipchecksum=False, uncompress=True, returnfp=False):
11216
- return ArchiveFileToArray(infile, "auto", 0, 0, 0, listonly, True, uncompress, skipchecksum, formatspecs, False, returnfp)
11217
-
11218
- def unpack_neo(infile, outdir='.', formatspecs=__file_format_multi_dict__, skipchecksum=False, uncompress=True, returnfp=False):
11219
- return UnPackArchiveFile(infile, outdir, False, 0, 0, skipchecksum, formatspecs, True, True, False, False, returnfp)
11220
-
11221
- def repack_neo(infile, outfile=None, formatspecs=__file_format_dict__, checksumtypes=["md5", "md5", "md5", "md5"], compression="auto", compression_level=None, returnfp=False):
11222
- return RePackArchiveFile(infile, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)
11223
-
11224
- def validate_neo(infile, formatspecs=__file_format_multi_dict__, verbose=False, return_details=False, returnfp=False):
11225
- return ArchiveFileValidate(infile, "auto", formatspecs, False, verbose, returnfp)
11226
-
11227
- def listfiles_neo(infile, formatspecs=__file_format_multi_dict__, advanced=False, include_dirs=True, returnfp=False):
11228
- return ArchiveFileListFiles(infile, "auto", 0, 0, False, formatspecs, False, True, advanced, returnfp)
11229
-
11230
- def convert_foreign_to_neo(infile, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["md5", "md5", "md5", "md5"], compression="auto", compression_level=None, returnfp=False):
11231
- intmp = InFileToArray(infile, 0, 0, 0, False, True, False, formatspecs, False, False)
11232
- return RePackArchiveFile(intmp, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)
11233
-
11234
11508
  def detect_cwd(ftp, file_dir):
11235
11509
  """
11236
11510
  Test whether cwd into file_dir works. Returns True if it does,