PyFoxFile 0.25.0__py3-none-any.whl → 0.26.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pyfoxfile.py CHANGED
@@ -14,7 +14,7 @@
  Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
  Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski
 
- $FileInfo: pyfoxfile.py - Last Update: 11/3/2025 Ver. 0.25.0 RC 1 - Author: cooldude2k $
+ $FileInfo: pyfoxfile.py - Last Update: 11/12/2025 Ver. 0.26.0 RC 1 - Author: cooldude2k $
 
  '''
 
  from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -416,9 +416,13 @@ def is_only_nonprintable(var):
  __file_format_multi_dict__ = {}
  __file_format_default__ = "FoxFile"
  __include_defaults__ = True
- __use_inmemfile__ = True
+ __use_inmem__ = True
+ __use_memfd__ = True
  __use_spoolfile__ = False
  __use_spooldir__ = tempfile.gettempdir()
+ __use_new_style__ = True
+ __use_advanced_list__ = True
+ __use_alt_inode__ = False
  BYTES_PER_KiB = 1024
  BYTES_PER_MiB = 1024 * BYTES_PER_KiB
  # Spool: not tiny, but won’t blow up RAM if many are in use
@@ -462,9 +466,13 @@ if __use_ini_file__ and os.path.exists(__config_file__):
  __file_format_default__ = decode_unicode_escape(config.get('config', 'default'))
  __program_name__ = decode_unicode_escape(config.get('config', 'proname'))
  __include_defaults__ = config.getboolean('config', 'includedef')
- __use_inmemfile__ = config.getboolean('config', 'inmemfile')
+ __use_inmem__ = config.getboolean('config', 'useinmem')
+ __use_memfd__ = config.getboolean('config', 'usememfd')
  __use_spoolfile__ = config.getboolean('config', 'usespoolfile')
  __spoolfile_size__ = config.getint('config', 'spoolfilesize')
+ __use_new_style__ = config.getboolean('config', 'newstyle')
+ __use_advanced_list__ = config.getboolean('config', 'advancedlist')
+ __use_alt_inode__ = config.getboolean('config', 'altinode')
  # Loop through all sections
  for section in config.sections():
  if section == "config":
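
For orientation, the global flags above are all read from the [config] section of the INI file. A minimal sketch of such a section, using the option names from the getboolean/getint calls in this hunk (the values shown are the code defaults, and the spoolfilesize number is purely illustrative):

    [config]
    default = FoxFile
    proname = PyFoxFile
    includedef = true
    useinmem = true
    usememfd = true
    usespoolfile = false
    spoolfilesize = 5242880
    newstyle = true
    advancedlist = true
    altinode = false

With newstyle/advancedlist/altinode now global, per-format sections shrink to the seven keys listed in the next hunk.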
@@ -472,8 +480,7 @@ if __use_ini_file__ and os.path.exists(__config_file__):
 
  required_keys = [
  "len", "hex", "ver", "name",
- "magic", "delimiter", "extension",
- "newstyle", "advancedlist", "altinode"
+ "magic", "delimiter", "extension"
  ]
 
  # Py2+Py3 compatible key presence check
@@ -493,9 +500,6 @@ if __use_ini_file__ and os.path.exists(__config_file__):
  'format_hex': config.get(section, 'hex'),
  'format_delimiter': delim,
  'format_ver': config.get(section, 'ver'),
- 'new_style': config.getboolean(section, 'newstyle'),
- 'use_advanced_list': config.getboolean(section, 'advancedlist'),
- 'use_alt_inode': config.getboolean(section, 'altinode'),
  'format_extension': decode_unicode_escape(config.get(section, 'extension')),
  }
  })
@@ -556,16 +560,19 @@ elif __use_json_file__ and os.path.exists(__config_file__):
  cfg_config = cfg.get('config', {}) or {}
  __file_format_default__ = decode_unicode_escape(_get(cfg_config, 'default', ''))
  __program_name__ = decode_unicode_escape(_get(cfg_config, 'proname', ''))
- __include_defaults__ = _to_bool(_get(cfg_config, 'includedef', False))
- __use_inmemfile__ = _to_bool(_get(cfg_config, 'inmemfile', False))
+ __include_defaults__ = _to_bool(_get(cfg_config, 'includedef', True))
+ __use_inmem__ = _to_bool(_get(cfg_config, 'useinmem', True))
+ __use_memfd__ = _to_bool(_get(cfg_config, 'usememfd', True))
  __use_spoolfile__ = _to_bool(_get(cfg_config, 'usespoolfile', False))
  __spoolfile_size__ = _to_int(_get(cfg_config, 'spoolfilesize', DEFAULT_SPOOL_MAX))
+ __use_new_style__ = _to_bool(_get(cfg_config, 'newstyle', True))
+ __use_advanced_list__ = _to_bool(_get(cfg_config, 'advancedlist', True))
+ __use_alt_inode__ = _to_bool(_get(cfg_config, 'altinode', False))
 
  # --- iterate format sections (everything except "config") ---
  required_keys = [
  "len", "hex", "ver", "name",
- "magic", "delimiter", "extension",
- "newstyle", "advancedlist", "altinode"
+ "magic", "delimiter", "extension"
  ]
 
  for section_name, section in cfg.items():
@@ -583,9 +590,6 @@ elif __use_json_file__ and os.path.exists(__config_file__):
  fmt_hex = decode_unicode_escape(_get(section, 'hex', ''))
  fmt_ver = decode_unicode_escape(_get(section, 'ver', ''))
  delim = decode_unicode_escape(_get(section, 'delimiter', ''))
- new_style = _to_bool(_get(section, 'newstyle', False))
- adv_list = _to_bool(_get(section, 'advancedlist', False))
- alt_inode = _to_bool(_get(section, 'altinode', False))
  extension = decode_unicode_escape(_get(section, 'extension', ''))
 
  # keep your delimiter validation semantics
@@ -600,9 +604,6 @@ elif __use_json_file__ and os.path.exists(__config_file__):
  'format_hex': fmt_hex,
  'format_delimiter': delim,
  'format_ver': fmt_ver,
- 'new_style': new_style,
- 'use_advanced_list': adv_list,
- 'use_alt_inode': alt_inode,
  'format_extension': extension,
  }
  })
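
The JSON loader mirrors the INI loader, so the equivalent global settings live in a top-level "config" object; every other top-level object is treated as a format section and checked against the same required_keys. A sketch, assuming the key names used by the _get calls above (values are the code defaults):

    {
      "config": {
        "default": "FoxFile",
        "proname": "PyFoxFile",
        "includedef": true,
        "useinmem": true,
        "usememfd": true,
        "usespoolfile": false,
        "spoolfilesize": 5242880,
        "newstyle": true,
        "advancedlist": true,
        "altinode": false
      }
    }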
@@ -641,21 +642,18 @@ __file_format_len__ = __file_format_multi_dict__[__file_format_default__]['format_len']
  __file_format_hex__ = __file_format_multi_dict__[__file_format_default__]['format_hex']
  __file_format_delimiter__ = __file_format_multi_dict__[__file_format_default__]['format_delimiter']
  __file_format_ver__ = __file_format_multi_dict__[__file_format_default__]['format_ver']
- __use_new_style__ = __file_format_multi_dict__[__file_format_default__]['new_style']
- __use_advanced_list__ = __file_format_multi_dict__[__file_format_default__]['use_advanced_list']
- __use_alt_inode__ = __file_format_multi_dict__[__file_format_default__]['use_alt_inode']
  __file_format_extension__ = __file_format_multi_dict__[__file_format_default__]['format_extension']
  __file_format_dict__ = __file_format_multi_dict__[__file_format_default__]
  __project__ = __program_name__
  __program_alt_name__ = __program_name__
  __project_url__ = "https://github.com/GameMaker2k/PyFoxFile"
  __project_release_url__ = __project_url__+"/releases/latest"
- __version_info__ = (0, 25, 0, "RC 1", 1)
- __version_date_info__ = (2025, 11, 5, "RC 1", 1)
+ __version_info__ = (0, 26, 0, "RC 1", 1)
+ __version_date_info__ = (2025, 11, 12, "RC 1", 1)
  __version_date__ = str(__version_date_info__[0]) + "." + str(
  __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
  __revision__ = __version_info__[3]
- __revision_id__ = "$Id: 1e44250af6454c3f042d7212eb751c2c18543954 $"
+ __revision_id__ = "$Id: 472d03caa48196f32a369d9ab15f43ee378c9557 $"
  if(__version_info__[4] is not None):
  __version_date_plusrc__ = __version_date__ + \
  "-" + str(__version_date_info__[4])
@@ -667,6 +665,9 @@ if(__version_info__[3] is not None):
  if(__version_info__[3] is None):
  __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])
 
+ _logger = logging.getLogger(__project__) # library-style logger
+ _logger.addHandler(logging.NullHandler()) # don't emit logs unless app configures logging
+
  # From: https://stackoverflow.com/a/28568003
  # By Phaxmohdem
 
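The two added lines follow the standard library-logging pattern: a NullHandler keeps the package silent until the embedding application opts in. A minimal sketch of opting in from application code, assuming the logger name equals __project__ (i.e. the configured program name, "PyFoxFile" by default):

    import logging
    logging.basicConfig(level=logging.INFO)                  # app-wide config
    logging.getLogger("PyFoxFile").setLevel(logging.DEBUG)   # tune just this library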
@@ -1036,6 +1037,20 @@ def VerbosePrintOutReturn(dbgtxt, outtype="log", dbgenable=True, dgblevel=20, **kwargs):
  VerbosePrintOut(dbgtxt, outtype, dbgenable, dgblevel, **kwargs)
  return dbgtxt
 
+ def to_ns(timestamp):
+ """
+ Convert a second-resolution timestamp (int or float)
+ into a nanosecond timestamp (int) by scaling.
+ Works in Python 2 and Python 3.
+ """
+ try:
+ # Convert incoming timestamp to float so it works for int or float
+ seconds = float(timestamp)
+ except (TypeError, ValueError):
+ raise ValueError("Timestamp must be int or float")
+
+ # Multiply by 1e9 to get nanoseconds, then cast to int
+ return int(seconds * 1000000000)
 
 
  def _split_posix(name):
  """
@@ -2059,34 +2074,53 @@ def _normalize_initial_data(data, isbytes, encoding, errors=None):
 
 
  def MkTempFile(data=None,
- inmem=__use_inmemfile__,
+ inmem=__use_inmem__, usememfd=__use_memfd__,
  isbytes=True,
- prefix="",
+ prefix=__program_name__,
  delete=True,
  encoding="utf-8",
- newline=None, # text mode only; in-memory objects ignore newline semantics
+ newline=None,
+ text_errors="strict",
  dir=None,
  suffix="",
  use_spool=__use_spoolfile__,
+ autoswitch_spool=False,
  spool_max=__spoolfile_size__,
- spool_dir=__use_spooldir__):
+ spool_dir=__use_spooldir__,
+ reset_to_start=True,
+ memfd_name=None,
+ memfd_allow_sealing=False,
+ memfd_flags_extra=0,
+ on_create=None):
  """
  Return a file-like handle with consistent behavior on Py2.7 and Py3.x.
 
  Storage:
- - inmem=True -> BytesIO (bytes) or StringIO (text)
- - inmem=False, use_spool=True -> SpooledTemporaryFile (binary), optionally TextIOWrapper for text
- - inmem=False, use_spool=False -> NamedTemporaryFile (binary), optionally TextIOWrapper for text
+ - inmem=True, usememfd=True, isbytes=True and memfd available
+ -> memfd-backed anonymous file (binary)
+ - inmem=True, otherwise
+ -> BytesIO (bytes) or StringIO (text)
+ - inmem=False, use_spool=True
+ -> SpooledTemporaryFile (binary), optionally TextIOWrapper for text
+ - inmem=False, use_spool=False
+ -> NamedTemporaryFile (binary), optionally TextIOWrapper for text
 
  Text vs bytes:
  - isbytes=True -> file expects bytes; 'data' must be bytes-like
- - isbytes=False -> file expects text; 'data' must be text (unicode/str). Newline translation and encoding
- apply only for spooled/named files (not BytesIO/StringIO).
+ - isbytes=False -> file expects text; 'data' must be text (unicode/str). Newline translation and
+ encoding apply only for spooled/named files (not BytesIO/StringIO).
 
  Notes:
- - On Windows, NamedTemporaryFile(delete=True) keeps the file open and cannot be reopened by other processes.
- Use delete=False if you need to pass the path elsewhere.
- - For text: in-memory StringIO ignores 'newline' (as usual).
+ - On Windows, NamedTemporaryFile(delete=True) keeps the file open and cannot be reopened by
+ other processes. Use delete=False if you need to pass the path elsewhere.
+ - For text: in-memory StringIO ignores 'newline' and 'text_errors' (as usual).
+ - When available, and if usememfd=True, memfd is used only for inmem=True and isbytes=True,
+ providing an anonymous in-memory file descriptor (Linux-only). Text in-memory still uses
+ StringIO to preserve newline semantics.
+ - If autoswitch_spool=True and initial data size exceeds spool_max, in-memory storage is
+ skipped and a spooled file is used instead (if use_spool=True).
+ - If on_create is not None, it is called as on_create(fp, kind) where kind is one of:
+ "memfd", "bytesio", "stringio", "spool", "disk".
  """
 
  # -- sanitize simple params (avoid None surprises) --
@@ -2118,23 +2152,65 @@ def MkTempFile(data=None,
  else:
  init = None
 
+ # Size of init for autoswitch; only meaningful for bytes
+ init_len = len(init) if (init is not None and isbytes) else None
+
  # -------- In-memory --------
  if inmem:
- if isbytes:
- f = io.BytesIO(init if init is not None else b"")
- else:
- # newline not enforced for StringIO; matches stdlib semantics
- f = io.StringIO(init if init is not None else "")
- # already positioned at 0 with provided init; ensure rewind for symmetry
- f.seek(0)
- return f
+ # If autoswitch is enabled and data is larger than spool_max, and
+ # spooling is allowed, skip the in-memory branch and fall through
+ # to the spool/disk logic below.
+ if autoswitch_spool and use_spool and init_len is not None and init_len > spool_max:
+ pass # fall through to spool/disk sections
+ else:
+ # Use memfd only for bytes, and only where available (Linux, Python 3.8+)
+ if usememfd and isbytes and hasattr(os, "memfd_create"):
+ name = memfd_name or prefix or "MkTempFile"
+ flags = 0
+ # Close-on-exec is almost always what you want for temps
+ if hasattr(os, "MFD_CLOEXEC"):
+ flags |= os.MFD_CLOEXEC
+ # Optional sealing support if requested and available
+ if memfd_allow_sealing and hasattr(os, "MFD_ALLOW_SEALING"):
+ flags |= os.MFD_ALLOW_SEALING
+ # Extra custom flags (e.g. hugepage flags) if caller wants them
+ if memfd_flags_extra:
+ flags |= memfd_flags_extra
+
+ fd = os.memfd_create(name, flags)
+ # Binary read/write file-like object backed by RAM
+ f = os.fdopen(fd, "w+b")
+
+ if init is not None:
+ f.write(init)
+ if reset_to_start:
+ f.seek(0)
+
+ if on_create is not None:
+ on_create(f, "memfd")
+ return f
+
+ # Fallback: pure Python in-memory objects
+ if isbytes:
+ f = io.BytesIO(init if init is not None else b"")
+ kind = "bytesio"
+ else:
+ # newline/text_errors not enforced for StringIO; matches stdlib semantics
+ f = io.StringIO(init if init is not None else "")
+ kind = "stringio"
+
+ if reset_to_start:
+ f.seek(0)
+
+ if on_create is not None:
+ on_create(f, kind)
+ return f
 
  # Helper: wrap a binary file into a text file with encoding/newline
  def _wrap_text(handle):
  # For both Py2 & Py3, TextIOWrapper gives consistent newline/encoding behavior
- tw = io.TextIOWrapper(handle, encoding=encoding, newline=newline)
- # Position at start; if we wrote initial data below, we will rewind after writing
- return tw
+ return io.TextIOWrapper(handle, encoding=encoding,
+ newline=newline, errors=text_errors)
 
  # -------- Spooled (RAM then disk) --------
  if use_spool:
@@ -2142,19 +2218,33 @@ def MkTempFile(data=None,
  bin_mode = "w+b" # read/write, binary
  b = tempfile.SpooledTemporaryFile(max_size=spool_max, mode=bin_mode, dir=spool_dir)
  f = b if isbytes else _wrap_text(b)
+
  if init is not None:
  f.write(init)
+ if reset_to_start:
+ f.seek(0)
+ elif reset_to_start:
  f.seek(0)
+
+ if on_create is not None:
+ on_create(f, "spool")
  return f
 
  # -------- On-disk temp (NamedTemporaryFile) --------
  # Always create binary file; wrap for text if needed for uniform Py2/3 behavior
- b = tempfile.NamedTemporaryFile(mode="w+b", prefix=prefix, suffix=suffix, dir=dir, delete=delete)
+ b = tempfile.NamedTemporaryFile(mode="w+b", prefix=prefix, suffix=suffix,
+ dir=dir, delete=delete)
  f = b if isbytes else _wrap_text(b)
 
  if init is not None:
  f.write(init)
+ if reset_to_start:
+ f.seek(0)
+ elif reset_to_start:
  f.seek(0)
+
+ if on_create is not None:
+ on_create(f, "disk")
  return f
 
 
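A short usage sketch tying the new MkTempFile() parameters to the storage table in its docstring (this uses only the signature added above; the reported kinds are the ones documented for on_create):

    kinds = []
    fp = MkTempFile(b"hello world",
                    inmem=True,                # prefer RAM
                    use_spool=True,
                    autoswitch_spool=True,     # spill to spool if data > spool_max
                    on_create=lambda f, kind: kinds.append(kind))
    print(kinds)       # ['memfd'] on Linux with Python 3.8+, else ['bytesio']
    print(fp.read())   # b'hello world' -- reset_to_start=True rewound the handle
    fp.close()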
@@ -3674,7 +3764,7 @@ def _bytes_to_int(b):
  # =========================
  # Public checksum API
  # =========================
- def GetHeaderChecksum(inlist=None, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
+ def GetHeaderChecksum(inlist=None, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__, saltkey=None):
  """
  Serialize header fields (list/tuple => joined with delimiter + trailing delimiter;
  or a single field) and compute the requested checksum. Returns lowercase hex.
@@ -3686,15 +3776,30 @@ def GetHeaderChecksum(inlist=None, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
  if encodedata and not isinstance(hdr_bytes, (bytes, bytearray, memoryview)):
  hdr_bytes = _to_bytes(hdr_bytes)
  hdr_bytes = bytes(hdr_bytes)
-
+ saltkeyval = None
+ if(hasattr(saltkey, "read")):
+ saltkeyval = saltkey.read()
+ if(not isinstance(saltkeyval, bytes) and sys.version_info[0] >= 3):
+ saltkeyval = saltkeyval.encode("UTF-8")
+ elif(isinstance(saltkey, bytes) and sys.version_info[0] >= 3):
+ saltkeyval = saltkey
+ elif(saltkey is not None and os.path.exists(saltkey)):
+ with open(saltkey, "rb") as skfp:
+ saltkeyval = skfp.read()
+ else:
+ saltkey = None
+ if(saltkeyval is None):
+ saltkey = None
  if CheckSumSupport(algo_key, hashlib_guaranteed):
- h = hashlib.new(algo_key)
- h.update(hdr_bytes)
- return h.hexdigest().lower()
+ if(saltkey is None or saltkeyval is None):
+ h = hashlib.new(algo_key, hdr_bytes)
+ else:
+ h = hmac.new(saltkeyval, hdr_bytes, digestmod=algo_key)
+ return h.hexdigest().lower()
 
  return "0"
 
- def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
+ def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__, saltkey=None):
  """
  Accepts bytes/str/file-like.
  - Hashlib algos: streamed in 1 MiB chunks.
@@ -3702,13 +3807,29 @@ def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
  - Falls back to one-shot for non-file-like inputs.
  """
  algo_key = (checksumtype or "md5").lower()
-
+ saltkeyval = None
+ if(hasattr(saltkey, "read")):
+ saltkeyval = saltkey.read()
+ if(not isinstance(saltkeyval, bytes) and sys.version_info[0] >= 3):
+ saltkeyval = saltkeyval.encode("UTF-8")
+ elif(isinstance(saltkey, bytes) and sys.version_info[0] >= 3):
+ saltkeyval = saltkey
+ elif(saltkey is not None and os.path.exists(saltkey)):
+ with open(saltkey, "rb") as skfp:
+ saltkeyval = skfp.read()
+ else:
+ saltkey = None
+ if(saltkeyval is None):
+ saltkey = None
  # file-like streaming
  if hasattr(inbytes, "read"):
  # hashlib
 
  if CheckSumSupport(algo_key, hashlib_guaranteed):
- h = hashlib.new(algo_key)
+ if(saltkey is None or saltkeyval is None):
+ h = hashlib.new(algo_key)
+ else:
+ h = hmac.new(saltkeyval, digestmod=algo_key)
  while True:
  chunk = inbytes.read(__filebuff_size__)
  if not chunk:
@@ -3729,26 +3850,41 @@ def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
  # one-shot
 
  if CheckSumSupport(algo_key, hashlib_guaranteed):
- h = hashlib.new(algo_key)
- h.update(data)
+ if(saltkey is None or saltkeyval is None):
+ h = hashlib.new(algo_key, data)
+ else:
+ h = hmac.new(saltkeyval, data, digestmod=algo_key)
  return h.hexdigest().lower()
 
  return "0"
 
- def ValidateHeaderChecksum(inlist=None, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__):
- calc = GetHeaderChecksum(inlist, checksumtype, True, formatspecs)
+ def ValidateHeaderChecksum(inlist=None, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__, saltkey=None):
+ calc = GetHeaderChecksum(inlist, checksumtype, True, formatspecs, saltkey)
  want = (inchecksum or "0").strip().lower()
  if want.startswith("0x"):
  want = want[2:]
- return hmac.compare_digest(want, calc)
+ return CheckChecksums(want, calc)
 
- def ValidateFileChecksum(infile, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__):
- calc = GetFileChecksum(infile, checksumtype, True, formatspecs)
+ def ValidateFileChecksum(infile, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__, saltkey=None):
+ calc = GetFileChecksum(infile, checksumtype, True, formatspecs, saltkey)
  want = (inchecksum or "0").strip().lower()
  if want.startswith("0x"):
  want = want[2:]
- return hmac.compare_digest(want, calc)
+ return CheckChecksums(want, calc)
+
+ def CheckChecksums(inchecksum, outchecksum):
+ # Normalize both sides as text first; inchecksum is the expected
+ # value, outchecksum the freshly computed one.
+ want = (inchecksum or "0").strip().lower()
+ calc = (outchecksum or "0").strip().lower()
+
+ if want.startswith("0x"):
+ want = want[2:]
+ if calc.startswith("0x"):
+ calc = calc[2:]
 
+ # Now force both to bytes
+ want_b = _to_bytes(want) # defaults to utf-8, strict
+ calc_b = _to_bytes(calc)
+
+ return hmac.compare_digest(want_b, calc_b)
 
  def MajorMinorToDev(major, minor):
  """
@@ -4117,11 +4253,11 @@ def ReadFileHeaderDataWoSize(fp, delimiter=_default_delim(None)):
  return first_two + headerdata
 
 
- def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
  if(not hasattr(fp, "read")):
  return False
  delimiter = formatspecs['format_delimiter']
- if(formatspecs['new_style']):
+ if(__use_new_style__):
  HeaderOut = ReadFileHeaderDataBySize(fp, delimiter)
  else:
  HeaderOut = ReadFileHeaderDataWoSize(fp, delimiter)
@@ -4209,15 +4345,14 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
  except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
  pass
  fp.seek(len(delimiter), 1)
- jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
- if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
+ jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+ if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
  VerbosePrintOut("File JSON Data Checksum Error with file " +
  fname + " at offset " + str(fheaderstart))
  VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
  return False
  fp.seek(len(delimiter), 1)
- newfcs = GetHeaderChecksum(
- HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs)
+ newfcs = GetHeaderChecksum(HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs, saltkey)
  HeaderOut.append(fjsoncontent)
  if(fcs != newfcs and not skipchecksum):
  VerbosePrintOut("File Header Checksum Error with file " +
@@ -4236,10 +4371,9 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
  else:
  fp.seek(fcsize, 1)
  fcontents.seek(0, 0)
- newfccs = GetFileChecksum(
- fcontents, HeaderOut[-3].lower(), False, formatspecs)
+ newfccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
  fcontents.seek(0, 0)
- if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
+ if(not CheckChecksums(fccs, newfccs) and not skipchecksum and not listonly):
  VerbosePrintOut("File Content Checksum Error with file " +
  fname + " at offset " + str(fcontentstart))
  VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4276,12 +4410,12 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
  return HeaderOut
 
 
- def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
  if(not hasattr(fp, "read")):
  return False
  delimiter = formatspecs['format_delimiter']
  fheaderstart = fp.tell()
- if(formatspecs['new_style']):
+ if(__use_new_style__):
  HeaderOut = ReadFileHeaderDataBySize(fp, delimiter)
  else:
  HeaderOut = ReadFileHeaderDataWoSize(fp, delimiter)
@@ -4299,40 +4433,51 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
  fbasedir = os.path.dirname(fname)
  flinkname = HeaderOut[6]
  fsize = int(HeaderOut[7], 16)
- fatime = int(HeaderOut[8], 16)
- fmtime = int(HeaderOut[9], 16)
- fctime = int(HeaderOut[10], 16)
- fbtime = int(HeaderOut[11], 16)
- fmode = int(HeaderOut[12], 16)
+ fblksize = int(HeaderOut[8], 16)
+ fblocks = int(HeaderOut[9], 16)
+ fflags = int(HeaderOut[10], 16)
+ fatime = int(HeaderOut[11], 16)
+ fmtime = int(HeaderOut[12], 16)
+ fctime = int(HeaderOut[13], 16)
+ fbtime = int(HeaderOut[14], 16)
+ fmode = int(HeaderOut[15], 16)
  fchmode = stat.S_IMODE(fmode)
  ftypemod = stat.S_IFMT(fmode)
- fwinattributes = int(HeaderOut[13], 16)
- fcompression = HeaderOut[14]
- fcsize = int(HeaderOut[15], 16)
- fuid = int(HeaderOut[16], 16)
- funame = HeaderOut[17]
- fgid = int(HeaderOut[18], 16)
- fgname = HeaderOut[19]
- fid = int(HeaderOut[20], 16)
- finode = int(HeaderOut[21], 16)
- flinkcount = int(HeaderOut[22], 16)
- fdev = int(HeaderOut[23], 16)
- fdev_minor = int(HeaderOut[24], 16)
- fdev_major = int(HeaderOut[25], 16)
- fseeknextfile = HeaderOut[26]
- fjsontype = HeaderOut[27]
- fjsonlen = int(HeaderOut[28], 16)
- fjsonsize = int(HeaderOut[29], 16)
- fjsonchecksumtype = HeaderOut[30]
- fjsonchecksum = HeaderOut[31]
- fextrasize = int(HeaderOut[32], 16)
- fextrafields = int(HeaderOut[33], 16)
+ fwinattributes = int(HeaderOut[16], 16)
+ fcompression = HeaderOut[17]
+ fcsize = int(HeaderOut[18], 16)
+ fuid = int(HeaderOut[19], 16)
+ funame = HeaderOut[20]
+ fgid = int(HeaderOut[21], 16)
+ fgname = HeaderOut[22]
+ fid = int(HeaderOut[23], 16)
+ finode = int(HeaderOut[24], 16)
+ flinkcount = int(HeaderOut[25], 16)
+ fdev = int(HeaderOut[26], 16)
+ frdev = int(HeaderOut[27], 16)
+ fseeknextfile = HeaderOut[28]
+ fjsontype = HeaderOut[29]
+ fjsonlen = int(HeaderOut[30], 16)
+ fjsonsize = int(HeaderOut[31], 16)
+ fjsonchecksumtype = HeaderOut[32]
+ fjsonchecksum = HeaderOut[33]
+ fextrasize = int(HeaderOut[34], 16)
+ fextrafields = int(HeaderOut[35], 16)
  fextrafieldslist = []
- extrastart = 34
+ extrastart = 36
  extraend = extrastart + fextrafields
  while(extrastart < extraend):
  fextrafieldslist.append(HeaderOut[extrastart])
  extrastart = extrastart + 1
+ fvendorfieldslist = []
+ fvendorfields = 0
+ if(len(HeaderOut)>extraend):
+ extrastart = extraend
+ extraend = len(HeaderOut) - 4
+ while(extrastart < extraend):
+ fvendorfieldslist.append(HeaderOut[extrastart])
+ extrastart = extrastart + 1
+ fvendorfields = fvendorfields + 1
  if(fextrafields==1):
  try:
  fextrafieldslist = json.loads(base64.b64decode(fextrafieldslist[0]).decode("UTF-8"))
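Since the index remapping above is easy to misread, here is the per-file header layout it implies for 0.26.0 (positions inferred from this hunk; fields before [6] are unchanged, and integers are hex-encoded strings unless marked str):

    [6]  flinkname (str)    [7]  fsize        [8]  fblksize    [9]  fblocks
    [10] fflags             [11] fatime       [12] fmtime      [13] fctime
    [14] fbtime             [15] fmode        [16] fwinattributes
    [17] fcompression (str) [18] fcsize       [19] fuid        [20] funame (str)
    [21] fgid               [22] fgname (str) [23] fid         [24] finode
    [25] flinkcount         [26] fdev         [27] frdev       [28] fseeknextfile (str)
    [29] fjsontype (str)    [30] fjsonlen     [31] fjsonsize
    [32] fjsonchecksumtype (str)              [33] fjsonchecksum (str)
    [34] fextrasize         [35] fextrafields [36..] extra fields, then vendor fields

The old split fdev_minor/fdev_major pair ([24]/[25] in 0.25.0) is replaced by a single frdev at [27].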
@@ -4410,16 +4555,15 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
  pass
  fp.seek(len(delimiter), 1)
  fjend = fp.tell() - 1
- jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
- if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
+ jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+ if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
  VerbosePrintOut("File JSON Data Checksum Error with file " +
  fname + " at offset " + str(fheaderstart))
  VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
  return False
  fcs = HeaderOut[-2].lower()
  fccs = HeaderOut[-1].lower()
- newfcs = GetHeaderChecksum(
- HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs)
+ newfcs = GetHeaderChecksum(HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs, saltkey)
  if(fcs != newfcs and not skipchecksum):
  VerbosePrintOut("File Header Checksum Error with file " +
  fname + " at offset " + str(fheaderstart))
@@ -4442,10 +4586,9 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
  fp.seek(fcsize, 1)
  pyhascontents = False
  fcontents.seek(0, 0)
- newfccs = GetFileChecksum(
- fcontents, HeaderOut[-3].lower(), False, formatspecs)
+ newfccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
  fcontents.seek(0, 0)
- if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
+ if(not CheckChecksums(fccs, newfccs) and not skipchecksum and not listonly):
  VerbosePrintOut("File Content Checksum Error with file " +
  fname + " at offset " + str(fcontentstart))
  VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4462,8 +4605,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
  shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
  cfcontents.close()
  fcontents.seek(0, 0)
- fccs = GetFileChecksum(
- fcontents, HeaderOut[-3].lower(), False, formatspecs)
+ fccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
  fcontentend = fp.tell()
  if(re.findall("^\\+([0-9]+)", fseeknextfile)):
  fseeknextasnum = int(fseeknextfile.replace("+", ""))
@@ -4485,17 +4627,17 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
  fcontents.seek(0, 0)
  if(not contentasfile):
  fcontents = fcontents.read()
- outlist = {'fheadersize': fheadsize, 'fhstart': fheaderstart, 'fhend': fhend, 'ftype': ftype, 'fencoding': fencoding, 'fcencoding': fcencoding, 'fname': fname, 'fbasedir': fbasedir, 'flinkname': flinkname, 'fsize': fsize, 'fatime': fatime, 'fmtime': fmtime, 'fctime': fctime, 'fbtime': fbtime, 'fmode': fmode, 'fchmode': fchmode, 'ftypemod': ftypemod, 'fwinattributes': fwinattributes, 'fcompression': fcompression, 'fcsize': fcsize, 'fuid': fuid, 'funame': funame, 'fgid': fgid, 'fgname': fgname, 'finode': finode, 'flinkcount': flinkcount,
- 'fdev': fdev, 'fminor': fdev_minor, 'fmajor': fdev_major, 'fseeknextfile': fseeknextfile, 'fheaderchecksumtype': HeaderOut[-4], 'fjsonchecksumtype': fjsonchecksumtype, 'fcontentchecksumtype': HeaderOut[-3], 'fnumfields': fnumfields + 2, 'frawheader': HeaderOut, 'fextrafields': fextrafields, 'fextrafieldsize': fextrasize, 'fextradata': fextrafieldslist, 'fjsontype': fjsontype, 'fjsonlen': fjsonlen, 'fjsonsize': fjsonsize, 'fjsonrawdata': fjsonrawcontent, 'fjsondata': fjsoncontent, 'fjstart': fjstart, 'fjend': fjend, 'fheaderchecksum': fcs, 'fjsonchecksum': fjsonchecksum, 'fcontentchecksum': fccs, 'fhascontents': pyhascontents, 'fcontentstart': fcontentstart, 'fcontentend': fcontentend, 'fcontentasfile': contentasfile, 'fcontents': fcontents}
+ outlist = {'fheadersize': fheadsize, 'fhstart': fheaderstart, 'fhend': fhend, 'ftype': ftype, 'fencoding': fencoding, 'fcencoding': fcencoding, 'fname': fname, 'fbasedir': fbasedir, 'flinkname': flinkname, 'fsize': fsize, 'fblksize': fblksize, 'fblocks': fblocks, 'fflags': fflags, 'fatime': fatime, 'fmtime': fmtime, 'fctime': fctime, 'fbtime': fbtime, 'fmode': fmode, 'fchmode': fchmode, 'ftypemod': ftypemod, 'fwinattributes': fwinattributes, 'fcompression': fcompression, 'fcsize': fcsize, 'fuid': fuid, 'funame': funame, 'fgid': fgid, 'fgname': fgname, 'finode': finode, 'flinkcount': flinkcount,
+ 'fdev': fdev, 'frdev': frdev, 'fseeknextfile': fseeknextfile, 'fheaderchecksumtype': HeaderOut[-4], 'fjsonchecksumtype': fjsonchecksumtype, 'fcontentchecksumtype': HeaderOut[-3], 'fnumfields': fnumfields + 2, 'frawheader': HeaderOut, 'fvendorfields': fvendorfields, 'fvendordata': fvendorfieldslist, 'fextrafields': fextrafields, 'fextrafieldsize': fextrasize, 'fextradata': fextrafieldslist, 'fjsontype': fjsontype, 'fjsonlen': fjsonlen, 'fjsonsize': fjsonsize, 'fjsonrawdata': fjsonrawcontent, 'fjsondata': fjsoncontent, 'fjstart': fjstart, 'fjend': fjend, 'fheaderchecksum': fcs, 'fjsonchecksum': fjsonchecksum, 'fcontentchecksum': fccs, 'fhascontents': pyhascontents, 'fcontentstart': fcontentstart, 'fcontentend': fcontentend, 'fcontentasfile': contentasfile, 'fcontents': fcontents}
  return outlist
 
 
- def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
  if(not hasattr(fp, "read")):
  return False
  delimiter = formatspecs['format_delimiter']
  fheaderstart = fp.tell()
- if(formatspecs['new_style']):
+ if(__use_new_style__):
  HeaderOut = ReadFileHeaderDataBySize(fp, delimiter)
  else:
  HeaderOut = ReadFileHeaderDataWoSize(fp, delimiter)
@@ -4513,36 +4655,38 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
  fbasedir = os.path.dirname(fname)
  flinkname = HeaderOut[6]
  fsize = int(HeaderOut[7], 16)
- fatime = int(HeaderOut[8], 16)
- fmtime = int(HeaderOut[9], 16)
- fctime = int(HeaderOut[10], 16)
- fbtime = int(HeaderOut[11], 16)
- fmode = int(HeaderOut[12], 16)
+ fblksize = int(HeaderOut[8], 16)
+ fblocks = int(HeaderOut[9], 16)
+ fflags = int(HeaderOut[10], 16)
+ fatime = int(HeaderOut[11], 16)
+ fmtime = int(HeaderOut[12], 16)
+ fctime = int(HeaderOut[13], 16)
+ fbtime = int(HeaderOut[14], 16)
+ fmode = int(HeaderOut[15], 16)
  fchmode = stat.S_IMODE(fmode)
  ftypemod = stat.S_IFMT(fmode)
- fwinattributes = int(HeaderOut[13], 16)
- fcompression = HeaderOut[14]
- fcsize = int(HeaderOut[15], 16)
- fuid = int(HeaderOut[16], 16)
- funame = HeaderOut[17]
- fgid = int(HeaderOut[18], 16)
- fgname = HeaderOut[19]
- fid = int(HeaderOut[20], 16)
- finode = int(HeaderOut[21], 16)
- flinkcount = int(HeaderOut[22], 16)
- fdev = int(HeaderOut[23], 16)
- fdev_minor = int(HeaderOut[24], 16)
- fdev_major = int(HeaderOut[25], 16)
- fseeknextfile = HeaderOut[26]
- fjsontype = HeaderOut[27]
- fjsonlen = int(HeaderOut[28], 16)
- fjsonsize = int(HeaderOut[29], 16)
- fjsonchecksumtype = HeaderOut[30]
- fjsonchecksum = HeaderOut[31]
- fextrasize = int(HeaderOut[32], 16)
- fextrafields = int(HeaderOut[33], 16)
+ fwinattributes = int(HeaderOut[16], 16)
+ fcompression = HeaderOut[17]
+ fcsize = int(HeaderOut[18], 16)
+ fuid = int(HeaderOut[19], 16)
+ funame = HeaderOut[20]
+ fgid = int(HeaderOut[21], 16)
+ fgname = HeaderOut[22]
+ fid = int(HeaderOut[23], 16)
+ finode = int(HeaderOut[24], 16)
+ flinkcount = int(HeaderOut[25], 16)
+ fdev = int(HeaderOut[26], 16)
+ frdev = int(HeaderOut[27], 16)
+ fseeknextfile = HeaderOut[28]
+ fjsontype = HeaderOut[29]
+ fjsonlen = int(HeaderOut[30], 16)
+ fjsonsize = int(HeaderOut[31], 16)
+ fjsonchecksumtype = HeaderOut[32]
+ fjsonchecksum = HeaderOut[33]
+ fextrasize = int(HeaderOut[34], 16)
+ fextrafields = int(HeaderOut[35], 16)
  fextrafieldslist = []
- extrastart = 34
+ extrastart = 36
  extraend = extrastart + fextrafields
  while(extrastart < extraend):
  fextrafieldslist.append(HeaderOut[extrastart])
@@ -4622,16 +4766,15 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
  except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
  pass
  fp.seek(len(delimiter), 1)
- jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
- if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
+ jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+ if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
  VerbosePrintOut("File JSON Data Checksum Error with file " +
  fname + " at offset " + str(fheaderstart))
  VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
  return False
  fcs = HeaderOut[-2].lower()
  fccs = HeaderOut[-1].lower()
- newfcs = GetHeaderChecksum(
- HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs)
+ newfcs = GetHeaderChecksum(HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs, saltkey)
  if(fcs != newfcs and not skipchecksum):
  VerbosePrintOut("File Header Checksum Error with file " +
  fname + " at offset " + str(fheaderstart))
@@ -4654,9 +4797,8 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
  fp.seek(fcsize, 1)
  pyhascontents = False
  fcontents.seek(0, 0)
- newfccs = GetFileChecksum(
- fcontents, HeaderOut[-3].lower(), False, formatspecs)
- if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
+ newfccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
+ if(not CheckChecksums(fccs, newfccs) and not skipchecksum and not listonly):
  VerbosePrintOut("File Content Checksum Error with file " +
  fname + " at offset " + str(fcontentstart))
  VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4673,8 +4815,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
  shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
  cfcontents.close()
  fcontents.seek(0, 0)
- fccs = GetFileChecksum(
- fcontents, HeaderOut[-3].lower(), False, formatspecs)
+ fccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
  fcontentend = fp.tell()
  if(re.findall("^\\+([0-9]+)", fseeknextfile)):
  fseeknextasnum = int(fseeknextfile.replace("+", ""))
@@ -4696,12 +4837,12 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
  fcontents.seek(0, 0)
  if(not contentasfile):
  fcontents = fcontents.read()
- outlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize, fuid, funame, fgid, fgname, fid,
- finode, flinkcount, fdev, fdev_minor, fdev_major, fseeknextfile, fjsoncontent, fextrafieldslist, HeaderOut[-4], HeaderOut[-3], fcontents]
+ outlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize, fuid, funame, fgid, fgname, fid,
+ finode, flinkcount, fdev, frdev, fseeknextfile, fjsoncontent, fextrafieldslist, HeaderOut[-4], HeaderOut[-3], fcontents]
  return outlist
 
 
- def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
  if(not hasattr(fp, "read")):
  return False
  delimiter = formatspecs['format_delimiter']
@@ -4721,7 +4862,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
  return False
  if(formdel != formatspecs['format_delimiter']):
  return False
- if(formatspecs['new_style']):
+ if(__use_new_style__):
  inheader = ReadFileHeaderDataBySize(
  fp, formatspecs['format_delimiter'])
  else:
@@ -4729,20 +4870,42 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
  fp, formatspecs['format_delimiter'])
  fprechecksumtype = inheader[-2]
  fprechecksum = inheader[-1]
- headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
- newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
+ headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs, saltkey)
+ newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
  if(not headercheck and not skipchecksum):
  VerbosePrintOut(
  "File Header Checksum Error with file at offset " + str(0))
  VerbosePrintOut("'" + fprechecksum + "' != " +
  "'" + newfcs + "'")
  return False
- fnumfiles = int(inheader[4], 16)
+ fnumfiles = int(inheader[8], 16)
+ outfseeknextfile = inheader[9]
+ fjsonsize = int(inheader[12], 16)
+ fjsonchecksumtype = inheader[13]
+ fjsonchecksum = inheader[14]
+ fp.read(fjsonsize)
+ # Next seek directive
+ if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
+ fseeknextasnum = int(outfseeknextfile.replace("+", ""))
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 1)
+ elif(re.findall(r"^\-([0-9]+)", outfseeknextfile)):
+ fseeknextasnum = int(outfseeknextfile)
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 1)
+ elif(re.findall(r"^([0-9]+)", outfseeknextfile)):
+ fseeknextasnum = int(outfseeknextfile)
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 0)
+ else:
+ return False
  countnum = 0
  flist = []
  while(countnum < fnumfiles):
- HeaderOut = ReadFileHeaderDataWithContent(
- fp, listonly, uncompress, skipchecksum, formatspecs)
+ HeaderOut = ReadFileHeaderDataWithContent(fp, listonly, uncompress, skipchecksum, formatspecs, saltkey)
  if(len(HeaderOut) == 0):
  break
  flist.append(HeaderOut)
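The fseeknextfile directive consumed above is a small text protocol: a leading "+" seeks forward relative to the current position, a leading "-" seeks backward relative, and a bare number is an absolute offset. A standalone sketch of the same dispatch (the helper name is illustrative, not part of the package API):

    def apply_seek_directive(fp, directive):
        if directive.startswith("+"):
            fp.seek(int(directive[1:]), 1)   # relative, forward
        elif directive.startswith("-"):
            fp.seek(int(directive), 1)       # relative, backward (int() keeps the sign)
        elif directive.isdigit():
            fp.seek(int(directive), 0)       # absolute offset
        else:
            raise ValueError("bad seek directive: " + directive)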
@@ -4750,7 +4913,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
  return flist
 
 
- def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
+ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None, seektoend=False):
  if(not hasattr(fp, "read")):
  return False
  delimiter = formatspecs['format_delimiter']
@@ -4770,16 +4933,16 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
  return False
  if(formdel != formatspecs['format_delimiter']):
  return False
- if(formatspecs['new_style']):
+ if(__use_new_style__):
  inheader = ReadFileHeaderDataBySize(
  fp, formatspecs['format_delimiter'])
  else:
  inheader = ReadFileHeaderDataWoSize(
  fp, formatspecs['format_delimiter'])
- fnumextrafieldsize = int(inheader[6], 16)
- fnumextrafields = int(inheader[7], 16)
+ fnumextrafieldsize = int(inheader[15], 16)
+ fnumextrafields = int(inheader[16], 16)
  fextrafieldslist = []
- extrastart = 8
+ extrastart = 17
  extraend = extrastart + fnumextrafields
  while(extrastart < extraend):
  fextrafieldslist.append(inheader[extrastart])
@@ -4793,17 +4956,126 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
  fextrafieldslist = json.loads(fextrafieldslist[0])
  except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
  pass
+ fvendorfieldslist = []
+ fvendorfields = 0
+ if(len(inheader)>extraend):
+ extrastart = extraend
+ extraend = len(inheader) - 2
+ while(extrastart < extraend):
+ fvendorfieldslist.append(inheader[extrastart])
+ extrastart = extrastart + 1
+ fvendorfields = fvendorfields + 1
  formversion = re.findall("([\\d]+)", formstring)
  fheadsize = int(inheader[0], 16)
  fnumfields = int(inheader[1], 16)
- fhencoding = inheader[2]
- fostype = inheader[3]
- fpythontype = inheader[4]
- fnumfiles = int(inheader[5], 16)
+ fheadctime = int(inheader[2], 16)
+ fheadmtime = int(inheader[3], 16)
+ fhencoding = inheader[4]
+ fostype = inheader[5]
+ fpythontype = inheader[6]
+ fprojectname = inheader[7]
+ fnumfiles = int(inheader[8], 16)
+ fseeknextfile = inheader[9]
+ fjsontype = inheader[10]
+ fjsonlen = int(inheader[11], 16)
+ fjsonsize = int(inheader[12], 16)
+ fjsonchecksumtype = inheader[13]
+ fjsonchecksum = inheader[14]
+ fjsoncontent = {}
+ fjstart = fp.tell()
+ if(fjsontype=="json"):
+ fjsoncontent = {}
+ fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+ if(fjsonsize > 0):
+ try:
+ fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+ fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
+ except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+ try:
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = json.loads(fprejsoncontent)
+ except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+ fprejsoncontent = ""
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = {}
+ else:
+ fprejsoncontent = ""
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = {}
+ elif(testyaml and fjsontype == "yaml"):
+ fjsoncontent = {}
+ fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+ if (fjsonsize > 0):
+ try:
+ # try base64 → utf-8 → YAML
+ fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+ fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+ except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
+ try:
+ # fall back to treating the bytes as plain text YAML
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+ except (UnicodeDecodeError, yaml.YAMLError):
+ # final fallback: empty
+ fprejsoncontent = ""
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = {}
+ else:
+ fprejsoncontent = ""
+ fjsonrawcontent = fprejsoncontent
+ fjsoncontent = {}
+ elif(not testyaml and fjsontype == "yaml"):
+ fjsoncontent = {}
+ fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+ fprejsoncontent = ""
+ fjsonrawcontent = fprejsoncontent
+ elif(fjsontype=="list"):
+ fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+ flisttmp = MkTempFile()
+ flisttmp.write(fprejsoncontent.encode())
+ flisttmp.seek(0)
+ fjsoncontent = ReadFileHeaderData(flisttmp, fjsonlen, delimiter)
+ flisttmp.close()
+ fjsonrawcontent = fjsoncontent
+ if(fjsonlen==1):
+ try:
+ fjsonrawcontent = base64.b64decode(fjsoncontent[0]).decode("UTF-8")
+ fjsoncontent = json.loads(base64.b64decode(fjsoncontent[0]).decode("UTF-8"))
+ fjsonlen = len(fjsoncontent)
+ except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+ try:
+ fjsonrawcontent = fjsoncontent[0]
+ fjsoncontent = json.loads(fjsoncontent[0])
+ except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+ pass
+ fjend = fp.tell()
+ if(re.findall("^\\+([0-9]+)", fseeknextfile)):
+ fseeknextasnum = int(fseeknextfile.replace("+", ""))
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 1)
+ elif(re.findall("^\\-([0-9]+)", fseeknextfile)):
+ fseeknextasnum = int(fseeknextfile)
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 1)
+ elif(re.findall("^([0-9]+)", fseeknextfile)):
+ fseeknextasnum = int(fseeknextfile)
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 0)
+ else:
+ return False
+ jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+ if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
+ VerbosePrintOut("File JSON Data Checksum Error at offset " + str(fjstart))
+ VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
+ return False
  fprechecksumtype = inheader[-2]
  fprechecksum = inheader[-1]
- headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
- newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
+ headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs, saltkey)
+ newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
  if(not headercheck and not skipchecksum):
  VerbosePrintOut(
  "File Header Checksum Error with file at offset " + str(0))
@@ -4812,7 +5084,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
  return False
  formversions = re.search('(.*?)(\\d+)', formstring).groups()
  fcompresstype = ""
- outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fimptype': fpythontype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
+ outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fmtime': fheadmtime, 'fctime': fheadctime, 'fversion': formversions[1], 'fostype': fostype, 'fprojectname': fprojectname, 'fimptype': fpythontype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fseeknextfile': fseeknextfile, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'fjsonchecksumtype': fjsonchecksumtype, 'fjsontype': fjsontype, 'fjsonlen': fjsonlen, 'fjsonsize': fjsonsize, 'fjsonrawdata': fjsonrawcontent, 'fjsondata': fjsoncontent, 'fjstart': fjstart, 'fjend': fjend, 'fjsonchecksum': fjsonchecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'fvendorfields': fvendorfields, 'fvendordata': fvendorfieldslist, 'ffilelist': []}
  if (seekstart < 0) or (seekstart > fnumfiles):
  seekstart = 0
  if (seekend == 0) or (seekend > fnumfiles) or (seekend < seekstart):
@@ -4839,16 +5111,15 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
  prefjsonchecksum = preheaderdata[31]
  prejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
  fp.seek(len(delimiter), 1)
- prejsonfcs = GetFileChecksum(prejsoncontent, prefjsonchecksumtype, True, formatspecs)
- if(not hmac.compare_digest(prefjsonchecksum, prejsonfcs) and not skipchecksum):
+ prejsonfcs = GetFileChecksum(prejsoncontent, prefjsonchecksumtype, True, formatspecs, saltkey)
+ if(not CheckChecksums(prefjsonchecksum, prejsonfcs) and not skipchecksum):
  VerbosePrintOut("File JSON Data Checksum Error with file " +
  prefname + " at offset " + str(prefhstart))
  VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
  return False
- prenewfcs = GetHeaderChecksum(
- preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
+ prenewfcs = GetHeaderChecksum(preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs, saltkey)
  prefcs = preheaderdata[-2]
- if(not hmac.compare_digest(prefcs, prenewfcs) and not skipchecksum):
+ if(not CheckChecksums(prefcs, prenewfcs) and not skipchecksum):
  VerbosePrintOut("File Header Checksum Error with file " +
  prefname + " at offset " + str(prefhstart))
  VerbosePrintOut("'" + prefcs + "' != " +
@@ -4863,11 +5134,10 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
  if(prefsize > 0):
  prefcontents.write(fp.read(prefsize))
  prefcontents.seek(0, 0)
- prenewfccs = GetFileChecksum(
- prefcontents, preheaderdata[-3].lower(), False, formatspecs)
+ prenewfccs = GetFileChecksum(prefcontents, preheaderdata[-3].lower(), False, formatspecs, saltkey)
  prefccs = preheaderdata[-1]
  pyhascontents = True
- if(not hmac.compare_digest(prefccs, prenewfccs) and not skipchecksum):
+ if(not CheckChecksums(prefccs, prenewfccs) and not skipchecksum):
  VerbosePrintOut("File Content Checksum Error with file " +
  prefname + " at offset " + str(prefcontentstart))
  VerbosePrintOut("'" + prefccs +
@@ -4894,8 +5164,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
  realidnum = 0
  countnum = seekstart
  while (fp.tell() < CatSizeEnd) if seektoend else (countnum < seekend):
- HeaderOut = ReadFileHeaderDataWithContentToArray(
- fp, listonly, contentasfile, uncompress, skipchecksum, formatspecs)
+ HeaderOut = ReadFileHeaderDataWithContentToArray(fp, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey)
  if(len(HeaderOut) == 0):
  break
  HeaderOut.update({'fid': realidnum, 'fidalt': realidnum})
@@ -4906,7 +5175,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
  return outlist
 
 
- def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
+ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None, seektoend=False):
  if(not hasattr(fp, "read")):
  return False
  delimiter = formatspecs['format_delimiter']
@@ -4926,16 +5195,16 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
4926
5195
  return False
4927
5196
  if(formdel != formatspecs['format_delimiter']):
4928
5197
  return False
4929
- if(formatspecs['new_style']):
5198
+ if(__use_new_style__):
4930
5199
  inheader = ReadFileHeaderDataBySize(
4931
5200
  fp, formatspecs['format_delimiter'])
4932
5201
  else:
4933
5202
  inheader = ReadFileHeaderDataWoSize(
4934
5203
  fp, formatspecs['format_delimiter'])
4935
- fnumextrafieldsize = int(inheader[6], 16)
4936
- fnumextrafields = int(inheader[7], 16)
5204
+ fnumextrafieldsize = int(inheader[15], 16)
5205
+ fnumextrafields = int(inheader[16], 16)
4937
5206
  fextrafieldslist = []
4938
- extrastart = 8
5207
+ extrastart = 17
4939
5208
  extraend = extrastart + fnumextrafields
4940
5209
  while(extrastart < extraend):
4941
5210
  fextrafieldslist.append(inheader[extrastart])
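
The new extra-field offsets (15, 16, start at 17) follow from the widened archive header that AppendFileHeader writes later in this diff. Reconstructed from that writer and this reader, the apparent index layout is (a reading aid inferred from the code, not an authoritative spec):

    #  0: header size (hex)          9: seek-next-file spec ("+N"/"-N"/"N")
    #  1: field count (hex)         10: JSON data type
    #  2: header mtime (ns, hex)    11: JSON entry count (hex)
    #  3: header ctime (ns, hex)    12: JSON byte size (hex)
    #  4: encoding                  13: JSON checksum type
    #  5: OS type                   14: JSON checksum
    #  6: Python implementation     15: extra-field byte size (hex)
    #  7: program name              16: extra-field count (hex)
    #  8: file count (hex)          17+: extra fields, then checksum type/value
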
@@ -4952,14 +5221,44 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
4952
5221
  formversion = re.findall("([\\d]+)", formstring)
4953
5222
  fheadsize = int(inheader[0], 16)
4954
5223
  fnumfields = int(inheader[1], 16)
4955
- fhencoding = inheader[2]
4956
- fostype = inheader[3]
4957
- fpythontype = inheader[4]
4958
- fnumfiles = int(inheader[5], 16)
5224
+ fnumfiles = int(inheader[8], 16)
5225
+ fseeknextfile = inheader[9]
5226
+ fjsontype = inheader[10]
5227
+ fjsonlen = int(inheader[11], 16)
5228
+ fjsonsize = int(inheader[12], 16)
5229
+ fjsonchecksumtype = inheader[13]
5230
+ fjsonchecksum = inheader[14]
5231
+ fjsoncontent = {}
5232
+ fjstart = fp.tell()
5233
+ fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
5234
+ fjend = fp.tell()
5235
+ if(re.findall("^\\+([0-9]+)", fseeknextfile)):
5236
+ fseeknextasnum = int(fseeknextfile.replace("+", ""))
5237
+ if(abs(fseeknextasnum) == 0):
5238
+ pass
5239
+ fp.seek(fseeknextasnum, 1)
5240
+ elif(re.findall("^\\-([0-9]+)", fseeknextfile)):
5241
+ fseeknextasnum = int(fseeknextfile)
5242
+ if(abs(fseeknextasnum) == 0):
5243
+ pass
5244
+ fp.seek(fseeknextasnum, 1)
5245
+ elif(re.findall("^([0-9]+)", fseeknextfile)):
5246
+ fseeknextasnum = int(fseeknextfile)
5247
+ if(abs(fseeknextasnum) == 0):
5248
+ pass
5249
+ fp.seek(fseeknextasnum, 0)
5250
+ else:
5251
+ return False
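
The three branches above implement the seek-next-file convention used throughout the format: "+N" skips N bytes forward from the current position, "-N" seeks backward, and a bare "N" is an absolute offset from the start of the archive. The same logic in isolation (helper name illustrative):

    def seek_next(fp, spec):
        if spec.startswith("+"):
            fp.seek(int(spec[1:]), 1)    # relative, forward
        elif spec.startswith("-"):
            fp.seek(int(spec), 1)        # relative, backward
        elif spec.isdigit():
            fp.seek(int(spec), 0)        # absolute from archive start
        else:
            raise ValueError("bad seek-next-file spec: " + spec)
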
5252
+ jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
5253
+ if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
5254
+ VerbosePrintOut("Archive JSON Data Checksum Error" +
5255
+ " at offset " + str(fjstart))
5256
+ VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
5257
+ return False
4959
5258
  fprechecksumtype = inheader[-2]
4960
5259
  fprechecksum = inheader[-1]
4961
- headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
4962
- newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
5260
+ headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs, saltkey)
5261
+ newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
4963
5262
  if(not headercheck and not skipchecksum):
4964
5263
  VerbosePrintOut(
4965
5264
  "File Header Checksum Error with file at offset " + str(0))
@@ -4978,7 +5277,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
4978
5277
  il = 0
4979
5278
  while(il < seekstart):
4980
5279
  prefhstart = fp.tell()
4981
- if(formatspecs['new_style']):
5280
+ if(__use_new_style__):
4982
5281
  preheaderdata = ReadFileHeaderDataBySize(
4983
5282
  fp, formatspecs['format_delimiter'])
4984
5283
  else:
@@ -5000,16 +5299,15 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
5000
5299
  prefjsonchecksum = preheaderdata[31]
5001
5300
  prefprejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
5002
5301
  fp.seek(len(delimiter), 1)
5003
- prejsonfcs = GetFileChecksum(prefprejsoncontent, prefjsonchecksumtype, True, formatspecs)
5004
- if(not hmac.compare_digest(prefjsonchecksum, prejsonfcs) and not skipchecksum):
5302
+ prejsonfcs = GetFileChecksum(prefprejsoncontent, prefjsonchecksumtype, True, formatspecs, saltkey)
5303
+ if(not CheckChecksums(prefjsonchecksum, prejsonfcs) and not skipchecksum):
5005
5304
  VerbosePrintOut("File JSON Data Checksum Error with file " +
5006
5305
  prefname + " at offset " + str(prefhstart))
5007
5306
  VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
5008
5307
  return False
5009
- prenewfcs = GetHeaderChecksum(
5010
- preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
5308
+ prenewfcs = GetHeaderChecksum(preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs, saltkey)
5011
5309
  prefcs = preheaderdata[-2]
5012
- if(prefcs != prenewfcs and not skipchecksum):
5310
+ if(not CheckChecksums(prefcs, prenewfcs) and not skipchecksum):
5013
5311
  VerbosePrintOut("File Header Checksum Error with file " +
5014
5312
  prefname + " at offset " + str(prefhstart))
5015
5313
  VerbosePrintOut("'" + prefcs + "' != " +
@@ -5026,11 +5324,10 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
5026
5324
  prefcontents = fp.read(prefsize)
5027
5325
  else:
5028
5326
  prefcontents = fp.read(prefcsize)
5029
- prenewfccs = GetFileChecksum(
5030
- prefcontents, preheaderdata[-3].lower(), False, formatspecs)
5327
+ prenewfccs = GetFileChecksum(prefcontents, preheaderdata[-3].lower(), False, formatspecs, saltkey)
5031
5328
  prefccs = preheaderdata[-1]
5032
5329
  pyhascontents = True
5033
- if(not hmac.compare_digest(prefcs, prenewfcs) and not skipchecksum):
5330
+ if(not CheckChecksums(prefccs, prenewfccs) and not skipchecksum):
5034
5331
  VerbosePrintOut("File Content Checksum Error with file " +
5035
5332
  prefname + " at offset " + str(prefcontentstart))
5036
5333
  VerbosePrintOut("'" + prefccs +
@@ -5057,8 +5354,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
5057
5354
  realidnum = 0
5058
5355
  countnum = seekstart
5059
5356
  while (fp.tell() < CatSizeEnd) if seektoend else (countnum < seekend):
5060
- HeaderOut = ReadFileHeaderDataWithContentToList(
5061
- fp, listonly, contentasfile, uncompress, skipchecksum, formatspecs)
5357
+ HeaderOut = ReadFileHeaderDataWithContentToList(fp, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey)
5062
5358
  if(len(HeaderOut) == 0):
5063
5359
  break
5064
5360
  outlist.append(HeaderOut)
@@ -5066,7 +5362,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
5066
5362
  realidnum = realidnum + 1
5067
5363
  return outlist
5068
5364
 
5069
- def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
5365
+ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
5070
5366
  if(hasattr(infile, "read") or hasattr(infile, "write")):
5071
5367
  fp = infile
5072
5368
  try:
@@ -5161,7 +5457,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
5161
5457
  else:
5162
5458
  break
5163
5459
  readfp.seek(oldfppos, 0)
5164
- ArchiveList.append(ReadFileDataWithContentToArray(readfp, currentfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, seektoend))
5460
+ ArchiveList.append(ReadFileDataWithContentToArray(readfp, currentfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, saltkey, seektoend))
5165
5461
  currentfilepos = readfp.tell()
5166
5462
  else:
5167
5463
  infp = UncompressFileAlt(readfp, formatspecs, currentfilepos)
@@ -5183,27 +5479,27 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
5183
5479
  else:
5184
5480
  break
5185
5481
  infp.seek(oldinfppos, 0)
5186
- ArchiveList.append(ReadFileDataWithContentToArray(infp, currentinfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, seektoend))
5482
+ ArchiveList.append(ReadFileDataWithContentToArray(infp, currentinfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, saltkey, seektoend))
5187
5483
  currentinfilepos = infp.tell()
5188
5484
  currentfilepos = readfp.tell()
5189
5485
  return ArchiveList
5190
5486
 
5191
5487
 
5192
- def ReadInMultipleFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
5488
+ def ReadInMultipleFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
5193
5489
  if(isinstance(infile, (list, tuple, ))):
5194
5490
  pass
5195
5491
  else:
5196
5492
  infile = [infile]
5197
5493
  outretval = []
5198
5494
  for curfname in infile:
5199
- outretval.append(ReadInFileWithContentToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend))
5495
+ outretval.append(ReadInFileWithContentToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend))
5200
5496
  return outretval
5201
5497
 
5202
- def ReadInMultipleFilesWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
5203
- return ReadInMultipleFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
5498
+ def ReadInMultipleFilesWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
5499
+ return ReadInMultipleFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
5204
5500
 
5205
5501
 
5206
- def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
5502
+ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
5207
5503
  if(hasattr(infile, "read") or hasattr(infile, "write")):
5208
5504
  fp = infile
5209
5505
  try:
@@ -5298,7 +5594,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
5298
5594
  else:
5299
5595
  break
5300
5596
  readfp.seek(oldfppos, 0)
5301
- ArchiveList.append(ReadFileDataWithContentToList(readfp, currentfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, seektoend))
5597
+ ArchiveList.append(ReadFileDataWithContentToList(readfp, currentfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, saltkey, seektoend))
5302
5598
  currentfilepos = readfp.tell()
5303
5599
  else:
5304
5600
  infp = UncompressFileAlt(readfp, formatspecs, currentfilepos)
@@ -5320,24 +5616,24 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
5320
5616
  else:
5321
5617
  break
5322
5618
  infp.seek(oldinfppos, 0)
5323
- ArchiveList.append(ReadFileDataWithContentToList(infp, currentinfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, seektoend))
5619
+ ArchiveList.append(ReadFileDataWithContentToList(infp, currentinfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, saltkey, seektoend))
5324
5620
  currentinfilepos = infp.tell()
5325
5621
  currentfilepos = readfp.tell()
5326
5622
  return ArchiveList
5327
5623
 
5328
5624
 
5329
- def ReadInMultipleFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
5625
+ def ReadInMultipleFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
5330
5626
  if(isinstance(infile, (list, tuple, ))):
5331
5627
  pass
5332
5628
  else:
5333
5629
  infile = [infile]
5334
5630
  outretval = []
5335
5631
  for curfname in infile:
5336
- outretval.append(ReadInFileWithContentToList(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend))
5632
+ outretval.append(ReadInFileWithContentToList(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend))
5337
5633
  return outretval
5338
5634
 
5339
- def ReadInMultipleFilesWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
5340
- return ReadInMultipleFileWithContentToList(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
5635
+ def ReadInMultipleFilesWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
5636
+ return ReadInMultipleFileWithContentToList(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
5341
5637
 
5342
5638
 
5343
5639
  def _field_to_bytes(x):
@@ -5391,12 +5687,7 @@ def AppendNullBytes(indata=None, delimiter=__file_format_dict__['format_delimite
5391
5687
  def _hex_lower(n):
5392
5688
  return format(int(n), 'x').lower()
5393
5689
 
5394
- def AppendFileHeader(fp,
5395
- numfiles,
5396
- fencoding,
5397
- extradata=None,
5398
- checksumtype="md5",
5399
- formatspecs=__file_format_dict__):
5690
+ def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__, saltkey=None):
5400
5691
  """
5401
5692
  Build and write the archive file header.
5402
5693
  Returns the same file-like 'fp' on success, or False on failure.
@@ -5444,24 +5735,47 @@ def AppendFileHeader(fp,
5444
5735
  # 4) core header fields before checksum:
5445
5736
  # tmpoutlenhex, fencoding, platform.system(), fnumfiles
5446
5737
  fnumfiles_hex = _hex_lower(numfiles)
5447
-
5738
+ fjsontype = "json"
5739
+ if(len(jsondata) > 0):
5740
+ try:
5741
+ fjsoncontent = json.dumps(jsondata, separators=(',', ':')).encode("UTF-8")
5742
+ except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
5743
+ fjsoncontent = "".encode("UTF-8")
5744
+ else:
5745
+ fjsoncontent = "".encode("UTF-8")
5746
+ fjsonsize = format(len(fjsoncontent), 'x').lower()
5747
+ fjsonlen = format(len(jsondata), 'x').lower()
5748
+ tmpoutlist = []
5749
+ tmpoutlist.append(fjsontype)
5750
+ tmpoutlist.append(fjsonlen)
5751
+ tmpoutlist.append(fjsonsize)
5752
+ if(len(jsondata) > 0):
5753
+ tmpoutlist.append(checksumtype[1])
5754
+ tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[1], True, formatspecs, saltkey))
5755
+ else:
5756
+ tmpoutlist.append("none")
5757
+ tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs, saltkey))
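
As with every numeric field in this format, the JSON length and size are serialized as lowercase hex and parsed back with int(s, 16):

    n = 5088
    field = format(n, 'x').lower()    # "13e0"
    assert int(field, 16) == n
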
5448
5758
  # Preserve your original "tmpoutlen" computation exactly
5449
- tmpoutlist = [extrasizelen, extrafields] # you used this as a separate list
5450
- tmpoutlen = 4 + len(tmpoutlist) + len(xlist) + 2
5759
+ tmpoutlist.append(extrasizelen)
5760
+ tmpoutlist.append(extrafields)
5761
+ tmpoutlen = 10 + len(tmpoutlist) + len(xlist)
5451
5762
  tmpoutlenhex = _hex_lower(tmpoutlen)
5452
-
5763
+ if(hasattr(time, "time_ns")):
5764
+ fctime = format(int(time.time_ns()), 'x').lower()
5765
+ else:
5766
+ fctime = format(int(to_ns(time.time())), 'x').lower()
5453
5767
  # Serialize the first group
5454
- fnumfilesa = AppendNullBytes([tmpoutlenhex, fencoding, platform.system(), py_implementation, fnumfiles_hex], delimiter)
5768
+ fnumfilesa = AppendNullBytes([tmpoutlenhex, fctime, fctime, fencoding, platform.system(), py_implementation, __program_name__, fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
5455
5769
  # Append tmpoutlist
5456
5770
  fnumfilesa += AppendNullBytes(tmpoutlist, delimiter)
5457
5771
  # Append extradata items if any
5458
5772
  if xlist:
5459
5773
  fnumfilesa += AppendNullBytes(xlist, delimiter)
5460
5774
  # Append checksum type
5461
- fnumfilesa += AppendNullByte(checksumtype, delimiter)
5775
+ fnumfilesa += AppendNullByte(checksumtype[0], delimiter)
5462
5776
 
5463
5777
  # 5) inner checksum over fnumfilesa
5464
- outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype, True, formatspecs)
5778
+ outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs, saltkey)
5465
5779
  tmpfileoutstr = fnumfilesa + AppendNullByte(outfileheadercshex, delimiter)
5466
5780
 
5467
5781
  # 6) size of (tmpfileoutstr) excluding one delimiter, per your original math
@@ -5474,7 +5788,7 @@ def AppendFileHeader(fp,
5474
5788
  + fnumfilesa
5475
5789
  )
5476
5790
 
5477
- outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype, True, formatspecs)
5791
+ outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs, saltkey)
5478
5792
  fnumfilesa += AppendNullByte(outfileheadercshex, delimiter)
5479
5793
 
5480
5794
  # 8) final total size field (again per your original logic)
@@ -5482,10 +5796,11 @@ def AppendFileHeader(fp,
5482
5796
  formheaersizestr = AppendNullByte(formheaersize, delimiter) # computed but not appended in original
5483
5797
  # Note: you computed 'formheaersizestr' but didn’t append it afterward in the original either.
5484
5798
  # Keeping that behavior for compatibility.
5485
-
5799
+ nullstrecd = formatspecs['format_delimiter'].encode('UTF-8')
5800
+ outfileout = fnumfilesa + fjsoncontent + nullstrecd
5486
5801
  # 9) write and try to sync
5487
5802
  try:
5488
- fp.write(fnumfilesa)
5803
+ fp.write(outfileout)
5489
5804
  except (OSError, io.UnsupportedOperation):
5490
5805
  return False
5491
5806
 
@@ -5506,21 +5821,21 @@ def AppendFileHeader(fp,
5506
5821
  return fp
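
With the widened signature, callers now pass extradata, jsondata, a two-element checksum-type list (header checksum, JSON checksum), and an optional saltkey. A representative call (values illustrative):

    import io

    fp = io.BytesIO()
    AppendFileHeader(fp, numfiles=0, fencoding="UTF-8",
                     extradata=[], jsondata={"creator": "example"},
                     checksumtype=["md5", "md5"],
                     formatspecs=__file_format_dict__, saltkey=None)
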
5507
5822
 
5508
5823
 
5509
- def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype="md5", formatspecs=__file_format_multi_dict__):
5824
+ def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None):
5510
5825
  if(IsNestedDict(formatspecs) and fmttype in formatspecs):
5511
5826
  formatspecs = formatspecs[fmttype]
5512
5827
  elif(IsNestedDict(formatspecs) and fmttype not in formatspecs):
5513
5828
  fmttype = __file_format_default__
5514
5829
  formatspecs = formatspecs[fmttype]
5515
- AppendFileHeader(fp, 0, "UTF-8", [], checksumtype, formatspecs)
5830
+ AppendFileHeader(fp, 0, "UTF-8", [], {}, checksumtype, formatspecs, saltkey)
5516
5831
  return fp
5517
5832
 
5518
5833
 
5519
- def MakeEmptyFoxFilePointer(fp, fmttype=__file_format_default__, checksumtype="md5", formatspecs=__file_format_multi_dict__):
5520
- return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)
5834
+ def MakeEmptyFoxFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None):
5835
+ return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs, saltkey)
5521
5836
 
5522
5837
 
5523
- def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="md5", formatspecs=__file_format_multi_dict__, returnfp=False):
5838
+ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, returnfp=False):
5524
5839
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
5525
5840
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
5526
5841
  get_in_ext = os.path.splitext(outfile)
@@ -5550,6 +5865,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
5550
5865
  fp = MkTempFile()
5551
5866
  elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
5552
5867
  fp = outfile
5868
+ return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs, saltkey)
5553
5869
  elif(re.findall(__upload_proto_support__, outfile)):
5554
5870
  fp = MkTempFile()
5555
5871
  else:
@@ -5561,7 +5877,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
5561
5877
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
5562
5878
  except PermissionError:
5563
5879
  return False
5564
- AppendFileHeader(fp, 0, "UTF-8", [], checksumtype, formatspecs)
5880
+ AppendFileHeader(fp, 0, "UTF-8", [], {}, checksumtype, formatspecs, saltkey)
5565
5881
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
5566
5882
  fp = CompressOpenFileAlt(
5567
5883
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -5592,11 +5908,11 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
5592
5908
  return True
5593
5909
 
5594
5910
 
5595
- def MakeEmptyFoxFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="md5", formatspecs=__file_format_dict__, returnfp=False):
5596
- return MakeEmptyFile(outfile, "auto", compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, returnfp)
5911
+ def MakeEmptyFoxFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, returnfp=False):
5912
+ return MakeEmptyFile(outfile, "auto", compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, returnfp)
5597
5913
 
5598
5914
 
5599
- def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, filecontent="", checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__):
5915
+ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, filecontent="", checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None):
5600
5916
  if(not hasattr(fp, "write")):
5601
5917
  return False
5602
5918
  if (isinstance(extradata, dict) or IsNestedDictAlt(extradata)) and len(extradata) > 0:
@@ -5628,10 +5944,10 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
5628
5944
  tmpoutlist.append(fjsonsize)
5629
5945
  if(len(jsondata) > 0):
5630
5946
  tmpoutlist.append(checksumtype[2])
5631
- tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[2], True, formatspecs))
5947
+ tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[2], True, formatspecs, saltkey))
5632
5948
  else:
5633
5949
  tmpoutlist.append("none")
5634
- tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs))
5950
+ tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs, saltkey))
5635
5951
  tmpoutlist.append(extrasizelen)
5636
5952
  tmpoutlist.append(extrafields)
5637
5953
  outfileoutstr = AppendNullBytes(
@@ -5646,22 +5962,18 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
5646
5962
  outfileoutstr = outfileoutstr + \
5647
5963
  AppendNullBytes(checksumlist, formatspecs['format_delimiter'])
5648
5964
  nullstrecd = formatspecs['format_delimiter'].encode('UTF-8')
5649
- outfileheadercshex = GetFileChecksum(
5650
- outfileoutstr, checksumtype[0], True, formatspecs)
5965
+ outfileheadercshex = GetFileChecksum(outfileoutstr, checksumtype[0], True, formatspecs, saltkey)
5651
5966
  if(len(filecontent) == 0):
5652
- outfilecontentcshex = GetFileChecksum(
5653
- filecontent, "none", False, formatspecs)
5967
+ outfilecontentcshex = GetFileChecksum(filecontent, "none", False, formatspecs, saltkey)
5654
5968
  else:
5655
- outfilecontentcshex = GetFileChecksum(
5656
- filecontent, checksumtype[1], False, formatspecs)
5969
+ outfilecontentcshex = GetFileChecksum(filecontent, checksumtype[1], False, formatspecs, saltkey)
5657
5970
  tmpfileoutstr = outfileoutstr + \
5658
5971
  AppendNullBytes([outfileheadercshex, outfilecontentcshex],
5659
5972
  formatspecs['format_delimiter'])
5660
5973
  formheaersize = format(int(len(tmpfileoutstr) - len(formatspecs['format_delimiter'])), 'x').lower()
5661
5974
  outfileoutstr = AppendNullByte(
5662
5975
  formheaersize, formatspecs['format_delimiter']) + outfileoutstr
5663
- outfileheadercshex = GetFileChecksum(
5664
- outfileoutstr, checksumtype[0], True, formatspecs)
5976
+ outfileheadercshex = GetFileChecksum(outfileoutstr, checksumtype[0], True, formatspecs, saltkey)
5665
5977
  outfileoutstr = outfileoutstr + \
5666
5978
  AppendNullBytes([outfileheadercshex, outfilecontentcshex],
5667
5979
  formatspecs['format_delimiter'])
@@ -5679,14 +5991,11 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
5679
5991
  pass
5680
5992
  return fp
5681
5993
 
5682
- def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
5994
+ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
5683
5995
  if(not hasattr(fp, "write")):
5684
5996
  return False
5685
- advancedlist = formatspecs['use_advanced_list']
5686
- altinode = formatspecs['use_alt_inode']
5687
- if(verbose):
5688
- logging.basicConfig(format="%(message)s",
5689
- stream=PY_STDOUT_TEXT, level=logging.DEBUG)
5997
+ advancedlist = __use_advanced_list__
5998
+ altinode = __use_alt_inode__
5690
5999
  infilelist = []
5691
6000
  if(infiles == "-"):
5692
6001
  for line in PY_STDIN_TEXT:
@@ -5728,7 +6037,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
5728
6037
  inodetoforminode = {}
5729
6038
  numfiles = int(len(GetDirList))
5730
6039
  fnumfiles = format(numfiles, 'x').lower()
5731
- AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
6040
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
5732
6041
  try:
5733
6042
  fp.flush()
5734
6043
  if(hasattr(os, "sync")):
@@ -5757,14 +6066,24 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
5757
6066
  FullSizeFilesAlt += fstatinfo.st_rsize
5758
6067
  except AttributeError:
5759
6068
  FullSizeFilesAlt += fstatinfo.st_size
6069
+ fblksize = 0
6070
+ if(hasattr(fstatinfo, "st_blksize")):
6071
+ fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
6072
+ fblocks = 0
6073
+ if(hasattr(fstatinfo, "st_blocks")):
6074
+ fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
6075
+ fflags = 0
6076
+ if(hasattr(fstatinfo, "st_flags")):
6077
+ fflags = format(int(fstatinfo.st_flags), 'x').lower()
5760
6078
  ftype = 0
5761
- if(hasattr(os.path, "isjunction") and os.path.isjunction(fname)):
6079
+ if(not followlink and hasattr(os.path, "isjunction") and os.path.isjunction(fname)):
5762
6080
  ftype = 13
5763
- elif(hasattr(fstatinfo, "st_blocks") and fstatinfo.st_blocks * 512 < fstatinfo.st_size):
5764
- ftype = 12
5765
6081
  elif(stat.S_ISREG(fpremode)):
5766
- ftype = 0
5767
- elif(stat.S_ISLNK(fpremode)):
6082
+ if(hasattr(fstatinfo, "st_blocks") and fstatinfo.st_size > 0 and fstatinfo.st_blocks * 512 < fstatinfo.st_size):
6083
+ ftype = 12
6084
+ else:
6085
+ ftype = 0
6086
+ elif(not followlink and stat.S_ISLNK(fpremode)):
5768
6087
  ftype = 2
5769
6088
  elif(stat.S_ISCHR(fpremode)):
5770
6089
  ftype = 3
@@ -5786,43 +6105,42 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
5786
6105
  ftype = 0
5787
6106
  flinkname = ""
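
Type 12 above marks sparse regular files: if the blocks actually allocated cover less than the file's apparent size, the file contains holes. The heuristic in isolation (POSIX-only; st_blocks counts 512-byte units):

    import os
    import stat

    def is_sparse(path):
        st = os.lstat(path)
        return (stat.S_ISREG(st.st_mode)
                and hasattr(st, "st_blocks")
                and st.st_size > 0
                and st.st_blocks * 512 < st.st_size)
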
5788
6107
  fcurfid = format(int(curfid), 'x').lower()
5789
- if not followlink and finode != 0:
6108
+ if(not followlink and finode != 0):
5790
6109
  unique_id = (fstatinfo.st_dev, finode)
5791
- if ftype != 1:
5792
- if unique_id in inodelist:
6110
+ if(ftype != 1):
6111
+ if(unique_id in inodetofile):
5793
6112
  # Hard link detected
5794
6113
  ftype = 1
5795
6114
  flinkname = inodetofile[unique_id]
5796
- if altinode:
5797
- fcurinode = format(int(unique_id[1]), 'x').lower()
5798
- else:
5799
- fcurinode = format(int(inodetoforminode[unique_id]), 'x').lower()
5800
6115
  else:
5801
- # New inode
5802
- inodelist.append(unique_id)
6116
+ # First time seeing this inode
5803
6117
  inodetofile[unique_id] = fname
6118
+ if(unique_id not in inodetoforminode):
5804
6119
  inodetoforminode[unique_id] = curinode
5805
- if altinode:
5806
- fcurinode = format(int(unique_id[1]), 'x').lower()
5807
- else:
5808
- fcurinode = format(int(curinode), 'x').lower()
5809
- curinode += 1
6120
+ curinode = curinode + 1
6121
+ if(altinode):
6122
+ # altinode == True → use real inode number
6123
+ fcurinode = format(int(unique_id[1]), 'x').lower()
6124
+ else:
6125
+ # altinode == False → use synthetic inode id
6126
+ fcurinode = format(int(inodetoforminode[unique_id]), 'x').lower()
5810
6127
  else:
5811
6128
  # Handle cases where inodes are not supported or symlinks are followed
5812
6129
  fcurinode = format(int(curinode), 'x').lower()
5813
- curinode += 1
6130
+ curinode = curinode + 1
5814
6131
  curfid = curfid + 1
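
The reworked branch keys hard-link detection on the (st_dev, st_ino) pair: the first path seen for a pair is recorded, and any later path with the same pair becomes a type-1 entry pointing back to it, while altinode selects between the real inode number and a synthetic per-archive counter. The same bookkeeping, condensed:

    inodetofile = {}       # (dev, ino) -> first path seen
    inodetoforminode = {}  # (dev, ino) -> synthetic archive inode id

    def classify(path, st, nextid):
        key = (st.st_dev, st.st_ino)
        if key in inodetofile:
            return 1, inodetofile[key], nextid   # hard link to earlier entry
        inodetofile[key] = path
        inodetoforminode[key] = nextid
        return 0, "", nextid + 1                 # first occurrence
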
5815
6132
  if(ftype == 2):
5816
6133
  flinkname = os.readlink(fname)
5817
- if(not os.path.exists(flinkname)):
6134
+ if(not os.path.exists(fname)):
5818
6135
  return False
5819
6136
  try:
5820
6137
  fdev = fstatinfo.st_rdev
5821
6138
  except AttributeError:
5822
6139
  fdev = 0
5823
- getfdev = GetDevMajorMinor(fdev)
5824
- fdev_minor = getfdev[0]
5825
- fdev_major = getfdev[1]
6140
+ try:
6141
+ frdev = fstatinfo.st_rdev
6142
+ except AttributeError:
6143
+ frdev = 0
5826
6144
  # Types that should be considered zero-length in the archive context:
5827
6145
  zero_length_types = {1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 13}
5828
6146
  # Types that have actual data to read:
@@ -5833,13 +6151,28 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
5833
6151
  fsize = format(int(fstatinfo.st_size), 'x').lower()
5834
6152
  else:
5835
6153
  fsize = format(int(fstatinfo.st_size), 'x').lower()
5836
- fatime = format(int(fstatinfo.st_atime), 'x').lower()
5837
- fmtime = format(int(fstatinfo.st_mtime), 'x').lower()
5838
- fctime = format(int(fstatinfo.st_ctime), 'x').lower()
6154
+ if(hasattr(fstatinfo, "st_atime_ns")):
6155
+ fatime = format(int(fstatinfo.st_atime_ns), 'x').lower()
6156
+ else:
6157
+ fatime = format(int(to_ns(fstatinfo.st_atime)), 'x').lower()
6158
+ if(hasattr(fstatinfo, "st_mtime_ns")):
6159
+ fmtime = format(int(fstatinfo.st_mtime_ns), 'x').lower()
6160
+ else:
6161
+ fmtime = format(int(to_ns(fstatinfo.st_mtime)), 'x').lower()
6162
+ if(hasattr(fstatinfo, "st_ctime_ns")):
6163
+ fctime = format(int(fstatinfo.st_ctime_ns), 'x').lower()
6164
+ else:
6165
+ fctime = format(int(to_ns(fstatinfo.st_ctime)), 'x').lower()
5839
6166
  if(hasattr(fstatinfo, "st_birthtime")):
5840
- fbtime = format(int(fstatinfo.st_birthtime), 'x').lower()
6167
+ if(hasattr(fstatinfo, "st_birthtime_ns")):
6168
+ fbtime = format(int(fstatinfo.st_birthtime_ns), 'x').lower()
6169
+ else:
6170
+ fbtime = format(int(to_ns(fstatinfo.st_birthtime)), 'x').lower()
5841
6171
  else:
5842
- fbtime = format(int(fstatinfo.st_ctime), 'x').lower()
6172
+ if(hasattr(fstatinfo, "st_ctime_ns")):
6173
+ fbtime = format(int(fstatinfo.st_ctime_ns), 'x').lower()
6174
+ else:
6175
+ fbtime = format(int(to_ns(fstatinfo.st_ctime)), 'x').lower()
5843
6176
  fmode = format(int(fstatinfo.st_mode), 'x').lower()
5844
6177
  fchmode = format(int(stat.S_IMODE(fstatinfo.st_mode)), 'x').lower()
5845
6178
  ftypemod = format(int(stat.S_IFMT(fstatinfo.st_mode)), 'x').lower()
@@ -5866,8 +6199,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
5866
6199
  except ImportError:
5867
6200
  fgname = ""
5868
6201
  fdev = format(int(fdev), 'x').lower()
5869
- fdev_minor = format(int(fdev_minor), 'x').lower()
5870
- fdev_major = format(int(fdev_major), 'x').lower()
6202
+ frdev = format(int(frdev), 'x').lower()
5871
6203
  finode = format(int(finode), 'x').lower()
5872
6204
  flinkcount = format(int(flinkcount), 'x').lower()
5873
6205
  if(hasattr(fstatinfo, "st_file_attributes")):
@@ -5928,10 +6260,9 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
5928
6260
  fcompression = curcompression
5929
6261
  fcontents.close()
5930
6262
  fcontents = cfcontents
5931
- elif followlink and (ftype == 1 or ftype == 2):
5932
- if(not os.path.exists(flinkname)):
6263
+ elif followlink and (ftype == 2 or ftype in data_types):
6264
+ if(not os.path.exists(fname)):
5933
6265
  return False
5934
- flstatinfo = os.stat(flinkname)
5935
6266
  with open(flinkname, "rb") as fpc:
5936
6267
  shutil.copyfileobj(fpc, fcontents, length=__filebuff_size__)
5937
6268
  typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
@@ -5982,10 +6313,9 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
5982
6313
  fcompression = ""
5983
6314
  fcontents.seek(0, 0)
5984
6315
  ftypehex = format(ftype, 'x').lower()
5985
- tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
5986
- fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
5987
- AppendFileHeaderWithContent(
5988
- fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
6316
+ tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
6317
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
6318
+ AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
5989
6319
  try:
5990
6320
  fp.flush()
5991
6321
  if(hasattr(os, "sync")):
@@ -5994,12 +6324,9 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
5994
6324
  pass
5995
6325
  return fp
5996
6326
 
5997
- def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
6327
+ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
5998
6328
  if(not hasattr(fp, "write")):
5999
6329
  return False
6000
- if(verbose):
6001
- logging.basicConfig(format="%(message)s",
6002
- stream=PY_STDOUT_TEXT, level=logging.DEBUG)
6003
6330
  curinode = 0
6004
6331
  curfid = 0
6005
6332
  inodelist = []
@@ -6063,7 +6390,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
6063
6390
  except FileNotFoundError:
6064
6391
  return False
6065
6392
  numfiles = int(len(tarfp.getmembers()))
6066
- AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
6393
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
6067
6394
  try:
6068
6395
  fp.flush()
6069
6396
  if(hasattr(os, "sync")):
@@ -6081,6 +6408,15 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
6081
6408
  fpremode = member.mode
6082
6409
  ffullmode = member.mode
6083
6410
  flinkcount = 0
6411
+ # tar members carry no os.stat() result, so these fields have no source here
6412
+ fblksize = 0
6413
+ fblocks = 0
6414
+ fflags = 0
6084
6420
  ftype = 0
6085
6421
  if(member.isreg()):
6086
6422
  ffullmode = member.mode + stat.S_IFREG
@@ -6118,12 +6454,11 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
6118
6454
  curfid = curfid + 1
6119
6455
  if(ftype == 2):
6120
6456
  flinkname = member.linkname
6457
+ fdev = format(int("0"), 'x').lower()
6121
6458
  try:
6122
- fdev = format(int(os.makedev(member.devmajor, member.devminor)), 'x').lower()
6459
+ frdev = format(int(os.makedev(member.devmajor, member.devminor)), 'x').lower()
6123
6460
  except AttributeError:
6124
- fdev = format(int(MakeDevAlt(member.devmajor, member.devminor)), 'x').lower()
6125
- fdev_minor = format(int(member.devminor), 'x').lower()
6126
- fdev_major = format(int(member.devmajor), 'x').lower()
6461
+ frdev = format(int(MakeDevAlt(member.devmajor, member.devminor)), 'x').lower()
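
Tar members carry split devmajor/devminor numbers, which are now folded into a single rdev field via os.makedev(), with the module's MakeDevAlt() as the fallback where os.makedev() is unavailable. The standard POSIX round trip:

    import os

    rdev = os.makedev(8, 1)       # e.g. a block device like /dev/sda1
    assert os.major(rdev) == 8
    assert os.minor(rdev) == 1
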
6127
6462
  # Types that should be considered zero-length in the archive context:
6128
6463
  zero_length_types = {1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 13}
6129
6464
  # Types that have actual data to read:
@@ -6134,10 +6469,10 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
6134
6469
  fsize = format(int(member.size), 'x').lower()
6135
6470
  else:
6136
6471
  fsize = format(int(member.size), 'x').lower()
6137
- fatime = format(int(member.mtime), 'x').lower()
6138
- fmtime = format(int(member.mtime), 'x').lower()
6139
- fctime = format(int(member.mtime), 'x').lower()
6140
- fbtime = format(int(member.mtime), 'x').lower()
6472
+ fatime = format(int(to_ns(member.mtime)), 'x').lower()
6473
+ fmtime = format(int(to_ns(member.mtime)), 'x').lower()
6474
+ fctime = format(int(to_ns(member.mtime)), 'x').lower()
6475
+ fbtime = format(int(to_ns(member.mtime)), 'x').lower()
6141
6476
  fmode = format(int(ffullmode), 'x').lower()
6142
6477
  fchmode = format(int(stat.S_IMODE(ffullmode)), 'x').lower()
6143
6478
  ftypemod = format(int(stat.S_IFMT(ffullmode)), 'x').lower()
@@ -6204,10 +6539,9 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
6204
6539
  fcompression = ""
6205
6540
  fcontents.seek(0, 0)
6206
6541
  ftypehex = format(ftype, 'x').lower()
6207
- tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
6208
- fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
6209
- AppendFileHeaderWithContent(
6210
- fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
6542
+ tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
6543
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
6544
+ AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
6211
6545
  try:
6212
6546
  fp.flush()
6213
6547
  if(hasattr(os, "sync")):
@@ -6217,12 +6551,9 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
6217
6551
  fcontents.close()
6218
6552
  return fp
6219
6553
 
6220
- def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
6554
+ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
6221
6555
  if(not hasattr(fp, "write")):
6222
6556
  return False
6223
- if(verbose):
6224
- logging.basicConfig(format="%(message)s",
6225
- stream=PY_STDOUT_TEXT, level=logging.DEBUG)
6226
6557
  curinode = 0
6227
6558
  curfid = 0
6228
6559
  inodelist = []
@@ -6256,7 +6587,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
6256
6587
  if(ziptest):
6257
6588
  VerbosePrintOut("Bad file found!")
6258
6589
  numfiles = int(len(zipfp.infolist()))
6259
- AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
6590
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
6260
6591
  try:
6261
6592
  fp.flush()
6262
6593
  if(hasattr(os, "sync")):
@@ -6277,6 +6608,15 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
6277
6608
  else:
6278
6609
  fpremode = int(stat.S_IFREG | 0x1b6)
6279
6610
  flinkcount = 0
6611
+ # zip members carry no os.stat() result, so these fields have no source here
6612
+ fblksize = 0
6613
+ fblocks = 0
6614
+ fflags = 0
6280
6620
  ftype = 0
6281
6621
  if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
6282
6622
  ftype = 5
@@ -6287,8 +6627,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
6287
6627
  fcurinode = format(int(curfid), 'x').lower()
6288
6628
  curfid = curfid + 1
6289
6629
  fdev = format(int(0), 'x').lower()
6290
- fdev_minor = format(int(0), 'x').lower()
6291
- fdev_major = format(int(0), 'x').lower()
6630
+ frdev = format(int(0), 'x').lower()
6292
6631
  if(ftype == 5):
6293
6632
  fsize = format(int("0"), 'x').lower()
6294
6633
  elif(ftype == 0):
@@ -6296,13 +6635,13 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
6296
6635
  else:
6297
6636
  fsize = format(int(member.file_size), 'x').lower()
6298
6637
  fatime = format(
6299
- int(time.mktime(member.date_time + (0, 0, -1))), 'x').lower()
6638
+ int(to_ns(time.mktime(member.date_time + (0, 0, -1)))), 'x').lower()
6300
6639
  fmtime = format(
6301
- int(time.mktime(member.date_time + (0, 0, -1))), 'x').lower()
6640
+ int(to_ns(time.mktime(member.date_time + (0, 0, -1)))), 'x').lower()
6302
6641
  fctime = format(
6303
- int(time.mktime(member.date_time + (0, 0, -1))), 'x').lower()
6642
+ int(to_ns(time.mktime(member.date_time + (0, 0, -1)))), 'x').lower()
6304
6643
  fbtime = format(
6305
- int(time.mktime(member.date_time + (0, 0, -1))), 'x').lower()
6644
+ int(to_ns(time.mktime(member.date_time + (0, 0, -1)))), 'x').lower()
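
ZIP entries only store a local-time date_time tuple, so it is padded out to a full struct_time with DST unknown (-1), converted through time.mktime(), and scaled to nanoseconds like every other timestamp. In isolation:

    import time

    date_time = (2025, 11, 12, 10, 30, 0)       # as found on a ZipInfo member
    secs = time.mktime(date_time + (0, 0, -1))  # local time, DST resolved by libc
    ns = int(secs * 1000000000)                 # same scaling as to_ns()
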
6306
6645
  if(zipinfo.create_system == 0 or zipinfo.create_system == 10):
6307
6646
  fwinattributes = format(int(zipinfo.external_attr & 0xFFFF), 'x').lower()
6308
6647
  if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
@@ -6418,10 +6757,9 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
6418
6757
  fcompression = ""
6419
6758
  fcontents.seek(0, 0)
6420
6759
  ftypehex = format(ftype, 'x').lower()
6421
- tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
6422
- fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
6423
- AppendFileHeaderWithContent(
6424
- fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
6760
+ tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
6761
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
6762
+ AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
6425
6763
  try:
6426
6764
  fp.flush()
6427
6765
  if(hasattr(os, "sync")):
@@ -6432,16 +6770,12 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
6432
6770
  return fp
6433
6771
 
6434
6772
  if(not rarfile_support):
6435
- def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
6773
+ def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
6436
6774
  return False
6437
-
6438
- if(rarfile_support):
6439
- def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
6775
+ else:
6776
+ def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
6440
6777
  if(not hasattr(fp, "write")):
6441
6778
  return False
6442
- if(verbose):
6443
- logging.basicConfig(format="%(message)s",
6444
- stream=PY_STDOUT_TEXT, level=logging.DEBUG)
6445
6779
  curinode = 0
6446
6780
  curfid = 0
6447
6781
  inodelist = []
@@ -6457,7 +6791,7 @@ if(rarfile_support):
6457
6791
  if(rartest):
6458
6792
  VerbosePrintOut("Bad file found!")
6459
6793
  numfiles = int(len(rarfp.infolist()))
6460
- AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
6794
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
6461
6795
  try:
6462
6796
  fp.flush()
6463
6797
  if(hasattr(os, "sync")):
@@ -6513,6 +6847,15 @@ if(rarfile_support):
6513
6847
  fcompression = ""
6514
6848
  fcsize = format(int(0), 'x').lower()
6515
6849
  flinkcount = 0
6850
+ # rar members carry no os.stat() result, so these fields have no source here
6851
+ fblksize = 0
6852
+ fblocks = 0
6853
+ fflags = 0
6516
6859
  ftype = 0
6517
6860
  if(member.is_file()):
6518
6861
  ftype = 0
@@ -6527,8 +6870,7 @@ if(rarfile_support):
6527
6870
  fcurinode = format(int(curfid), 'x').lower()
6528
6871
  curfid = curfid + 1
6529
6872
  fdev = format(int(0), 'x').lower()
6530
- fdev_minor = format(int(0), 'x').lower()
6531
- fdev_major = format(int(0), 'x').lower()
6873
+ frdev = format(int(0), 'x').lower()
6532
6874
  if(ftype == 5):
6533
6875
  fsize = format(int("0"), 'x').lower()
6534
6876
  elif(ftype == 0):
@@ -6537,20 +6879,20 @@ if(rarfile_support):
6537
6879
  fsize = format(int(member.file_size), 'x').lower()
6538
6880
  try:
6539
6881
  if(member.atime):
6540
- fatime = format(int(member.atime.timestamp()), 'x').lower()
6882
+ fatime = format(int(to_ns(member.atime.timestamp())), 'x').lower()
6541
6883
  else:
6542
- fatime = format(int(member.mtime.timestamp()), 'x').lower()
6884
+ fatime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
6543
6885
  except AttributeError:
6544
- fatime = format(int(member.mtime.timestamp()), 'x').lower()
6545
- fmtime = format(int(member.mtime.timestamp()), 'x').lower()
6886
+ fatime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
6887
+ fmtime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
6546
6888
  try:
6547
6889
  if(member.ctime):
6548
- fctime = format(int(member.ctime.timestamp()), 'x').lower()
6890
+ fctime = format(int(to_ns(member.ctime.timestamp())), 'x').lower()
6549
6891
  else:
6550
- fctime = format(int(member.mtime.timestamp()), 'x').lower()
6892
+ fctime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
6551
6893
  except AttributeError:
6552
- fctime = format(int(member.mtime.timestamp()), 'x').lower()
6553
- fbtime = format(int(member.mtime.timestamp()), 'x').lower()
6894
+ fctime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
6895
+ fbtime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
6554
6896
  if(is_unix and member.external_attr != 0):
6555
6897
  fmode = format(int(member.external_attr), 'x').lower()
6556
6898
  fchmode = format(
@@ -6652,10 +6994,9 @@ if(rarfile_support):
6652
6994
  fcompression = ""
6653
6995
  fcontents.seek(0, 0)
6654
6996
  ftypehex = format(ftype, 'x').lower()
6655
- tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
6656
- fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
6657
- AppendFileHeaderWithContent(
6658
- fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
6997
+ tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
6998
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
6999
+ AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
6659
7000
  try:
6660
7001
  fp.flush()
6661
7002
  if(hasattr(os, "sync")):
@@ -6666,16 +7007,12 @@ if(rarfile_support):
6666
7007
  return fp
6667
7008
 
6668
7009
  if(not py7zr_support):
6669
- def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
7010
+ def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
6670
7011
  return False
6671
-
6672
- if(py7zr_support):
6673
- def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
7012
+ else:
7013
+ def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
6674
7014
  if(not hasattr(fp, "write")):
6675
7015
  return False
6676
- if(verbose):
6677
- logging.basicConfig(format="%(message)s",
6678
- stream=PY_STDOUT_TEXT, level=logging.DEBUG)
6679
7016
  formver = formatspecs['format_ver']
6680
7017
  fileheaderver = str(int(formver.replace(".", "")))
6681
7018
  curinode = 0
@@ -6693,7 +7030,7 @@ if(py7zr_support):
6693
7030
  if(sztestalt):
6694
7031
  VerbosePrintOut("Bad file found!")
6695
7032
  numfiles = int(len(szpfp.list()))
6696
- AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
7033
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
6697
7034
  try:
6698
7035
  fp.flush()
6699
7036
  if(hasattr(os, "sync")):
@@ -6716,6 +7053,15 @@ if(py7zr_support):
6716
7053
  fcompression = ""
6717
7054
  fcsize = format(int(0), 'x').lower()
6718
7055
  flinkcount = 0
7056
+ fblksize = 0
7057
+ if(hasattr(fstatinfo, "st_blksize")):
7058
+ fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
7059
+ fblocks = 0
7060
+ if(hasattr(fstatinfo, "st_blocks")):
7061
+ fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
7062
+ fflags = 0
7063
+ if(hasattr(fstatinfo, "st_flags")):
7064
+ fflags = format(int(fstatinfo.st_flags), 'x').lower()
6719
7065
  ftype = 0
6720
7066
  if(member.is_directory):
6721
7067
  ftype = 5
@@ -6726,14 +7072,13 @@ if(py7zr_support):
6726
7072
  fcurinode = format(int(curfid), 'x').lower()
6727
7073
  curfid = curfid + 1
6728
7074
  fdev = format(int(0), 'x').lower()
6729
- fdev_minor = format(int(0), 'x').lower()
6730
- fdev_major = format(int(0), 'x').lower()
7075
+ frdev = format(int(0), 'x').lower()
6731
7076
  if(ftype == 5):
6732
7077
  fsize = format(int("0"), 'x').lower()
6733
- fatime = format(int(member.creationtime.timestamp()), 'x').lower()
6734
- fmtime = format(int(member.creationtime.timestamp()), 'x').lower()
6735
- fctime = format(int(member.creationtime.timestamp()), 'x').lower()
6736
- fbtime = format(int(member.creationtime.timestamp()), 'x').lower()
7078
+ fatime = format(int(to_ns(member.creationtime.timestamp())), 'x').lower()
7079
+ fmtime = format(int(to_ns(member.creationtime.timestamp())), 'x').lower()
7080
+ fctime = format(int(to_ns(member.creationtime.timestamp())), 'x').lower()
7081
+ fbtime = format(int(to_ns(member.creationtime.timestamp())), 'x').lower()
6737
7082
  if(member.is_directory):
6738
7083
  fmode = format(int(stat.S_IFDIR | 0x1ff), 'x').lower()
6739
7084
  fchmode = format(
@@ -6826,10 +7171,9 @@ if(py7zr_support):
6826
7171
  fcompression = ""
6827
7172
  fcontents.seek(0, 0)
6828
7173
  ftypehex = format(ftype, 'x').lower()
6829
- tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
6830
- fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
6831
- AppendFileHeaderWithContent(
6832
- fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
7174
+ tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
7175
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
7176
+ AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
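+ # The per-file header tuple gained fblksize, fblocks and fflags after fsize, and
+ # the old fdev_minor/fdev_major pair collapsed into a single frdev field, so
+ # readers written against the 0.25.0 field order need the new offsets.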
6833
7177
  try:
6834
7178
  fp.flush()
6835
7179
  if(hasattr(os, "sync")):
@@ -6839,11 +7183,9 @@ if(py7zr_support):
6839
7183
  fcontents.close()
6840
7184
  return fp
6841
7185
 
6842
- def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
7186
+ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
6843
7187
  if(not hasattr(fp, "write")):
6844
7188
  return False
6845
- if(verbose):
6846
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
6847
7189
  GetDirList = inlist
6848
7190
  if(not GetDirList):
6849
7191
  return False
@@ -6855,7 +7197,7 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsond
6855
7197
  inodetoforminode = {}
6856
7198
  numfiles = int(len(GetDirList))
6857
7199
  fnumfiles = format(numfiles, 'x').lower()
6858
- AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
7200
+ AppendFileHeader(fp, numfiles, "UTF-8", [], [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
6859
7201
  for curfname in GetDirList:
6860
7202
  ftype = format(curfname[0], 'x').lower()
6861
7203
  fencoding = curfname[1]
@@ -6869,44 +7211,45 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsond
6869
7211
  fbasedir = os.path.dirname(fname)
6870
7212
  flinkname = curfname[4]
6871
7213
  fsize = format(curfname[5], 'x').lower()
6872
- fatime = format(curfname[6], 'x').lower()
6873
- fmtime = format(curfname[7], 'x').lower()
6874
- fctime = format(curfname[8], 'x').lower()
6875
- fbtime = format(curfname[9], 'x').lower()
6876
- fmode = format(curfname[10], 'x').lower()
6877
- fwinattributes = format(curfname[11], 'x').lower()
6878
- fcompression = curfname[12]
6879
- fcsize = format(curfname[13], 'x').lower()
6880
- fuid = format(curfname[14], 'x').lower()
6881
- funame = curfname[15]
6882
- fgid = format(curfname[16], 'x').lower()
6883
- fgname = curfname[17]
6884
- fid = format(curfname[18], 'x').lower()
6885
- finode = format(curfname[19], 'x').lower()
6886
- flinkcount = format(curfname[20], 'x').lower()
6887
- fdev = format(curfname[21], 'x').lower()
6888
- fdev_minor = format(curfname[22], 'x').lower()
6889
- fdev_major = format(curfname[23], 'x').lower()
6890
- fseeknextfile = curfname[24]
6891
- extradata = curfname[25]
6892
- fheaderchecksumtype = curfname[26]
6893
- fcontentchecksumtype = curfname[27]
6894
- fcontents = curfname[28]
7214
+ fblksize = format(curfname[6], 'x').lower()
7215
+ fblocks = format(curfname[7], 'x').lower()
7216
+ fflags = format(curfname[8], 'x').lower()
7217
+ fatime = format(curfname[9], 'x').lower()
7218
+ fmtime = format(curfname[10], 'x').lower()
7219
+ fctime = format(curfname[11], 'x').lower()
7220
+ fbtime = format(curfname[12], 'x').lower()
7221
+ fmode = format(curfname[13], 'x').lower()
7222
+ fwinattributes = format(curfname[14], 'x').lower()
7223
+ fcompression = curfname[15]
7224
+ fcsize = format(curfname[16], 'x').lower()
7225
+ fuid = format(curfname[17], 'x').lower()
7226
+ funame = curfname[18]
7227
+ fgid = format(curfname[19], 'x').lower()
7228
+ fgname = curfname[20]
7229
+ fid = format(curfname[21], 'x').lower()
7230
+ finode = format(curfname[22], 'x').lower()
7231
+ flinkcount = format(curfname[23], 'x').lower()
7232
+ fdev = format(curfname[24], 'x').lower()
7233
+ frdev = format(curfname[25], 'x').lower()
7234
+ fseeknextfile = curfname[26]
7235
+ extradata = curfname[27]
7236
+ fheaderchecksumtype = curfname[28]
7237
+ fcontentchecksumtype = curfname[29]
7238
+ fcontents = curfname[30]
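+ # For reference, the expected input-list layout (indices 0-30) appears to be:
+ # type, encoding, content-encoding, name, linkname, size, blksize, blocks, flags,
+ # atime, mtime, ctime, btime, mode, winattributes, compression, csize, uid, uname,
+ # gid, gname, id, inode, linkcount, dev, rdev, seeknextfile, extradata,
+ # headerchecksumtype, contentchecksumtype, contents.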
6895
7239
  fencoding = GetFileEncoding(fcontents, 0, False)[0]
6896
- tmpoutlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize,
6897
- fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev, fdev_minor, fdev_major, fseeknextfile]
7240
+ tmpoutlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize,
7241
+ fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev, frdev, fseeknextfile]
6898
7242
  fcontents.seek(0, 0)
6899
- AppendFileHeaderWithContent(
6900
- fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
7243
+ AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
6901
7244
  return fp
6902
7245
 
6903
7246
 
6904
- def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
6905
- inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
6906
- return AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, followlink, checksumtype, formatspecs, verbose)
7247
+ def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
7248
+ inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs, saltkey, False)
7249
+ return AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, followlink=followlink, checksumtype=checksumtype, formatspecs=formatspecs, saltkey=saltkey, verbose=verbose)
6907
7250
 
6908
7251
 
6909
- def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7252
+ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
6910
7253
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
6911
7254
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
6912
7255
  get_in_ext = os.path.splitext(outfile)
@@ -6950,8 +7293,7 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
6950
7293
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
6951
7294
  except PermissionError:
6952
7295
  return False
6953
- AppendFilesWithContent(infiles, fp, dirlistfromtxt, extradata, jsondata, compression,
6954
- compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, formatspecs, verbose)
7296
+ AppendFilesWithContent(infiles, fp, dirlistfromtxt, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, formatspecs, saltkey, verbose)
6955
7297
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
6956
7298
  fp = CompressOpenFileAlt(
6957
7299
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -6980,12 +7322,12 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
6980
7322
  fp.close()
6981
7323
  return True
6982
7324
 
6983
- def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7325
+ def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
6984
7326
  if not isinstance(infiles, list):
6985
7327
  infiles = [infiles]
6986
7328
  returnout = False
6987
7329
  for infileslist in infiles:
6988
- returnout = AppendFilesWithContentToOutFile(infileslist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, True)
7330
+ returnout = AppendFilesWithContentToOutFile(infileslist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose, True)
6989
7331
  if(not returnout):
6990
7332
  break
6991
7333
  else:
@@ -6995,7 +7337,7 @@ def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=Fals
6995
7337
  return True
6996
7338
  return returnout
6997
7339
 
6998
- def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
7340
+ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
6999
7341
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
7000
7342
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
7001
7343
  get_in_ext = os.path.splitext(outfile)
@@ -7036,8 +7378,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
7036
7378
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
7037
7379
  except PermissionError:
7038
7380
  return False
7039
- AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, compression,
7040
- compresswholefile, compressionlevel, followlink, checksumtype, formatspecs, verbose)
7381
+ AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, compression, compresswholefile, compressionlevel, followlink, checksumtype, formatspecs, saltkey, verbose)
7041
7382
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
7042
7383
  fp = CompressOpenFileAlt(
7043
7384
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7067,7 +7408,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
7067
7408
  fp.close()
7068
7409
  return True
7069
7410
 
7070
- def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7411
+ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
7071
7412
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
7072
7413
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
7073
7414
  get_in_ext = os.path.splitext(outfile)
@@ -7109,8 +7450,7 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
7109
7450
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
7110
7451
  except PermissionError:
7111
7452
  return False
7112
- AppendFilesWithContentFromTarFile(infiles, fp, extradata, jsondata, compression,
7113
- compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, verbose)
7453
+ AppendFilesWithContentFromTarFile(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, verbose)
7114
7454
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
7115
7455
  fp = CompressOpenFileAlt(
7116
7456
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7140,12 +7480,12 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
7140
7480
  fp.close()
7141
7481
  return True
7142
7482
 
7143
- def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7483
+ def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
7144
7484
  if not isinstance(infiles, list):
7145
7485
  infiles = [infiles]
7146
7486
  returnout = False
7147
7487
  for infileslist in infiles:
7148
- returnout = AppendFilesWithContentFromTarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
7488
+ returnout = AppendFilesWithContentFromTarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, True)
7149
7489
  if(not returnout):
7150
7490
  break
7151
7491
  else:
@@ -7155,7 +7495,7 @@ def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype=
7155
7495
  return True
7156
7496
  return returnout
7157
7497
 
7158
- def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7498
+ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
7159
7499
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
7160
7500
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
7161
7501
  get_in_ext = os.path.splitext(outfile)
@@ -7197,8 +7537,7 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
7197
7537
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
7198
7538
  except PermissionError:
7199
7539
  return False
7200
- AppendFilesWithContentFromZipFile(infiles, fp, extradata, jsondata, compression,
7201
- compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, verbose)
7540
+ AppendFilesWithContentFromZipFile(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, verbose)
7202
7541
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
7203
7542
  fp = CompressOpenFileAlt(
7204
7543
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7228,12 +7567,12 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
7228
7567
  fp.close()
7229
7568
  return True
7230
7569
 
7231
- def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7570
+ def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
7232
7571
  if not isinstance(infiles, list):
7233
7572
  infiles = [infiles]
7234
7573
  returnout = False
7235
7574
  for infileslist in infiles:
7236
- returnout = AppendFilesWithContentFromZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
7575
+ returnout = AppendFilesWithContentFromZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, True)
7237
7576
  if(not returnout):
7238
7577
  break
7239
7578
  else:
@@ -7244,11 +7583,10 @@ def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype=
7244
7583
  return returnout
7245
7584
 
7246
7585
  if(not rarfile_support):
7247
- def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7586
+ def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
7248
7587
  return False
7249
-
7250
- if(rarfile_support):
7251
- def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7588
+ else:
7589
+ def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
7252
7590
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
7253
7591
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
7254
7592
  get_in_ext = os.path.splitext(outfile)
@@ -7290,8 +7628,7 @@ if(rarfile_support):
7290
7628
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
7291
7629
  except PermissionError:
7292
7630
  return False
7293
- AppendFilesWithContentFromRarFile(infiles, fp, extradata, jsondata, compression,
7294
- compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, verbose)
7631
+ AppendFilesWithContentFromRarFile(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, verbose)
7295
7632
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
7296
7633
  fp = CompressOpenFileAlt(
7297
7634
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7321,12 +7658,12 @@ if(rarfile_support):
7321
7658
  fp.close()
7322
7659
  return True
7323
7660
 
7324
- def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7661
+ def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
7325
7662
  if not isinstance(infiles, list):
7326
7663
  infiles = [infiles]
7327
7664
  returnout = False
7328
7665
  for infileslist in infiles:
7329
- returnout = AppendFilesWithContentFromRarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
7666
+ returnout = AppendFilesWithContentFromRarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, True)
7330
7667
  if(not returnout):
7331
7668
  break
7332
7669
  else:
@@ -7337,11 +7674,10 @@ def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype=
7337
7674
  return returnout
7338
7675
 
7339
7676
  if(not py7zr_support):
7340
- def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7677
+ def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
7341
7678
  return False
7342
-
7343
- if(py7zr_support):
7344
- def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7679
+ else:
7680
+ def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
7345
7681
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
7346
7682
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
7347
7683
  get_in_ext = os.path.splitext(outfile)
@@ -7383,8 +7719,7 @@ if(py7zr_support):
7383
7719
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
7384
7720
  except PermissionError:
7385
7721
  return False
7386
- AppendFilesWithContentFromSevenZipFile(infiles, fp, extradata, jsondata, compression,
7387
- compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, verbose)
7722
+ AppendFilesWithContentFromSevenZipFile(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, verbose)
7388
7723
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
7389
7724
  fp = CompressOpenFileAlt(
7390
7725
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7414,12 +7749,12 @@ if(py7zr_support):
7414
7749
  fp.close()
7415
7750
  return True
7416
7751
 
7417
- def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7752
+ def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
7418
7753
  if not isinstance(infiles, list):
7419
7754
  infiles = [infiles]
7420
7755
  returnout = False
7421
7756
  for infileslist in infiles:
7422
- returnout = AppendFilesWithContentFromSevenZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
7757
+ returnout = AppendFilesWithContentFromSevenZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, True)
7423
7758
  if(not returnout):
7424
7759
  break
7425
7760
  else:
@@ -7429,9 +7764,9 @@ def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmt
7429
7764
  return True
7430
7765
  return returnout
7431
7766
 
7432
- def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
7433
- inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
7434
- return AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
7767
+ def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
7768
+ inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs, saltkey, False)
7769
+ return AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose, returnfp)
7435
7770
 
7436
7771
 
7437
7772
  def PrintPermissionString(fchmode, ftype):
@@ -9172,58 +9507,54 @@ def CheckSumSupport(checkfor, guaranteed=True):
9172
9507
  return False
9173
9508
 
9174
9509
 
9175
- def PackFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
9176
- return AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
9510
+ def PackFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
9511
+ return AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose, returnfp)
9177
9512
 
9178
- def PackStackedFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
9179
- return AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
9513
+ def PackStackedFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
9514
+ return AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose, returnfp)
9180
9515
 
9181
- def PackFoxFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9182
- return PackFoxFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, returnfp)
9516
+ def PackFoxFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
9517
+ return PackFoxFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, {}, formatspecs, saltkey, verbose, returnfp)
9183
9518
 
9184
9519
 
9185
- def PackFoxFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9186
- return AppendFilesWithContentFromTarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
9520
+ def PackFoxFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
9521
+ return AppendFilesWithContentFromTarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, returnfp)
9187
9522
 
9188
9523
 
9189
- def PackFoxFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9190
- return AppendFilesWithContentFromZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
9524
+ def PackFoxFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
9525
+ return AppendFilesWithContentFromZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, returnfp)
9191
9526
 
9192
9527
 
9193
9528
  if(not rarfile_support):
9194
- def PackFoxFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9529
+ def PackFoxFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
9195
9530
  return False
9196
-
9197
- if(rarfile_support):
9198
- def PackFoxFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9199
- return AppendFilesWithContentFromRarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
9531
+ else:
9532
+ def PackFoxFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
9533
+ return AppendFilesWithContentFromRarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, returnfp)
9200
9534
 
9201
9535
 
9202
9536
  if(not py7zr_support):
9203
- def PackFoxFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9537
+ def PackFoxFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
9204
9538
  return False
9205
-
9206
- if(py7zr_support):
9207
- def PackFoxFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9208
- return AppendFilesWithContentFromSevenZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
9539
+ else:
9540
+ def PackFoxFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
9541
+ return AppendFilesWithContentFromSevenZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, returnfp)
9209
9542
 
9210
9543
 
9211
- def PackFoxFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9544
+ def PackFoxFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
9212
9545
  checkcompressfile = CheckCompressionSubType(infile, formatspecs, 0, True)
9213
9546
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
9214
9547
  formatspecs = formatspecs[checkcompressfile]
9215
- if(verbose):
9216
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
9217
9548
  if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
9218
- return PackFoxFileFromTarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
9549
+ return PackFoxFileFromTarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
9219
9550
  elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
9220
- return PackFoxFileFromZipFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
9551
+ return PackFoxFileFromZipFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
9221
9552
  elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
9222
- return PackFoxFileFromRarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
9553
+ return PackFoxFileFromRarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
9223
9554
  elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
9224
- return PackFoxFileFromSevenZipFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
9555
+ return PackFoxFileFromSevenZipFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
9225
9556
  elif(IsSingleDict(formatspecs) and checkcompressfile == formatspecs['format_magic']):
9226
- return RePackFoxFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, False, 0, 0, checksumtype, False, extradata, jsondata, formatspecs, verbose, returnfp)
9557
+ return RePackFoxFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, False, 0, 0, checksumtype, False, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
9227
9558
  else:
9228
9559
  return False
9229
9560
  return False
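+ # Usage sketch (file names are hypothetical; saltkey stays None unless keyed
+ # checksums are wanted):
+ #   PackFoxFileFromInFile("backup.tar", "backup.fox", verbose=True)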
@@ -9292,19 +9623,12 @@ def FoxFileArrayValidate(listarrayfiles, verbose=False):
9292
9623
  ok = False
9293
9624
  return ok
9294
9625
 
9295
- def FoxFileValidate(infile, fmttype="auto", filestart=0,
9296
- formatspecs=__file_format_multi_dict__, # keep default like original
9297
- seektoend=False, verbose=False, returnfp=False):
9298
- if(verbose):
9299
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
9300
-
9626
+ def FoxFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
9301
9627
  if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
9302
9628
  formatspecs = formatspecs[fmttype]
9303
9629
  elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
9304
9630
  fmttype = "auto"
9305
-
9306
9631
  curloc = filestart
9307
-
9308
9632
  if(hasattr(infile, "read") or hasattr(infile, "write")):
9309
9633
  curloc = infile.tell()
9310
9634
  fp = infile
@@ -9320,7 +9644,6 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
9320
9644
  if(not fp):
9321
9645
  return False
9322
9646
  fp.seek(filestart, 0)
9323
-
9324
9647
  elif(infile == "-"):
9325
9648
  fp = MkTempFile()
9326
9649
  shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
@@ -9332,7 +9655,6 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
9332
9655
  if(not fp):
9333
9656
  return False
9334
9657
  fp.seek(filestart, 0)
9335
-
9336
9658
  elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
9337
9659
  fp = MkTempFile()
9338
9660
  fp.write(infile)
@@ -9344,7 +9666,6 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
9344
9666
  if(not fp):
9345
9667
  return False
9346
9668
  fp.seek(filestart, 0)
9347
-
9348
9669
  elif(re.findall(__download_proto_support__, infile)):
9349
9670
  fp = download_file_from_internet_file(infile)
9350
9671
  fp = UncompressFileAlt(fp, formatspecs, filestart)
@@ -9355,7 +9676,6 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
9355
9676
  if(not fp):
9356
9677
  return False
9357
9678
  fp.seek(filestart, 0)
9358
-
9359
9679
  else:
9360
9680
  infile = RemoveWindowsPath(infile)
9361
9681
  checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
@@ -9402,11 +9722,9 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
9402
9722
  fp.seek(0, 2)
9403
9723
  except (OSError, ValueError):
9404
9724
  SeekToEndOfFile(fp)
9405
-
9406
9725
  CatSize = fp.tell()
9407
9726
  CatSizeEnd = CatSize
9408
9727
  fp.seek(curloc, 0)
9409
-
9410
9728
  if(IsNestedDict(formatspecs)):
9411
9729
  compresschecking = CheckCompressionType(fp, formatspecs, filestart, False)
9412
9730
  if(compresschecking not in formatspecs):
@@ -9414,43 +9732,59 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
9414
9732
  else:
9415
9733
  formatspecs = formatspecs[compresschecking]
9416
9734
  fp.seek(filestart, 0)
9417
-
9418
9735
  inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
9419
9736
  formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
9420
9737
  formdelsize = len(formatspecs['format_delimiter'])
9421
9738
  formdel = fp.read(formdelsize).decode("UTF-8")
9422
-
9423
9739
  if(formstring != formatspecs['format_magic'] + inheaderver):
9424
9740
  return False
9425
9741
  if(formdel != formatspecs['format_delimiter']):
9426
9742
  return False
9427
-
9428
- if(formatspecs['new_style']):
9743
+ headeroffset = fp.tell()
9744
+ if(__use_new_style__):
9429
9745
  inheader = ReadFileHeaderDataBySize(fp, formatspecs['format_delimiter'])
9430
9746
  else:
9431
9747
  inheader = ReadFileHeaderDataWoSize(fp, formatspecs['format_delimiter'])
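+ # __use_new_style__ now selects the header reader globally rather than per-format;
+ # the BySize variant presumably consumes a length-prefixed header, while the
+ # WoSize fallback scans for the delimiter instead.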
9432
-
9433
- fnumextrafieldsize = int(inheader[6], 16)
9434
- fnumextrafields = int(inheader[7], 16)
9435
- extrastart = 8
9748
+ fnumextrafieldsize = int(inheader[15], 16)
9749
+ fnumextrafields = int(inheader[16], 16)
9750
+ extrastart = 17
9436
9751
  extraend = extrastart + fnumextrafields
9437
9752
  formversion = re.findall("([\\d]+)", formstring)
9438
9753
  fheadsize = int(inheader[0], 16)
9439
9754
  fnumfields = int(inheader[1], 16)
9440
- fhencoding = inheader[2]
9441
- fostype = inheader[3]
9442
- fpythontype = inheader[4]
9443
- fnumfiles = int(inheader[5], 16)
9755
+ fnumfiles = int(inheader[8], 16)
9444
9756
  fprechecksumtype = inheader[-2]
9445
9757
  fprechecksum = inheader[-1]
9446
-
9758
+ outfseeknextfile = inheader[9]
9759
+ fjsonsize = int(inheader[12], 16)
9760
+ fjsonchecksumtype = inheader[13]
9761
+ fjsonchecksum = inheader[14]
9762
+ headerjsonoffset = fp.tell()
9763
+ fprejsoncontent = fp.read(fjsonsize)
9764
+ jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
9765
+ # Next seek directive
9766
+ if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
9767
+ fseeknextasnum = int(outfseeknextfile.replace("+", ""))
9768
+ if(abs(fseeknextasnum) == 0):
9769
+ pass
9770
+ fp.seek(fseeknextasnum, 1)
9771
+ elif(re.findall(r"^\-([0-9]+)", outfseeknextfile)):
9772
+ fseeknextasnum = int(outfseeknextfile)
9773
+ if(abs(fseeknextasnum) == 0):
9774
+ pass
9775
+ fp.seek(fseeknextasnum, 1)
9776
+ elif(re.findall(r"^([0-9]+)", outfseeknextfile)):
9777
+ fseeknextasnum = int(outfseeknextfile)
9778
+ if(abs(fseeknextasnum) == 0):
9779
+ pass
9780
+ fp.seek(fseeknextasnum, 0)
9781
+ else:
9782
+ return False
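+ # Seek directives encode where the next record starts: "+N" and "-N" are
+ # relative seeks from the current position, while a bare "N" is an absolute
+ # offset; e.g. "+0" simply leaves the pointer at the byte after the JSON block.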
9447
9783
  il = 0
9448
- headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
9449
- newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
9450
-
9784
+ headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs, saltkey)
9785
+ newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
9451
9786
  valid_archive = True
9452
9787
  invalid_archive = False
9453
-
9454
9788
  if(verbose):
9455
9789
  if(hasattr(infile, "read") or hasattr(infile, "write")):
9456
9790
  try:
@@ -9462,78 +9796,56 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
9462
9796
  else:
9463
9797
  VerbosePrintOut(infile)
9464
9798
  VerbosePrintOut("Number of Records " + str(fnumfiles))
9465
-
9466
9799
  if(headercheck):
9467
9800
  if(verbose):
9468
- VerbosePrintOut("File Header Checksum Passed at offset " + str(0))
9801
+ VerbosePrintOut("File Header Checksum Passed at offset " + str(headeroffset))
9469
9802
  VerbosePrintOut("'" + fprechecksum + "' == " + "'" + newfcs + "'")
9470
9803
  else:
9471
9804
  # always flip flags, even when not verbose
9472
9805
  valid_archive = False
9473
9806
  invalid_archive = True
9474
9807
  if(verbose):
9475
- VerbosePrintOut("File Header Checksum Failed at offset " + str(0))
9808
+ VerbosePrintOut("File Header Checksum Failed at offset " + str(headeroffset))
9476
9809
  VerbosePrintOut("'" + fprechecksum + "' != " + "'" + newfcs + "'")
9477
-
9810
+ if(fjsonsize > 0):
9811
+ if(CheckChecksums(jsonfcs, fjsonchecksum)):
9812
+ if(verbose):
9813
+ VerbosePrintOut("File JSON Data Checksum Passed at offset " + str(headerjsonoffset))
9814
+ VerbosePrintOut("'" + outfjsonchecksum + "' == " + "'" + injsonfcs + "'")
9815
+ else:
9816
+ valid_archive = False
9817
+ invalid_archive = True
9818
+ if(verbose):
9819
+ VerbosePrintOut("File JSON Data Checksum Error at offset " + str(headerjsonoffset))
9820
+ VerbosePrintOut("'" + outfjsonchecksum + "' != " + "'" + injsonfcs + "'")
9478
9821
  if(verbose):
9479
9822
  VerbosePrintOut("")
9480
-
9481
9823
  # Iterate either until EOF (seektoend) or fixed count
9482
9824
  while (fp.tell() < CatSizeEnd) if seektoend else (il < fnumfiles):
9483
9825
  outfhstart = fp.tell()
9484
- if(formatspecs['new_style']):
9826
+ if(__use_new_style__):
9485
9827
  inheaderdata = ReadFileHeaderDataBySize(fp, formatspecs['format_delimiter'])
9486
9828
  else:
9487
9829
  inheaderdata = ReadFileHeaderDataWoSize(fp, formatspecs['format_delimiter'])
9488
9830
 
9489
9831
  if(len(inheaderdata) == 0):
9490
9832
  break
9491
-
9492
- outfheadsize = int(inheaderdata[0], 16)
9493
- outfnumfields = int(inheaderdata[1], 16)
9494
- outftype = int(inheaderdata[2], 16)
9495
- # FIX: these must come from inheaderdata, not inheader
9496
- outfostype = inheaderdata[3]
9497
- outfencoding = inheaderdata[4]
9498
-
9499
9833
  if(re.findall("^[.|/]", inheaderdata[5])):
9500
9834
  outfname = inheaderdata[5]
9501
9835
  else:
9502
9836
  outfname = "./" + inheaderdata[5]
9503
9837
  outfbasedir = os.path.dirname(outfname)
9504
-
9505
- outflinkname = inheaderdata[6]
9506
9838
  outfsize = int(inheaderdata[7], 16)
9507
- outfatime = int(inheaderdata[8], 16)
9508
- outfmtime = int(inheaderdata[9], 16)
9509
- outfctime = int(inheaderdata[10], 16)
9510
- outfbtime = int(inheaderdata[11], 16)
9511
- outfmode = int(inheaderdata[12], 16)
9512
- outfchmode = stat.S_IMODE(outfmode)
9513
- outftypemod = stat.S_IFMT(outfmode)
9514
- outfwinattributes = int(inheaderdata[13], 16)
9515
- outfcompression = inheaderdata[14]
9516
- outfcsize = int(inheaderdata[15], 16)
9517
- outfuid = int(inheaderdata[16], 16)
9518
- outfuname = inheaderdata[17]
9519
- outfgid = int(inheaderdata[18], 16)
9520
- outfgname = inheaderdata[19]
9521
- fid = int(inheaderdata[20], 16)
9522
- finode = int(inheaderdata[21], 16)
9523
- flinkcount = int(inheaderdata[22], 16)
9524
- outfdev = int(inheaderdata[23], 16)
9525
- outfdev_minor = int(inheaderdata[24], 16)
9526
- outfdev_major = int(inheaderdata[25], 16)
9527
- outfseeknextfile = inheaderdata[26]
9528
- outfjsontype = inheaderdata[27]
9529
- outfjsonlen = int(inheaderdata[28], 16)
9530
- outfjsonsize = int(inheaderdata[29], 16)
9531
- outfjsonchecksumtype = inheaderdata[30]
9532
- outfjsonchecksum = inheaderdata[31]
9533
-
9839
+ outfcompression = inheaderdata[17]
9840
+ outfcsize = int(inheaderdata[18], 16)
9841
+ fid = int(inheaderdata[23], 16)
9842
+ finode = int(inheaderdata[24], 16)
9843
+ outfseeknextfile = inheaderdata[28]
9844
+ outfjsonsize = int(inheaderdata[31], 16)
9845
+ outfjsonchecksumtype = inheaderdata[32]
9846
+ outfjsonchecksum = inheaderdata[33]
9534
9847
  outfhend = fp.tell() - 1 # (kept for parity; not used)
9535
9848
  outfjstart = fp.tell()
9536
-
9537
9849
  # Read JSON bytes; compute checksum on bytes for robustness
9538
9850
  outfprejsoncontent_bytes = fp.read(outfjsonsize)
9539
9851
  # Decode for any downstream text needs (not used further here)
@@ -9541,27 +9853,21 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
9541
9853
  outfprejsoncontent = outfprejsoncontent_bytes.decode("UTF-8")
9542
9854
  except Exception:
9543
9855
  outfprejsoncontent = None
9544
-
9545
9856
  outfjend = fp.tell()
9546
9857
  fp.seek(len(formatspecs['format_delimiter']), 1)
9547
-
9548
- injsonfcs = GetFileChecksum(outfprejsoncontent_bytes, outfjsonchecksumtype, True, formatspecs)
9549
-
9550
- outfextrasize = int(inheaderdata[32], 16)
9551
- outfextrafields = int(inheaderdata[33], 16)
9858
+ injsonfcs = GetFileChecksum(outfprejsoncontent_bytes, outfjsonchecksumtype, True, formatspecs, saltkey)
9859
+ outfextrafields = int(inheaderdata[35], 16)
9552
9860
  extrafieldslist = []
9553
- extrastart = 34
9861
+ extrastart = 36
9554
9862
  extraend = extrastart + outfextrafields
9555
-
9556
9863
  outfcs = inheaderdata[-2].lower()
9557
9864
  outfccs = inheaderdata[-1].lower()
9558
- infcs = GetHeaderChecksum(inheaderdata[:-2], inheaderdata[-4].lower(), True, formatspecs)
9559
-
9865
+ infcs = GetHeaderChecksum(inheaderdata[:-2], inheaderdata[-4].lower(), True, formatspecs, saltkey)
9560
9866
  if(verbose):
9561
9867
  VerbosePrintOut(outfname)
9562
9868
  VerbosePrintOut("Record Number " + str(il) + "; File ID " + str(fid) + "; iNode Number " + str(finode))
9563
9869
 
9564
- if(hmac.compare_digest(outfcs, infcs)):
9870
+ if(CheckChecksums(outfcs, infcs)):
9565
9871
  if(verbose):
9566
9872
  VerbosePrintOut("File Header Checksum Passed at offset " + str(outfhstart))
9567
9873
  VerbosePrintOut("'" + outfcs + "' == " + "'" + infcs + "'")
@@ -9571,9 +9877,8 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
9571
9877
  if(verbose):
9572
9878
  VerbosePrintOut("File Header Checksum Failed at offset " + str(outfhstart))
9573
9879
  VerbosePrintOut("'" + outfcs + "' != " + "'" + infcs + "'")
9574
-
9575
9880
  if(outfjsonsize > 0):
9576
- if(hmac.compare_digest(injsonfcs, outfjsonchecksum)):
9881
+ if(CheckChecksums(injsonfcs, outfjsonchecksum)):
9577
9882
  if(verbose):
9578
9883
  VerbosePrintOut("File JSON Data Checksum Passed at offset " + str(outfjstart))
9579
9884
  VerbosePrintOut("'" + outfjsonchecksum + "' == " + "'" + injsonfcs + "'")
@@ -9583,21 +9888,19 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
9583
9888
  if(verbose):
9584
9889
  VerbosePrintOut("File JSON Data Checksum Error at offset " + str(outfjstart))
9585
9890
  VerbosePrintOut("'" + outfjsonchecksum + "' != " + "'" + injsonfcs + "'")
9586
-
9587
9891
  outfcontentstart = fp.tell()
9588
9892
  outfcontents = b"" # FIX: bytes for Py2/3 consistency
9589
9893
  pyhascontents = False
9590
-
9591
9894
  if(outfsize > 0):
9592
9895
  if(outfcompression == "none" or outfcompression == "" or outfcompression == "auto"):
9593
9896
  outfcontents = fp.read(outfsize)
9594
9897
  else:
9595
9898
  outfcontents = fp.read(outfcsize)
9596
9899
 
9597
- infccs = GetFileChecksum(outfcontents, inheaderdata[-3].lower(), False, formatspecs)
9900
+ infccs = GetFileChecksum(outfcontents, inheaderdata[-3].lower(), False, formatspecs, saltkey)
9598
9901
  pyhascontents = True
9599
9902
 
9600
- if(hmac.compare_digest(outfccs, infccs)):
9903
+ if(CheckChecksums(outfccs, infccs)):
9601
9904
  if(verbose):
9602
9905
  VerbosePrintOut("File Content Checksum Passed at offset " + str(outfcontentstart))
9603
9906
  VerbosePrintOut("'" + outfccs + "' == " + "'" + infccs + "'")
@@ -9607,10 +9910,8 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
9607
9910
  if(verbose):
9608
9911
  VerbosePrintOut("File Content Checksum Failed at offset " + str(outfcontentstart))
9609
9912
  VerbosePrintOut("'" + outfccs + "' != " + "'" + infccs + "'")
9610
-
9611
9913
  if(verbose):
9612
9914
  VerbosePrintOut("")
9613
-
9614
9915
  # Next seek directive
9615
9916
  if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
9616
9917
  fseeknextasnum = int(outfseeknextfile.replace("+", ""))
@@ -9629,9 +9930,7 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
9629
9930
  fp.seek(fseeknextasnum, 0)
9630
9931
  else:
9631
9932
  return False
9632
-
9633
9933
  il = il + 1
9634
-
9635
9934
  if(valid_archive):
9636
9935
  if(returnfp):
9637
9936
  return fp
@@ -9643,34 +9942,34 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
9643
9942
  return False
9644
9943
 
9645
9944
 
9646
- def FoxFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
9647
- return FoxFileValidate(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
9945
+ def FoxFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
9946
+ return FoxFileValidate(infile, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
9648
9947
 
9649
9948
 
9650
- def FoxFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
9949
+ def FoxFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
9651
9950
  if(isinstance(infile, (list, tuple, ))):
9652
9951
  pass
9653
9952
  else:
9654
9953
  infile = [infile]
9655
9954
  outretval = True
9656
9955
  for curfname in infile:
9657
- curretfile = FoxFileValidate(curfname, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
9956
+ curretfile = FoxFileValidate(curfname, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
9658
9957
  if(not curretfile):
9659
9958
  outretval = False
9660
9959
  return outretval
9661
9960
 
9662
- def FoxFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
9663
- return FoxFileValidateMultiple(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
9961
+ def FoxFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
9962
+ return FoxFileValidateMultiple(infile, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
9664
9963
 
9665
9964
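Every validator in this family gains saltkey between formatspecs and seektoend, so positional callers need one extra argument. Hedged usage, with hypothetical file names:

    if FoxFileValidate("backup.fox", "auto", 0, __file_format_multi_dict__, b"secret", False, True):
        print("archive intact")
    ok_all = FoxFileValidateMultiple(["a.fox", "b.fox"], saltkey=b"secret")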
 
9666
- def StackedFoxFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
9965
+ def StackedFoxFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
9667
9966
  outretval = []
9668
9967
  outstartfile = filestart
9669
9968
  outfsize = float('inf')
9670
9969
  while True:
9671
9970
  if outstartfile >= outfsize: # stop when function signals False
9672
9971
  break
9673
- is_valid_file = FoxFileValidate(infile, fmttype, outstartfile, formatspecs, seektoend, verbose, True)
9972
+ is_valid_file = FoxFileValidate(infile, fmttype, outstartfile, formatspecs, saltkey, seektoend, verbose, True)
9674
9973
  if is_valid_file is False: # stop when function signals False
9675
9974
  outretval.append(is_valid_file)
9676
9975
  break
@@ -9687,33 +9986,36 @@ def StackedFoxFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__fi
9687
9986
  if(returnfp):
9688
9987
  return infile
9689
9988
  else:
9690
- infile.close()
9989
+ try:
9990
+ infile.close()
9991
+ except AttributeError:
9992
+ return False
9691
9993
  return outretval
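StackedFoxFileValidate relies on returnfp=True so each FoxFileValidate call leaves the shared file pointer just past the archive it checked, letting several archives stacked back to back in one file be validated in sequence; the result is one entry per stacked archive. Roughly (path hypothetical):

    results = StackedFoxFileValidate("stacked.fox", saltkey=b"secret")
    # e.g. a list with one validation result per stacked archive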
9692
9994
 
9693
9995
 
9694
9996
 
9695
- def StackedFoxFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
9696
- return StackedFoxFileValidate(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
9997
+ def StackedFoxFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
9998
+ return StackedFoxFileValidate(infile, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
9697
9999
 
9698
10000
 
9699
- def StackedFoxFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
10001
+ def StackedFoxFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
9700
10002
  if(isinstance(infile, (list, tuple, ))):
9701
10003
  pass
9702
10004
  else:
9703
10005
  infile = [infile]
9704
10006
  outretval = True
9705
10007
  for curfname in infile:
9706
- curretfile = StackedFoxFileValidate(curfname, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
10008
+ curretfile = StackedFoxFileValidate(curfname, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
9707
10009
  if(not curretfile):
9708
10010
  outretval = False
9709
10011
  return outretval
9710
10012
 
9711
- def StackedFoxFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
9712
- return StackedFoxFileValidateMultiple(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
10013
+ def StackedFoxFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
10014
+ return StackedFoxFileValidateMultiple(infile, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
9713
10015
 
9714
10016
 
9715
- def FoxFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
9716
- outfp = ReadInFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
10017
+ def FoxFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
10018
+ outfp = ReadInFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
9717
10019
  if not returnfp:
9718
10020
  for item in outfp:
9719
10021
  fp = item.get('fp')
@@ -9727,26 +10029,26 @@ def FoxFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0,
9727
10029
  return outfp
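FoxFileToArray forwards saltkey into ReadInFileWithContentToArray and, unless returnfp is requested, closes each entry's lingering 'fp' handle before returning. A hedged call (path hypothetical):

    arr = FoxFileToArray("backup.fox", saltkey=b"secret")
    # positionally, saltkey now sits between formatspecs and seektoend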
9728
10030
 
9729
10031
 
9730
- def MultipleFoxFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
10032
+ def MultipleFoxFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
9731
10033
  if(isinstance(infile, (list, tuple, ))):
9732
10034
  pass
9733
10035
  else:
9734
10036
  infile = [infile]
9735
10037
  outretval = []
9736
10038
  for curfname in infile:
9737
- outretval.append(FoxFileToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp))
10039
+ outretval.append(FoxFileToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend, returnfp))
9738
10040
  return outretval
9739
10041
 
9740
- def MultipleFoxFilesToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
9741
- return MultipleFoxFileToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
10042
+ def MultipleFoxFilesToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
10043
+ return MultipleFoxFileToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
9742
10044
 
9743
10045
 
9744
- def FoxFileStringToArray(instr, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
10046
+ def FoxFileStringToArray(instr, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
9745
10047
  checkcompressfile = CheckCompressionSubType(instr, formatspecs, filestart, True)  # FIX: the parameter here is instr, not infile
9746
10048
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
9747
10049
  formatspecs = formatspecs[checkcompressfile]
9748
10050
  fp = MkTempFile(instr)
9749
- listarrayfiles = FoxFileToArray(fp, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
10051
+ listarrayfiles = FoxFileToArray(fp, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
9750
10052
  return listarrayfiles
9751
10053
 
9752
10054
 
@@ -9755,9 +10057,8 @@ def TarFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile
9755
10057
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
9756
10058
  formatspecs = formatspecs[checkcompressfile]
9757
10059
  fp = MkTempFile()
9758
- fp = PackFoxFileFromTarFile(
9759
- infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
9760
- listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
10060
+ fp = PackFoxFileFromTarFile(infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, None, False, True)
10061
+ listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, None, seektoend, returnfp)
9761
10062
  return listarrayfiles
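The foreign-archive readers (tar and zip here, rar and 7z in the optional blocks below) repack into a temporary FoxFile and then parse it, passing None in the new saltkey slot of both calls, so converted archives are always unsalted. For instance (tarball name hypothetical):

    arr = TarFileToArray("backup.tar")  # repack via PackFoxFileFromTarFile, then parse unsalted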
9762
10063
 
9763
10064
 
@@ -9766,9 +10067,8 @@ def ZipFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile
9766
10067
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
9767
10068
  formatspecs = formatspecs[checkcompressfile]
9768
10069
  fp = MkTempFile()
9769
- fp = PackFoxFileFromZipFile(
9770
- infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
9771
- listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
10070
+ fp = PackFoxFileFromZipFile(infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, None, False, True)
10071
+ listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, None, seektoend, returnfp)
9772
10072
  return listarrayfiles
9773
10073
 
9774
10074
 
@@ -9782,9 +10082,8 @@ if(rarfile_support):
9782
10082
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
9783
10083
  formatspecs = formatspecs[checkcompressfile]
9784
10084
  fp = MkTempFile()
9785
- fp = PackFoxFileFromRarFile(
9786
- infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
9787
- listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
10085
+ fp = PackFoxFileFromRarFile(infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, None, False, True)
10086
+ listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, None, seektoend, returnfp)
9788
10087
  return listarrayfiles
9789
10088
 
9790
10089
  if(not py7zr_support):
@@ -9797,13 +10096,12 @@ if(py7zr_support):
9797
10096
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
9798
10097
  formatspecs = formatspecs[checkcompressfile]
9799
10098
  fp = MkTempFile()
9800
- fp = PackFoxFileFromSevenZipFile(
9801
- infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
9802
- listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
10099
+ fp = PackFoxFileFromSevenZipFile(infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, None, False, True)
10100
+ listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, None, seektoend, returnfp)
9803
10101
  return listarrayfiles
9804
10102
 
9805
10103
 
9806
- def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
10104
+ def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
9807
10105
  checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
9808
10106
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
9809
10107
  formatspecs = formatspecs[checkcompressfile]
@@ -9816,17 +10114,16 @@ def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, c
9816
10114
  elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
9817
10115
  return SevenZipFileToArray(infile, seekstart, seekend, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
9818
10116
  elif(checkcompressfile == formatspecs['format_magic']):
9819
- return FoxFileToArray(infile, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
10117
+ return FoxFileToArray(infile, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
9820
10118
  else:
9821
10119
  return False
9822
10120
  return False
9823
10121
 
9824
10122
 
9825
- def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, skipchecksum=False, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
10123
+ def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, saltkey=None, skipchecksum=False, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
9826
10124
  outarray = MkTempFile()
9827
- packform = PackFoxFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
9828
- compressionlevel, followlink, checksumtype, extradata, formatspecs, verbose, True)
9829
- listarrayfiles = FoxFileToArray(outarray, "auto", filestart, seekstart, seekend, listonly, True, True, skipchecksum, formatspecs, seektoend, returnfp)
10125
+ packform = PackFoxFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, followlink, checksumtype, extradata, formatspecs, saltkey, verbose, True)
10126
+ listarrayfiles = FoxFileToArray(outarray, "auto", filestart, seekstart, seekend, listonly, True, True, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
9830
10127
  return listarrayfiles
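ListDirToArray packs and re-parses with the same saltkey, keeping the round trip consistent when salting is enabled. Hedged usage (directory name hypothetical):

    arr = ListDirToArray(["./docs"], saltkey=b"secret", verbose=True)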
9831
10128
 
9832
10129
 
@@ -9948,12 +10245,12 @@ def FoxFileArrayToArrayIndex(inarray, returnfp=False):
9948
10245
  return out
9949
10246
 
9950
10247
 
9951
- def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=None, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=None, skipchecksum=False, extradata=None, jsondata=None, formatspecs=None, seektoend=False, verbose=False, returnfp=False):
10248
+ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, seektoend=False, verbose=False, returnfp=False):
9952
10249
  # ---------- Safe defaults ----------
9953
10250
  if compressionuselist is None:
9954
10251
  compressionuselist = compressionlistalt
9955
10252
  if checksumtype is None:
9956
- checksumtype = ["md5", "md5", "md5", "md5"]
10253
+ checksumtype = ["md5", "md5", "md5", "md5", "md5"]
9957
10254
  if extradata is None:
9958
10255
  extradata = []
9959
10256
  if jsondata is None:
@@ -9972,7 +10269,7 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
9972
10269
  infile = RemoveWindowsPath(infile)
9973
10270
  listarrayfileslist = FoxFileToArray(
9974
10271
  infile, "auto", filestart, seekstart, seekend,
9975
- False, True, True, skipchecksum, formatspecs, seektoend, False
10272
+ False, True, True, skipchecksum, formatspecs, insaltkey, seektoend, False
9976
10273
  )
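RePackFoxFile reads with insaltkey and writes with outsaltkey, so an archive can be re-salted in a single pass. A hedged sketch (file names hypothetical):

    RePackFoxFile("old.fox", "new.fox", insaltkey=b"oldsecret", outsaltkey=b"newsecret")
    RePackFoxFile("plain.fox", "salted.fox", outsaltkey=b"secret")  # salt an unsalted archive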
9977
10274
 
9978
10275
  # ---------- Format specs selection ----------
@@ -10039,9 +10336,6 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
10039
10336
  if (compression is None) or (compressionuselist and compression not in compressionuselist):
10040
10337
  compression = "auto"
10041
10338
 
10042
- if verbose:
10043
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
10044
-
10045
10339
  # No files?
10046
10340
  if not listarrayfiles.get('ffilelist'):
10047
10341
  return False
@@ -10054,7 +10348,7 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
10054
10348
  if lenlist != fnumfiles:
10055
10349
  fnumfiles = lenlist
10056
10350
 
10057
- AppendFileHeader(fp, fnumfiles, listarrayfiles.get('fencoding', 'utf-8'), [], checksumtype[0], formatspecs)
10351
+ AppendFileHeader(fp, fnumfiles, listarrayfiles.get('fencoding', 'utf-8'), listarrayfiles['fextradata'], listarrayfiles['fjsondata'], [checksumtype[0], checksumtype[1]], formatspecs, outsaltkey)
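checksumtype now carries five entries instead of four: this call consumes slots [0] and [1] for the archive header and its JSON block, while the per-file AppendFileHeaderWithContent call later in the function consumes [2], [3] and [4]. A plausible reading of the slots (the labels are inferred from the two call sites, not documented names):

    checksumtype = ["md5",  # [0] archive header
                    "md5",  # [1] archive JSON data
                    "md5",  # [2] file header
                    "md5",  # [3] file JSON data
                    "md5"]  # [4] file content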
10058
10352
 
10059
10353
  # loop counters
10060
10354
  lcfi = 0
@@ -10084,6 +10378,9 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
10084
10378
  # fields (hex-encoded where expected)
10085
10379
  fheadersize = format(int(cur_entry['fheadersize']), 'x').lower()
10086
10380
  fsize = format(int(cur_entry['fsize']), 'x').lower()
10381
+ fblksize = format(int(cur_entry['fblksize']), 'x').lower()
10382
+ fblocks = format(int(cur_entry['fblocks']), 'x').lower()
10383
+ fflags = format(int(cur_entry['fflags']), 'x').lower()
10087
10384
  flinkname = cur_entry['flinkname']
10088
10385
  fatime = format(int(cur_entry['fatime']), 'x').lower()
10089
10386
  fmtime = format(int(cur_entry['fmtime']), 'x').lower()
@@ -10102,8 +10399,7 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
10102
10399
  fcompression = cur_entry['fcompression']
10103
10400
  fcsize = format(int(cur_entry['fcsize']), 'x').lower()
10104
10401
  fdev = format(int(cur_entry['fdev']), 'x').lower()
10105
- fdev_minor = format(int(cur_entry['fminor']), 'x').lower()
10106
- fdev_major = format(int(cur_entry['fmajor']), 'x').lower()
10402
+ frdev = format(int(cur_entry['frdev']), 'x').lower()
10107
10403
  fseeknextfile = cur_entry['fseeknextfile']
10108
10404
 
10109
10405
  # extra fields sizing
@@ -10114,6 +10410,12 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
10114
10410
  # extradata/jsondata defaults per file
10115
10411
  if not followlink and len(extradata) <= 0:
10116
10412
  extradata = cur_entry['fextradata']
10413
+
10414
+ fvendorfields = cur_entry['fvendorfields']
10415
+ ffvendorfieldslist = []
10416
+ if(fvendorfields>0):
10417
+ ffvendorfieldslist = cur_entry['fvendorfieldslist']
10418
+
10117
10419
  if not followlink and len(jsondata) <= 0:
10118
10420
  jsondata = cur_entry['fjsondata']
10119
10421
 
@@ -10149,7 +10451,11 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
10149
10451
  fcontents.seek(0, 0)
10150
10452
  cfcontents.seek(0, 0)
10151
10453
  cfcontents = CompressOpenFileAlt(
10152
- cfcontents, compressionuselist[ilmin], compressionlevel, compressionuselist, formatspecs
10454
+ cfcontents,
10455
+ compressionuselist[ilmin],
10456
+ compressionlevel,
10457
+ compressionuselist,
10458
+ formatspecs
10153
10459
  )
10154
10460
  if cfcontents:
10155
10461
  cfcontents.seek(0, 2)
@@ -10157,7 +10463,7 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
10157
10463
  cfcontents.close()
10158
10464
  else:
10159
10465
  ilcsize.append(float("inf"))
10160
- ilmin += 1
10466
+ ilmin = ilmin + 1
10161
10467
  ilcmin = ilcsize.index(min(ilcsize))
10162
10468
  curcompression = compressionuselist[ilcmin]
10163
10469
 
@@ -10166,16 +10472,24 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
10166
10472
  shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
10167
10473
  cfcontents.seek(0, 0)
10168
10474
  cfcontents = CompressOpenFileAlt(
10169
- cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs
10475
+ cfcontents,
10476
+ curcompression,
10477
+ compressionlevel,
10478
+ compressionuselist,
10479
+ formatspecs
10170
10480
  )
10171
10481
  cfcontents.seek(0, 2)
10172
- cfsize_val = cfcontents.tell()
10173
- if ucfsize > cfsize_val:
10174
- fcsize = format(int(cfsize_val), 'x').lower()
10482
+ cfsize = cfcontents.tell()
10483
+ if ucfsize > cfsize:
10484
+ fcsize = format(int(cfsize), 'x').lower()
10175
10485
  fcompression = curcompression
10176
10486
  fcontents.close()
10177
10487
  fcontents = cfcontents
10178
10488
 
10489
+ if fcompression == "none":
10490
+ fcompression = ""
10491
+ fcontents.seek(0, 0)
10492
+
10179
10493
  # link following (fixed: use listarrayfiles, not prelistarrayfiles)
10180
10494
  if followlink:
10181
10495
  if (cur_entry['ftype'] == 1 or cur_entry['ftype'] == 2):
@@ -10184,6 +10498,9 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
10184
10498
  flinkinfo = listarrayfiles['ffilelist'][flinkid]
10185
10499
  fheadersize = format(int(flinkinfo['fheadersize']), 'x').lower()
10186
10500
  fsize = format(int(flinkinfo['fsize']), 'x').lower()
10501
+ fblksize = format(int(flinkinfo['fblksize']), 'x').lower()
10502
+ fblocks = format(int(flinkinfo['fblocks']), 'x').lower()
10503
+ fflags = format(int(flinkinfo['fflags']), 'x').lower()
10187
10504
  flinkname = flinkinfo['flinkname']
10188
10505
  fatime = format(int(flinkinfo['fatime']), 'x').lower()
10189
10506
  fmtime = format(int(flinkinfo['fmtime']), 'x').lower()
@@ -10202,14 +10519,19 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
10202
10519
  fcompression = flinkinfo['fcompression']
10203
10520
  fcsize = format(int(flinkinfo['fcsize']), 'x').lower()
10204
10521
  fdev = format(int(flinkinfo['fdev']), 'x').lower()
10205
- fdev_minor = format(int(flinkinfo['fminor']), 'x').lower()
10206
- fdev_major = format(int(flinkinfo['fmajor']), 'x').lower()
10522
+ frdev = format(int(flinkinfo['frdev']), 'x').lower()
10207
10523
  fseeknextfile = flinkinfo['fseeknextfile']
10208
10524
  if (len(flinkinfo['fextradata']) > flinkinfo['fextrafields']
10209
10525
  and len(flinkinfo['fextradata']) > 0):
10210
10526
  flinkinfo['fextrafields'] = len(flinkinfo['fextradata'])
10211
10527
  if len(extradata) <= 0:  # FIX: was "< 0", which can never be true
10212
10528
  extradata = flinkinfo['fextradata']
10529
+
10530
+ fvendorfields = flinkinfo['fvendorfields']
10531
+ ffvendorfieldslist = []
10532
+ if(fvendorfields>0):
10533
+ ffvendorfieldslist = flinkinfo['fvendorfieldslist']
10534
+
10213
10535
  if len(jsondata) <= 0:  # FIX: was "< 0", which can never be true
10214
10536
  jsondata = flinkinfo['fjsondata']
10215
10537
  fcontents = flinkinfo['fcontents']
@@ -10238,15 +10560,15 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
10238
10560
  fcompression = ""
10239
10561
 
10240
10562
  tmpoutlist = [
10241
- ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime,
10563
+ ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime,
10242
10564
  fctime, fbtime, fmode, fwinattributes, fcompression, fcsize, fuid, funame,
10243
- fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, fseeknextfile
10565
+ fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, fseeknextfile
10244
10566
  ]
10245
10567
 
10246
- AppendFileHeaderWithContent(
10247
- fp, tmpoutlist, extradata, jsondata, fcontents.read(),
10248
- [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs
10249
- )
10568
+ if(fvendorfields>0 and len(ffvendorfieldslist)>0):
10569
+ extradata.extend(ffvendorfieldslist)  # FIX: extend with the vendor-field list, not its count
10570
+
10571
+ AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, outsaltkey)
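tmpoutlist gains fblksize, fblocks and fflags after fsize, and the old fdev_minor/fdev_major pair collapses into a single frdev, so any reader that indexes header fields positionally has to shift to match. The resulting layout, as assembled above:

    # ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks,
    # fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
    # fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount,
    # fdev, frdev, fseeknextfile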
10250
10572
  try:
10251
10573
  fcontents.close()
10252
10574
  except Exception:
@@ -10291,12 +10613,12 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
10291
10613
  pass
10292
10614
  return True
10293
10615
 
10294
- def RePackMultipleFoxFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=None, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=None, skipchecksum=False, extradata=None, jsondata=None, formatspecs=None, seektoend=False, verbose=False, returnfp=False):
10616
+ def RePackMultipleFoxFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, seektoend=False, verbose=False, returnfp=False):
10295
10617
  if not isinstance(infiles, list):
10296
10618
  infiles = [infiles]
10297
10619
  returnout = False
10298
10620
  for infileslist in infiles:
10299
- returnout = RePackFoxFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend, checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, True)
10621
+ returnout = RePackFoxFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend, checksumtype, skipchecksum, extradata, jsondata, formatspecs, insaltkey, outsaltkey, seektoend, verbose, True)
10300
10622
  if(not returnout):
10301
10623
  break
10302
10624
  else:
@@ -10306,33 +10628,28 @@ def RePackMultipleFoxFile(infiles, outfile, fmttype="auto", compression="auto",
10306
10628
  return True
10307
10629
  return returnout
10308
10630
 
10309
- def RePackFoxFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
10631
+ def RePackFoxFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, seektoend=False, verbose=False, returnfp=False):
10310
10632
  fp = MkTempFile(instr)
10311
- listarrayfiles = RePackFoxFile(fp, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
10312
- checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
10633
+ listarrayfiles = RePackFoxFile(fp, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend, checksumtype, skipchecksum, extradata, jsondata, formatspecs, insaltkey, outsaltkey, seektoend, verbose, returnfp)
10313
10634
  return listarrayfiles
10314
10635
 
10315
10636
 
10316
- def PackFoxFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
10637
+ def PackFoxFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
10317
10638
  outarray = MkTempFile()
10318
- packform = PackFoxFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
10319
- compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, True)
10320
- listarrayfiles = RePackFoxFile(outarray, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
10321
- checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
10639
+ packform = PackFoxFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, saltkey, verbose, True)
10640
+ listarrayfiles = RePackFoxFile(outarray, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend, checksumtype, skipchecksum, extradata, jsondata, formatspecs, saltkey, seektoend, verbose, returnfp)
10322
10641
  return listarrayfiles
10323
10642
 
10324
10643
 
10325
- def UnPackFoxFile(infile, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, preservepermissions=True, preservetime=True, seektoend=False, verbose=False, returnfp=False):
10644
+ def UnPackFoxFile(infile, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, preservepermissions=True, preservetime=True, seektoend=False, verbose=False, returnfp=False):
10326
10645
  if(outdir is not None):
10327
10646
  outdir = RemoveWindowsPath(outdir)
10328
- if(verbose):
10329
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
10330
10647
  if(isinstance(infile, dict)):
10331
10648
  listarrayfiles = infile
10332
10649
  else:
10333
10650
  if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
10334
10651
  infile = RemoveWindowsPath(infile)
10335
- listarrayfiles = FoxFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, True, skipchecksum, formatspecs, seektoend, returnfp)
10652
+ listarrayfiles = FoxFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, True, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
10336
10653
  if(not listarrayfiles):
10337
10654
  return False
10338
10655
  lenlist = len(listarrayfiles['ffilelist'])
@@ -10568,9 +10885,9 @@ def UnPackFoxFile(infile, outdir=None, followlink=False, filestart=0, seekstart=
10568
10885
  return True
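UnPackFoxFile takes saltkey ahead of the preserve flags, and the verbose logging setup has been dropped from the body. Hedged usage (paths hypothetical):

    UnPackFoxFile("backup.fox", outdir="./restore", saltkey=b"secret",
                  preservepermissions=True, preservetime=True)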
10569
10886
 
10570
10887
 
10571
- def UnPackFoxFileString(instr, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
10888
+ def UnPackFoxFileString(instr, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
10572
10889
  fp = MkTempFile(instr)
10573
- listarrayfiles = UnPackFoxFile(fp, outdir, followlink, filestart, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
10890
+ listarrayfiles = UnPackFoxFile(fp, outdir, followlink, filestart, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend, verbose, returnfp)
10574
10891
  return listarrayfiles
10575
10892
 
10576
10893
  def ftype_to_str(ftype):
@@ -10588,9 +10905,7 @@ def ftype_to_str(ftype):
10588
10905
  # Default to "file" if unknown
10589
10906
  return mapping.get(ftype, "file")
10590
10907
 
10591
- def FoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
10592
- if(verbose):
10593
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
10908
+ def FoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, newstyle=False, returnfp=False):
10594
10909
  if(isinstance(infile, dict)):
10595
10910
  listarrayfileslist = [infile]
10596
10911
  if(isinstance(infile, list)):
@@ -10598,7 +10913,7 @@ def FoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0
10598
10913
  else:
10599
10914
  if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
10600
10915
  infile = RemoveWindowsPath(infile)
10601
- listarrayfileslist = FoxFileToArray(infile, fmttype, filestart, seekstart, seekend, True, False, False, skipchecksum, formatspecs, seektoend, returnfp)
10916
+ listarrayfileslist = FoxFileToArray(infile, fmttype, filestart, seekstart, seekend, True, False, False, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
10602
10917
  if(not listarrayfileslist):
10603
10918
  return False
10604
10919
  for listarrayfiles in listarrayfileslist:
@@ -10635,8 +10950,11 @@ def FoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0
10635
10950
  VerbosePrintOut(ftype_to_str(listarrayfiles['ffilelist'][lcfi]['ftype']) + "\t" + listarrayfiles['ffilelist'][lcfi]['fcompression'] + "\t" + str(
10636
10951
  listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + "\t" + printfname)
10637
10952
  else:
10953
+ ts_ns = listarrayfiles['ffilelist'][lcfi]['fmtime']
10954
+ sec, ns = divmod(int(ts_ns), 10**9)
10955
+ dt = datetime.datetime.utcfromtimestamp(sec).replace(microsecond=ns // 1000)
10638
10956
  VerbosePrintOut(PrintPermissionString(listarrayfiles['ffilelist'][lcfi]['fmode'], listarrayfiles['ffilelist'][lcfi]['ftype']) + " " + str(fuprint) + "/" + str(fgprint) + " " + str(
10639
- listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + " " + datetime.datetime.utcfromtimestamp(listarrayfiles['ffilelist'][lcfi]['fmtime']).strftime('%Y-%m-%d %H:%M') + " " + printfname)
10957
+ listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + " " + dt.strftime('%Y-%m-%d %H:%M') + " " + printfname)
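fmtime is now treated as integer nanoseconds since the epoch: divmod splits it into whole seconds and a nanosecond remainder, which is truncated to microseconds for display. A quick worked example:

    import datetime
    ts_ns = 1700000000123456789
    sec, ns = divmod(ts_ns, 10**9)  # 1700000000 s, 123456789 ns
    dt = datetime.datetime.utcfromtimestamp(sec).replace(microsecond=ns // 1000)
    print(dt.strftime('%Y-%m-%d %H:%M'))  # 2023-11-14 22:13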
10640
10958
  lcfi = lcfi + 1
10641
10959
  if(returnfp):
10642
10960
  return listarrayfiles['fp']
@@ -10644,25 +10962,25 @@ def FoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0
10644
10962
  return True
10645
10963
 
10646
10964
 
10647
- def MultipleFoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
10965
+ def MultipleFoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, newstyle=False, returnfp=False):  # FIX: define verbose/newstyle, which the call below passes through
10648
10966
  if(isinstance(infile, (list, tuple, ))):
10649
10967
  pass
10650
10968
  else:
10651
10969
  infile = [infile]
10652
10970
  outretval = {}
10653
10971
  for curfname in infile:
10654
- outretval[curfname] = FoxFileListFiles(infile, fmttype, filestart, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, returnfp)
10972
+ outretval[curfname] = FoxFileListFiles(curfname, fmttype, filestart, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend, verbose, newstyle, returnfp)  # FIX: list the current file, not the whole input list
10655
10973
  return outretval
10656
10974
 
10657
10975
 
10658
- def StackedFoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
10976
+ def StackedFoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, newstyle=False, returnfp=False):
10659
10977
  outretval = []
10660
10978
  outstartfile = filestart
10661
10979
  outfsize = float('inf')
10662
10980
  while True:
10663
10981
  if outstartfile >= outfsize: # stop when function signals False
10664
10982
  break
10665
- list_file_retu = FoxFileListFiles(infile, fmttype, outstartfile, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, True)
10983
+ list_file_retu = FoxFileListFiles(infile, fmttype, outstartfile, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend, verbose, newstyle, True)
10666
10984
  if list_file_retu is False: # stop when function signals False
10667
10985
  outretval.append(list_file_retu)
10668
10986
  else:
@@ -10678,30 +10996,31 @@ def StackedFoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, se
10678
10996
  if(returnfp):
10679
10997
  return infile
10680
10998
  else:
10681
- infile.close()
10999
+ try:
11000
+ infile.close()
11001
+ except AttributeError:
11002
+ return False
10682
11003
  return outretval
10683
11004
 
10684
11005
 
10685
- def MultipleStackedFoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
11006
+ def MultipleStackedFoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
10686
11007
  if(isinstance(infile, (list, tuple, ))):
10687
11008
  pass
10688
11009
  else:
10689
11010
  infile = [infile]
10690
11011
  outretval = {}
10691
11012
  for curfname in infile:
10692
- outretval[curfname] = StackedFoxFileListFiles(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
11013
+ outretval[curfname] = StackedFoxFileListFiles(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
10693
11014
  return outretval
10694
11015
 
10695
11016
 
10696
- def FoxFileStringListFiles(instr, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
11017
+ def FoxFileStringListFiles(instr, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, newstyle=False, returnfp=False):
10697
11018
  fp = MkTempFile(instr)
10698
- listarrayfiles = FoxFileListFiles(instr, "auto", filestart, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, returnfp)
11019
+ listarrayfiles = FoxFileListFiles(fp, "auto", filestart, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend, verbose, newstyle, returnfp)  # FIX: read from the MkTempFile object created above, not the raw string
10699
11020
  return listarrayfiles
10700
11021
 
10701
11022
 
10702
11023
  def TarFileListFiles(infile, verbose=False, returnfp=False):
10703
- if(verbose):
10704
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
10705
11024
  if(infile == "-"):
10706
11025
  infile = MkTempFile()
10707
11026
  shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
@@ -10822,8 +11141,6 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
10822
11141
 
10823
11142
 
10824
11143
  def ZipFileListFiles(infile, verbose=False, returnfp=False):
10825
- if(verbose):
10826
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
10827
11144
  if(infile == "-"):
10828
11145
  infile = MkTempFile()
10829
11146
  shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
@@ -10949,8 +11266,6 @@ if(not rarfile_support):
10949
11266
 
10950
11267
  if(rarfile_support):
10951
11268
  def RarFileListFiles(infile, verbose=False, returnfp=False):
10952
- if(verbose):
10953
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
10954
11269
  if(not os.path.exists(infile) or not os.path.isfile(infile)):
10955
11270
  return False
10956
11271
  if(not rarfile.is_rarfile(infile) and not rarfile.is_rarfile_sfx(infile)):
@@ -11078,8 +11393,6 @@ if(not py7zr_support):
11078
11393
 
11079
11394
  if(py7zr_support):
11080
11395
  def SevenZipFileListFiles(infile, verbose=False, returnfp=False):
11081
- if(verbose):
11082
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
11083
11396
  if(not os.path.exists(infile) or not os.path.isfile(infile)):
11084
11397
  return False
11085
11398
  lcfi = 0
@@ -11173,8 +11486,6 @@ if(py7zr_support):
11173
11486
 
11174
11487
 
11175
11488
  def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict__, seektoend=False, newstyle=False, returnfp=False):
11176
- if(verbose):
11177
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
11178
11489
  checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
11179
11490
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
11180
11491
  formatspecs = formatspecs[checkcompressfile]
@@ -11201,44 +11512,6 @@ def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compress
11201
11512
  outarray, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
11202
11513
  return listarrayfiles
11203
11514
 
11204
- """
11205
- PyNeoFile compatibility layer
11206
- """
11207
-
11208
- def make_empty_file_pointer_neo(fp, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
11209
- return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)
11210
-
11211
- def make_empty_archive_file_pointer_neo(fp, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
11212
- return make_empty_file_pointer_neo(fp, fmttype, checksumtype, formatspecs, encoding)
11213
-
11214
- def make_empty_file_neo(outfile=None, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
11215
- return MakeEmptyFile(outfile, fmttype, "auto", False, None, compressionlistalt, checksumtype, formatspecs, returnfp)
11216
-
11217
- def make_empty_archive_file_neo(outfile=None, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
11218
- return make_empty_file_neo(outfile, fmttype, checksumtype, formatspecs, encoding, returnfp)
11219
-
11220
- def pack_neo(infiles, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["md5", "md5", "md5", "md5"], encoding="UTF-8", compression="auto", compression_level=None, returnfp=False):
11221
- return PackFoxFile(infiles, outfile, False, "auto", compression, False, compression_level, compressionlistalt, False, checksumtypes, [], {}, formatspecs, False, returnfp)
11222
-
11223
- def archive_to_array_neo(infile, formatspecs=__file_format_multi_dict__, listonly=False, skipchecksum=False, uncompress=True, returnfp=False):
11224
- return FoxFileToArray(infile, "auto", 0, 0, 0, listonly, True, uncompress, skipchecksum, formatspecs, False, returnfp)
11225
-
11226
- def unpack_neo(infile, outdir='.', formatspecs=__file_format_multi_dict__, skipchecksum=False, uncompress=True, returnfp=False):
11227
- return UnPackFoxFile(infile, outdir, False, 0, 0, skipchecksum, formatspecs, True, True, False, False, returnfp)
11228
-
11229
- def repack_neo(infile, outfile=None, formatspecs=__file_format_dict__, checksumtypes=["md5", "md5", "md5", "md5"], compression="auto", compression_level=None, returnfp=False):
11230
- return RePackFoxFile(infile, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)
11231
-
11232
- def validate_neo(infile, formatspecs=__file_format_multi_dict__, verbose=False, return_details=False, returnfp=False):
11233
- return FoxFileValidate(infile, "auto", formatspecs, False, verbose, returnfp)
11234
-
11235
- def listfiles_neo(infile, formatspecs=__file_format_multi_dict__, advanced=False, include_dirs=True, returnfp=False):
11236
- return FoxFileListFiles(infile, "auto", 0, 0, False, formatspecs, False, True, advanced, returnfp)
11237
-
11238
- def convert_foreign_to_neo(infile, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["md5", "md5", "md5", "md5"], compression="auto", compression_level=None, returnfp=False):
11239
- intmp = InFileToArray(infile, 0, 0, 0, False, True, False, formatspecs, False, False)
11240
- return RePackFoxFile(intmp, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)
11241
-
11242
11515
  def detect_cwd(ftp, file_dir):
11243
11516
  """
11244
11517
  Test whether cwd into file_dir works. Returns True if it does,
@@ -12842,7 +13115,7 @@ def run_tcp_file_server(fileobj, url, on_progress=None):
12842
13115
  Ends after serving exactly one client or the wait window elapses.
12843
13116
 
12844
13117
  URL example:
12845
- tcp://user:pass@0.0.0.0:5000/path/my.fox?
13118
+ tcp://user:pass@0.0.0.0:5000/path/my.arc?
12846
13119
  auth=1&enforce_path=1&rate=200000&timeout=5&wait=30&ssl=0
12847
13120
  """
12848
13121
  parts, o = _parse_net_url(url) # already returns proto/host/port/timeout/ssl/etc.
@@ -13044,7 +13317,7 @@ def run_udp_file_server(fileobj, url, on_progress=None):
13044
13317
  Ends after serving exactly one client or the wait window elapses.
13045
13318
 
13046
13319
  URL example:
13047
- udp://user:pass@0.0.0.0:5001/path/my.fox?
13320
+ udp://user:pass@0.0.0.0:5001/path/my.arc?
13048
13321
  auth=1&enforce_path=1&rate=250000&timeout=5&wait=30
13049
13322
  """
13050
13323
  parts, o = _parse_net_url(url)
@@ -13468,7 +13741,7 @@ def run_tcp_file_server(fileobj, url, on_progress=None):
13468
13741
  Ends after serving exactly one client or the wait window elapses.
13469
13742
 
13470
13743
  URL example:
13471
- tcp://user:pass@0.0.0.0:5000/path/my.fox?
13744
+ tcp://user:pass@0.0.0.0:5000/path/my.arc?
13472
13745
  auth=1&enforce_path=1&rate=200000&timeout=5&wait=30&ssl=0
13473
13746
  """
13474
13747
  parts, o = _parse_net_url(url) # already returns proto/host/port/timeout/ssl/etc.
@@ -14020,7 +14293,7 @@ def run_udp_file_server(fileobj, url, on_progress=None):
14020
14293
  Ends after serving exactly one client or the wait window elapses.
14021
14294
 
14022
14295
  URL example:
14023
- udp://user:pass@0.0.0.0:5001/path/my.fox?
14296
+ udp://user:pass@0.0.0.0:5001/path/my.arc?
14024
14297
  auth=1&enforce_path=1&rate=250000&timeout=5&wait=30
14025
14298
  """
14026
14299
  parts, o = _parse_net_url(url)