PyArchiveFile 0.25.2__py3-none-any.whl → 0.26.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pyarchivefile-0.25.2.data → pyarchivefile-0.26.0.data}/scripts/archivefile.py +31 -22
- {pyarchivefile-0.25.2.dist-info → pyarchivefile-0.26.0.dist-info}/METADATA +1 -1
- pyarchivefile-0.26.0.dist-info/RECORD +10 -0
- pyarchivefile.py +799 -723
- pyarchivefile-0.25.2.dist-info/RECORD +0 -10
- {pyarchivefile-0.25.2.data → pyarchivefile-0.26.0.data}/scripts/archiveneofile.py +0 -0
- {pyarchivefile-0.25.2.data → pyarchivefile-0.26.0.data}/scripts/neoarchivefile.py +0 -0
- {pyarchivefile-0.25.2.dist-info → pyarchivefile-0.26.0.dist-info}/WHEEL +0 -0
- {pyarchivefile-0.25.2.dist-info → pyarchivefile-0.26.0.dist-info}/licenses/LICENSE +0 -0
- {pyarchivefile-0.25.2.dist-info → pyarchivefile-0.26.0.dist-info}/top_level.txt +0 -0
- {pyarchivefile-0.25.2.dist-info → pyarchivefile-0.26.0.dist-info}/zip-safe +0 -0
pyarchivefile.py
CHANGED
@@ -14,7 +14,7 @@
 Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
 Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski
 
-$FileInfo: pyarchivefile.py - Last Update: 11/
+$FileInfo: pyarchivefile.py - Last Update: 11/12/2025 Ver. 0.26.0 RC 1 - Author: cooldude2k $
 '''
 
 from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -416,9 +416,13 @@ def is_only_nonprintable(var):
 __file_format_multi_dict__ = {}
 __file_format_default__ = "ArchiveFile"
 __include_defaults__ = True
-
+__use_inmem__ = True
+__use_memfd__ = True
 __use_spoolfile__ = False
 __use_spooldir__ = tempfile.gettempdir()
+__use_new_style__ = True
+__use_advanced_list__ = True
+__use_alt_inode__ = False
 BYTES_PER_KiB = 1024
 BYTES_PER_MiB = 1024 * BYTES_PER_KiB
 # Spool: not tiny, but won’t blow up RAM if many are in use
@@ -462,9 +466,13 @@ if __use_ini_file__ and os.path.exists(__config_file__):
     __file_format_default__ = decode_unicode_escape(config.get('config', 'default'))
     __program_name__ = decode_unicode_escape(config.get('config', 'proname'))
     __include_defaults__ = config.getboolean('config', 'includedef')
-
+    __use_inmem__ = config.getboolean('config', 'useinmem')
+    __use_memfd__ = config.getboolean('config', 'usememfd')
     __use_spoolfile__ = config.getboolean('config', 'usespoolfile')
     __spoolfile_size__ = config.getint('config', 'spoolfilesize')
+    __use_new_style__ = config.getboolean('config', 'newstyle')
+    __use_advanced_list__ = config.getboolean('config', 'advancedlist')
+    __use_alt_inode__ = config.getboolean('config', 'altinode')
     # Loop through all sections
     for section in config.sections():
         if section == "config":
@@ -472,8 +480,7 @@ if __use_ini_file__ and os.path.exists(__config_file__):
 
         required_keys = [
             "len", "hex", "ver", "name",
-            "magic", "delimiter", "extension"
-            "newstyle", "advancedlist", "altinode"
+            "magic", "delimiter", "extension"
         ]
 
         # Py2+Py3 compatible key presence check
@@ -493,9 +500,6 @@ if __use_ini_file__ and os.path.exists(__config_file__):
             'format_hex': config.get(section, 'hex'),
             'format_delimiter': delim,
             'format_ver': config.get(section, 'ver'),
-            'new_style': config.getboolean(section, 'newstyle'),
-            'use_advanced_list': config.getboolean(section, 'advancedlist'),
-            'use_alt_inode': config.getboolean(section, 'altinode'),
             'format_extension': decode_unicode_escape(config.get(section, 'extension')),
         }
     })
@@ -556,16 +560,19 @@ elif __use_json_file__ and os.path.exists(__config_file__):
     cfg_config = cfg.get('config', {}) or {}
     __file_format_default__ = decode_unicode_escape(_get(cfg_config, 'default', ''))
     __program_name__ = decode_unicode_escape(_get(cfg_config, 'proname', ''))
-    __include_defaults__ = _to_bool(_get(cfg_config, 'includedef',
-
+    __include_defaults__ = _to_bool(_get(cfg_config, 'includedef', True))
+    __use_inmem__ = _to_bool(_get(cfg_config, 'useinmem', True))
+    __use_memfd__ = _to_bool(_get(cfg_config, 'usememfd', True))
     __use_spoolfile__ = _to_bool(_get(cfg_config, 'usespoolfile', False))
     __spoolfile_size__ = _to_int(_get(cfg_config, 'spoolfilesize', DEFAULT_SPOOL_MAX))
+    __use_new_style__ = _to_bool(_get(cfg_config, 'usespoolfile', True))
+    __use_advanced_list__ = _to_bool(_get(cfg_config, 'usespoolfile', True))
+    __use_alt_inode__ = _to_bool(_get(cfg_config, 'usespoolfile', False))
 
     # --- iterate format sections (everything except "config") ---
     required_keys = [
         "len", "hex", "ver", "name",
-        "magic", "delimiter", "extension"
-        "newstyle", "advancedlist", "altinode"
+        "magic", "delimiter", "extension"
     ]
 
     for section_name, section in cfg.items():
@@ -583,9 +590,6 @@ elif __use_json_file__ and os.path.exists(__config_file__):
         fmt_hex = decode_unicode_escape(_get(section, 'hex', ''))
         fmt_ver = decode_unicode_escape(_get(section, 'ver', ''))
         delim = decode_unicode_escape(_get(section, 'delimiter', ''))
-        new_style = _to_bool(_get(section, 'newstyle', False))
-        adv_list = _to_bool(_get(section, 'advancedlist', False))
-        alt_inode = _to_bool(_get(section, 'altinode', False))
         extension = decode_unicode_escape(_get(section, 'extension', ''))
 
         # keep your delimiter validation semantics
@@ -600,9 +604,6 @@ elif __use_json_file__ and os.path.exists(__config_file__):
             'format_hex': fmt_hex,
             'format_delimiter': delim,
             'format_ver': fmt_ver,
-            'new_style': new_style,
-            'use_advanced_list': adv_list,
-            'use_alt_inode': alt_inode,
             'format_extension': extension,
         }
     })
@@ -621,6 +622,7 @@ elif __use_json_file__ and not os.path.exists(__config_file__):
 if not __use_ini_file__ and not __include_defaults__:
     __include_defaults__ = True
 if __include_defaults__:
+    # Arc / Neo
     add_format(__file_format_multi_dict__, "ArchiveFile", "ArchiveFile", ".arc", "ArchiveFile")
     add_format(__file_format_multi_dict__, "NeoFile", "NeoFile", ".neo", "NeoFile")
 
@@ -633,21 +635,18 @@ __file_format_len__ = __file_format_multi_dict__[__file_format_default__]['format_len']
 __file_format_hex__ = __file_format_multi_dict__[__file_format_default__]['format_hex']
 __file_format_delimiter__ = __file_format_multi_dict__[__file_format_default__]['format_delimiter']
 __file_format_ver__ = __file_format_multi_dict__[__file_format_default__]['format_ver']
-__use_new_style__ = __file_format_multi_dict__[__file_format_default__]['new_style']
-__use_advanced_list__ = __file_format_multi_dict__[__file_format_default__]['use_advanced_list']
-__use_alt_inode__ = __file_format_multi_dict__[__file_format_default__]['use_alt_inode']
 __file_format_extension__ = __file_format_multi_dict__[__file_format_default__]['format_extension']
 __file_format_dict__ = __file_format_multi_dict__[__file_format_default__]
 __project__ = __program_name__
 __program_alt_name__ = __program_name__
 __project_url__ = "https://github.com/GameMaker2k/PyArchiveFile"
 __project_release_url__ = __project_url__+"/releases/latest"
-__version_info__ = (0,
-__version_date_info__ = (2025, 11,
+__version_info__ = (0, 26, 0, "RC 1", 1)
+__version_date_info__ = (2025, 11, 12, "RC 1", 1)
 __version_date__ = str(__version_date_info__[0]) + "." + str(
     __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
 __revision__ = __version_info__[3]
-__revision_id__ = "$Id:
+__revision_id__ = "$Id: 5871acc6c09340f9594b10109029aa90a9e6c6aa $"
 if(__version_info__[4] is not None):
     __version_date_plusrc__ = __version_date__ + \
         "-" + str(__version_date_info__[4])
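The version strings are assembled from these tuples at import time. A quick sketch of the same zfill logic with this release's values:

    # Mirrors the assembly of __version_date__ for the 0.26.0 RC 1 release.
    version_date_info = (2025, 11, 12, "RC 1", 1)
    version_date = str(version_date_info[0]) + "." + \
        str(version_date_info[1]).zfill(2) + "." + str(version_date_info[2]).zfill(2)
    print(version_date)  # 2025.11.12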
@@ -659,6 +658,9 @@ if(__version_info__[3] is not None):
 if(__version_info__[3] is None):
     __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])
 
+_logger = logging.getLogger(__project__)  # library-style logger
+_logger.addHandler(logging.NullHandler())  # don't emit logs unless app configures logging
+
 # From: https://stackoverflow.com/a/28568003
 # By Phaxmohdem
 
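The NullHandler added here is the conventional way for a library to stay silent until the embedding application opts in. A minimal sketch of how a consumer would surface those records (the logger name assumes the default program name; the handler configuration is the application's choice, not part of the package):

    import logging

    # The application, not the library, decides where records go.
    logging.basicConfig(level=logging.DEBUG,
                        format="%(asctime)s %(name)s %(levelname)s: %(message)s")

    # Without the basicConfig() call, the library's NullHandler would
    # swallow these records instead of printing them.
    logging.getLogger("PyArchiveFile").debug("archive opened")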
@@ -1028,6 +1030,20 @@ def VerbosePrintOutReturn(dbgtxt, outtype="log", dbgenable=True, dgblevel=20, **kwargs):
     VerbosePrintOut(dbgtxt, outtype, dbgenable, dgblevel, **kwargs)
     return dbgtxt
 
+def to_ns(timestamp):
+    """
+    Convert a second-resolution timestamp (int or float)
+    into a nanosecond timestamp (int) by zero-padding.
+    Works in Python 2 and Python 3.
+    """
+    try:
+        # Convert incoming timestamp to float so it works for int or float
+        seconds = float(timestamp)
+    except (TypeError, ValueError):
+        raise ValueError("Timestamp must be int or float")
+
+    # Multiply by 1e9 to get nanoseconds, then cast to int
+    return int(seconds * 1000000000)
 
 def _split_posix(name):
     """
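The new to_ns() helper widens second-resolution timestamps to the nanosecond integers used by the archive headers. A small usage sketch (the os.stat() fallback illustrates the Py2/Py3 portability the docstring mentions):

    import os

    print(to_ns(1))    # 1000000000
    print(to_ns(2.5))  # 2500000000

    # Typical use: normalize stat times on interpreters without st_mtime_ns.
    st = os.stat(".")
    mtime_ns = getattr(st, "st_mtime_ns", None) or to_ns(st.st_mtime)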
@@ -2051,34 +2067,53 @@ def _normalize_initial_data(data, isbytes, encoding, errors=None):
 
 
 def MkTempFile(data=None,
-               inmem=
+               inmem=__use_inmem__, usememfd=__use_memfd__,
                isbytes=True,
-               prefix=
+               prefix=__program_name__,
                delete=True,
                encoding="utf-8",
-               newline=None,
+               newline=None,
+               text_errors="strict",
                dir=None,
                suffix="",
                use_spool=__use_spoolfile__,
+               autoswitch_spool=False,
               spool_max=__spoolfile_size__,
-               spool_dir=__use_spooldir__
+               spool_dir=__use_spooldir__,
+               reset_to_start=True,
+               memfd_name=None,
+               memfd_allow_sealing=False,
+               memfd_flags_extra=0,
+               on_create=None):
     """
     Return a file-like handle with consistent behavior on Py2.7 and Py3.x.
 
     Storage:
-      - inmem=True
-
-      - inmem=
+      - inmem=True, usememfd=True, isbytes=True and memfd available
+          -> memfd-backed anonymous file (binary)
+      - inmem=True, otherwise
+          -> BytesIO (bytes) or StringIO (text)
+      - inmem=False, use_spool=True
+          -> SpooledTemporaryFile (binary), optionally TextIOWrapper for text
+      - inmem=False, use_spool=False
+          -> NamedTemporaryFile (binary), optionally TextIOWrapper for text
 
     Text vs bytes:
       - isbytes=True -> file expects bytes; 'data' must be bytes-like
-      - isbytes=False -> file expects text; 'data' must be text (unicode/str). Newline translation and
-        apply only for spooled/named files (not BytesIO/StringIO).
+      - isbytes=False -> file expects text; 'data' must be text (unicode/str). Newline translation and
+        encoding apply only for spooled/named files (not BytesIO/StringIO).
 
     Notes:
-      - On Windows, NamedTemporaryFile(delete=True) keeps the file open and cannot be reopened by
-        Use delete=False if you need to pass the path elsewhere.
-      - For text: in-memory StringIO ignores 'newline' (as usual).
+      - On Windows, NamedTemporaryFile(delete=True) keeps the file open and cannot be reopened by
+        other processes. Use delete=False if you need to pass the path elsewhere.
+      - For text: in-memory StringIO ignores 'newline' and 'text_errors' (as usual).
+      - When available, and if usememfd=True, memfd is used only for inmem=True and isbytes=True,
+        providing an anonymous in-memory file descriptor (Linux-only). Text in-memory still uses
+        StringIO to preserve newline semantics.
+      - If autoswitch_spool=True and initial data size exceeds spool_max, in-memory storage is
+        skipped and a spooled file is used instead (if use_spool=True).
+      - If on_create is not None, it is called as on_create(fp, kind) where kind is one of:
+        "memfd", "bytesio", "stringio", "spool", "disk".
     """
 
     # -- sanitize simple params (avoid None surprises) --
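Taken together, the new parameters let a caller pick the backing store and observe which one was chosen. A short usage sketch following the docstring above (the on_create callback body and the sizes are illustrative):

    # In-memory bytes: memfd-backed on Linux/Python 3.8+, BytesIO elsewhere.
    fp = MkTempFile(b"hello world", inmem=True,
                    on_create=lambda f, kind: print("backed by:", kind))
    print(fp.read())  # b'hello world' -- reset_to_start=True rewound the handle
    fp.close()

    # Large payloads: autoswitch skips RAM and goes straight to a spooled file.
    big = b"\x00" * (8 * 1024 * 1024)
    fp = MkTempFile(big, inmem=True, autoswitch_spool=True,
                    use_spool=True, spool_max=4 * 1024 * 1024)
    fp.close()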
@@ -2110,23 +2145,65 @@ def MkTempFile(data=None,
     else:
         init = None
 
+    # Size of init for autoswitch; only meaningful for bytes
+    init_len = len(init) if (init is not None and isbytes) else None
+
     # -------- In-memory --------
     if inmem:
-
-
-
-
-
-
-
-
+        # If autoswitch is enabled and data is larger than spool_max, and
+        # spooling is allowed, skip the in-memory branch and fall through
+        # to the spool/disk logic below.
+        if autoswitch_spool and use_spool and init_len is not None and init_len > spool_max:
+            pass  # fall through to spool/disk sections
+        else:
+            # Use memfd only for bytes, and only where available (Linux, Python 3.8+)
+            if usememfd and isbytes and hasattr(os, "memfd_create"):
+                name = memfd_name or prefix or "MkTempFile"
+                flags = 0
+                # Close-on-exec is almost always what you want for temps
+                if hasattr(os, "MFD_CLOEXEC"):
+                    flags |= os.MFD_CLOEXEC
+                # Optional sealing support if requested and available
+                if memfd_allow_sealing and hasattr(os, "MFD_ALLOW_SEALING"):
+                    flags |= os.MFD_ALLOW_SEALING
+                # Extra custom flags (e.g. hugepage flags) if caller wants them
+                if memfd_flags_extra:
+                    flags |= memfd_flags_extra
+
+                fd = os.memfd_create(name, flags)
+                # Binary read/write file-like object backed by RAM
+                f = os.fdopen(fd, "w+b")
+
+                if init is not None:
+                    f.write(init)
+                if reset_to_start:
+                    f.seek(0)
+
+                if on_create is not None:
+                    on_create(f, "memfd")
+                return f
+
+            # Fallback: pure Python in-memory objects
+            if isbytes:
+                f = io.BytesIO(init if init is not None else b"")
+                kind = "bytesio"
+            else:
+                # newline/text_errors not enforced for StringIO; matches stdlib semantics
+                f = io.StringIO(init if init is not None else "")
+                kind = "stringio"
+
+            if reset_to_start:
+                f.seek(0)
+
+            if on_create is not None:
+                on_create(f, kind)
+            return f
 
     # Helper: wrap a binary file into a text file with encoding/newline
     def _wrap_text(handle):
         # For both Py2 & Py3, TextIOWrapper gives consistent newline/encoding behavior
-
-
-        return tw
+        return io.TextIOWrapper(handle, encoding=encoding,
+                                newline=newline, errors=text_errors)
 
     # -------- Spooled (RAM then disk) --------
     if use_spool:
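The memfd branch relies purely on feature detection, so it degrades gracefully off Linux. A standalone sketch of the same os.memfd_create() pattern:

    import os

    if hasattr(os, "memfd_create"):  # Linux, Python 3.8+
        fd = os.memfd_create("demo", getattr(os, "MFD_CLOEXEC", 0))
        with os.fdopen(fd, "w+b") as f:
            f.write(b"ram-backed")
            f.seek(0)
            print(f.read())  # b'ram-backed'
    else:
        print("no memfd; MkTempFile falls back to io.BytesIO")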
@@ -2134,19 +2211,33 @@ def MkTempFile(data=None,
         bin_mode = "w+b"  # read/write, binary
         b = tempfile.SpooledTemporaryFile(max_size=spool_max, mode=bin_mode, dir=spool_dir)
         f = b if isbytes else _wrap_text(b)
+
         if init is not None:
             f.write(init)
+            if reset_to_start:
+                f.seek(0)
+        elif reset_to_start:
             f.seek(0)
+
+        if on_create is not None:
+            on_create(f, "spool")
         return f
 
     # -------- On-disk temp (NamedTemporaryFile) --------
     # Always create binary file; wrap for text if needed for uniform Py2/3 behavior
-    b = tempfile.NamedTemporaryFile(mode="w+b", prefix=prefix, suffix=suffix,
+    b = tempfile.NamedTemporaryFile(mode="w+b", prefix=prefix, suffix=suffix,
+                                    dir=dir, delete=delete)
     f = b if isbytes else _wrap_text(b)
 
     if init is not None:
         f.write(init)
+        if reset_to_start:
+            f.seek(0)
+    elif reset_to_start:
         f.seek(0)
+
+    if on_create is not None:
+        on_create(f, "disk")
     return f
 
 
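The spooled branch inherits SpooledTemporaryFile's rollover semantics: data stays in RAM until max_size is exceeded, then silently moves to disk. A quick demonstration (checking _rolled is a CPython implementation detail, used here only to make the rollover visible):

    import tempfile

    spool = tempfile.SpooledTemporaryFile(max_size=1024, mode="w+b")
    spool.write(b"x" * 512)
    print(spool._rolled)  # False: still an in-memory buffer
    spool.write(b"x" * 1024)
    print(spool._rolled)  # True: transparently moved to a real temp file
    spool.close()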
@@ -3666,7 +3757,7 @@ def _bytes_to_int(b):
 # =========================
 # Public checksum API
 # =========================
-def GetHeaderChecksum(inlist=None, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
+def GetHeaderChecksum(inlist=None, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__, saltkey=None):
     """
     Serialize header fields (list/tuple => joined with delimiter + trailing delimiter;
     or a single field) and compute the requested checksum. Returns lowercase hex.
@@ -3678,15 +3769,30 @@ def GetHeaderChecksum(inlist=None, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
     if encodedata and not isinstance(hdr_bytes, (bytes, bytearray, memoryview)):
         hdr_bytes = _to_bytes(hdr_bytes)
     hdr_bytes = bytes(hdr_bytes)
-
+    saltkeyval = None
+    if(hasattr(saltkey, "read")):
+        saltkeyval = skfp.read()
+        if(not isinstance(saltkeyval, bytes) and sys.version_info[0] >= 3):
+            saltkeyval = saltkeyval.encode("UTF-8")
+    elif(isinstance(saltkey, bytes) and sys.version_info[0] >= 3):
+        saltkeyval = saltkey
+    elif(saltkey is not None and os.path.exists(saltkey)):
+        with open(saltkey, "rb") as skfp:
+            saltkeyval = skfp.read()
+    else:
+        saltkey = None
+    if(saltkeyval is None):
+        saltkey = None
     if CheckSumSupport(algo_key, hashlib_guaranteed):
-
-
-
+        if(saltkey is None or saltkeyval is None):
+            h = hashlib.new(algo_key, hdr_bytes)
+        else:
+            h = hmac.new(saltkeyval, hdr_bytes, digestmod=algo_key)
+        return h.hexdigest().lower()
 
     return "0"
 
-def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
+def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__, saltkey=None):
     """
     Accepts bytes/str/file-like.
     - Hashlib algos: streamed in 1 MiB chunks.
@@ -3694,13 +3800,29 @@ def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
     - Falls back to one-shot for non-file-like inputs.
     """
     algo_key = (checksumtype or "md5").lower()
-
+    saltkeyval = None
+    if(hasattr(saltkey, "read")):
+        saltkeyval = skfp.read()
+        if(not isinstance(saltkeyval, bytes) and sys.version_info[0] >= 3):
+            saltkeyval = saltkeyval.encode("UTF-8")
+    elif(isinstance(saltkey, bytes) and sys.version_info[0] >= 3):
+        saltkeyval = saltkey
+    elif(saltkey is not None and os.path.exists(saltkey)):
+        with open(saltkey, "rb") as skfp:
+            saltkeyval = skfp.read()
+    else:
+        saltkey = None
+    if(saltkeyval is None):
+        saltkey = None
     # file-like streaming
     if hasattr(inbytes, "read"):
         # hashlib
 
         if CheckSumSupport(algo_key, hashlib_guaranteed):
-
+            if(saltkey is None or saltkeyval is None):
+                h = hashlib.new(algo_key)
+            else:
+                h = hmac.new(saltkeyval, digestmod=algo_key)
             while True:
                 chunk = inbytes.read(__filebuff_size__)
                 if not chunk:
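The streaming path digests file-like inputs in fixed-size chunks so large members never have to fit in memory. The same loop with only the standard library (buffer size assumed; the package reads __filebuff_size__ bytes per chunk):

    import hashlib
    import io

    buf_size = 1024 * 1024  # assumed 1 MiB, matching the docstring
    h = hashlib.new("md5")
    fp = io.BytesIO(b"example payload")
    while True:
        chunk = fp.read(buf_size)
        if not chunk:
            break
        h.update(chunk)
    print(h.hexdigest().lower())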
@@ -3721,26 +3843,41 @@ def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
     # one-shot
 
     if CheckSumSupport(algo_key, hashlib_guaranteed):
-
-
+        if(saltkey is None or saltkeyval is None):
+            h = hashlib.new(algo_key, data)
+        else:
+            h = hmac.new(saltkeyval, data, digestmod=algo_key)
         return h.hexdigest().lower()
 
     return "0"
 
-def ValidateHeaderChecksum(inlist=None, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__):
-    calc = GetHeaderChecksum(inlist, checksumtype, True, formatspecs)
+def ValidateHeaderChecksum(inlist=None, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__, saltkey=None):
+    calc = GetHeaderChecksum(inlist, checksumtype, True, formatspecs, saltkey)
     want = (inchecksum or "0").strip().lower()
     if want.startswith("0x"):
         want = want[2:]
-    return
+    return CheckChecksums(want, calc)
 
-def ValidateFileChecksum(infile, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__):
-    calc = GetFileChecksum(infile, checksumtype, True, formatspecs)
+def ValidateFileChecksum(infile, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__, saltkey=None):
+    calc = GetFileChecksum(infile, checksumtype, True, formatspecs, saltkey)
     want = (inchecksum or "0").strip().lower()
     if want.startswith("0x"):
         want = want[2:]
-    return
+    return CheckChecksums(want, calc)
 
+def CheckChecksums(inchecksum, outchecksum):
+    # Normalize as text first
+    calc = (inchecksum or "0").strip().lower()
+    want = (outchecksum or "0").strip().lower()
+
+    if want.startswith("0x"):
+        want = want[2:]
+
+    # Now force both to bytes
+    calc_b = _to_bytes(calc)  # defaults to utf-8, strict
+    want_b = _to_bytes(want)
+
+    return hmac.compare_digest(want_b, calc_b)
 
 def MajorMinorToDev(major, minor):
     """
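With saltkey set, the digest becomes an HMAC keyed by the salt, and CheckChecksums compares the hex strings in constant time. An equivalent standard-library sketch (the delimiter and field values are made up for illustration):

    import hashlib
    import hmac

    fields = ["ArchiveFile", "1a4", "0"]
    delimiter = "\x00"  # hypothetical; real formats define their own delimiter
    payload = (delimiter.join(fields) + delimiter).encode("UTF-8")

    unsalted = hashlib.new("md5", payload).hexdigest().lower()
    salted = hmac.new(b"secret-salt", payload, digestmod="md5").hexdigest().lower()

    # Timing-safe comparison, as CheckChecksums does after normalizing case/0x.
    print(hmac.compare_digest(salted.encode("UTF-8"), salted.encode("UTF-8")))  # True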
@@ -4109,11 +4246,11 @@ def ReadFileHeaderDataWoSize(fp, delimiter=_default_delim(None)):
     return first_two + headerdata
 
 
-def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
     if(not hasattr(fp, "read")):
         return False
     delimiter = formatspecs['format_delimiter']
-    if(
+    if(__use_new_style__):
         HeaderOut = ReadFileHeaderDataBySize(fp, delimiter)
     else:
         HeaderOut = ReadFileHeaderDataWoSize(fp, delimiter)
@@ -4201,15 +4338,14 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
     except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
         pass
     fp.seek(len(delimiter), 1)
-    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
-    if(not
+    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+    if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
         VerbosePrintOut("File JSON Data Checksum Error with file " +
                         fname + " at offset " + str(fheaderstart))
         VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
         return False
     fp.seek(len(delimiter), 1)
-    newfcs = GetHeaderChecksum(
-        HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs)
+    newfcs = GetHeaderChecksum(HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs, saltkey)
     HeaderOut.append(fjsoncontent)
     if(fcs != newfcs and not skipchecksum):
         VerbosePrintOut("File Header Checksum Error with file " +
@@ -4228,10 +4364,9 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
     else:
         fp.seek(fcsize, 1)
     fcontents.seek(0, 0)
-    newfccs = GetFileChecksum(
-        fcontents, HeaderOut[-3].lower(), False, formatspecs)
+    newfccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
     fcontents.seek(0, 0)
-    if(not
+    if(not CheckChecksums(fccs, newfccs) and not skipchecksum and not listonly):
         VerbosePrintOut("File Content Checksum Error with file " +
                         fname + " at offset " + str(fcontentstart))
         VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4268,12 +4403,12 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
     return HeaderOut
 
 
-def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
     if(not hasattr(fp, "read")):
         return False
     delimiter = formatspecs['format_delimiter']
     fheaderstart = fp.tell()
-    if(
+    if(__use_new_style__):
         HeaderOut = ReadFileHeaderDataBySize(fp, delimiter)
     else:
         HeaderOut = ReadFileHeaderDataWoSize(fp, delimiter)
@@ -4291,40 +4426,51 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
     fbasedir = os.path.dirname(fname)
     flinkname = HeaderOut[6]
     fsize = int(HeaderOut[7], 16)
-
-
-
-
-
+    fblksize = int(HeaderOut[8], 16)
+    fblocks = int(HeaderOut[9], 16)
+    fflags = int(HeaderOut[10], 16)
+    fatime = int(HeaderOut[11], 16)
+    fmtime = int(HeaderOut[12], 16)
+    fctime = int(HeaderOut[13], 16)
+    fbtime = int(HeaderOut[14], 16)
+    fmode = int(HeaderOut[15], 16)
     fchmode = stat.S_IMODE(fmode)
     ftypemod = stat.S_IFMT(fmode)
-    fwinattributes = int(HeaderOut[
-    fcompression = HeaderOut[
-    fcsize = int(HeaderOut[
-    fuid = int(HeaderOut[
-    funame = HeaderOut[
-    fgid = int(HeaderOut[
-    fgname = HeaderOut[
-    fid = int(HeaderOut[
-    finode = int(HeaderOut[
-    flinkcount = int(HeaderOut[
-    fdev = int(HeaderOut[
-
-
-
-
-
-
-
-
-
-    fextrafields = int(HeaderOut[33], 16)
+    fwinattributes = int(HeaderOut[16], 16)
+    fcompression = HeaderOut[17]
+    fcsize = int(HeaderOut[18], 16)
+    fuid = int(HeaderOut[19], 16)
+    funame = HeaderOut[20]
+    fgid = int(HeaderOut[21], 16)
+    fgname = HeaderOut[22]
+    fid = int(HeaderOut[23], 16)
+    finode = int(HeaderOut[24], 16)
+    flinkcount = int(HeaderOut[25], 16)
+    fdev = int(HeaderOut[26], 16)
+    frdev = int(HeaderOut[27], 16)
+    fseeknextfile = HeaderOut[28]
+    fjsontype = HeaderOut[29]
+    fjsonlen = int(HeaderOut[30], 16)
+    fjsonsize = int(HeaderOut[31], 16)
+    fjsonchecksumtype = HeaderOut[32]
+    fjsonchecksum = HeaderOut[33]
+    fextrasize = int(HeaderOut[34], 16)
+    fextrafields = int(HeaderOut[35], 16)
     fextrafieldslist = []
-    extrastart =
+    extrastart = 36
     extraend = extrastart + fextrafields
     while(extrastart < extraend):
         fextrafieldslist.append(HeaderOut[extrastart])
         extrastart = extrastart + 1
+    fvendorfieldslist = []
+    fvendorfields = 0;
+    if(len(HeaderOut)>extraend):
+        extrastart = extraend
+        extraend = len(HeaderOut) - 4
+        while(extrastart < extraend):
+            fvendorfieldslist.append(HeaderOut[extrastart])
+            extrastart = extrastart + 1
+            fvendorfields = fvendorfields + 1
     if(fextrafields==1):
         try:
             fextrafieldslist = json.loads(base64.b64decode(fextrafieldslist[0]).decode("UTF-8"))
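The vendor-field scan added here treats everything between the declared extra fields and the final four checksum-related slots as opaque vendor data. A toy reconstruction of that slicing (field values are invented; real headers carry the full fixed field set first):

    header_tail = ["extraA", "vendorX", "vendorY", "md5", "md5", "aabb", "ccdd"]
    fextrafields = 1
    extrastart = 0
    extraend = extrastart + fextrafields
    extras = header_tail[extrastart:extraend]
    vendors = header_tail[extraend:len(header_tail) - 4]  # last 4 slots are checksum metadata
    print(extras, vendors)  # ['extraA'] ['vendorX', 'vendorY']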
@@ -4402,16 +4548,15 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
         pass
     fp.seek(len(delimiter), 1)
     fjend = fp.tell() - 1
-    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
-    if(not
+    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+    if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
         VerbosePrintOut("File JSON Data Checksum Error with file " +
                         fname + " at offset " + str(fheaderstart))
         VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
         return False
     fcs = HeaderOut[-2].lower()
     fccs = HeaderOut[-1].lower()
-    newfcs = GetHeaderChecksum(
-        HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs)
+    newfcs = GetHeaderChecksum(HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs, saltkey)
     if(fcs != newfcs and not skipchecksum):
         VerbosePrintOut("File Header Checksum Error with file " +
                         fname + " at offset " + str(fheaderstart))
@@ -4434,10 +4579,9 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
         fp.seek(fcsize, 1)
         pyhascontents = False
     fcontents.seek(0, 0)
-    newfccs = GetFileChecksum(
-        fcontents, HeaderOut[-3].lower(), False, formatspecs)
+    newfccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
     fcontents.seek(0, 0)
-    if(not
+    if(not CheckChecksums(fccs, newfccs) and not skipchecksum and not listonly):
         VerbosePrintOut("File Content Checksum Error with file " +
                         fname + " at offset " + str(fcontentstart))
         VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4454,8 +4598,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
         shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
         cfcontents.close()
         fcontents.seek(0, 0)
-        fccs = GetFileChecksum(
-            fcontents, HeaderOut[-3].lower(), False, formatspecs)
+        fccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
     fcontentend = fp.tell()
     if(re.findall("^\\+([0-9]+)", fseeknextfile)):
         fseeknextasnum = int(fseeknextfile.replace("+", ""))
@@ -4477,17 +4620,17 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
     fcontents.seek(0, 0)
     if(not contentasfile):
         fcontents = fcontents.read()
-    outlist = {'fheadersize': fheadsize, 'fhstart': fheaderstart, 'fhend': fhend, 'ftype': ftype, 'fencoding': fencoding, 'fcencoding': fcencoding, 'fname': fname, 'fbasedir': fbasedir, 'flinkname': flinkname, 'fsize': fsize, 'fatime': fatime, 'fmtime': fmtime, 'fctime': fctime, 'fbtime': fbtime, 'fmode': fmode, 'fchmode': fchmode, 'ftypemod': ftypemod, 'fwinattributes': fwinattributes, 'fcompression': fcompression, 'fcsize': fcsize, 'fuid': fuid, 'funame': funame, 'fgid': fgid, 'fgname': fgname, 'finode': finode, 'flinkcount': flinkcount,
-               'fdev': fdev, '
+    outlist = {'fheadersize': fheadsize, 'fhstart': fheaderstart, 'fhend': fhend, 'ftype': ftype, 'fencoding': fencoding, 'fcencoding': fcencoding, 'fname': fname, 'fbasedir': fbasedir, 'flinkname': flinkname, 'fsize': fsize, 'fblksize': fblksize, 'fblocks': fblocks, 'fflags': fflags, 'fatime': fatime, 'fmtime': fmtime, 'fctime': fctime, 'fbtime': fbtime, 'fmode': fmode, 'fchmode': fchmode, 'ftypemod': ftypemod, 'fwinattributes': fwinattributes, 'fcompression': fcompression, 'fcsize': fcsize, 'fuid': fuid, 'funame': funame, 'fgid': fgid, 'fgname': fgname, 'finode': finode, 'flinkcount': flinkcount,
+               'fdev': fdev, 'frdev': frdev, 'fseeknextfile': fseeknextfile, 'fheaderchecksumtype': HeaderOut[-4], 'fjsonchecksumtype': fjsonchecksumtype, 'fcontentchecksumtype': HeaderOut[-3], 'fnumfields': fnumfields + 2, 'frawheader': HeaderOut, 'fvendorfields': fvendorfields, 'fvendordata': fvendorfieldslist, 'fextrafields': fextrafields, 'fextrafieldsize': fextrasize, 'fextradata': fextrafieldslist, 'fjsontype': fjsontype, 'fjsonlen': fjsonlen, 'fjsonsize': fjsonsize, 'fjsonrawdata': fjsonrawcontent, 'fjsondata': fjsoncontent, 'fjstart': fjstart, 'fjend': fjend, 'fheaderchecksum': fcs, 'fjsonchecksum': fjsonchecksum, 'fcontentchecksum': fccs, 'fhascontents': pyhascontents, 'fcontentstart': fcontentstart, 'fcontentend': fcontentend, 'fcontentasfile': contentasfile, 'fcontents': fcontents}
     return outlist
 
 
-def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
     if(not hasattr(fp, "read")):
         return False
     delimiter = formatspecs['format_delimiter']
     fheaderstart = fp.tell()
-    if(
+    if(__use_new_style__):
         HeaderOut = ReadFileHeaderDataBySize(fp, delimiter)
     else:
         HeaderOut = ReadFileHeaderDataWoSize(fp, delimiter)
@@ -4505,36 +4648,38 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
     fbasedir = os.path.dirname(fname)
     flinkname = HeaderOut[6]
     fsize = int(HeaderOut[7], 16)
-
-
-
-
-
+    fblksize = int(HeaderOut[8], 16)
+    fblocks = int(HeaderOut[9], 16)
+    fflags = int(HeaderOut[10], 16)
+    fatime = int(HeaderOut[11], 16)
+    fmtime = int(HeaderOut[12], 16)
+    fctime = int(HeaderOut[13], 16)
+    fbtime = int(HeaderOut[14], 16)
+    fmode = int(HeaderOut[15], 16)
     fchmode = stat.S_IMODE(fmode)
     ftypemod = stat.S_IFMT(fmode)
-    fwinattributes = int(HeaderOut[
-    fcompression = HeaderOut[
-    fcsize = int(HeaderOut[
-    fuid = int(HeaderOut[
-    funame = HeaderOut[
-    fgid = int(HeaderOut[
-    fgname = HeaderOut[
-    fid = int(HeaderOut[
-    finode = int(HeaderOut[
-    flinkcount = int(HeaderOut[
-    fdev = int(HeaderOut[
-
-
-
-
-
-
-
-
-
-    fextrafields = int(HeaderOut[33], 16)
+    fwinattributes = int(HeaderOut[16], 16)
+    fcompression = HeaderOut[17]
+    fcsize = int(HeaderOut[18], 16)
+    fuid = int(HeaderOut[19], 16)
+    funame = HeaderOut[20]
+    fgid = int(HeaderOut[21], 16)
+    fgname = HeaderOut[22]
+    fid = int(HeaderOut[23], 16)
+    finode = int(HeaderOut[24], 16)
+    flinkcount = int(HeaderOut[25], 16)
+    fdev = int(HeaderOut[26], 16)
+    frdev = int(HeaderOut[27], 16)
+    fseeknextfile = HeaderOut[28]
+    fjsontype = HeaderOut[29]
+    fjsonlen = int(HeaderOut[30], 16)
+    fjsonsize = int(HeaderOut[31], 16)
+    fjsonchecksumtype = HeaderOut[32]
+    fjsonchecksum = HeaderOut[33]
+    fextrasize = int(HeaderOut[34], 16)
+    fextrafields = int(HeaderOut[35], 16)
     fextrafieldslist = []
-    extrastart =
+    extrastart = 36
     extraend = extrastart + fextrafields
     while(extrastart < extraend):
         fextrafieldslist.append(HeaderOut[extrastart])
@@ -4614,16 +4759,15 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
     except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
         pass
     fp.seek(len(delimiter), 1)
-    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
-    if(not
+    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+    if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
         VerbosePrintOut("File JSON Data Checksum Error with file " +
                         fname + " at offset " + str(fheaderstart))
         VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
         return False
     fcs = HeaderOut[-2].lower()
     fccs = HeaderOut[-1].lower()
-    newfcs = GetHeaderChecksum(
-        HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs)
+    newfcs = GetHeaderChecksum(HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs, saltkey)
     if(fcs != newfcs and not skipchecksum):
         VerbosePrintOut("File Header Checksum Error with file " +
                         fname + " at offset " + str(fheaderstart))
@@ -4646,9 +4790,8 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
     fp.seek(fcsize, 1)
     pyhascontents = False
     fcontents.seek(0, 0)
-    newfccs = GetFileChecksum(
-
-    if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
+    newfccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
+    if(not CheckChecksums(fccs, newfccs) and not skipchecksum and not listonly):
         VerbosePrintOut("File Content Checksum Error with file " +
                         fname + " at offset " + str(fcontentstart))
         VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4665,8 +4808,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
         shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
         cfcontents.close()
         fcontents.seek(0, 0)
-        fccs = GetFileChecksum(
-            fcontents, HeaderOut[-3].lower(), False, formatspecs)
+        fccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
     fcontentend = fp.tell()
     if(re.findall("^\\+([0-9]+)", fseeknextfile)):
         fseeknextasnum = int(fseeknextfile.replace("+", ""))
@@ -4688,12 +4830,12 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
     fcontents.seek(0, 0)
     if(not contentasfile):
         fcontents = fcontents.read()
-    outlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize, fuid, funame, fgid, fgname, fid,
-               finode, flinkcount, fdev,
+    outlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize, fuid, funame, fgid, fgname, fid,
+               finode, flinkcount, fdev, frdev, fseeknextfile, fjsoncontent, fextrafieldslist, HeaderOut[-4], HeaderOut[-3], fcontents]
     return outlist
 
 
-def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
     if(not hasattr(fp, "read")):
         return False
     delimiter = formatspecs['format_delimiter']
@@ -4713,7 +4855,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
         return False
     if(formdel != formatspecs['format_delimiter']):
         return False
-    if(
+    if(__use_new_style__):
         inheader = ReadFileHeaderDataBySize(
             fp, formatspecs['format_delimiter'])
     else:
@@ -4721,19 +4863,19 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
             fp, formatspecs['format_delimiter'])
     fprechecksumtype = inheader[-2]
     fprechecksum = inheader[-1]
-    headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
-    newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
+    headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs, saltkey)
+    newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
     if(not headercheck and not skipchecksum):
         VerbosePrintOut(
             "File Header Checksum Error with file at offset " + str(0))
         VerbosePrintOut("'" + fprechecksum + "' != " +
                         "'" + newfcs + "'")
         return False
-    fnumfiles = int(inheader[
-    outfseeknextfile = inheaderdata[
-    fjsonsize = int(inheaderdata[
-    fjsonchecksumtype = inheader[
-    fjsonchecksum = inheader[
+    fnumfiles = int(inheader[8], 16)
+    outfseeknextfile = inheaderdata[9]
+    fjsonsize = int(inheaderdata[12], 16)
+    fjsonchecksumtype = inheader[13]
+    fjsonchecksum = inheader[14]
     fp.read(fjsonsize)
     # Next seek directive
     if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
@@ -4756,8 +4898,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
     countnum = 0
     flist = []
     while(countnum < fnumfiles):
-        HeaderOut = ReadFileHeaderDataWithContent(
-            fp, listonly, uncompress, skipchecksum, formatspecs)
+        HeaderOut = ReadFileHeaderDataWithContent(fp, listonly, uncompress, skipchecksum, formatspecs, saltkey)
        if(len(HeaderOut) == 0):
            break
        flist.append(HeaderOut)
@@ -4765,7 +4906,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
     return flist
 
 
-def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
+def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None, seektoend=False):
     if(not hasattr(fp, "read")):
         return False
     delimiter = formatspecs['format_delimiter']
@@ -4785,16 +4926,16 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
         return False
     if(formdel != formatspecs['format_delimiter']):
         return False
-    if(
+    if(__use_new_style__):
         inheader = ReadFileHeaderDataBySize(
             fp, formatspecs['format_delimiter'])
     else:
         inheader = ReadFileHeaderDataWoSize(
             fp, formatspecs['format_delimiter'])
-    fnumextrafieldsize = int(inheader[
-    fnumextrafields = int(inheader[
+    fnumextrafieldsize = int(inheader[15], 16)
+    fnumextrafields = int(inheader[16], 16)
     fextrafieldslist = []
-    extrastart =
+    extrastart = 17
     extraend = extrastart + fnumextrafields
     while(extrastart < extraend):
         fextrafieldslist.append(inheader[extrastart])
@@ -4808,20 +4949,31 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
             fextrafieldslist = json.loads(fextrafieldslist[0])
         except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
             pass
+    fvendorfieldslist = []
+    fvendorfields = 0;
+    if(len(inheader)>extraend):
+        extrastart = extraend
+        extraend = len(inheader) - 2
+        while(extrastart < extraend):
+            fvendorfieldslist.append(HeaderOut[extrastart])
+            extrastart = extrastart + 1
+            fvendorfields = fvendorfields + 1
     formversion = re.findall("([\\d]+)", formstring)
     fheadsize = int(inheader[0], 16)
     fnumfields = int(inheader[1], 16)
-
-
-
-
-
-
-
-
-
-
-
+    fheadctime = int(inheader[1], 16)
+    fheadmtime = int(inheader[1], 16)
+    fhencoding = inheader[4]
+    fostype = inheader[5]
+    fpythontype = inheader[6]
+    fprojectname = inheader[7]
+    fnumfiles = int(inheader[8], 16)
+    fseeknextfile = inheader[9]
+    fjsontype = inheader[10]
+    fjsonlen = int(inheader[11], 16)
+    fjsonsize = int(inheader[12], 16)
+    fjsonchecksumtype = inheader[13]
+    fjsonchecksum = inheader[14]
     fjsoncontent = {}
     fjstart = fp.tell()
     if(fjsontype=="json"):
@@ -4907,16 +5059,16 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
         fp.seek(fseeknextasnum, 0)
     else:
         return False
-    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
-    if(not
+    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+    if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
         VerbosePrintOut("File JSON Data Checksum Error with file " +
                         fname + " at offset " + str(fheaderstart))
         VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
         return False
     fprechecksumtype = inheader[-2]
     fprechecksum = inheader[-1]
-    headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
-    newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
+    headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs, saltkey)
+    newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
     if(not headercheck and not skipchecksum):
         VerbosePrintOut(
             "File Header Checksum Error with file at offset " + str(0))
@@ -4925,7 +5077,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
         return False
     formversions = re.search('(.*?)(\\d+)', formstring).groups()
     fcompresstype = ""
-    outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fprojectname': fprojectname, 'fimptype': fpythontype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fseeknextfile': fseeknextfile, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'fjsonchecksumtype': fjsonchecksumtype, 'fjsontype': fjsontype, 'fjsonlen': fjsonlen, 'fjsonsize': fjsonsize, 'fjsonrawdata': fjsonrawcontent, 'fjsondata': fjsoncontent, 'fjstart': fjstart, 'fjend': fjend, 'fjsonchecksum': fjsonchecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
+    outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fmtime': fheadmtime, 'fctime': fheadctime, 'fversion': formversions[1], 'fostype': fostype, 'fprojectname': fprojectname, 'fimptype': fpythontype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fseeknextfile': fseeknextfile, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'fjsonchecksumtype': fjsonchecksumtype, 'fjsontype': fjsontype, 'fjsonlen': fjsonlen, 'fjsonsize': fjsonsize, 'fjsonrawdata': fjsonrawcontent, 'fjsondata': fjsoncontent, 'fjstart': fjstart, 'fjend': fjend, 'fjsonchecksum': fjsonchecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'fvendorfields': fvendorfields, 'fvendordata': fvendorfieldslist, 'ffilelist': []}
     if (seekstart < 0) or (seekstart > fnumfiles):
         seekstart = 0
     if (seekend == 0) or (seekend > fnumfiles) or (seekend < seekstart):
@@ -4952,16 +5104,15 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
         prefjsonchecksum = preheaderdata[31]
         prejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
         fp.seek(len(delimiter), 1)
-        prejsonfcs = GetFileChecksum(prejsoncontent, prefjsonchecksumtype, True, formatspecs)
-        if(not
+        prejsonfcs = GetFileChecksum(prejsoncontent, prefjsonchecksumtype, True, formatspecs, saltkey)
+        if(not CheckChecksums(prefjsonchecksum, prejsonfcs) and not skipchecksum):
            VerbosePrintOut("File JSON Data Checksum Error with file " +
                            prefname + " at offset " + str(prefhstart))
            VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
            return False
-        prenewfcs = GetHeaderChecksum(
-            preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
+        prenewfcs = GetHeaderChecksum(preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs, saltkey)
        prefcs = preheaderdata[-2]
-        if(not
+        if(not CheckChecksums(prefcs, prenewfcs) and not skipchecksum):
            VerbosePrintOut("File Header Checksum Error with file " +
                            prefname + " at offset " + str(prefhstart))
            VerbosePrintOut("'" + prefcs + "' != " +
@@ -4976,11 +5127,10 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
         if(prefsize > 0):
             prefcontents.write(fp.read(prefsize))
             prefcontents.seek(0, 0)
-            prenewfccs = GetFileChecksum(
-                prefcontents, preheaderdata[-3].lower(), False, formatspecs)
+            prenewfccs = GetFileChecksum(prefcontents, preheaderdata[-3].lower(), False, formatspecs, saltkey)
         prefccs = preheaderdata[-1]
         pyhascontents = True
-        if(not
+        if(not CheckChecksums(prefccs, prenewfccs) and not skipchecksum):
             VerbosePrintOut("File Content Checksum Error with file " +
                             prefname + " at offset " + str(prefcontentstart))
             VerbosePrintOut("'" + prefccs +
@@ -5007,8 +5157,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
     realidnum = 0
     countnum = seekstart
     while (fp.tell() < CatSizeEnd) if seektoend else (countnum < seekend):
-        HeaderOut = ReadFileHeaderDataWithContentToArray(
-            fp, listonly, contentasfile, uncompress, skipchecksum, formatspecs)
+        HeaderOut = ReadFileHeaderDataWithContentToArray(fp, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey)
        if(len(HeaderOut) == 0):
            break
        HeaderOut.update({'fid': realidnum, 'fidalt': realidnum})
@@ -5019,7 +5168,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
     return outlist
 
 
-def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
+def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None, seektoend=False):
     if(not hasattr(fp, "read")):
         return False
     delimiter = formatspecs['format_delimiter']
@@ -5039,16 +5188,16 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
|
|
|
5039
5188
|
return False
|
|
5040
5189
|
if(formdel != formatspecs['format_delimiter']):
|
|
5041
5190
|
return False
|
|
5042
|
-
if(
|
|
5191
|
+
if(__use_new_style__):
|
|
5043
5192
|
inheader = ReadFileHeaderDataBySize(
|
|
5044
5193
|
fp, formatspecs['format_delimiter'])
|
|
5045
5194
|
else:
|
|
5046
5195
|
inheader = ReadFileHeaderDataWoSize(
|
|
5047
5196
|
fp, formatspecs['format_delimiter'])
|
|
5048
|
-
fnumextrafieldsize = int(inheader[
|
|
5049
|
-
fnumextrafields = int(inheader[
|
|
5197
|
+
fnumextrafieldsize = int(inheader[15], 16)
|
|
5198
|
+
fnumextrafields = int(inheader[16], 16)
|
|
5050
5199
|
fextrafieldslist = []
|
|
5051
|
-
extrastart =
|
|
5200
|
+
extrastart = 17
|
|
5052
5201
|
extraend = extrastart + fnumextrafields
|
|
5053
5202
|
while(extrastart < extraend):
|
|
5054
5203
|
fextrafieldslist.append(inheader[extrastart])
|
|
@@ -5065,13 +5214,13 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
|
|
|
5065
5214
|
formversion = re.findall("([\\d]+)", formstring)
|
|
5066
5215
|
fheadsize = int(inheader[0], 16)
|
|
5067
5216
|
fnumfields = int(inheader[1], 16)
|
|
5068
|
-
fnumfiles = int(inheader[
|
|
5069
|
-
fseeknextfile = inheaderdata[
|
|
5070
|
-
fjsontype = int(inheader[
|
|
5071
|
-
fjsonlen = int(inheader[
|
|
5072
|
-
fjsonsize = int(inheader[
|
|
5073
|
-
fjsonchecksumtype = inheader[
|
|
5074
|
-
fjsonchecksum = inheader[
|
|
5217
|
+
fnumfiles = int(inheader[8], 16)
|
|
5218
|
+
fseeknextfile = inheaderdata[9]
|
|
5219
|
+
fjsontype = int(inheader[10], 16)
|
|
5220
|
+
fjsonlen = int(inheader[11], 16)
|
|
5221
|
+
fjsonsize = int(inheader[12], 16)
|
|
5222
|
+
fjsonchecksumtype = inheader[13]
|
|
5223
|
+
fjsonchecksum = inheader[14]
|
|
5075
5224
|
fjsoncontent = {}
|
|
5076
5225
|
fjstart = fp.tell()
|
|
5077
5226
|
fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
|
|
@@ -5093,16 +5242,16 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
|
|
|
5093
5242
|
fp.seek(fseeknextasnum, 0)
|
|
5094
5243
|
else:
|
|
5095
5244
|
return False
|
|
5096
|
-
jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
|
|
5097
|
-
if(not
|
|
5245
|
+
jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
|
|
5246
|
+
if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
|
|
5098
5247
|
VerbosePrintOut("File JSON Data Checksum Error with file " +
|
|
5099
5248
|
fname + " at offset " + str(fheaderstart))
|
|
5100
5249
|
VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
|
|
5101
5250
|
return False
|
|
5102
5251
|
fprechecksumtype = inheader[-2]
|
|
5103
5252
|
fprechecksum = inheader[-1]
|
|
5104
|
-
headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
|
|
5105
|
-
newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
|
|
5253
|
+
headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs, saltkey)
|
|
5254
|
+
newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
|
|
5106
5255
|
if(not headercheck and not skipchecksum):
|
|
5107
5256
|
VerbosePrintOut(
|
|
5108
5257
|
"File Header Checksum Error with file at offset " + str(0))
|
|
@@ -5121,7 +5270,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
|
|
|
5121
5270
|
il = 0
|
|
5122
5271
|
while(il < seekstart):
|
|
5123
5272
|
prefhstart = fp.tell()
|
|
5124
|
-
if(
|
|
5273
|
+
if(__use_new_style__):
|
|
5125
5274
|
preheaderdata = ReadFileHeaderDataBySize(
|
|
5126
5275
|
fp, formatspecs['format_delimiter'])
|
|
5127
5276
|
else:
|
|
@@ -5143,16 +5292,15 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
|
|
|
5143
5292
|
prefjsonchecksum = preheaderdata[31]
|
|
5144
5293
|
prefprejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
|
|
5145
5294
|
fp.seek(len(delimiter), 1)
|
|
5146
|
-
prejsonfcs = GetFileChecksum(prefprejsoncontent, prefjsonchecksumtype, True, formatspecs)
|
|
5147
|
-
if(not
|
|
5295
|
+
prejsonfcs = GetFileChecksum(prefprejsoncontent, prefjsonchecksumtype, True, formatspecs, saltkey)
|
|
5296
|
+
if(not CheckChecksums(prefjsonchecksum, prejsonfcs) and not skipchecksum):
|
|
5148
5297
|
VerbosePrintOut("File JSON Data Checksum Error with file " +
|
|
5149
5298
|
prefname + " at offset " + str(prefhstart))
|
|
5150
5299
|
VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
|
|
5151
5300
|
return False
|
|
5152
|
-
prenewfcs = GetHeaderChecksum(
|
|
5153
|
-
preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
|
|
5301
|
+
prenewfcs = GetHeaderChecksum(preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs, saltkey)
|
|
5154
5302
|
prefcs = preheaderdata[-2]
|
|
5155
|
-
if(not
|
|
5303
|
+
if(not CheckChecksums(prefcs, prenewfcs) and not skipchecksum):
|
|
5156
5304
|
VerbosePrintOut("File Header Checksum Error with file " +
|
|
5157
5305
|
prefname + " at offset " + str(prefhstart))
|
|
5158
5306
|
VerbosePrintOut("'" + prefcs + "' != " +
|
|
@@ -5169,11 +5317,10 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
|
|
|
5169
5317
|
prefcontents = fp.read(prefsize)
|
|
5170
5318
|
else:
|
|
5171
5319
|
prefcontents = fp.read(prefcsize)
|
|
5172
|
-
prenewfccs = GetFileChecksum(
|
|
5173
|
-
prefcontents, preheaderdata[-3].lower(), False, formatspecs)
|
|
5320
|
+
prenewfccs = GetFileChecksum(prefcontents, preheaderdata[-3].lower(), False, formatspecs, saltkey)
|
|
5174
5321
|
prefccs = preheaderdata[-1]
|
|
5175
5322
|
pyhascontents = True
|
|
5176
|
-
if(not
|
|
5323
|
+
if(not CheckChecksums(prefccs, prenewfccs) and not skipchecksum):
|
|
5177
5324
|
VerbosePrintOut("File Content Checksum Error with file " +
|
|
5178
5325
|
prefname + " at offset " + str(prefcontentstart))
|
|
5179
5326
|
VerbosePrintOut("'" + prefccs +
|
|
@@ -5200,8 +5347,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
|
|
|
5200
5347
|
realidnum = 0
|
|
5201
5348
|
countnum = seekstart
|
|
5202
5349
|
while (fp.tell() < CatSizeEnd) if seektoend else (countnum < seekend):
|
|
5203
|
-
HeaderOut = ReadFileHeaderDataWithContentToList(
|
|
5204
|
-
fp, listonly, contentasfile, uncompress, skipchecksum, formatspecs)
|
|
5350
|
+
HeaderOut = ReadFileHeaderDataWithContentToList(fp, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey)
|
|
5205
5351
|
if(len(HeaderOut) == 0):
|
|
5206
5352
|
break
|
|
5207
5353
|
outlist.append(HeaderOut)
|
|
@@ -5209,7 +5355,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
|
|
|
5209
5355
|
realidnum = realidnum + 1
|
|
5210
5356
|
return outlist
|
|
5211
5357
|
|
|
5212
|
-
def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
|
|
5358
|
+
def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
|
|
5213
5359
|
if(hasattr(infile, "read") or hasattr(infile, "write")):
|
|
5214
5360
|
fp = infile
|
|
5215
5361
|
try:
|
|
@@ -5304,7 +5450,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
|
|
|
5304
5450
|
else:
|
|
5305
5451
|
break
|
|
5306
5452
|
readfp.seek(oldfppos, 0)
|
|
5307
|
-
ArchiveList.append(ReadFileDataWithContentToArray(readfp, currentfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, seektoend))
|
|
5453
|
+
ArchiveList.append(ReadFileDataWithContentToArray(readfp, currentfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, saltkey, seektoend))
|
|
5308
5454
|
currentfilepos = readfp.tell()
|
|
5309
5455
|
else:
|
|
5310
5456
|
infp = UncompressFileAlt(readfp, formatspecs, currentfilepos)
|
|
@@ -5326,27 +5472,27 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
|
|
|
5326
5472
|
else:
|
|
5327
5473
|
break
|
|
5328
5474
|
infp.seek(oldinfppos, 0)
|
|
5329
|
-
ArchiveList.append(ReadFileDataWithContentToArray(infp, currentinfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, seektoend))
|
|
5475
|
+
ArchiveList.append(ReadFileDataWithContentToArray(infp, currentinfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, saltkey, seektoend))
|
|
5330
5476
|
currentinfilepos = infp.tell()
|
|
5331
5477
|
currentfilepos = readfp.tell()
|
|
5332
5478
|
return ArchiveList
|
|
5333
5479
|
|
|
5334
5480
|
|
|
5335
|
-
def ReadInMultipleFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
|
|
5481
|
+
def ReadInMultipleFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
|
|
5336
5482
|
if(isinstance(infile, (list, tuple, ))):
|
|
5337
5483
|
pass
|
|
5338
5484
|
else:
|
|
5339
5485
|
infile = [infile]
|
|
5340
5486
|
outretval = []
|
|
5341
5487
|
for curfname in infile:
|
|
5342
|
-
outretval.append(ReadInFileWithContentToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend))
|
|
5488
|
+
outretval.append(ReadInFileWithContentToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend))
|
|
5343
5489
|
return outretval
|
|
5344
5490
|
|
|
5345
|
-
def ReadInMultipleFilesWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
|
|
5346
|
-
return ReadInMultipleFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
|
|
5491
|
+
def ReadInMultipleFilesWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
|
|
5492
|
+
return ReadInMultipleFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
|
|
5347
5493
|
|
|
5348
5494
|
|
|
5349
|
-
def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
|
|
5495
|
+
def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
|
|
5350
5496
|
if(hasattr(infile, "read") or hasattr(infile, "write")):
|
|
5351
5497
|
fp = infile
|
|
5352
5498
|
try:
|
|
@@ -5441,7 +5587,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
|
|
|
5441
5587
|
else:
|
|
5442
5588
|
break
|
|
5443
5589
|
readfp.seek(oldfppos, 0)
|
|
5444
|
-
ArchiveList.append(ReadFileDataWithContentToList(readfp, currentfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, seektoend))
|
|
5590
|
+
ArchiveList.append(ReadFileDataWithContentToList(readfp, currentfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, saltkey, seektoend))
|
|
5445
5591
|
currentfilepos = readfp.tell()
|
|
5446
5592
|
else:
|
|
5447
5593
|
infp = UncompressFileAlt(readfp, formatspecs, currentfilepos)
|
|
@@ -5463,24 +5609,24 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
|
|
|
5463
5609
|
else:
|
|
5464
5610
|
break
|
|
5465
5611
|
infp.seek(oldinfppos, 0)
|
|
5466
|
-
ArchiveList.append(ReadFileDataWithContentToList(infp, currentinfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, seektoend))
|
|
5612
|
+
ArchiveList.append(ReadFileDataWithContentToList(infp, currentinfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, saltkey, seektoend))
|
|
5467
5613
|
currentinfilepos = infp.tell()
|
|
5468
5614
|
currentfilepos = readfp.tell()
|
|
5469
5615
|
return ArchiveList
|
|
5470
5616
|
|
|
5471
5617
|
|
|
5472
|
-
def ReadInMultipleFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
|
|
5618
|
+
def ReadInMultipleFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
|
|
5473
5619
|
if(isinstance(infile, (list, tuple, ))):
|
|
5474
5620
|
pass
|
|
5475
5621
|
else:
|
|
5476
5622
|
infile = [infile]
|
|
5477
5623
|
outretval = {}
|
|
5478
5624
|
for curfname in infile:
|
|
5479
|
-
outretval.append(ReadInFileWithContentToList(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend))
|
|
5625
|
+
outretval.append(ReadInFileWithContentToList(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend))
|
|
5480
5626
|
return outretval
|
|
5481
5627
|
|
|
5482
|
-
def ReadInMultipleFilesWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
|
|
5483
|
-
return ReadInMultipleFileWithContentToList(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
|
|
5628
|
+
def ReadInMultipleFilesWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
|
|
5629
|
+
return ReadInMultipleFileWithContentToList(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
|
|
5484
5630
|
|
|
5485
5631
|
|
|
5486
5632
|
def _field_to_bytes(x):
|
|
@@ -5534,7 +5680,7 @@ def AppendNullBytes(indata=None, delimiter=__file_format_dict__['format_delimite
|
|
|
5534
5680
|
def _hex_lower(n):
|
|
5535
5681
|
return format(int(n), 'x').lower()
|
|
5536
5682
|
|
|
5537
|
-
def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__):
|
|
5683
|
+
def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__, saltkey=None):
|
|
5538
5684
|
"""
|
|
5539
5685
|
Build and write the archive file header.
|
|
5540
5686
|
Returns the same file-like 'fp' on success, or False on failure.
|
|
@@ -5598,18 +5744,21 @@ def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksu
|
|
|
5598
5744
|
tmpoutlist.append(fjsonsize)
|
|
5599
5745
|
if(len(jsondata) > 0):
|
|
5600
5746
|
tmpoutlist.append(checksumtype[1])
|
|
5601
|
-
tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[1], True, formatspecs))
|
|
5747
|
+
tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[1], True, formatspecs, saltkey))
|
|
5602
5748
|
else:
|
|
5603
5749
|
tmpoutlist.append("none")
|
|
5604
|
-
tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs))
|
|
5750
|
+
tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs, saltkey))
|
|
5605
5751
|
# Preserve your original "tmpoutlen" computation exactly
|
|
5606
5752
|
tmpoutlist.append(extrasizelen)
|
|
5607
5753
|
tmpoutlist.append(extrafields)
|
|
5608
|
-
tmpoutlen =
|
|
5754
|
+
tmpoutlen = 10 + len(tmpoutlist) + len(xlist)
|
|
5609
5755
|
tmpoutlenhex = _hex_lower(tmpoutlen)
|
|
5610
|
-
|
|
5756
|
+
if(hasattr(time, "time_ns")):
|
|
5757
|
+
fctime = format(int(time.time_ns()), 'x').lower()
|
|
5758
|
+
else:
|
|
5759
|
+
fctime = format(int(to_ns(time.time())), 'x').lower()
|
|
5611
5760
|
# Serialize the first group
|
|
5612
|
-
fnumfilesa = AppendNullBytes([tmpoutlenhex, fencoding, platform.system(), py_implementation, __program_name__, fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
|
|
5761
|
+
fnumfilesa = AppendNullBytes([tmpoutlenhex, fctime, fctime, fencoding, platform.system(), py_implementation, __program_name__, fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
|
|
5613
5762
|
# Append tmpoutlist
|
|
5614
5763
|
fnumfilesa += AppendNullBytes(tmpoutlist, delimiter)
|
|
5615
5764
|
# Append extradata items if any
|
|
@@ -5619,7 +5768,7 @@ def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksu
|
|
|
5619
5768
|
fnumfilesa += AppendNullByte(checksumtype[0], delimiter)
|
|
5620
5769
|
|
|
5621
5770
|
# 5) inner checksum over fnumfilesa
|
|
5622
|
-
outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs)
|
|
5771
|
+
outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs, saltkey)
|
|
5623
5772
|
tmpfileoutstr = fnumfilesa + AppendNullByte(outfileheadercshex, delimiter)
|
|
5624
5773
|
|
|
5625
5774
|
# 6) size of (tmpfileoutstr) excluding one delimiter, per your original math
|
|
@@ -5632,7 +5781,7 @@ def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksu
|
|
|
5632
5781
|
+ fnumfilesa
|
|
5633
5782
|
)
|
|
5634
5783
|
|
|
5635
|
-
outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs)
|
|
5784
|
+
outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs, saltkey)
|
|
5636
5785
|
fnumfilesa += AppendNullByte(outfileheadercshex, delimiter)
|
|
5637
5786
|
|
|
5638
5787
|
# 8) final total size field (again per your original logic)
|
|
@@ -5665,21 +5814,21 @@ def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksu
|
|
|
5665
5814
|
return fp
|
|
5666
5815
|
|
|
5667
5816
|
|
|
5668
|
-
def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__):
|
|
5817
|
+
def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None):
|
|
5669
5818
|
if(IsNestedDict(formatspecs) and fmttype in formatspecs):
|
|
5670
5819
|
formatspecs = formatspecs[fmttype]
|
|
5671
5820
|
elif(IsNestedDict(formatspecs) and fmttype not in formatspecs):
|
|
5672
5821
|
fmttype = __file_format_default__
|
|
5673
5822
|
formatspecs = formatspecs[fmttype]
|
|
5674
|
-
AppendFileHeader(fp, 0, "UTF-8", [], {}, checksumtype, formatspecs)
|
|
5823
|
+
AppendFileHeader(fp, 0, "UTF-8", [], {}, checksumtype, formatspecs, saltkey)
|
|
5675
5824
|
return fp
|
|
5676
5825
|
|
|
5677
5826
|
|
|
5678
|
-
def MakeEmptyArchiveFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__):
|
|
5679
|
-
return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)
|
|
5827
|
+
def MakeEmptyArchiveFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None):
|
|
5828
|
+
return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs, saltkey)
|
|
5680
5829
|
|
|
5681
5830
|
|
|
5682
|
-
def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, returnfp=False):
|
|
5831
|
+
def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, returnfp=False):
|
|
5683
5832
|
if(IsNestedDict(formatspecs) and fmttype=="auto" and
|
|
5684
5833
|
(outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
|
|
5685
5834
|
get_in_ext = os.path.splitext(outfile)
|
|
@@ -5709,6 +5858,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
|
|
|
5709
5858
|
fp = MkTempFile()
|
|
5710
5859
|
elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
|
|
5711
5860
|
fp = outfile
|
|
5861
|
+
return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs, saltkey)
|
|
5712
5862
|
elif(re.findall(__upload_proto_support__, outfile)):
|
|
5713
5863
|
fp = MkTempFile()
|
|
5714
5864
|
else:
|
|
@@ -5720,7 +5870,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
|
|
|
5720
5870
|
fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
|
|
5721
5871
|
except PermissionError:
|
|
5722
5872
|
return False
|
|
5723
|
-
AppendFileHeader(fp, 0, "UTF-8", [], {}, checksumtype, formatspecs)
|
|
5873
|
+
AppendFileHeader(fp, 0, "UTF-8", ['hello', 'goodbye'], {}, checksumtype, formatspecs, saltkey)
|
|
5724
5874
|
if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
|
|
5725
5875
|
fp = CompressOpenFileAlt(
|
|
5726
5876
|
fp, compression, compressionlevel, compressionuselist, formatspecs)
|
|
@@ -5751,11 +5901,11 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
|
|
|
5751
5901
|
return True
|
|
5752
5902
|
|
|
5753
5903
|
|
|
5754
|
-
def MakeEmptyArchiveFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__, returnfp=False):
|
|
5755
|
-
return MakeEmptyFile(outfile, "auto", compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, returnfp)
|
|
5904
|
+
def MakeEmptyArchiveFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, returnfp=False):
|
|
5905
|
+
return MakeEmptyFile(outfile, "auto", compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, returnfp)
|
|
5756
5906
|
|
|
5757
5907
|
|
|
5758
|
-
def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, filecontent="", checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__):
|
|
5908
|
+
def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, filecontent="", checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None):
|
|
5759
5909
|
if(not hasattr(fp, "write")):
|
|
5760
5910
|
return False
|
|
5761
5911
|
if (isinstance(extradata, dict) or IsNestedDictAlt(extradata)) and len(extradata) > 0:
|
|
@@ -5787,10 +5937,10 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
|
|
|
5787
5937
|
tmpoutlist.append(fjsonsize)
|
|
5788
5938
|
if(len(jsondata) > 0):
|
|
5789
5939
|
tmpoutlist.append(checksumtype[2])
|
|
5790
|
-
tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[2], True, formatspecs))
|
|
5940
|
+
tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[2], True, formatspecs, saltkey))
|
|
5791
5941
|
else:
|
|
5792
5942
|
tmpoutlist.append("none")
|
|
5793
|
-
tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs))
|
|
5943
|
+
tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs, saltkey))
|
|
5794
5944
|
tmpoutlist.append(extrasizelen)
|
|
5795
5945
|
tmpoutlist.append(extrafields)
|
|
5796
5946
|
outfileoutstr = AppendNullBytes(
|
|
@@ -5805,22 +5955,18 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
|
|
|
5805
5955
|
outfileoutstr = outfileoutstr + \
|
|
5806
5956
|
AppendNullBytes(checksumlist, formatspecs['format_delimiter'])
|
|
5807
5957
|
nullstrecd = formatspecs['format_delimiter'].encode('UTF-8')
|
|
5808
|
-
outfileheadercshex = GetFileChecksum(
|
|
5809
|
-
outfileoutstr, checksumtype[0], True, formatspecs)
|
|
5958
|
+
outfileheadercshex = GetFileChecksum(outfileoutstr, checksumtype[0], True, formatspecs, saltkey)
|
|
5810
5959
|
if(len(filecontent) == 0):
|
|
5811
|
-
outfilecontentcshex = GetFileChecksum(
|
|
5812
|
-
filecontent, "none", False, formatspecs)
|
|
5960
|
+
outfilecontentcshex = GetFileChecksum(filecontent, "none", False, formatspecs, saltkey)
|
|
5813
5961
|
else:
|
|
5814
|
-
outfilecontentcshex = GetFileChecksum(
|
|
5815
|
-
filecontent, checksumtype[1], False, formatspecs)
|
|
5962
|
+
outfilecontentcshex = GetFileChecksum(filecontent, checksumtype[1], False, formatspecs, saltkey)
|
|
5816
5963
|
tmpfileoutstr = outfileoutstr + \
|
|
5817
5964
|
AppendNullBytes([outfileheadercshex, outfilecontentcshex],
|
|
5818
5965
|
formatspecs['format_delimiter'])
|
|
5819
5966
|
formheaersize = format(int(len(tmpfileoutstr) - len(formatspecs['format_delimiter'])), 'x').lower()
|
|
5820
5967
|
outfileoutstr = AppendNullByte(
|
|
5821
5968
|
formheaersize, formatspecs['format_delimiter']) + outfileoutstr
|
|
5822
|
-
outfileheadercshex = GetFileChecksum(
|
|
5823
|
-
outfileoutstr, checksumtype[0], True, formatspecs)
|
|
5969
|
+
outfileheadercshex = GetFileChecksum(outfileoutstr, checksumtype[0], True, formatspecs, saltkey)
|
|
5824
5970
|
outfileoutstr = outfileoutstr + \
|
|
5825
5971
|
AppendNullBytes([outfileheadercshex, outfilecontentcshex],
|
|
5826
5972
|
formatspecs['format_delimiter'])
|
|
@@ -5838,14 +5984,11 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
|
|
|
5838
5984
|
pass
|
|
5839
5985
|
return fp
|
|
5840
5986
|
|
|
5841
|
-
def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
|
|
5987
|
+
def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
|
|
5842
5988
|
if(not hasattr(fp, "write")):
|
|
5843
5989
|
return False
|
|
5844
|
-
advancedlist =
|
|
5845
|
-
altinode =
|
|
5846
|
-
if(verbose):
|
|
5847
|
-
logging.basicConfig(format="%(message)s",
|
|
5848
|
-
stream=PY_STDOUT_TEXT, level=logging.DEBUG)
|
|
5990
|
+
advancedlist = __use_advanced_list__
|
|
5991
|
+
altinode = __use_alt_inode__
|
|
5849
5992
|
infilelist = []
|
|
5850
5993
|
if(infiles == "-"):
|
|
5851
5994
|
for line in PY_STDIN_TEXT:
|
|
@@ -5887,7 +6030,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
|
|
|
5887
6030
|
inodetoforminode = {}
|
|
5888
6031
|
numfiles = int(len(GetDirList))
|
|
5889
6032
|
fnumfiles = format(numfiles, 'x').lower()
|
|
5890
|
-
AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
|
|
6033
|
+
AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
|
|
5891
6034
|
try:
|
|
5892
6035
|
fp.flush()
|
|
5893
6036
|
if(hasattr(os, "sync")):
|
|
@@ -5916,14 +6059,24 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
|
|
|
5916
6059
|
FullSizeFilesAlt += fstatinfo.st_rsize
|
|
5917
6060
|
except AttributeError:
|
|
5918
6061
|
FullSizeFilesAlt += fstatinfo.st_size
|
|
6062
|
+
fblksize = 0
|
|
6063
|
+
if(hasattr(fstatinfo, "st_blksize")):
|
|
6064
|
+
fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
|
|
6065
|
+
fblocks = 0
|
|
6066
|
+
if(hasattr(fstatinfo, "st_blocks")):
|
|
6067
|
+
fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
|
|
6068
|
+
fflags = 0
|
|
6069
|
+
if(hasattr(fstatinfo, "st_flags")):
|
|
6070
|
+
fflags = format(int(fstatinfo.st_flags), 'x').lower()
|
|
5919
6071
|
ftype = 0
|
|
5920
|
-
if(hasattr(os.path, "isjunction") and os.path.isjunction(fname)):
|
|
6072
|
+
if(not followlink and hasattr(os.path, "isjunction") and os.path.isjunction(fname)):
|
|
5921
6073
|
ftype = 13
|
|
5922
|
-
elif(hasattr(fstatinfo, "st_blocks") and fstatinfo.st_blocks * 512 < fstatinfo.st_size):
|
|
5923
|
-
ftype = 12
|
|
5924
6074
|
elif(stat.S_ISREG(fpremode)):
|
|
5925
|
-
|
|
5926
|
-
|
|
6075
|
+
if(hasattr(fstatinfo, "st_blocks") and fstatinfo.st_size > 0 and fstatinfo.st_blocks * 512 < fstatinfo.st_size):
|
|
6076
|
+
ftype = 12
|
|
6077
|
+
else:
|
|
6078
|
+
ftype = 0
|
|
6079
|
+
elif(not followlink and stat.S_ISLNK(fpremode)):
|
|
5927
6080
|
ftype = 2
|
|
5928
6081
|
elif(stat.S_ISCHR(fpremode)):
|
|
5929
6082
|
ftype = 3
|
|
@@ -5945,43 +6098,42 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
|
|
|
5945
6098
|
ftype = 0
|
|
5946
6099
|
flinkname = ""
|
|
5947
6100
|
fcurfid = format(int(curfid), 'x').lower()
|
|
5948
|
-
if
|
|
6101
|
+
if(not followlink and finode != 0):
|
|
5949
6102
|
unique_id = (fstatinfo.st_dev, finode)
|
|
5950
|
-
if
|
|
5951
|
-
if
|
|
6103
|
+
if(ftype != 1):
|
|
6104
|
+
if(unique_id in inodetofile):
|
|
5952
6105
|
# Hard link detected
|
|
5953
6106
|
ftype = 1
|
|
5954
6107
|
flinkname = inodetofile[unique_id]
|
|
5955
|
-
if altinode:
|
|
5956
|
-
fcurinode = format(int(unique_id[1]), 'x').lower()
|
|
5957
|
-
else:
|
|
5958
|
-
fcurinode = format(int(inodetoforminode[unique_id]), 'x').lower()
|
|
5959
6108
|
else:
|
|
5960
|
-
#
|
|
5961
|
-
inodelist.append(unique_id)
|
|
6109
|
+
# First time seeing this inode
|
|
5962
6110
|
inodetofile[unique_id] = fname
|
|
6111
|
+
if(unique_id not in inodetoforminode):
|
|
5963
6112
|
inodetoforminode[unique_id] = curinode
|
|
5964
|
-
|
|
5965
|
-
|
|
5966
|
-
|
|
5967
|
-
|
|
5968
|
-
|
|
6113
|
+
curinode = curinode + 1
|
|
6114
|
+
if(altinode):
|
|
6115
|
+
# altinode == True → use real inode number
|
|
6116
|
+
fcurinode = format(int(unique_id[1]), 'x').lower()
|
|
6117
|
+
else:
|
|
6118
|
+
# altinode == False → use synthetic inode id
|
|
6119
|
+
fcurinode = format(int(inodetoforminode[unique_id]), 'x').lower()
|
|
5969
6120
|
else:
|
|
5970
6121
|
# Handle cases where inodes are not supported or symlinks are followed
|
|
5971
6122
|
fcurinode = format(int(curinode), 'x').lower()
|
|
5972
|
-
curinode
|
|
6123
|
+
curinode = curinode + 1
|
|
5973
6124
|
curfid = curfid + 1
|
|
5974
6125
|
if(ftype == 2):
|
|
5975
6126
|
flinkname = os.readlink(fname)
|
|
5976
|
-
if(not os.path.exists(
|
|
6127
|
+
if(not os.path.exists(fname)):
|
|
5977
6128
|
return False
|
|
5978
6129
|
try:
|
|
5979
6130
|
fdev = fstatinfo.st_rdev
|
|
5980
6131
|
except AttributeError:
|
|
5981
6132
|
fdev = 0
|
|
5982
|
-
|
|
5983
|
-
|
|
5984
|
-
|
|
6133
|
+
try:
|
|
6134
|
+
frdev = fstatinfo.st_rdev
|
|
6135
|
+
except AttributeError:
|
|
6136
|
+
frdev = 0
|
|
5985
6137
|
# Types that should be considered zero-length in the archive context:
|
|
5986
6138
|
zero_length_types = {1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 13}
|
|
5987
6139
|
# Types that have actual data to read:
|
|
@@ -5992,13 +6144,28 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
|
|
|
5992
6144
|
fsize = format(int(fstatinfo.st_size), 'x').lower()
|
|
5993
6145
|
else:
|
|
5994
6146
|
fsize = format(int(fstatinfo.st_size), 'x').lower()
|
|
5995
|
-
|
|
5996
|
-
|
|
5997
|
-
|
|
6147
|
+
if(hasattr(fstatinfo, "st_atime_ns")):
|
|
6148
|
+
fatime = format(int(fstatinfo.st_atime_ns), 'x').lower()
|
|
6149
|
+
else:
|
|
6150
|
+
fatime = format(int(to_ns(fstatinfo.st_atime)), 'x').lower()
|
|
6151
|
+
if(hasattr(fstatinfo, "st_mtime_ns")):
|
|
6152
|
+
fmtime = format(int(fstatinfo.st_mtime_ns), 'x').lower()
|
|
6153
|
+
else:
|
|
6154
|
+
fmtime = format(int(to_ns(fstatinfo.st_mtime)), 'x').lower()
|
|
6155
|
+
if(hasattr(fstatinfo, "st_ctime_ns")):
|
|
6156
|
+
fctime = format(int(fstatinfo.st_ctime_ns), 'x').lower()
|
|
6157
|
+
else:
|
|
6158
|
+
fctime = format(int(to_ns(fstatinfo.st_ctime)), 'x').lower()
|
|
5998
6159
|
if(hasattr(fstatinfo, "st_birthtime")):
|
|
5999
|
-
|
|
6160
|
+
if(hasattr(fstatinfo, "st_birthtime_ns")):
|
|
6161
|
+
fbtime = format(int(fstatinfo.st_birthtime_ns), 'x').lower()
|
|
6162
|
+
else:
|
|
6163
|
+
fbtime = format(int(to_ns(fstatinfo.st_birthtime)), 'x').lower()
|
|
6000
6164
|
else:
|
|
6001
|
-
|
|
6165
|
+
if(hasattr(fstatinfo, "st_ctime_ns")):
|
|
6166
|
+
fbtime = format(int(fstatinfo.st_ctime_ns), 'x').lower()
|
|
6167
|
+
else:
|
|
6168
|
+
fbtime = format(int(to_ns(fstatinfo.st_ctime)), 'x').lower()
|
|
6002
6169
|
fmode = format(int(fstatinfo.st_mode), 'x').lower()
|
|
6003
6170
|
fchmode = format(int(stat.S_IMODE(fstatinfo.st_mode)), 'x').lower()
|
|
6004
6171
|
ftypemod = format(int(stat.S_IFMT(fstatinfo.st_mode)), 'x').lower()
|
|
@@ -6025,8 +6192,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
|
|
|
6025
6192
|
except ImportError:
|
|
6026
6193
|
fgname = ""
|
|
6027
6194
|
fdev = format(int(fdev), 'x').lower()
|
|
6028
|
-
|
|
6029
|
-
fdev_major = format(int(fdev_major), 'x').lower()
|
|
6195
|
+
frdev = format(int(frdev), 'x').lower()
|
|
6030
6196
|
finode = format(int(finode), 'x').lower()
|
|
6031
6197
|
flinkcount = format(int(flinkcount), 'x').lower()
|
|
6032
6198
|
if(hasattr(fstatinfo, "st_file_attributes")):
|
|
@@ -6087,10 +6253,9 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
|
|
|
6087
6253
|
fcompression = curcompression
|
|
6088
6254
|
fcontents.close()
|
|
6089
6255
|
fcontents = cfcontents
|
|
6090
|
-
elif followlink and (ftype ==
|
|
6091
|
-
if(not os.path.exists(
|
|
6256
|
+
elif followlink and (ftype == 2 or ftype in data_types):
|
|
6257
|
+
if(not os.path.exists(fname)):
|
|
6092
6258
|
return False
|
|
6093
|
-
flstatinfo = os.stat(flinkname)
|
|
6094
6259
|
with open(flinkname, "rb") as fpc:
|
|
6095
6260
|
shutil.copyfileobj(fpc, fcontents, length=__filebuff_size__)
|
|
6096
6261
|
typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
|
|
@@ -6141,10 +6306,9 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
|
|
|
6141
6306
|
fcompression = ""
|
|
6142
6307
|
fcontents.seek(0, 0)
|
|
6143
6308
|
ftypehex = format(ftype, 'x').lower()
|
|
6144
|
-
tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
|
|
6145
|
-
fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev,
|
|
6146
|
-
AppendFileHeaderWithContent(
|
|
6147
|
-
fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
|
|
6309
|
+
tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
|
|
6310
|
+
fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
|
|
6311
|
+
AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
|
|
6148
6312
|
try:
|
|
6149
6313
|
fp.flush()
|
|
6150
6314
|
if(hasattr(os, "sync")):
|
|
@@ -6153,12 +6317,9 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
|
|
|
6153
6317
|
pass
|
|
6154
6318
|
return fp
|
|
6155
6319
|
|
|
6156
|
-
def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
|
|
6320
|
+
def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
|
|
6157
6321
|
if(not hasattr(fp, "write")):
|
|
6158
6322
|
return False
|
|
6159
|
-
if(verbose):
|
|
6160
|
-
logging.basicConfig(format="%(message)s",
|
|
6161
|
-
stream=PY_STDOUT_TEXT, level=logging.DEBUG)
|
|
6162
6323
|
curinode = 0
|
|
6163
6324
|
curfid = 0
|
|
6164
6325
|
inodelist = []
|
|
@@ -6222,7 +6383,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
|
|
|
6222
6383
|
except FileNotFoundError:
|
|
6223
6384
|
return False
|
|
6224
6385
|
numfiles = int(len(tarfp.getmembers()))
|
|
6225
|
-
AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
|
|
6386
|
+
AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
|
|
6226
6387
|
try:
|
|
6227
6388
|
fp.flush()
|
|
6228
6389
|
if(hasattr(os, "sync")):
|
|
@@ -6240,6 +6401,15 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
|
|
|
6240
6401
|
fpremode = member.mode
|
|
6241
6402
|
ffullmode = member.mode
|
|
6242
6403
|
flinkcount = 0
|
|
6404
|
+
fblksize = 0
|
|
6405
|
+
if(hasattr(fstatinfo, "st_blksize")):
|
|
6406
|
+
fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
|
|
6407
|
+
fblocks = 0
|
|
6408
|
+
if(hasattr(fstatinfo, "st_blocks")):
|
|
6409
|
+
fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
|
|
6410
|
+
fflags = 0
|
|
6411
|
+
if(hasattr(fstatinfo, "st_flags")):
|
|
6412
|
+
fflags = format(int(fstatinfo.st_flags), 'x').lower()
|
|
6243
6413
|
ftype = 0
|
|
6244
6414
|
if(member.isreg()):
|
|
6245
6415
|
ffullmode = member.mode + stat.S_IFREG
|
|
@@ -6277,12 +6447,11 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
|
|
|
6277
6447
|
curfid = curfid + 1
|
|
6278
6448
|
if(ftype == 2):
|
|
6279
6449
|
flinkname = member.linkname
|
|
6450
|
+
fdev = format(int("0"), 'x').lower()
|
|
6280
6451
|
try:
|
|
6281
|
-
|
|
6452
|
+
frdev = format(int(os.makedev(member.devmajor, member.devminor)), 'x').lower()
|
|
6282
6453
|
except AttributeError:
|
|
6283
|
-
|
|
6284
|
-
fdev_minor = format(int(member.devminor), 'x').lower()
|
|
6285
|
-
fdev_major = format(int(member.devmajor), 'x').lower()
|
|
6454
|
+
frdev = format(int(MakeDevAlt(member.devmajor, member.devminor)), 'x').lower()
|
|
6286
6455
|
# Types that should be considered zero-length in the archive context:
|
|
6287
6456
|
zero_length_types = {1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 13}
|
|
6288
6457
|
# Types that have actual data to read:
|
|
@@ -6293,10 +6462,10 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
|
|
|
6293
6462
|
fsize = format(int(member.size), 'x').lower()
|
|
6294
6463
|
else:
|
|
6295
6464
|
fsize = format(int(member.size), 'x').lower()
|
|
6296
|
-
fatime = format(int(member.mtime), 'x').lower()
|
|
6297
|
-
fmtime = format(int(member.mtime), 'x').lower()
|
|
6298
|
-
fctime = format(int(member.mtime), 'x').lower()
|
|
6299
|
-
fbtime = format(int(member.mtime), 'x').lower()
|
|
6465
|
+
fatime = format(int(to_ns(member.mtime)), 'x').lower()
|
|
6466
|
+
fmtime = format(int(to_ns(member.mtime)), 'x').lower()
|
|
6467
|
+
fctime = format(int(to_ns(member.mtime)), 'x').lower()
|
|
6468
|
+
fbtime = format(int(to_ns(member.mtime)), 'x').lower()
|
|
6300
6469
|
fmode = format(int(ffullmode), 'x').lower()
|
|
6301
6470
|
fchmode = format(int(stat.S_IMODE(ffullmode)), 'x').lower()
|
|
6302
6471
|
ftypemod = format(int(stat.S_IFMT(ffullmode)), 'x').lower()
|
|
@@ -6363,10 +6532,9 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
|
|
|
6363
6532
|
fcompression = ""
|
|
6364
6533
|
fcontents.seek(0, 0)
|
|
6365
6534
|
ftypehex = format(ftype, 'x').lower()
|
|
6366
|
-
tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
|
|
6367
|
-
fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev,
|
|
6368
|
-
AppendFileHeaderWithContent(
|
|
6369
|
-
fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
|
|
6535
|
+
tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
|
|
6536
|
+
fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
|
|
6537
|
+
AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
|
|
6370
6538
|
try:
|
|
6371
6539
|
fp.flush()
|
|
6372
6540
|
if(hasattr(os, "sync")):
|
|
@@ -6376,12 +6544,9 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
|
|
|
6376
6544
|
fcontents.close()
|
|
6377
6545
|
return fp
|
|
6378
6546
|
|
|
6379
|
-
def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
|
|
6547
|
+
def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
|
|
6380
6548
|
if(not hasattr(fp, "write")):
|
|
6381
6549
|
return False
|
|
6382
|
-
if(verbose):
|
|
6383
|
-
logging.basicConfig(format="%(message)s",
|
|
6384
|
-
stream=PY_STDOUT_TEXT, level=logging.DEBUG)
|
|
6385
6550
|
curinode = 0
|
|
6386
6551
|
curfid = 0
|
|
6387
6552
|
inodelist = []
|
|
@@ -6415,7 +6580,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
|
|
|
6415
6580
|
if(ziptest):
|
|
6416
6581
|
VerbosePrintOut("Bad file found!")
|
|
6417
6582
|
numfiles = int(len(zipfp.infolist()))
|
|
6418
|
-
AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
|
|
6583
|
+
AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
|
|
6419
6584
|
try:
|
|
6420
6585
|
fp.flush()
|
|
6421
6586
|
if(hasattr(os, "sync")):
|
|
@@ -6436,6 +6601,15 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
|
|
|
6436
6601
|
else:
|
|
6437
6602
|
fpremode = int(stat.S_IFREG | 0x1b6)
|
|
6438
6603
|
flinkcount = 0
|
|
6604
|
+
fblksize = 0
|
|
6605
|
+
if(hasattr(fstatinfo, "st_blksize")):
|
|
6606
|
+
fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
|
|
6607
|
+
fblocks = 0
|
|
6608
|
+
if(hasattr(fstatinfo, "st_blocks")):
|
|
6609
|
+
fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
|
|
6610
|
+
fflags = 0
|
|
6611
|
+
if(hasattr(fstatinfo, "st_flags")):
|
|
6612
|
+
fflags = format(int(fstatinfo.st_flags), 'x').lower()
|
|
6439
6613
|
ftype = 0
|
|
6440
6614
|
if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
|
|
6441
6615
|
ftype = 5
|
|
@@ -6446,8 +6620,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
|
|
|
6446
6620
|
fcurinode = format(int(curfid), 'x').lower()
|
|
6447
6621
|
curfid = curfid + 1
|
|
6448
6622
|
fdev = format(int(0), 'x').lower()
|
|
6449
|
-
|
|
6450
|
-
fdev_major = format(int(0), 'x').lower()
|
|
6623
|
+
frdev = format(int(0), 'x').lower()
|
|
6451
6624
|
if(ftype == 5):
|
|
6452
6625
|
fsize = format(int("0"), 'x').lower()
|
|
6453
6626
|
elif(ftype == 0):
|
|
@@ -6455,13 +6628,13 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
|
|
|
6455
6628
|
else:
|
|
6456
6629
|
fsize = format(int(member.file_size), 'x').lower()
|
|
6457
6630
|
fatime = format(
|
|
6458
|
-
int(time.mktime(member.date_time + (0, 0, -1))), 'x').lower()
|
|
6631
|
+
int(to_ns(time.mktime(member.date_time + (0, 0, -1)))), 'x').lower()
|
|
6459
6632
|
fmtime = format(
|
|
6460
|
-
int(time.mktime(member.date_time + (0, 0, -1))), 'x').lower()
|
|
6633
|
+
int(to_ns(time.mktime(member.date_time + (0, 0, -1)))), 'x').lower()
|
|
6461
6634
|
fctime = format(
|
|
6462
|
-
int(time.mktime(member.date_time + (0, 0, -1))), 'x').lower()
|
|
6635
|
+
int(to_ns(time.mktime(member.date_time + (0, 0, -1)))), 'x').lower()
|
|
6463
6636
|
fbtime = format(
|
|
6464
|
-
int(time.mktime(member.date_time + (0, 0, -1))), 'x').lower()
|
|
6637
|
+
int(to_ns(time.mktime(member.date_time + (0, 0, -1)))), 'x').lower()
|
|
6465
6638
|
if(zipinfo.create_system == 0 or zipinfo.create_system == 10):
|
|
6466
6639
|
fwinattributes = format(int(zipinfo.external_attr & 0xFFFF), 'x').lower()
|
|
6467
6640
|
if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
|
|
@@ -6577,10 +6750,9 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
|
|
|
6577
6750
|
fcompression = ""
|
|
6578
6751
|
fcontents.seek(0, 0)
|
|
6579
6752
|
ftypehex = format(ftype, 'x').lower()
|
|
6580
|
-
tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
|
|
6581
|
-
fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev,
|
|
6582
|
-
AppendFileHeaderWithContent(
|
|
6583
|
-
fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
|
|
6753
|
+
tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
|
|
6754
|
+
fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
|
|
6755
|
+
AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
|
|
6584
6756
|
try:
|
|
6585
6757
|
fp.flush()
|
|
6586
6758
|
if(hasattr(os, "sync")):
|
|
@@ -6591,15 +6763,12 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
|
|
|
6591
6763
|
return fp
|
|
6592
6764
|
|
|
6593
6765
|
if(not rarfile_support):
|
|
6594
|
-
def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
|
|
6766
|
+
def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
|
|
6595
6767
|
return False
|
|
6596
6768
|
else:
|
|
6597
|
-
def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
|
|
6769
|
+
def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
|
|
6598
6770
|
if(not hasattr(fp, "write")):
|
|
6599
6771
|
return False
|
|
6600
|
-
if(verbose):
|
|
6601
|
-
logging.basicConfig(format="%(message)s",
|
|
6602
|
-
stream=PY_STDOUT_TEXT, level=logging.DEBUG)
|
|
6603
6772
|
curinode = 0
|
|
6604
6773
|
curfid = 0
|
|
6605
6774
|
inodelist = []
|
|
@@ -6615,7 +6784,7 @@ else:
|
|
|
6615
6784
|
if(rartest):
|
|
6616
6785
|
VerbosePrintOut("Bad file found!")
|
|
6617
6786
|
numfiles = int(len(rarfp.infolist()))
|
|
6618
|
-
AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
|
|
6787
|
+
AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
|
|
6619
6788
|
try:
|
|
6620
6789
|
fp.flush()
|
|
6621
6790
|
if(hasattr(os, "sync")):
|
|
@@ -6671,6 +6840,15 @@ else:
|
|
|
6671
6840
|
fcompression = ""
|
|
6672
6841
|
fcsize = format(int(0), 'x').lower()
|
|
6673
6842
|
flinkcount = 0
|
|
6843
|
+
fblksize = 0
|
|
6844
|
+
if(hasattr(fstatinfo, "st_blksize")):
|
|
6845
|
+
fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
|
|
6846
|
+
fblocks = 0
|
|
6847
|
+
if(hasattr(fstatinfo, "st_blocks")):
|
|
6848
|
+
fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
|
|
6849
|
+
fflags = 0
|
|
6850
|
+
if(hasattr(fstatinfo, "st_flags")):
|
|
6851
|
+
fflags = format(int(fstatinfo.st_flags), 'x').lower()
|
|
6674
6852
|
ftype = 0
|
|
6675
6853
|
if(member.is_file()):
|
|
6676
6854
|
ftype = 0
|
|
@@ -6685,8 +6863,7 @@ else:
|
|
|
6685
6863
|
fcurinode = format(int(curfid), 'x').lower()
|
|
6686
6864
|
curfid = curfid + 1
|
|
6687
6865
|
fdev = format(int(0), 'x').lower()
|
|
6688
|
-
|
|
6689
|
-
fdev_major = format(int(0), 'x').lower()
|
|
6866
|
+
frdev = format(int(0), 'x').lower()
|
|
6690
6867
|
if(ftype == 5):
|
|
6691
6868
|
fsize = format(int("0"), 'x').lower()
|
|
6692
6869
|
elif(ftype == 0):
|
|
@@ -6695,20 +6872,20 @@ else:
|
|
|
6695
6872
|
fsize = format(int(member.file_size), 'x').lower()
|
|
6696
6873
|
try:
|
|
6697
6874
|
if(member.atime):
|
|
6698
|
-
fatime = format(int(member.atime.timestamp()), 'x').lower()
|
|
6875
|
+
fatime = format(int(to_ns(member.atime.timestamp())), 'x').lower()
|
|
6699
6876
|
else:
|
|
6700
|
-
fatime = format(int(member.mtime.timestamp()), 'x').lower()
|
|
6877
|
+
fatime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
|
|
6701
6878
|
except AttributeError:
|
|
6702
|
-
fatime = format(int(member.mtime.timestamp()), 'x').lower()
|
|
6703
|
-
fmtime = format(int(member.mtime.timestamp()), 'x').lower()
|
|
6879
|
+
fatime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
|
|
6880
|
+
fmtime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
|
|
6704
6881
|
try:
|
|
6705
6882
|
if(member.ctime):
|
|
6706
|
-
fctime = format(int(member.ctime.timestamp()), 'x').lower()
|
|
6883
|
+
fctime = format(int(to_ns(member.ctime.timestamp())), 'x').lower()
|
|
6707
6884
|
else:
|
|
6708
|
-
fctime = format(int(member.mtime.timestamp()), 'x').lower()
|
|
6885
|
+
fctime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
|
|
6709
6886
|
except AttributeError:
|
|
6710
|
-
fctime = format(int(member.mtime.timestamp()), 'x').lower()
|
|
6711
|
-
fbtime = format(int(member.mtime.timestamp()), 'x').lower()
|
|
6887
|
+
fctime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
|
|
6888
|
+
fbtime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
|
|
6712
6889
|
if(is_unix and member.external_attr != 0):
|
|
6713
6890
|
fmode = format(int(member.external_attr), 'x').lower()
|
|
6714
6891
|
fchmode = format(
|
|
@@ -6810,10 +6987,9 @@ else:
|
|
|
6810
6987
|
fcompression = ""
|
|
6811
6988
|
fcontents.seek(0, 0)
|
|
6812
6989
|
ftypehex = format(ftype, 'x').lower()
|
|
6813
|
-
tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
|
|
6814
|
-
fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev,
|
|
6815
|
-
AppendFileHeaderWithContent(
|
|
6816
|
-
fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
|
|
6990
|
+
tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
|
|
6991
|
+
fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
|
|
6992
|
+
AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
|
|
6817
6993
|
try:
|
|
6818
6994
|
fp.flush()
|
|
6819
6995
|
if(hasattr(os, "sync")):
|
|
@@ -6824,15 +7000,12 @@ else:
|
|
|
6824
7000
|
return fp
|
|
6825
7001
|
|
|
6826
7002
|
if(not py7zr_support):
|
|
6827
|
-
def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
|
|
7003
|
+
def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
|
|
6828
7004
|
return False
|
|
6829
7005
|
else:
|
|
6830
|
-
def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
|
|
7006
|
+
def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
|
|
6831
7007
|
if(not hasattr(fp, "write")):
|
|
6832
7008
|
return False
|
|
6833
|
-
if(verbose):
|
|
6834
|
-
logging.basicConfig(format="%(message)s",
|
|
6835
|
-
stream=PY_STDOUT_TEXT, level=logging.DEBUG)
|
|
6836
7009
|
formver = formatspecs['format_ver']
|
|
6837
7010
|
fileheaderver = str(int(formver.replace(".", "")))
|
|
6838
7011
|
curinode = 0
|
|
@@ -6850,7 +7023,7 @@ else:
|
|
|
6850
7023
|
if(sztestalt):
|
|
6851
7024
|
VerbosePrintOut("Bad file found!")
|
|
6852
7025
|
numfiles = int(len(szpfp.list()))
|
|
6853
|
-
AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
|
|
7026
|
+
AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
|
|
6854
7027
|
try:
|
|
6855
7028
|
fp.flush()
|
|
6856
7029
|
if(hasattr(os, "sync")):
|
|
@@ -6873,6 +7046,15 @@ else:
|
|
|
6873
7046
|
fcompression = ""
|
|
6874
7047
|
fcsize = format(int(0), 'x').lower()
|
|
6875
7048
|
flinkcount = 0
|
|
7049
|
+
fblksize = 0
|
|
7050
|
+
if(hasattr(fstatinfo, "st_blksize")):
|
|
7051
|
+
fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
|
|
7052
|
+
fblocks = 0
|
|
7053
|
+
if(hasattr(fstatinfo, "st_blocks")):
|
|
7054
|
+
fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
|
|
7055
|
+
fflags = 0
|
|
7056
|
+
if(hasattr(fstatinfo, "st_flags")):
|
|
7057
|
+
fflags = format(int(fstatinfo.st_flags), 'x').lower()
|
|
6876
7058
|
ftype = 0
|
|
6877
7059
|
if(member.is_directory):
|
|
6878
7060
|
ftype = 5
|
|
@@ -6883,14 +7065,13 @@ else:
  fcurinode = format(int(curfid), 'x').lower()
  curfid = curfid + 1
  fdev = format(int(0), 'x').lower()
-
- fdev_major = format(int(0), 'x').lower()
+ frdev = format(int(0), 'x').lower()
  if(ftype == 5):
  fsize = format(int("0"), 'x').lower()
- fatime = format(int(member.creationtime.timestamp()), 'x').lower()
- fmtime = format(int(member.creationtime.timestamp()), 'x').lower()
- fctime = format(int(member.creationtime.timestamp()), 'x').lower()
- fbtime = format(int(member.creationtime.timestamp()), 'x').lower()
+ fatime = format(int(to_ns(member.creationtime.timestamp())), 'x').lower()
+ fmtime = format(int(to_ns(member.creationtime.timestamp())), 'x').lower()
+ fctime = format(int(to_ns(member.creationtime.timestamp())), 'x').lower()
+ fbtime = format(int(to_ns(member.creationtime.timestamp())), 'x').lower()
  if(member.is_directory):
  fmode = format(int(stat.S_IFDIR | 0x1ff), 'x').lower()
  fchmode = format(
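The 7-Zip member timestamps now pass through `to_ns()` before being hex-encoded, and the zeroed `fdev_major`/`fdev_minor` pair is replaced by a single `frdev` field. Assuming `to_ns` scales a seconds-based POSIX timestamp to integer nanoseconds (py7zr exposes only a creation time, so all four times are derived from it), the conversion would look roughly like:

    def to_ns(seconds):
        # Assumed behavior: convert a timestamp in (possibly fractional)
        # seconds to an integer count of nanoseconds.
        return int(seconds * 1000000000)

    # All four archive timestamps reuse the 7-Zip creation time, e.g.:
    # fatime = format(to_ns(member.creationtime.timestamp()), 'x').lower()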
@@ -6983,10 +7164,9 @@ else:
  fcompression = ""
  fcontents.seek(0, 0)
  ftypehex = format(ftype, 'x').lower()
- tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
- fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev,
- AppendFileHeaderWithContent(
- fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
+ tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))]
+ AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
@@ -6996,11 +7176,9 @@ else:
  fcontents.close()
  return fp

- def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  if(not hasattr(fp, "write")):
  return False
- if(verbose):
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  GetDirList = inlist
  if(not GetDirList):
  return False
@@ -7012,7 +7190,7 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsond
  inodetoforminode = {}
  numfiles = int(len(GetDirList))
  fnumfiles = format(numfiles, 'x').lower()
- AppendFileHeader(fp, numfiles, "UTF-8", [], [checksumtype[0], checksumtype[1]], formatspecs)
+ AppendFileHeader(fp, numfiles, "UTF-8", [], [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
  for curfname in GetDirList:
  ftype = format(curfname[0], 'x').lower()
  fencoding = curfname[1]
@@ -7026,44 +7204,45 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsond
  fbasedir = os.path.dirname(fname)
  flinkname = curfname[4]
  fsize = format(curfname[5], 'x').lower()
- [removed lines 7029-7051 not shown in the source view]
+ fblksize = format(curfname[6], 'x').lower()
+ fblocks = format(curfname[7], 'x').lower()
+ fflags = format(curfname[8], 'x').lower()
+ fatime = format(curfname[9], 'x').lower()
+ fmtime = format(curfname[10], 'x').lower()
+ fctime = format(curfname[11], 'x').lower()
+ fbtime = format(curfname[12], 'x').lower()
+ fmode = format(curfname[13], 'x').lower()
+ fwinattributes = format(curfname[14], 'x').lower()
+ fcompression = curfname[15]
+ fcsize = format(curfname[16], 'x').lower()
+ fuid = format(curfname[17], 'x').lower()
+ funame = curfname[18]
+ fgid = format(curfname[19], 'x').lower()
+ fgname = curfname[20]
+ fid = format(curfname[21], 'x').lower()
+ finode = format(curfname[22], 'x').lower()
+ flinkcount = format(curfname[23], 'x').lower()
+ fdev = format(curfname[24], 'x').lower()
+ frdev = format(curfname[25], 'x').lower()
+ fseeknextfile = curfname[26]
+ extradata = curfname[27]
+ fheaderchecksumtype = curfname[28]
+ fcontentchecksumtype = curfname[29]
+ fcontents = curfname[30]
  fencoding = GetFileEncoding(fcontents, 0, False)[0]
- tmpoutlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize,
- fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev,
+ tmpoutlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize,
+ fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev, frdev, fseeknextfile]
  fcontents.seek(0, 0)
- AppendFileHeaderWithContent(
- fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
+ AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
  return fp


- def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
- inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
- return AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, followlink, checksumtype, formatspecs, verbose)
+ def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+ inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs, saltkey, False)
+ return AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose)


- def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
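`AppendListsWithContent` now unpacks 31 positional fields per record (indices 0-30): `fblksize`, `fblocks`, and `fflags` slot in after `fsize`, and `frdev` after `fdev`, shifting every later index relative to 0.25.2. A hedged sketch of the assumed tuple layout, useful when building such lists by hand (the constant and helper are illustrative, not part of the package):

    # Assumed per-record layout consumed by AppendListsWithContent in 0.26.0
    # (indices inferred from the diff, not an official constant).
    RECORD_FIELDS = [
        "ftype", "fencoding", "fcencoding", "fname", "flinkname", "fsize",
        "fblksize", "fblocks", "fflags",          # new in 0.26.0
        "fatime", "fmtime", "fctime", "fbtime", "fmode", "fwinattributes",
        "fcompression", "fcsize", "fuid", "funame", "fgid", "fgname",
        "fid", "finode", "flinkcount", "fdev",
        "frdev",                                  # new in 0.26.0
        "fseeknextfile", "extradata", "fheaderchecksumtype",
        "fcontentchecksumtype", "fcontents",
    ]

    def record_to_dict(curfname):
        # Convenience helper (hypothetical): name the positional fields.
        return dict(zip(RECORD_FIELDS, curfname))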
@@ -7107,8 +7286,7 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
  except PermissionError:
  return False
- AppendFilesWithContent(infiles, fp, dirlistfromtxt, extradata, jsondata, compression,
- compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, formatspecs, verbose)
+ AppendFilesWithContent(infiles, fp, dirlistfromtxt, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, formatspecs, saltkey, verbose)
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
  fp = CompressOpenFileAlt(
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7137,12 +7315,12 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
  fp.close()
  return True

- def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if not isinstance(infiles, list):
  infiles = [infiles]
  returnout = False
  for infileslist in infiles:
- returnout = AppendFilesWithContentToOutFile(infileslist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, True)
+ returnout = AppendFilesWithContentToOutFile(infileslist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose, True)
  if(not returnout):
  break
  else:
@@ -7152,7 +7330,7 @@ def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=Fals
  return True
  return returnout

- def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, saltkey=None, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7193,8 +7371,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
  except PermissionError:
  return False
- AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, compression,
- compresswholefile, compressionlevel, followlink, checksumtype, formatspecs, verbose)
+ AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, compression, compresswholefile, compressionlevel, followlink, checksumtype, formatspecs, saltkey, verbose)
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
  fp = CompressOpenFileAlt(
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7224,7 +7401,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
  fp.close()
  return True

- def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7266,8 +7443,7 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
  except PermissionError:
  return False
- AppendFilesWithContentFromTarFile(infiles, fp, extradata, jsondata, compression,
- compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, verbose)
+ AppendFilesWithContentFromTarFile(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, verbose)
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
  fp = CompressOpenFileAlt(
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7297,12 +7473,12 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
  fp.close()
  return True

- def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if not isinstance(infiles, list):
  infiles = [infiles]
  returnout = False
  for infileslist in infiles:
- returnout = AppendFilesWithContentFromTarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
+ returnout = AppendFilesWithContentFromTarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, True)
  if(not returnout):
  break
  else:
@@ -7312,7 +7488,7 @@ def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype=
  return True
  return returnout

- def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7354,8 +7530,7 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
  except PermissionError:
  return False
- AppendFilesWithContentFromZipFile(infiles, fp, extradata, jsondata, compression,
- compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, verbose)
+ AppendFilesWithContentFromZipFile(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, verbose)
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
  fp = CompressOpenFileAlt(
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7385,12 +7560,12 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
  fp.close()
  return True

- def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if not isinstance(infiles, list):
  infiles = [infiles]
  returnout = False
  for infileslist in infiles:
- returnout = AppendFilesWithContentFromZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
+ returnout = AppendFilesWithContentFromZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, True)
  if(not returnout):
  break
  else:
@@ -7401,10 +7576,10 @@ def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype=
  return returnout

  if(not rarfile_support):
- def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  return False
  else:
- def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7446,8 +7621,7 @@ else:
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
  except PermissionError:
  return False
- AppendFilesWithContentFromRarFile(infiles, fp, extradata, jsondata, compression,
- compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, verbose)
+ AppendFilesWithContentFromRarFile(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, verbose)
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
  fp = CompressOpenFileAlt(
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7477,12 +7651,12 @@ else:
  fp.close()
  return True

- def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if not isinstance(infiles, list):
  infiles = [infiles]
  returnout = False
  for infileslist in infiles:
- returnout = AppendFilesWithContentFromRarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
+ returnout = AppendFilesWithContentFromRarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, True)
  if(not returnout):
  break
  else:
@@ -7493,10 +7667,10 @@ def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype=
  return returnout

  if(not py7zr_support):
- def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  return False
  else:
- def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7538,8 +7712,7 @@ else:
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
  except PermissionError:
  return False
- AppendFilesWithContentFromSevenZipFile(infiles, fp, extradata, jsondata, compression,
- compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, verbose)
+ AppendFilesWithContentFromSevenZipFile(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, verbose)
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
  fp = CompressOpenFileAlt(
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7569,12 +7742,12 @@ else:
  fp.close()
  return True

- def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if not isinstance(infiles, list):
  infiles = [infiles]
  returnout = False
  for infileslist in infiles:
- returnout = AppendFilesWithContentFromSevenZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
+ returnout = AppendFilesWithContentFromSevenZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, True)
  if(not returnout):
  break
  else:
@@ -7584,9 +7757,9 @@ def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmt
  return True
  return returnout

- def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
- inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
- return AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
+ def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
+ inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs, saltkey, False)
+ return AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose, returnfp)


  def PrintPermissionString(fchmode, ftype):
@@ -9327,56 +9500,54 @@ def CheckSumSupport(checkfor, guaranteed=True):
  return False


- def PackArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
- return AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
+ def PackArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
+ return AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose, returnfp)

- def PackStackedArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
- return AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
+ def PackStackedArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
+ return AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose, returnfp)

- def PackArchiveFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
- return PackArchiveFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, returnfp)
+ def PackArchiveFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
+ return PackArchiveFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, saltkey, verbose, returnfp)


- def PackArchiveFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
- return AppendFilesWithContentFromTarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
+ def PackArchiveFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
+ return AppendFilesWithContentFromTarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, returnfp)


- def PackArchiveFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
- return AppendFilesWithContentFromZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
+ def PackArchiveFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
+ return AppendFilesWithContentFromZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, returnfp)


  if(not rarfile_support):
- def PackArchiveFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+ def PackArchiveFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
  return False
  else:
- def PackArchiveFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
- return AppendFilesWithContentFromRarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
+ def PackArchiveFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
+ return AppendFilesWithContentFromRarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, returnfp)


  if(not py7zr_support):
- def PackArchiveFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+ def PackArchiveFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
  return False
  else:
- def PackArchiveFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
- return AppendFilesWithContentFromSevenZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
+ def PackArchiveFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
+ return AppendFilesWithContentFromSevenZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, returnfp)


- def PackArchiveFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+ def PackArchiveFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
  checkcompressfile = CheckCompressionSubType(infile, formatspecs, 0, True)
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
  formatspecs = formatspecs[checkcompressfile]
- if(verbose):
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
- return PackArchiveFileFromTarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
+ return PackArchiveFileFromTarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
  elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
- return PackArchiveFileFromZipFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
+ return PackArchiveFileFromZipFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
  elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
- return PackArchiveFileFromRarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
+ return PackArchiveFileFromRarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
  elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
- return PackArchiveFileFromSevenZipFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
+ return PackArchiveFileFromSevenZipFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
  elif(IsSingleDict(formatspecs) and checkcompressfile == formatspecs['format_magic']):
- return RePackArchiveFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, False, 0, 0, checksumtype, False, extradata, jsondata, formatspecs, verbose, returnfp)
+ return RePackArchiveFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, False, 0, 0, checksumtype, False, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
  else:
  return False
  return False
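Every Pack* wrapper above now accepts `saltkey=None`, which defaults to the old unsalted behavior and is threaded through to the checksum routines. A hedged usage sketch; the paths and key are placeholders, and the exact effect of the salt on the stored digests is presumed from the signatures rather than documented here:

    import pyarchivefile

    # saltkey=None keeps 0.25.2-compatible unsalted checksums; passing a
    # key presumably salts the header, JSON, and content checksums.
    pyarchivefile.PackArchiveFile(["./docs"], "./docs.arc")
    pyarchivefile.PackArchiveFile(["./docs"], "./docs-salted.arc",
                                  saltkey=b"my-secret-salt")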
@@ -9445,19 +9616,12 @@ def ArchiveFileArrayValidate(listarrayfiles, verbose=False):
  ok = False
  return ok

- def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
- formatspecs=__file_format_multi_dict__, # keep default like original
- seektoend=False, verbose=False, returnfp=False):
- if(verbose):
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
-
+ def ArchiveFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
  formatspecs = formatspecs[fmttype]
  elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
  fmttype = "auto"
-
  curloc = filestart
-
  if(hasattr(infile, "read") or hasattr(infile, "write")):
  curloc = infile.tell()
  fp = infile
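`ArchiveFileValidate` gains the same `saltkey` parameter, so a salted archive can only be verified with the key it was packed with. A short usage sketch (the archive paths are illustrative):

    import pyarchivefile

    # Pass the same saltkey used when packing, or leave it None for
    # unsalted archives.
    ok = pyarchivefile.ArchiveFileValidate("./docs.arc", verbose=True)
    ok_salted = pyarchivefile.ArchiveFileValidate("./docs-salted.arc",
                                                  saltkey=b"my-secret-salt")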
@@ -9473,7 +9637,6 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
  if(not fp):
  return False
  fp.seek(filestart, 0)
-
  elif(infile == "-"):
  fp = MkTempFile()
  shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
@@ -9485,7 +9648,6 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
  if(not fp):
  return False
  fp.seek(filestart, 0)
-
  elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
  fp = MkTempFile()
  fp.write(infile)
@@ -9497,7 +9659,6 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
  if(not fp):
  return False
  fp.seek(filestart, 0)
-
  elif(re.findall(__download_proto_support__, infile)):
  fp = download_file_from_internet_file(infile)
  fp = UncompressFileAlt(fp, formatspecs, filestart)
@@ -9508,7 +9669,6 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
  if(not fp):
  return False
  fp.seek(filestart, 0)
-
  else:
  infile = RemoveWindowsPath(infile)
  checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
@@ -9555,11 +9715,9 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
  fp.seek(0, 2)
  except (OSError, ValueError):
  SeekToEndOfFile(fp)
-
  CatSize = fp.tell()
  CatSizeEnd = CatSize
  fp.seek(curloc, 0)
-
  if(IsNestedDict(formatspecs)):
  compresschecking = CheckCompressionType(fp, formatspecs, filestart, False)
  if(compresschecking not in formatspecs):
@@ -9567,54 +9725,36 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
  else:
  formatspecs = formatspecs[compresschecking]
  fp.seek(filestart, 0)
-
  inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
  formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
  formdelsize = len(formatspecs['format_delimiter'])
  formdel = fp.read(formdelsize).decode("UTF-8")
-
  if(formstring != formatspecs['format_magic'] + inheaderver):
  return False
  if(formdel != formatspecs['format_delimiter']):
  return False
-
- if(
+ headeroffset = fp.tell()
+ if(__use_new_style__):
  inheader = ReadFileHeaderDataBySize(fp, formatspecs['format_delimiter'])
  else:
  inheader = ReadFileHeaderDataWoSize(fp, formatspecs['format_delimiter'])
-
-
-
- extrastart = 15
+ fnumextrafieldsize = int(inheader[15], 16)
+ fnumextrafields = int(inheader[16], 16)
+ extrastart = 17
  extraend = extrastart + fnumextrafields
  formversion = re.findall("([\\d]+)", formstring)
  fheadsize = int(inheader[0], 16)
  fnumfields = int(inheader[1], 16)
- fnumfiles = int(inheader[
+ fnumfiles = int(inheader[8], 16)
  fprechecksumtype = inheader[-2]
  fprechecksum = inheader[-1]
- outfseeknextfile = inheader[
- fjsonsize = int(inheader[
- fjsonchecksumtype = inheader[
- fjsonchecksum = inheader[
+ outfseeknextfile = inheader[9]
+ fjsonsize = int(inheader[12], 16)
+ fjsonchecksumtype = inheader[13]
+ fjsonchecksum = inheader[14]
+ headerjsonoffset = fp.tell()
  fprejsoncontent = fp.read(fjsonsize)
- jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
- if(fjsonsize > 0):
- if(hmac.compare_digest(jsonfcs, fjsonchecksum)):
- if(verbose):
- VerbosePrintOut("File JSON Data Checksum Passed at offset " + str(outfjstart))
- VerbosePrintOut("'" + outfjsonchecksum + "' == " + "'" + injsonfcs + "'")
- else:
- valid_archive = False
- invalid_archive = True
- if(verbose):
- VerbosePrintOut("File JSON Data Checksum Error at offset " + str(outfjstart))
- VerbosePrintOut("'" + outfjsonchecksum + "' != " + "'" + injsonfcs + "'")
- if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
- VerbosePrintOut("File JSON Data Checksum Error with file " +
- fname + " at offset " + str(fheaderstart))
- VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
- return False
+ jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
  # Next seek directive
  if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
  fseeknextasnum = int(outfseeknextfile.replace("+", ""))
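The validator now reads the archive header at fixed indices: file count at `inheader[8]`, the seek directive at `[9]`, JSON size, checksum type, and checksum at `[12]`-`[14]`, and the extra-field counts at `[15]`-`[16]`, with sizes stored as lowercase hex. A small sketch of the assumed decode step (the helper name is illustrative):

    def decode_header_fields(inheader):
        # Field positions as used by ArchiveFileValidate in 0.26.0
        # (inferred from the diff; sizes are lowercase hex strings).
        return {
            "numfiles": int(inheader[8], 16),
            "seeknextfile": inheader[9],
            "jsonsize": int(inheader[12], 16),
            "jsonchecksumtype": inheader[13],
            "jsonchecksum": inheader[14],
            "numextrafieldsize": int(inheader[15], 16),
            "numextrafields": int(inheader[16], 16),
        }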
@@ -9633,14 +9773,11 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
  fp.seek(fseeknextasnum, 0)
  else:
  return False
-
  il = 0
- headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
- newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
-
+ headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs, saltkey)
+ newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
  valid_archive = True
  invalid_archive = False
-
  if(verbose):
  if(hasattr(infile, "read") or hasattr(infile, "write")):
  try:
@@ -9652,78 +9789,56 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
  else:
  VerbosePrintOut(infile)
  VerbosePrintOut("Number of Records " + str(fnumfiles))
-
  if(headercheck):
  if(verbose):
- VerbosePrintOut("File Header Checksum Passed at offset " + str(
+ VerbosePrintOut("File Header Checksum Passed at offset " + str(headeroffset))
  VerbosePrintOut("'" + fprechecksum + "' == " + "'" + newfcs + "'")
  else:
  # always flip flags, even when not verbose
  valid_archive = False
  invalid_archive = True
  if(verbose):
- VerbosePrintOut("File Header Checksum Failed at offset " + str(
+ VerbosePrintOut("File Header Checksum Failed at offset " + str(headeroffset))
  VerbosePrintOut("'" + fprechecksum + "' != " + "'" + newfcs + "'")
-
+ if(fjsonsize > 0):
+ if(CheckChecksums(jsonfcs, fjsonchecksum)):
+ if(verbose):
+ VerbosePrintOut("File JSON Data Checksum Passed at offset " + str(headerjsonoffset))
+ VerbosePrintOut("'" + outfjsonchecksum + "' == " + "'" + injsonfcs + "'")
+ else:
+ valid_archive = False
+ invalid_archive = True
+ if(verbose):
+ VerbosePrintOut("File JSON Data Checksum Error at offset " + str(headerjsonoffset))
+ VerbosePrintOut("'" + outfjsonchecksum + "' != " + "'" + injsonfcs + "'")
  if(verbose):
  VerbosePrintOut("")
-
  # Iterate either until EOF (seektoend) or fixed count
  while (fp.tell() < CatSizeEnd) if seektoend else (il < fnumfiles):
  outfhstart = fp.tell()
- if(
+ if(__use_new_style__):
  inheaderdata = ReadFileHeaderDataBySize(fp, formatspecs['format_delimiter'])
  else:
  inheaderdata = ReadFileHeaderDataWoSize(fp, formatspecs['format_delimiter'])

  if(len(inheaderdata) == 0):
  break
-
- outfheadsize = int(inheaderdata[0], 16)
- outfnumfields = int(inheaderdata[1], 16)
- outftype = int(inheaderdata[2], 16)
- # FIX: these must come from inheaderdata, not inheader
- outfostype = inheaderdata[3]
- outfencoding = inheaderdata[4]
-
  if(re.findall("^[.|/]", inheaderdata[5])):
  outfname = inheaderdata[5]
  else:
  outfname = "./" + inheaderdata[5]
  outfbasedir = os.path.dirname(outfname)
-
- outflinkname = inheaderdata[6]
  outfsize = int(inheaderdata[7], 16)
- [removed lines 9697-9704 not shown in the source view]
- outfcompression = inheaderdata[14]
- outfcsize = int(inheaderdata[15], 16)
- outfuid = int(inheaderdata[16], 16)
- outfuname = inheaderdata[17]
- outfgid = int(inheaderdata[18], 16)
- outfgname = inheaderdata[19]
- fid = int(inheaderdata[20], 16)
- finode = int(inheaderdata[21], 16)
- flinkcount = int(inheaderdata[22], 16)
- outfdev = int(inheaderdata[23], 16)
- outfdev_minor = int(inheaderdata[24], 16)
- outfdev_major = int(inheaderdata[25], 16)
- outfseeknextfile = inheaderdata[26]
- outfjsontype = inheaderdata[27]
- outfjsonlen = int(inheaderdata[28], 16)
- outfjsonsize = int(inheaderdata[29], 16)
- outfjsonchecksumtype = inheaderdata[30]
- outfjsonchecksum = inheaderdata[31]
-
+ outfcompression = inheaderdata[17]
+ outfcsize = int(inheaderdata[18], 16)
+ fid = int(inheaderdata[23], 16)
+ finode = int(inheaderdata[24], 16)
+ outfseeknextfile = inheaderdata[28]
+ outfjsonsize = int(inheaderdata[31], 16)
+ outfjsonchecksumtype = inheaderdata[32]
+ outfjsonchecksum = inheaderdata[33]
  outfhend = fp.tell() - 1 # (kept for parity; not used)
  outfjstart = fp.tell()
-
  # Read JSON bytes; compute checksum on bytes for robustness
  outfprejsoncontent_bytes = fp.read(outfjsonsize)
  # Decode for any downstream text needs (not used further here)
@@ -9731,27 +9846,21 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
|
|
|
9731
9846
|
outfprejsoncontent = outfprejsoncontent_bytes.decode("UTF-8")
|
|
9732
9847
|
except Exception:
|
|
9733
9848
|
outfprejsoncontent = None
|
|
9734
|
-
|
|
9735
9849
|
outfjend = fp.tell()
|
|
9736
9850
|
fp.seek(len(formatspecs['format_delimiter']), 1)
|
|
9737
|
-
|
|
9738
|
-
|
|
9739
|
-
|
|
9740
|
-
outfextrasize = int(inheaderdata[32], 16)
|
|
9741
|
-
outfextrafields = int(inheaderdata[33], 16)
|
|
9851
|
+
injsonfcs = GetFileChecksum(outfprejsoncontent_bytes, outfjsonchecksumtype, True, formatspecs, saltkey)
|
|
9852
|
+
outfextrafields = int(inheaderdata[35], 16)
|
|
9742
9853
|
extrafieldslist = []
|
|
9743
|
-
extrastart =
|
|
9854
|
+
extrastart = 36
|
|
9744
9855
|
extraend = extrastart + outfextrafields
|
|
9745
|
-
|
|
9746
9856
|
outfcs = inheaderdata[-2].lower()
|
|
9747
9857
|
outfccs = inheaderdata[-1].lower()
|
|
9748
|
-
infcs = GetHeaderChecksum(inheaderdata[:-2], inheaderdata[-4].lower(), True, formatspecs)
|
|
9749
|
-
|
|
9858
|
+
infcs = GetHeaderChecksum(inheaderdata[:-2], inheaderdata[-4].lower(), True, formatspecs, saltkey)
|
|
9750
9859
|
if(verbose):
|
|
9751
9860
|
VerbosePrintOut(outfname)
|
|
9752
9861
|
VerbosePrintOut("Record Number " + str(il) + "; File ID " + str(fid) + "; iNode Number " + str(finode))
|
|
9753
9862
|
|
|
9754
|
-
if(
|
|
9863
|
+
if(CheckChecksums(outfcs, infcs)):
|
|
9755
9864
|
if(verbose):
|
|
9756
9865
|
VerbosePrintOut("File Header Checksum Passed at offset " + str(outfhstart))
|
|
9757
9866
|
VerbosePrintOut("'" + outfcs + "' == " + "'" + infcs + "'")
|
|
@@ -9761,9 +9870,8 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
|
|
|
9761
9870
|
if(verbose):
|
|
9762
9871
|
VerbosePrintOut("File Header Checksum Failed at offset " + str(outfhstart))
|
|
9763
9872
|
VerbosePrintOut("'" + outfcs + "' != " + "'" + infcs + "'")
|
|
9764
|
-
|
|
9765
9873
|
if(outfjsonsize > 0):
|
|
9766
|
-
if(
|
|
9874
|
+
if(CheckChecksums(injsonfcs, outfjsonchecksum)):
|
|
9767
9875
|
if(verbose):
|
|
9768
9876
|
VerbosePrintOut("File JSON Data Checksum Passed at offset " + str(outfjstart))
|
|
9769
9877
|
VerbosePrintOut("'" + outfjsonchecksum + "' == " + "'" + injsonfcs + "'")
|
|
@@ -9773,21 +9881,19 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
|
|
|
9773
9881
|
if(verbose):
|
|
9774
9882
|
VerbosePrintOut("File JSON Data Checksum Error at offset " + str(outfjstart))
|
|
9775
9883
|
VerbosePrintOut("'" + outfjsonchecksum + "' != " + "'" + injsonfcs + "'")
|
|
9776
|
-
|
|
9777
9884
|
outfcontentstart = fp.tell()
|
|
9778
9885
|
outfcontents = b"" # FIX: bytes for Py2/3 consistency
|
|
9779
9886
|
pyhascontents = False
|
|
9780
|
-
|
|
9781
9887
|
if(outfsize > 0):
|
|
9782
9888
|
if(outfcompression == "none" or outfcompression == "" or outfcompression == "auto"):
|
|
9783
9889
|
outfcontents = fp.read(outfsize)
|
|
9784
9890
|
else:
|
|
9785
9891
|
outfcontents = fp.read(outfcsize)
|
|
9786
9892
|
|
|
9787
|
-
infccs = GetFileChecksum(outfcontents, inheaderdata[-3].lower(), False, formatspecs)
|
|
9893
|
+
infccs = GetFileChecksum(outfcontents, inheaderdata[-3].lower(), False, formatspecs, saltkey)
|
|
9788
9894
|
pyhascontents = True
|
|
9789
9895
|
|
|
9790
|
-
if(
|
|
9896
|
+
if(CheckChecksums(outfccs, infccs)):
|
|
9791
9897
|
if(verbose):
|
|
9792
9898
|
VerbosePrintOut("File Content Checksum Passed at offset " + str(outfcontentstart))
|
|
9793
9899
|
VerbosePrintOut("'" + outfccs + "' == " + "'" + infccs + "'")
|
|
@@ -9797,10 +9903,8 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
|
|
|
9797
9903
|
if(verbose):
|
|
9798
9904
|
VerbosePrintOut("File Content Checksum Failed at offset " + str(outfcontentstart))
|
|
9799
9905
|
VerbosePrintOut("'" + outfccs + "' != " + "'" + infccs + "'")
|
|
9800
|
-
|
|
9801
9906
|
if(verbose):
|
|
9802
9907
|
VerbosePrintOut("")
|
|
9803
|
-
|
|
9804
9908
|
# Next seek directive
|
|
9805
9909
|
if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
|
|
9806
9910
|
fseeknextasnum = int(outfseeknextfile.replace("+", ""))
|
|
@@ -9819,9 +9923,7 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
|
|
|
9819
9923
|
fp.seek(fseeknextasnum, 0)
|
|
9820
9924
|
else:
|
|
9821
9925
|
return False
|
|
9822
|
-
|
|
9823
9926
|
il = il + 1
|
|
9824
|
-
|
|
9825
9927
|
if(valid_archive):
|
|
9826
9928
|
if(returnfp):
|
|
9827
9929
|
return fp
|
|
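Throughout the validation loop above, 0.26.0 threads the new `saltkey` argument into `GetHeaderChecksum` and `GetFileChecksum` and compares stored versus computed digests through `CheckChecksums`. A minimal sketch of salted digesting plus comparison, assuming an HMAC-style construction; the helper names and the HMAC choice here are illustrative assumptions, not the package's actual internals:

```python
import hashlib
import hmac

def salted_digest(data, algo="md5", saltkey=None):
    # Hypothetical sketch: no salt falls back to a plain digest; with a
    # salt, HMAC mixes the key into the hash instead of bare concatenation.
    if saltkey is None:
        return hashlib.new(algo, data).hexdigest().lower()
    if isinstance(saltkey, str):
        saltkey = saltkey.encode("utf-8")
    return hmac.new(saltkey, data, algo).hexdigest().lower()

def check_checksums(expected, actual):
    # Constant-time comparison avoids leaking matching prefixes via timing.
    return hmac.compare_digest(expected.lower(), actual.lower())
```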
@@ -9833,34 +9935,34 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
         return False


-def ArchiveFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
-    return ArchiveFileValidate(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
+def ArchiveFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
+    return ArchiveFileValidate(infile, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)


-def ArchiveFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
+def ArchiveFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
     if(isinstance(infile, (list, tuple, ))):
         pass
     else:
         infile = [infile]
     outretval = True
     for curfname in infile:
-        curretfile = ArchiveFileValidate(curfname, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
+        curretfile = ArchiveFileValidate(curfname, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
         if(not curretfile):
             outretval = False
     return outretval

-def ArchiveFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
-    return ArchiveFileValidateMultiple(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
+def ArchiveFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
+    return ArchiveFileValidateMultiple(infile, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)


-def StackedArchiveFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
+def StackedArchiveFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
     outretval = []
     outstartfile = filestart
     outfsize = float('inf')
     while True:
         if outstartfile >= outfsize:  # stop when function signals False
             break
-        is_valid_file = ArchiveFileValidate(infile, fmttype, outstartfile, formatspecs, seektoend, verbose, True)
+        is_valid_file = ArchiveFileValidate(infile, fmttype, outstartfile, formatspecs, saltkey, seektoend, verbose, True)
         if is_valid_file is False:  # stop when function signals False
             outretval.append(is_valid_file)
             break
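Every validate entry point gains a `saltkey=None` parameter inserted after `formatspecs`, and each wrapper forwards it positionally, so positional callers written against 0.25.x would now pass their `seektoend` value into `saltkey`. A hedged usage sketch (archive names invented):

```python
from pyarchivefile import ArchiveFileValidate, ArchiveFileValidateMultiple

# Safe across the signature change: pass trailing options by keyword.
ok = ArchiveFileValidate("backup.arc", "auto", 0, verbose=True)
ok_salted = ArchiveFileValidate("backup.arc", "auto", 0, saltkey=b"s3cret", verbose=True)
all_ok = ArchiveFileValidateMultiple(["a.arc", "b.arc"], saltkey=b"s3cret")
```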
@@ -9877,33 +9979,36 @@ def StackedArchiveFileValidate(infile, fmttype="auto", filestart=0, formatspecs=
     if(returnfp):
         return infile
     else:
-
+        try:
+            infile.close()
+        except AttributeError:
+            return False
     return outretval



-def StackedArchiveFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
-    return StackedArchiveFileValidate(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
+def StackedArchiveFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
+    return StackedArchiveFileValidate(infile, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)


-def StackedArchiveFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
+def StackedArchiveFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
     if(isinstance(infile, (list, tuple, ))):
         pass
     else:
         infile = [infile]
     outretval = True
     for curfname in infile:
-        curretfile = StackedArchiveFileValidate(curfname, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
+        curretfile = StackedArchiveFileValidate(curfname, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
         if(not curretfile):
             outretval = False
     return outretval

-def StackedArchiveFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
-    return StackedArchiveFileValidateMultiple(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
+def StackedArchiveFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
+    return StackedArchiveFileValidateMultiple(infile, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)


-def ArchiveFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
-    outfp = ReadInFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+def ArchiveFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
+    outfp = ReadInFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
     if not returnfp:
         for item in outfp:
             fp = item.get('fp')
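When `returnfp` is false, the stacked validator now attempts to close `infile` and treats an object without a `close()` method as a failure. The idiom, reduced to a standalone sketch:

```python
def close_if_possible(obj):
    # Mirrors the new cleanup path above: plain paths/strings have no
    # close(), which raises AttributeError; file-like objects get closed.
    try:
        obj.close()
    except AttributeError:
        return False
    return True
```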
@@ -9917,26 +10022,26 @@ def ArchiveFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend
     return outfp


-def MultipleArchiveFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+def MultipleArchiveFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
     if(isinstance(infile, (list, tuple, ))):
         pass
     else:
         infile = [infile]
     outretval = []
     for curfname in infile:
-        outretval.append(ArchiveFileToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp))
+        outretval.append(ArchiveFileToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend, returnfp))
     return outretval

-def MultipleArchiveFilesToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
-    return MultipleArchiveFileToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
+def MultipleArchiveFilesToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
+    return MultipleArchiveFileToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend, returnfp)


-def ArchiveFileStringToArray(instr, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+def ArchiveFileStringToArray(instr, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
     checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
     if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
         formatspecs = formatspecs[checkcompressfile]
     fp = MkTempFile(instr)
-    listarrayfiles = ArchiveFileToArray(fp, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
+    listarrayfiles = ArchiveFileToArray(fp, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
     return listarrayfiles


@@ -9945,9 +10050,8 @@ def TarFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile
     if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
         formatspecs = formatspecs[checkcompressfile]
     fp = MkTempFile()
-    fp = PackArchiveFileFromTarFile(
-
-    listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
+    fp = PackArchiveFileFromTarFile(infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, None, False, True)
+    listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, None, seektoend, returnfp)
     return listarrayfiles


@@ -9956,9 +10060,8 @@ def ZipFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile
     if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
         formatspecs = formatspecs[checkcompressfile]
     fp = MkTempFile()
-    fp = PackArchiveFileFromZipFile(
-
-    listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
+    fp = PackArchiveFileFromZipFile(infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, None, False, True)
+    listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, None, seektoend, returnfp)
     return listarrayfiles


@@ -9972,9 +10075,8 @@ if(rarfile_support):
     if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
         formatspecs = formatspecs[checkcompressfile]
     fp = MkTempFile()
-    fp = PackArchiveFileFromRarFile(
-
-    listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
+    fp = PackArchiveFileFromRarFile(infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, None, False, True)
+    listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, None, seektoend, returnfp)
     return listarrayfiles

 if(not py7zr_support):
@@ -9987,13 +10089,12 @@ if(py7zr_support):
     if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
         formatspecs = formatspecs[checkcompressfile]
     fp = MkTempFile()
-    fp = PackArchiveFileFromSevenZipFile(
-
-    listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
+    fp = PackArchiveFileFromSevenZipFile(infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, None, False, True)
+    listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, None, seektoend, returnfp)
     return listarrayfiles


-def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
     checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
     if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
         formatspecs = formatspecs[checkcompressfile]
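The tar/zip/rar/7z converters now spell out the full `PackArchiveFileFrom*File(...)` argument list and pass `None` for the new salt slot, so foreign archives are always converted unsalted. A hedged usage sketch (file names invented):

```python
from pyarchivefile import TarFileToArray, ZipFileToArray

# Convert a foreign archive into the native format in a temp file, then
# parse it back into the entry-array structure; no saltkey is applied.
tar_entries = TarFileToArray("release.tar")
zip_names_only = ZipFileToArray("release.zip", listonly=True)
```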
@@ -10006,17 +10107,16 @@ def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, c
     elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
         return SevenZipFileToArray(infile, seekstart, seekend, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
     elif(checkcompressfile == formatspecs['format_magic']):
-        return ArchiveFileToArray(infile, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
+        return ArchiveFileToArray(infile, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
     else:
         return False
     return False


-def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, skipchecksum=False, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
+def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, saltkey=None, skipchecksum=False, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
     outarray = MkTempFile()
-    packform = PackArchiveFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
-
-    listarrayfiles = ArchiveFileToArray(outarray, "auto", filestart, seekstart, seekend, listonly, True, True, skipchecksum, formatspecs, seektoend, returnfp)
+    packform = PackArchiveFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, followlink, checksumtype, extradata, formatspecs, saltkey, verbose, True)
+    listarrayfiles = ArchiveFileToArray(outarray, "auto", filestart, seekstart, seekend, listonly, True, True, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
     return listarrayfiles


@@ -10138,12 +10238,12 @@ def ArchiveFileArrayToArrayIndex(inarray, returnfp=False):
     return out


-def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=
+def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, seektoend=False, verbose=False, returnfp=False):
     # ---------- Safe defaults ----------
     if compressionuselist is None:
         compressionuselist = compressionlistalt
     if checksumtype is None:
-        checksumtype = ["md5", "md5", "md5", "md5"]
+        checksumtype = ["md5", "md5", "md5", "md5", "md5"]
     if extradata is None:
         extradata = []
     if jsondata is None:
@@ -10162,7 +10262,7 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
     infile = RemoveWindowsPath(infile)
     listarrayfileslist = ArchiveFileToArray(
         infile, "auto", filestart, seekstart, seekend,
-        False, True, True, skipchecksum, formatspecs, seektoend, False
+        False, True, True, skipchecksum, formatspecs, insaltkey, seektoend, False
     )

     # ---------- Format specs selection ----------
@@ -10229,9 +10329,6 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
     if (compression is None) or (compressionuselist and compression not in compressionuselist):
         compression = "auto"

-    if verbose:
-        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
-
     # No files?
     if not listarrayfiles.get('ffilelist'):
         return False
@@ -10244,7 +10341,7 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
     if lenlist != fnumfiles:
         fnumfiles = lenlist

-    AppendFileHeader(fp, fnumfiles, listarrayfiles.get('fencoding', 'utf-8'), [], checksumtype[0], formatspecs)
+    AppendFileHeader(fp, fnumfiles, listarrayfiles.get('fencoding', 'utf-8'), listarrayfiles['fextradata'], listarrayfiles['fjsondata'], [checksumtype[0], checksumtype[1]], formatspecs, outsaltkey)

     # loop counters
     lcfi = 0
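`RePackArchiveFile` now defaults `checksumtype` to five entries: the first two feed `AppendFileHeader` above, and the last three feed the `AppendFileHeaderWithContent` call later in this function. A sketch of the slot layout; the per-slot meanings are inferred from those two calls, so treat them as assumptions:

```python
# Five checksum slots as RePackArchiveFile consumes them in 0.26.0:
checksumtype = [
    "md5",  # [0] archive header           -> AppendFileHeader
    "md5",  # [1] archive-level JSON data  -> AppendFileHeader
    "md5",  # [2] per-file header          -> AppendFileHeaderWithContent
    "md5",  # [3] per-file JSON data       -> AppendFileHeaderWithContent
    "md5",  # [4] per-file content         -> AppendFileHeaderWithContent
]
```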
@@ -10274,6 +10371,9 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
         # fields (hex-encoded where expected)
         fheadersize = format(int(cur_entry['fheadersize']), 'x').lower()
         fsize = format(int(cur_entry['fsize']), 'x').lower()
+        fblksize = format(int(cur_entry['fblksize']), 'x').lower()
+        fblocks = format(int(cur_entry['fblocks']), 'x').lower()
+        fflags = format(int(cur_entry['fflags']), 'x').lower()
         flinkname = cur_entry['flinkname']
         fatime = format(int(cur_entry['fatime']), 'x').lower()
         fmtime = format(int(cur_entry['fmtime']), 'x').lower()
@@ -10292,8 +10392,7 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
         fcompression = cur_entry['fcompression']
         fcsize = format(int(cur_entry['fcsize']), 'x').lower()
         fdev = format(int(cur_entry['fdev']), 'x').lower()
-
-        fdev_major = format(int(cur_entry['fmajor']), 'x').lower()
+        frdev = format(int(cur_entry['frdev']), 'x').lower()
         fseeknextfile = cur_entry['fseeknextfile']

         # extra fields sizing
@@ -10304,6 +10403,12 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
         # extradata/jsondata defaults per file
         if not followlink and len(extradata) <= 0:
             extradata = cur_entry['fextradata']
+
+        fvendorfields = cur_entry['fvendorfields']
+        ffvendorfieldslist = []
+        if(fvendorfields>0):
+            ffvendorfieldslist = cur_entry['fvendorfieldslist']
+
         if not followlink and len(jsondata) <= 0:
             jsondata = cur_entry['fjsondata']

@@ -10339,7 +10444,11 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
                 fcontents.seek(0, 0)
                 cfcontents.seek(0, 0)
                 cfcontents = CompressOpenFileAlt(
-                    cfcontents,
+                    cfcontents,
+                    compressionuselist[ilmin],
+                    compressionlevel,
+                    compressionuselist,
+                    formatspecs
                 )
                 if cfcontents:
                     cfcontents.seek(0, 2)
@@ -10347,7 +10456,7 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
                     cfcontents.close()
                 else:
                     ilcsize.append(float("inf"))
-                ilmin
+                ilmin = ilmin + 1
             ilcmin = ilcsize.index(min(ilcsize))
             curcompression = compressionuselist[ilcmin]

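The repaired loop above tries each candidate codec in `compressionuselist`, records the resulting size in `ilcsize` (infinity when a codec yields nothing), advances `ilmin` each pass, and keeps the codec at the index of the smallest size. The selection logic, reduced to a sketch:

```python
def pick_best_codec(compressed_size, candidates):
    # compressed_size(codec) -> size in bytes; illustrative reduction of
    # the trial loop above: smallest output wins, failures count as inf.
    sizes = []
    for codec in candidates:
        try:
            sizes.append(compressed_size(codec))
        except Exception:
            sizes.append(float("inf"))
    return candidates[sizes.index(min(sizes))]
```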
@@ -10356,16 +10465,24 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
             shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
             cfcontents.seek(0, 0)
             cfcontents = CompressOpenFileAlt(
-                cfcontents,
+                cfcontents,
+                curcompression,
+                compressionlevel,
+                compressionuselist,
+                formatspecs
             )
             cfcontents.seek(0, 2)
-
-            if ucfsize >
-            fcsize = format(int(
+            cfsize = cfcontents.tell()
+            if ucfsize > cfsize:
+                fcsize = format(int(cfsize), 'x').lower()
                 fcompression = curcompression
                 fcontents.close()
                 fcontents = cfcontents

+        if fcompression == "none":
+            fcompression = ""
+        fcontents.seek(0, 0)
+
         # link following (fixed: use listarrayfiles, not prelistarrayfiles)
         if followlink:
             if (cur_entry['ftype'] == 1 or cur_entry['ftype'] == 2):
@@ -10374,6 +10491,9 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
                 flinkinfo = listarrayfiles['ffilelist'][flinkid]
                 fheadersize = format(int(flinkinfo['fheadersize']), 'x').lower()
                 fsize = format(int(flinkinfo['fsize']), 'x').lower()
+                fblksize = format(int(flinkinfo['fblksize']), 'x').lower()
+                fblocks = format(int(flinkinfo['fblocks']), 'x').lower()
+                fflags = format(int(flinkinfo['fflags']), 'x').lower()
                 flinkname = flinkinfo['flinkname']
                 fatime = format(int(flinkinfo['fatime']), 'x').lower()
                 fmtime = format(int(flinkinfo['fmtime']), 'x').lower()
@@ -10392,14 +10512,19 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
                 fcompression = flinkinfo['fcompression']
                 fcsize = format(int(flinkinfo['fcsize']), 'x').lower()
                 fdev = format(int(flinkinfo['fdev']), 'x').lower()
-
-                fdev_major = format(int(flinkinfo['fmajor']), 'x').lower()
+                frdev = format(int(flinkinfo['frdev']), 'x').lower()
                 fseeknextfile = flinkinfo['fseeknextfile']
                 if (len(flinkinfo['fextradata']) > flinkinfo['fextrafields']
                         and len(flinkinfo['fextradata']) > 0):
                     flinkinfo['fextrafields'] = len(flinkinfo['fextradata'])
                 if len(extradata) < 0:
                     extradata = flinkinfo['fextradata']
+
+                fvendorfields = flinkinfo['fvendorfields']
+                ffvendorfieldslist = []
+                if(fvendorfields>0):
+                    ffvendorfieldslist = flinkinfo['fvendorfieldslist']
+
                 if len(jsondata) < 0:
                     jsondata = flinkinfo['fjsondata']
                 fcontents = flinkinfo['fcontents']
@@ -10428,15 +10553,15 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
             fcompression = ""

         tmpoutlist = [
-            ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime,
+            ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime,
             fctime, fbtime, fmode, fwinattributes, fcompression, fcsize, fuid, funame,
-            fgid, fgname, fcurfid, fcurinode, flinkcount, fdev,
+            fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, fseeknextfile
         ]

-
-
-
-        )
+        if(fvendorfields>0 and len(ffvendorfieldslist)>0):
+            extradata.extend(fvendorfields)
+
+        AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(),[checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, outsaltkey)
         try:
             fcontents.close()
         except Exception:
@@ -10481,12 +10606,12 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
             pass
     return True

-def RePackMultipleArchiveFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=
+def RePackMultipleArchiveFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, seektoend=False, verbose=False, returnfp=False):
     if not isinstance(infiles, list):
         infiles = [infiles]
     returnout = False
     for infileslist in infiles:
-        returnout = RePackArchiveFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend, checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, True)
+        returnout = RePackArchiveFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend, checksumtype, skipchecksum, extradata, jsondata, formatspecs, insaltkey, outsaltkey, seektoend, verbose, True)
         if(not returnout):
             break
     else:
@@ -10496,33 +10621,28 @@ def RePackMultipleArchiveFile(infiles, outfile, fmttype="auto", compression="aut
         return True
     return returnout

-def RePackArchiveFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=
+def RePackArchiveFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, seektoend=False, verbose=False, returnfp=False):
     fp = MkTempFile(instr)
-    listarrayfiles = RePackArchiveFile(fp, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
-                                       checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
+    listarrayfiles = RePackArchiveFile(fp, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend, checksumtype, skipchecksum, extradata, jsondata, formatspecs, insaltkey, outsaltkey, seektoend, verbose, returnfp)
     return listarrayfiles


-def PackArchiveFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
+def PackArchiveFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
     outarray = MkTempFile()
-    packform = PackArchiveFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
-
-    listarrayfiles = RePackArchiveFile(outarray, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
-                                       checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
+    packform = PackArchiveFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, saltkey, verbose, True)
+    listarrayfiles = RePackArchiveFile(outarray, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend, checksumtype, skipchecksum, extradata, jsondata, formatspecs, saltkey, seektoend, verbose, returnfp)
     return listarrayfiles


-def UnPackArchiveFile(infile, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, preservepermissions=True, preservetime=True, seektoend=False, verbose=False, returnfp=False):
+def UnPackArchiveFile(infile, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, preservepermissions=True, preservetime=True, seektoend=False, verbose=False, returnfp=False):
     if(outdir is not None):
         outdir = RemoveWindowsPath(outdir)
-    if(verbose):
-        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     if(isinstance(infile, dict)):
         listarrayfiles = infile
     else:
         if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
             infile = RemoveWindowsPath(infile)
-        listarrayfiles = ArchiveFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, True, skipchecksum, formatspecs, seektoend, returnfp)
+        listarrayfiles = ArchiveFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, True, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
     if(not listarrayfiles):
         return False
     lenlist = len(listarrayfiles['ffilelist'])
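`UnPackArchiveFile` likewise gains `saltkey` (forwarded to `ArchiveFileToArray`) and no longer reconfigures the `logging` module as a side effect of `verbose`. A hedged usage sketch (paths invented):

```python
import logging
from pyarchivefile import UnPackArchiveFile

# Configure logging yourself now; verbose no longer calls basicConfig.
logging.basicConfig(format="%(message)s", level=logging.DEBUG)
UnPackArchiveFile("backup.arc", outdir="restored", saltkey=b"s3cret", verbose=True)
```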
@@ -10758,9 +10878,9 @@ def UnPackArchiveFile(infile, outdir=None, followlink=False, filestart=0, seekst
     return True


-def UnPackArchiveFileString(instr, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
+def UnPackArchiveFileString(instr, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
     fp = MkTempFile(instr)
-    listarrayfiles = UnPackArchiveFile(fp, outdir, followlink, filestart, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
+    listarrayfiles = UnPackArchiveFile(fp, outdir, followlink, filestart, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend, verbose, returnfp)
     return listarrayfiles

 def ftype_to_str(ftype):
@@ -10778,9 +10898,7 @@ def ftype_to_str(ftype):
     # Default to "file" if unknown
     return mapping.get(ftype, "file")

-def ArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
-    if(verbose):
-        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
+def ArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, newstyle=False, returnfp=False):
     if(isinstance(infile, dict)):
         listarrayfileslist = [infile]
     if(isinstance(infile, list)):
@@ -10788,7 +10906,7 @@ def ArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seeke
     else:
         if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
             infile = RemoveWindowsPath(infile)
-        listarrayfileslist = ArchiveFileToArray(infile, fmttype, filestart, seekstart, seekend, True, False, False, skipchecksum, formatspecs, seektoend, returnfp)
+        listarrayfileslist = ArchiveFileToArray(infile, fmttype, filestart, seekstart, seekend, True, False, False, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
     if(not listarrayfileslist):
         return False
     for listarrayfiles in listarrayfileslist:
@@ -10825,8 +10943,11 @@ def ArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seeke
                 VerbosePrintOut(ftype_to_str(listarrayfiles['ffilelist'][lcfi]['ftype']) + "\t" + listarrayfiles['ffilelist'][lcfi]['fcompression'] + "\t" + str(
                     listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + "\t" + printfname)
             else:
+                ts_ns = listarrayfiles['ffilelist'][lcfi]['fmtime']
+                sec, ns = divmod(int(ts_ns), 10**9)
+                dt = datetime.datetime.utcfromtimestamp(sec).replace(microsecond=ns // 1000)
                 VerbosePrintOut(PrintPermissionString(listarrayfiles['ffilelist'][lcfi]['fmode'], listarrayfiles['ffilelist'][lcfi]['ftype']) + " " + str(fuprint) + "/" + str(fgprint) + " " + str(
-                    listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + " " +
+                    listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + " " + dt.strftime('%Y-%m-%d %H:%M') + " " + printfname)
             lcfi = lcfi + 1
         if(returnfp):
             return listarrayfiles['fp']
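The long-listing branch above now treats `fmtime` as a nanosecond timestamp: it splits it with `divmod(ts_ns, 10**9)`, rebuilds a UTC `datetime` with microsecond precision, and renders it to the minute. The same conversion as a standalone sketch:

```python
import datetime

def format_mtime_ns(ts_ns):
    # Split nanoseconds into whole seconds + remainder, then render.
    sec, ns = divmod(int(ts_ns), 10**9)
    dt = datetime.datetime.utcfromtimestamp(sec).replace(microsecond=ns // 1000)
    return dt.strftime('%Y-%m-%d %H:%M')

print(format_mtime_ns(1731456000 * 10**9))  # -> 2024-11-13 00:00
```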
@@ -10834,25 +10955,25 @@ def ArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seeke
     return True


-def MultipleArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+def MultipleArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
     if(isinstance(infile, (list, tuple, ))):
         pass
     else:
         infile = [infile]
     outretval = {}
     for curfname in infile:
-        outretval[curfname] = ArchiveFileListFiles(infile, fmttype, filestart, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, returnfp)
+        outretval[curfname] = ArchiveFileListFiles(infile, fmttype, filestart, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend, verbose, newstyle, returnfp)
     return outretval


-def StackedArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
+def StackedArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, newstyle=False, returnfp=False):
     outretval = []
     outstartfile = filestart
     outfsize = float('inf')
     while True:
         if outstartfile >= outfsize:  # stop when function signals False
             break
-        list_file_retu = ArchiveFileListFiles(infile, fmttype, outstartfile, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, True)
+        list_file_retu = ArchiveFileListFiles(infile, fmttype, outstartfile, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend, verbose, newstyle, True)
         if list_file_retu is False:  # stop when function signals False
             outretval.append(list_file_retu)
         else:
@@ -10868,30 +10989,31 @@ def StackedArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0
     if(returnfp):
         return infile
     else:
-
+        try:
+            infile.close()
+        except AttributeError:
+            return False
     return outretval


-def MultipleStackedArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+def MultipleStackedArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
     if(isinstance(infile, (list, tuple, ))):
         pass
     else:
         infile = [infile]
     outretval = {}
     for curfname in infile:
-        outretval[curfname] = StackedArchiveFileListFiles(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
+        outretval[curfname] = StackedArchiveFileListFiles(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
     return outretval


-def ArchiveFileStringListFiles(instr, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
+def ArchiveFileStringListFiles(instr, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, newstyle=False, returnfp=False):
     fp = MkTempFile(instr)
-    listarrayfiles = ArchiveFileListFiles(instr, "auto", filestart, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, returnfp)
+    listarrayfiles = ArchiveFileListFiles(instr, "auto", filestart, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend, verbose, newstyle, returnfp)
     return listarrayfiles


 def TarFileListFiles(infile, verbose=False, returnfp=False):
-    if(verbose):
-        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     if(infile == "-"):
         infile = MkTempFile()
         shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
@@ -11012,8 +11134,6 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):


 def ZipFileListFiles(infile, verbose=False, returnfp=False):
-    if(verbose):
-        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     if(infile == "-"):
         infile = MkTempFile()
         shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
@@ -11139,8 +11259,6 @@ if(not rarfile_support):

 if(rarfile_support):
     def RarFileListFiles(infile, verbose=False, returnfp=False):
-        if(verbose):
-            logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
         if(not os.path.exists(infile) or not os.path.isfile(infile)):
             return False
         if(not rarfile.is_rarfile(infile) and not rarfile.is_rarfile_sfx(infile)):
@@ -11268,8 +11386,6 @@ if(not py7zr_support):

 if(py7zr_support):
     def SevenZipFileListFiles(infile, verbose=False, returnfp=False):
-        if(verbose):
-            logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
         if(not os.path.exists(infile) or not os.path.isfile(infile)):
             return False
         lcfi = 0
@@ -11363,8 +11479,6 @@ if(py7zr_support):


 def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict__, seektoend=False, newstyle=False, returnfp=False):
-    if(verbose):
-        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
     if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
         formatspecs = formatspecs[checkcompressfile]
@@ -11391,44 +11505,6 @@ def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compress
         outarray, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
     return listarrayfiles

-"""
-PyNeoFile compatibility layer
-"""
-
-def make_empty_file_pointer_neo(fp, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
-    return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)
-
-def make_empty_archive_file_pointer_neo(fp, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
-    return make_empty_file_pointer_neo(fp, fmttype, checksumtype, formatspecs, encoding)
-
-def make_empty_file_neo(outfile=None, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
-    return MakeEmptyFile(outfile, fmttype, "auto", False, None, compressionlistalt, checksumtype, formatspecs, returnfp)
-
-def make_empty_archive_file_neo(outfile=None, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
-    return make_empty_file_neo(outfile, fmttype, checksumtype, formatspecs, encoding, returnfp)
-
-def pack_neo(infiles, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["md5", "md5", "md5", "md5"], encoding="UTF-8", compression="auto", compression_level=None, returnfp=False):
-    return PackArchiveFile(infiles, outfile, False, "auto", compression, False, compression_level, compressionlistalt, False, checksumtypes, [], {}, formatspecs, False, returnfp)
-
-def archive_to_array_neo(infile, formatspecs=__file_format_multi_dict__, listonly=False, skipchecksum=False, uncompress=True, returnfp=False):
-    return ArchiveFileToArray(infile, "auto", 0, 0, 0, listonly, True, uncompress, skipchecksum, formatspecs, False, returnfp)
-
-def unpack_neo(infile, outdir='.', formatspecs=__file_format_multi_dict__, skipchecksum=False, uncompress=True, returnfp=False):
-    return UnPackArchiveFile(infile, outdir, False, 0, 0, skipchecksum, formatspecs, True, True, False, False, returnfp)
-
-def repack_neo(infile, outfile=None, formatspecs=__file_format_dict__, checksumtypes=["md5", "md5", "md5", "md5"], compression="auto", compression_level=None, returnfp=False):
-    return RePackArchiveFile(infile, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)
-
-def validate_neo(infile, formatspecs=__file_format_multi_dict__, verbose=False, return_details=False, returnfp=False):
-    return ArchiveFileValidate(infile, "auto", formatspecs, False, verbose, returnfp)
-
-def listfiles_neo(infile, formatspecs=__file_format_multi_dict__, advanced=False, include_dirs=True, returnfp=False):
-    return ArchiveFileListFiles(infile, "auto", 0, 0, False, formatspecs, False, True, advanced, returnfp)
-
-def convert_foreign_to_neo(infile, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["md5", "md5", "md5", "md5"], compression="auto", compression_level=None, returnfp=False):
-    intmp = InFileToArray(infile, 0, 0, 0, False, True, False, formatspecs, False, False)
-    return RePackArchiveFile(intmp, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)
-
 def detect_cwd(ftp, file_dir):
     """
     Test whether cwd into file_dir works. Returns True if it does,