PyCatFile 0.25.2__py3-none-any.whl → 0.27.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pycatfile-0.25.2.data → pycatfile-0.27.0.data}/scripts/catfile.py +32 -18
- {pycatfile-0.25.2.dist-info → pycatfile-0.27.0.dist-info}/METADATA +1 -1
- pycatfile-0.27.0.dist-info/RECORD +10 -0
- pycatfile.py +1485 -849
- pycatfile-0.25.2.dist-info/RECORD +0 -10
- {pycatfile-0.25.2.data → pycatfile-0.27.0.data}/scripts/catneofile.py +0 -0
- {pycatfile-0.25.2.data → pycatfile-0.27.0.data}/scripts/neocatfile.py +0 -0
- {pycatfile-0.25.2.dist-info → pycatfile-0.27.0.dist-info}/WHEEL +0 -0
- {pycatfile-0.25.2.dist-info → pycatfile-0.27.0.dist-info}/licenses/LICENSE +0 -0
- {pycatfile-0.25.2.dist-info → pycatfile-0.27.0.dist-info}/top_level.txt +0 -0
- {pycatfile-0.25.2.dist-info → pycatfile-0.27.0.dist-info}/zip-safe +0 -0
pycatfile.py
CHANGED

@@ -14,7 +14,7 @@
 Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
 Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski

-    $FileInfo: pycatfile.py - Last Update: 11/
+    $FileInfo: pycatfile.py - Last Update: 11/14/2025 Ver. 0.27.0 RC 1 - Author: cooldude2k $
 '''

 from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -24,7 +24,6 @@ import re
 import sys
 import time
 import stat
-import zlib
 import mmap
 import hmac
 import base64
@@ -38,8 +37,8 @@ import zipfile
 import binascii
 import datetime
 import platform
+import collections
 from io import StringIO, BytesIO
-from collections import namedtuple
 import posixpath  # POSIX-safe joins/normpaths
 try:
     from backports import tempfile
@@ -50,12 +49,16 @@ try:
     from http.server import BaseHTTPRequestHandler, HTTPServer
     from socketserver import TCPServer
     from urllib.parse import urlparse, parse_qs
-    import base64
 except ImportError:
     from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
     from SocketServer import TCPServer
     from urlparse import urlparse, parse_qs
-
+
+try:
+    # Python 3.8+ only
+    from multiprocessing import shared_memory
+except ImportError:
+    shared_memory = None

 # FTP Support
 ftpssl = True
@@ -146,6 +149,15 @@ try:
 except Exception:
     PATH_TYPES = (basestring,)

+def running_interactively():
+    main = sys.modules.get("__main__")
+    no_main_file = not hasattr(main, "__file__")
+    interactive_flag = bool(getattr(sys.flags, "interactive", 0))
+    return no_main_file or interactive_flag
+
+if running_interactively():
+    logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
+
 def _ensure_text(s, encoding="utf-8", errors="replace", allow_none=False):
     """
     Normalize any input to text_type (unicode on Py2, str on Py3).
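Commentary, not part of the published diff: the helper above treats a session as interactive when __main__ has no __file__ (a bare REPL) or when sys.flags.interactive is set (python -i script.py); only then does the module enable debug logging by default. A standalone sketch of the same check:

import sys

main_mod = sys.modules.get("__main__")
no_main_file = not hasattr(main_mod, "__file__")           # True in a bare REPL
inspect_flag = bool(getattr(sys.flags, "interactive", 0))  # True under `python -i`
print(no_main_file or inspect_flag)  # False when run as a plain script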
@@ -166,7 +178,6 @@ def _ensure_text(s, encoding="utf-8", errors="replace", allow_none=False):
|
|
|
166
178
|
|
|
167
179
|
# Handle pathlib.Path & other path-like objects
|
|
168
180
|
try:
|
|
169
|
-
import os
|
|
170
181
|
if hasattr(os, "fspath"):
|
|
171
182
|
fs = os.fspath(s)
|
|
172
183
|
if isinstance(fs, text_type):
|
|
@@ -207,7 +218,6 @@ except ImportError:

 # Windows-specific setup
 if os.name == "nt":
-    import io
     def _wrap(stream):
         buf = getattr(stream, "buffer", None)
         is_tty = getattr(stream, "isatty", lambda: False)()
@@ -416,9 +426,13 @@ def is_only_nonprintable(var):
 __file_format_multi_dict__ = {}
 __file_format_default__ = "CatFile"
 __include_defaults__ = True
-
+__use_inmem__ = True
+__use_memfd__ = True
 __use_spoolfile__ = False
 __use_spooldir__ = tempfile.gettempdir()
+__use_new_style__ = True
+__use_advanced_list__ = True
+__use_alt_inode__ = False
 BYTES_PER_KiB = 1024
 BYTES_PER_MiB = 1024 * BYTES_PER_KiB
 # Spool: not tiny, but won’t blow up RAM if many are in use
@@ -440,7 +454,13 @@ if('PYCATFILE_CONFIG_FILE' in os.environ and os.path.exists(os.environ['PYCATFIL
 else:
     prescriptpath = get_importing_script_path()
     if(prescriptpath is not None):
-
+        if(__use_ini_file__ and not __use_json_file__):
+            scriptconf = os.path.join(os.path.dirname(prescriptpath), __use_ini_name__)
+        elif(__use_json_file__ and not __use_ini_file__):
+            scriptconf = os.path.join(os.path.dirname(prescriptpath), __use_json_name__)
+        else:
+            scriptconf = ""
+            prescriptpath = None
     else:
         scriptconf = ""
 if os.path.exists(scriptconf):
@@ -462,9 +482,13 @@ if __use_ini_file__ and os.path.exists(__config_file__):
     __file_format_default__ = decode_unicode_escape(config.get('config', 'default'))
     __program_name__ = decode_unicode_escape(config.get('config', 'proname'))
     __include_defaults__ = config.getboolean('config', 'includedef')
-
+    __use_inmem__ = config.getboolean('config', 'useinmem')
+    __use_memfd__ = config.getboolean('config', 'usememfd')
     __use_spoolfile__ = config.getboolean('config', 'usespoolfile')
     __spoolfile_size__ = config.getint('config', 'spoolfilesize')
+    __use_new_style__ = config.getboolean('config', 'newstyle')
+    __use_advanced_list__ = config.getboolean('config', 'advancedlist')
+    __use_alt_inode__ = config.getboolean('config', 'altinode')
     # Loop through all sections
     for section in config.sections():
         if section == "config":
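Commentary, not part of the published diff: the INI branch above now also reads useinmem, usememfd, newstyle, advancedlist and altinode from the top-level [config] section. A minimal sketch of a config exercising those keys, parsed with the standard-library configparser; the values here are illustrative, not package defaults:

import configparser

SAMPLE = """\
[config]
default = CatFile
proname = PyCatFile
includedef = true
useinmem = true
usememfd = true
usespoolfile = false
spoolfilesize = 5242880
newstyle = true
advancedlist = true
altinode = false
"""

config = configparser.ConfigParser()
config.read_string(SAMPLE)
print(config.getboolean('config', 'usememfd'))  # True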
@@ -472,8 +496,7 @@ if __use_ini_file__ and os.path.exists(__config_file__):

         required_keys = [
             "len", "hex", "ver", "name",
-            "magic", "delimiter", "extension"
-            "newstyle", "advancedlist", "altinode"
+            "magic", "delimiter", "extension"
         ]

         # Py2+Py3 compatible key presence check
@@ -493,9 +516,6 @@ if __use_ini_file__ and os.path.exists(__config_file__):
             'format_hex': config.get(section, 'hex'),
             'format_delimiter': delim,
             'format_ver': config.get(section, 'ver'),
-            'new_style': config.getboolean(section, 'newstyle'),
-            'use_advanced_list': config.getboolean(section, 'advancedlist'),
-            'use_alt_inode': config.getboolean(section, 'altinode'),
             'format_extension': decode_unicode_escape(config.get(section, 'extension')),
         }
     })
@@ -556,16 +576,19 @@ elif __use_json_file__ and os.path.exists(__config_file__):
     cfg_config = cfg.get('config', {}) or {}
     __file_format_default__ = decode_unicode_escape(_get(cfg_config, 'default', ''))
     __program_name__ = decode_unicode_escape(_get(cfg_config, 'proname', ''))
-    __include_defaults__ = _to_bool(_get(cfg_config, 'includedef',
-
+    __include_defaults__ = _to_bool(_get(cfg_config, 'includedef', True))
+    __use_inmem__ = _to_bool(_get(cfg_config, 'useinmem', True))
+    __use_memfd__ = _to_bool(_get(cfg_config, 'usememfd', True))
     __use_spoolfile__ = _to_bool(_get(cfg_config, 'usespoolfile', False))
     __spoolfile_size__ = _to_int(_get(cfg_config, 'spoolfilesize', DEFAULT_SPOOL_MAX))
+    __use_new_style__ = _to_bool(_get(cfg_config, 'usespoolfile', True))
+    __use_advanced_list__ = _to_bool(_get(cfg_config, 'usespoolfile', True))
+    __use_alt_inode__ = _to_bool(_get(cfg_config, 'usespoolfile', False))

     # --- iterate format sections (everything except "config") ---
     required_keys = [
         "len", "hex", "ver", "name",
-        "magic", "delimiter", "extension"
-        "newstyle", "advancedlist", "altinode"
+        "magic", "delimiter", "extension"
     ]

     for section_name, section in cfg.items():
@@ -583,9 +606,6 @@ elif __use_json_file__ and os.path.exists(__config_file__):
         fmt_hex = decode_unicode_escape(_get(section, 'hex', ''))
         fmt_ver = decode_unicode_escape(_get(section, 'ver', ''))
         delim = decode_unicode_escape(_get(section, 'delimiter', ''))
-        new_style = _to_bool(_get(section, 'newstyle', False))
-        adv_list = _to_bool(_get(section, 'advancedlist', False))
-        alt_inode = _to_bool(_get(section, 'altinode', False))
         extension = decode_unicode_escape(_get(section, 'extension', ''))

         # keep your delimiter validation semantics
@@ -600,9 +620,6 @@ elif __use_json_file__ and os.path.exists(__config_file__):
             'format_hex': fmt_hex,
             'format_delimiter': delim,
             'format_ver': fmt_ver,
-            'new_style': new_style,
-            'use_advanced_list': adv_list,
-            'use_alt_inode': alt_inode,
             'format_extension': extension,
         }
     })
@@ -640,21 +657,18 @@ __file_format_len__ = __file_format_multi_dict__[__file_format_default__]['forma
 __file_format_hex__ = __file_format_multi_dict__[__file_format_default__]['format_hex']
 __file_format_delimiter__ = __file_format_multi_dict__[__file_format_default__]['format_delimiter']
 __file_format_ver__ = __file_format_multi_dict__[__file_format_default__]['format_ver']
-__use_new_style__ = __file_format_multi_dict__[__file_format_default__]['new_style']
-__use_advanced_list__ = __file_format_multi_dict__[__file_format_default__]['use_advanced_list']
-__use_alt_inode__ = __file_format_multi_dict__[__file_format_default__]['use_alt_inode']
 __file_format_extension__ = __file_format_multi_dict__[__file_format_default__]['format_extension']
 __file_format_dict__ = __file_format_multi_dict__[__file_format_default__]
 __project__ = __program_name__
 __program_alt_name__ = __program_name__
 __project_url__ = "https://github.com/GameMaker2k/PyCatFile"
 __project_release_url__ = __project_url__+"/releases/latest"
-__version_info__ = (0,
-__version_date_info__ = (2025, 11,
+__version_info__ = (0, 27, 0, "RC 1", 1)
+__version_date_info__ = (2025, 11, 14, "RC 1", 1)
 __version_date__ = str(__version_date_info__[0]) + "." + str(
     __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
 __revision__ = __version_info__[3]
-__revision_id__ = "$Id:
+__revision_id__ = "$Id: a0f8681f37b2a5e4682ca33c86dc2fe3ec56a903 $"
 if(__version_info__[4] is not None):
     __version_date_plusrc__ = __version_date__ + \
         "-" + str(__version_date_info__[4])
@@ -666,6 +680,9 @@ if(__version_info__[3] is not None):
 if(__version_info__[3] is None):
     __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])

+_logger = logging.getLogger(__project__)  # library-style logger
+_logger.addHandler(logging.NullHandler())  # don't emit logs unless app configures logging
+
 # From: https://stackoverflow.com/a/28568003
 # By Phaxmohdem

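Commentary, not part of the published diff: attaching a NullHandler is the standard library-logging idiom, so pycatfile stays silent unless the embedding application configures logging itself. A minimal opt-in sketch; the logger name follows __project__, assumed here to be "PyCatFile":

import logging

logging.basicConfig(level=logging.DEBUG, format="%(name)s: %(message)s")
logging.getLogger("PyCatFile").debug("now visible")  # library records flow through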
@@ -803,9 +820,9 @@ except Exception:
 geturls_ua_pyfile_python = "Mozilla/5.0 (compatible; {proname}/{prover}; +{prourl})".format(
     proname=__project__, prover=__version__, prourl=__project_url__)
 if(platform.python_implementation() != ""):
-    py_implementation = platform.python_implementation()
+    py_implementation = platform.python_implementation()+str(platform.python_version_tuple()[0])
 if(platform.python_implementation() == ""):
-    py_implementation = "CPython"
+    py_implementation = "CPython"+str(platform.python_version_tuple()[0])
 geturls_ua_pyfile_python_alt = "Mozilla/5.0 ({osver}; {archtype}; +{prourl}) {pyimp}/{pyver} (KHTML, like Gecko) {proname}/{prover}".format(osver=platform.system(
 )+" "+platform.release(), archtype=platform.machine(), prourl=__project_url__, pyimp=py_implementation, pyver=platform.python_version(), proname=__project__, prover=__version__)
 geturls_ua_googlebot_google = "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"
@@ -821,13 +838,19 @@ geturls_headers_googlebot_google_old = {'Referer': "http://google.com/", 'User-A
|
|
|
821
838
|
|
|
822
839
|
compressionsupport = []
|
|
823
840
|
try:
|
|
824
|
-
|
|
841
|
+
try:
|
|
842
|
+
import compression.gzip as gzip
|
|
843
|
+
except ImportError:
|
|
844
|
+
import gzip
|
|
825
845
|
compressionsupport.append("gz")
|
|
826
846
|
compressionsupport.append("gzip")
|
|
827
847
|
except ImportError:
|
|
828
848
|
pass
|
|
829
849
|
try:
|
|
830
|
-
|
|
850
|
+
try:
|
|
851
|
+
import compression.bz2 as bz2
|
|
852
|
+
except ImportError:
|
|
853
|
+
import bz2
|
|
831
854
|
compressionsupport.append("bz2")
|
|
832
855
|
compressionsupport.append("bzip2")
|
|
833
856
|
except ImportError:
|
|
@@ -848,20 +871,20 @@ except ImportError:
     pass
 '''
 try:
-
+    try:
+        import compression.zstd as zstd
+    except ImportError:
+        import pyzstd.zstdfile as zstd
     compressionsupport.append("zst")
     compressionsupport.append("zstd")
     compressionsupport.append("zstandard")
 except ImportError:
+    pass
+try:
     try:
-        import
-        compressionsupport.append("zst")
-        compressionsupport.append("zstd")
-        compressionsupport.append("zstandard")
+        import compression.lzma as lzma
     except ImportError:
-
-        try:
-            import lzma
+        import lzma
     compressionsupport.append("lzma")
     compressionsupport.append("xz")
 except ImportError:
@@ -871,12 +894,18 @@ except ImportError:
     compressionsupport.append("xz")
 except ImportError:
     pass
-
-
-
-
-
-
+try:
+    try:
+        import compression.zlib as zlib
+    except ImportError:
+        import zlib
+    compressionsupport.append("zlib")
+    compressionsupport.append("zl")
+    compressionsupport.append("zz")
+    compressionsupport.append("Z")
+    compressionsupport.append("z")
+except ImportError:
+    pass
 compressionlist = ['auto']
 compressionlistalt = []
 outextlist = []
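Commentary, not part of the published diff: each block above now tries the compression.* namespace first (added to the standard library in Python 3.14 alongside the new compression.zstd module) and falls back to the classic module name, so the same code still runs on older interpreters. The idiom in isolation:

# Prefer the new stdlib namespace, fall back to the legacy module name.
try:
    import compression.zlib as zlib  # Python 3.14+
except ImportError:
    import zlib  # available everywhere

print(zlib.crc32(b"pycatfile") & 0xffffffff)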
@@ -1035,6 +1064,28 @@ def VerbosePrintOutReturn(dbgtxt, outtype="log", dbgenable=True, dgblevel=20, **
|
|
|
1035
1064
|
VerbosePrintOut(dbgtxt, outtype, dbgenable, dgblevel, **kwargs)
|
|
1036
1065
|
return dbgtxt
|
|
1037
1066
|
|
|
1067
|
+
def to_ns(timestamp):
|
|
1068
|
+
"""
|
|
1069
|
+
Convert a second-resolution timestamp (int or float)
|
|
1070
|
+
into a nanosecond timestamp (int) by zero-padding.
|
|
1071
|
+
Works in Python 2 and Python 3.
|
|
1072
|
+
"""
|
|
1073
|
+
try:
|
|
1074
|
+
# Convert incoming timestamp to float so it works for int or float
|
|
1075
|
+
seconds = float(timestamp)
|
|
1076
|
+
except (TypeError, ValueError):
|
|
1077
|
+
raise ValueError("Timestamp must be int or float")
|
|
1078
|
+
|
|
1079
|
+
# Multiply by 1e9 to get nanoseconds, then cast to int
|
|
1080
|
+
return int(seconds * 1000000000)
|
|
1081
|
+
|
|
1082
|
+
def format_ns_utc(ts_ns, fmt='%Y-%m-%d %H:%M:%S'):
|
|
1083
|
+
ts_ns = int(ts_ns)
|
|
1084
|
+
sec, ns = divmod(ts_ns, 10**9)
|
|
1085
|
+
dt = datetime.datetime.utcfromtimestamp(sec).replace(microsecond=ns // 1000)
|
|
1086
|
+
base = dt.strftime(fmt)
|
|
1087
|
+
ns_str = "%09d" % ns
|
|
1088
|
+
return base + "." + ns_str
|
|
1038
1089
|
|
|
1039
1090
|
def _split_posix(name):
|
|
1040
1091
|
"""
|
|
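Commentary, not part of the published diff: a quick usage sketch for the two new timestamp helpers, assuming they are imported from pycatfile; the printed values are illustrative:

import time
from pycatfile import to_ns, format_ns_utc

ns = to_ns(time.time())   # seconds (int or float) -> integer nanoseconds
print(ns)                 # e.g. 1763162400123456768
print(format_ns_utc(ns))  # e.g. "2025-11-14 23:20:00.123456768" (UTC)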
@@ -2058,34 +2109,53 @@ def _normalize_initial_data(data, isbytes, encoding, errors=None):


 def MkTempFile(data=None,
-               inmem=
+               inmem=__use_inmem__, usememfd=__use_memfd__,
                isbytes=True,
-               prefix=
+               prefix=__program_name__,
                delete=True,
                encoding="utf-8",
-               newline=None,
+               newline=None,
+               text_errors="strict",
                dir=None,
                suffix="",
                use_spool=__use_spoolfile__,
+               autoswitch_spool=False,
                spool_max=__spoolfile_size__,
-               spool_dir=__use_spooldir__
+               spool_dir=__use_spooldir__,
+               reset_to_start=True,
+               memfd_name=__program_name__,
+               memfd_allow_sealing=False,
+               memfd_flags_extra=0,
+               on_create=None):
     """
     Return a file-like handle with consistent behavior on Py2.7 and Py3.x.

     Storage:
-    - inmem=True
-
-    - inmem=
+    - inmem=True, usememfd=True, isbytes=True and memfd available
+        -> memfd-backed anonymous file (binary)
+    - inmem=True, otherwise
+        -> BytesIO (bytes) or StringIO (text)
+    - inmem=False, use_spool=True
+        -> SpooledTemporaryFile (binary), optionally TextIOWrapper for text
+    - inmem=False, use_spool=False
+        -> NamedTemporaryFile (binary), optionally TextIOWrapper for text

     Text vs bytes:
     - isbytes=True  -> file expects bytes; 'data' must be bytes-like
-    - isbytes=False -> file expects text; 'data' must be text (unicode/str). Newline translation and
-      apply only for spooled/named files (not BytesIO/StringIO).
+    - isbytes=False -> file expects text; 'data' must be text (unicode/str). Newline translation and
+      encoding apply only for spooled/named files (not BytesIO/StringIO).

     Notes:
-    - On Windows, NamedTemporaryFile(delete=True) keeps the file open and cannot be reopened by
-      Use delete=False if you need to pass the path elsewhere.
-    - For text: in-memory StringIO ignores 'newline' (as usual).
+    - On Windows, NamedTemporaryFile(delete=True) keeps the file open and cannot be reopened by
+      other processes. Use delete=False if you need to pass the path elsewhere.
+    - For text: in-memory StringIO ignores 'newline' and 'text_errors' (as usual).
+    - When available, and if usememfd=True, memfd is used only for inmem=True and isbytes=True,
+      providing an anonymous in-memory file descriptor (Linux-only). Text in-memory still uses
+      StringIO to preserve newline semantics.
+    - If autoswitch_spool=True and initial data size exceeds spool_max, in-memory storage is
+      skipped and a spooled file is used instead (if use_spool=True).
+    - If on_create is not None, it is called as on_create(fp, kind) where kind is one of:
+      "memfd", "bytesio", "stringio", "spool", "disk".
     """

     # -- sanitize simple params (avoid None surprises) --
@@ -2117,23 +2187,65 @@ def MkTempFile(data=None,
     else:
         init = None

+    # Size of init for autoswitch; only meaningful for bytes
+    init_len = len(init) if (init is not None and isbytes) else None
+
     # -------- In-memory --------
     if inmem:
-
-
-
-
-
-
-
-
+        # If autoswitch is enabled and data is larger than spool_max, and
+        # spooling is allowed, skip the in-memory branch and fall through
+        # to the spool/disk logic below.
+        if autoswitch_spool and use_spool and init_len is not None and init_len > spool_max:
+            pass  # fall through to spool/disk sections
+        else:
+            # Use memfd only for bytes, and only where available (Linux, Python 3.8+)
+            if usememfd and isbytes and hasattr(os, "memfd_create"):
+                name = memfd_name or prefix or "MkTempFile"
+                flags = 0
+                # Close-on-exec is almost always what you want for temps
+                if hasattr(os, "MFD_CLOEXEC"):
+                    flags |= os.MFD_CLOEXEC
+                # Optional sealing support if requested and available
+                if memfd_allow_sealing and hasattr(os, "MFD_ALLOW_SEALING"):
+                    flags |= os.MFD_ALLOW_SEALING
+                # Extra custom flags (e.g. hugepage flags) if caller wants them
+                if memfd_flags_extra:
+                    flags |= memfd_flags_extra
+
+                fd = os.memfd_create(name, flags)
+                # Binary read/write file-like object backed by RAM
+                f = os.fdopen(fd, "w+b")
+
+                if init is not None:
+                    f.write(init)
+                    if reset_to_start:
+                        f.seek(0)
+
+                if on_create is not None:
+                    on_create(f, "memfd")
+                return f
+
+            # Fallback: pure Python in-memory objects
+            if isbytes:
+                f = io.BytesIO(init if init is not None else b"")
+                kind = "bytesio"
+            else:
+                # newline/text_errors not enforced for StringIO; matches stdlib semantics
+                f = io.StringIO(init if init is not None else "")
+                kind = "stringio"
+
+            if reset_to_start:
+                f.seek(0)
+
+            if on_create is not None:
+                on_create(f, kind)
+            return f

     # Helper: wrap a binary file into a text file with encoding/newline
     def _wrap_text(handle):
         # For both Py2 & Py3, TextIOWrapper gives consistent newline/encoding behavior
-
-
-        return tw
+        return io.TextIOWrapper(handle, encoding=encoding,
+                                newline=newline, errors=text_errors)

     # -------- Spooled (RAM then disk) --------
     if use_spool:
@@ -2141,19 +2253,33 @@ def MkTempFile(data=None,
         bin_mode = "w+b"  # read/write, binary
         b = tempfile.SpooledTemporaryFile(max_size=spool_max, mode=bin_mode, dir=spool_dir)
         f = b if isbytes else _wrap_text(b)
+
         if init is not None:
             f.write(init)
+            if reset_to_start:
+                f.seek(0)
+        elif reset_to_start:
             f.seek(0)
+
+        if on_create is not None:
+            on_create(f, "spool")
         return f

     # -------- On-disk temp (NamedTemporaryFile) --------
     # Always create binary file; wrap for text if needed for uniform Py2/3 behavior
-    b = tempfile.NamedTemporaryFile(mode="w+b", prefix=prefix, suffix=suffix,
+    b = tempfile.NamedTemporaryFile(mode="w+b", prefix=prefix, suffix=suffix,
+                                    dir=dir, delete=delete)
     f = b if isbytes else _wrap_text(b)

     if init is not None:
         f.write(init)
+        if reset_to_start:
+            f.seek(0)
+    elif reset_to_start:
         f.seek(0)
+
+    if on_create is not None:
+        on_create(f, "disk")
     return f

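Commentary, not part of the published diff: a usage sketch for the extended MkTempFile signature, assuming it is imported from pycatfile. The on_create callback receives the handle and the kind of backing store actually chosen ("memfd", "bytesio", "stringio", "spool" or "disk"):

from pycatfile import MkTempFile

def report(fp, kind):
    print("backed by:", kind)  # "memfd" on Linux/Py3.8+, otherwise "bytesio"

f = MkTempFile(b"hello", inmem=True, usememfd=True, on_create=report)
print(f.read())  # b'hello' -- reset_to_start=True rewinds after the initial write
f.close()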
@@ -2489,6 +2615,384 @@ def _is_valid_zlib_header(cmf, flg):
         return False
     return True

+class SharedMemoryFile(object):
+    """
+    File-like wrapper around multiprocessing.shared_memory.SharedMemory.
+
+    Binary-only API, intended to behave similarly to a regular file opened in
+    'rb', 'wb', or 'r+b' modes (but backed by a fixed-size shared memory block).
+
+    Notes:
+      - Requires Python 3.8+ at runtime to actually use SharedMemory.
+      - On Python 2, importing is fine but constructing will raise RuntimeError.
+      - There is no automatic resizing; buffer size is fixed by SharedMemory.
+      - No real fileno(); this does not represent an OS-level file descriptor.
+      - For text mode, wrap this with io.TextIOWrapper on Python 3:
+            f = SharedMemoryFile(...)
+            tf = io.TextIOWrapper(f, encoding="utf-8")
+    """
+
+    def __init__(self, shm=None, name=None, create=False, size=0,
+                 mode='r+b', offset=0, unlink_on_close=False):
+        """
+        Parameters:
+          shm   : existing SharedMemory object (preferred).
+          name  : name of shared memory block (for attach or create).
+          create: if True, create new SharedMemory; else attach existing.
+          size  : size in bytes (required when create=True).
+          mode  : like 'rb', 'wb', 'r+b', 'ab' (binary only; 't' not supported).
+          offset: starting offset within the shared memory buffer.
+          unlink_on_close: if True, call shm.unlink() when close() is called.
+
+        Usage examples:
+
+          # Create new block and file-like wrapper
+          f = SharedMemoryFile(name=None, create=True, size=4096, mode='r+b')
+
+          # Attach to existing shared memory by name
+          f = SharedMemoryFile(name="xyz", create=False, mode='r+b')
+
+          # Wrap an existing SharedMemory object
+          shm = shared_memory.SharedMemory(create=True, size=1024)
+          f = SharedMemoryFile(shm=shm, mode='r+b')
+        """
+        if shared_memory is None:
+            # No SharedMemory available on this interpreter
+            raise RuntimeError("multiprocessing.shared_memory.SharedMemory "
+                               "is not available on this Python version")
+
+        if 't' in mode:
+            raise ValueError("SharedMemoryFile is binary-only; "
+                             "wrap it with io.TextIOWrapper for text")
+
+        self.mode = mode
+        self._closed = False
+        self._unlinked = False
+        self._unlink_on_close = bool(unlink_on_close)
+
+        if shm is not None:
+            self._shm = shm
+        else:
+            # name may be None when create=True
+            self._shm = shared_memory.SharedMemory(name=name, create=create, size=size)
+
+        self._buf = self._shm.buf
+        self._base_offset = int(offset)
+        if self._base_offset < 0 or self._base_offset > len(self._buf):
+            raise ValueError("offset out of range")
+
+        # We treat the accessible region as [base_offset, len(buf))
+        self._size = len(self._buf) - self._base_offset
+        self._pos = 0  # logical file position within that region
+
+    # ---------- basic properties ----------
+
+    @property
+    def name(self):
+        # SharedMemory name (may be None for anonymous)
+        return getattr(self._shm, "name", None)
+
+    @property
+    def closed(self):
+        return self._closed
+
+    def readable(self):
+        return ('r' in self.mode) or ('+' in self.mode)
+
+    def writable(self):
+        return any(ch in self.mode for ch in ('w', 'a', '+'))
+
+    def seekable(self):
+        return True
+
+    # ---------- core helpers ----------
+
+    def _check_closed(self):
+        if self._closed:
+            raise ValueError("I/O operation on closed SharedMemoryFile")
+
+    def _clamp_pos(self, pos):
+        if pos < 0:
+            return 0
+        if pos > self._size:
+            return self._size
+        return pos
+
+    def _region_bounds(self):
+        """Return (start, end) absolute indices into the SharedMemory buffer."""
+        start = self._base_offset + self._pos
+        end = self._base_offset + self._size
+        return start, end
+
+    # ---------- positioning ----------
+
+    def seek(self, offset, whence=0):
+        """
+        Seek to a new file position.
+
+        whence: 0 = from start, 1 = from current, 2 = from end.
+        """
+        self._check_closed()
+        offset = int(offset)
+        whence = int(whence)
+
+        if whence == 0:      # from start
+            new_pos = offset
+        elif whence == 1:    # from current
+            new_pos = self._pos + offset
+        elif whence == 2:    # from end
+            new_pos = self._size + offset
+        else:
+            raise ValueError("invalid whence (expected 0, 1, or 2)")
+
+        self._pos = self._clamp_pos(new_pos)
+        return self._pos
+
+    def tell(self):
+        return self._pos
+
+    # ---------- reading ----------
+
+    def read(self, size=-1):
+        """
+        Read up to 'size' bytes (or to EOF if size<0 or None).
+        Returns bytes (py3) or str (py2).
+        """
+        self._check_closed()
+        if not self.readable():
+            raise IOError("SharedMemoryFile not opened for reading")
+
+        if size is None or size < 0:
+            size = self._size - self._pos
+        else:
+            size = int(size)
+            if size < 0:
+                size = 0
+
+        if size == 0:
+            return b'' if not PY2 else ''
+
+        start, end_abs = self._region_bounds()
+        available = end_abs - (self._base_offset + self._pos)
+        if available <= 0:
+            return b'' if not PY2 else ''
+
+        size = min(size, available)
+
+        abs_start = self._base_offset + self._pos
+        abs_end = abs_start + size
+
+        chunk = self._buf[abs_start:abs_end]
+        if PY2:
+            data = bytes(chunk)  # bytes() -> str in py2
+        else:
+            data = bytes(chunk)
+
+        self._pos += len(data)
+        return data
+
+    def readline(self, size=-1):
+        """
+        Read a single line (ending with '\\n' or EOF).
+        If size >= 0, at most that many bytes are returned.
+        """
+        self._check_closed()
+        if not self.readable():
+            raise IOError("SharedMemoryFile not opened for reading")
+
+        # Determine maximum bytes we can scan
+        start, end_abs = self._region_bounds()
+        remaining = end_abs - (self._base_offset + self._pos)
+        if remaining <= 0:
+            return b'' if not PY2 else ''
+
+        if size is not None and size >= 0:
+            size = int(size)
+            max_len = min(size, remaining)
+        else:
+            max_len = remaining
+
+        abs_start = self._base_offset + self._pos
+        abs_max = abs_start + max_len
+
+        # Work on a local bytes slice for easy .find()
+        if PY2:
+            buf_bytes = bytes(self._buf[abs_start:abs_max])
+        else:
+            buf_bytes = bytes(self._buf[abs_start:abs_max])
+
+        idx = buf_bytes.find(b'\n')
+        if idx == -1:
+            # No newline; read entire chunk
+            line_bytes = buf_bytes
+        else:
+            line_bytes = buf_bytes[:idx + 1]
+
+        self._pos += len(line_bytes)
+
+        if PY2:
+            return line_bytes  # already str
+        return line_bytes
+
+    def readinto(self, b):
+        """
+        Read bytes into a pre-allocated writable buffer (bytearray/memoryview).
+        Returns number of bytes read.
+        """
+        self._check_closed()
+        if not self.readable():
+            raise IOError("SharedMemoryFile not opened for reading")
+
+        # Normalize target buffer
+        if isinstance(b, memoryview):
+            mv = b
+        else:
+            mv = memoryview(b)
+
+        size = len(mv)
+        if size <= 0:
+            return 0
+
+        start, end_abs = self._region_bounds()
+        remaining = end_abs - (self._base_offset + self._pos)
+        if remaining <= 0:
+            return 0
+
+        size = min(size, remaining)
+
+        abs_start = self._base_offset + self._pos
+        abs_end = abs_start + size
+
+        mv[:size] = self._buf[abs_start:abs_end]
+        self._pos += size
+        return size
+
+    # ---------- writing ----------
+
+    def write(self, data):
+        """
+        Write bytes-like object to the shared memory region.
+
+        Returns number of bytes written. Will raise if not opened writable
+        or if writing would overflow the fixed-size region.
+        """
+        self._check_closed()
+        if not self.writable():
+            raise IOError("SharedMemoryFile not opened for writing")
+
+        if isinstance(data, memoryview):
+            data = bytes(data)
+        elif isinstance(data, bytearray):
+            data = bytes(data)
+
+        if not isinstance(data, binary_types):
+            raise TypeError("write() expects a bytes-like object")
+
+        data_len = len(data)
+        if data_len == 0:
+            return 0
+
+        # Handle "append" semantics roughly: start from end on first write
+        if 'a' in self.mode and self._pos == 0:
+            # Move to logical end of region
+            self._pos = self._size
+
+        start, end_abs = self._region_bounds()
+        remaining = end_abs - (self._base_offset + self._pos)
+        if data_len > remaining:
+            raise IOError("write would overflow SharedMemory region (need %d, have %d)"
+                          % (data_len, remaining))
+
+        abs_start = self._base_offset + self._pos
+        abs_end = abs_start + data_len
+
+        self._buf[abs_start:abs_end] = data
+        self._pos += data_len
+        return data_len
+
+    def flush(self):
+        """
+        No-op for shared memory; provided for file-like compatibility.
+        """
+        self._check_closed()
+        # nothing to flush
+
+    # ---------- unlink / close / context manager ----------
+
+    def unlink(self):
+        """
+        Unlink (destroy) the underlying shared memory block.
+
+        After unlink(), new processes cannot attach via name.
+        Existing attachments (including this one) can continue to use
+        the memory until they close() it.
+
+        This is idempotent: calling it more than once is safe.
+        """
+        if self._unlinked:
+            return
+
+        try:
+            self._shm.unlink()
+        except AttributeError:
+            # Should not happen on normal Python 3.8+,
+            # but keep a clear error if it does.
+            raise RuntimeError("Underlying SharedMemory object "
+                               "does not support unlink()")
+
+        self._unlinked = True
+
+    def close(self):
+        if self._closed:
+            return
+        self._closed = True
+
+        # Optionally unlink on close if requested
+        if self._unlink_on_close and not self._unlinked:
+            try:
+                self.unlink()
+            except Exception:
+                # best-effort; close anyway
+                pass
+
+        try:
+            self._shm.close()
+        except Exception:
+            pass
+
+    def __enter__(self):
+        self._check_closed()
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.close()
+
+    # ---------- iteration ----------
+
+    def __iter__(self):
+        return self
+
+    def __next__(self):
+        line = self.readline()
+        if (not line) or len(line) == 0:
+            raise StopIteration
+        return line
+
+    if PY2:
+        next = __next__
+
+    # ---------- misc helpers ----------
+
+    def fileno(self):
+        """
+        There is no real OS-level file descriptor; raise OSError for APIs
+        that require a fileno().
+        """
+        raise OSError("SharedMemoryFile does not have a real fileno()")
+
+    def isatty(self):
+        return False
+
 # ---------- Main class ----------
 class ZlibFile(object):
     """
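Commentary, not part of the published diff: a usage sketch for the new SharedMemoryFile wrapper, following its docstring above (requires Python 3.8+; the wrapper is binary-only over a fixed-size region):

from pycatfile import SharedMemoryFile

# Create a 4 KiB block, write, rewind, read back; unlink_on_close destroys
# the block when the with-statement exits.
with SharedMemoryFile(create=True, size=4096, mode='r+b', unlink_on_close=True) as f:
    f.write(b"hello shared memory\n")
    f.seek(0)
    print(f.readline())  # b'hello shared memory\n'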
@@ -3673,7 +4177,7 @@ def _bytes_to_int(b):
 # =========================
 # Public checksum API
 # =========================
-def GetHeaderChecksum(inlist=None, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
+def GetHeaderChecksum(inlist=None, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__, saltkey=None):
     """
     Serialize header fields (list/tuple => joined with delimiter + trailing delimiter;
     or a single field) and compute the requested checksum. Returns lowercase hex.
@@ -3685,15 +4189,30 @@ def GetHeaderChecksum(inlist=None, checksumtype="md5", encodedata=True, formatsp
|
|
|
3685
4189
|
if encodedata and not isinstance(hdr_bytes, (bytes, bytearray, memoryview)):
|
|
3686
4190
|
hdr_bytes = _to_bytes(hdr_bytes)
|
|
3687
4191
|
hdr_bytes = bytes(hdr_bytes)
|
|
3688
|
-
|
|
4192
|
+
saltkeyval = None
|
|
4193
|
+
if(hasattr(saltkey, "read")):
|
|
4194
|
+
saltkeyval = skfp.read()
|
|
4195
|
+
if(not isinstance(saltkeyval, bytes) and sys.version_info[0] >= 3):
|
|
4196
|
+
saltkeyval = saltkeyval.encode("UTF-8")
|
|
4197
|
+
elif(isinstance(saltkey, bytes) and sys.version_info[0] >= 3):
|
|
4198
|
+
saltkeyval = saltkey
|
|
4199
|
+
elif(saltkey is not None and os.path.exists(saltkey)):
|
|
4200
|
+
with open(saltkey, "rb") as skfp:
|
|
4201
|
+
saltkeyval = skfp.read()
|
|
4202
|
+
else:
|
|
4203
|
+
saltkey = None
|
|
4204
|
+
if(saltkeyval is None):
|
|
4205
|
+
saltkey = None
|
|
3689
4206
|
if CheckSumSupport(algo_key, hashlib_guaranteed):
|
|
3690
|
-
|
|
3691
|
-
|
|
3692
|
-
|
|
4207
|
+
if(saltkey is None or saltkeyval is None):
|
|
4208
|
+
h = hashlib.new(algo_key, hdr_bytes)
|
|
4209
|
+
else:
|
|
4210
|
+
h = hmac.new(saltkeyval, hdr_bytes, digestmod=algo_key)
|
|
4211
|
+
return h.hexdigest().lower()
|
|
3693
4212
|
|
|
3694
4213
|
return "0"
|
|
3695
4214
|
|
|
3696
|
-
def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
|
|
4215
|
+
def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__, saltkey=None):
|
|
3697
4216
|
"""
|
|
3698
4217
|
Accepts bytes/str/file-like.
|
|
3699
4218
|
- Hashlib algos: streamed in 1 MiB chunks.
|
|
@@ -3701,13 +4220,29 @@ def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__
|
|
|
3701
4220
|
- Falls back to one-shot for non-file-like inputs.
|
|
3702
4221
|
"""
|
|
3703
4222
|
algo_key = (checksumtype or "md5").lower()
|
|
3704
|
-
|
|
4223
|
+
saltkeyval = None
|
|
4224
|
+
if(hasattr(saltkey, "read")):
|
|
4225
|
+
saltkeyval = skfp.read()
|
|
4226
|
+
if(not isinstance(saltkeyval, bytes) and sys.version_info[0] >= 3):
|
|
4227
|
+
saltkeyval = saltkeyval.encode("UTF-8")
|
|
4228
|
+
elif(isinstance(saltkey, bytes) and sys.version_info[0] >= 3):
|
|
4229
|
+
saltkeyval = saltkey
|
|
4230
|
+
elif(saltkey is not None and os.path.exists(saltkey)):
|
|
4231
|
+
with open(saltkey, "rb") as skfp:
|
|
4232
|
+
saltkeyval = skfp.read()
|
|
4233
|
+
else:
|
|
4234
|
+
saltkey = None
|
|
4235
|
+
if(saltkeyval is None):
|
|
4236
|
+
saltkey = None
|
|
3705
4237
|
# file-like streaming
|
|
3706
4238
|
if hasattr(inbytes, "read"):
|
|
3707
4239
|
# hashlib
|
|
3708
4240
|
|
|
3709
4241
|
if CheckSumSupport(algo_key, hashlib_guaranteed):
|
|
3710
|
-
|
|
4242
|
+
if(saltkey is None or saltkeyval is None):
|
|
4243
|
+
h = hashlib.new(algo_key)
|
|
4244
|
+
else:
|
|
4245
|
+
h = hmac.new(saltkeyval, digestmod=algo_key)
|
|
3711
4246
|
while True:
|
|
3712
4247
|
chunk = inbytes.read(__filebuff_size__)
|
|
3713
4248
|
if not chunk:
|
|
@@ -3728,26 +4263,41 @@ def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__
|
|
|
3728
4263
|
# one-shot
|
|
3729
4264
|
|
|
3730
4265
|
if CheckSumSupport(algo_key, hashlib_guaranteed):
|
|
3731
|
-
|
|
3732
|
-
|
|
4266
|
+
if(saltkey is None or saltkeyval is None):
|
|
4267
|
+
h = hashlib.new(algo_key, data)
|
|
4268
|
+
else:
|
|
4269
|
+
h = hmac.new(saltkeyval, data, digestmod=algo_key)
|
|
3733
4270
|
return h.hexdigest().lower()
|
|
3734
4271
|
|
|
3735
4272
|
return "0"
|
|
3736
4273
|
|
|
3737
|
-
def ValidateHeaderChecksum(inlist=None, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__):
|
|
3738
|
-
calc = GetHeaderChecksum(inlist, checksumtype, True, formatspecs)
|
|
4274
|
+
def ValidateHeaderChecksum(inlist=None, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__, saltkey=None):
|
|
4275
|
+
calc = GetHeaderChecksum(inlist, checksumtype, True, formatspecs, saltkey)
|
|
3739
4276
|
want = (inchecksum or "0").strip().lower()
|
|
3740
4277
|
if want.startswith("0x"):
|
|
3741
4278
|
want = want[2:]
|
|
3742
|
-
return
|
|
4279
|
+
return CheckChecksums(want, calc)
|
|
3743
4280
|
|
|
3744
|
-
def ValidateFileChecksum(infile, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__):
|
|
3745
|
-
calc = GetFileChecksum(infile, checksumtype, True, formatspecs)
|
|
4281
|
+
def ValidateFileChecksum(infile, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__, saltkey=None):
|
|
4282
|
+
calc = GetFileChecksum(infile, checksumtype, True, formatspecs, saltkey)
|
|
3746
4283
|
want = (inchecksum or "0").strip().lower()
|
|
3747
4284
|
if want.startswith("0x"):
|
|
3748
4285
|
want = want[2:]
|
|
3749
|
-
return
|
|
4286
|
+
return CheckChecksums(want, calc)
|
|
4287
|
+
|
|
4288
|
+
def CheckChecksums(inchecksum, outchecksum):
|
|
4289
|
+
# Normalize as text first
|
|
4290
|
+
calc = (inchecksum or "0").strip().lower()
|
|
4291
|
+
want = (outchecksum or "0").strip().lower()
|
|
3750
4292
|
|
|
4293
|
+
if want.startswith("0x"):
|
|
4294
|
+
want = want[2:]
|
|
4295
|
+
|
|
4296
|
+
# Now force both to bytes
|
|
4297
|
+
calc_b = _to_bytes(calc) # defaults to utf-8, strict
|
|
4298
|
+
want_b = _to_bytes(want)
|
|
4299
|
+
|
|
4300
|
+
return hmac.compare_digest(want_b, calc_b)
|
|
3751
4301
|
|
|
3752
4302
|
def MajorMinorToDev(major, minor):
|
|
3753
4303
|
"""
|
|
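Commentary, not part of the published diff: with the new saltkey parameter, the checksum helpers above switch from a plain hashlib digest to a keyed HMAC, and comparisons go through CheckChecksums (constant-time via hmac.compare_digest). A usage sketch assuming a bytes key and that sha256 is in the supported algorithm set:

from pycatfile import GetFileChecksum, ValidateFileChecksum

data = b"archive member contents"
key = b"shared-secret"  # bytes saltkey -> HMAC-SHA256 instead of bare SHA-256

digest = GetFileChecksum(data, "sha256", True, saltkey=key)
print(ValidateFileChecksum(data, "sha256", digest, saltkey=key))  # True
print(ValidateFileChecksum(data, "sha256", digest))               # False (unsalted)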
@@ -4116,11 +4666,11 @@ def ReadFileHeaderDataWoSize(fp, delimiter=_default_delim(None)):
     return first_two + headerdata


-def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
     if(not hasattr(fp, "read")):
         return False
     delimiter = formatspecs['format_delimiter']
-    if(
+    if(__use_new_style__):
         HeaderOut = ReadFileHeaderDataBySize(fp, delimiter)
     else:
         HeaderOut = ReadFileHeaderDataWoSize(fp, delimiter)
@@ -4208,15 +4758,14 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
     except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
         pass
     fp.seek(len(delimiter), 1)
-    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
-    if(not
+    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+    if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
         VerbosePrintOut("File JSON Data Checksum Error with file " +
                         fname + " at offset " + str(fheaderstart))
         VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
         return False
     fp.seek(len(delimiter), 1)
-    newfcs = GetHeaderChecksum(
-        HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs)
+    newfcs = GetHeaderChecksum(HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs, saltkey)
     HeaderOut.append(fjsoncontent)
     if(fcs != newfcs and not skipchecksum):
         VerbosePrintOut("File Header Checksum Error with file " +
@@ -4235,10 +4784,9 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
     else:
         fp.seek(fcsize, 1)
     fcontents.seek(0, 0)
-    newfccs = GetFileChecksum(
-        fcontents, HeaderOut[-3].lower(), False, formatspecs)
+    newfccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
     fcontents.seek(0, 0)
-    if(not
+    if(not CheckChecksums(fccs, newfccs) and not skipchecksum and not listonly):
         VerbosePrintOut("File Content Checksum Error with file " +
                         fname + " at offset " + str(fcontentstart))
         VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4275,12 +4823,12 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
     return HeaderOut


-def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
     if(not hasattr(fp, "read")):
         return False
     delimiter = formatspecs['format_delimiter']
     fheaderstart = fp.tell()
-    if(
+    if(__use_new_style__):
         HeaderOut = ReadFileHeaderDataBySize(fp, delimiter)
     else:
         HeaderOut = ReadFileHeaderDataWoSize(fp, delimiter)
@@ -4298,40 +4846,51 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
     fbasedir = os.path.dirname(fname)
     flinkname = HeaderOut[6]
     fsize = int(HeaderOut[7], 16)
-
-
-
-
-
+    fblksize = int(HeaderOut[8], 16)
+    fblocks = int(HeaderOut[9], 16)
+    fflags = int(HeaderOut[10], 16)
+    fatime = int(HeaderOut[11], 16)
+    fmtime = int(HeaderOut[12], 16)
+    fctime = int(HeaderOut[13], 16)
+    fbtime = int(HeaderOut[14], 16)
+    fmode = int(HeaderOut[15], 16)
     fchmode = stat.S_IMODE(fmode)
     ftypemod = stat.S_IFMT(fmode)
-    fwinattributes = int(HeaderOut[
-    fcompression = HeaderOut[
-    fcsize = int(HeaderOut[
-    fuid = int(HeaderOut[
-    funame = HeaderOut[
-    fgid = int(HeaderOut[
-    fgname = HeaderOut[
-    fid = int(HeaderOut[
-    finode = int(HeaderOut[
-    flinkcount = int(HeaderOut[
-    fdev = int(HeaderOut[
-
-
-
-
-
-
-
-
-
-    fextrafields = int(HeaderOut[33], 16)
+    fwinattributes = int(HeaderOut[16], 16)
+    fcompression = HeaderOut[17]
+    fcsize = int(HeaderOut[18], 16)
+    fuid = int(HeaderOut[19], 16)
+    funame = HeaderOut[20]
+    fgid = int(HeaderOut[21], 16)
+    fgname = HeaderOut[22]
+    fid = int(HeaderOut[23], 16)
+    finode = int(HeaderOut[24], 16)
+    flinkcount = int(HeaderOut[25], 16)
+    fdev = int(HeaderOut[26], 16)
+    frdev = int(HeaderOut[27], 16)
+    fseeknextfile = HeaderOut[28]
+    fjsontype = HeaderOut[29]
+    fjsonlen = int(HeaderOut[30], 16)
+    fjsonsize = int(HeaderOut[31], 16)
+    fjsonchecksumtype = HeaderOut[32]
+    fjsonchecksum = HeaderOut[33]
+    fextrasize = int(HeaderOut[34], 16)
+    fextrafields = int(HeaderOut[35], 16)
     fextrafieldslist = []
-    extrastart =
+    extrastart = 36
     extraend = extrastart + fextrafields
     while(extrastart < extraend):
         fextrafieldslist.append(HeaderOut[extrastart])
         extrastart = extrastart + 1
+    fvendorfieldslist = []
+    fvendorfields = 0;
+    if((len(HeaderOut) - 4)>extraend):
+        extrastart = extraend
+        extraend = len(HeaderOut) - 4
+        while(extrastart < extraend):
+            fvendorfieldslist.append(HeaderOut[extrastart])
+            extrastart = extrastart + 1
+            fvendorfields = fvendorfields + 1
     if(fextrafields==1):
         try:
             fextrafieldslist = json.loads(base64.b64decode(fextrafieldslist[0]).decode("UTF-8"))
@@ -4409,16 +4968,15 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
         pass
     fp.seek(len(delimiter), 1)
     fjend = fp.tell() - 1
-    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
-    if(not
+    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+    if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
         VerbosePrintOut("File JSON Data Checksum Error with file " +
                         fname + " at offset " + str(fheaderstart))
         VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
         return False
     fcs = HeaderOut[-2].lower()
     fccs = HeaderOut[-1].lower()
-    newfcs = GetHeaderChecksum(
-        HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs)
+    newfcs = GetHeaderChecksum(HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs, saltkey)
     if(fcs != newfcs and not skipchecksum):
         VerbosePrintOut("File Header Checksum Error with file " +
                         fname + " at offset " + str(fheaderstart))
@@ -4441,10 +4999,9 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
         fp.seek(fcsize, 1)
         pyhascontents = False
     fcontents.seek(0, 0)
-    newfccs = GetFileChecksum(
-        fcontents, HeaderOut[-3].lower(), False, formatspecs)
+    newfccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
     fcontents.seek(0, 0)
-    if(not
+    if(not CheckChecksums(fccs, newfccs) and not skipchecksum and not listonly):
         VerbosePrintOut("File Content Checksum Error with file " +
                         fname + " at offset " + str(fcontentstart))
         VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4461,8 +5018,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
         shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
         cfcontents.close()
         fcontents.seek(0, 0)
-        fccs = GetFileChecksum(
-            fcontents, HeaderOut[-3].lower(), False, formatspecs)
+        fccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
     fcontentend = fp.tell()
     if(re.findall("^\\+([0-9]+)", fseeknextfile)):
         fseeknextasnum = int(fseeknextfile.replace("+", ""))
@@ -4484,17 +5040,17 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
     fcontents.seek(0, 0)
     if(not contentasfile):
         fcontents = fcontents.read()
-    outlist = {'fheadersize': fheadsize, 'fhstart': fheaderstart, 'fhend': fhend, 'ftype': ftype, 'fencoding': fencoding, 'fcencoding': fcencoding, 'fname': fname, 'fbasedir': fbasedir, 'flinkname': flinkname, 'fsize': fsize, 'fatime': fatime, 'fmtime': fmtime, 'fctime': fctime, 'fbtime': fbtime, 'fmode': fmode, 'fchmode': fchmode, 'ftypemod': ftypemod, 'fwinattributes': fwinattributes, 'fcompression': fcompression, 'fcsize': fcsize, 'fuid': fuid, 'funame': funame, 'fgid': fgid, 'fgname': fgname, 'finode': finode, 'flinkcount': flinkcount,
-               'fdev': fdev, '
+    outlist = {'fheadersize': fheadsize, 'fhstart': fheaderstart, 'fhend': fhend, 'ftype': ftype, 'fencoding': fencoding, 'fcencoding': fcencoding, 'fname': fname, 'fbasedir': fbasedir, 'flinkname': flinkname, 'fsize': fsize, 'fblksize': fblksize, 'fblocks': fblocks, 'fflags': fflags, 'fatime': fatime, 'fmtime': fmtime, 'fctime': fctime, 'fbtime': fbtime, 'fmode': fmode, 'fchmode': fchmode, 'ftypemod': ftypemod, 'fwinattributes': fwinattributes, 'fcompression': fcompression, 'fcsize': fcsize, 'fuid': fuid, 'funame': funame, 'fgid': fgid, 'fgname': fgname, 'finode': finode, 'flinkcount': flinkcount,
+               'fdev': fdev, 'frdev': frdev, 'fseeknextfile': fseeknextfile, 'fheaderchecksumtype': HeaderOut[-4], 'fjsonchecksumtype': fjsonchecksumtype, 'fcontentchecksumtype': HeaderOut[-3], 'fnumfields': fnumfields + 2, 'frawheader': HeaderOut, 'fvendorfields': fvendorfields, 'fvendordata': fvendorfieldslist, 'fextrafields': fextrafields, 'fextrafieldsize': fextrasize, 'fextradata': fextrafieldslist, 'fjsontype': fjsontype, 'fjsonlen': fjsonlen, 'fjsonsize': fjsonsize, 'fjsonrawdata': fjsonrawcontent, 'fjsondata': fjsoncontent, 'fjstart': fjstart, 'fjend': fjend, 'fheaderchecksum': fcs, 'fjsonchecksum': fjsonchecksum, 'fcontentchecksum': fccs, 'fhascontents': pyhascontents, 'fcontentstart': fcontentstart, 'fcontentend': fcontentend, 'fcontentasfile': contentasfile, 'fcontents': fcontents}
     return outlist


-def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
     if(not hasattr(fp, "read")):
         return False
     delimiter = formatspecs['format_delimiter']
     fheaderstart = fp.tell()
-    if(
+    if(__use_new_style__):
         HeaderOut = ReadFileHeaderDataBySize(fp, delimiter)
     else:
         HeaderOut = ReadFileHeaderDataWoSize(fp, delimiter)
@@ -4512,40 +5068,51 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
     fbasedir = os.path.dirname(fname)
     flinkname = HeaderOut[6]
     fsize = int(HeaderOut[7], 16)
-
-
-
-
-
+    fblksize = int(HeaderOut[8], 16)
+    fblocks = int(HeaderOut[9], 16)
+    fflags = int(HeaderOut[10], 16)
+    fatime = int(HeaderOut[11], 16)
+    fmtime = int(HeaderOut[12], 16)
+    fctime = int(HeaderOut[13], 16)
+    fbtime = int(HeaderOut[14], 16)
+    fmode = int(HeaderOut[15], 16)
     fchmode = stat.S_IMODE(fmode)
     ftypemod = stat.S_IFMT(fmode)
-    fwinattributes = int(HeaderOut[
-    fcompression = HeaderOut[
-    fcsize = int(HeaderOut[
-    fuid = int(HeaderOut[
-    funame = HeaderOut[
-    fgid = int(HeaderOut[
-    fgname = HeaderOut[
-    fid = int(HeaderOut[
-    finode = int(HeaderOut[
-    flinkcount = int(HeaderOut[
-    fdev = int(HeaderOut[
-
-
-
-
-
-
-
-
-
-    fextrafields = int(HeaderOut[33], 16)
+    fwinattributes = int(HeaderOut[16], 16)
+    fcompression = HeaderOut[17]
+    fcsize = int(HeaderOut[18], 16)
+    fuid = int(HeaderOut[19], 16)
+    funame = HeaderOut[20]
+    fgid = int(HeaderOut[21], 16)
+    fgname = HeaderOut[22]
+    fid = int(HeaderOut[23], 16)
+    finode = int(HeaderOut[24], 16)
+    flinkcount = int(HeaderOut[25], 16)
+    fdev = int(HeaderOut[26], 16)
+    frdev = int(HeaderOut[27], 16)
+    fseeknextfile = HeaderOut[28]
+    fjsontype = HeaderOut[29]
+    fjsonlen = int(HeaderOut[30], 16)
+    fjsonsize = int(HeaderOut[31], 16)
+    fjsonchecksumtype = HeaderOut[32]
+    fjsonchecksum = HeaderOut[33]
+    fextrasize = int(HeaderOut[34], 16)
+    fextrafields = int(HeaderOut[35], 16)
     fextrafieldslist = []
-    extrastart =
+    extrastart = 36
     extraend = extrastart + fextrafields
     while(extrastart < extraend):
         fextrafieldslist.append(HeaderOut[extrastart])
         extrastart = extrastart + 1
+    fvendorfieldslist = []
+    fvendorfields = 0;
+    if((len(HeaderOut) - 4)>extraend):
+        extrastart = extraend
+        extraend = len(HeaderOut) - 4
+        while(extrastart < extraend):
+            fvendorfieldslist.append(HeaderOut[extrastart])
+            extrastart = extrastart + 1
+            fvendorfields = fvendorfields + 1
     if(fextrafields==1):
         try:
             fextrafieldslist = json.loads(base64.b64decode(fextrafieldslist[0]).decode("UTF-8"))
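The rewritten field block above is the core format change in 0.27.0: three new per-file fields (`fblksize`, `fblocks`, `fflags`) land at header indices 8-10, pushing every later field up (`fatime` moves from 8 to 11, `fextrafields` from 33 to 35, and the extra-field block now starts at 36). Numeric fields stay lowercase hex strings. A minimal sketch of the decode pattern, with illustrative names only:

```python
def decode_new_fields(header):
    # `header` is the list of delimiter-separated strings, 0.27.0 layout.
    fsize = int(header[7], 16)
    fblksize = int(header[8], 16)   # new: preferred I/O block size
    fblocks = int(header[9], 16)    # new: allocated 512-byte blocks
    fflags = int(header[10], 16)    # new: platform file flags
    fatime = int(header[11], 16)    # shifted up from index 8
    return fsize, fblksize, fblocks, fflags, fatime

fields = ['0'] * 7 + ['400', '1000', '8', '0', '5f5e100']
print(decode_new_fields(fields))  # (1024, 4096, 8, 0, 100000000)
```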
@@ -4555,6 +5122,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
                 fextrafieldslist = json.loads(fextrafieldslist[0])
             except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
                 pass
+    fjstart = fp.tell()
     if(fjsontype=="json"):
         fjsoncontent = {}
         fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
@@ -4621,16 +5189,16 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
         except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
             pass
     fp.seek(len(delimiter), 1)
-
-
+    fjend = fp.tell() - 1
+    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+    if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
         VerbosePrintOut("File JSON Data Checksum Error with file " +
                         fname + " at offset " + str(fheaderstart))
         VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
         return False
     fcs = HeaderOut[-2].lower()
     fccs = HeaderOut[-1].lower()
-    newfcs = GetHeaderChecksum(
-        HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs)
+    newfcs = GetHeaderChecksum(HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs, saltkey)
     if(fcs != newfcs and not skipchecksum):
         VerbosePrintOut("File Header Checksum Error with file " +
                         fname + " at offset " + str(fheaderstart))
@@ -4653,9 +5221,9 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
         fp.seek(fcsize, 1)
         pyhascontents = False
     fcontents.seek(0, 0)
-    newfccs = GetFileChecksum(
-
-    if(not
+    newfccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
+    fcontents.seek(0, 0)
+    if(not CheckChecksums(fccs, newfccs) and not skipchecksum and not listonly):
         VerbosePrintOut("File Content Checksum Error with file " +
                         fname + " at offset " + str(fcontentstart))
         VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4672,8 +5240,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
         shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
         cfcontents.close()
         fcontents.seek(0, 0)
-        fccs = GetFileChecksum(
-            fcontents, HeaderOut[-3].lower(), False, formatspecs)
+        fccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
     fcontentend = fp.tell()
     if(re.findall("^\\+([0-9]+)", fseeknextfile)):
         fseeknextasnum = int(fseeknextfile.replace("+", ""))
@@ -4695,12 +5262,12 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
     fcontents.seek(0, 0)
     if(not contentasfile):
         fcontents = fcontents.read()
-    outlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
-               finode, flinkcount, fdev,
+    outlist = {'fheaders': [ftype, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+               fcsize, fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev, frdev, fseeknextfile], 'fextradata': fextrafieldslist, 'fjsoncontent': fjsoncontent, 'fcontents': fcontents, 'fjsonchecksumtype': fjsonchecksumtype, 'fheaderchecksumtype': HeaderOut[-4].lower(), 'fcontentchecksumtype': HeaderOut[-3].lower()}
     return outlist
 
 
-def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
     if(not hasattr(fp, "read")):
         return False
     delimiter = formatspecs['format_delimiter']
@@ -4713,6 +5280,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
         CatSizeEnd = CatSize
     fp.seek(curloc, 0)
     inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+    headeroffset = fp.tell()
    formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
    formdelszie = len(formatspecs['format_delimiter'])
    formdel = fp.read(formdelszie).decode("UTF-8")
@@ -4720,7 +5288,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
        return False
    if(formdel != formatspecs['format_delimiter']):
        return False
-    if(
+    if(__use_new_style__):
        inheader = ReadFileHeaderDataBySize(
            fp, formatspecs['format_delimiter'])
    else:
@@ -4728,19 +5296,19 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
            fp, formatspecs['format_delimiter'])
    fprechecksumtype = inheader[-2]
    fprechecksum = inheader[-1]
-    headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
-    newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
+    headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs, saltkey)
+    newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
    if(not headercheck and not skipchecksum):
        VerbosePrintOut(
-            "File Header Checksum Error with file at offset " + str(
+            "File Header Checksum Error with file at offset " + str(headeroffset))
        VerbosePrintOut("'" + fprechecksum + "' != " +
                        "'" + newfcs + "'")
        return False
-    fnumfiles = int(inheader[
-    outfseeknextfile = inheaderdata[
-    fjsonsize = int(inheaderdata[
-    fjsonchecksumtype = inheader[
-    fjsonchecksum = inheader[
+    fnumfiles = int(inheader[8], 16)
+    outfseeknextfile = inheaderdata[9]
+    fjsonsize = int(inheaderdata[12], 16)
+    fjsonchecksumtype = inheader[13]
+    fjsonchecksum = inheader[14]
    fp.read(fjsonsize)
    # Next seek directive
    if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
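The archive-level validation above recomputes the header digest over the format string plus every stored field except the trailing checksum itself, now salted as well, and on mismatch reports the header's own offset (the new `headeroffset` captured before the read) instead of the old unfinished `str(` call. Note also the mix of `inheader` and `inheaderdata` names in the added field reads, reproduced verbatim from the release. A self-contained sketch of the validate-then-compare step, under the same HMAC-salt assumption as earlier:

```python
import hashlib
import hmac

def validate_header(fields, stored_digest, algo="md5", saltkey=None):
    # Digest everything except the stored checksum field itself.
    payload = "".join(fields).encode("UTF-8")
    if saltkey is None:
        computed = hashlib.new(algo, payload).hexdigest()
    else:
        computed = hmac.new(saltkey, payload, algo).hexdigest()
    return hmac.compare_digest(computed, stored_digest.lower())
```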
@@ -4763,8 +5331,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
    countnum = 0
    flist = []
    while(countnum < fnumfiles):
-        HeaderOut = ReadFileHeaderDataWithContent(
-            fp, listonly, uncompress, skipchecksum, formatspecs)
+        HeaderOut = ReadFileHeaderDataWithContent(fp, listonly, uncompress, skipchecksum, formatspecs, saltkey)
        if(len(HeaderOut) == 0):
            break
        flist.append(HeaderOut)
@@ -4772,7 +5339,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
    return flist


-def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
+def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None, seektoend=False):
    if(not hasattr(fp, "read")):
        return False
    delimiter = formatspecs['format_delimiter']
@@ -4785,6 +5352,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
        CatSizeEnd = CatSize
    fp.seek(curloc, 0)
    inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+    headeroffset = fp.tell()
    formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
    formdelszie = len(formatspecs['format_delimiter'])
    formdel = fp.read(formdelszie).decode("UTF-8")
@@ -4792,16 +5360,16 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
        return False
    if(formdel != formatspecs['format_delimiter']):
        return False
-    if(
+    if(__use_new_style__):
        inheader = ReadFileHeaderDataBySize(
            fp, formatspecs['format_delimiter'])
    else:
        inheader = ReadFileHeaderDataWoSize(
            fp, formatspecs['format_delimiter'])
-    fnumextrafieldsize = int(inheader[
-    fnumextrafields = int(inheader[
+    fnumextrafieldsize = int(inheader[15], 16)
+    fnumextrafields = int(inheader[16], 16)
    fextrafieldslist = []
-    extrastart =
+    extrastart = 17
    extraend = extrastart + fnumextrafields
    while(extrastart < extraend):
        fextrafieldslist.append(inheader[extrastart])
@@ -4815,20 +5383,31 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
            fextrafieldslist = json.loads(fextrafieldslist[0])
        except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
            pass
+    fvendorfieldslist = []
+    fvendorfields = 0;
+    if((len(inheader) - 2)>extraend):
+        extrastart = extraend
+        extraend = len(inheader) - 2
+        while(extrastart < extraend):
+            fvendorfieldslist.append(HeaderOut[extrastart])
+            extrastart = extrastart + 1
+            fvendorfields = fvendorfields + 1
    formversion = re.findall("([\\d]+)", formstring)
    fheadsize = int(inheader[0], 16)
    fnumfields = int(inheader[1], 16)
-
-
-
-
-
-
-
-
-
-
-
+    fheadctime = int(inheader[2], 16)
+    fheadmtime = int(inheader[3], 16)
+    fhencoding = inheader[4]
+    fostype = inheader[5]
+    fpythontype = inheader[6]
+    fprojectname = inheader[7]
+    fnumfiles = int(inheader[8], 16)
+    fseeknextfile = inheader[9]
+    fjsontype = inheader[10]
+    fjsonlen = int(inheader[11], 16)
+    fjsonsize = int(inheader[12], 16)
+    fjsonchecksumtype = inheader[13]
+    fjsonchecksum = inheader[14]
    fjsoncontent = {}
    fjstart = fp.tell()
    if(fjsontype=="json"):
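The new vendor-field sweep collects any header fields sitting between the declared extra-field block and the trailing checksum fields, so third-party writers can append fields without breaking the layout. Note that in this function the loop appends from `HeaderOut` while the surrounding code reads `inheader`; that name mismatch is reproduced verbatim from the release (the per-file reader earlier uses `HeaderOut` consistently). The sweep itself reduces to this standalone helper (names illustrative):

```python
def sweep_vendor_fields(fields, extraend, reserved=2):
    # Anything between the extra-field block and the last `reserved`
    # (checksum) fields is treated as opaque vendor data.
    vendor = []
    limit = len(fields) - reserved
    pos = extraend
    while pos < limit:
        vendor.append(fields[pos])
        pos += 1
    return vendor
```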
@@ -4914,25 +5493,25 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
            fp.seek(fseeknextasnum, 0)
        else:
            return False
-    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
-    if(not
+    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+    if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
        VerbosePrintOut("File JSON Data Checksum Error with file " +
                        fname + " at offset " + str(fheaderstart))
        VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
        return False
    fprechecksumtype = inheader[-2]
    fprechecksum = inheader[-1]
-    headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
-    newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
+    headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs, saltkey)
+    newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
    if(not headercheck and not skipchecksum):
        VerbosePrintOut(
-            "File Header Checksum Error with file at offset " + str(
+            "File Header Checksum Error with file at offset " + str(headeroffset))
        VerbosePrintOut("'" + fprechecksum + "' != " +
                        "'" + newfcs + "'")
        return False
    formversions = re.search('(.*?)(\\d+)', formstring).groups()
    fcompresstype = ""
-    outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fprojectname': fprojectname, 'fimptype': fpythontype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fseeknextfile': fseeknextfile, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'fjsonchecksumtype': fjsonchecksumtype, 'fjsontype': fjsontype, 'fjsonlen': fjsonlen, 'fjsonsize': fjsonsize, 'fjsonrawdata': fjsonrawcontent, 'fjsondata': fjsoncontent, 'fjstart': fjstart, 'fjend': fjend, 'fjsonchecksum': fjsonchecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
+    outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fmtime': fheadmtime, 'fctime': fheadctime, 'fversion': formversions[1], 'fostype': fostype, 'fprojectname': fprojectname, 'fimptype': fpythontype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fseeknextfile': fseeknextfile, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'fjsonchecksumtype': fjsonchecksumtype, 'fjsontype': fjsontype, 'fjsonlen': fjsonlen, 'fjsonsize': fjsonsize, 'fjsonrawdata': fjsonrawcontent, 'fjsondata': fjsoncontent, 'fjstart': fjstart, 'fjend': fjend, 'fjsonchecksum': fjsonchecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'fvendorfields': fvendorfields, 'fvendordata': fvendorfieldslist, 'ffilelist': []}
    if (seekstart < 0) or (seekstart > fnumfiles):
        seekstart = 0
    if (seekend == 0) or (seekend > fnumfiles) or (seekend < seekstart):
@@ -4959,16 +5538,15 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
        prefjsonchecksum = preheaderdata[31]
        prejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
        fp.seek(len(delimiter), 1)
-        prejsonfcs = GetFileChecksum(prejsoncontent, prefjsonchecksumtype, True, formatspecs)
-        if(not
+        prejsonfcs = GetFileChecksum(prejsoncontent, prefjsonchecksumtype, True, formatspecs, saltkey)
+        if(not CheckChecksums(prefjsonchecksum, prejsonfcs) and not skipchecksum):
            VerbosePrintOut("File JSON Data Checksum Error with file " +
                            prefname + " at offset " + str(prefhstart))
            VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
            return False
-        prenewfcs = GetHeaderChecksum(
-            preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
+        prenewfcs = GetHeaderChecksum(preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs, saltkey)
        prefcs = preheaderdata[-2]
-        if(not
+        if(not CheckChecksums(prefcs, prenewfcs) and not skipchecksum):
            VerbosePrintOut("File Header Checksum Error with file " +
                            prefname + " at offset " + str(prefhstart))
            VerbosePrintOut("'" + prefcs + "' != " +
@@ -4983,11 +5561,10 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
        if(prefsize > 0):
            prefcontents.write(fp.read(prefsize))
            prefcontents.seek(0, 0)
-            prenewfccs = GetFileChecksum(
-                prefcontents, preheaderdata[-3].lower(), False, formatspecs)
+            prenewfccs = GetFileChecksum(prefcontents, preheaderdata[-3].lower(), False, formatspecs, saltkey)
            prefccs = preheaderdata[-1]
            pyhascontents = True
-            if(not
+            if(not CheckChecksums(prefccs, prenewfccs) and not skipchecksum):
                VerbosePrintOut("File Content Checksum Error with file " +
                                prefname + " at offset " + str(prefcontentstart))
                VerbosePrintOut("'" + prefccs +
@@ -5014,8 +5591,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
    realidnum = 0
    countnum = seekstart
    while (fp.tell() < CatSizeEnd) if seektoend else (countnum < seekend):
-        HeaderOut = ReadFileHeaderDataWithContentToArray(
-            fp, listonly, contentasfile, uncompress, skipchecksum, formatspecs)
+        HeaderOut = ReadFileHeaderDataWithContentToArray(fp, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey)
        if(len(HeaderOut) == 0):
            break
        HeaderOut.update({'fid': realidnum, 'fidalt': realidnum})
@@ -5026,7 +5602,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
    return outlist


-def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
+def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None, seektoend=False):
    if(not hasattr(fp, "read")):
        return False
    delimiter = formatspecs['format_delimiter']
@@ -5039,6 +5615,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
        CatSizeEnd = CatSize
    fp.seek(curloc, 0)
    inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+    headeroffset = fp.tell()
    formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
    formdelszie = len(formatspecs['format_delimiter'])
    formdel = fp.read(formdelszie).decode("UTF-8")
@@ -5046,16 +5623,16 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
        return False
    if(formdel != formatspecs['format_delimiter']):
        return False
-    if(
+    if(__use_new_style__):
        inheader = ReadFileHeaderDataBySize(
            fp, formatspecs['format_delimiter'])
    else:
        inheader = ReadFileHeaderDataWoSize(
            fp, formatspecs['format_delimiter'])
-    fnumextrafieldsize = int(inheader[
-    fnumextrafields = int(inheader[
+    fnumextrafieldsize = int(inheader[15], 16)
+    fnumextrafields = int(inheader[16], 16)
    fextrafieldslist = []
-    extrastart =
+    extrastart = 17
    extraend = extrastart + fnumextrafields
    while(extrastart < extraend):
        fextrafieldslist.append(inheader[extrastart])
@@ -5069,19 +5646,98 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
            fextrafieldslist = json.loads(fextrafieldslist[0])
        except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
            pass
+    fvendorfieldslist = []
+    fvendorfields = 0;
+    if((len(inheader) - 2)>extraend):
+        extrastart = extraend
+        extraend = len(inheader) - 2
+        while(extrastart < extraend):
+            fvendorfieldslist.append(HeaderOut[extrastart])
+            extrastart = extrastart + 1
+            fvendorfields = fvendorfields + 1
    formversion = re.findall("([\\d]+)", formstring)
    fheadsize = int(inheader[0], 16)
    fnumfields = int(inheader[1], 16)
-
-
-
-
-
-
-
+    fheadctime = int(inheader[2], 16)
+    fheadmtime = int(inheader[3], 16)
+    fhencoding = inheader[4]
+    fostype = inheader[5]
+    fpythontype = inheader[6]
+    fprojectname = inheader[7]
+    fnumfiles = int(inheader[8], 16)
+    fseeknextfile = inheader[9]
+    fjsontype = inheader[10]
+    fjsonlen = int(inheader[11], 16)
+    fjsonsize = int(inheader[12], 16)
+    fjsonchecksumtype = inheader[13]
+    fjsonchecksum = inheader[14]
    fjsoncontent = {}
    fjstart = fp.tell()
-
+    if(fjsontype=="json"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        if(fjsonsize > 0):
+            try:
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
+            except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                try:
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = json.loads(fprejsoncontent)
+                except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                    fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = {}
+        else:
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
+            fjsoncontent = {}
+    elif(testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        if (fjsonsize > 0):
+            try:
+                # try base64 → utf-8 → YAML
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+            except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
+                try:
+                    # fall back to treating the bytes as plain text YAML
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+                except (UnicodeDecodeError, yaml.YAMLError):
+                    # final fallback: empty
+                    fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = {}
+        else:
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
+            fjsoncontent = {}
+    elif(not testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        fprejsoncontent = ""
+        fjsonrawcontent = fprejsoncontent
+    elif(fjsontype=="list"):
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        flisttmp = MkTempFile()
+        flisttmp.write(fprejsoncontent.encode())
+        flisttmp.seek(0)
+        fjsoncontent = ReadFileHeaderData(flisttmp, fjsonlen, delimiter)
+        flisttmp.close()
+        fjsonrawcontent = fjsoncontent
+        if(fjsonlen==1):
+            try:
+                fjsonrawcontent = base64.b64decode(fjsoncontent[0]).decode("UTF-8")
+                fjsoncontent = json.loads(base64.b64decode(fjsoncontent[0]).decode("UTF-8"))
+                fjsonlen = len(fjsoncontent)
+            except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                try:
+                    fjsonrawcontent = fjsoncontent[0]
+                    fjsoncontent = json.loads(fjsoncontent[0])
+                except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                    pass
    fjend = fp.tell()
    if(re.findall("^\\+([0-9]+)", fseeknextfile)):
        fseeknextasnum = int(fseeknextfile.replace("+", ""))
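The restored metadata block above shows the full fallback ladder 0.27.0 applies to the JSON (and, when PyYAML is importable, YAML) sidecar: try base64-wrapped text first, then the raw text, then give up with an empty dict. Reduced to its JSON core, the ladder looks like this (helper name illustrative):

```python
import base64
import binascii
import json

def decode_sidecar(raw):
    # 1) base64-wrapped JSON, 2) plain JSON, 3) empty fallback,
    # in the same order as the added lines above.
    try:
        text = base64.b64decode(raw.encode("UTF-8")).decode("UTF-8")
        return json.loads(text)
    except (binascii.Error, ValueError, UnicodeDecodeError):
        pass
    try:
        return json.loads(raw)
    except ValueError:
        return {}

print(decode_sidecar('eyJhIjogMX0='))  # {'a': 1}
print(decode_sidecar('{"a": 1}'))      # {'a': 1}
print(decode_sidecar('not json'))      # {}
```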
@@ -5100,19 +5756,19 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
            fp.seek(fseeknextasnum, 0)
        else:
            return False
-    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
-    if(not
+    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+    if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
        VerbosePrintOut("File JSON Data Checksum Error with file " +
                        fname + " at offset " + str(fheaderstart))
        VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
        return False
    fprechecksumtype = inheader[-2]
    fprechecksum = inheader[-1]
-    headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
-    newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
+    headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs, saltkey)
+    newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
    if(not headercheck and not skipchecksum):
        VerbosePrintOut(
-            "File Header Checksum Error with file at offset " + str(
+            "File Header Checksum Error with file at offset " + str(headeroffset))
        VerbosePrintOut("'" + fprechecksum + "' != " +
                        "'" + newfcs + "'")
        return False
@@ -5128,7 +5784,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
    il = 0
    while(il < seekstart):
        prefhstart = fp.tell()
-        if(
+        if(__use_new_style__):
            preheaderdata = ReadFileHeaderDataBySize(
                fp, formatspecs['format_delimiter'])
        else:
@@ -5150,16 +5806,15 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
        prefjsonchecksum = preheaderdata[31]
        prefprejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
        fp.seek(len(delimiter), 1)
-        prejsonfcs = GetFileChecksum(prefprejsoncontent, prefjsonchecksumtype, True, formatspecs)
-        if(not
+        prejsonfcs = GetFileChecksum(prefprejsoncontent, prefjsonchecksumtype, True, formatspecs, saltkey)
+        if(not CheckChecksums(prefjsonchecksum, prejsonfcs) and not skipchecksum):
            VerbosePrintOut("File JSON Data Checksum Error with file " +
                            prefname + " at offset " + str(prefhstart))
            VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
            return False
-        prenewfcs = GetHeaderChecksum(
-            preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
+        prenewfcs = GetHeaderChecksum(preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs, saltkey)
        prefcs = preheaderdata[-2]
-        if(not
+        if(not CheckChecksums(prefcs, prenewfcs) and not skipchecksum):
            VerbosePrintOut("File Header Checksum Error with file " +
                            prefname + " at offset " + str(prefhstart))
            VerbosePrintOut("'" + prefcs + "' != " +
@@ -5176,11 +5831,10 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
            prefcontents = fp.read(prefsize)
        else:
            prefcontents = fp.read(prefcsize)
-        prenewfccs = GetFileChecksum(
-            prefcontents, preheaderdata[-3].lower(), False, formatspecs)
+        prenewfccs = GetFileChecksum(prefcontents, preheaderdata[-3].lower(), False, formatspecs, saltkey)
        prefccs = preheaderdata[-1]
        pyhascontents = True
-        if(not
+        if(not CheckChecksums(prefccs, prenewfccs) and not skipchecksum):
            VerbosePrintOut("File Content Checksum Error with file " +
                            prefname + " at offset " + str(prefcontentstart))
            VerbosePrintOut("'" + prefccs +
@@ -5207,8 +5861,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
    realidnum = 0
    countnum = seekstart
    while (fp.tell() < CatSizeEnd) if seektoend else (countnum < seekend):
-        HeaderOut = ReadFileHeaderDataWithContentToList(
-            fp, listonly, contentasfile, uncompress, skipchecksum, formatspecs)
+        HeaderOut = ReadFileHeaderDataWithContentToList(fp, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey)
        if(len(HeaderOut) == 0):
            break
        outlist.append(HeaderOut)
@@ -5216,7 +5869,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
        realidnum = realidnum + 1
    return outlist

-def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
    if(hasattr(infile, "read") or hasattr(infile, "write")):
        fp = infile
        try:
@@ -5311,7 +5964,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
            else:
                break
            readfp.seek(oldfppos, 0)
-            ArchiveList.append(ReadFileDataWithContentToArray(readfp, currentfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, seektoend))
+            ArchiveList.append(ReadFileDataWithContentToArray(readfp, currentfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, saltkey, seektoend))
            currentfilepos = readfp.tell()
    else:
        infp = UncompressFileAlt(readfp, formatspecs, currentfilepos)
@@ -5333,27 +5986,27 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
            else:
                break
            infp.seek(oldinfppos, 0)
-            ArchiveList.append(ReadFileDataWithContentToArray(infp, currentinfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, seektoend))
+            ArchiveList.append(ReadFileDataWithContentToArray(infp, currentinfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, saltkey, seektoend))
            currentinfilepos = infp.tell()
        currentfilepos = readfp.tell()
    return ArchiveList


-def ReadInMultipleFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+def ReadInMultipleFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
    if(isinstance(infile, (list, tuple, ))):
        pass
    else:
        infile = [infile]
    outretval = []
    for curfname in infile:
-        outretval.append(ReadInFileWithContentToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend))
+        outretval.append(ReadInFileWithContentToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend))
    return outretval

-def ReadInMultipleFilesWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
-    return ReadInMultipleFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+def ReadInMultipleFilesWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
+    return ReadInMultipleFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)


-def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
    if(hasattr(infile, "read") or hasattr(infile, "write")):
        fp = infile
        try:
@@ -5448,7 +6101,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
            else:
                break
            readfp.seek(oldfppos, 0)
-            ArchiveList.append(ReadFileDataWithContentToList(readfp, currentfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, seektoend))
+            ArchiveList.append(ReadFileDataWithContentToList(readfp, currentfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, saltkey, seektoend))
            currentfilepos = readfp.tell()
    else:
        infp = UncompressFileAlt(readfp, formatspecs, currentfilepos)
@@ -5470,24 +6123,24 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
            else:
                break
            infp.seek(oldinfppos, 0)
-            ArchiveList.append(ReadFileDataWithContentToList(infp, currentinfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, seektoend))
+            ArchiveList.append(ReadFileDataWithContentToList(infp, currentinfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, saltkey, seektoend))
            currentinfilepos = infp.tell()
        currentfilepos = readfp.tell()
    return ArchiveList


-def ReadInMultipleFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+def ReadInMultipleFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
    if(isinstance(infile, (list, tuple, ))):
        pass
    else:
        infile = [infile]
    outretval = {}
    for curfname in infile:
-        outretval.append(ReadInFileWithContentToList(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend))
+        outretval.append(ReadInFileWithContentToList(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend))
    return outretval

-def ReadInMultipleFilesWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
-    return ReadInMultipleFileWithContentToList(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+def ReadInMultipleFilesWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
+    return ReadInMultipleFileWithContentToList(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)


 def _field_to_bytes(x):
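Every wrapper in this chain gains `saltkey` in the same position, just before `seektoend`, so keyword use is the safest way to call across versions. One detail worth flagging: `ReadInMultipleFileWithContentToList` initializes `outretval = {}` yet then calls `outretval.append(...)`, which a dict does not provide; the `ToArray` twin uses `[]`. That is reproduced verbatim from the release. A hedged usage sketch of the new signatures (the archive path and salt value are placeholders):

```python
from pycatfile import ReadInFileWithContentToArray

# Keyword arguments sidestep the positional shift introduced by
# saltkey landing ahead of seektoend in 0.27.0.
archive = ReadInFileWithContentToArray(
    "example.cat",              # placeholder archive path
    fmttype="auto",
    skipchecksum=False,
    saltkey=b"placeholder-salt",  # type expected by saltkey is assumed
    seektoend=False)
```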
@@ -5541,7 +6194,7 @@ def AppendNullBytes(indata=None, delimiter=__file_format_dict__['format_delimite
 def _hex_lower(n):
    return format(int(n), 'x').lower()

-def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__):
+def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__, saltkey=None):
    """
    Build and write the archive file header.
    Returns the same file-like 'fp' on success, or False on failure.
@@ -5605,18 +6258,21 @@ def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksu
    tmpoutlist.append(fjsonsize)
    if(len(jsondata) > 0):
        tmpoutlist.append(checksumtype[1])
-        tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[1], True, formatspecs))
+        tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[1], True, formatspecs, saltkey))
    else:
        tmpoutlist.append("none")
-        tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs))
+        tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs, saltkey))
    # Preserve your original "tmpoutlen" computation exactly
    tmpoutlist.append(extrasizelen)
    tmpoutlist.append(extrafields)
-    tmpoutlen =
+    tmpoutlen = 10 + len(tmpoutlist) + len(xlist)
    tmpoutlenhex = _hex_lower(tmpoutlen)
-
+    if(hasattr(time, "time_ns")):
+        fctime = format(int(time.time_ns()), 'x').lower()
+    else:
+        fctime = format(int(to_ns(time.time())), 'x').lower()
    # Serialize the first group
-    fnumfilesa = AppendNullBytes([tmpoutlenhex, fencoding, platform.system(), py_implementation, __program_name__, fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
+    fnumfilesa = AppendNullBytes([tmpoutlenhex, fctime, fctime, fencoding, platform.system(), py_implementation, __program_name__+str(__version_info__[0]), fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
    # Append tmpoutlist
    fnumfilesa += AppendNullBytes(tmpoutlist, delimiter)
    # Append extradata items if any
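`AppendFileHeader` now stamps the archive header with creation and modification times: nanoseconds since the epoch, hex-encoded, taken from `time.time_ns()` when the interpreter has it and otherwise routed through the module's `to_ns()` fallback. Assuming `to_ns()` simply scales float seconds, the pattern is:

```python
import time

def now_ns_hex():
    # time.time_ns() exists on Python 3.7+; older interpreters get
    # the scaled float clock (what a to_ns() shim presumably does).
    if hasattr(time, "time_ns"):
        ns = time.time_ns()
    else:
        ns = int(time.time() * 1000000000)
    return format(ns, 'x').lower()

print(now_ns_hex())  # a 16-hex-digit epoch-nanosecond stamp
```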
@@ -5626,7 +6282,7 @@ def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksu
    fnumfilesa += AppendNullByte(checksumtype[0], delimiter)

    # 5) inner checksum over fnumfilesa
-    outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs)
+    outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs, saltkey)
    tmpfileoutstr = fnumfilesa + AppendNullByte(outfileheadercshex, delimiter)

    # 6) size of (tmpfileoutstr) excluding one delimiter, per your original math
@@ -5639,7 +6295,7 @@ def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksu
        + fnumfilesa
    )

-    outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs)
+    outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs, saltkey)
    fnumfilesa += AppendNullByte(outfileheadercshex, delimiter)

    # 8) final total size field (again per your original logic)
@@ -5672,21 +6328,21 @@ def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksu
    return fp


-def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__):
+def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None):
    if(IsNestedDict(formatspecs) and fmttype in formatspecs):
        formatspecs = formatspecs[fmttype]
    elif(IsNestedDict(formatspecs) and fmttype not in formatspecs):
        fmttype = __file_format_default__
        formatspecs = formatspecs[fmttype]
-    AppendFileHeader(fp, 0, "UTF-8", [], {}, checksumtype, formatspecs)
+    AppendFileHeader(fp, 0, "UTF-8", [], {}, checksumtype, formatspecs, saltkey)
    return fp


-def MakeEmptyCatFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__):
-    return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)
+def MakeEmptyCatFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None):
+    return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs, saltkey)


-def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, returnfp=False):
+def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, returnfp=False):
    if(IsNestedDict(formatspecs) and fmttype=="auto" and
        (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
        get_in_ext = os.path.splitext(outfile)
@@ -5716,6 +6372,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
        fp = MkTempFile()
    elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
        fp = outfile
+        return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs, saltkey)
    elif(re.findall(__upload_proto_support__, outfile)):
        fp = MkTempFile()
    else:
@@ -5727,7 +6384,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
            fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
        except PermissionError:
            return False
-    AppendFileHeader(fp, 0, "UTF-8", [], {}, checksumtype, formatspecs)
+    AppendFileHeader(fp, 0, "UTF-8", ['hello', 'goodbye'], {}, checksumtype, formatspecs, saltkey)
    if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
        fp = CompressOpenFileAlt(
            fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -5758,11 +6415,11 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
    return True


-def MakeEmptyCatFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__, returnfp=False):
-    return MakeEmptyFile(outfile, "auto", compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, returnfp)
+def MakeEmptyCatFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, returnfp=False):
+    return MakeEmptyFile(outfile, "auto", compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, returnfp)


-def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, filecontent="", checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__):
+def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, filecontent="", checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None):
    if(not hasattr(fp, "write")):
        return False
    if (isinstance(extradata, dict) or IsNestedDictAlt(extradata)) and len(extradata) > 0:
@@ -5794,10 +6451,10 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
    tmpoutlist.append(fjsonsize)
    if(len(jsondata) > 0):
        tmpoutlist.append(checksumtype[2])
-        tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[2], True, formatspecs))
+        tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[2], True, formatspecs, saltkey))
    else:
        tmpoutlist.append("none")
-        tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs))
+        tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs, saltkey))
    tmpoutlist.append(extrasizelen)
    tmpoutlist.append(extrafields)
    outfileoutstr = AppendNullBytes(
@@ -5812,22 +6469,18 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
    outfileoutstr = outfileoutstr + \
        AppendNullBytes(checksumlist, formatspecs['format_delimiter'])
    nullstrecd = formatspecs['format_delimiter'].encode('UTF-8')
-    outfileheadercshex = GetFileChecksum(
-        outfileoutstr, checksumtype[0], True, formatspecs)
+    outfileheadercshex = GetFileChecksum(outfileoutstr, checksumtype[0], True, formatspecs, saltkey)
    if(len(filecontent) == 0):
-        outfilecontentcshex = GetFileChecksum(
-            filecontent, "none", False, formatspecs)
+        outfilecontentcshex = GetFileChecksum(filecontent, "none", False, formatspecs, saltkey)
    else:
-        outfilecontentcshex = GetFileChecksum(
-            filecontent, checksumtype[1], False, formatspecs)
+        outfilecontentcshex = GetFileChecksum(filecontent, checksumtype[1], False, formatspecs, saltkey)
    tmpfileoutstr = outfileoutstr + \
        AppendNullBytes([outfileheadercshex, outfilecontentcshex],
                        formatspecs['format_delimiter'])
    formheaersize = format(int(len(tmpfileoutstr) - len(formatspecs['format_delimiter'])), 'x').lower()
    outfileoutstr = AppendNullByte(
        formheaersize, formatspecs['format_delimiter']) + outfileoutstr
-    outfileheadercshex = GetFileChecksum(
-        outfileoutstr, checksumtype[0], True, formatspecs)
+    outfileheadercshex = GetFileChecksum(outfileoutstr, checksumtype[0], True, formatspecs, saltkey)
    outfileoutstr = outfileoutstr + \
        AppendNullBytes([outfileheadercshex, outfilecontentcshex],
                        formatspecs['format_delimiter'])
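The consolidated checksum calls above also make the two-pass sealing easier to follow: the header is digested once, the hex length of header-plus-digests (minus one delimiter) is prepended as its own field, and the digest is recomputed so the stored value covers the size field too. Schematically, with the delimiter and checksum function as stand-ins:

```python
def seal_header(fields_str, checksum_fn, delim="\x00"):
    # Pass 1: digest the serialized fields alone (used for sizing).
    cs1 = checksum_fn(fields_str)
    # Hex length of fields + digest, minus one delimiter, becomes
    # the leading size field.
    size_hex = format(len(fields_str + delim + cs1), 'x').lower()
    sized = size_hex + delim + fields_str
    # Pass 2: the stored checksum now also covers the size field.
    return sized + delim + checksum_fn(sized) + delim
```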
@@ -5845,14 +6498,9 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
        pass
    return fp

-def
-
-
-    advancedlist = formatspecs['use_advanced_list']
-    altinode = formatspecs['use_alt_inode']
-    if(verbose):
-        logging.basicConfig(format="%(message)s",
-                            stream=PY_STDOUT_TEXT, level=logging.DEBUG)
+def AppendFilesWithContentToList(infiles, dirlistfromtxt=False, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+    advancedlist = __use_advanced_list__
+    altinode = __use_alt_inode__
    infilelist = []
    if(infiles == "-"):
        for line in PY_STDIN_TEXT:
@@ -5892,16 +6540,8 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
    inodetofile = {}
    filetoinode = {}
    inodetoforminode = {}
-    numfiles = int(len(GetDirList))
-    fnumfiles = format(numfiles, 'x').lower()
-    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
-    try:
-        fp.flush()
-        if(hasattr(os, "sync")):
-            os.fsync(fp.fileno())
-    except (io.UnsupportedOperation, AttributeError, OSError):
-        pass
    FullSizeFilesAlt = 0
+    tmpoutlist = []
    for curfname in GetDirList:
        fencoding = "UTF-8"
        if(re.findall("^[.|/]", curfname)):
@@ -5923,14 +6563,24 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
|
|
|
5923
6563
|
FullSizeFilesAlt += fstatinfo.st_rsize
|
|
5924
6564
|
except AttributeError:
|
|
5925
6565
|
FullSizeFilesAlt += fstatinfo.st_size
|
|
6566
|
+
fblksize = 0
|
|
6567
|
+
if(hasattr(fstatinfo, "st_blksize")):
|
|
6568
|
+
fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
|
|
6569
|
+
fblocks = 0
|
|
6570
|
+
if(hasattr(fstatinfo, "st_blocks")):
|
|
6571
|
+
fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
|
|
6572
|
+
fflags = 0
|
|
6573
|
+
if(hasattr(fstatinfo, "st_flags")):
|
|
6574
|
+
fflags = format(int(fstatinfo.st_flags), 'x').lower()
|
|
5926
6575
|
ftype = 0
|
|
5927
|
-
if(hasattr(os.path, "isjunction") and os.path.isjunction(fname)):
|
|
6576
|
+
if(not followlink and hasattr(os.path, "isjunction") and os.path.isjunction(fname)):
|
|
5928
6577
|
ftype = 13
|
|
5929
|
-
elif(hasattr(fstatinfo, "st_blocks") and fstatinfo.st_blocks * 512 < fstatinfo.st_size):
|
|
5930
|
-
ftype = 12
|
|
5931
6578
|
elif(stat.S_ISREG(fpremode)):
|
|
5932
|
-
|
|
5933
|
-
|
|
6579
|
+
if(hasattr(fstatinfo, "st_blocks") and fstatinfo.st_size > 0 and fstatinfo.st_blocks * 512 < fstatinfo.st_size):
|
|
6580
|
+
ftype = 12
|
|
6581
|
+
else:
|
|
6582
|
+
ftype = 0
|
|
6583
|
+
elif(not followlink and stat.S_ISLNK(fpremode)):
|
|
5934
6584
|
ftype = 2
|
|
5935
6585
|
elif(stat.S_ISCHR(fpremode)):
|
|
5936
6586
|
ftype = 3
|
|
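The reworked type detection only marks an entry sparse (ftype 12) when it is a regular file with a nonzero size whose allocated blocks cover less than its logical size; previously any file whose st_blocks * 512 fell short was tagged sparse, which could misclassify empty or special files. A standalone sketch of the same test (st_blocks counts 512-byte units on platforms that provide it):

import os
import stat

def is_sparse(path):
    # Regular files only; absence of st_blocks (it is POSIX-specific)
    # simply means "not detectably sparse" here.
    st = os.lstat(path)
    if not stat.S_ISREG(st.st_mode):
        return False
    blocks = getattr(st, "st_blocks", None)
    if blocks is None or st.st_size <= 0:
        return False
    return blocks * 512 < st.st_size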
@@ -5952,43 +6602,42 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
             ftype = 0
         flinkname = ""
         fcurfid = format(int(curfid), 'x').lower()
-        if
+        if(not followlink and finode != 0):
             unique_id = (fstatinfo.st_dev, finode)
-            if
-                if
+            if(ftype != 1):
+                if(unique_id in inodetofile):
                     # Hard link detected
                     ftype = 1
                     flinkname = inodetofile[unique_id]
-                    if altinode:
-                        fcurinode = format(int(unique_id[1]), 'x').lower()
-                    else:
-                        fcurinode = format(int(inodetoforminode[unique_id]), 'x').lower()
                 else:
-                    #
-                    inodelist.append(unique_id)
+                    # First time seeing this inode
                     inodetofile[unique_id] = fname
+            if(unique_id not in inodetoforminode):
                 inodetoforminode[unique_id] = curinode
-
-
-
-
-
+                curinode = curinode + 1
+            if(altinode):
+                # altinode == True → use real inode number
+                fcurinode = format(int(unique_id[1]), 'x').lower()
+            else:
+                # altinode == False → use synthetic inode id
+                fcurinode = format(int(inodetoforminode[unique_id]), 'x').lower()
         else:
             # Handle cases where inodes are not supported or symlinks are followed
             fcurinode = format(int(curinode), 'x').lower()
-            curinode
+            curinode = curinode + 1
         curfid = curfid + 1
         if(ftype == 2):
             flinkname = os.readlink(fname)
-            if(not os.path.exists(
+            if(not os.path.exists(fname)):
                 return False
         try:
             fdev = fstatinfo.st_rdev
         except AttributeError:
             fdev = 0
-
-
-
+        try:
+            frdev = fstatinfo.st_rdev
+        except AttributeError:
+            frdev = 0
         # Types that should be considered zero-length in the archive context:
         zero_length_types = {1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 13}
         # Types that have actual data to read:
@@ -5999,13 +6648,28 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
             fsize = format(int(fstatinfo.st_size), 'x').lower()
         else:
             fsize = format(int(fstatinfo.st_size), 'x').lower()
-
-
-
+        if(hasattr(fstatinfo, "st_atime_ns")):
+            fatime = format(int(fstatinfo.st_atime_ns), 'x').lower()
+        else:
+            fatime = format(int(to_ns(fstatinfo.st_atime)), 'x').lower()
+        if(hasattr(fstatinfo, "st_mtime_ns")):
+            fmtime = format(int(fstatinfo.st_mtime_ns), 'x').lower()
+        else:
+            fmtime = format(int(to_ns(fstatinfo.st_mtime)), 'x').lower()
+        if(hasattr(fstatinfo, "st_ctime_ns")):
+            fctime = format(int(fstatinfo.st_ctime_ns), 'x').lower()
+        else:
+            fctime = format(int(to_ns(fstatinfo.st_ctime)), 'x').lower()
         if(hasattr(fstatinfo, "st_birthtime")):
-
+            if(hasattr(fstatinfo, "st_birthtime_ns")):
+                fbtime = format(int(fstatinfo.st_birthtime_ns), 'x').lower()
+            else:
+                fbtime = format(int(to_ns(fstatinfo.st_birthtime)), 'x').lower()
         else:
-
+            if(hasattr(fstatinfo, "st_ctime_ns")):
+                fbtime = format(int(fstatinfo.st_ctime_ns), 'x').lower()
+            else:
+                fbtime = format(int(to_ns(fstatinfo.st_ctime)), 'x').lower()
         fmode = format(int(fstatinfo.st_mode), 'x').lower()
         fchmode = format(int(stat.S_IMODE(fstatinfo.st_mode)), 'x').lower()
         ftypemod = format(int(stat.S_IFMT(fstatinfo.st_mode)), 'x').lower()
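All four timestamps are now stored as nanoseconds, read from the st_*time_ns fields when os.stat() provides them and otherwise widened from the float seconds via to_ns(). A sketch of the conversion that fallback implies (the real to_ns() is presumably defined earlier in pycatfile.py; this stand-in just scales seconds by 10**9):

def to_ns_sketch(seconds):
    # Float seconds carry roughly microsecond precision at best, so the
    # *_ns stat fields are preferred when available; this fallback
    # simply widens seconds to integer nanoseconds.
    return int(round(float(seconds) * 1000000000))

# Example: to_ns_sketch(1700000000.123456) -> 1700000000123456000 (approximately)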
@@ -6032,8 +6696,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
         except ImportError:
             fgname = ""
         fdev = format(int(fdev), 'x').lower()
-
-        fdev_major = format(int(fdev_major), 'x').lower()
+        frdev = format(int(frdev), 'x').lower()
         finode = format(int(finode), 'x').lower()
         flinkcount = format(int(flinkcount), 'x').lower()
         if(hasattr(fstatinfo, "st_file_attributes")):
@@ -6053,7 +6716,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
         typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
         fcontents.seek(0, 0)
         if(typechecktest is not False):
-            typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=
+            typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
         fcontents.seek(0, 0)
         fcencoding = GetFileEncoding(fcontents, 0, False)[0]
         if(typechecktest is False and not compresswholefile):
@@ -6094,16 +6757,15 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
                 fcompression = curcompression
                 fcontents.close()
                 fcontents = cfcontents
-        elif followlink and (ftype ==
-            if(not os.path.exists(
+        elif followlink and (ftype == 2 or ftype in data_types):
+            if(not os.path.exists(fname)):
                 return False
-            flstatinfo = os.stat(flinkname)
             with open(flinkname, "rb") as fpc:
                 shutil.copyfileobj(fpc, fcontents, length=__filebuff_size__)
             typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
             fcontents.seek(0, 0)
             if(typechecktest is not False):
-                typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=
+                typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
             fcontents.seek(0, 0)
             fcencoding = GetFileEncoding(fcontents, 0, False)[0]
             if(typechecktest is False and not compresswholefile):
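Alongside fdev, the header now records frdev separately; both are taken from st_rdev here, which is only meaningful for character and block devices and can be split back into its major and minor parts. A short sketch of that decomposition (os.major and os.minor are the standard-library helpers):

import os

def split_rdev(st_rdev):
    # st_rdev packs the device's major and minor numbers;
    # os.makedev(major, minor) is the inverse of this pair of calls.
    return os.major(st_rdev), os.minor(st_rdev)

# Example: major, minor = split_rdev(os.lstat("/dev/null").st_rdev)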
@@ -6147,11 +6809,29 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
         if(fcompression == "none"):
             fcompression = ""
         fcontents.seek(0, 0)
+        if(not contentasfile):
+            fcontents = fcontents.read()
         ftypehex = format(ftype, 'x').lower()
-        tmpoutlist
-
-
-
+        tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+                                        fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+    return tmpoutlist
+
+def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+    GetDirList = AppendFilesWithContentToList(infiles, dirlistfromtxt, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, followlink, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+    if(not hasattr(fp, "write")):
+        return False
+    numfiles = int(len(GetDirList))
+    fnumfiles = format(numfiles, 'x').lower()
+    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+    try:
+        fp.flush()
+        if(hasattr(os, "sync")):
+            os.fsync(fp.fileno())
+    except (io.UnsupportedOperation, AttributeError, OSError):
+        pass
+    for curfname in GetDirList:
+        tmpoutlist = curfname['fheaders']
+        AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
     try:
         fp.flush()
         if(hasattr(os, "sync")):
@@ -6160,12 +6840,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
         pass
     return fp
 
-def
-    if(not hasattr(fp, "write")):
-        return False
-    if(verbose):
-        logging.basicConfig(format="%(message)s",
-                            stream=PY_STDOUT_TEXT, level=logging.DEBUG)
+def AppendFilesWithContentFromTarFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
     curinode = 0
     curfid = 0
     inodelist = []
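This is the pattern the rest of the diff repeats for tar, zip, rar, and 7z sources: a *ToList() function gathers per-file header dicts ('fheaders', 'fcontents', and per-entry checksum types), and a thin writer counts the entries, emits the archive header, then replays each dict through AppendFileHeaderWithContent(). A schematic of the split, with hypothetical names and an abridged record format standing in for the real helpers:

def gather_entries(paths):
    # Stage 1: collect per-file dicts; nothing is written yet, so the
    # result can be inspected, filtered, or replayed into several outputs.
    entries = []
    for path in paths:
        with open(path, "rb") as fobj:
            data = fobj.read()
        entries.append({"fheaders": ["0", "UTF-8", path], "fcontents": data})
    return entries

def write_entries(fp, entries):
    # Stage 2: only now is the output stream touched, mirroring how
    # AppendFilesWithContent() replays its list through
    # AppendFileHeaderWithContent().
    fp.write(("entries=%d\n" % len(entries)).encode("utf-8"))
    for entry in entries:
        fp.write(("|".join(entry["fheaders"]) + "\n").encode("utf-8"))
        fp.write(entry["fcontents"])
    return fp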
@@ -6207,10 +6882,8 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         if(compresscheck=="zstd"):
-            if '
-                infile = ZstdFile(
-            elif 'pyzstd' in sys.modules:
-                infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+            if 'zstd' in compressionsupport:
+                infile = zstd.ZstdFile(infile, mode="rb")
             tarfp = tarfile.open(fileobj=infile, mode="r")
         else:
             tarfp = tarfile.open(fileobj=infile, mode="r")
@@ -6219,23 +6892,14 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         if(compresscheck=="zstd"):
-            if '
-                infile = ZstdFile(
-            elif 'pyzstd' in sys.modules:
-                infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+            if 'zstd' in compressionsupport:
+                infile = zstd.ZstdFile(infile, mode="rb")
             tarfp = tarfile.open(fileobj=infile, mode="r")
         else:
             tarfp = tarfile.open(infile, "r")
     except FileNotFoundError:
         return False
-
-    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
-    try:
-        fp.flush()
-        if(hasattr(os, "sync")):
-            os.fsync(fp.fileno())
-    except (io.UnsupportedOperation, AttributeError, OSError):
-        pass
+    tmpoutlist = []
     for member in sorted(tarfp.getmembers(), key=lambda x: x.name):
         fencoding = "UTF-8"
         if(re.findall("^[.|/]", member.name)):
@@ -6247,6 +6911,9 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
         fpremode = member.mode
         ffullmode = member.mode
         flinkcount = 0
+        fblksize = 0
+        fblocks = 0
+        fflags = 0
         ftype = 0
         if(member.isreg()):
             ffullmode = member.mode + stat.S_IFREG
@@ -6284,12 +6951,11 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
         curfid = curfid + 1
         if(ftype == 2):
             flinkname = member.linkname
+        fdev = format(int("0"), 'x').lower()
         try:
-
+            frdev = format(int(os.makedev(member.devmajor, member.devminor)), 'x').lower()
         except AttributeError:
-
-            fdev_minor = format(int(member.devminor), 'x').lower()
-            fdev_major = format(int(member.devmajor), 'x').lower()
+            frdev = format(int(MakeDevAlt(member.devmajor, member.devminor)), 'x').lower()
         # Types that should be considered zero-length in the archive context:
         zero_length_types = {1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 13}
         # Types that have actual data to read:
@@ -6300,10 +6966,10 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
             fsize = format(int(member.size), 'x').lower()
         else:
             fsize = format(int(member.size), 'x').lower()
-        fatime = format(int(member.mtime), 'x').lower()
-        fmtime = format(int(member.mtime), 'x').lower()
-        fctime = format(int(member.mtime), 'x').lower()
-        fbtime = format(int(member.mtime), 'x').lower()
+        fatime = format(int(to_ns(member.mtime)), 'x').lower()
+        fmtime = format(int(to_ns(member.mtime)), 'x').lower()
+        fctime = format(int(to_ns(member.mtime)), 'x').lower()
+        fbtime = format(int(to_ns(member.mtime)), 'x').lower()
         fmode = format(int(ffullmode), 'x').lower()
         fchmode = format(int(stat.S_IMODE(ffullmode)), 'x').lower()
         ftypemod = format(int(stat.S_IFMT(ffullmode)), 'x').lower()
@@ -6325,7 +6991,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
         typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
         fcontents.seek(0, 0)
         if(typechecktest is not False):
-            typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=
+            typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
         fcontents.seek(0, 0)
         fcencoding = GetFileEncoding(fcontents, 0, False)[0]
         if(typechecktest is False and not compresswholefile):
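For tar members the device number is rebuilt from devmajor/devminor with os.makedev(), falling back to a MakeDevAlt() helper where os.makedev is unavailable. A sketch of what such a fallback can look like (the real MakeDevAlt is defined elsewhere in pycatfile.py; the bit layout below is the classic glibc dev_t encoding and is an assumption here):

def makedev_alt(major, minor):
    # Classic glibc dev_t layout: low 8 minor bits, 12 major bits,
    # then the remaining minor and major bits shifted high.
    return ((minor & 0xff) | ((major & 0xfff) << 8) |
            ((minor & ~0xff) << 12) | ((major & ~0xfff) << 32))

# On Linux, os.makedev(8, 1) and makedev_alt(8, 1) agree: both give 0x801.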
@@ -6369,26 +7035,38 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
         if(fcompression == "none"):
             fcompression = ""
         fcontents.seek(0, 0)
+        if(not contentasfile):
+            fcontents = fcontents.read()
         ftypehex = format(ftype, 'x').lower()
-        tmpoutlist
-
-
-
+        tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+                                        fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+    return tmpoutlist
+
+def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+    if(not hasattr(fp, "write")):
+        return False
+    GetDirList = AppendFilesWithContentFromTarFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+    numfiles = int(len(GetDirList))
+    fnumfiles = format(numfiles, 'x').lower()
+    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+    try:
+        fp.flush()
+        if(hasattr(os, "sync")):
+            os.fsync(fp.fileno())
+    except (io.UnsupportedOperation, AttributeError, OSError):
+        pass
+    for curfname in GetDirList:
+        tmpoutlist = curfname['fheaders']
+        AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
     try:
         fp.flush()
         if(hasattr(os, "sync")):
             os.fsync(fp.fileno())
     except (io.UnsupportedOperation, AttributeError, OSError):
         pass
-    fcontents.close()
     return fp
 
-def
-    if(not hasattr(fp, "write")):
-        return False
-    if(verbose):
-        logging.basicConfig(format="%(message)s",
-                            stream=PY_STDOUT_TEXT, level=logging.DEBUG)
+def AppendFilesWithContentFromZipFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
     curinode = 0
     curfid = 0
     inodelist = []
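The contentasfile flag added to every *ToList() function decides whether 'fcontents' comes back as a seekable file object or is flattened to bytes with .read(); the writer wrappers pass False, so they always operate on bytes. A two-line illustration of the difference:

from io import BytesIO

buf = BytesIO(b"payload")
as_file = buf           # contentasfile=True: caller can seek/stream
as_bytes = buf.read()   # contentasfile=False: caller gets plain bytes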
@@ -6421,14 +7099,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
     ziptest = zipfp.testzip()
     if(ziptest):
         VerbosePrintOut("Bad file found!")
-
-    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
-    try:
-        fp.flush()
-        if(hasattr(os, "sync")):
-            os.fsync(fp.fileno())
-    except (io.UnsupportedOperation, AttributeError, OSError):
-        pass
+    tmpoutlist = []
     for member in sorted(zipfp.infolist(), key=lambda x: x.filename):
         fencoding = "UTF-8"
         if(re.findall("^[.|/]", member.filename)):
@@ -6443,6 +7114,9 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
         else:
             fpremode = int(stat.S_IFREG | 0x1b6)
         flinkcount = 0
+        fblksize = 0
+        fblocks = 0
+        fflags = 0
         ftype = 0
         if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
             ftype = 5
@@ -6453,8 +7127,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
         fcurinode = format(int(curfid), 'x').lower()
         curfid = curfid + 1
         fdev = format(int(0), 'x').lower()
-
-        fdev_major = format(int(0), 'x').lower()
+        frdev = format(int(0), 'x').lower()
         if(ftype == 5):
             fsize = format(int("0"), 'x').lower()
         elif(ftype == 0):
@@ -6462,13 +7135,13 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
         else:
             fsize = format(int(member.file_size), 'x').lower()
         fatime = format(
-            int(time.mktime(member.date_time + (0, 0, -1))), 'x').lower()
+            int(to_ns(time.mktime(member.date_time + (0, 0, -1)))), 'x').lower()
         fmtime = format(
-            int(time.mktime(member.date_time + (0, 0, -1))), 'x').lower()
+            int(to_ns(time.mktime(member.date_time + (0, 0, -1)))), 'x').lower()
         fctime = format(
-            int(time.mktime(member.date_time + (0, 0, -1))), 'x').lower()
+            int(to_ns(time.mktime(member.date_time + (0, 0, -1)))), 'x').lower()
         fbtime = format(
-            int(time.mktime(member.date_time + (0, 0, -1))), 'x').lower()
+            int(to_ns(time.mktime(member.date_time + (0, 0, -1)))), 'x').lower()
         if(zipinfo.create_system == 0 or zipinfo.create_system == 10):
             fwinattributes = format(int(zipinfo.external_attr & 0xFFFF), 'x').lower()
         if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
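Zip entries carry a single local-time DOS timestamp, a (year, month, day, hour, minute, second) tuple, so all four header times are derived from it; appending (0, 0, -1) completes the 9-tuple time.mktime() expects, with DST left for the C library to resolve, and the result is then widened to nanoseconds. In sketch form:

import time
import zipfile

def zip_mtime_ns(member):
    # member.date_time is (Y, M, D, h, m, s); the trailing (0, 0, -1)
    # fills in weekday, yearday, and "unknown DST".
    seconds = time.mktime(member.date_time + (0, 0, -1))
    return int(seconds) * 1000000000

# Example (archive.zip is a placeholder path):
# with zipfile.ZipFile("archive.zip") as zf:
#     print(zip_mtime_ns(zf.infolist()[0]))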
@@ -6583,30 +7256,44 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
         if(fcompression == "none"):
             fcompression = ""
         fcontents.seek(0, 0)
+        if(not contentasfile):
+            fcontents = fcontents.read()
         ftypehex = format(ftype, 'x').lower()
-        tmpoutlist
-
-
-
+        tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+                                        fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+    return tmpoutlist
+
+def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+    if(not hasattr(fp, "write")):
+        return False
+    GetDirList = AppendFilesWithContentFromZipFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+    numfiles = int(len(GetDirList))
+    fnumfiles = format(numfiles, 'x').lower()
+    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+    try:
+        fp.flush()
+        if(hasattr(os, "sync")):
+            os.fsync(fp.fileno())
+    except (io.UnsupportedOperation, AttributeError, OSError):
+        pass
+    for curfname in GetDirList:
+        tmpoutlist = curfname['fheaders']
+        AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
     try:
         fp.flush()
         if(hasattr(os, "sync")):
             os.fsync(fp.fileno())
     except (io.UnsupportedOperation, AttributeError, OSError):
         pass
-    fcontents.close()
     return fp
 
 if(not rarfile_support):
-    def
+    def AppendFilesWithContentFromRarFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
         return False
-
-
-
-
-    if(verbose):
-        logging.basicConfig(format="%(message)s",
-                            stream=PY_STDOUT_TEXT, level=logging.DEBUG)
+    def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+        return False
+else:
+    def AppendFilesWithContentFromRarFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
         curinode = 0
         curfid = 0
         inodelist = []
@@ -6621,20 +7308,7 @@ else:
         rartest = rarfp.testrar()
         if(rartest):
             VerbosePrintOut("Bad file found!")
-
-        AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
-        try:
-            fp.flush()
-            if(hasattr(os, "sync")):
-                os.fsync(fp.fileno())
-        except (io.UnsupportedOperation, AttributeError, OSError):
-            pass
-        try:
-            fp.flush()
-            if(hasattr(os, "sync")):
-                os.fsync(fp.fileno())
-        except (io.UnsupportedOperation, AttributeError, OSError):
-            pass
+        tmpoutlist = []
         for member in sorted(rarfp.infolist(), key=lambda x: x.filename):
             is_unix = False
             is_windows = False
@@ -6678,6 +7352,9 @@ else:
             fcompression = ""
             fcsize = format(int(0), 'x').lower()
             flinkcount = 0
+            fblksize = 0
+            fblocks = 0
+            fflags = 0
             ftype = 0
             if(member.is_file()):
                 ftype = 0
@@ -6692,8 +7369,7 @@ else:
             fcurinode = format(int(curfid), 'x').lower()
             curfid = curfid + 1
             fdev = format(int(0), 'x').lower()
-
-            fdev_major = format(int(0), 'x').lower()
+            frdev = format(int(0), 'x').lower()
             if(ftype == 5):
                 fsize = format(int("0"), 'x').lower()
             elif(ftype == 0):
@@ -6702,20 +7378,20 @@ else:
                 fsize = format(int(member.file_size), 'x').lower()
             try:
                 if(member.atime):
-                    fatime = format(int(member.atime.timestamp()), 'x').lower()
+                    fatime = format(int(to_ns(member.atime.timestamp())), 'x').lower()
                 else:
-                    fatime = format(int(member.mtime.timestamp()), 'x').lower()
+                    fatime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
             except AttributeError:
-                fatime = format(int(member.mtime.timestamp()), 'x').lower()
-            fmtime = format(int(member.mtime.timestamp()), 'x').lower()
+                fatime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
+            fmtime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
             try:
                 if(member.ctime):
-                    fctime = format(int(member.ctime.timestamp()), 'x').lower()
+                    fctime = format(int(to_ns(member.ctime.timestamp())), 'x').lower()
                 else:
-                    fctime = format(int(member.mtime.timestamp()), 'x').lower()
+                    fctime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
             except AttributeError:
-                fctime = format(int(member.mtime.timestamp()), 'x').lower()
-                fbtime = format(int(member.mtime.timestamp()), 'x').lower()
+                fctime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
+            fbtime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
             if(is_unix and member.external_attr != 0):
                 fmode = format(int(member.external_attr), 'x').lower()
                 fchmode = format(
@@ -6816,30 +7492,84 @@ else:
            if(fcompression == "none"):
                 fcompression = ""
             fcontents.seek(0, 0)
+            if(not contentasfile):
+                fcontents = fcontents.read()
             ftypehex = format(ftype, 'x').lower()
-            tmpoutlist
-
-
-
+            tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+                                            fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+        return tmpoutlist
+    def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+        if(not hasattr(fp, "write")):
+            return False
+        GetDirList = AppendFilesWithContentFromRarFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+        numfiles = int(len(GetDirList))
+        fnumfiles = format(numfiles, 'x').lower()
+        AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+        try:
+            fp.flush()
+            if(hasattr(os, "sync")):
+                os.fsync(fp.fileno())
+        except (io.UnsupportedOperation, AttributeError, OSError):
+            pass
+        for curfname in GetDirList:
+            tmpoutlist = curfname['fheaders']
+            AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
         try:
             fp.flush()
             if(hasattr(os, "sync")):
                 os.fsync(fp.fileno())
         except (io.UnsupportedOperation, AttributeError, OSError):
             pass
-        fcontents.close()
         return fp
 
 if(not py7zr_support):
-    def
+    def sevenzip_readall(infile, **kwargs):
         return False
 else:
-
-
-
-
-
-
+    class _MemoryIO(py7zr.Py7zIO):
+        """In-memory file object used by py7zr's factory API."""
+        def __init__(self):
+            self._buf = bytearray()
+        def write(self, data):
+            # py7zr will call this repeatedly with chunks
+            self._buf.extend(data)
+        def read(self, size=None):
+            if size is None:
+                return bytes(self._buf)
+            return bytes(self._buf[:size])
+        def seek(self, offset, whence=0):
+            # we don't really need seeking for your use case
+            return 0
+        def flush(self):
+            pass
+        def size(self):
+            return len(self._buf)
+    class _MemoryFactory(py7zr.WriterFactory):
+        """Factory that creates _MemoryIO objects and keeps them by filename."""
+        def __init__(self):
+            self.files = {}
+        def create(self, filename: str) -> py7zr.Py7zIO:
+            io_obj = _MemoryIO()
+            self.files[filename] = io_obj
+            return io_obj
+    def sevenzip_readall(infile, **kwargs):
+        """
+        Replacement for SevenZipFile.readall() using the new py7zr API.
+
+        Returns: dict[filename -> _MemoryIO]
+        """
+        factory = _MemoryFactory()
+        with py7zr.SevenZipFile(infile, mode="r", **kwargs) as archive:
+            archive.extractall(factory=factory)
+        return factory.files
+
+if(not py7zr_support):
+    def AppendFilesWithContentFromSevenZipFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+        return False
+    def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+        return False
+else:
+    def AppendFilesWithContentFromSevenZipFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
         formver = formatspecs['format_ver']
         fileheaderver = str(int(formver.replace(".", "")))
         curinode = 0
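sevenzip_readall() papers over the py7zr API change that removed SevenZipFile.readall(): newer releases extract through a WriterFactory, so the shim collects every member into an in-memory _MemoryIO and returns the same filename-to-file-object mapping the old call produced. Callers can stay API-agnostic along these lines (archive.7z is a placeholder path):

import py7zr

szpfp = py7zr.SevenZipFile("archive.7z", mode="r")
try:
    file_content = szpfp.readall()                  # older py7zr with readall()
except AttributeError:
    file_content = sevenzip_readall("archive.7z")   # newer factory-based py7zr
for name, fobj in file_content.items():
    data = fobj.read()
szpfp.close()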
@@ -6851,19 +7581,15 @@ else:
         if(not os.path.exists(infile) or not os.path.isfile(infile)):
             return False
         szpfp = py7zr.SevenZipFile(infile, mode="r")
-
+        try:
+            file_content = szpfp.readall()
+        except AttributeError:
+            file_content = sevenzip_readall(infile)
         #sztest = szpfp.testzip()
         sztestalt = szpfp.test()
         if(sztestalt):
             VerbosePrintOut("Bad file found!")
-
-        AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
-        try:
-            fp.flush()
-            if(hasattr(os, "sync")):
-                os.fsync(fp.fileno())
-        except (io.UnsupportedOperation, AttributeError, OSError):
-            pass
+        tmpoutlist = []
         for member in sorted(szpfp.list(), key=lambda x: x.filename):
             fencoding = "UTF-8"
             if(re.findall("^[.|/]", member.filename)):
@@ -6880,6 +7606,9 @@ else:
             fcompression = ""
             fcsize = format(int(0), 'x').lower()
             flinkcount = 0
+            fblksize = 0
+            fblocks = 0
+            fflags = 0
             ftype = 0
             if(member.is_directory):
                 ftype = 5
@@ -6890,14 +7619,13 @@ else:
             fcurinode = format(int(curfid), 'x').lower()
             curfid = curfid + 1
             fdev = format(int(0), 'x').lower()
-
-            fdev_major = format(int(0), 'x').lower()
+            frdev = format(int(0), 'x').lower()
             if(ftype == 5):
                 fsize = format(int("0"), 'x').lower()
-            fatime = format(int(member.creationtime.timestamp()), 'x').lower()
-            fmtime = format(int(member.creationtime.timestamp()), 'x').lower()
-            fctime = format(int(member.creationtime.timestamp()), 'x').lower()
-            fbtime = format(int(member.creationtime.timestamp()), 'x').lower()
+            fatime = format(int(to_ns(member.creationtime.timestamp())), 'x').lower()
+            fmtime = format(int(to_ns(member.creationtime.timestamp())), 'x').lower()
+            fctime = format(int(to_ns(member.creationtime.timestamp())), 'x').lower()
+            fbtime = format(int(to_ns(member.creationtime.timestamp())), 'x').lower()
             if(member.is_directory):
                 fmode = format(int(stat.S_IFDIR | 0x1ff), 'x').lower()
                 fchmode = format(
@@ -6947,7 +7675,10 @@ else:
             typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
             fcontents.seek(0, 0)
             fcencoding = GetFileEncoding(fcontents, 0, False)[0]
-
+            try:
+                file_content[member.filename].close()
+            except AttributeError:
+                pass
             if(typechecktest is False and not compresswholefile):
                 fcontents.seek(0, 2)
                 ucfsize = fcontents.tell()
@@ -6989,25 +7720,39 @@ else:
             if(fcompression == "none"):
                 fcompression = ""
             fcontents.seek(0, 0)
+            if(not contentasfile):
+                fcontents = fcontents.read()
             ftypehex = format(ftype, 'x').lower()
-            tmpoutlist
-
-
-
+            tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+                                            fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+        return tmpoutlist
+    def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+        if(not hasattr(fp, "write")):
+            return False
+        GetDirList = AppendFilesWithContentFromSevenZipFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+        numfiles = int(len(GetDirList))
+        fnumfiles = format(numfiles, 'x').lower()
+        AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+        try:
+            fp.flush()
+            if(hasattr(os, "sync")):
+                os.fsync(fp.fileno())
+        except (io.UnsupportedOperation, AttributeError, OSError):
+            pass
+        for curfname in GetDirList:
+            tmpoutlist = curfname['fheaders']
+            AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
         try:
             fp.flush()
             if(hasattr(os, "sync")):
                 os.fsync(fp.fileno())
         except (io.UnsupportedOperation, AttributeError, OSError):
             pass
-        fcontents.close()
         return fp
 
-def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
     if(not hasattr(fp, "write")):
         return False
-    if(verbose):
-        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     GetDirList = inlist
     if(not GetDirList):
         return False
@@ -7019,7 +7764,7 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsond
     inodetoforminode = {}
     numfiles = int(len(GetDirList))
     fnumfiles = format(numfiles, 'x').lower()
-    AppendFileHeader(fp, numfiles, "UTF-8", [], [checksumtype[0], checksumtype[1]], formatspecs)
+    AppendFileHeader(fp, numfiles, "UTF-8", [], [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
     for curfname in GetDirList:
         ftype = format(curfname[0], 'x').lower()
         fencoding = curfname[1]
@@ -7033,44 +7778,45 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsond
         fbasedir = os.path.dirname(fname)
         flinkname = curfname[4]
         fsize = format(curfname[5], 'x').lower()
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        fblksize = format(curfname[6], 'x').lower()
+        fblocks = format(curfname[7], 'x').lower()
+        fflags = format(curfname[8], 'x').lower()
+        fatime = format(curfname[9], 'x').lower()
+        fmtime = format(curfname[10], 'x').lower()
+        fctime = format(curfname[11], 'x').lower()
+        fbtime = format(curfname[12], 'x').lower()
+        fmode = format(curfname[13], 'x').lower()
+        fwinattributes = format(curfname[14], 'x').lower()
+        fcompression = curfname[15]
+        fcsize = format(curfname[16], 'x').lower()
+        fuid = format(curfname[17], 'x').lower()
+        funame = curfname[18]
+        fgid = format(curfname[19], 'x').lower()
+        fgname = curfname[20]
+        fid = format(curfname[21], 'x').lower()
+        finode = format(curfname[22], 'x').lower()
+        flinkcount = format(curfname[23], 'x').lower()
+        fdev = format(curfname[24], 'x').lower()
+        frdev = format(curfname[25], 'x').lower()
+        fseeknextfile = curfname[26]
+        extradata = curfname[27]
+        fheaderchecksumtype = curfname[28]
+        fcontentchecksumtype = curfname[29]
+        fcontents = curfname[30]
         fencoding = GetFileEncoding(fcontents, 0, False)[0]
-        tmpoutlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize,
-                      fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev,
+        tmpoutlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize,
+                      fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev, frdev, fseeknextfile]
         fcontents.seek(0, 0)
-        AppendFileHeaderWithContent(
-            fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
+        AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
     return fp
 
 
-def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
-    inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
-    return AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, followlink, checksumtype, formatspecs, verbose)
+def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+    inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs, saltkey, False)
+    return AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose)
 
 
-def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
     if(IsNestedDict(formatspecs) and fmttype=="auto" and
             (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
         get_in_ext = os.path.splitext(outfile)
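AppendListsWithContent() now unpacks 31 positional fields per entry (indices 0-30) instead of the old, shorter layout: fblksize, fblocks, and fflags slot in after fsize, frdev after fdev, with fseeknextfile, extradata, the checksum types, and fcontents trailing. A compact map of the layout, with index names taken from the code above:

# Positional layout consumed by AppendListsWithContent() per entry:
INLIST_FIELDS = [
    "ftype", "fencoding", "fcencoding", "fname", "flinkname",     # 0-4
    "fsize", "fblksize", "fblocks", "fflags",                     # 5-8
    "fatime", "fmtime", "fctime", "fbtime",                       # 9-12
    "fmode", "fwinattributes", "fcompression", "fcsize",          # 13-16
    "fuid", "funame", "fgid", "fgname",                           # 17-20
    "fid", "finode", "flinkcount", "fdev", "frdev",               # 21-25
    "fseeknextfile", "extradata",                                 # 26-27
    "fheaderchecksumtype", "fcontentchecksumtype", "fcontents",   # 28-30
]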
@@ -7114,8 +7860,7 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
|
|
|
7114
7860
|
fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
|
|
7115
7861
|
except PermissionError:
|
|
7116
7862
|
return False
|
|
7117
|
-
AppendFilesWithContent(infiles, fp, dirlistfromtxt, extradata, jsondata, compression,
|
|
7118
|
-
compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, formatspecs, verbose)
|
|
7863
|
+
AppendFilesWithContent(infiles, fp, dirlistfromtxt, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, formatspecs, saltkey, verbose)
|
|
7119
7864
|
if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
|
|
7120
7865
|
fp = CompressOpenFileAlt(
|
|
7121
7866
|
fp, compression, compressionlevel, compressionuselist, formatspecs)
|
|
@@ -7144,12 +7889,12 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
|
|
|
7144
7889
|
fp.close()
|
|
7145
7890
|
return True
|
|
7146
7891
|
|
|
7147
|
-
def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
7892
|
+
def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
|
|
7148
7893
|
if not isinstance(infiles, list):
|
|
7149
7894
|
infiles = [infiles]
|
|
7150
7895
|
returnout = False
|
|
7151
7896
|
for infileslist in infiles:
|
|
7152
|
-
returnout = AppendFilesWithContentToOutFile(infileslist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, True)
|
|
7897
|
+
returnout = AppendFilesWithContentToOutFile(infileslist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose, True)
|
|
7153
7898
|
if(not returnout):
|
|
7154
7899
|
break
|
|
7155
7900
|
else:
|
|
@@ -7159,7 +7904,7 @@ def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=Fals
|
|
|
7159
7904
|
return True
|
|
7160
7905
|
return returnout
|
|
7161
7906
|
|
|
7162
|
-
def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
|
|
7907
|
+
def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, saltkey=None, returnfp=False):
|
|
7163
7908
|
if(IsNestedDict(formatspecs) and fmttype=="auto" and
|
|
7164
7909
|
(outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
|
|
7165
7910
|
get_in_ext = os.path.splitext(outfile)
|
|
@@ -7200,8 +7945,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
|
|
|
7200
7945
|
fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
|
|
7201
7946
|
except PermissionError:
|
|
7202
7947
|
return False
|
|
7203
|
-
AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, compression,
|
|
7204
|
-
compresswholefile, compressionlevel, followlink, checksumtype, formatspecs, verbose)
|
|
7948
|
+
AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, compression, compresswholefile, compressionlevel, followlink, checksumtype, formatspecs, saltkey, verbose)
|
|
7205
7949
|
if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
|
|
7206
7950
|
fp = CompressOpenFileAlt(
|
|
7207
7951
|
fp, compression, compressionlevel, compressionuselist, formatspecs)
|
|
@@ -7231,7 +7975,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
|
|
|
7231
7975
|
fp.close()
|
|
7232
7976
|
return True
|
|
7233
7977
|
|
|
7234
|
-
def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
7978
|
+
def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
|
|
7235
7979
|
if(IsNestedDict(formatspecs) and fmttype=="auto" and
|
|
7236
7980
|
(outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
|
|
7237
7981
|
get_in_ext = os.path.splitext(outfile)
|
|
@@ -7273,8 +8017,7 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
|
|
|
7273
8017
|
fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
|
|
7274
8018
|
except PermissionError:
|
|
7275
8019
|
return False
|
|
7276
|
-
AppendFilesWithContentFromTarFile(infiles, fp, extradata, jsondata, compression,
|
|
7277
|
-
compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, verbose)
|
|
8020
|
+
AppendFilesWithContentFromTarFile(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, verbose)
|
|
7278
8021
|
if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
|
|
7279
8022
|
fp = CompressOpenFileAlt(
|
|
7280
8023
|
fp, compression, compressionlevel, compressionuselist, formatspecs)
|
|
@@ -7304,12 +8047,12 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
|
|
|
7304
8047
|
fp.close()
|
|
7305
8048
|
return True
|
|
7306
8049
|
|
|
7307
|
-
def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
8050
|
+
def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
|
|
7308
8051
|
if not isinstance(infiles, list):
|
|
7309
8052
|
infiles = [infiles]
|
|
7310
8053
|
returnout = False
|
|
7311
8054
|
for infileslist in infiles:
|
|
7312
|
-
returnout = AppendFilesWithContentFromTarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
|
|
8055
|
+
returnout = AppendFilesWithContentFromTarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, True)
|
|
7313
8056
|
if(not returnout):
|
|
7314
8057
|
break
|
|
7315
8058
|
else:
|
|
@@ -7319,7 +8062,7 @@ def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype=
|
|
|
7319
8062
|
return True
|
|
7320
8063
|
return returnout
|
|
7321
8064
|
|
|
7322
|
-
def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
8065
|
+
def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
|
|
7323
8066
|
if(IsNestedDict(formatspecs) and fmttype=="auto" and
|
|
7324
8067
|
(outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
|
|
7325
8068
|
get_in_ext = os.path.splitext(outfile)
|
|
@@ -7361,8 +8104,7 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
         fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
     except PermissionError:
         return False
-    AppendFilesWithContentFromZipFile(infiles, fp, extradata, jsondata, compression,
-                                      compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, verbose)
+    AppendFilesWithContentFromZipFile(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, verbose)
     if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
         fp = CompressOpenFileAlt(
             fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7392,12 +8134,12 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
         fp.close()
     return True

-def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
     if not isinstance(infiles, list):
         infiles = [infiles]
     returnout = False
     for infileslist in infiles:
-        returnout = AppendFilesWithContentFromZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
+        returnout = AppendFilesWithContentFromZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, True)
         if(not returnout):
             break
         else:
@@ -7408,10 +8150,10 @@ def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype=
     return returnout

 if(not rarfile_support):
-    def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+    def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
         return False
 else:
-    def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+    def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
         if(IsNestedDict(formatspecs) and fmttype=="auto" and
                 (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
             get_in_ext = os.path.splitext(outfile)
@@ -7453,8 +8195,7 @@ else:
             fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
         except PermissionError:
             return False
-        AppendFilesWithContentFromRarFile(infiles, fp, extradata, jsondata, compression,
-                                          compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, verbose)
+        AppendFilesWithContentFromRarFile(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, verbose)
         if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
             fp = CompressOpenFileAlt(
                 fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7484,12 +8225,12 @@ else:
             fp.close()
         return True

-def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
     if not isinstance(infiles, list):
         infiles = [infiles]
     returnout = False
     for infileslist in infiles:
-        returnout = AppendFilesWithContentFromRarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
+        returnout = AppendFilesWithContentFromRarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, True)
         if(not returnout):
             break
         else:
@@ -7500,10 +8241,10 @@ def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype=
     return returnout

 if(not py7zr_support):
-    def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+    def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
         return False
 else:
-    def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+    def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
         if(IsNestedDict(formatspecs) and fmttype=="auto" and
                 (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
             get_in_ext = os.path.splitext(outfile)
@@ -7545,8 +8286,7 @@ else:
             fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
         except PermissionError:
             return False
-        AppendFilesWithContentFromSevenZipFile(infiles, fp, extradata, jsondata, compression,
-                                               compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, verbose)
+        AppendFilesWithContentFromSevenZipFile(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, verbose)
         if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
             fp = CompressOpenFileAlt(
                 fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7576,12 +8316,12 @@ else:
             fp.close()
         return True

-def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
     if not isinstance(infiles, list):
         infiles = [infiles]
     returnout = False
     for infileslist in infiles:
-        returnout = AppendFilesWithContentFromSevenZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
+        returnout = AppendFilesWithContentFromSevenZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, True)
         if(not returnout):
             break
         else:
@@ -7591,9 +8331,9 @@ def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmt
             return True
     return returnout

-def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
-    inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
-    return AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
+def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
+    inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs, saltkey, False)
+    return AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose, returnfp)


 def PrintPermissionString(fchmode, ftype):
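
Note on the hunks above: every Append*WithContent* entry point gains a `saltkey=None` parameter that is passed straight through to the checksum helpers, so archives can carry keyed (salted) checksums. A minimal sketch of the new call shape, assuming `saltkey` keys the checksum computation; the file names are placeholders, not files shipped with the package:

    # Sketch only: "example.tar" and "backup.cat" are hypothetical paths,
    # and the keyword arguments mirror the 0.27.0 signatures shown above.
    from pycatfile import AppendFilesWithContentFromTarFileToOutFile

    ok = AppendFilesWithContentFromTarFileToOutFile(
        "example.tar",                 # source tar archive
        "backup.cat",                  # output catfile archive
        fmttype="auto",
        compression="auto",
        checksumtype=["md5", "md5", "md5", "md5", "md5"],
        saltkey=b"my-secret-salt",     # new in 0.27.0; None keeps unsalted checksums
    )
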
@@ -8277,10 +9017,8 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, file
     elif(compresscheck == "lz4" and compresscheck in compressionsupport):
         fp = lz4.frame.open(infile, "rb")
     elif(compresscheck == "zstd" and compresscheck in compressionsupport):
-        if 'zstandard' in sys.modules:
-            fp = ZstdFile(infile, mode="rb")
-        elif 'pyzstd' in sys.modules:
-            fp = pyzstd.zstdfile.ZstdFile(infile, mode="rb")
+        if 'zstd' in compressionsupport:
+            fp = zstd.ZstdFile(infile, mode="rb")
         else:
             return False # fix: 'Flase' -> False
     elif((compresscheck == "lzma" or compresscheck == "xz") and compresscheck in compressionsupport):
@@ -8397,10 +9135,8 @@ def UncompressFileAlt(fp, formatspecs=__file_format_multi_dict__, filestart=0,
     elif kind in ("lzma","xz") and (("lzma" in compressionsupport) or ("xz" in compressionsupport)):
         wrapped = lzma.LZMAFile(src)
     elif kind == "zstd" and ("zstd" in compressionsupport or "zstandard" in compressionsupport):
-        if 'zstandard' in sys.modules:
-            wrapped = ZstdFile(fileobj=src, mode="rb")
-        elif 'pyzstd' in sys.modules:
-            wrapped = pyzstd.zstdfile.ZstdFile(fileobj=src, mode="rb")
+        if 'zstd' in compressionsupport:
+            wrapped = zstd.ZstdFile(src, mode="rb")
         else:
             return False
     elif kind == "lz4" and "lz4" in compressionsupport:
@@ -8468,10 +9204,8 @@ def UncompressFile(infile, formatspecs=__file_format_multi_dict__, mode="rb",
     elif (compresscheck == "bzip2" and "bzip2" in compressionsupport):
         fp = bz2.open(infile, mode)
     elif (compresscheck == "zstd" and "zstandard" in compressionsupport):
-        if 'zstandard' in sys.modules:
-            fp = ZstdFile(infile, mode=mode)
-        elif 'pyzstd' in sys.modules:
-            fp = pyzstd.zstdfile.ZstdFile(infile, mode=mode)
+        if 'zstd' in compressionsupport:
+            fp = zstd.ZstdFile(infile, mode=mode)
         else:
             return False
     elif (compresscheck == "lz4" and "lz4" in compressionsupport):
@@ -9240,10 +9974,8 @@ def CompressOpenFile(outfile, compressionenable=True, compressionlevel=None,
         outfp = FileLikeAdapter(bz2.open(outfile, mode, compressionlevel), mode="wb")

     elif (fextname == ".zst" and "zstandard" in compressionsupport):
-        if 'zstandard' in sys.modules:
-            outfp = FileLikeAdapter(ZstdFile(outfile, mode=mode, level=compressionlevel), mode="wb")
-        elif 'pyzstd' in sys.modules:
-            outfp = FileLikeAdapter(pyzstd.zstdfile.ZstdFile(outfile, mode=mode, level=compressionlevel), mode="wb")
+        if 'zstd' in compressionsupport:
+            outfp = FileLikeAdapter(zstd.ZstdFile(outfile, mode=mode, level=compressionlevel), mode="wb")
         else:
             return False # fix: 'Flase' -> False
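
The four hunks above collapse the old runtime probing of `sys.modules` for `zstandard`/`pyzstd` into a single `zstd` binding gated by `compressionsupport`. A sketch of the resulting dispatch, assuming `zstd` is whichever backend pycatfile bound at import time and `compressionsupport` is its list of enabled codec names:

    # Illustration of the unified pattern; `zstd` and `compressionsupport`
    # are module-level names inside pycatfile, passed in here for clarity.
    def open_zstd_for_read(path, compressionsupport, zstd):
        if 'zstd' in compressionsupport:
            return zstd.ZstdFile(path, mode="rb")
        return False  # pycatfile signals an unsupported codec with False
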
@@ -9334,56 +10066,54 @@ def CheckSumSupport(checkfor, guaranteed=True):
     return False


-def PackCatFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
-    return AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
+def PackCatFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
+    return AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose, returnfp)

-def PackStackedCatFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
-    return AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
+def PackStackedCatFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
+    return AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose, returnfp)

-def PackCatFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
-    return PackCatFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, returnfp)
+def PackCatFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
+    return PackCatFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, saltkey, verbose, returnfp)


-def PackCatFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
-    return AppendFilesWithContentFromTarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
+def PackCatFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
+    return AppendFilesWithContentFromTarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, returnfp)


-def PackCatFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
-    return AppendFilesWithContentFromZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
+def PackCatFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
+    return AppendFilesWithContentFromZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, returnfp)


 if(not rarfile_support):
-    def PackCatFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+    def PackCatFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
         return False
 else:
-    def PackCatFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
-        return AppendFilesWithContentFromRarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
+    def PackCatFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
+        return AppendFilesWithContentFromRarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, returnfp)


 if(not py7zr_support):
-    def PackCatFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+    def PackCatFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
         return False
 else:
-    def PackCatFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
-        return AppendFilesWithContentFromSevenZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
+    def PackCatFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
+        return AppendFilesWithContentFromSevenZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, returnfp)


-def PackCatFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+def PackCatFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
     checkcompressfile = CheckCompressionSubType(infile, formatspecs, 0, True)
     if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
         formatspecs = formatspecs[checkcompressfile]
-    if(verbose):
-        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
-        return PackCatFileFromTarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
+        return PackCatFileFromTarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
     elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
-        return PackCatFileFromZipFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
+        return PackCatFileFromZipFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
     elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
-        return PackCatFileFromRarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
+        return PackCatFileFromRarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
     elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
-        return PackCatFileFromSevenZipFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
+        return PackCatFileFromSevenZipFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
     elif(IsSingleDict(formatspecs) and checkcompressfile == formatspecs['format_magic']):
-        return RePackCatFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, False, 0, 0, checksumtype, False, extradata, jsondata, formatspecs, verbose, returnfp)
+        return RePackCatFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, False, 0, 0, checksumtype, False, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
     else:
         return False
     return False
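
The Pack* wrappers gain the same `saltkey` pass-through, and PackCatFileFromInFile drops its per-call `logging.basicConfig` on `verbose`. A hypothetical packing call against the new signature, with placeholder paths:

    # Sketch only: "docs/" and "docs.cat" are placeholder paths.
    from pycatfile import PackCatFile

    PackCatFile(
        ["docs/"],                    # files or directories to pack
        "docs.cat",                   # output archive
        compression="auto",
        checksumtype=["md5", "md5", "md5", "md5", "md5"],
        saltkey=b"my-secret-salt",    # must be reused when validating/reading
    )
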
@@ -9452,19 +10182,12 @@ def CatFileArrayValidate(listarrayfiles, verbose=False):
         ok = False
     return ok

-def CatFileValidate(infile, fmttype="auto", filestart=0,
-                    formatspecs=__file_format_multi_dict__, # keep default like original
-                    seektoend=False, verbose=False, returnfp=False):
-    if(verbose):
-        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
-
+def CatFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
     if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
         formatspecs = formatspecs[fmttype]
     elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
         fmttype = "auto"
-
     curloc = filestart
-
     if(hasattr(infile, "read") or hasattr(infile, "write")):
         curloc = infile.tell()
         fp = infile
@@ -9480,7 +10203,6 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
         if(not fp):
             return False
         fp.seek(filestart, 0)
-
     elif(infile == "-"):
         fp = MkTempFile()
         shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
@@ -9492,7 +10214,6 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
         if(not fp):
             return False
         fp.seek(filestart, 0)
-
     elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
         fp = MkTempFile()
         fp.write(infile)
@@ -9504,7 +10225,6 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
         if(not fp):
             return False
         fp.seek(filestart, 0)
-
     elif(re.findall(__download_proto_support__, infile)):
         fp = download_file_from_internet_file(infile)
         fp = UncompressFileAlt(fp, formatspecs, filestart)
@@ -9515,7 +10235,6 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
         if(not fp):
             return False
         fp.seek(filestart, 0)
-
     else:
         infile = RemoveWindowsPath(infile)
         checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
@@ -9562,11 +10281,9 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
         fp.seek(0, 2)
     except (OSError, ValueError):
         SeekToEndOfFile(fp)
-
    CatSize = fp.tell()
    CatSizeEnd = CatSize
    fp.seek(curloc, 0)
-
    if(IsNestedDict(formatspecs)):
        compresschecking = CheckCompressionType(fp, formatspecs, filestart, False)
        if(compresschecking not in formatspecs):
@@ -9574,54 +10291,36 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
        else:
            formatspecs = formatspecs[compresschecking]
        fp.seek(filestart, 0)
-
    inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+   headeroffset = fp.tell()
    formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
    formdelsize = len(formatspecs['format_delimiter'])
    formdel = fp.read(formdelsize).decode("UTF-8")
-
    if(formstring != formatspecs['format_magic'] + inheaderver):
        return False
    if(formdel != formatspecs['format_delimiter']):
        return False
-
-   if(formatspecs['new_style']):
+   if(__use_new_style__):
        inheader = ReadFileHeaderDataBySize(fp, formatspecs['format_delimiter'])
    else:
        inheader = ReadFileHeaderDataWoSize(fp, formatspecs['format_delimiter'])
-
-
-
-   extrastart = 15
+   fnumextrafieldsize = int(inheader[15], 16)
+   fnumextrafields = int(inheader[16], 16)
+   extrastart = 17
    extraend = extrastart + fnumextrafields
    formversion = re.findall("([\\d]+)", formstring)
    fheadsize = int(inheader[0], 16)
    fnumfields = int(inheader[1], 16)
-   fnumfiles = int(inheader[
+   fnumfiles = int(inheader[8], 16)
    fprechecksumtype = inheader[-2]
    fprechecksum = inheader[-1]
-   outfseeknextfile = inheader[
-   fjsonsize = int(inheader[
-   fjsonchecksumtype = inheader[
-   fjsonchecksum = inheader[
+   outfseeknextfile = inheader[9]
+   fjsonsize = int(inheader[12], 16)
+   fjsonchecksumtype = inheader[13]
+   fjsonchecksum = inheader[14]
+   headerjsonoffset = fp.tell()
    fprejsoncontent = fp.read(fjsonsize)
-   jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
-   if(fjsonsize > 0):
-       if(hmac.compare_digest(jsonfcs, fjsonchecksum)):
-           if(verbose):
-               VerbosePrintOut("File JSON Data Checksum Passed at offset " + str(outfjstart))
-               VerbosePrintOut("'" + outfjsonchecksum + "' == " + "'" + injsonfcs + "'")
-       else:
-           valid_archive = False
-           invalid_archive = True
-           if(verbose):
-               VerbosePrintOut("File JSON Data Checksum Error at offset " + str(outfjstart))
-               VerbosePrintOut("'" + outfjsonchecksum + "' != " + "'" + injsonfcs + "'")
-   if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
-       VerbosePrintOut("File JSON Data Checksum Error with file " +
-                       fname + " at offset " + str(fheaderstart))
-       VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
-       return False
+   jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
    # Next seek directive
    if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
        fseeknextasnum = int(outfseeknextfile.replace("+", ""))
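
The archive header layout changed in this hunk: the record count now sits at index 8, the seek directive at 9, the JSON metadata fields at 12-14, and the extra-field counters at 15-16, with extra data starting at 17. A sketch of the mapping the new code implies, assuming `inheader` is the decoded field list returned by ReadFileHeaderDataBySize (the helper name is hypothetical):

    # Field positions inferred from the new CatFileValidate code.
    def parse_archive_header(inheader):
        return {
            "headsize": int(inheader[0], 16),
            "numfields": int(inheader[1], 16),
            "numfiles": int(inheader[8], 16),
            "seeknextfile": inheader[9],
            "jsonsize": int(inheader[12], 16),
            "jsonchecksumtype": inheader[13],
            "jsonchecksum": inheader[14],
            "extrafieldsize": int(inheader[15], 16),
            "extrafields": int(inheader[16], 16),
            "prechecksumtype": inheader[-2],
            "prechecksum": inheader[-1],
        }
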
@@ -9640,14 +10339,11 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
            fp.seek(fseeknextasnum, 0)
        else:
            return False
-
    il = 0
-   headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
-   newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
-
+   headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs, saltkey)
+   newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
    valid_archive = True
    invalid_archive = False
-
    if(verbose):
        if(hasattr(infile, "read") or hasattr(infile, "write")):
            try:
@@ -9659,78 +10355,56 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
        else:
            VerbosePrintOut(infile)
        VerbosePrintOut("Number of Records " + str(fnumfiles))
-
    if(headercheck):
        if(verbose):
-           VerbosePrintOut("File Header Checksum Passed at offset " + str(
+           VerbosePrintOut("File Header Checksum Passed at offset " + str(headeroffset))
            VerbosePrintOut("'" + fprechecksum + "' == " + "'" + newfcs + "'")
    else:
        # always flip flags, even when not verbose
        valid_archive = False
        invalid_archive = True
        if(verbose):
-           VerbosePrintOut("File Header Checksum Failed at offset " + str(
+           VerbosePrintOut("File Header Checksum Failed at offset " + str(headeroffset))
            VerbosePrintOut("'" + fprechecksum + "' != " + "'" + newfcs + "'")
-
+   if(fjsonsize > 0):
+       if(CheckChecksums(jsonfcs, fjsonchecksum)):
+           if(verbose):
+               VerbosePrintOut("File JSON Data Checksum Passed at offset " + str(headerjsonoffset))
+               VerbosePrintOut("'" + outfjsonchecksum + "' == " + "'" + injsonfcs + "'")
+       else:
+           valid_archive = False
+           invalid_archive = True
+           if(verbose):
+               VerbosePrintOut("File JSON Data Checksum Error at offset " + str(headerjsonoffset))
+               VerbosePrintOut("'" + outfjsonchecksum + "' != " + "'" + injsonfcs + "'")
    if(verbose):
        VerbosePrintOut("")
-
    # Iterate either until EOF (seektoend) or fixed count
    while (fp.tell() < CatSizeEnd) if seektoend else (il < fnumfiles):
        outfhstart = fp.tell()
-       if(formatspecs['new_style']):
+       if(__use_new_style__):
            inheaderdata = ReadFileHeaderDataBySize(fp, formatspecs['format_delimiter'])
        else:
            inheaderdata = ReadFileHeaderDataWoSize(fp, formatspecs['format_delimiter'])

        if(len(inheaderdata) == 0):
            break
-
-       outfheadsize = int(inheaderdata[0], 16)
-       outfnumfields = int(inheaderdata[1], 16)
-       outftype = int(inheaderdata[2], 16)
-       # FIX: these must come from inheaderdata, not inheader
-       outfostype = inheaderdata[3]
-       outfencoding = inheaderdata[4]
-
        if(re.findall("^[.|/]", inheaderdata[5])):
            outfname = inheaderdata[5]
        else:
            outfname = "./" + inheaderdata[5]
        outfbasedir = os.path.dirname(outfname)
-
-       outflinkname = inheaderdata[6]
        outfsize = int(inheaderdata[7], 16)
-
-
-
-
-
-
-
-
-       outfcompression = inheaderdata[14]
-       outfcsize = int(inheaderdata[15], 16)
-       outfuid = int(inheaderdata[16], 16)
-       outfuname = inheaderdata[17]
-       outfgid = int(inheaderdata[18], 16)
-       outfgname = inheaderdata[19]
-       fid = int(inheaderdata[20], 16)
-       finode = int(inheaderdata[21], 16)
-       flinkcount = int(inheaderdata[22], 16)
-       outfdev = int(inheaderdata[23], 16)
-       outfdev_minor = int(inheaderdata[24], 16)
-       outfdev_major = int(inheaderdata[25], 16)
-       outfseeknextfile = inheaderdata[26]
-       outfjsontype = inheaderdata[27]
-       outfjsonlen = int(inheaderdata[28], 16)
-       outfjsonsize = int(inheaderdata[29], 16)
-       outfjsonchecksumtype = inheaderdata[30]
-       outfjsonchecksum = inheaderdata[31]
-
+       outfcompression = inheaderdata[17]
+       outfcsize = int(inheaderdata[18], 16)
+       fid = int(inheaderdata[23], 16)
+       finode = int(inheaderdata[24], 16)
+       outfseeknextfile = inheaderdata[28]
+       outfjsonsize = int(inheaderdata[31], 16)
+       outfjsonchecksumtype = inheaderdata[32]
+       outfjsonchecksum = inheaderdata[33]
        outfhend = fp.tell() - 1 # (kept for parity; not used)
        outfjstart = fp.tell()
-
        # Read JSON bytes; compute checksum on bytes for robustness
        outfprejsoncontent_bytes = fp.read(outfjsonsize)
        # Decode for any downstream text needs (not used further here)
@@ -9738,27 +10412,21 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
            outfprejsoncontent = outfprejsoncontent_bytes.decode("UTF-8")
        except Exception:
            outfprejsoncontent = None
-
        outfjend = fp.tell()
        fp.seek(len(formatspecs['format_delimiter']), 1)
-
-
-
-       outfextrasize = int(inheaderdata[32], 16)
-       outfextrafields = int(inheaderdata[33], 16)
+       injsonfcs = GetFileChecksum(outfprejsoncontent_bytes, outfjsonchecksumtype, True, formatspecs, saltkey)
+       outfextrafields = int(inheaderdata[35], 16)
        extrafieldslist = []
-       extrastart = 34
+       extrastart = 36
        extraend = extrastart + outfextrafields
-
        outfcs = inheaderdata[-2].lower()
        outfccs = inheaderdata[-1].lower()
-       infcs = GetHeaderChecksum(inheaderdata[:-2], inheaderdata[-4].lower(), True, formatspecs)
-
+       infcs = GetHeaderChecksum(inheaderdata[:-2], inheaderdata[-4].lower(), True, formatspecs, saltkey)
        if(verbose):
            VerbosePrintOut(outfname)
            VerbosePrintOut("Record Number " + str(il) + "; File ID " + str(fid) + "; iNode Number " + str(finode))

-       if(hmac.compare_digest(outfcs, infcs)):
+       if(CheckChecksums(outfcs, infcs)):
            if(verbose):
                VerbosePrintOut("File Header Checksum Passed at offset " + str(outfhstart))
                VerbosePrintOut("'" + outfcs + "' == " + "'" + infcs + "'")
@@ -9768,9 +10436,8 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
            if(verbose):
                VerbosePrintOut("File Header Checksum Failed at offset " + str(outfhstart))
                VerbosePrintOut("'" + outfcs + "' != " + "'" + infcs + "'")
-
        if(outfjsonsize > 0):
-           if(hmac.compare_digest(injsonfcs, outfjsonchecksum)):
+           if(CheckChecksums(injsonfcs, outfjsonchecksum)):
                if(verbose):
                    VerbosePrintOut("File JSON Data Checksum Passed at offset " + str(outfjstart))
                    VerbosePrintOut("'" + outfjsonchecksum + "' == " + "'" + injsonfcs + "'")
@@ -9780,21 +10447,19 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
                if(verbose):
                    VerbosePrintOut("File JSON Data Checksum Error at offset " + str(outfjstart))
                    VerbosePrintOut("'" + outfjsonchecksum + "' != " + "'" + injsonfcs + "'")
-
        outfcontentstart = fp.tell()
        outfcontents = b"" # FIX: bytes for Py2/3 consistency
        pyhascontents = False
-
        if(outfsize > 0):
            if(outfcompression == "none" or outfcompression == "" or outfcompression == "auto"):
                outfcontents = fp.read(outfsize)
            else:
                outfcontents = fp.read(outfcsize)

-           infccs = GetFileChecksum(outfcontents, inheaderdata[-3].lower(), False, formatspecs)
+           infccs = GetFileChecksum(outfcontents, inheaderdata[-3].lower(), False, formatspecs, saltkey)
            pyhascontents = True

-       if(hmac.compare_digest(outfccs, infccs)):
+       if(CheckChecksums(outfccs, infccs)):
            if(verbose):
                VerbosePrintOut("File Content Checksum Passed at offset " + str(outfcontentstart))
                VerbosePrintOut("'" + outfccs + "' == " + "'" + infccs + "'")
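
Throughout the validation loop, direct `hmac.compare_digest` calls are replaced by a `CheckChecksums` helper, and every `GetFileChecksum`/`GetHeaderChecksum` call now takes `saltkey`. The helper's body is not part of this diff; a plausible reading, assuming it simply wraps a constant-time comparison (the real implementation may differ):

    import hmac

    # Assumed behaviour of CheckChecksums (not shown in this diff): a
    # constant-time comparison of two hex digest strings.
    def check_checksums(expected_digest, actual_digest):
        return hmac.compare_digest(str(expected_digest), str(actual_digest))
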
@@ -9804,10 +10469,8 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
            if(verbose):
                VerbosePrintOut("File Content Checksum Failed at offset " + str(outfcontentstart))
                VerbosePrintOut("'" + outfccs + "' != " + "'" + infccs + "'")
-
        if(verbose):
            VerbosePrintOut("")
-
        # Next seek directive
        if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
            fseeknextasnum = int(outfseeknextfile.replace("+", ""))
@@ -9826,9 +10489,7 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
            fp.seek(fseeknextasnum, 0)
        else:
            return False
-
        il = il + 1
-
    if(valid_archive):
        if(returnfp):
            return fp
@@ -9840,34 +10501,34 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
            return False


-def CatFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
-    return CatFileValidate(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
+def CatFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
+    return CatFileValidate(infile, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)


-def CatFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
+def CatFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
    if(isinstance(infile, (list, tuple, ))):
        pass
    else:
        infile = [infile]
    outretval = True
    for curfname in infile:
-       curretfile = CatFileValidate(curfname, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
+       curretfile = CatFileValidate(curfname, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
        if(not curretfile):
            outretval = False
    return outretval

-def CatFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
-    return CatFileValidateMultiple(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
+def CatFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
+    return CatFileValidateMultiple(infile, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)


-def StackedCatFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
+def StackedCatFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
    outretval = []
    outstartfile = filestart
    outfsize = float('inf')
    while True:
        if outstartfile >= outfsize: # stop when function signals False
            break
-       is_valid_file = CatFileValidate(infile, fmttype, outstartfile, formatspecs, seektoend, verbose, True)
+       is_valid_file = CatFileValidate(infile, fmttype, outstartfile, formatspecs, saltkey, seektoend, verbose, True)
        if is_valid_file is False: # stop when function signals False
            outretval.append(is_valid_file)
            break
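
All of the validate entry points now accept the archive's `saltkey`; validating a salted archive without the matching key should fail the checksum comparisons. A hypothetical check of an archive packed with a salt:

    # "docs.cat" is a placeholder; saltkey must match the packing key.
    from pycatfile import CatFileValidate

    is_valid = CatFileValidate("docs.cat", fmttype="auto",
                               saltkey=b"my-secret-salt", verbose=True)
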
@@ -9884,33 +10545,36 @@ def StackedCatFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__fi
    if(returnfp):
        return infile
    else:
-
+       try:
+           infile.close()
+       except AttributeError:
+           return False
    return outretval



-def StackedCatFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
-    return StackedCatFileValidate(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
+def StackedCatFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
+    return StackedCatFileValidate(infile, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)


-def StackedCatFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
+def StackedCatFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
    if(isinstance(infile, (list, tuple, ))):
        pass
    else:
        infile = [infile]
    outretval = True
    for curfname in infile:
-       curretfile = StackedCatFileValidate(curfname, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
+       curretfile = StackedCatFileValidate(curfname, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
        if(not curretfile):
            outretval = False
    return outretval

-def StackedCatFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
-    return StackedCatFileValidateMultiple(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
+def StackedCatFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
+    return StackedCatFileValidateMultiple(infile, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)


-def CatFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
-    outfp = ReadInFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+def CatFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
+    outfp = ReadInFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
    if not returnfp:
        for item in outfp:
            fp = item.get('fp')
@@ -9924,26 +10588,26 @@ def CatFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0,
    return outfp


-def MultipleCatFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+def MultipleCatFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
    if(isinstance(infile, (list, tuple, ))):
        pass
    else:
        infile = [infile]
    outretval = []
    for curfname in infile:
-       outretval.append(CatFileToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp))
+       outretval.append(CatFileToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend, returnfp))
    return outretval

-def MultipleCatFilesToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
-    return MultipleCatFileToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
+def MultipleCatFilesToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
+    return MultipleCatFileToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend, returnfp)


-def CatFileStringToArray(instr, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+def CatFileStringToArray(instr, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
    checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
    if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
        formatspecs = formatspecs[checkcompressfile]
    fp = MkTempFile(instr)
-   listarrayfiles = CatFileToArray(fp, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
+   listarrayfiles = CatFileToArray(fp, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
    return listarrayfiles

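
CatFileToArray and its wrappers thread `saltkey` into ReadInFileWithContentToArray so salted archives can be read back. A short sketch, assuming each returned item is a dict describing one archive member (the surrounding code shows items carrying an 'fp' entry):

    # "docs.cat" is a placeholder path; listonly=True skips file contents.
    from pycatfile import CatFileToArray

    records = CatFileToArray("docs.cat", listonly=True,
                             saltkey=b"my-secret-salt")
    for rec in records:
        print(rec)  # one dict per archive member
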
@@ -9952,9 +10616,8 @@ def TarFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile
    if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
        formatspecs = formatspecs[checkcompressfile]
    fp = MkTempFile()
-   fp = PackCatFileFromTarFile(
-
-   listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
+   fp = PackCatFileFromTarFile(infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, None, False, True)
+   listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, None, seektoend, returnfp)
    return listarrayfiles

@@ -9963,9 +10626,8 @@ def ZipFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile
    if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
        formatspecs = formatspecs[checkcompressfile]
    fp = MkTempFile()
-   fp = PackCatFileFromZipFile(
-
-   listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
+   fp = PackCatFileFromZipFile(infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, None, False, True)
+   listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, None, seektoend, returnfp)
    return listarrayfiles

@@ -9979,9 +10641,8 @@ if(rarfile_support):
|
|
|
9979
10641
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
9980
10642
|
formatspecs = formatspecs[checkcompressfile]
|
|
9981
10643
|
fp = MkTempFile()
|
|
9982
|
-
fp = PackCatFileFromRarFile(
|
|
9983
|
-
|
|
9984
|
-
listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
10644
|
+
fp = PackCatFileFromRarFile(infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, None, False, True)
|
|
10645
|
+
listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, None, seektoend, returnfp)
|
|
9985
10646
|
return listarrayfiles
|
|
9986
10647
|
|
|
9987
10648
|
if(not py7zr_support):
|
|
@@ -9994,13 +10655,12 @@ if(py7zr_support):
|
|
|
9994
10655
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
9995
10656
|
formatspecs = formatspecs[checkcompressfile]
|
|
9996
10657
|
fp = MkTempFile()
|
|
9997
|
-
fp = PackCatFileFromSevenZipFile(
|
|
9998
|
-
|
|
9999
|
-
listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
10658
|
+
fp = PackCatFileFromSevenZipFile(infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, None, False, True)
|
|
10659
|
+
listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, None, seektoend, returnfp)
|
|
10000
10660
|
return listarrayfiles
|
|
10001
10661
|
|
|
10002
10662
|
|
|
10003
|
-
def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
10663
|
+
def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
|
|
10004
10664
|
checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
|
|
10005
10665
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
10006
10666
|
formatspecs = formatspecs[checkcompressfile]
|
|
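Each foreign-archive wrapper above now packs into a temporary catfile with a fully spelled-out argument list instead of the previously truncated call. A usage sketch, assuming pycatfile is importable; "backup.tar" is a placeholder, and the ffilelist/fname keys are taken from the entry dictionaries used elsewhere in this diff:

    import pycatfile

    # Convert a tar archive into the in-memory entry array via the rewritten wrapper.
    entries = pycatfile.TarFileToArray("backup.tar")
    if entries:
        for fent in entries.get('ffilelist', []):
            print(fent['fname'])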
@@ -10013,17 +10673,16 @@ def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, c
 elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
 return SevenZipFileToArray(infile, seekstart, seekend, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
 elif(checkcompressfile == formatspecs['format_magic']):
-return CatFileToArray(infile, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
+return CatFileToArray(infile, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
 else:
 return False
 return False


-def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, skipchecksum=False, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
+def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, saltkey=None, skipchecksum=False, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
 outarray = MkTempFile()
-packform = PackCatFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
-
-listarrayfiles = CatFileToArray(outarray, "auto", filestart, seekstart, seekend, listonly, True, True, skipchecksum, formatspecs, seektoend, returnfp)
+packform = PackCatFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, followlink, checksumtype, extradata, formatspecs, saltkey, verbose, True)
+listarrayfiles = CatFileToArray(outarray, "auto", filestart, seekstart, seekend, listonly, True, True, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
 return listarrayfiles

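ListDirToArray likewise forwards saltkey both when packing the temporary archive and when reading it back. A sketch against the signature above (the directory and key are placeholders):

    import pycatfile

    # Pack a directory tree in memory and get the parsed entry array back.
    entries = pycatfile.ListDirToArray(["./mydir"], saltkey=b"hunter2")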
@@ -10145,12 +10804,12 @@ def CatFileArrayToArrayIndex(inarray, returnfp=False):
 return out


-def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=
+def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, seektoend=False, verbose=False, returnfp=False):
 # ---------- Safe defaults ----------
 if compressionuselist is None:
 compressionuselist = compressionlistalt
 if checksumtype is None:
-checksumtype = ["md5", "md5", "md5", "md5"]
+checksumtype = ["md5", "md5", "md5", "md5", "md5"]
 if extradata is None:
 extradata = []
 if jsondata is None:
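RePackCatFile now takes five checksum slots plus separate input and output salt keys, which allows re-salting an archive while repacking it. A sketch against the signature above (paths and keys are placeholders):

    import pycatfile

    # Re-pack, verifying with the old key and writing with a new one.
    ok = pycatfile.RePackCatFile(
        "old.cat", "new.cat",
        insaltkey=b"old-key",   # key the source archive was checksummed with
        outsaltkey=b"new-key",  # key used for the rewritten archive
    )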
@@ -10169,7 +10828,7 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
 infile = RemoveWindowsPath(infile)
 listarrayfileslist = CatFileToArray(
 infile, "auto", filestart, seekstart, seekend,
-False, True, True, skipchecksum, formatspecs, seektoend, False
+False, True, True, skipchecksum, formatspecs, insaltkey, seektoend, False
 )

 # ---------- Format specs selection ----------
@@ -10236,9 +10895,6 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
 if (compression is None) or (compressionuselist and compression not in compressionuselist):
 compression = "auto"

-if verbose:
-logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
-
 # No files?
 if not listarrayfiles.get('ffilelist'):
 return False
@@ -10251,7 +10907,7 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
 if lenlist != fnumfiles:
 fnumfiles = lenlist

-AppendFileHeader(fp, fnumfiles, listarrayfiles.get('fencoding', 'utf-8'), [], checksumtype[0], formatspecs)
+AppendFileHeader(fp, fnumfiles, listarrayfiles.get('fencoding', 'utf-8'), listarrayfiles['fextradata'], listarrayfiles['fjsondata'], [checksumtype[0], checksumtype[1]], formatspecs, outsaltkey)

 # loop counters
 lcfi = 0
@@ -10281,6 +10937,9 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
 # fields (hex-encoded where expected)
 fheadersize = format(int(cur_entry['fheadersize']), 'x').lower()
 fsize = format(int(cur_entry['fsize']), 'x').lower()
+fblksize = format(int(cur_entry['fblksize']), 'x').lower()
+fblocks = format(int(cur_entry['fblocks']), 'x').lower()
+fflags = format(int(cur_entry['fflags']), 'x').lower()
 flinkname = cur_entry['flinkname']
 fatime = format(int(cur_entry['fatime']), 'x').lower()
 fmtime = format(int(cur_entry['fmtime']), 'x').lower()
@@ -10299,8 +10958,7 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
 fcompression = cur_entry['fcompression']
 fcsize = format(int(cur_entry['fcsize']), 'x').lower()
 fdev = format(int(cur_entry['fdev']), 'x').lower()
-
-fdev_major = format(int(cur_entry['fmajor']), 'x').lower()
+frdev = format(int(cur_entry['frdev']), 'x').lower()
 fseeknextfile = cur_entry['fseeknextfile']

 # extra fields sizing
@@ -10311,6 +10969,12 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
 # extradata/jsondata defaults per file
 if not followlink and len(extradata) <= 0:
 extradata = cur_entry['fextradata']
+
+fvendorfields = cur_entry['fvendorfields']
+ffvendorfieldslist = []
+if(fvendorfields>0):
+ffvendorfieldslist = cur_entry['fvendorfieldslist']
+
 if not followlink and len(jsondata) <= 0:
 jsondata = cur_entry['fjsondata']

@@ -10346,7 +11010,11 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
 fcontents.seek(0, 0)
 cfcontents.seek(0, 0)
 cfcontents = CompressOpenFileAlt(
-cfcontents,
+cfcontents,
+compressionuselist[ilmin],
+compressionlevel,
+compressionuselist,
+formatspecs
 )
 if cfcontents:
 cfcontents.seek(0, 2)
@@ -10354,7 +11022,7 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
 cfcontents.close()
 else:
 ilcsize.append(float("inf"))
-ilmin
+ilmin = ilmin + 1
 ilcmin = ilcsize.index(min(ilcsize))
 curcompression = compressionuselist[ilcmin]

@@ -10363,16 +11031,24 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
 shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
 cfcontents.seek(0, 0)
 cfcontents = CompressOpenFileAlt(
-cfcontents,
+cfcontents,
+curcompression,
+compressionlevel,
+compressionuselist,
+formatspecs
 )
 cfcontents.seek(0, 2)
-
-if ucfsize >
-fcsize = format(int(
+cfsize = cfcontents.tell()
+if ucfsize > cfsize:
+fcsize = format(int(cfsize), 'x').lower()
 fcompression = curcompression
 fcontents.close()
 fcontents = cfcontents

+if fcompression == "none":
+fcompression = ""
+fcontents.seek(0, 0)
+
 # link following (fixed: use listarrayfiles, not prelistarrayfiles)
 if followlink:
 if (cur_entry['ftype'] == 1 or cur_entry['ftype'] == 2):
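The repaired loop above trials every codec in compressionuselist, records each compressed size in ilcsize, and keeps the winner via ilcsize.index(min(ilcsize)). The same selection idea in a self-contained form, using stdlib codecs rather than pycatfile's own list (names here are illustrative, not pycatfile's):

    import bz2
    import lzma
    import zlib

    def smallest_codec(data):
        # Compress with each candidate, keep whichever output is smallest.
        codecs = {
            "zlib": lambda b: zlib.compress(b, 9),
            "bz2": lambda b: bz2.compress(b, 9),
            "xz": lambda b: lzma.compress(b),
        }
        sizes = {name: len(fn(data)) for name, fn in codecs.items()}
        best = min(sizes, key=sizes.get)
        return best, sizes[best]

    print(smallest_codec(b"hello " * 1000))  # prints the winning codec and its size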
@@ -10381,6 +11057,9 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
 flinkinfo = listarrayfiles['ffilelist'][flinkid]
 fheadersize = format(int(flinkinfo['fheadersize']), 'x').lower()
 fsize = format(int(flinkinfo['fsize']), 'x').lower()
+fblksize = format(int(flinkinfo['fblksize']), 'x').lower()
+fblocks = format(int(flinkinfo['fblocks']), 'x').lower()
+fflags = format(int(flinkinfo['fflags']), 'x').lower()
 flinkname = flinkinfo['flinkname']
 fatime = format(int(flinkinfo['fatime']), 'x').lower()
 fmtime = format(int(flinkinfo['fmtime']), 'x').lower()
@@ -10399,14 +11078,19 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
 fcompression = flinkinfo['fcompression']
 fcsize = format(int(flinkinfo['fcsize']), 'x').lower()
 fdev = format(int(flinkinfo['fdev']), 'x').lower()
-
-fdev_major = format(int(flinkinfo['fmajor']), 'x').lower()
+frdev = format(int(flinkinfo['frdev']), 'x').lower()
 fseeknextfile = flinkinfo['fseeknextfile']
 if (len(flinkinfo['fextradata']) > flinkinfo['fextrafields']
 and len(flinkinfo['fextradata']) > 0):
 flinkinfo['fextrafields'] = len(flinkinfo['fextradata'])
 if len(extradata) < 0:
 extradata = flinkinfo['fextradata']
+
+fvendorfields = flinkinfo['fvendorfields']
+ffvendorfieldslist = []
+if(fvendorfields>0):
+ffvendorfieldslist = flinkinfo['fvendorfieldslist']
+
 if len(jsondata) < 0:
 jsondata = flinkinfo['fjsondata']
 fcontents = flinkinfo['fcontents']
@@ -10435,15 +11119,15 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
 fcompression = ""

 tmpoutlist = [
-ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime,
+ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime,
 fctime, fbtime, fmode, fwinattributes, fcompression, fcsize, fuid, funame,
-fgid, fgname, fcurfid, fcurinode, flinkcount, fdev,
+fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, fseeknextfile
 ]

-
-
-
-)
+if(fvendorfields>0 and len(ffvendorfieldslist)>0):
+extradata.extend(fvendorfields)
+
+AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(),[checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, outsaltkey)
 try:
 fcontents.close()
 except Exception:
@@ -10488,12 +11172,12 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
 pass
 return True

-def RePackMultipleCatFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=
+def RePackMultipleCatFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, seektoend=False, verbose=False, returnfp=False):
 if not isinstance(infiles, list):
 infiles = [infiles]
 returnout = False
 for infileslist in infiles:
-returnout = RePackCatFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend, checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, True)
+returnout = RePackCatFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend, checksumtype, skipchecksum, extradata, jsondata, formatspecs, insaltkey, outsaltkey, seektoend, verbose, True)
 if(not returnout):
 break
 else:
@@ -10503,33 +11187,28 @@ def RePackMultipleCatFile(infiles, outfile, fmttype="auto", compression="auto",
 return True
 return returnout

-def RePackCatFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=
+def RePackCatFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, seektoend=False, verbose=False, returnfp=False):
 fp = MkTempFile(instr)
-listarrayfiles = RePackCatFile(fp, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
-checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
+listarrayfiles = RePackCatFile(fp, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend, checksumtype, skipchecksum, extradata, jsondata, formatspecs, insaltkey, outsaltkey, seektoend, verbose, returnfp)
 return listarrayfiles


-def PackCatFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
+def PackCatFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
 outarray = MkTempFile()
-packform = PackCatFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
-
-listarrayfiles = RePackCatFile(outarray, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
-checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
+packform = PackCatFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, saltkey, verbose, True)
+listarrayfiles = RePackCatFile(outarray, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend, checksumtype, skipchecksum, extradata, jsondata, formatspecs, saltkey, seektoend, verbose, returnfp)
 return listarrayfiles


-def UnPackCatFile(infile, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, preservepermissions=True, preservetime=True, seektoend=False, verbose=False, returnfp=False):
+def UnPackCatFile(infile, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, preservepermissions=True, preservetime=True, seektoend=False, verbose=False, returnfp=False):
 if(outdir is not None):
 outdir = RemoveWindowsPath(outdir)
-if(verbose):
-logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
 if(isinstance(infile, dict)):
 listarrayfiles = infile
 else:
 if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
 infile = RemoveWindowsPath(infile)
-listarrayfiles = CatFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, True, skipchecksum, formatspecs, seektoend, returnfp)
+listarrayfiles = CatFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, True, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
 if(not listarrayfiles):
 return False
 lenlist = len(listarrayfiles['ffilelist'])
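UnPackCatFile accepts the same optional saltkey when extracting, so archives written with a key can be verified on the way out. A sketch (path, directory, and key are placeholders):

    import pycatfile

    # Extract an archive, verifying salted checksums with the packing key.
    ok = pycatfile.UnPackCatFile("example.cat", outdir="./extracted", saltkey=b"hunter2")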
@@ -10765,9 +11444,9 @@ def UnPackCatFile(infile, outdir=None, followlink=False, filestart=0, seekstart=
 return True


-def UnPackCatFileString(instr, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
+def UnPackCatFileString(instr, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
 fp = MkTempFile(instr)
-listarrayfiles = UnPackCatFile(fp, outdir, followlink, filestart, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
+listarrayfiles = UnPackCatFile(fp, outdir, followlink, filestart, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend, verbose, returnfp)
 return listarrayfiles

 def ftype_to_str(ftype):
@@ -10785,9 +11464,7 @@ def ftype_to_str(ftype):
 # Default to "file" if unknown
 return mapping.get(ftype, "file")

-def CatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
-if(verbose):
-logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
+def CatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, newstyle=False, returnfp=False):
 if(isinstance(infile, dict)):
 listarrayfileslist = [infile]
 if(isinstance(infile, list)):
@@ -10795,7 +11472,7 @@ def CatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0
 else:
 if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
 infile = RemoveWindowsPath(infile)
-listarrayfileslist = CatFileToArray(infile, fmttype, filestart, seekstart, seekend, True, False, False, skipchecksum, formatspecs, seektoend, returnfp)
+listarrayfileslist = CatFileToArray(infile, fmttype, filestart, seekstart, seekend, True, False, False, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
 if(not listarrayfileslist):
 return False
 for listarrayfiles in listarrayfileslist:
@@ -10832,8 +11509,11 @@ def CatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0
 VerbosePrintOut(ftype_to_str(listarrayfiles['ffilelist'][lcfi]['ftype']) + "\t" + listarrayfiles['ffilelist'][lcfi]['fcompression'] + "\t" + str(
 listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + "\t" + printfname)
 else:
+ts_ns = listarrayfiles['ffilelist'][lcfi]['fmtime']
+sec, ns = divmod(int(ts_ns), 10**9)
+dt = datetime.datetime.utcfromtimestamp(sec).replace(microsecond=ns // 1000)
 VerbosePrintOut(PrintPermissionString(listarrayfiles['ffilelist'][lcfi]['fmode'], listarrayfiles['ffilelist'][lcfi]['ftype']) + " " + str(fuprint) + "/" + str(fgprint) + " " + str(
-listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + " " +
+listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + " " + dt.strftime('%Y-%m-%d %H:%M') + " " + printfname)
 lcfi = lcfi + 1
 if(returnfp):
 return listarrayfiles['fp']
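The long listing now derives its date column from a nanosecond fmtime by splitting off whole seconds before formatting. The conversion in isolation (the sample value is made up; utcfromtimestamp matches the code above, though it is deprecated in Python 3.12+):

    import datetime

    ts_ns = 1731628800123456789  # hypothetical nanosecond mtime
    sec, ns = divmod(int(ts_ns), 10**9)
    dt = datetime.datetime.utcfromtimestamp(sec).replace(microsecond=ns // 1000)
    print(dt.strftime('%Y-%m-%d %H:%M'))  # 2024-11-15 00:00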
@@ -10841,25 +11521,25 @@ def CatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0
 return True


-def MultipleCatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+def MultipleCatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
 if(isinstance(infile, (list, tuple, ))):
 pass
 else:
 infile = [infile]
 outretval = {}
 for curfname in infile:
-outretval[curfname] = CatFileListFiles(infile, fmttype, filestart, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, returnfp)
+outretval[curfname] = CatFileListFiles(infile, fmttype, filestart, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend, verbose, newstyle, returnfp)
 return outretval


-def StackedCatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
+def StackedCatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, newstyle=False, returnfp=False):
 outretval = []
 outstartfile = filestart
 outfsize = float('inf')
 while True:
 if outstartfile >= outfsize: # stop when function signals False
 break
-list_file_retu = CatFileListFiles(infile, fmttype, outstartfile, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, True)
+list_file_retu = CatFileListFiles(infile, fmttype, outstartfile, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend, verbose, newstyle, True)
 if list_file_retu is False: # stop when function signals False
 outretval.append(list_file_retu)
 else:
@@ -10875,30 +11555,31 @@ def StackedCatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, se
 if(returnfp):
 return infile
 else:
-
+try:
+infile.close()
+except AttributeError:
+return False
 return outretval


-def MultipleStackedCatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+def MultipleStackedCatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
 if(isinstance(infile, (list, tuple, ))):
 pass
 else:
 infile = [infile]
 outretval = {}
 for curfname in infile:
-outretval[curfname] = StackedCatFileListFiles(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
+outretval[curfname] = StackedCatFileListFiles(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
 return outretval


-def CatFileStringListFiles(instr, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
+def CatFileStringListFiles(instr, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, newstyle=False, returnfp=False):
 fp = MkTempFile(instr)
-listarrayfiles = CatFileListFiles(instr, "auto", filestart, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, returnfp)
+listarrayfiles = CatFileListFiles(instr, "auto", filestart, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend, verbose, newstyle, returnfp)
 return listarrayfiles


 def TarFileListFiles(infile, verbose=False, returnfp=False):
-if(verbose):
-logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
 if(infile == "-"):
 infile = MkTempFile()
 shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
@@ -10934,10 +11615,8 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
 if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
 formatspecs = formatspecs[compresscheck]
 if(compresscheck=="zstd"):
-if '
-infile = ZstdFile(
-elif 'pyzstd' in sys.modules:
-infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+if 'zstd' in compressionsupport:
+infile = zstd.ZstdFile(infile, mode="rb")
 tarfp = tarfile.open(fileobj=infile, mode="r")
 else:
 tarfp = tarfile.open(fileobj=infile, mode="r")
@@ -10946,10 +11625,8 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
 if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
 formatspecs = formatspecs[compresscheck]
 if(compresscheck=="zstd"):
-if '
-infile = ZstdFile(
-elif 'pyzstd' in sys.modules:
-infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+if 'zstd' in compressionsupport:
+infile = zstd.ZstdFile(infile, mode="rb")
 tarfp = tarfile.open(fileobj=infile, mode="r")
 else:
 tarfp = tarfile.open(infile, "r")
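Both zstd branches now funnel through a single zstd.ZstdFile wrapper before handing the stream to tarfile. The same pattern with the third-party pyzstd package, which exposes a ZstdFile of the same shape (an assumption; the archive name is a placeholder):

    import tarfile

    import pyzstd  # third-party: pip install pyzstd

    # Wrap the compressed stream, then let tarfile read the decompressed bytes.
    with open("archive.tar.zst", "rb") as raw:
        with pyzstd.ZstdFile(raw, mode="rb") as z:
            with tarfile.open(fileobj=z, mode="r") as tarfp:
                print(tarfp.getnames())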
@@ -11019,8 +11696,6 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):


 def ZipFileListFiles(infile, verbose=False, returnfp=False):
-if(verbose):
-logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
 if(infile == "-"):
 infile = MkTempFile()
 shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
@@ -11146,8 +11821,6 @@ if(not rarfile_support):

 if(rarfile_support):
 def RarFileListFiles(infile, verbose=False, returnfp=False):
-if(verbose):
-logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
 if(not os.path.exists(infile) or not os.path.isfile(infile)):
 return False
 if(not rarfile.is_rarfile(infile) and not rarfile.is_rarfile_sfx(infile)):
@@ -11275,14 +11948,15 @@ if(not py7zr_support):

 if(py7zr_support):
 def SevenZipFileListFiles(infile, verbose=False, returnfp=False):
-if(verbose):
-logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
 if(not os.path.exists(infile) or not os.path.isfile(infile)):
 return False
 lcfi = 0
 returnval = {}
 szpfp = py7zr.SevenZipFile(infile, mode="r")
-
+try:
+file_content = szpfp.readall()
+except AttributeError:
+file_content = sevenzip_readall(infile)
 #sztest = szpfp.testzip()
 sztestalt = szpfp.test()
 if(sztestalt):
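The 7z listing now prefers py7zr's readall(), which returns a mapping of member names to in-memory streams, and only falls back to the module's own sevenzip_readall helper when that method is missing. readall() in isolation (the archive name is a placeholder):

    import py7zr  # third-party: pip install py7zr

    with py7zr.SevenZipFile("example.7z", mode="r") as szpfp:
        file_content = szpfp.readall()  # dict: member name -> file-like object
        for name, bio in file_content.items():
            print(name, len(bio.read()))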
@@ -11326,7 +12000,10 @@ if(py7zr_support):
 printfname = member.filename
 if(ftype == 0):
 fsize = len(file_content[member.filename].read())
-
+try:
+file_content[member.filename].close()
+except AttributeError:
+pass
 try:
 fuid = int(os.getuid())
 except (KeyError, AttributeError):
@@ -11370,8 +12047,6 @@ if(py7zr_support):


 def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict__, seektoend=False, newstyle=False, returnfp=False):
-if(verbose):
-logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
 checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
 if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
 formatspecs = formatspecs[checkcompressfile]
@@ -11398,44 +12073,6 @@ def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compress
 outarray, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
 return listarrayfiles

-"""
-PyNeoFile compatibility layer
-"""
-
-def make_empty_file_pointer_neo(fp, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
-return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)
-
-def make_empty_archive_file_pointer_neo(fp, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
-return make_empty_file_pointer_neo(fp, fmttype, checksumtype, formatspecs, encoding)
-
-def make_empty_file_neo(outfile=None, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
-return MakeEmptyFile(outfile, fmttype, "auto", False, None, compressionlistalt, checksumtype, formatspecs, returnfp)
-
-def make_empty_archive_file_neo(outfile=None, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
-return make_empty_file_neo(outfile, fmttype, checksumtype, formatspecs, encoding, returnfp)
-
-def pack_neo(infiles, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["md5", "md5", "md5", "md5"], encoding="UTF-8", compression="auto", compression_level=None, returnfp=False):
-return PackCatFile(infiles, outfile, False, "auto", compression, False, compression_level, compressionlistalt, False, checksumtypes, [], {}, formatspecs, False, returnfp)
-
-def archive_to_array_neo(infile, formatspecs=__file_format_multi_dict__, listonly=False, skipchecksum=False, uncompress=True, returnfp=False):
-return CatFileToArray(infile, "auto", 0, 0, 0, listonly, True, uncompress, skipchecksum, formatspecs, False, returnfp)
-
-def unpack_neo(infile, outdir='.', formatspecs=__file_format_multi_dict__, skipchecksum=False, uncompress=True, returnfp=False):
-return UnPackCatFile(infile, outdir, False, 0, 0, skipchecksum, formatspecs, True, True, False, False, returnfp)
-
-def repack_neo(infile, outfile=None, formatspecs=__file_format_dict__, checksumtypes=["md5", "md5", "md5", "md5"], compression="auto", compression_level=None, returnfp=False):
-return RePackCatFile(infile, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)
-
-def validate_neo(infile, formatspecs=__file_format_multi_dict__, verbose=False, return_details=False, returnfp=False):
-return CatFileValidate(infile, "auto", formatspecs, False, verbose, returnfp)
-
-def listfiles_neo(infile, formatspecs=__file_format_multi_dict__, advanced=False, include_dirs=True, returnfp=False):
-return CatFileListFiles(infile, "auto", 0, 0, False, formatspecs, False, True, advanced, returnfp)
-
-def convert_foreign_to_neo(infile, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["md5", "md5", "md5", "md5"], compression="auto", compression_level=None, returnfp=False):
-intmp = InFileToArray(infile, 0, 0, 0, False, True, False, formatspecs, False, False)
-return RePackCatFile(intmp, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)
-
 def detect_cwd(ftp, file_dir):
 """
 Test whether cwd into file_dir works. Returns True if it does,
@@ -13501,7 +14138,6 @@ def run_http_file_server(fileobj, url, on_progress=None, backlog=5):
 if not ah or not ah.strip().lower().startswith("basic "):
 return False
 try:
-import base64
 b64 = ah.strip().split(" ", 1)[1]
 raw = base64.b64decode(_to_bytes(b64))
 try: raw_txt = raw.decode("utf-8")