PyArchiveFile 0.25.2__py3-none-any.whl → 0.27.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pyarchivefile-0.25.2.data → pyarchivefile-0.27.0.data}/scripts/archivefile.py +31 -22
- {pyarchivefile-0.25.2.dist-info → pyarchivefile-0.27.0.dist-info}/METADATA +1 -1
- pyarchivefile-0.27.0.dist-info/RECORD +10 -0
- pyarchivefile.py +1486 -849
- pyarchivefile-0.25.2.dist-info/RECORD +0 -10
- {pyarchivefile-0.25.2.data → pyarchivefile-0.27.0.data}/scripts/archiveneofile.py +0 -0
- {pyarchivefile-0.25.2.data → pyarchivefile-0.27.0.data}/scripts/neoarchivefile.py +0 -0
- {pyarchivefile-0.25.2.dist-info → pyarchivefile-0.27.0.dist-info}/WHEEL +0 -0
- {pyarchivefile-0.25.2.dist-info → pyarchivefile-0.27.0.dist-info}/licenses/LICENSE +0 -0
- {pyarchivefile-0.25.2.dist-info → pyarchivefile-0.27.0.dist-info}/top_level.txt +0 -0
- {pyarchivefile-0.25.2.dist-info → pyarchivefile-0.27.0.dist-info}/zip-safe +0 -0
pyarchivefile.py
CHANGED
@@ -14,7 +14,7 @@
 Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
 Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski

-    $FileInfo: pyarchivefile.py - Last Update: 11/
+    $FileInfo: pyarchivefile.py - Last Update: 11/14/2025 Ver. 0.27.0 RC 1 - Author: cooldude2k $
 '''

 from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -24,7 +24,6 @@ import re
 import sys
 import time
 import stat
-import zlib
 import mmap
 import hmac
 import base64
@@ -38,8 +37,8 @@ import zipfile
 import binascii
 import datetime
 import platform
+import collections
 from io import StringIO, BytesIO
-from collections import namedtuple
 import posixpath  # POSIX-safe joins/normpaths
 try:
     from backports import tempfile
@@ -50,12 +49,16 @@ try:
     from http.server import BaseHTTPRequestHandler, HTTPServer
     from socketserver import TCPServer
     from urllib.parse import urlparse, parse_qs
-    import base64
 except ImportError:
     from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
     from SocketServer import TCPServer
     from urlparse import urlparse, parse_qs
-
+
+try:
+    # Python 3.8+ only
+    from multiprocessing import shared_memory
+except ImportError:
+    shared_memory = None

 # FTP Support
 ftpssl = True
@@ -146,6 +149,15 @@ try:
 except Exception:
     PATH_TYPES = (basestring,)

+def running_interactively():
+    main = sys.modules.get("__main__")
+    no_main_file = not hasattr(main, "__file__")
+    interactive_flag = bool(getattr(sys.flags, "interactive", 0))
+    return no_main_file or interactive_flag
+
+if running_interactively():
+    logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
+
 def _ensure_text(s, encoding="utf-8", errors="replace", allow_none=False):
     """
     Normalize any input to text_type (unicode on Py2, str on Py3).
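Note: the new running_interactively() helper above turns on DEBUG logging whenever the module is imported from a REPL or notebook rather than run from a script. A minimal standalone sketch of the same check; plain sys.stdout stands in here for the module's wrapped PY_STDOUT_TEXT stream:

    import logging
    import sys

    def running_interactively():
        main = sys.modules.get("__main__")
        no_main_file = not hasattr(main, "__file__")                   # REPL/notebook: __main__ has no __file__
        interactive_flag = bool(getattr(sys.flags, "interactive", 0))  # python -i
        return no_main_file or interactive_flag

    if running_interactively():
        logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)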
@@ -166,7 +178,6 @@ def _ensure_text(s, encoding="utf-8", errors="replace", allow_none=False):

     # Handle pathlib.Path & other path-like objects
     try:
-        import os
         if hasattr(os, "fspath"):
             fs = os.fspath(s)
             if isinstance(fs, text_type):
@@ -207,7 +218,6 @@ except ImportError:

 # Windows-specific setup
 if os.name == "nt":
-    import io
     def _wrap(stream):
         buf = getattr(stream, "buffer", None)
         is_tty = getattr(stream, "isatty", lambda: False)()
@@ -416,9 +426,13 @@ def is_only_nonprintable(var):
 __file_format_multi_dict__ = {}
 __file_format_default__ = "ArchiveFile"
 __include_defaults__ = True
-
+__use_inmem__ = True
+__use_memfd__ = True
 __use_spoolfile__ = False
 __use_spooldir__ = tempfile.gettempdir()
+__use_new_style__ = True
+__use_advanced_list__ = True
+__use_alt_inode__ = False
 BYTES_PER_KiB = 1024
 BYTES_PER_MiB = 1024 * BYTES_PER_KiB
 # Spool: not tiny, but won’t blow up RAM if many are in use
@@ -440,7 +454,13 @@ if('PYARCHIVEFILE_CONFIG_FILE' in os.environ and os.path.exists(os.environ['PYAR
 else:
     prescriptpath = get_importing_script_path()
     if(prescriptpath is not None):
-
+        if(__use_ini_file__ and not __use_json_file__):
+            scriptconf = os.path.join(os.path.dirname(prescriptpath), __use_ini_name__)
+        elif(__use_json_file__ and not __use_ini_file__):
+            scriptconf = os.path.join(os.path.dirname(prescriptpath), __use_json_name__)
+        else:
+            scriptconf = ""
+            prescriptpath = None
     else:
         scriptconf = ""
 if os.path.exists(scriptconf):
@@ -462,9 +482,13 @@ if __use_ini_file__ and os.path.exists(__config_file__):
     __file_format_default__ = decode_unicode_escape(config.get('config', 'default'))
     __program_name__ = decode_unicode_escape(config.get('config', 'proname'))
     __include_defaults__ = config.getboolean('config', 'includedef')
-
+    __use_inmem__ = config.getboolean('config', 'useinmem')
+    __use_memfd__ = config.getboolean('config', 'usememfd')
     __use_spoolfile__ = config.getboolean('config', 'usespoolfile')
     __spoolfile_size__ = config.getint('config', 'spoolfilesize')
+    __use_new_style__ = config.getboolean('config', 'newstyle')
+    __use_advanced_list__ = config.getboolean('config', 'advancedlist')
+    __use_alt_inode__ = config.getboolean('config', 'altinode')
     # Loop through all sections
     for section in config.sections():
         if section == "config":
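Note: the INI loader now also pulls the new global toggles from the [config] section via config.getboolean()/config.getint(). A hypothetical configuration exercising those keys (illustrative values, not the shipped defaults), parsed with the stdlib configparser:

    import configparser
    import textwrap

    sample = textwrap.dedent(u"""\
        [config]
        default = ArchiveFile
        proname = PyArchiveFile
        includedef = true
        useinmem = true
        usememfd = true
        usespoolfile = false
        spoolfilesize = 1048576
        newstyle = true
        advancedlist = true
        altinode = false
        """)

    config = configparser.ConfigParser()
    config.read_string(sample)
    print(config.getboolean('config', 'useinmem'))   # True
    print(config.getint('config', 'spoolfilesize'))  # 1048576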
@@ -472,8 +496,7 @@ if __use_ini_file__ and os.path.exists(__config_file__):

     required_keys = [
         "len", "hex", "ver", "name",
-        "magic", "delimiter", "extension"
-        "newstyle", "advancedlist", "altinode"
+        "magic", "delimiter", "extension"
     ]

     # Py2+Py3 compatible key presence check
@@ -493,9 +516,6 @@ if __use_ini_file__ and os.path.exists(__config_file__):
             'format_hex': config.get(section, 'hex'),
             'format_delimiter': delim,
             'format_ver': config.get(section, 'ver'),
-            'new_style': config.getboolean(section, 'newstyle'),
-            'use_advanced_list': config.getboolean(section, 'advancedlist'),
-            'use_alt_inode': config.getboolean(section, 'altinode'),
             'format_extension': decode_unicode_escape(config.get(section, 'extension')),
         }
     })
@@ -556,16 +576,19 @@ elif __use_json_file__ and os.path.exists(__config_file__):
     cfg_config = cfg.get('config', {}) or {}
     __file_format_default__ = decode_unicode_escape(_get(cfg_config, 'default', ''))
     __program_name__ = decode_unicode_escape(_get(cfg_config, 'proname', ''))
-    __include_defaults__ = _to_bool(_get(cfg_config, 'includedef',
-
+    __include_defaults__ = _to_bool(_get(cfg_config, 'includedef', True))
+    __use_inmem__ = _to_bool(_get(cfg_config, 'useinmem', True))
+    __use_memfd__ = _to_bool(_get(cfg_config, 'usememfd', True))
     __use_spoolfile__ = _to_bool(_get(cfg_config, 'usespoolfile', False))
     __spoolfile_size__ = _to_int(_get(cfg_config, 'spoolfilesize', DEFAULT_SPOOL_MAX))
+    __use_new_style__ = _to_bool(_get(cfg_config, 'usespoolfile', True))
+    __use_advanced_list__ = _to_bool(_get(cfg_config, 'usespoolfile', True))
+    __use_alt_inode__ = _to_bool(_get(cfg_config, 'usespoolfile', False))

     # --- iterate format sections (everything except "config") ---
     required_keys = [
         "len", "hex", "ver", "name",
-        "magic", "delimiter", "extension"
-        "newstyle", "advancedlist", "altinode"
+        "magic", "delimiter", "extension"
     ]

     for section_name, section in cfg.items():
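Note: the JSON loader mirrors the INI path, but as published the three style globals in the '+' lines above are all populated from the 'usespoolfile' key (there are no dedicated 'newstyle'/'advancedlist'/'altinode' lookups), which reads like a copy-paste slip. A hypothetical JSON config of the expected shape:

    import json

    sample = json.loads('''
    {
      "config": {
        "default": "ArchiveFile",
        "proname": "PyArchiveFile",
        "includedef": true,
        "useinmem": true,
        "usememfd": true,
        "usespoolfile": false,
        "spoolfilesize": 1048576
      }
    }
    ''')
    print(sample["config"]["useinmem"])  # True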
@@ -583,9 +606,6 @@ elif __use_json_file__ and os.path.exists(__config_file__):
         fmt_hex = decode_unicode_escape(_get(section, 'hex', ''))
         fmt_ver = decode_unicode_escape(_get(section, 'ver', ''))
         delim = decode_unicode_escape(_get(section, 'delimiter', ''))
-        new_style = _to_bool(_get(section, 'newstyle', False))
-        adv_list = _to_bool(_get(section, 'advancedlist', False))
-        alt_inode = _to_bool(_get(section, 'altinode', False))
         extension = decode_unicode_escape(_get(section, 'extension', ''))

         # keep your delimiter validation semantics
@@ -600,9 +620,6 @@ elif __use_json_file__ and os.path.exists(__config_file__):
             'format_hex': fmt_hex,
             'format_delimiter': delim,
             'format_ver': fmt_ver,
-            'new_style': new_style,
-            'use_advanced_list': adv_list,
-            'use_alt_inode': alt_inode,
             'format_extension': extension,
         }
     })
@@ -621,6 +638,7 @@ elif __use_json_file__ and not os.path.exists(__config_file__):
 if not __use_ini_file__ and not __include_defaults__:
     __include_defaults__ = True
 if __include_defaults__:
+    # Arc / Neo
     add_format(__file_format_multi_dict__, "ArchiveFile", "ArchiveFile", ".arc", "ArchiveFile")
     add_format(__file_format_multi_dict__, "NeoFile", "NeoFile", ".neo", "NeoFile")

@@ -633,21 +651,18 @@ __file_format_len__ = __file_format_multi_dict__[__file_format_default__]['forma
 __file_format_hex__ = __file_format_multi_dict__[__file_format_default__]['format_hex']
 __file_format_delimiter__ = __file_format_multi_dict__[__file_format_default__]['format_delimiter']
 __file_format_ver__ = __file_format_multi_dict__[__file_format_default__]['format_ver']
-__use_new_style__ = __file_format_multi_dict__[__file_format_default__]['new_style']
-__use_advanced_list__ = __file_format_multi_dict__[__file_format_default__]['use_advanced_list']
-__use_alt_inode__ = __file_format_multi_dict__[__file_format_default__]['use_alt_inode']
 __file_format_extension__ = __file_format_multi_dict__[__file_format_default__]['format_extension']
 __file_format_dict__ = __file_format_multi_dict__[__file_format_default__]
 __project__ = __program_name__
 __program_alt_name__ = __program_name__
 __project_url__ = "https://github.com/GameMaker2k/PyArchiveFile"
 __project_release_url__ = __project_url__+"/releases/latest"
-__version_info__ = (0,
-__version_date_info__ = (2025, 11,
+__version_info__ = (0, 27, 0, "RC 1", 1)
+__version_date_info__ = (2025, 11, 14, "RC 1", 1)
 __version_date__ = str(__version_date_info__[0]) + "." + str(
     __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
 __revision__ = __version_info__[3]
-__revision_id__ = "$Id:
+__revision_id__ = "$Id: 1cfe8ace647fdc1e6f8536a80322ea8257584b78 $"
 if(__version_info__[4] is not None):
     __version_date_plusrc__ = __version_date__ + \
         "-" + str(__version_date_info__[4])
@@ -659,6 +674,9 @@ if(__version_info__[3] is not None):
 if(__version_info__[3] is None):
     __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])

+_logger = logging.getLogger(__project__)   # library-style logger
+_logger.addHandler(logging.NullHandler())  # don't emit logs unless app configures logging
+
 # From: https://stackoverflow.com/a/28568003
 # By Phaxmohdem
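Note: attaching a NullHandler is the standard convention for library loggers, so importing pyarchivefile stays silent until the application opts in. Sketch of the consuming side, assuming the logger name equals __project__ (i.e. "PyArchiveFile" unless a config file overrides proname):

    import logging

    logging.basicConfig(level=logging.INFO)                 # application-level opt-in
    logging.getLogger("PyArchiveFile").debug("hidden")      # below INFO: suppressed
    logging.getLogger("PyArchiveFile").info("now visible")  # emitted once configured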
@@ -796,9 +814,9 @@ except Exception:
 geturls_ua_pyfile_python = "Mozilla/5.0 (compatible; {proname}/{prover}; +{prourl})".format(
     proname=__project__, prover=__version__, prourl=__project_url__)
 if(platform.python_implementation() != ""):
-    py_implementation = platform.python_implementation()
+    py_implementation = platform.python_implementation()+str(platform.python_version_tuple()[0])
 if(platform.python_implementation() == ""):
-    py_implementation = "CPython"
+    py_implementation = "CPython"+str(platform.python_version_tuple()[0])
 geturls_ua_pyfile_python_alt = "Mozilla/5.0 ({osver}; {archtype}; +{prourl}) {pyimp}/{pyver} (KHTML, like Gecko) {proname}/{prover}".format(osver=platform.system(
 )+" "+platform.release(), archtype=platform.machine(), prourl=__project_url__, pyimp=py_implementation, pyver=platform.python_version(), proname=__project__, prover=__version__)
 geturls_ua_googlebot_google = "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"
@@ -814,13 +832,19 @@ geturls_headers_googlebot_google_old = {'Referer': "http://google.com/", 'User-A

 compressionsupport = []
 try:
-
+    try:
+        import compression.gzip as gzip
+    except ImportError:
+        import gzip
     compressionsupport.append("gz")
     compressionsupport.append("gzip")
 except ImportError:
     pass
 try:
-
+    try:
+        import compression.bz2 as bz2
+    except ImportError:
+        import bz2
     compressionsupport.append("bz2")
     compressionsupport.append("bzip2")
 except ImportError:
@@ -841,20 +865,20 @@ except ImportError:
     pass
 '''
 try:
-
+    try:
+        import compression.zstd as zstd
+    except ImportError:
+        import pyzstd.zstdfile as zstd
     compressionsupport.append("zst")
     compressionsupport.append("zstd")
     compressionsupport.append("zstandard")
 except ImportError:
+    pass
+try:
     try:
-        import
-        compressionsupport.append("zst")
-        compressionsupport.append("zstd")
-        compressionsupport.append("zstandard")
+        import compression.lzma as lzma
     except ImportError:
-
-        try:
-            import lzma
+        import lzma
     compressionsupport.append("lzma")
     compressionsupport.append("xz")
 except ImportError:
@@ -864,12 +888,18 @@ except ImportError:
     compressionsupport.append("xz")
 except ImportError:
     pass
-
-
-
-
-
-
+try:
+    try:
+        import compression.zlib as zlib
+    except ImportError:
+        import zlib
+    compressionsupport.append("zlib")
+    compressionsupport.append("zl")
+    compressionsupport.append("zz")
+    compressionsupport.append("Z")
+    compressionsupport.append("z")
+except ImportError:
+    pass
 compressionlist = ['auto']
 compressionlistalt = []
 outextlist = []
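Note: each codec block now tries the compression.* namespace first and falls back to the legacy top-level module. That namespace was added to the standard library in Python 3.14 by PEP 784, where compression.zstd is new and the gzip/bz2/lzma/zlib entries re-export the existing modules. The same prefer-new, fall-back pattern isolated for one codec (pyzstd is the third-party fallback the code above uses):

    try:
        import compression.zstd as zstd        # stdlib namespace, Python 3.14+ (PEP 784)
    except ImportError:
        try:
            import pyzstd.zstdfile as zstd     # third-party fallback
        except ImportError:
            zstd = None                        # no zstandard support available

    print("zstd support:", zstd is not None)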
@@ -1028,6 +1058,28 @@ def VerbosePrintOutReturn(dbgtxt, outtype="log", dbgenable=True, dgblevel=20, **
     VerbosePrintOut(dbgtxt, outtype, dbgenable, dgblevel, **kwargs)
     return dbgtxt

+def to_ns(timestamp):
+    """
+    Convert a second-resolution timestamp (int or float)
+    into a nanosecond timestamp (int) by zero-padding.
+    Works in Python 2 and Python 3.
+    """
+    try:
+        # Convert incoming timestamp to float so it works for int or float
+        seconds = float(timestamp)
+    except (TypeError, ValueError):
+        raise ValueError("Timestamp must be int or float")
+
+    # Multiply by 1e9 to get nanoseconds, then cast to int
+    return int(seconds * 1000000000)
+
+def format_ns_utc(ts_ns, fmt='%Y-%m-%d %H:%M:%S'):
+    ts_ns = int(ts_ns)
+    sec, ns = divmod(ts_ns, 10**9)
+    dt = datetime.datetime.utcfromtimestamp(sec).replace(microsecond=ns // 1000)
+    base = dt.strftime(fmt)
+    ns_str = "%09d" % ns
+    return base + "." + ns_str

 def _split_posix(name):
     """
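Note: the new timestamp helpers widen second-resolution times to the nanosecond values used by the header fields. A short usage sketch; format_ns_utc() also sets the datetime's microseconds, which only shows if fmt contains %f, while the nine-digit nanosecond suffix is always appended:

    import time
    from pyarchivefile import to_ns, format_ns_utc  # module-level helpers added above

    now_ns = to_ns(time.time())
    print(format_ns_utc(now_ns))   # e.g. '2025-11-14 12:34:56.789012345'
    print(to_ns(1))                # 1000000000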
@@ -2051,34 +2103,53 @@ def _normalize_initial_data(data, isbytes, encoding, errors=None):


 def MkTempFile(data=None,
-               inmem=
+               inmem=__use_inmem__, usememfd=__use_memfd__,
                isbytes=True,
-               prefix=
+               prefix=__program_name__,
                delete=True,
                encoding="utf-8",
-               newline=None,
+               newline=None,
+               text_errors="strict",
                dir=None,
                suffix="",
                use_spool=__use_spoolfile__,
+               autoswitch_spool=False,
                spool_max=__spoolfile_size__,
-               spool_dir=__use_spooldir__
+               spool_dir=__use_spooldir__,
+               reset_to_start=True,
+               memfd_name=__program_name__,
+               memfd_allow_sealing=False,
+               memfd_flags_extra=0,
+               on_create=None):
     """
     Return a file-like handle with consistent behavior on Py2.7 and Py3.x.

     Storage:
-      - inmem=True
-
-      - inmem=
+      - inmem=True, usememfd=True, isbytes=True and memfd available
+          -> memfd-backed anonymous file (binary)
+      - inmem=True, otherwise
+          -> BytesIO (bytes) or StringIO (text)
+      - inmem=False, use_spool=True
+          -> SpooledTemporaryFile (binary), optionally TextIOWrapper for text
+      - inmem=False, use_spool=False
+          -> NamedTemporaryFile (binary), optionally TextIOWrapper for text

     Text vs bytes:
       - isbytes=True  -> file expects bytes; 'data' must be bytes-like
-      - isbytes=False -> file expects text; 'data' must be text (unicode/str). Newline translation and
-        apply only for spooled/named files (not BytesIO/StringIO).
+      - isbytes=False -> file expects text; 'data' must be text (unicode/str). Newline translation and
+        encoding apply only for spooled/named files (not BytesIO/StringIO).

     Notes:
-      - On Windows, NamedTemporaryFile(delete=True) keeps the file open and cannot be reopened by
-        Use delete=False if you need to pass the path elsewhere.
-      - For text: in-memory StringIO ignores 'newline' (as usual).
+      - On Windows, NamedTemporaryFile(delete=True) keeps the file open and cannot be reopened by
+        other processes. Use delete=False if you need to pass the path elsewhere.
+      - For text: in-memory StringIO ignores 'newline' and 'text_errors' (as usual).
+      - When available, and if usememfd=True, memfd is used only for inmem=True and isbytes=True,
+        providing an anonymous in-memory file descriptor (Linux-only). Text in-memory still uses
+        StringIO to preserve newline semantics.
+      - If autoswitch_spool=True and initial data size exceeds spool_max, in-memory storage is
+        skipped and a spooled file is used instead (if use_spool=True).
+      - If on_create is not None, it is called as on_create(fp, kind) where kind is one of:
+        "memfd", "bytesio", "stringio", "spool", "disk".
     """

     # -- sanitize simple params (avoid None surprises) --
@@ -2110,23 +2181,65 @@ def MkTempFile(data=None,
     else:
         init = None

+    # Size of init for autoswitch; only meaningful for bytes
+    init_len = len(init) if (init is not None and isbytes) else None
+
     # -------- In-memory --------
     if inmem:
-
-
-
-
-
-
-
-
+        # If autoswitch is enabled and data is larger than spool_max, and
+        # spooling is allowed, skip the in-memory branch and fall through
+        # to the spool/disk logic below.
+        if autoswitch_spool and use_spool and init_len is not None and init_len > spool_max:
+            pass  # fall through to spool/disk sections
+        else:
+            # Use memfd only for bytes, and only where available (Linux, Python 3.8+)
+            if usememfd and isbytes and hasattr(os, "memfd_create"):
+                name = memfd_name or prefix or "MkTempFile"
+                flags = 0
+                # Close-on-exec is almost always what you want for temps
+                if hasattr(os, "MFD_CLOEXEC"):
+                    flags |= os.MFD_CLOEXEC
+                # Optional sealing support if requested and available
+                if memfd_allow_sealing and hasattr(os, "MFD_ALLOW_SEALING"):
+                    flags |= os.MFD_ALLOW_SEALING
+                # Extra custom flags (e.g. hugepage flags) if caller wants them
+                if memfd_flags_extra:
+                    flags |= memfd_flags_extra
+
+                fd = os.memfd_create(name, flags)
+                # Binary read/write file-like object backed by RAM
+                f = os.fdopen(fd, "w+b")
+
+                if init is not None:
+                    f.write(init)
+                    if reset_to_start:
+                        f.seek(0)
+
+                if on_create is not None:
+                    on_create(f, "memfd")
+                return f
+
+            # Fallback: pure Python in-memory objects
+            if isbytes:
+                f = io.BytesIO(init if init is not None else b"")
+                kind = "bytesio"
+            else:
+                # newline/text_errors not enforced for StringIO; matches stdlib semantics
+                f = io.StringIO(init if init is not None else "")
+                kind = "stringio"
+
+            if reset_to_start:
+                f.seek(0)
+
+            if on_create is not None:
+                on_create(f, kind)
+            return f

     # Helper: wrap a binary file into a text file with encoding/newline
     def _wrap_text(handle):
         # For both Py2 & Py3, TextIOWrapper gives consistent newline/encoding behavior
-
-
-        return tw
+        return io.TextIOWrapper(handle, encoding=encoding,
+                                newline=newline, errors=text_errors)

     # -------- Spooled (RAM then disk) --------
     if use_spool:
@@ -2134,19 +2247,33 @@ def MkTempFile(data=None,
         bin_mode = "w+b"  # read/write, binary
         b = tempfile.SpooledTemporaryFile(max_size=spool_max, mode=bin_mode, dir=spool_dir)
         f = b if isbytes else _wrap_text(b)
+
         if init is not None:
             f.write(init)
+            if reset_to_start:
+                f.seek(0)
+        elif reset_to_start:
             f.seek(0)
+
+        if on_create is not None:
+            on_create(f, "spool")
         return f

     # -------- On-disk temp (NamedTemporaryFile) --------
     # Always create binary file; wrap for text if needed for uniform Py2/3 behavior
-    b = tempfile.NamedTemporaryFile(mode="w+b", prefix=prefix, suffix=suffix,
+    b = tempfile.NamedTemporaryFile(mode="w+b", prefix=prefix, suffix=suffix,
+                                    dir=dir, delete=delete)
     f = b if isbytes else _wrap_text(b)

     if init is not None:
         f.write(init)
+        if reset_to_start:
+            f.seek(0)
+    elif reset_to_start:
         f.seek(0)
+
+    if on_create is not None:
+        on_create(f, "disk")
     return f
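Note: MkTempFile() now defaults to RAM-backed storage (__use_inmem__/__use_memfd__) and reports the backing it actually chose through the on_create callback. A hedged usage sketch; kind is 'memfd' only on Linux with Python 3.8+, otherwise 'bytesio' for binary in-memory handles:

    from pyarchivefile import MkTempFile

    seen = []
    fp = MkTempFile(b"hello", inmem=True,
                    on_create=lambda f, kind: seen.append(kind))
    print(seen)       # ['memfd'] or ['bytesio']
    print(fp.read())  # b'hello' -- reset_to_start=True rewound after the initial write
    fp.close()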
@@ -2482,6 +2609,384 @@ def _is_valid_zlib_header(cmf, flg):
         return False
     return True

+class SharedMemoryFile(object):
+    """
+    File-like wrapper around multiprocessing.shared_memory.SharedMemory.
+
+    Binary-only API, intended to behave similarly to a regular file opened in
+    'rb', 'wb', or 'r+b' modes (but backed by a fixed-size shared memory block).
+
+    Notes:
+      - Requires Python 3.8+ at runtime to actually use SharedMemory.
+      - On Python 2, importing is fine but constructing will raise RuntimeError.
+      - There is no automatic resizing; buffer size is fixed by SharedMemory.
+      - No real fileno(); this does not represent an OS-level file descriptor.
+      - For text mode, wrap this with io.TextIOWrapper on Python 3:
+            f = SharedMemoryFile(...)
+            tf = io.TextIOWrapper(f, encoding="utf-8")
+    """
+
+    def __init__(self, shm=None, name=None, create=False, size=0,
+                 mode='r+b', offset=0, unlink_on_close=False):
+        """
+        Parameters:
+          shm   : existing SharedMemory object (preferred).
+          name  : name of shared memory block (for attach or create).
+          create: if True, create new SharedMemory; else attach existing.
+          size  : size in bytes (required when create=True).
+          mode  : like 'rb', 'wb', 'r+b', 'ab' (binary only; 't' not supported).
+          offset: starting offset within the shared memory buffer.
+          unlink_on_close: if True, call shm.unlink() when close() is called.
+
+        Usage examples:
+
+            # Create new block and file-like wrapper
+            f = SharedMemoryFile(name=None, create=True, size=4096, mode='r+b')
+
+            # Attach to existing shared memory by name
+            f = SharedMemoryFile(name="xyz", create=False, mode='r+b')
+
+            # Wrap an existing SharedMemory object
+            shm = shared_memory.SharedMemory(create=True, size=1024)
+            f = SharedMemoryFile(shm=shm, mode='r+b')
+        """
+        if shared_memory is None:
+            # No SharedMemory available on this interpreter
+            raise RuntimeError("multiprocessing.shared_memory.SharedMemory "
+                               "is not available on this Python version")
+
+        if 't' in mode:
+            raise ValueError("SharedMemoryFile is binary-only; "
+                             "wrap it with io.TextIOWrapper for text")
+
+        self.mode = mode
+        self._closed = False
+        self._unlinked = False
+        self._unlink_on_close = bool(unlink_on_close)
+
+        if shm is not None:
+            self._shm = shm
+        else:
+            # name may be None when create=True
+            self._shm = shared_memory.SharedMemory(name=name, create=create, size=size)
+
+        self._buf = self._shm.buf
+        self._base_offset = int(offset)
+        if self._base_offset < 0 or self._base_offset > len(self._buf):
+            raise ValueError("offset out of range")
+
+        # We treat the accessible region as [base_offset, len(buf))
+        self._size = len(self._buf) - self._base_offset
+        self._pos = 0  # logical file position within that region
+
+    # ---------- basic properties ----------
+
+    @property
+    def name(self):
+        # SharedMemory name (may be None for anonymous)
+        return getattr(self._shm, "name", None)
+
+    @property
+    def closed(self):
+        return self._closed
+
+    def readable(self):
+        return ('r' in self.mode) or ('+' in self.mode)
+
+    def writable(self):
+        return any(ch in self.mode for ch in ('w', 'a', '+'))
+
+    def seekable(self):
+        return True
+
+    # ---------- core helpers ----------
+
+    def _check_closed(self):
+        if self._closed:
+            raise ValueError("I/O operation on closed SharedMemoryFile")
+
+    def _clamp_pos(self, pos):
+        if pos < 0:
+            return 0
+        if pos > self._size:
+            return self._size
+        return pos
+
+    def _region_bounds(self):
+        """Return (start, end) absolute indices into the SharedMemory buffer."""
+        start = self._base_offset + self._pos
+        end = self._base_offset + self._size
+        return start, end
+
+    # ---------- positioning ----------
+
+    def seek(self, offset, whence=0):
+        """
+        Seek to a new file position.
+
+        whence: 0 = from start, 1 = from current, 2 = from end.
+        """
+        self._check_closed()
+        offset = int(offset)
+        whence = int(whence)
+
+        if whence == 0:    # from start
+            new_pos = offset
+        elif whence == 1:  # from current
+            new_pos = self._pos + offset
+        elif whence == 2:  # from end
+            new_pos = self._size + offset
+        else:
+            raise ValueError("invalid whence (expected 0, 1, or 2)")
+
+        self._pos = self._clamp_pos(new_pos)
+        return self._pos
+
+    def tell(self):
+        return self._pos
+
+    # ---------- reading ----------
+
+    def read(self, size=-1):
+        """
+        Read up to 'size' bytes (or to EOF if size<0 or None).
+        Returns bytes (py3) or str (py2).
+        """
+        self._check_closed()
+        if not self.readable():
+            raise IOError("SharedMemoryFile not opened for reading")
+
+        if size is None or size < 0:
+            size = self._size - self._pos
+        else:
+            size = int(size)
+            if size < 0:
+                size = 0
+
+        if size == 0:
+            return b'' if not PY2 else ''
+
+        start, end_abs = self._region_bounds()
+        available = end_abs - (self._base_offset + self._pos)
+        if available <= 0:
+            return b'' if not PY2 else ''
+
+        size = min(size, available)
+
+        abs_start = self._base_offset + self._pos
+        abs_end = abs_start + size
+
+        chunk = self._buf[abs_start:abs_end]
+        if PY2:
+            data = bytes(chunk)  # bytes() -> str in py2
+        else:
+            data = bytes(chunk)
+
+        self._pos += len(data)
+        return data
+
+    def readline(self, size=-1):
+        """
+        Read a single line (ending with '\\n' or EOF).
+        If size >= 0, at most that many bytes are returned.
+        """
+        self._check_closed()
+        if not self.readable():
+            raise IOError("SharedMemoryFile not opened for reading")
+
+        # Determine maximum bytes we can scan
+        start, end_abs = self._region_bounds()
+        remaining = end_abs - (self._base_offset + self._pos)
+        if remaining <= 0:
+            return b'' if not PY2 else ''
+
+        if size is not None and size >= 0:
+            size = int(size)
+            max_len = min(size, remaining)
+        else:
+            max_len = remaining
+
+        abs_start = self._base_offset + self._pos
+        abs_max = abs_start + max_len
+
+        # Work on a local bytes slice for easy .find()
+        if PY2:
+            buf_bytes = bytes(self._buf[abs_start:abs_max])
+        else:
+            buf_bytes = bytes(self._buf[abs_start:abs_max])
+
+        idx = buf_bytes.find(b'\n')
+        if idx == -1:
+            # No newline; read entire chunk
+            line_bytes = buf_bytes
+        else:
+            line_bytes = buf_bytes[:idx + 1]
+
+        self._pos += len(line_bytes)
+
+        if PY2:
+            return line_bytes  # already str
+        return line_bytes
+
+    def readinto(self, b):
+        """
+        Read bytes into a pre-allocated writable buffer (bytearray/memoryview).
+        Returns number of bytes read.
+        """
+        self._check_closed()
+        if not self.readable():
+            raise IOError("SharedMemoryFile not opened for reading")
+
+        # Normalize target buffer
+        if isinstance(b, memoryview):
+            mv = b
+        else:
+            mv = memoryview(b)
+
+        size = len(mv)
+        if size <= 0:
+            return 0
+
+        start, end_abs = self._region_bounds()
+        remaining = end_abs - (self._base_offset + self._pos)
+        if remaining <= 0:
+            return 0
+
+        size = min(size, remaining)
+
+        abs_start = self._base_offset + self._pos
+        abs_end = abs_start + size
+
+        mv[:size] = self._buf[abs_start:abs_end]
+        self._pos += size
+        return size
+
+    # ---------- writing ----------
+
+    def write(self, data):
+        """
+        Write bytes-like object to the shared memory region.
+
+        Returns number of bytes written. Will raise if not opened writable
+        or if writing would overflow the fixed-size region.
+        """
+        self._check_closed()
+        if not self.writable():
+            raise IOError("SharedMemoryFile not opened for writing")
+
+        if isinstance(data, memoryview):
+            data = bytes(data)
+        elif isinstance(data, bytearray):
+            data = bytes(data)
+
+        if not isinstance(data, binary_types):
+            raise TypeError("write() expects a bytes-like object")
+
+        data_len = len(data)
+        if data_len == 0:
+            return 0
+
+        # Handle "append" semantics roughly: start from end on first write
+        if 'a' in self.mode and self._pos == 0:
+            # Move to logical end of region
+            self._pos = self._size
+
+        start, end_abs = self._region_bounds()
+        remaining = end_abs - (self._base_offset + self._pos)
+        if data_len > remaining:
+            raise IOError("write would overflow SharedMemory region (need %d, have %d)"
+                          % (data_len, remaining))
+
+        abs_start = self._base_offset + self._pos
+        abs_end = abs_start + data_len
+
+        self._buf[abs_start:abs_end] = data
+        self._pos += data_len
+        return data_len
+
+    def flush(self):
+        """
+        No-op for shared memory; provided for file-like compatibility.
+        """
+        self._check_closed()
+        # nothing to flush
+
+    # ---------- unlink / close / context manager ----------
+
+    def unlink(self):
+        """
+        Unlink (destroy) the underlying shared memory block.
+
+        After unlink(), new processes cannot attach via name.
+        Existing attachments (including this one) can continue to use
+        the memory until they close() it.
+
+        This is idempotent: calling it more than once is safe.
+        """
+        if self._unlinked:
+            return
+
+        try:
+            self._shm.unlink()
+        except AttributeError:
+            # Should not happen on normal Python 3.8+,
+            # but keep a clear error if it does.
+            raise RuntimeError("Underlying SharedMemory object "
+                               "does not support unlink()")
+
+        self._unlinked = True
+
+    def close(self):
+        if self._closed:
+            return
+        self._closed = True
+
+        # Optionally unlink on close if requested
+        if self._unlink_on_close and not self._unlinked:
+            try:
+                self.unlink()
+            except Exception:
+                # best-effort; close anyway
+                pass
+
+        try:
+            self._shm.close()
+        except Exception:
+            pass
+
+    def __enter__(self):
+        self._check_closed()
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.close()
+
+    # ---------- iteration ----------
+
+    def __iter__(self):
+        return self
+
+    def __next__(self):
+        line = self.readline()
+        if (not line) or len(line) == 0:
+            raise StopIteration
+        return line
+
+    if PY2:
+        next = __next__
+
+    # ---------- misc helpers ----------
+
+    def fileno(self):
+        """
+        There is no real OS-level file descriptor; raise OSError for APIs
+        that require a fileno().
+        """
+        raise OSError("SharedMemoryFile does not have a real fileno()")
+
+    def isatty(self):
+        return False
+
 # ---------- Main class ----------
 class ZlibFile(object):
     """
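Note: SharedMemoryFile gives multiprocessing.shared_memory.SharedMemory a file-like face; the region is fixed-size, so write() raises instead of growing the file. A hedged usage sketch (Python 3.8+; unlink_on_close destroys the block when the with-block exits):

    from pyarchivefile import SharedMemoryFile

    with SharedMemoryFile(create=True, size=64, mode='r+b',
                          unlink_on_close=True) as f:
        f.write(b"line one\n")
        f.seek(0)
        print(f.readline())   # b'line one\n'
        print(f.read(4))      # next 4 bytes of the zero-initialized block
        f.seek(0, 2)          # seek clamps to the region end
        print(f.tell())       # 64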
@@ -3666,7 +4171,7 @@ def _bytes_to_int(b):
 # =========================
 # Public checksum API
 # =========================
-def GetHeaderChecksum(inlist=None, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
+def GetHeaderChecksum(inlist=None, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__, saltkey=None):
     """
     Serialize header fields (list/tuple => joined with delimiter + trailing delimiter;
     or a single field) and compute the requested checksum. Returns lowercase hex.
@@ -3678,15 +4183,30 @@ def GetHeaderChecksum(inlist=None, checksumtype="md5", encodedata=True, formatsp
     if encodedata and not isinstance(hdr_bytes, (bytes, bytearray, memoryview)):
         hdr_bytes = _to_bytes(hdr_bytes)
     hdr_bytes = bytes(hdr_bytes)
-
+    saltkeyval = None
+    if(hasattr(saltkey, "read")):
+        saltkeyval = skfp.read()
+        if(not isinstance(saltkeyval, bytes) and sys.version_info[0] >= 3):
+            saltkeyval = saltkeyval.encode("UTF-8")
+    elif(isinstance(saltkey, bytes) and sys.version_info[0] >= 3):
+        saltkeyval = saltkey
+    elif(saltkey is not None and os.path.exists(saltkey)):
+        with open(saltkey, "rb") as skfp:
+            saltkeyval = skfp.read()
+    else:
+        saltkey = None
+    if(saltkeyval is None):
+        saltkey = None
     if CheckSumSupport(algo_key, hashlib_guaranteed):
-
-
-
+        if(saltkey is None or saltkeyval is None):
+            h = hashlib.new(algo_key, hdr_bytes)
+        else:
+            h = hmac.new(saltkeyval, hdr_bytes, digestmod=algo_key)
+        return h.hexdigest().lower()

     return "0"

-def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
+def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__, saltkey=None):
     """
     Accepts bytes/str/file-like.
     - Hashlib algos: streamed in 1 MiB chunks.
@@ -3694,13 +4214,29 @@ def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__
     - Falls back to one-shot for non-file-like inputs.
     """
     algo_key = (checksumtype or "md5").lower()
-
+    saltkeyval = None
+    if(hasattr(saltkey, "read")):
+        saltkeyval = skfp.read()
+        if(not isinstance(saltkeyval, bytes) and sys.version_info[0] >= 3):
+            saltkeyval = saltkeyval.encode("UTF-8")
+    elif(isinstance(saltkey, bytes) and sys.version_info[0] >= 3):
+        saltkeyval = saltkey
+    elif(saltkey is not None and os.path.exists(saltkey)):
+        with open(saltkey, "rb") as skfp:
+            saltkeyval = skfp.read()
+    else:
+        saltkey = None
+    if(saltkeyval is None):
+        saltkey = None
     # file-like streaming
     if hasattr(inbytes, "read"):
         # hashlib

         if CheckSumSupport(algo_key, hashlib_guaranteed):
-
+            if(saltkey is None or saltkeyval is None):
+                h = hashlib.new(algo_key)
+            else:
+                h = hmac.new(saltkeyval, digestmod=algo_key)
             while True:
                 chunk = inbytes.read(__filebuff_size__)
                 if not chunk:
@@ -3721,26 +4257,41 @@ def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__
     # one-shot

     if CheckSumSupport(algo_key, hashlib_guaranteed):
-
-
+        if(saltkey is None or saltkeyval is None):
+            h = hashlib.new(algo_key, data)
+        else:
+            h = hmac.new(saltkeyval, data, digestmod=algo_key)
         return h.hexdigest().lower()

     return "0"

-def ValidateHeaderChecksum(inlist=None, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__):
-    calc = GetHeaderChecksum(inlist, checksumtype, True, formatspecs)
+def ValidateHeaderChecksum(inlist=None, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__, saltkey=None):
+    calc = GetHeaderChecksum(inlist, checksumtype, True, formatspecs, saltkey)
     want = (inchecksum or "0").strip().lower()
     if want.startswith("0x"):
         want = want[2:]
-    return
+    return CheckChecksums(want, calc)

-def ValidateFileChecksum(infile, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__):
-    calc = GetFileChecksum(infile, checksumtype, True, formatspecs)
+def ValidateFileChecksum(infile, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__, saltkey=None):
+    calc = GetFileChecksum(infile, checksumtype, True, formatspecs, saltkey)
     want = (inchecksum or "0").strip().lower()
     if want.startswith("0x"):
         want = want[2:]
-    return
+    return CheckChecksums(want, calc)
+
+def CheckChecksums(inchecksum, outchecksum):
+    # Normalize as text first
+    calc = (inchecksum or "0").strip().lower()
+    want = (outchecksum or "0").strip().lower()

+    if want.startswith("0x"):
+        want = want[2:]
+
+    # Now force both to bytes
+    calc_b = _to_bytes(calc)  # defaults to utf-8, strict
+    want_b = _to_bytes(want)
+
+    return hmac.compare_digest(want_b, calc_b)

 def MajorMinorToDev(major, minor):
     """
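Note: passing saltkey switches the checksum helpers from plain hashlib digests to keyed HMAC, and comparisons now go through CheckChecksums(), i.e. hmac.compare_digest() in constant time. One caveat visible above: the file-like saltkey branch reads from skfp, a name not defined at that point, so bytes (or a path to a readable key file) are the forms that work as published. Hedged sketch:

    from pyarchivefile import GetFileChecksum, ValidateFileChecksum

    salted = GetFileChecksum(b"payload", "sha256", saltkey=b"secret")
    plain = GetFileChecksum(b"payload", "sha256")
    print(salted != plain)  # True: HMAC-SHA256 vs bare SHA-256
    print(ValidateFileChecksum(b"payload", "sha256", salted, saltkey=b"secret"))  # True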
@@ -4109,11 +4660,11 @@ def ReadFileHeaderDataWoSize(fp, delimiter=_default_delim(None)):
     return first_two + headerdata


-def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
     if(not hasattr(fp, "read")):
         return False
     delimiter = formatspecs['format_delimiter']
-    if(
+    if(__use_new_style__):
         HeaderOut = ReadFileHeaderDataBySize(fp, delimiter)
     else:
         HeaderOut = ReadFileHeaderDataWoSize(fp, delimiter)
@@ -4201,15 +4752,14 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
     except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
         pass
     fp.seek(len(delimiter), 1)
-    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
-    if(not
+    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+    if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
         VerbosePrintOut("File JSON Data Checksum Error with file " +
                         fname + " at offset " + str(fheaderstart))
         VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
         return False
     fp.seek(len(delimiter), 1)
-    newfcs = GetHeaderChecksum(
-        HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs)
+    newfcs = GetHeaderChecksum(HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs, saltkey)
     HeaderOut.append(fjsoncontent)
     if(fcs != newfcs and not skipchecksum):
         VerbosePrintOut("File Header Checksum Error with file " +
@@ -4228,10 +4778,9 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
     else:
         fp.seek(fcsize, 1)
     fcontents.seek(0, 0)
-    newfccs = GetFileChecksum(
-        fcontents, HeaderOut[-3].lower(), False, formatspecs)
+    newfccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
     fcontents.seek(0, 0)
-    if(not
+    if(not CheckChecksums(fccs, newfccs) and not skipchecksum and not listonly):
         VerbosePrintOut("File Content Checksum Error with file " +
                         fname + " at offset " + str(fcontentstart))
         VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4268,12 +4817,12 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
     return HeaderOut


-def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
     if(not hasattr(fp, "read")):
         return False
     delimiter = formatspecs['format_delimiter']
     fheaderstart = fp.tell()
-    if(
+    if(__use_new_style__):
         HeaderOut = ReadFileHeaderDataBySize(fp, delimiter)
     else:
         HeaderOut = ReadFileHeaderDataWoSize(fp, delimiter)
@@ -4291,40 +4840,51 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
     fbasedir = os.path.dirname(fname)
     flinkname = HeaderOut[6]
     fsize = int(HeaderOut[7], 16)
-
-
-
-
-
+    fblksize = int(HeaderOut[8], 16)
+    fblocks = int(HeaderOut[9], 16)
+    fflags = int(HeaderOut[10], 16)
+    fatime = int(HeaderOut[11], 16)
+    fmtime = int(HeaderOut[12], 16)
+    fctime = int(HeaderOut[13], 16)
+    fbtime = int(HeaderOut[14], 16)
+    fmode = int(HeaderOut[15], 16)
     fchmode = stat.S_IMODE(fmode)
     ftypemod = stat.S_IFMT(fmode)
-    fwinattributes = int(HeaderOut[
-    fcompression = HeaderOut[
-    fcsize = int(HeaderOut[
-    fuid = int(HeaderOut[
-    funame = HeaderOut[
-    fgid = int(HeaderOut[
-    fgname = HeaderOut[
-    fid = int(HeaderOut[
-    finode = int(HeaderOut[
-    flinkcount = int(HeaderOut[
-    fdev = int(HeaderOut[
-
-
-
-
-
-
-
-
-
-    fextrafields = int(HeaderOut[33], 16)
+    fwinattributes = int(HeaderOut[16], 16)
+    fcompression = HeaderOut[17]
+    fcsize = int(HeaderOut[18], 16)
+    fuid = int(HeaderOut[19], 16)
+    funame = HeaderOut[20]
+    fgid = int(HeaderOut[21], 16)
+    fgname = HeaderOut[22]
+    fid = int(HeaderOut[23], 16)
+    finode = int(HeaderOut[24], 16)
+    flinkcount = int(HeaderOut[25], 16)
+    fdev = int(HeaderOut[26], 16)
+    frdev = int(HeaderOut[27], 16)
+    fseeknextfile = HeaderOut[28]
+    fjsontype = HeaderOut[29]
+    fjsonlen = int(HeaderOut[30], 16)
+    fjsonsize = int(HeaderOut[31], 16)
+    fjsonchecksumtype = HeaderOut[32]
+    fjsonchecksum = HeaderOut[33]
+    fextrasize = int(HeaderOut[34], 16)
+    fextrafields = int(HeaderOut[35], 16)
     fextrafieldslist = []
-    extrastart =
+    extrastart = 36
     extraend = extrastart + fextrafields
     while(extrastart < extraend):
         fextrafieldslist.append(HeaderOut[extrastart])
         extrastart = extrastart + 1
+    fvendorfieldslist = []
+    fvendorfields = 0;
+    if((len(HeaderOut) - 4)>extraend):
+        extrastart = extraend
+        extraend = len(HeaderOut) - 4
+        while(extrastart < extraend):
+            fvendorfieldslist.append(HeaderOut[extrastart])
+            extrastart = extrastart + 1
+            fvendorfields = fvendorfields + 1
     if(fextrafields==1):
         try:
             fextrafieldslist = json.loads(base64.b64decode(fextrafieldslist[0]).decode("UTF-8"))
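Note: the 0.27.0 header record grows relative to 0.25.2: fblksize/fblocks/fflags now sit at indices 8-10 and frdev at 27, shifting every field after fsize, with extra fields starting at index 36 and optional vendor fields running from there to four slots before the record tail. An index map transcribed from the reads above (hex-encoded fields are parsed with int(value, 16)):

    V027_HEADER_FIELDS = [
        (8,  "fblksize"), (9,  "fblocks"), (10, "fflags"),
        (11, "fatime"), (12, "fmtime"), (13, "fctime"), (14, "fbtime"),
        (15, "fmode"), (16, "fwinattributes"), (17, "fcompression"),
        (18, "fcsize"), (19, "fuid"), (20, "funame"), (21, "fgid"),
        (22, "fgname"), (23, "fid"), (24, "finode"), (25, "flinkcount"),
        (26, "fdev"), (27, "frdev"), (28, "fseeknextfile"),
        (29, "fjsontype"), (30, "fjsonlen"), (31, "fjsonsize"),
        (32, "fjsonchecksumtype"), (33, "fjsonchecksum"),
        (34, "fextrasize"), (35, "fextrafields"),
    ]  # indices 0-7 (through fsize) are unchanged; extras begin at 36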
@@ -4402,16 +4962,15 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
|
|
|
4402
4962
|
pass
|
|
4403
4963
|
fp.seek(len(delimiter), 1)
|
|
4404
4964
|
fjend = fp.tell() - 1
|
|
4405
|
-
jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
|
|
4406
|
-
if(not
|
|
4965
|
+
jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
|
|
4966
|
+
if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
|
|
4407
4967
|
VerbosePrintOut("File JSON Data Checksum Error with file " +
|
|
4408
4968
|
fname + " at offset " + str(fheaderstart))
|
|
4409
4969
|
VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
|
|
4410
4970
|
return False
|
|
4411
4971
|
fcs = HeaderOut[-2].lower()
|
|
4412
4972
|
fccs = HeaderOut[-1].lower()
|
|
4413
|
-
newfcs = GetHeaderChecksum(
|
|
4414
|
-
HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs)
|
|
4973
|
+
newfcs = GetHeaderChecksum(HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs, saltkey)
|
|
4415
4974
|
if(fcs != newfcs and not skipchecksum):
|
|
4416
4975
|
VerbosePrintOut("File Header Checksum Error with file " +
|
|
4417
4976
|
fname + " at offset " + str(fheaderstart))
|
|
@@ -4434,10 +4993,9 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
|
|
|
4434
4993
|
fp.seek(fcsize, 1)
|
|
4435
4994
|
pyhascontents = False
|
|
4436
4995
|
fcontents.seek(0, 0)
|
|
4437
|
-
newfccs = GetFileChecksum(
|
|
4438
|
-
fcontents, HeaderOut[-3].lower(), False, formatspecs)
|
|
4996
|
+
newfccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
|
|
4439
4997
|
fcontents.seek(0, 0)
|
|
4440
|
-
if(not
|
|
4998
|
+
if(not CheckChecksums(fccs, newfccs) and not skipchecksum and not listonly):
|
|
4441
4999
|
VerbosePrintOut("File Content Checksum Error with file " +
|
|
4442
5000
|
fname + " at offset " + str(fcontentstart))
|
|
4443
5001
|
VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
|
|
@@ -4454,8 +5012,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
|
|
|
4454
5012
|
shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
|
|
4455
5013
|
cfcontents.close()
|
|
4456
5014
|
fcontents.seek(0, 0)
|
|
4457
|
-
fccs = GetFileChecksum(
|
|
4458
|
-
fcontents, HeaderOut[-3].lower(), False, formatspecs)
|
|
5015
|
+
fccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
|
|
4459
5016
|
fcontentend = fp.tell()
|
|
4460
5017
|
if(re.findall("^\\+([0-9]+)", fseeknextfile)):
|
|
4461
5018
|
fseeknextasnum = int(fseeknextfile.replace("+", ""))
|
|
@@ -4477,17 +5034,17 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
 fcontents.seek(0, 0)
 if(not contentasfile):
 fcontents = fcontents.read()
-outlist = {'fheadersize': fheadsize, 'fhstart': fheaderstart, 'fhend': fhend, 'ftype': ftype, 'fencoding': fencoding, 'fcencoding': fcencoding, 'fname': fname, 'fbasedir': fbasedir, 'flinkname': flinkname, 'fsize': fsize, 'fatime': fatime, 'fmtime': fmtime, 'fctime': fctime, 'fbtime': fbtime, 'fmode': fmode, 'fchmode': fchmode, 'ftypemod': ftypemod, 'fwinattributes': fwinattributes, 'fcompression': fcompression, 'fcsize': fcsize, 'fuid': fuid, 'funame': funame, 'fgid': fgid, 'fgname': fgname, 'finode': finode, 'flinkcount': flinkcount,
-'fdev': fdev, '
+outlist = {'fheadersize': fheadsize, 'fhstart': fheaderstart, 'fhend': fhend, 'ftype': ftype, 'fencoding': fencoding, 'fcencoding': fcencoding, 'fname': fname, 'fbasedir': fbasedir, 'flinkname': flinkname, 'fsize': fsize, 'fblksize': fblksize, 'fblocks': fblocks, 'fflags': fflags, 'fatime': fatime, 'fmtime': fmtime, 'fctime': fctime, 'fbtime': fbtime, 'fmode': fmode, 'fchmode': fchmode, 'ftypemod': ftypemod, 'fwinattributes': fwinattributes, 'fcompression': fcompression, 'fcsize': fcsize, 'fuid': fuid, 'funame': funame, 'fgid': fgid, 'fgname': fgname, 'finode': finode, 'flinkcount': flinkcount,
+'fdev': fdev, 'frdev': frdev, 'fseeknextfile': fseeknextfile, 'fheaderchecksumtype': HeaderOut[-4], 'fjsonchecksumtype': fjsonchecksumtype, 'fcontentchecksumtype': HeaderOut[-3], 'fnumfields': fnumfields + 2, 'frawheader': HeaderOut, 'fvendorfields': fvendorfields, 'fvendordata': fvendorfieldslist, 'fextrafields': fextrafields, 'fextrafieldsize': fextrasize, 'fextradata': fextrafieldslist, 'fjsontype': fjsontype, 'fjsonlen': fjsonlen, 'fjsonsize': fjsonsize, 'fjsonrawdata': fjsonrawcontent, 'fjsondata': fjsoncontent, 'fjstart': fjstart, 'fjend': fjend, 'fheaderchecksum': fcs, 'fjsonchecksum': fjsonchecksum, 'fcontentchecksum': fccs, 'fhascontents': pyhascontents, 'fcontentstart': fcontentstart, 'fcontentend': fcontentend, 'fcontentasfile': contentasfile, 'fcontents': fcontents}
 return outlist


-def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
 if(not hasattr(fp, "read")):
 return False
 delimiter = formatspecs['format_delimiter']
 fheaderstart = fp.tell()
-if(
+if(__use_new_style__):
 HeaderOut = ReadFileHeaderDataBySize(fp, delimiter)
 else:
 HeaderOut = ReadFileHeaderDataWoSize(fp, delimiter)
@@ -4505,40 +5062,51 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
 fbasedir = os.path.dirname(fname)
 flinkname = HeaderOut[6]
 fsize = int(HeaderOut[7], 16)
-
-
-
-
-
+fblksize = int(HeaderOut[8], 16)
+fblocks = int(HeaderOut[9], 16)
+fflags = int(HeaderOut[10], 16)
+fatime = int(HeaderOut[11], 16)
+fmtime = int(HeaderOut[12], 16)
+fctime = int(HeaderOut[13], 16)
+fbtime = int(HeaderOut[14], 16)
+fmode = int(HeaderOut[15], 16)
 fchmode = stat.S_IMODE(fmode)
 ftypemod = stat.S_IFMT(fmode)
-fwinattributes = int(HeaderOut[
-fcompression = HeaderOut[
-fcsize = int(HeaderOut[
-fuid = int(HeaderOut[
-funame = HeaderOut[
-fgid = int(HeaderOut[
-fgname = HeaderOut[
-fid = int(HeaderOut[
-finode = int(HeaderOut[
-flinkcount = int(HeaderOut[
-fdev = int(HeaderOut[
-
-
-
-
-
-
-
-
-
-fextrafields = int(HeaderOut[33], 16)
+fwinattributes = int(HeaderOut[16], 16)
+fcompression = HeaderOut[17]
+fcsize = int(HeaderOut[18], 16)
+fuid = int(HeaderOut[19], 16)
+funame = HeaderOut[20]
+fgid = int(HeaderOut[21], 16)
+fgname = HeaderOut[22]
+fid = int(HeaderOut[23], 16)
+finode = int(HeaderOut[24], 16)
+flinkcount = int(HeaderOut[25], 16)
+fdev = int(HeaderOut[26], 16)
+frdev = int(HeaderOut[27], 16)
+fseeknextfile = HeaderOut[28]
+fjsontype = HeaderOut[29]
+fjsonlen = int(HeaderOut[30], 16)
+fjsonsize = int(HeaderOut[31], 16)
+fjsonchecksumtype = HeaderOut[32]
+fjsonchecksum = HeaderOut[33]
+fextrasize = int(HeaderOut[34], 16)
+fextrafields = int(HeaderOut[35], 16)
 fextrafieldslist = []
-extrastart =
+extrastart = 36
 extraend = extrastart + fextrafields
 while(extrastart < extraend):
 fextrafieldslist.append(HeaderOut[extrastart])
 extrastart = extrastart + 1
+fvendorfieldslist = []
+fvendorfields = 0;
+if((len(HeaderOut) - 4)>extraend):
+extrastart = extraend
+extraend = len(HeaderOut) - 4
+while(extrastart < extraend):
+fvendorfieldslist.append(HeaderOut[extrastart])
+extrastart = extrastart + 1
+fvendorfields = fvendorfields + 1
 if(fextrafields==1):
 try:
 fextrafieldslist = json.loads(base64.b64decode(fextrafieldslist[0]).decode("UTF-8"))
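The hunk above is the core of the format change: three new per-file fields (fblksize, fblocks, fflags) are inserted after fsize, so every later field shifts up by three (for example fextrafields moves from HeaderOut[33] to HeaderOut[35], and the extra-field block now starts at index 36). Below is a small sketch of reading such hex-encoded positional fields; the helper and the spec table are illustrative, not the module's real API.

def parse_header_fields(header, spec):
    # spec maps field name -> (index, is_hex); hex fields are stored
    # as lowercase base-16 strings in the archive header.
    out = {}
    for name, (idx, is_hex) in spec.items():
        out[name] = int(header[idx], 16) if is_hex else header[idx]
    return out

# Illustrative excerpt of the 0.27.0 layout as read in the hunk above.
NEW_LAYOUT = {
    "fsize": (7, True),          # unchanged from 0.25.2
    "fblksize": (8, True),       # new in 0.27.0
    "fblocks": (9, True),        # new in 0.27.0
    "fflags": (10, True),        # new in 0.27.0
    "fatime": (11, True),        # was index 8 in 0.25.2
    "fextrafields": (35, True),  # was index 33 in 0.25.2
}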
@@ -4548,6 +5116,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
 fextrafieldslist = json.loads(fextrafieldslist[0])
 except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
 pass
+fjstart = fp.tell()
 if(fjsontype=="json"):
 fjsoncontent = {}
 fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
@@ -4614,16 +5183,16 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
 except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
 pass
 fp.seek(len(delimiter), 1)
-
-
+fjend = fp.tell() - 1
+jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
 VerbosePrintOut("File JSON Data Checksum Error with file " +
 fname + " at offset " + str(fheaderstart))
 VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
 return False
 fcs = HeaderOut[-2].lower()
 fccs = HeaderOut[-1].lower()
-newfcs = GetHeaderChecksum(
-HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs)
+newfcs = GetHeaderChecksum(HeaderOut[:-2], HeaderOut[-4].lower(), True, formatspecs, saltkey)
 if(fcs != newfcs and not skipchecksum):
 VerbosePrintOut("File Header Checksum Error with file " +
 fname + " at offset " + str(fheaderstart))
@@ -4646,9 +5215,9 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
 fp.seek(fcsize, 1)
 pyhascontents = False
 fcontents.seek(0, 0)
-newfccs = GetFileChecksum(
-
-if(not
+newfccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
+fcontents.seek(0, 0)
+if(not CheckChecksums(fccs, newfccs) and not skipchecksum and not listonly):
 VerbosePrintOut("File Content Checksum Error with file " +
 fname + " at offset " + str(fcontentstart))
 VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4665,8 +5234,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
 shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
 cfcontents.close()
 fcontents.seek(0, 0)
-fccs = GetFileChecksum(
-fcontents, HeaderOut[-3].lower(), False, formatspecs)
+fccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
 fcontentend = fp.tell()
 if(re.findall("^\\+([0-9]+)", fseeknextfile)):
 fseeknextasnum = int(fseeknextfile.replace("+", ""))
@@ -4688,12 +5256,12 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
 fcontents.seek(0, 0)
 if(not contentasfile):
 fcontents = fcontents.read()
-outlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
-finode, flinkcount, fdev,
+outlist = {'fheaders': [ftype, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+fcsize, fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev, frdev, fseeknextfile], 'fextradata': fextrafieldslist, 'fjsoncontent': fjsoncontent, 'fcontents': fcontents, 'fjsonchecksumtype': fjsonchecksumtype, 'fheaderchecksumtype': HeaderOut[-4].lower(), 'fcontentchecksumtype': HeaderOut[-3].lower()}
 return outlist


-def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None):
 if(not hasattr(fp, "read")):
 return False
 delimiter = formatspecs['format_delimiter']
@@ -4706,6 +5274,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
 CatSizeEnd = CatSize
 fp.seek(curloc, 0)
 inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+headeroffset = fp.tell()
 formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
 formdelszie = len(formatspecs['format_delimiter'])
 formdel = fp.read(formdelszie).decode("UTF-8")
@@ -4713,7 +5282,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
 return False
 if(formdel != formatspecs['format_delimiter']):
 return False
-if(
+if(__use_new_style__):
 inheader = ReadFileHeaderDataBySize(
 fp, formatspecs['format_delimiter'])
 else:
@@ -4721,19 +5290,19 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
 fp, formatspecs['format_delimiter'])
 fprechecksumtype = inheader[-2]
 fprechecksum = inheader[-1]
-headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
-newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
+headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs, saltkey)
+newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
 if(not headercheck and not skipchecksum):
 VerbosePrintOut(
-"File Header Checksum Error with file at offset " + str(
+"File Header Checksum Error with file at offset " + str(headeroffset))
 VerbosePrintOut("'" + fprechecksum + "' != " +
 "'" + newfcs + "'")
 return False
-fnumfiles = int(inheader[
-outfseeknextfile = inheaderdata[
-fjsonsize = int(inheaderdata[
-fjsonchecksumtype = inheader[
-fjsonchecksum = inheader[
+fnumfiles = int(inheader[8], 16)
+outfseeknextfile = inheaderdata[9]
+fjsonsize = int(inheaderdata[12], 16)
+fjsonchecksumtype = inheader[13]
+fjsonchecksum = inheader[14]
 fp.read(fjsonsize)
 # Next seek directive
 if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
@@ -4756,8 +5325,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
 countnum = 0
 flist = []
 while(countnum < fnumfiles):
-HeaderOut = ReadFileHeaderDataWithContent(
-fp, listonly, uncompress, skipchecksum, formatspecs)
+HeaderOut = ReadFileHeaderDataWithContent(fp, listonly, uncompress, skipchecksum, formatspecs, saltkey)
 if(len(HeaderOut) == 0):
 break
 flist.append(HeaderOut)
@@ -4765,7 +5333,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
 return flist


-def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
+def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None, seektoend=False):
 if(not hasattr(fp, "read")):
 return False
 delimiter = formatspecs['format_delimiter']
@@ -4778,6 +5346,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
 CatSizeEnd = CatSize
 fp.seek(curloc, 0)
 inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+headeroffset = fp.tell()
 formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
 formdelszie = len(formatspecs['format_delimiter'])
 formdel = fp.read(formdelszie).decode("UTF-8")
@@ -4785,16 +5354,16 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
 return False
 if(formdel != formatspecs['format_delimiter']):
 return False
-if(
+if(__use_new_style__):
 inheader = ReadFileHeaderDataBySize(
 fp, formatspecs['format_delimiter'])
 else:
 inheader = ReadFileHeaderDataWoSize(
 fp, formatspecs['format_delimiter'])
-fnumextrafieldsize = int(inheader[
-fnumextrafields = int(inheader[
+fnumextrafieldsize = int(inheader[15], 16)
+fnumextrafields = int(inheader[16], 16)
 fextrafieldslist = []
-extrastart =
+extrastart = 17
 extraend = extrastart + fnumextrafields
 while(extrastart < extraend):
 fextrafieldslist.append(inheader[extrastart])
@@ -4808,20 +5377,31 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
 fextrafieldslist = json.loads(fextrafieldslist[0])
 except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
 pass
+fvendorfieldslist = []
+fvendorfields = 0;
+if((len(inheader) - 2)>extraend):
+extrastart = extraend
+extraend = len(inheader) - 2
+while(extrastart < extraend):
+fvendorfieldslist.append(HeaderOut[extrastart])
+extrastart = extrastart + 1
+fvendorfields = fvendorfields + 1
 formversion = re.findall("([\\d]+)", formstring)
 fheadsize = int(inheader[0], 16)
 fnumfields = int(inheader[1], 16)
-
-
-
-
-
-
-
-
-
-
-
+fheadctime = int(inheader[2], 16)
+fheadmtime = int(inheader[3], 16)
+fhencoding = inheader[4]
+fostype = inheader[5]
+fpythontype = inheader[6]
+fprojectname = inheader[7]
+fnumfiles = int(inheader[8], 16)
+fseeknextfile = inheader[9]
+fjsontype = inheader[10]
+fjsonlen = int(inheader[11], 16)
+fjsonsize = int(inheader[12], 16)
+fjsonchecksumtype = inheader[13]
+fjsonchecksum = inheader[14]
 fjsoncontent = {}
 fjstart = fp.tell()
 if(fjsontype=="json"):
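The vendor-field block added above treats anything between the declared extra fields and the trailing checksum fields as opaque vendor data. Note that in this archive-header variant the loop appends HeaderOut[extrastart] even though the surrounding code reads from inheader, which looks like a carry-over from the per-file readers; the sketch below shows the apparent intent, with illustrative names only.

def collect_vendor_fields(header, extraend, reserved_tail):
    # Collect undeclared trailing fields as opaque vendor data.
    # reserved_tail is 4 in the per-file readers (two checksum types
    # plus two checksums) and 2 in the archive-header readers (one
    # checksum type plus one checksum), per the hunks above.
    vendor = []
    start, end = extraend, len(header) - reserved_tail
    while start < end:
        vendor.append(header[start])
        start += 1
    return vendor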
@@ -4907,25 +5487,25 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
 fp.seek(fseeknextasnum, 0)
 else:
 return False
-jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
-if(not
+jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
 VerbosePrintOut("File JSON Data Checksum Error with file " +
 fname + " at offset " + str(fheaderstart))
 VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
 return False
 fprechecksumtype = inheader[-2]
 fprechecksum = inheader[-1]
-headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
-newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
+headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs, saltkey)
+newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
 if(not headercheck and not skipchecksum):
 VerbosePrintOut(
-"File Header Checksum Error with file at offset " + str(
+"File Header Checksum Error with file at offset " + str(headeroffset))
 VerbosePrintOut("'" + fprechecksum + "' != " +
 "'" + newfcs + "'")
 return False
 formversions = re.search('(.*?)(\\d+)', formstring).groups()
 fcompresstype = ""
-outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fprojectname': fprojectname, 'fimptype': fpythontype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fseeknextfile': fseeknextfile, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'fjsonchecksumtype': fjsonchecksumtype, 'fjsontype': fjsontype, 'fjsonlen': fjsonlen, 'fjsonsize': fjsonsize, 'fjsonrawdata': fjsonrawcontent, 'fjsondata': fjsoncontent, 'fjstart': fjstart, 'fjend': fjend, 'fjsonchecksum': fjsonchecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
+outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fmtime': fheadmtime, 'fctime': fheadctime, 'fversion': formversions[1], 'fostype': fostype, 'fprojectname': fprojectname, 'fimptype': fpythontype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fseeknextfile': fseeknextfile, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'fjsonchecksumtype': fjsonchecksumtype, 'fjsontype': fjsontype, 'fjsonlen': fjsonlen, 'fjsonsize': fjsonsize, 'fjsonrawdata': fjsonrawcontent, 'fjsondata': fjsoncontent, 'fjstart': fjstart, 'fjend': fjend, 'fjsonchecksum': fjsonchecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'fvendorfields': fvendorfields, 'fvendordata': fvendorfieldslist, 'ffilelist': []}
 if (seekstart < 0) or (seekstart > fnumfiles):
 seekstart = 0
 if (seekend == 0) or (seekend > fnumfiles) or (seekend < seekstart):
@@ -4952,16 +5532,15 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
 prefjsonchecksum = preheaderdata[31]
 prejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
 fp.seek(len(delimiter), 1)
-prejsonfcs = GetFileChecksum(prejsoncontent, prefjsonchecksumtype, True, formatspecs)
-if(not
+prejsonfcs = GetFileChecksum(prejsoncontent, prefjsonchecksumtype, True, formatspecs, saltkey)
+if(not CheckChecksums(prefjsonchecksum, prejsonfcs) and not skipchecksum):
 VerbosePrintOut("File JSON Data Checksum Error with file " +
 prefname + " at offset " + str(prefhstart))
 VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
 return False
-prenewfcs = GetHeaderChecksum(
-preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
+prenewfcs = GetHeaderChecksum(preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs, saltkey)
 prefcs = preheaderdata[-2]
-if(not
+if(not CheckChecksums(prefcs, prenewfcs) and not skipchecksum):
 VerbosePrintOut("File Header Checksum Error with file " +
 prefname + " at offset " + str(prefhstart))
 VerbosePrintOut("'" + prefcs + "' != " +
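Checksum comparisons in these hunks move from plain string inequality to the new CheckChecksums(expected, computed) helper, whose body is not part of this diff. Paired with salted digests, the natural reading is a case-insensitive, constant-time comparison; the following stand-in is an assumption, not the package's implementation.

import hmac

def check_checksums(expected, computed):
    # Constant-time, case-insensitive hex digest comparison, avoiding
    # the timing side channel of an ordinary != on strings.
    if expected is None or computed is None:
        return False
    return hmac.compare_digest(expected.lower(), computed.lower())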
@@ -4976,11 +5555,10 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
 if(prefsize > 0):
 prefcontents.write(fp.read(prefsize))
 prefcontents.seek(0, 0)
-prenewfccs = GetFileChecksum(
-prefcontents, preheaderdata[-3].lower(), False, formatspecs)
+prenewfccs = GetFileChecksum(prefcontents, preheaderdata[-3].lower(), False, formatspecs, saltkey)
 prefccs = preheaderdata[-1]
 pyhascontents = True
-if(not
+if(not CheckChecksums(prefccs, prenewfccs) and not skipchecksum):
 VerbosePrintOut("File Content Checksum Error with file " +
 prefname + " at offset " + str(prefcontentstart))
 VerbosePrintOut("'" + prefccs +
@@ -5007,8 +5585,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
 realidnum = 0
 countnum = seekstart
 while (fp.tell() < CatSizeEnd) if seektoend else (countnum < seekend):
-HeaderOut = ReadFileHeaderDataWithContentToArray(
-fp, listonly, contentasfile, uncompress, skipchecksum, formatspecs)
+HeaderOut = ReadFileHeaderDataWithContentToArray(fp, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey)
 if(len(HeaderOut) == 0):
 break
 HeaderOut.update({'fid': realidnum, 'fidalt': realidnum})
@@ -5019,7 +5596,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
 return outlist


-def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
+def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, saltkey=None, seektoend=False):
 if(not hasattr(fp, "read")):
 return False
 delimiter = formatspecs['format_delimiter']
@@ -5032,6 +5609,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
 CatSizeEnd = CatSize
 fp.seek(curloc, 0)
 inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+headeroffset = fp.tell()
 formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
 formdelszie = len(formatspecs['format_delimiter'])
 formdel = fp.read(formdelszie).decode("UTF-8")
@@ -5039,16 +5617,16 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
 return False
 if(formdel != formatspecs['format_delimiter']):
 return False
-if(
+if(__use_new_style__):
 inheader = ReadFileHeaderDataBySize(
 fp, formatspecs['format_delimiter'])
 else:
 inheader = ReadFileHeaderDataWoSize(
 fp, formatspecs['format_delimiter'])
-fnumextrafieldsize = int(inheader[
-fnumextrafields = int(inheader[
+fnumextrafieldsize = int(inheader[15], 16)
+fnumextrafields = int(inheader[16], 16)
 fextrafieldslist = []
-extrastart =
+extrastart = 17
 extraend = extrastart + fnumextrafields
 while(extrastart < extraend):
 fextrafieldslist.append(inheader[extrastart])
@@ -5062,19 +5640,98 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
 fextrafieldslist = json.loads(fextrafieldslist[0])
 except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
 pass
+fvendorfieldslist = []
+fvendorfields = 0;
+if((len(inheader) - 2)>extraend):
+extrastart = extraend
+extraend = len(inheader) - 2
+while(extrastart < extraend):
+fvendorfieldslist.append(HeaderOut[extrastart])
+extrastart = extrastart + 1
+fvendorfields = fvendorfields + 1
 formversion = re.findall("([\\d]+)", formstring)
 fheadsize = int(inheader[0], 16)
 fnumfields = int(inheader[1], 16)
-
-
-
-
-
-
-
+fheadctime = int(inheader[2], 16)
+fheadmtime = int(inheader[3], 16)
+fhencoding = inheader[4]
+fostype = inheader[5]
+fpythontype = inheader[6]
+fprojectname = inheader[7]
+fnumfiles = int(inheader[8], 16)
+fseeknextfile = inheader[9]
+fjsontype = inheader[10]
+fjsonlen = int(inheader[11], 16)
+fjsonsize = int(inheader[12], 16)
+fjsonchecksumtype = inheader[13]
+fjsonchecksum = inheader[14]
 fjsoncontent = {}
 fjstart = fp.tell()
-
+if(fjsontype=="json"):
+fjsoncontent = {}
+fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+if(fjsonsize > 0):
+try:
+fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
+except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+try:
+fjsonrawcontent = fprejsoncontent
+fjsoncontent = json.loads(fprejsoncontent)
+except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+fprejsoncontent = ""
+fjsonrawcontent = fprejsoncontent
+fjsoncontent = {}
+else:
+fprejsoncontent = ""
+fjsonrawcontent = fprejsoncontent
+fjsoncontent = {}
+elif(testyaml and fjsontype == "yaml"):
+fjsoncontent = {}
+fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+if (fjsonsize > 0):
+try:
+# try base64 → utf-8 → YAML
+fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
+try:
+# fall back to treating the bytes as plain text YAML
+fjsonrawcontent = fprejsoncontent
+fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+except (UnicodeDecodeError, yaml.YAMLError):
+# final fallback: empty
+fprejsoncontent = ""
+fjsonrawcontent = fprejsoncontent
+fjsoncontent = {}
+else:
+fprejsoncontent = ""
+fjsonrawcontent = fprejsoncontent
+fjsoncontent = {}
+elif(not testyaml and fjsontype == "yaml"):
+fjsoncontent = {}
+fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+fprejsoncontent = ""
+fjsonrawcontent = fprejsoncontent
+elif(fjsontype=="list"):
+fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+flisttmp = MkTempFile()
+flisttmp.write(fprejsoncontent.encode())
+flisttmp.seek(0)
+fjsoncontent = ReadFileHeaderData(flisttmp, fjsonlen, delimiter)
+flisttmp.close()
+fjsonrawcontent = fjsoncontent
+if(fjsonlen==1):
+try:
+fjsonrawcontent = base64.b64decode(fjsoncontent[0]).decode("UTF-8")
+fjsoncontent = json.loads(base64.b64decode(fjsoncontent[0]).decode("UTF-8"))
+fjsonlen = len(fjsoncontent)
+except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+try:
+fjsonrawcontent = fjsoncontent[0]
+fjsoncontent = json.loads(fjsoncontent[0])
+except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+pass
 fjend = fp.tell()
 if(re.findall("^\\+([0-9]+)", fseeknextfile)):
 fseeknextasnum = int(fseeknextfile.replace("+", ""))
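The inlined metadata decoder above tries base64-wrapped JSON first, falls back to parsing the raw text as JSON, and finally settles for empty metadata; the YAML branch mirrors it when a parser is available (testyaml), and the "list" branch re-reads the block with ReadFileHeaderData. The JSON fallback chain, reduced to a standalone sketch with an illustrative helper name:

import base64
import binascii
import json

def decode_json_block(raw_text):
    # Returns (raw, parsed): base64-wrapped JSON first, then plain
    # JSON, then empty metadata, matching the order in the hunk above.
    try:
        decoded = base64.b64decode(raw_text.encode("UTF-8")).decode("UTF-8")
        return decoded, json.loads(decoded)
    except (binascii.Error, ValueError, UnicodeDecodeError):
        try:
            return raw_text, json.loads(raw_text)
        except ValueError:
            return "", {}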
@@ -5093,19 +5750,19 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
 fp.seek(fseeknextasnum, 0)
 else:
 return False
-jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
-if(not
+jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
+if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
 VerbosePrintOut("File JSON Data Checksum Error with file " +
 fname + " at offset " + str(fheaderstart))
 VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
 return False
 fprechecksumtype = inheader[-2]
 fprechecksum = inheader[-1]
-headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
-newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
+headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs, saltkey)
+newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
 if(not headercheck and not skipchecksum):
 VerbosePrintOut(
-"File Header Checksum Error with file at offset " + str(
+"File Header Checksum Error with file at offset " + str(headeroffset))
 VerbosePrintOut("'" + fprechecksum + "' != " +
 "'" + newfcs + "'")
 return False
@@ -5121,7 +5778,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
 il = 0
 while(il < seekstart):
 prefhstart = fp.tell()
-if(
+if(__use_new_style__):
 preheaderdata = ReadFileHeaderDataBySize(
 fp, formatspecs['format_delimiter'])
 else:
@@ -5143,16 +5800,15 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
 prefjsonchecksum = preheaderdata[31]
 prefprejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
 fp.seek(len(delimiter), 1)
-prejsonfcs = GetFileChecksum(prefprejsoncontent, prefjsonchecksumtype, True, formatspecs)
-if(not
+prejsonfcs = GetFileChecksum(prefprejsoncontent, prefjsonchecksumtype, True, formatspecs, saltkey)
+if(not CheckChecksums(prefjsonchecksum, prejsonfcs) and not skipchecksum):
 VerbosePrintOut("File JSON Data Checksum Error with file " +
 prefname + " at offset " + str(prefhstart))
 VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
 return False
-prenewfcs = GetHeaderChecksum(
-preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
+prenewfcs = GetHeaderChecksum(preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs, saltkey)
 prefcs = preheaderdata[-2]
-if(not
+if(not CheckChecksums(prefcs, prenewfcs) and not skipchecksum):
 VerbosePrintOut("File Header Checksum Error with file " +
 prefname + " at offset " + str(prefhstart))
 VerbosePrintOut("'" + prefcs + "' != " +
@@ -5169,11 +5825,10 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
 prefcontents = fp.read(prefsize)
 else:
 prefcontents = fp.read(prefcsize)
-prenewfccs = GetFileChecksum(
-prefcontents, preheaderdata[-3].lower(), False, formatspecs)
+prenewfccs = GetFileChecksum(prefcontents, preheaderdata[-3].lower(), False, formatspecs, saltkey)
 prefccs = preheaderdata[-1]
 pyhascontents = True
-if(not
+if(not CheckChecksums(prefccs, prenewfccs) and not skipchecksum):
 VerbosePrintOut("File Content Checksum Error with file " +
 prefname + " at offset " + str(prefcontentstart))
 VerbosePrintOut("'" + prefccs +
@@ -5200,8 +5855,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
 realidnum = 0
 countnum = seekstart
 while (fp.tell() < CatSizeEnd) if seektoend else (countnum < seekend):
-HeaderOut = ReadFileHeaderDataWithContentToList(
-fp, listonly, contentasfile, uncompress, skipchecksum, formatspecs)
+HeaderOut = ReadFileHeaderDataWithContentToList(fp, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey)
 if(len(HeaderOut) == 0):
 break
 outlist.append(HeaderOut)
@@ -5209,7 +5863,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
 realidnum = realidnum + 1
 return outlist

-def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
 if(hasattr(infile, "read") or hasattr(infile, "write")):
 fp = infile
 try:
@@ -5304,7 +5958,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
 else:
 break
 readfp.seek(oldfppos, 0)
-ArchiveList.append(ReadFileDataWithContentToArray(readfp, currentfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, seektoend))
+ArchiveList.append(ReadFileDataWithContentToArray(readfp, currentfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, saltkey, seektoend))
 currentfilepos = readfp.tell()
 else:
 infp = UncompressFileAlt(readfp, formatspecs, currentfilepos)
@@ -5326,27 +5980,27 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
 else:
 break
 infp.seek(oldinfppos, 0)
-ArchiveList.append(ReadFileDataWithContentToArray(infp, currentinfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, seektoend))
+ArchiveList.append(ReadFileDataWithContentToArray(infp, currentinfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, saltkey, seektoend))
 currentinfilepos = infp.tell()
 currentfilepos = readfp.tell()
 return ArchiveList


-def ReadInMultipleFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+def ReadInMultipleFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
 if(isinstance(infile, (list, tuple, ))):
 pass
 else:
 infile = [infile]
 outretval = []
 for curfname in infile:
-outretval.append(ReadInFileWithContentToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend))
+outretval.append(ReadInFileWithContentToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend))
 return outretval

-def ReadInMultipleFilesWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
-return ReadInMultipleFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+def ReadInMultipleFilesWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
+return ReadInMultipleFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)


-def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
 if(hasattr(infile, "read") or hasattr(infile, "write")):
 fp = infile
 try:
@@ -5441,7 +6095,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
 else:
 break
 readfp.seek(oldfppos, 0)
-ArchiveList.append(ReadFileDataWithContentToList(readfp, currentfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, seektoend))
+ArchiveList.append(ReadFileDataWithContentToList(readfp, currentfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, saltkey, seektoend))
 currentfilepos = readfp.tell()
 else:
 infp = UncompressFileAlt(readfp, formatspecs, currentfilepos)
@@ -5463,24 +6117,24 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
 else:
 break
 infp.seek(oldinfppos, 0)
-ArchiveList.append(ReadFileDataWithContentToList(infp, currentinfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, seektoend))
+ArchiveList.append(ReadFileDataWithContentToList(infp, currentinfilepos, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, informatspecs, saltkey, seektoend))
 currentinfilepos = infp.tell()
 currentfilepos = readfp.tell()
 return ArchiveList


-def ReadInMultipleFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+def ReadInMultipleFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
 if(isinstance(infile, (list, tuple, ))):
 pass
 else:
 infile = [infile]
 outretval = {}
 for curfname in infile:
-outretval.append(ReadInFileWithContentToList(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend))
+outretval.append(ReadInFileWithContentToList(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend))
 return outretval

-def ReadInMultipleFilesWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
-return ReadInMultipleFileWithContentToList(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+def ReadInMultipleFilesWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False):
+return ReadInMultipleFileWithContentToList(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)


 def _field_to_bytes(x):
@@ -5534,7 +6188,7 @@ def AppendNullBytes(indata=None, delimiter=__file_format_dict__['format_delimite
 def _hex_lower(n):
 return format(int(n), 'x').lower()

-def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__):
+def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__, saltkey=None):
 """
 Build and write the archive file header.
 Returns the same file-like 'fp' on success, or False on failure.
@@ -5598,18 +6252,21 @@ def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksu
 tmpoutlist.append(fjsonsize)
 if(len(jsondata) > 0):
 tmpoutlist.append(checksumtype[1])
-tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[1], True, formatspecs))
+tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[1], True, formatspecs, saltkey))
 else:
 tmpoutlist.append("none")
-tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs))
+tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs, saltkey))
 # Preserve your original "tmpoutlen" computation exactly
 tmpoutlist.append(extrasizelen)
 tmpoutlist.append(extrafields)
-tmpoutlen =
+tmpoutlen = 10 + len(tmpoutlist) + len(xlist)
 tmpoutlenhex = _hex_lower(tmpoutlen)
-
+if(hasattr(time, "time_ns")):
+fctime = format(int(time.time_ns()), 'x').lower()
+else:
+fctime = format(int(to_ns(time.time())), 'x').lower()
 # Serialize the first group
-fnumfilesa = AppendNullBytes([tmpoutlenhex, fencoding, platform.system(), py_implementation, __program_name__, fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
+fnumfilesa = AppendNullBytes([tmpoutlenhex, fctime, fctime, fencoding, platform.system(), py_implementation, __program_name__+str(__version_info__[0]), fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
 # Append tmpoutlist
 fnumfilesa += AppendNullBytes(tmpoutlist, delimiter)
 # Append extradata items if any
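AppendFileHeader now stamps the archive header with two hex nanosecond timestamps (the same fctime value is written twice, presumably creation and modification time) and appends the major version to the program name. time.time_ns() only exists on Python 3.7+, hence the fallback through the module's to_ns helper; to_ns's body is not in this diff, so the sketch below approximates it as a seconds-to-nanoseconds scale.

import time

def now_ns_hex():
    # Current time as lowercase hex nanoseconds, with a pre-3.7
    # fallback; assumes to_ns scales seconds to nanoseconds.
    if hasattr(time, "time_ns"):
        ns = time.time_ns()
    else:
        ns = int(time.time() * 1000000000)
    return format(ns, 'x').lower()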
@@ -5619,7 +6276,7 @@ def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksu
 fnumfilesa += AppendNullByte(checksumtype[0], delimiter)

 # 5) inner checksum over fnumfilesa
-outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs)
+outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs, saltkey)
 tmpfileoutstr = fnumfilesa + AppendNullByte(outfileheadercshex, delimiter)

 # 6) size of (tmpfileoutstr) excluding one delimiter, per your original math
@@ -5632,7 +6289,7 @@ def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksu
 + fnumfilesa
 )

-outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs)
+outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs, saltkey)
 fnumfilesa += AppendNullByte(outfileheadercshex, delimiter)

 # 8) final total size field (again per your original logic)
@@ -5665,21 +6322,21 @@ def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksu
 return fp


-def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__):
+def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None):
 if(IsNestedDict(formatspecs) and fmttype in formatspecs):
 formatspecs = formatspecs[fmttype]
 elif(IsNestedDict(formatspecs) and fmttype not in formatspecs):
 fmttype = __file_format_default__
 formatspecs = formatspecs[fmttype]
-AppendFileHeader(fp, 0, "UTF-8", [], {}, checksumtype, formatspecs)
+AppendFileHeader(fp, 0, "UTF-8", [], {}, checksumtype, formatspecs, saltkey)
 return fp


-def MakeEmptyArchiveFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__):
-return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)
+def MakeEmptyArchiveFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None):
+return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs, saltkey)


-def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, returnfp=False):
+def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, returnfp=False):
 if(IsNestedDict(formatspecs) and fmttype=="auto" and
 (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
 get_in_ext = os.path.splitext(outfile)
@@ -5709,6 +6366,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
 fp = MkTempFile()
 elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
 fp = outfile
+return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs, saltkey)
 elif(re.findall(__upload_proto_support__, outfile)):
 fp = MkTempFile()
 else:
@@ -5720,7 +6378,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
 fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
 except PermissionError:
 return False
-AppendFileHeader(fp, 0, "UTF-8", [], {}, checksumtype, formatspecs)
+AppendFileHeader(fp, 0, "UTF-8", ['hello', 'goodbye'], {}, checksumtype, formatspecs, saltkey)
 if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
 fp = CompressOpenFileAlt(
 fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -5751,11 +6409,11 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
 return True


-def MakeEmptyArchiveFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__, returnfp=False):
-return MakeEmptyFile(outfile, "auto", compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, returnfp)
+def MakeEmptyArchiveFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, returnfp=False):
+return MakeEmptyFile(outfile, "auto", compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, returnfp)


-def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, filecontent="", checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__):
+def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, filecontent="", checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None):
 if(not hasattr(fp, "write")):
 return False
 if (isinstance(extradata, dict) or IsNestedDictAlt(extradata)) and len(extradata) > 0:
@@ -5787,10 +6445,10 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
 tmpoutlist.append(fjsonsize)
 if(len(jsondata) > 0):
 tmpoutlist.append(checksumtype[2])
-tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[2], True, formatspecs))
+tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[2], True, formatspecs, saltkey))
 else:
 tmpoutlist.append("none")
-tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs))
+tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs, saltkey))
 tmpoutlist.append(extrasizelen)
 tmpoutlist.append(extrafields)
 outfileoutstr = AppendNullBytes(
@@ -5805,22 +6463,18 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
 outfileoutstr = outfileoutstr + \
 AppendNullBytes(checksumlist, formatspecs['format_delimiter'])
 nullstrecd = formatspecs['format_delimiter'].encode('UTF-8')
-outfileheadercshex = GetFileChecksum(
-outfileoutstr, checksumtype[0], True, formatspecs)
+outfileheadercshex = GetFileChecksum(outfileoutstr, checksumtype[0], True, formatspecs, saltkey)
 if(len(filecontent) == 0):
-outfilecontentcshex = GetFileChecksum(
-filecontent, "none", False, formatspecs)
+outfilecontentcshex = GetFileChecksum(filecontent, "none", False, formatspecs, saltkey)
 else:
-outfilecontentcshex = GetFileChecksum(
-filecontent, checksumtype[1], False, formatspecs)
+outfilecontentcshex = GetFileChecksum(filecontent, checksumtype[1], False, formatspecs, saltkey)
 tmpfileoutstr = outfileoutstr + \
 AppendNullBytes([outfileheadercshex, outfilecontentcshex],
 formatspecs['format_delimiter'])
 formheaersize = format(int(len(tmpfileoutstr) - len(formatspecs['format_delimiter'])), 'x').lower()
 outfileoutstr = AppendNullByte(
 formheaersize, formatspecs['format_delimiter']) + outfileoutstr
-outfileheadercshex = GetFileChecksum(
-outfileoutstr, checksumtype[0], True, formatspecs)
+outfileheadercshex = GetFileChecksum(outfileoutstr, checksumtype[0], True, formatspecs, saltkey)
 outfileoutstr = outfileoutstr + \
 AppendNullBytes([outfileheadercshex, outfilecontentcshex],
 formatspecs['format_delimiter'])
@@ -5838,14 +6492,9 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
 pass
 return fp

-def
-
-
-advancedlist = formatspecs['use_advanced_list']
-altinode = formatspecs['use_alt_inode']
-if(verbose):
-logging.basicConfig(format="%(message)s",
-stream=PY_STDOUT_TEXT, level=logging.DEBUG)
+def AppendFilesWithContentToList(infiles, dirlistfromtxt=False, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+advancedlist = __use_advanced_list__
+altinode = __use_alt_inode__
 infilelist = []
 if(infiles == "-"):
 for line in PY_STDIN_TEXT:
@@ -5885,16 +6534,8 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
 inodetofile = {}
 filetoinode = {}
 inodetoforminode = {}
-numfiles = int(len(GetDirList))
-fnumfiles = format(numfiles, 'x').lower()
-AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
-try:
-fp.flush()
-if(hasattr(os, "sync")):
-os.fsync(fp.fileno())
-except (io.UnsupportedOperation, AttributeError, OSError):
-pass
 FullSizeFilesAlt = 0
+tmpoutlist = []
 for curfname in GetDirList:
 fencoding = "UTF-8"
 if(re.findall("^[.|/]", curfname)):
@@ -5916,14 +6557,24 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  FullSizeFilesAlt += fstatinfo.st_rsize
  except AttributeError:
  FullSizeFilesAlt += fstatinfo.st_size
+ fblksize = 0
+ if(hasattr(fstatinfo, "st_blksize")):
+ fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
+ fblocks = 0
+ if(hasattr(fstatinfo, "st_blocks")):
+ fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
+ fflags = 0
+ if(hasattr(fstatinfo, "st_flags")):
+ fflags = format(int(fstatinfo.st_flags), 'x').lower()
  ftype = 0
- if(hasattr(os.path, "isjunction") and os.path.isjunction(fname)):
+ if(not followlink and hasattr(os.path, "isjunction") and os.path.isjunction(fname)):
  ftype = 13
- elif(hasattr(fstatinfo, "st_blocks") and fstatinfo.st_blocks * 512 < fstatinfo.st_size):
- ftype = 12
  elif(stat.S_ISREG(fpremode)):
-
-
+ if(hasattr(fstatinfo, "st_blocks") and fstatinfo.st_size > 0 and fstatinfo.st_blocks * 512 < fstatinfo.st_size):
+ ftype = 12
+ else:
+ ftype = 0
+ elif(not followlink and stat.S_ISLNK(fpremode)):
  ftype = 2
  elif(stat.S_ISCHR(fpremode)):
  ftype = 3
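The sparse-file branch above is both gated on followlink and tightened: a regular file is only marked sparse (ftype 12) when it is non-empty and its allocated blocks (st_blocks counts 512-byte units) cover less than its logical size, which keeps empty files from being misclassified. The same heuristic in isolation, as a sketch (is_sparse is an illustrative name):

    import os
    import stat

    def is_sparse(path):
        # A non-empty regular file whose allocated 512-byte blocks cover
        # less than its logical size is treated as sparse.
        st = os.lstat(path)
        if not stat.S_ISREG(st.st_mode) or st.st_size <= 0:
            return False
        return hasattr(st, "st_blocks") and st.st_blocks * 512 < st.st_size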
@@ -5945,43 +6596,42 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  ftype = 0
  flinkname = ""
  fcurfid = format(int(curfid), 'x').lower()
- if
+ if(not followlink and finode != 0):
  unique_id = (fstatinfo.st_dev, finode)
- if
- if
+ if(ftype != 1):
+ if(unique_id in inodetofile):
  # Hard link detected
  ftype = 1
  flinkname = inodetofile[unique_id]
- if altinode:
- fcurinode = format(int(unique_id[1]), 'x').lower()
- else:
- fcurinode = format(int(inodetoforminode[unique_id]), 'x').lower()
  else:
- #
- inodelist.append(unique_id)
+ # First time seeing this inode
  inodetofile[unique_id] = fname
+ if(unique_id not in inodetoforminode):
  inodetoforminode[unique_id] = curinode
-
-
-
-
-
+ curinode = curinode + 1
+ if(altinode):
+ # altinode == True → use real inode number
+ fcurinode = format(int(unique_id[1]), 'x').lower()
+ else:
+ # altinode == False → use synthetic inode id
+ fcurinode = format(int(inodetoforminode[unique_id]), 'x').lower()
  else:
  # Handle cases where inodes are not supported or symlinks are followed
  fcurinode = format(int(curinode), 'x').lower()
- curinode
+ curinode = curinode + 1
  curfid = curfid + 1
  if(ftype == 2):
  flinkname = os.readlink(fname)
- if(not os.path.exists(
+ if(not os.path.exists(fname)):
  return False
  try:
  fdev = fstatinfo.st_rdev
  except AttributeError:
  fdev = 0
-
-
-
+ try:
+ frdev = fstatinfo.st_rdev
+ except AttributeError:
+ frdev = 0
  # Types that should be considered zero-length in the archive context:
  zero_length_types = {1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 13}
  # Types that have actual data to read:
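The rewritten branch above keys hard-link detection on the (st_dev, st_ino) pair: the first path seen for a pair is recorded in inodetofile, and any later path with the same pair becomes a type-1 entry pointing at the recorded name, with either the real inode number (altinode) or a synthetic counter used as the stored inode id. A minimal sketch of the same bookkeeping (helper name and return shape are illustrative):

    import os

    def classify_hardlinks(paths):
        seen = {}  # (st_dev, st_ino) -> first path, like inodetofile above
        result = []
        for path in paths:
            st = os.lstat(path)
            key = (st.st_dev, st.st_ino)
            if st.st_ino != 0 and key in seen:
                result.append((path, "hardlink", seen[key]))
            else:
                seen.setdefault(key, path)
                result.append((path, "file", None))
        return result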
@@ -5992,13 +6642,28 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  fsize = format(int(fstatinfo.st_size), 'x').lower()
  else:
  fsize = format(int(fstatinfo.st_size), 'x').lower()
-
-
-
+ if(hasattr(fstatinfo, "st_atime_ns")):
+ fatime = format(int(fstatinfo.st_atime_ns), 'x').lower()
+ else:
+ fatime = format(int(to_ns(fstatinfo.st_atime)), 'x').lower()
+ if(hasattr(fstatinfo, "st_mtime_ns")):
+ fmtime = format(int(fstatinfo.st_mtime_ns), 'x').lower()
+ else:
+ fmtime = format(int(to_ns(fstatinfo.st_mtime)), 'x').lower()
+ if(hasattr(fstatinfo, "st_ctime_ns")):
+ fctime = format(int(fstatinfo.st_ctime_ns), 'x').lower()
+ else:
+ fctime = format(int(to_ns(fstatinfo.st_ctime)), 'x').lower()
  if(hasattr(fstatinfo, "st_birthtime")):
-
+ if(hasattr(fstatinfo, "st_birthtime_ns")):
+ fbtime = format(int(fstatinfo.st_birthtime_ns), 'x').lower()
+ else:
+ fbtime = format(int(to_ns(fstatinfo.st_birthtime)), 'x').lower()
  else:
-
+ if(hasattr(fstatinfo, "st_ctime_ns")):
+ fbtime = format(int(fstatinfo.st_ctime_ns), 'x').lower()
+ else:
+ fbtime = format(int(to_ns(fstatinfo.st_ctime)), 'x').lower()
  fmode = format(int(fstatinfo.st_mode), 'x').lower()
  fchmode = format(int(stat.S_IMODE(fstatinfo.st_mode)), 'x').lower()
  ftypemod = format(int(stat.S_IFMT(fstatinfo.st_mode)), 'x').lower()
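All four timestamps are now stored in nanoseconds: the exact st_*_ns fields are preferred when the stat result has them, with a to_ns() conversion of the float-seconds fields as the fallback (and st_ctime standing in for birth time where st_birthtime is missing). to_ns() is defined elsewhere in the module; a plausible shape for the pattern, as a sketch:

    def to_ns(seconds):
        # Scale a seconds timestamp (int or float) to integer nanoseconds.
        return int(round(seconds * 1000000000))

    def stat_mtime_ns(st):
        # Prefer the exact nanosecond field when the platform provides it.
        return st.st_mtime_ns if hasattr(st, "st_mtime_ns") else to_ns(st.st_mtime)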
@@ -6025,8 +6690,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  except ImportError:
  fgname = ""
  fdev = format(int(fdev), 'x').lower()
-
- fdev_major = format(int(fdev_major), 'x').lower()
+ frdev = format(int(frdev), 'x').lower()
  finode = format(int(finode), 'x').lower()
  flinkcount = format(int(flinkcount), 'x').lower()
  if(hasattr(fstatinfo, "st_file_attributes")):
@@ -6046,7 +6710,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
  fcontents.seek(0, 0)
  if(typechecktest is not False):
- typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=
+ typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
  fcontents.seek(0, 0)
  fcencoding = GetFileEncoding(fcontents, 0, False)[0]
  if(typechecktest is False and not compresswholefile):
@@ -6087,16 +6751,15 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  fcompression = curcompression
  fcontents.close()
  fcontents = cfcontents
- elif followlink and (ftype ==
- if(not os.path.exists(
+ elif followlink and (ftype == 2 or ftype in data_types):
+ if(not os.path.exists(fname)):
  return False
- flstatinfo = os.stat(flinkname)
  with open(flinkname, "rb") as fpc:
  shutil.copyfileobj(fpc, fcontents, length=__filebuff_size__)
  typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
  fcontents.seek(0, 0)
  if(typechecktest is not False):
- typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=
+ typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
  fcontents.seek(0, 0)
  fcencoding = GetFileEncoding(fcontents, 0, False)[0]
  if(typechecktest is False and not compresswholefile):
@@ -6140,11 +6803,29 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  if(fcompression == "none"):
  fcompression = ""
  fcontents.seek(0, 0)
+ if(not contentasfile):
+ fcontents = fcontents.read()
  ftypehex = format(ftype, 'x').lower()
- tmpoutlist
-
-
-
+ tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+ return tmpoutlist
+
+ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+ GetDirList = AppendFilesWithContentToList(infiles, dirlistfromtxt, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, followlink, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+ if(not hasattr(fp, "write")):
+ return False
+ numfiles = int(len(GetDirList))
+ fnumfiles = format(numfiles, 'x').lower()
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+ try:
+ fp.flush()
+ if(hasattr(os, "sync")):
+ os.fsync(fp.fileno())
+ except (io.UnsupportedOperation, AttributeError, OSError):
+ pass
+ for curfname in GetDirList:
+ tmpoutlist = curfname['fheaders']
+ AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
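This hunk lands the central refactor of the release: the directory walk now lives in AppendFilesWithContentToList(), which returns one dict per entry (header fields under 'fheaders', plus extra data, JSON metadata, contents, and the per-part checksum types), while AppendFilesWithContent() just writes the archive header and replays each dict through AppendFileHeaderWithContent(). A usage sketch (paths and the inspected index are illustrative; index 3 of 'fheaders' is the stored file name):

    # Inspect what would be archived without writing anything.
    entries = AppendFilesWithContentToList(["somedir"])
    for entry in entries:
        print(entry['fheaders'][3])

    # Write the same tree out as an archive stream.
    with open("output.bin", "wb") as fp:
        AppendFilesWithContent(["somedir"], fp)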
@@ -6153,12 +6834,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  pass
  return fp

- def
- if(not hasattr(fp, "write")):
- return False
- if(verbose):
- logging.basicConfig(format="%(message)s",
- stream=PY_STDOUT_TEXT, level=logging.DEBUG)
+ def AppendFilesWithContentFromTarFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  curinode = 0
  curfid = 0
  inodelist = []
@@ -6200,10 +6876,8 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
  formatspecs = formatspecs[compresscheck]
  if(compresscheck=="zstd"):
- if '
- infile = ZstdFile(
- elif 'pyzstd' in sys.modules:
- infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+ if 'zstd' in compressionsupport:
+ infile = zstd.ZstdFile(infile, mode="rb")
  tarfp = tarfile.open(fileobj=infile, mode="r")
  else:
  tarfp = tarfile.open(fileobj=infile, mode="r")
@@ -6212,23 +6886,14 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
  formatspecs = formatspecs[compresscheck]
  if(compresscheck=="zstd"):
- if '
- infile = ZstdFile(
- elif 'pyzstd' in sys.modules:
- infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+ if 'zstd' in compressionsupport:
+ infile = zstd.ZstdFile(infile, mode="rb")
  tarfp = tarfile.open(fileobj=infile, mode="r")
  else:
  tarfp = tarfile.open(infile, "r")
  except FileNotFoundError:
  return False
-
- AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
- try:
- fp.flush()
- if(hasattr(os, "sync")):
- os.fsync(fp.fileno())
- except (io.UnsupportedOperation, AttributeError, OSError):
- pass
+ tmpoutlist = []
  for member in sorted(tarfp.getmembers(), key=lambda x: x.name):
  fencoding = "UTF-8"
  if(re.findall("^[.|/]", member.name)):
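Both tar-opening paths now funnel Zstandard input through a single zstd.ZstdFile wrapper, gated on 'zstd' in compressionsupport, instead of probing sys.modules for pyzstd. The shape of the pattern, as a self-contained sketch (the zstd module is passed in to keep the example import-free):

    import tarfile

    def open_tar_maybe_zstd(fileobj, compresscheck, compressionsupport, zstd):
        # Wrap the raw stream only when zstd support is available; tarfile
        # then reads the decompressed stream like any other file object.
        if compresscheck == "zstd" and 'zstd' in compressionsupport:
            fileobj = zstd.ZstdFile(fileobj, mode="rb")
        return tarfile.open(fileobj=fileobj, mode="r")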
@@ -6240,6 +6905,9 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  fpremode = member.mode
  ffullmode = member.mode
  flinkcount = 0
+ fblksize = 0
+ fblocks = 0
+ fflags = 0
  ftype = 0
  if(member.isreg()):
  ffullmode = member.mode + stat.S_IFREG
@@ -6277,12 +6945,11 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  curfid = curfid + 1
  if(ftype == 2):
  flinkname = member.linkname
+ fdev = format(int("0"), 'x').lower()
  try:
-
+ frdev = format(int(os.makedev(member.devmajor, member.devminor)), 'x').lower()
  except AttributeError:
-
- fdev_minor = format(int(member.devminor), 'x').lower()
- fdev_major = format(int(member.devmajor), 'x').lower()
+ frdev = format(int(MakeDevAlt(member.devmajor, member.devminor)), 'x').lower()
  # Types that should be considered zero-length in the archive context:
  zero_length_types = {1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 13}
  # Types that have actual data to read:
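Tar members used to be written with separate devminor/devmajor fields; they are now packed into a single frdev value with os.makedev(), falling back to the module's MakeDevAlt() helper where os.makedev is unavailable. For reference, a compatible packing sketch (makedev_compat is an illustrative name; the fallback encoding is a simplified historical one, not necessarily what MakeDevAlt does):

    import os

    def makedev_compat(major, minor):
        try:
            return os.makedev(major, minor)  # platform-correct packing
        except AttributeError:
            return (major << 8) | minor      # simplified 16-bit fallback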
@@ -6293,10 +6960,10 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  fsize = format(int(member.size), 'x').lower()
  else:
  fsize = format(int(member.size), 'x').lower()
- fatime = format(int(member.mtime), 'x').lower()
- fmtime = format(int(member.mtime), 'x').lower()
- fctime = format(int(member.mtime), 'x').lower()
- fbtime = format(int(member.mtime), 'x').lower()
+ fatime = format(int(to_ns(member.mtime)), 'x').lower()
+ fmtime = format(int(to_ns(member.mtime)), 'x').lower()
+ fctime = format(int(to_ns(member.mtime)), 'x').lower()
+ fbtime = format(int(to_ns(member.mtime)), 'x').lower()
  fmode = format(int(ffullmode), 'x').lower()
  fchmode = format(int(stat.S_IMODE(ffullmode)), 'x').lower()
  ftypemod = format(int(stat.S_IFMT(ffullmode)), 'x').lower()
@@ -6318,7 +6985,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
  fcontents.seek(0, 0)
  if(typechecktest is not False):
- typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=
+ typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
  fcontents.seek(0, 0)
  fcencoding = GetFileEncoding(fcontents, 0, False)[0]
  if(typechecktest is False and not compresswholefile):
@@ -6362,26 +7029,38 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  if(fcompression == "none"):
  fcompression = ""
  fcontents.seek(0, 0)
+ if(not contentasfile):
+ fcontents = fcontents.read()
  ftypehex = format(ftype, 'x').lower()
- tmpoutlist
-
-
-
+ tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+ return tmpoutlist
+
+ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+ if(not hasattr(fp, "write")):
+ return False
+ GetDirList = AppendFilesWithContentFromTarFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+ numfiles = int(len(GetDirList))
+ fnumfiles = format(numfiles, 'x').lower()
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+ try:
+ fp.flush()
+ if(hasattr(os, "sync")):
+ os.fsync(fp.fileno())
+ except (io.UnsupportedOperation, AttributeError, OSError):
+ pass
+ for curfname in GetDirList:
+ tmpoutlist = curfname['fheaders']
+ AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
  except (io.UnsupportedOperation, AttributeError, OSError):
  pass
- fcontents.close()
  return fp

- def
- if(not hasattr(fp, "write")):
- return False
- if(verbose):
- logging.basicConfig(format="%(message)s",
- stream=PY_STDOUT_TEXT, level=logging.DEBUG)
+ def AppendFilesWithContentFromZipFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  curinode = 0
  curfid = 0
  inodelist = []
@@ -6414,14 +7093,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  ziptest = zipfp.testzip()
  if(ziptest):
  VerbosePrintOut("Bad file found!")
-
- AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
- try:
- fp.flush()
- if(hasattr(os, "sync")):
- os.fsync(fp.fileno())
- except (io.UnsupportedOperation, AttributeError, OSError):
- pass
+ tmpoutlist = []
  for member in sorted(zipfp.infolist(), key=lambda x: x.filename):
  fencoding = "UTF-8"
  if(re.findall("^[.|/]", member.filename)):
@@ -6436,6 +7108,9 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  else:
  fpremode = int(stat.S_IFREG | 0x1b6)
  flinkcount = 0
+ fblksize = 0
+ fblocks = 0
+ fflags = 0
  ftype = 0
  if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
  ftype = 5
@@ -6446,8 +7121,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  fcurinode = format(int(curfid), 'x').lower()
  curfid = curfid + 1
  fdev = format(int(0), 'x').lower()
-
- fdev_major = format(int(0), 'x').lower()
+ frdev = format(int(0), 'x').lower()
  if(ftype == 5):
  fsize = format(int("0"), 'x').lower()
  elif(ftype == 0):
@@ -6455,13 +7129,13 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  else:
  fsize = format(int(member.file_size), 'x').lower()
  fatime = format(
- int(time.mktime(member.date_time + (0, 0, -1))), 'x').lower()
+ int(to_ns(time.mktime(member.date_time + (0, 0, -1)))), 'x').lower()
  fmtime = format(
- int(time.mktime(member.date_time + (0, 0, -1))), 'x').lower()
+ int(to_ns(time.mktime(member.date_time + (0, 0, -1)))), 'x').lower()
  fctime = format(
- int(time.mktime(member.date_time + (0, 0, -1))), 'x').lower()
+ int(to_ns(time.mktime(member.date_time + (0, 0, -1)))), 'x').lower()
  fbtime = format(
- int(time.mktime(member.date_time + (0, 0, -1))), 'x').lower()
+ int(to_ns(time.mktime(member.date_time + (0, 0, -1)))), 'x').lower()
  if(zipinfo.create_system == 0 or zipinfo.create_system == 10):
  fwinattributes = format(int(zipinfo.external_attr & 0xFFFF), 'x').lower()
  if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
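ZIP entries carry a single DOS date/time tuple, so all four stored timestamps derive from member.date_time; the change wraps the time.mktime() result in to_ns() so ZIP sources land on the same nanosecond scale as stat-based entries. The conversion in isolation, as a sketch:

    import time

    def zip_time_ns(date_time):
        # ZipInfo.date_time is (year, month, day, hour, minute, second);
        # pad it to a 9-tuple with unknown weekday/yearday and DST=-1,
        # then scale the epoch seconds to nanoseconds.
        return int(time.mktime(date_time + (0, 0, -1))) * 1000000000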
@@ -6576,30 +7250,44 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  if(fcompression == "none"):
  fcompression = ""
  fcontents.seek(0, 0)
+ if(not contentasfile):
+ fcontents = fcontents.read()
  ftypehex = format(ftype, 'x').lower()
- tmpoutlist
-
-
-
+ tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+ return tmpoutlist
+
+ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+ if(not hasattr(fp, "write")):
+ return False
+ GetDirList = AppendFilesWithContentFromZipFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+ numfiles = int(len(GetDirList))
+ fnumfiles = format(numfiles, 'x').lower()
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+ try:
+ fp.flush()
+ if(hasattr(os, "sync")):
+ os.fsync(fp.fileno())
+ except (io.UnsupportedOperation, AttributeError, OSError):
+ pass
+ for curfname in GetDirList:
+ tmpoutlist = curfname['fheaders']
+ AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
  except (io.UnsupportedOperation, AttributeError, OSError):
  pass
- fcontents.close()
  return fp

  if(not rarfile_support):
- def
+ def AppendFilesWithContentFromRarFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  return False
-
-
-
-
- if(verbose):
- logging.basicConfig(format="%(message)s",
- stream=PY_STDOUT_TEXT, level=logging.DEBUG)
+ def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+ return False
+ else:
+ def AppendFilesWithContentFromRarFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  curinode = 0
  curfid = 0
  inodelist = []
@@ -6614,20 +7302,7 @@ else:
  rartest = rarfp.testrar()
  if(rartest):
  VerbosePrintOut("Bad file found!")
-
- AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
- try:
- fp.flush()
- if(hasattr(os, "sync")):
- os.fsync(fp.fileno())
- except (io.UnsupportedOperation, AttributeError, OSError):
- pass
- try:
- fp.flush()
- if(hasattr(os, "sync")):
- os.fsync(fp.fileno())
- except (io.UnsupportedOperation, AttributeError, OSError):
- pass
+ tmpoutlist = []
  for member in sorted(rarfp.infolist(), key=lambda x: x.filename):
  is_unix = False
  is_windows = False
@@ -6671,6 +7346,9 @@ else:
  fcompression = ""
  fcsize = format(int(0), 'x').lower()
  flinkcount = 0
+ fblksize = 0
+ fblocks = 0
+ fflags = 0
  ftype = 0
  if(member.is_file()):
  ftype = 0
@@ -6685,8 +7363,7 @@
  fcurinode = format(int(curfid), 'x').lower()
  curfid = curfid + 1
  fdev = format(int(0), 'x').lower()
-
- fdev_major = format(int(0), 'x').lower()
+ frdev = format(int(0), 'x').lower()
  if(ftype == 5):
  fsize = format(int("0"), 'x').lower()
  elif(ftype == 0):
@@ -6695,20 +7372,20 @@ else:
  fsize = format(int(member.file_size), 'x').lower()
  try:
  if(member.atime):
- fatime = format(int(member.atime.timestamp()), 'x').lower()
+ fatime = format(int(to_ns(member.atime.timestamp())), 'x').lower()
  else:
- fatime = format(int(member.mtime.timestamp()), 'x').lower()
+ fatime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
  except AttributeError:
- fatime = format(int(member.mtime.timestamp()), 'x').lower()
- fmtime = format(int(member.mtime.timestamp()), 'x').lower()
+ fatime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
+ fmtime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
  try:
  if(member.ctime):
- fctime = format(int(member.ctime.timestamp()), 'x').lower()
+ fctime = format(int(to_ns(member.ctime.timestamp())), 'x').lower()
  else:
- fctime = format(int(member.mtime.timestamp()), 'x').lower()
+ fctime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
  except AttributeError:
- fctime = format(int(member.mtime.timestamp()), 'x').lower()
- fbtime = format(int(member.mtime.timestamp()), 'x').lower()
+ fctime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
+ fbtime = format(int(to_ns(member.mtime.timestamp())), 'x').lower()
  if(is_unix and member.external_attr != 0):
  fmode = format(int(member.external_attr), 'x').lower()
  fchmode = format(
@@ -6809,30 +7486,84 @@ else:
  if(fcompression == "none"):
  fcompression = ""
  fcontents.seek(0, 0)
+ if(not contentasfile):
+ fcontents = fcontents.read()
  ftypehex = format(ftype, 'x').lower()
- tmpoutlist
-
-
-
+ tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+ return tmpoutlist
+ def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+ if(not hasattr(fp, "write")):
+ return False
+ GetDirList = AppendFilesWithContentFromRarFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+ numfiles = int(len(GetDirList))
+ fnumfiles = format(numfiles, 'x').lower()
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+ try:
+ fp.flush()
+ if(hasattr(os, "sync")):
+ os.fsync(fp.fileno())
+ except (io.UnsupportedOperation, AttributeError, OSError):
+ pass
+ for curfname in GetDirList:
+ tmpoutlist = curfname['fheaders']
+ AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
  except (io.UnsupportedOperation, AttributeError, OSError):
  pass
- fcontents.close()
  return fp

  if(not py7zr_support):
- def
+ def sevenzip_readall(infile, **kwargs):
  return False
  else:
-
-
-
-
-
-
+ class _MemoryIO(py7zr.Py7zIO):
+ """In-memory file object used by py7zr's factory API."""
+ def __init__(self):
+ self._buf = bytearray()
+ def write(self, data):
+ # py7zr will call this repeatedly with chunks
+ self._buf.extend(data)
+ def read(self, size=None):
+ if size is None:
+ return bytes(self._buf)
+ return bytes(self._buf[:size])
+ def seek(self, offset, whence=0):
+ # we don't really need seeking for your use case
+ return 0
+ def flush(self):
+ pass
+ def size(self):
+ return len(self._buf)
+ class _MemoryFactory(py7zr.WriterFactory):
+ """Factory that creates _MemoryIO objects and keeps them by filename."""
+ def __init__(self):
+ self.files = {}
+ def create(self, filename: str) -> py7zr.Py7zIO:
+ io_obj = _MemoryIO()
+ self.files[filename] = io_obj
+ return io_obj
+ def sevenzip_readall(infile, **kwargs):
+ """
+ Replacement for SevenZipFile.readall() using the new py7zr API.
+
+ Returns: dict[filename -> _MemoryIO]
+ """
+ factory = _MemoryFactory()
+ with py7zr.SevenZipFile(infile, mode="r", **kwargs) as archive:
+ archive.extractall(factory=factory)
+ return factory.files
+
+ if(not py7zr_support):
+ def AppendFilesWithContentFromSevenZipFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+ return False
+ def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+ return False
+ else:
+ def AppendFilesWithContentFromSevenZipFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  formver = formatspecs['format_ver']
  fileheaderver = str(int(formver.replace(".", "")))
  curinode = 0
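Newer py7zr releases drop SevenZipFile.readall(), so the block above supplies a replacement built on py7zr's factory-based extraction API: _MemoryIO buffers each member in memory, _MemoryFactory hands one out per filename, and sevenzip_readall() returns the same filename-to-file-object mapping the old call produced. The reader tries the old API first and falls back, e.g. (archive path and member name are illustrative):

    import py7zr

    szpfp = py7zr.SevenZipFile("example.7z", mode="r")
    try:
        file_content = szpfp.readall()                 # older py7zr API
    except AttributeError:
        file_content = sevenzip_readall("example.7z")  # fallback defined above
    data = file_content["docs/readme.txt"].read()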
@@ -6844,19 +7575,15 @@ else:
  if(not os.path.exists(infile) or not os.path.isfile(infile)):
  return False
  szpfp = py7zr.SevenZipFile(infile, mode="r")
-
+ try:
+ file_content = szpfp.readall()
+ except AttributeError:
+ file_content = sevenzip_readall(infile)
  #sztest = szpfp.testzip()
  sztestalt = szpfp.test()
  if(sztestalt):
  VerbosePrintOut("Bad file found!")
-
- AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
- try:
- fp.flush()
- if(hasattr(os, "sync")):
- os.fsync(fp.fileno())
- except (io.UnsupportedOperation, AttributeError, OSError):
- pass
+ tmpoutlist = []
  for member in sorted(szpfp.list(), key=lambda x: x.filename):
  fencoding = "UTF-8"
  if(re.findall("^[.|/]", member.filename)):
@@ -6873,6 +7600,9 @@ else:
  fcompression = ""
  fcsize = format(int(0), 'x').lower()
  flinkcount = 0
+ fblksize = 0
+ fblocks = 0
+ fflags = 0
  ftype = 0
  if(member.is_directory):
  ftype = 5
@@ -6883,14 +7613,13 @@ else:
  fcurinode = format(int(curfid), 'x').lower()
  curfid = curfid + 1
  fdev = format(int(0), 'x').lower()
-
- fdev_major = format(int(0), 'x').lower()
+ frdev = format(int(0), 'x').lower()
  if(ftype == 5):
  fsize = format(int("0"), 'x').lower()
- fatime = format(int(member.creationtime.timestamp()), 'x').lower()
- fmtime = format(int(member.creationtime.timestamp()), 'x').lower()
- fctime = format(int(member.creationtime.timestamp()), 'x').lower()
- fbtime = format(int(member.creationtime.timestamp()), 'x').lower()
+ fatime = format(int(to_ns(member.creationtime.timestamp())), 'x').lower()
+ fmtime = format(int(to_ns(member.creationtime.timestamp())), 'x').lower()
+ fctime = format(int(to_ns(member.creationtime.timestamp())), 'x').lower()
+ fbtime = format(int(to_ns(member.creationtime.timestamp())), 'x').lower()
  if(member.is_directory):
  fmode = format(int(stat.S_IFDIR | 0x1ff), 'x').lower()
  fchmode = format(
@@ -6940,7 +7669,10 @@ else:
  typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
  fcontents.seek(0, 0)
  fcencoding = GetFileEncoding(fcontents, 0, False)[0]
-
+ try:
+ file_content[member.filename].close()
+ except AttributeError:
+ pass
  if(typechecktest is False and not compresswholefile):
  fcontents.seek(0, 2)
  ucfsize = fcontents.tell()
@@ -6982,25 +7714,39 @@ else:
  if(fcompression == "none"):
  fcompression = ""
  fcontents.seek(0, 0)
+ if(not contentasfile):
+ fcontents = fcontents.read()
  ftypehex = format(ftype, 'x').lower()
- tmpoutlist
-
-
-
+ tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+ fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+ return tmpoutlist
+ def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+ if(not hasattr(fp, "write")):
+ return False
+ GetDirList = AppendFilesWithContentFromSevenZipFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+ numfiles = int(len(GetDirList))
+ fnumfiles = format(numfiles, 'x').lower()
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+ try:
+ fp.flush()
+ if(hasattr(os, "sync")):
+ os.fsync(fp.fileno())
+ except (io.UnsupportedOperation, AttributeError, OSError):
+ pass
+ for curfname in GetDirList:
+ tmpoutlist = curfname['fheaders']
+ AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
  except (io.UnsupportedOperation, AttributeError, OSError):
  pass
- fcontents.close()
  return fp

- def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
  if(not hasattr(fp, "write")):
  return False
- if(verbose):
- logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  GetDirList = inlist
  if(not GetDirList):
  return False
@@ -7012,7 +7758,7 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsond
  inodetoforminode = {}
  numfiles = int(len(GetDirList))
  fnumfiles = format(numfiles, 'x').lower()
- AppendFileHeader(fp, numfiles, "UTF-8", [], [checksumtype[0], checksumtype[1]], formatspecs)
+ AppendFileHeader(fp, numfiles, "UTF-8", [], [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
  for curfname in GetDirList:
  ftype = format(curfname[0], 'x').lower()
  fencoding = curfname[1]
@@ -7026,44 +7772,45 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsond
  fbasedir = os.path.dirname(fname)
  flinkname = curfname[4]
  fsize = format(curfname[5], 'x').lower()
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+ fblksize = format(curfname[6], 'x').lower()
+ fblocks = format(curfname[7], 'x').lower()
+ fflags = format(curfname[8], 'x').lower()
+ fatime = format(curfname[9], 'x').lower()
+ fmtime = format(curfname[10], 'x').lower()
+ fctime = format(curfname[11], 'x').lower()
+ fbtime = format(curfname[12], 'x').lower()
+ fmode = format(curfname[13], 'x').lower()
+ fwinattributes = format(curfname[14], 'x').lower()
+ fcompression = curfname[15]
+ fcsize = format(curfname[16], 'x').lower()
+ fuid = format(curfname[17], 'x').lower()
+ funame = curfname[18]
+ fgid = format(curfname[19], 'x').lower()
+ fgname = curfname[20]
+ fid = format(curfname[21], 'x').lower()
+ finode = format(curfname[22], 'x').lower()
+ flinkcount = format(curfname[23], 'x').lower()
+ fdev = format(curfname[24], 'x').lower()
+ frdev = format(curfname[25], 'x').lower()
+ fseeknextfile = curfname[26]
+ extradata = curfname[27]
+ fheaderchecksumtype = curfname[28]
+ fcontentchecksumtype = curfname[29]
+ fcontents = curfname[30]
  fencoding = GetFileEncoding(fcontents, 0, False)[0]
- tmpoutlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize,
- fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev,
+ tmpoutlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize,
+ fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev, frdev, fseeknextfile]
  fcontents.seek(0, 0)
- AppendFileHeaderWithContent(
- fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
+ AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, saltkey)
  return fp


- def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
- inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
- return AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, followlink, checksumtype, formatspecs, verbose)
+ def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+ inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs, saltkey, False)
+ return AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose)


- def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
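The per-entry record handled by AppendListsWithContent() grows from 27 to 31 fields: fblksize, fblocks, and fflags slot in after fsize, and frdev joins fdev, shifting every later index (the four time fields move from positions 6-9 to 9-12, and fcontents ends up at index 30). Spelling the positions out makes the remap above easier to audit; a namedtuple sketch (illustrative only, the module itself indexes plain lists):

    import collections

    ArchiveEntry = collections.namedtuple("ArchiveEntry", [
        "ftype", "fencoding", "fcencoding", "fname", "flinkname", "fsize",
        "fblksize", "fblocks", "fflags",                   # new in 0.27.0
        "fatime", "fmtime", "fctime", "fbtime",
        "fmode", "fwinattributes", "fcompression", "fcsize",
        "fuid", "funame", "fgid", "fgname",
        "fid", "finode", "flinkcount", "fdev", "frdev",    # frdev is new
        "fseeknextfile", "extradata",
        "fheaderchecksumtype", "fcontentchecksumtype", "fcontents",
    ])
    entry = ArchiveEntry(*curfname)  # curfname is the 31-item list above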
@@ -7107,8 +7854,7 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
  except PermissionError:
  return False
- AppendFilesWithContent(infiles, fp, dirlistfromtxt, extradata, jsondata, compression,
- compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, formatspecs, verbose)
+ AppendFilesWithContent(infiles, fp, dirlistfromtxt, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, formatspecs, saltkey, verbose)
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
  fp = CompressOpenFileAlt(
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7137,12 +7883,12 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
  fp.close()
  return True

- def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if not isinstance(infiles, list):
  infiles = [infiles]
  returnout = False
  for infileslist in infiles:
- returnout = AppendFilesWithContentToOutFile(infileslist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, True)
+ returnout = AppendFilesWithContentToOutFile(infileslist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose, True)
  if(not returnout):
  break
  else:
@@ -7152,7 +7898,7 @@ def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=Fals
  return True
  return returnout

- def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, saltkey=None, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7193,8 +7939,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
  except PermissionError:
  return False
- AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, compression,
- compresswholefile, compressionlevel, followlink, checksumtype, formatspecs, verbose)
+ AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, compression, compresswholefile, compressionlevel, followlink, checksumtype, formatspecs, saltkey, verbose)
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
  fp = CompressOpenFileAlt(
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7224,7 +7969,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
  fp.close()
  return True

- def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7266,8 +8011,7 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
  except PermissionError:
  return False
- AppendFilesWithContentFromTarFile(infiles, fp, extradata, jsondata, compression,
- compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, verbose)
+ AppendFilesWithContentFromTarFile(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, verbose)
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
  fp = CompressOpenFileAlt(
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7297,12 +8041,12 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
  fp.close()
  return True

- def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if not isinstance(infiles, list):
  infiles = [infiles]
  returnout = False
  for infileslist in infiles:
- returnout = AppendFilesWithContentFromTarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
+ returnout = AppendFilesWithContentFromTarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, True)
  if(not returnout):
  break
  else:
@@ -7312,7 +8056,7 @@ def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype=
  return True
  return returnout

- def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7354,8 +8098,7 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
  except PermissionError:
  return False
- AppendFilesWithContentFromZipFile(infiles, fp, extradata, jsondata, compression,
- compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, verbose)
+ AppendFilesWithContentFromZipFile(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, verbose)
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
  fp = CompressOpenFileAlt(
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7385,12 +8128,12 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
         fp.close()
     return True

-def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
     if not isinstance(infiles, list):
         infiles = [infiles]
     returnout = False
     for infileslist in infiles:
-        returnout = AppendFilesWithContentFromZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
+        returnout = AppendFilesWithContentFromZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, True)
         if(not returnout):
             break
     else:
@@ -7401,10 +8144,10 @@ def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype=
     return returnout

 if(not rarfile_support):
-    def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+    def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
         return False
 else:
-    def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+    def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
         if(IsNestedDict(formatspecs) and fmttype=="auto" and
                 (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
             get_in_ext = os.path.splitext(outfile)
@@ -7446,8 +8189,7 @@ else:
             fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
         except PermissionError:
             return False
-        AppendFilesWithContentFromRarFile(infiles, fp, extradata, jsondata, compression,
-                                          compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, verbose)
+        AppendFilesWithContentFromRarFile(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, verbose)
         if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
             fp = CompressOpenFileAlt(
                 fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7477,12 +8219,12 @@ else:
             fp.close()
         return True

-def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
     if not isinstance(infiles, list):
         infiles = [infiles]
     returnout = False
     for infileslist in infiles:
-        returnout = AppendFilesWithContentFromRarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
+        returnout = AppendFilesWithContentFromRarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, True)
         if(not returnout):
             break
     else:
@@ -7493,10 +8235,10 @@ def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype=
     return returnout

 if(not py7zr_support):
-    def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+    def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
         return False
 else:
-    def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+    def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
         if(IsNestedDict(formatspecs) and fmttype=="auto" and
                 (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
             get_in_ext = os.path.splitext(outfile)
@@ -7538,8 +8280,7 @@ else:
             fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
         except PermissionError:
             return False
-        AppendFilesWithContentFromSevenZipFile(infiles, fp, extradata, jsondata, compression,
-                                               compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, verbose)
+        AppendFilesWithContentFromSevenZipFile(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, saltkey, verbose)
         if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
             fp = CompressOpenFileAlt(
                 fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7569,12 +8310,12 @@ else:
             fp.close()
         return True

-def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
     if not isinstance(infiles, list):
         infiles = [infiles]
     returnout = False
     for infileslist in infiles:
-        returnout = AppendFilesWithContentFromSevenZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
+        returnout = AppendFilesWithContentFromSevenZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, True)
         if(not returnout):
             break
     else:
@@ -7584,9 +8325,9 @@ def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmt
         return True
     return returnout

-def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
-    inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
-    return AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
+def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
+    inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs, saltkey, False)
+    return AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose, returnfp)


 def PrintPermissionString(fchmode, ftype):
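Across all of the Append*To(Stacked)OutFile hunks above, the new saltkey parameter is inserted in the same slot, between formatspecs and verbose, and is forwarded positionally through every internal call. A minimal sketch of a 0.27.0-style call site follows; the paths and the salt value are invented for illustration, not taken from the package:

    # Hypothetical call site for the new signature shown above; the file names
    # and salt value are illustrative only.
    ok = AppendFilesWithContentFromTarFileToOutFile(
        "input.tar", "output.cat",
        fmttype="auto",
        saltkey=b"example-salt",  # new in 0.27.0; saltkey=None keeps the old unsalted checksums
        verbose=False)

Passing saltkey (and the other options) by keyword, as here, keeps call sites working even when a parameter is inserted mid-signature.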
@@ -8270,10 +9011,8 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, file
     elif(compresscheck == "lz4" and compresscheck in compressionsupport):
         fp = lz4.frame.open(infile, "rb")
     elif(compresscheck == "zstd" and compresscheck in compressionsupport):
-        if 'zstandard' in sys.modules:
-            fp = ZstdFile(infile, mode="rb")
-        elif 'pyzstd' in sys.modules:
-            fp = pyzstd.zstdfile.ZstdFile(infile, mode="rb")
+        if 'zstd' in compressionsupport:
+            fp = zstd.ZstdFile(infile, mode="rb")
         else:
             return False # fix: 'Flase' -> False
     elif((compresscheck == "lzma" or compresscheck == "xz") and compresscheck in compressionsupport):
@@ -8390,10 +9129,8 @@ def UncompressFileAlt(fp, formatspecs=__file_format_multi_dict__, filestart=0,
     elif kind in ("lzma","xz") and (("lzma" in compressionsupport) or ("xz" in compressionsupport)):
         wrapped = lzma.LZMAFile(src)
     elif kind == "zstd" and ("zstd" in compressionsupport or "zstandard" in compressionsupport):
-        if 'zstandard' in sys.modules:
-            wrapped = ZstdFile(fileobj=src, mode="rb")
-        elif 'pyzstd' in sys.modules:
-            wrapped = pyzstd.zstdfile.ZstdFile(fileobj=src, mode="rb")
+        if 'zstd' in compressionsupport:
+            wrapped = zstd.ZstdFile(src, mode="rb")
         else:
             return False
     elif kind == "lz4" and "lz4" in compressionsupport:
@@ -8461,10 +9198,8 @@ def UncompressFile(infile, formatspecs=__file_format_multi_dict__, mode="rb",
     elif (compresscheck == "bzip2" and "bzip2" in compressionsupport):
         fp = bz2.open(infile, mode)
     elif (compresscheck == "zstd" and "zstandard" in compressionsupport):
-        if 'zstandard' in sys.modules:
-            fp = ZstdFile(infile, mode=mode)
-        elif 'pyzstd' in sys.modules:
-            fp = pyzstd.zstdfile.ZstdFile(infile, mode=mode)
+        if 'zstd' in compressionsupport:
+            fp = zstd.ZstdFile(infile, mode=mode)
         else:
             return False
     elif (compresscheck == "lz4" and "lz4" in compressionsupport):
@@ -9233,10 +9968,8 @@ def CompressOpenFile(outfile, compressionenable=True, compressionlevel=None,
         outfp = FileLikeAdapter(bz2.open(outfile, mode, compressionlevel), mode="wb")

     elif (fextname == ".zst" and "zstandard" in compressionsupport):
-        if 'zstandard' in sys.modules:
-            outfp = FileLikeAdapter(ZstdFile(outfile, mode=mode, level=compressionlevel), mode="wb")
-        elif 'pyzstd' in sys.modules:
-            outfp = FileLikeAdapter(pyzstd.zstdfile.ZstdFile(outfile, mode=mode, level=compressionlevel), mode="wb")
+        if 'zstd' in compressionsupport:
+            outfp = FileLikeAdapter(zstd.ZstdFile(outfile, mode=mode, level=compressionlevel), mode="wb")
         else:
             return False # fix: 'Flase' -> False

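The four zstd hunks above replace the old two-backend probe (checking sys.modules for 'zstandard' and then 'pyzstd') with a single test against compressionsupport and a single zstd.ZstdFile entry point. How the zstd name gets bound is outside these hunks; one plausible arrangement, stated here only as an assumption, is an import-time fallback:

    # Assumed, not shown in this diff: one way a single 'zstd' alias could be
    # established so that zstd.ZstdFile works regardless of backend.
    try:
        import pyzstd as zstd          # provides pyzstd.ZstdFile
    except ImportError:
        try:
            import zstandard as zstd   # only usable if this build exposes a ZstdFile wrapper
        except ImportError:
            zstd = None                # 'zstd' then never enters compressionsupport

Whatever the actual binding, the call sites now need only one spelling, zstd.ZstdFile(infile, mode=...), instead of two divergent ones.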
@@ -9327,56 +10060,54 @@ def CheckSumSupport(checkfor, guaranteed=True):
     return False


-def PackArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
-    return AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
+def PackArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
+    return AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose, returnfp)

-def PackStackedArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
-    return AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
+def PackStackedArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
+    return AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, saltkey, verbose, returnfp)

-def PackArchiveFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
-    return PackArchiveFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, returnfp)
+def PackArchiveFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
+    return PackArchiveFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, saltkey, verbose, returnfp)


-def PackArchiveFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
-    return AppendFilesWithContentFromTarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
+def PackArchiveFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
+    return AppendFilesWithContentFromTarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, returnfp)


-def PackArchiveFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
-    return AppendFilesWithContentFromZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
+def PackArchiveFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
+    return AppendFilesWithContentFromZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, returnfp)


 if(not rarfile_support):
-    def PackArchiveFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+    def PackArchiveFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
         return False
 else:
-    def PackArchiveFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
-        return AppendFilesWithContentFromRarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
+    def PackArchiveFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
+        return AppendFilesWithContentFromRarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, returnfp)


 if(not py7zr_support):
-    def PackArchiveFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+    def PackArchiveFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
         return False
 else:
-    def PackArchiveFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
-        return AppendFilesWithContentFromSevenZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
+    def PackArchiveFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
+        return AppendFilesWithContentFromSevenZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, saltkey, verbose, returnfp)


-def PackArchiveFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+def PackArchiveFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, verbose=False, returnfp=False):
     checkcompressfile = CheckCompressionSubType(infile, formatspecs, 0, True)
     if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
         formatspecs = formatspecs[checkcompressfile]
-    if(verbose):
-        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
-        return PackArchiveFileFromTarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
+        return PackArchiveFileFromTarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
     elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
-        return PackArchiveFileFromZipFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
+        return PackArchiveFileFromZipFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
     elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
-        return PackArchiveFileFromRarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
+        return PackArchiveFileFromRarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
     elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
-        return PackArchiveFileFromSevenZipFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
+        return PackArchiveFileFromSevenZipFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
     elif(IsSingleDict(formatspecs) and checkcompressfile == formatspecs['format_magic']):
-        return RePackArchiveFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, False, 0, 0, checksumtype, False, extradata, jsondata, formatspecs, verbose, returnfp)
+        return RePackArchiveFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, False, 0, 0, checksumtype, False, extradata, jsondata, formatspecs, saltkey, verbose, returnfp)
     else:
         return False
     return False
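With the signature changes above, a salted archive is produced by passing the key once at the top level; the Pack* wrappers forward it all the way down. A hypothetical usage sketch, where the paths, the salt, and the choice of md5 are illustrative:

    # Hypothetical: pack two directories into one archive with salted checksums.
    PackArchiveFile(
        ["./docs", "./src"], "backup.cat",
        compression="auto",
        checksumtype=["md5", "md5", "md5", "md5", "md5"],
        saltkey=b"example-salt",
        verbose=True)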
@@ -9445,19 +10176,12 @@ def ArchiveFileArrayValidate(listarrayfiles, verbose=False):
             ok = False
     return ok

-def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
-                        formatspecs=__file_format_multi_dict__, # keep default like original
-                        seektoend=False, verbose=False, returnfp=False):
-    if(verbose):
-        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
-
+def ArchiveFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
     if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
         formatspecs = formatspecs[fmttype]
     elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
         fmttype = "auto"
-
     curloc = filestart
-
     if(hasattr(infile, "read") or hasattr(infile, "write")):
         curloc = infile.tell()
         fp = infile
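Two behavioral notes fall out of this hunk: the signature collapses to a single line with saltkey added, and the per-call logging.basicConfig is gone, so verbose validation no longer reconfigures the logging module as a side effect. A hypothetical validation call against the new signature (path and salt invented for illustration):

    # Hypothetical: validate an archive whose checksums were salted at pack time.
    ok = ArchiveFileValidate("backup.cat", fmttype="auto",
                             saltkey=b"example-salt", verbose=True)
    if not ok:
        raise ValueError("archive failed checksum validation")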
@@ -9473,7 +10197,6 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
         if(not fp):
             return False
         fp.seek(filestart, 0)
-
     elif(infile == "-"):
         fp = MkTempFile()
         shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
@@ -9485,7 +10208,6 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
         if(not fp):
             return False
         fp.seek(filestart, 0)
-
     elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
         fp = MkTempFile()
         fp.write(infile)
@@ -9497,7 +10219,6 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
         if(not fp):
             return False
         fp.seek(filestart, 0)
-
     elif(re.findall(__download_proto_support__, infile)):
         fp = download_file_from_internet_file(infile)
         fp = UncompressFileAlt(fp, formatspecs, filestart)
@@ -9508,7 +10229,6 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
         if(not fp):
             return False
         fp.seek(filestart, 0)
-
     else:
         infile = RemoveWindowsPath(infile)
         checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
@@ -9555,11 +10275,9 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
         fp.seek(0, 2)
     except (OSError, ValueError):
         SeekToEndOfFile(fp)
-
     CatSize = fp.tell()
     CatSizeEnd = CatSize
     fp.seek(curloc, 0)
-
     if(IsNestedDict(formatspecs)):
         compresschecking = CheckCompressionType(fp, formatspecs, filestart, False)
         if(compresschecking not in formatspecs):
@@ -9567,54 +10285,36 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
         else:
             formatspecs = formatspecs[compresschecking]
         fp.seek(filestart, 0)
-
     inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+    headeroffset = fp.tell()
     formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
     formdelsize = len(formatspecs['format_delimiter'])
     formdel = fp.read(formdelsize).decode("UTF-8")
-
     if(formstring != formatspecs['format_magic'] + inheaderver):
         return False
     if(formdel != formatspecs['format_delimiter']):
         return False
-
-    if(formatspecs['new_style']):
+    if(__use_new_style__):
         inheader = ReadFileHeaderDataBySize(fp, formatspecs['format_delimiter'])
     else:
         inheader = ReadFileHeaderDataWoSize(fp, formatspecs['format_delimiter'])
-
-
-
-    extrastart = 15
+    fnumextrafieldsize = int(inheader[15], 16)
+    fnumextrafields = int(inheader[16], 16)
+    extrastart = 17
     extraend = extrastart + fnumextrafields
     formversion = re.findall("([\\d]+)", formstring)
     fheadsize = int(inheader[0], 16)
     fnumfields = int(inheader[1], 16)
-    fnumfiles = int(inheader[
+    fnumfiles = int(inheader[8], 16)
     fprechecksumtype = inheader[-2]
     fprechecksum = inheader[-1]
-    outfseeknextfile = inheader[
-    fjsonsize = int(inheader[
-    fjsonchecksumtype = inheader[
-    fjsonchecksum = inheader[
+    outfseeknextfile = inheader[9]
+    fjsonsize = int(inheader[12], 16)
+    fjsonchecksumtype = inheader[13]
+    fjsonchecksum = inheader[14]
+    headerjsonoffset = fp.tell()
     fprejsoncontent = fp.read(fjsonsize)
-    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
-    if(fjsonsize > 0):
-        if(hmac.compare_digest(jsonfcs, fjsonchecksum)):
-            if(verbose):
-                VerbosePrintOut("File JSON Data Checksum Passed at offset " + str(outfjstart))
-                VerbosePrintOut("'" + outfjsonchecksum + "' == " + "'" + injsonfcs + "'")
-        else:
-            valid_archive = False
-            invalid_archive = True
-            if(verbose):
-                VerbosePrintOut("File JSON Data Checksum Error at offset " + str(outfjstart))
-                VerbosePrintOut("'" + outfjsonchecksum + "' != " + "'" + injsonfcs + "'")
-    if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
-        VerbosePrintOut("File JSON Data Checksum Error with file " +
-                        fname + " at offset " + str(fheaderstart))
-        VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
-        return False
+    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
     # Next seek directive
     if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
         fseeknextasnum = int(outfseeknextfile.replace("+", ""))
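The new header layout reads everything from fixed slots of inheader before any extra fields begin. Collected in one place, with the field meanings inferred from how each value is used later in this function (the names are the code's own; the comments are inferences, not documented labels):

    fnumfiles = int(inheader[8], 16)            # record count
    outfseeknextfile = inheader[9]              # seek directive for the next record
    fjsonsize = int(inheader[12], 16)           # size of the JSON block in bytes
    fjsonchecksumtype = inheader[13]            # checksum algorithm for the JSON block
    fjsonchecksum = inheader[14]                # stored JSON checksum
    fnumextrafieldsize = int(inheader[15], 16)
    fnumextrafields = int(inheader[16], 16)
    extrastart = 17                             # extra fields start after slot 16
    extraend = extrastart + fnumextrafields

Note also that GetFileChecksum now takes saltkey as a fifth argument, so the stored and recomputed digests agree only when the same salt is supplied.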
@@ -9633,14 +10333,11 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
         fp.seek(fseeknextasnum, 0)
     else:
         return False
-
     il = 0
-    headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
-    newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
-
+    headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs, saltkey)
+    newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
     valid_archive = True
     invalid_archive = False
-
     if(verbose):
         if(hasattr(infile, "read") or hasattr(infile, "write")):
             try:
@@ -9652,78 +10349,56 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
         else:
             VerbosePrintOut(infile)
         VerbosePrintOut("Number of Records " + str(fnumfiles))
-
     if(headercheck):
         if(verbose):
-            VerbosePrintOut("File Header Checksum Passed at offset " + str(
+            VerbosePrintOut("File Header Checksum Passed at offset " + str(headeroffset))
             VerbosePrintOut("'" + fprechecksum + "' == " + "'" + newfcs + "'")
     else:
         # always flip flags, even when not verbose
         valid_archive = False
         invalid_archive = True
         if(verbose):
-            VerbosePrintOut("File Header Checksum Failed at offset " + str(
+            VerbosePrintOut("File Header Checksum Failed at offset " + str(headeroffset))
             VerbosePrintOut("'" + fprechecksum + "' != " + "'" + newfcs + "'")
-
+    if(fjsonsize > 0):
+        if(CheckChecksums(jsonfcs, fjsonchecksum)):
+            if(verbose):
+                VerbosePrintOut("File JSON Data Checksum Passed at offset " + str(headerjsonoffset))
+                VerbosePrintOut("'" + outfjsonchecksum + "' == " + "'" + injsonfcs + "'")
+        else:
+            valid_archive = False
+            invalid_archive = True
+            if(verbose):
+                VerbosePrintOut("File JSON Data Checksum Error at offset " + str(headerjsonoffset))
+                VerbosePrintOut("'" + outfjsonchecksum + "' != " + "'" + injsonfcs + "'")
     if(verbose):
         VerbosePrintOut("")
-
     # Iterate either until EOF (seektoend) or fixed count
     while (fp.tell() < CatSizeEnd) if seektoend else (il < fnumfiles):
         outfhstart = fp.tell()
-        if(formatspecs['new_style']):
+        if(__use_new_style__):
             inheaderdata = ReadFileHeaderDataBySize(fp, formatspecs['format_delimiter'])
         else:
             inheaderdata = ReadFileHeaderDataWoSize(fp, formatspecs['format_delimiter'])

         if(len(inheaderdata) == 0):
             break
-
-        outfheadsize = int(inheaderdata[0], 16)
-        outfnumfields = int(inheaderdata[1], 16)
-        outftype = int(inheaderdata[2], 16)
-        # FIX: these must come from inheaderdata, not inheader
-        outfostype = inheaderdata[3]
-        outfencoding = inheaderdata[4]
-
         if(re.findall("^[.|/]", inheaderdata[5])):
             outfname = inheaderdata[5]
         else:
             outfname = "./" + inheaderdata[5]
         outfbasedir = os.path.dirname(outfname)
-
-        outflinkname = inheaderdata[6]
         outfsize = int(inheaderdata[7], 16)
-
-
-
-
-
-
-
-
-        outfcompression = inheaderdata[14]
-        outfcsize = int(inheaderdata[15], 16)
-        outfuid = int(inheaderdata[16], 16)
-        outfuname = inheaderdata[17]
-        outfgid = int(inheaderdata[18], 16)
-        outfgname = inheaderdata[19]
-        fid = int(inheaderdata[20], 16)
-        finode = int(inheaderdata[21], 16)
-        flinkcount = int(inheaderdata[22], 16)
-        outfdev = int(inheaderdata[23], 16)
-        outfdev_minor = int(inheaderdata[24], 16)
-        outfdev_major = int(inheaderdata[25], 16)
-        outfseeknextfile = inheaderdata[26]
-        outfjsontype = inheaderdata[27]
-        outfjsonlen = int(inheaderdata[28], 16)
-        outfjsonsize = int(inheaderdata[29], 16)
-        outfjsonchecksumtype = inheaderdata[30]
-        outfjsonchecksum = inheaderdata[31]
-
+        outfcompression = inheaderdata[17]
+        outfcsize = int(inheaderdata[18], 16)
+        fid = int(inheaderdata[23], 16)
+        finode = int(inheaderdata[24], 16)
+        outfseeknextfile = inheaderdata[28]
+        outfjsonsize = int(inheaderdata[31], 16)
+        outfjsonchecksumtype = inheaderdata[32]
+        outfjsonchecksum = inheaderdata[33]
         outfhend = fp.tell() - 1  # (kept for parity; not used)
         outfjstart = fp.tell()
-
         # Read JSON bytes; compute checksum on bytes for robustness
         outfprejsoncontent_bytes = fp.read(outfjsonsize)
         # Decode for any downstream text needs (not used further here)
@@ -9731,27 +10406,21 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
             outfprejsoncontent = outfprejsoncontent_bytes.decode("UTF-8")
         except Exception:
             outfprejsoncontent = None
-
         outfjend = fp.tell()
         fp.seek(len(formatspecs['format_delimiter']), 1)
-
-
-
-        outfextrasize = int(inheaderdata[32], 16)
-        outfextrafields = int(inheaderdata[33], 16)
+        injsonfcs = GetFileChecksum(outfprejsoncontent_bytes, outfjsonchecksumtype, True, formatspecs, saltkey)
+        outfextrafields = int(inheaderdata[35], 16)
         extrafieldslist = []
-        extrastart =
+        extrastart = 36
         extraend = extrastart + outfextrafields
-
         outfcs = inheaderdata[-2].lower()
         outfccs = inheaderdata[-1].lower()
-        infcs = GetHeaderChecksum(inheaderdata[:-2], inheaderdata[-4].lower(), True, formatspecs)
-
+        infcs = GetHeaderChecksum(inheaderdata[:-2], inheaderdata[-4].lower(), True, formatspecs, saltkey)
         if(verbose):
             VerbosePrintOut(outfname)
             VerbosePrintOut("Record Number " + str(il) + "; File ID " + str(fid) + "; iNode Number " + str(finode))

-        if(hmac.compare_digest(outfcs, infcs)):
+        if(CheckChecksums(outfcs, infcs)):
             if(verbose):
                 VerbosePrintOut("File Header Checksum Passed at offset " + str(outfhstart))
                 VerbosePrintOut("'" + outfcs + "' == " + "'" + infcs + "'")
@@ -9761,9 +10430,8 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
             if(verbose):
                 VerbosePrintOut("File Header Checksum Failed at offset " + str(outfhstart))
                 VerbosePrintOut("'" + outfcs + "' != " + "'" + infcs + "'")
-
         if(outfjsonsize > 0):
-            if(hmac.compare_digest(injsonfcs, outfjsonchecksum)):
+            if(CheckChecksums(injsonfcs, outfjsonchecksum)):
                 if(verbose):
                     VerbosePrintOut("File JSON Data Checksum Passed at offset " + str(outfjstart))
                     VerbosePrintOut("'" + outfjsonchecksum + "' == " + "'" + injsonfcs + "'")
@@ -9773,21 +10441,19 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
             if(verbose):
                 VerbosePrintOut("File JSON Data Checksum Error at offset " + str(outfjstart))
                 VerbosePrintOut("'" + outfjsonchecksum + "' != " + "'" + injsonfcs + "'")
-
         outfcontentstart = fp.tell()
         outfcontents = b""  # FIX: bytes for Py2/3 consistency
         pyhascontents = False
-
         if(outfsize > 0):
             if(outfcompression == "none" or outfcompression == "" or outfcompression == "auto"):
                 outfcontents = fp.read(outfsize)
             else:
                 outfcontents = fp.read(outfcsize)

-            infccs = GetFileChecksum(outfcontents, inheaderdata[-3].lower(), False, formatspecs)
+            infccs = GetFileChecksum(outfcontents, inheaderdata[-3].lower(), False, formatspecs, saltkey)
             pyhascontents = True

-        if(hmac.compare_digest(outfccs, infccs)):
+        if(CheckChecksums(outfccs, infccs)):
             if(verbose):
                 VerbosePrintOut("File Content Checksum Passed at offset " + str(outfcontentstart))
                 VerbosePrintOut("'" + outfccs + "' == " + "'" + infccs + "'")
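Every digest comparison in the loop now goes through CheckChecksums(...) instead of calling hmac.compare_digest(...) directly, and every recomputation passes saltkey. The helper's body is not part of this diff; a guess at its minimal behavior, offered only as a sketch:

    def CheckChecksums(expected, actual):
        # Sketch only: the shipped helper is defined elsewhere in
        # pyarchivefile.py. Constant-time compare, tolerant of case
        # differences between hex digests.
        try:
            return hmac.compare_digest(str(expected).lower(), str(actual).lower())
        except TypeError:
            return False

Centralizing the comparison means salted-digest handling or normalization only has to change in one place.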
@@ -9797,10 +10463,8 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
             if(verbose):
                 VerbosePrintOut("File Content Checksum Failed at offset " + str(outfcontentstart))
                 VerbosePrintOut("'" + outfccs + "' != " + "'" + infccs + "'")
-
         if(verbose):
             VerbosePrintOut("")
-
         # Next seek directive
         if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
             fseeknextasnum = int(outfseeknextfile.replace("+", ""))
@@ -9819,9 +10483,7 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
             fp.seek(fseeknextasnum, 0)
         else:
             return False
-
         il = il + 1
-
     if(valid_archive):
         if(returnfp):
             return fp
@@ -9833,34 +10495,34 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
     return False


-def ArchiveFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
-    return ArchiveFileValidate(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
+def ArchiveFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
+    return ArchiveFileValidate(infile, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)


-def ArchiveFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
+def ArchiveFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
     if(isinstance(infile, (list, tuple, ))):
         pass
     else:
         infile = [infile]
     outretval = True
     for curfname in infile:
-        curretfile = ArchiveFileValidate(curfname, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
+        curretfile = ArchiveFileValidate(curfname, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
         if(not curretfile):
             outretval = False
     return outretval

-def ArchiveFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
-    return ArchiveFileValidateMultiple(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
+def ArchiveFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
+    return ArchiveFileValidateMultiple(infile, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)


-def StackedArchiveFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
+def StackedArchiveFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
     outretval = []
     outstartfile = filestart
     outfsize = float('inf')
     while True:
         if outstartfile >= outfsize:  # stop when function signals False
             break
-        is_valid_file = ArchiveFileValidate(infile, fmttype, outstartfile, formatspecs, seektoend, verbose, True)
+        is_valid_file = ArchiveFileValidate(infile, fmttype, outstartfile, formatspecs, saltkey, seektoend, verbose, True)
         if is_valid_file is False:  # stop when function signals False
             outretval.append(is_valid_file)
             break
@@ -9877,33 +10539,36 @@ def StackedArchiveFileValidate(infile, fmttype="auto", filestart=0, formatspecs=
     if(returnfp):
         return infile
     else:
-
+        try:
+            infile.close()
+        except AttributeError:
+            return False
     return outretval



-def StackedArchiveFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
-    return StackedArchiveFileValidate(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
+def StackedArchiveFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
+    return StackedArchiveFileValidate(infile, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)


-def StackedArchiveFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
+def StackedArchiveFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
     if(isinstance(infile, (list, tuple, ))):
         pass
     else:
         infile = [infile]
     outretval = True
     for curfname in infile:
-        curretfile = StackedArchiveFileValidate(curfname, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
+        curretfile = StackedArchiveFileValidate(curfname, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)
         if(not curretfile):
             outretval = False
     return outretval

-def StackedArchiveFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
-    return StackedArchiveFileValidateMultiple(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
+def StackedArchiveFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
+    return StackedArchiveFileValidateMultiple(infile, fmttype, filestart, formatspecs, saltkey, seektoend, verbose, returnfp)


-def ArchiveFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
-    outfp = ReadInFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+def ArchiveFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
+    outfp = ReadInFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
     if not returnfp:
         for item in outfp:
             fp = item.get('fp')
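StackedArchiveFileValidate walks a file containing several concatenated archives by re-running ArchiveFileValidate with an advancing filestart, and 0.27.0 adds an explicit close guarded against non-file inputs. A simplified sketch of the advancing-offset idea (the real loop also tracks outfsize and the returnfp flag; the path here is illustrative):

    # Simplified sketch of the stacked-validation pattern above.
    offset = 0
    while True:
        fp = ArchiveFileValidate("stacked.cat", "auto", offset, saltkey=None, returnfp=True)
        if fp is False:
            break              # no valid archive begins at this offset
        offset = fp.tell()     # the next stacked archive starts where this one ended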
@@ -9917,26 +10582,26 @@ def ArchiveFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend
     return outfp


-def MultipleArchiveFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+def MultipleArchiveFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
     if(isinstance(infile, (list, tuple, ))):
         pass
     else:
         infile = [infile]
     outretval = []
     for curfname in infile:
-        outretval.append(ArchiveFileToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp))
+        outretval.append(ArchiveFileToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend, returnfp))
     return outretval

-def MultipleArchiveFilesToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
-    return MultipleArchiveFileToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
+def MultipleArchiveFilesToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
+    return MultipleArchiveFileToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend, returnfp)


-def ArchiveFileStringToArray(instr, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+def ArchiveFileStringToArray(instr, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
     checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
     if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
         formatspecs = formatspecs[checkcompressfile]
     fp = MkTempFile(instr)
-    listarrayfiles = ArchiveFileToArray(fp, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
+    listarrayfiles = ArchiveFileToArray(fp, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
     return listarrayfiles

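Reading an archive back follows the same rule: the saltkey used at pack time must be supplied again so the header, JSON, and content checksums verify. A hypothetical read-back (the path and salt are invented, and the fname/fsize key names are assumptions about the per-entry dicts, which this diff only shows holding an 'fp' entry):

    # Hypothetical read-back into the list-of-dicts structure returned above.
    entries = ArchiveFileToArray("backup.cat", fmttype="auto",
                                 contentasfile=True, saltkey=b"example-salt")
    for entry in entries:
        print(entry.get('fname'), entry.get('fsize'))  # key names assumed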
@@ -9945,9 +10610,8 @@ def TarFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile
     if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
         formatspecs = formatspecs[checkcompressfile]
     fp = MkTempFile()
-    fp = PackArchiveFileFromTarFile(
-
-    listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
+    fp = PackArchiveFileFromTarFile(infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, None, False, True)
+    listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, None, seektoend, returnfp)
     return listarrayfiles


@@ -9956,9 +10620,8 @@ def ZipFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile
     if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
         formatspecs = formatspecs[checkcompressfile]
     fp = MkTempFile()
-    fp = PackArchiveFileFromZipFile(
-
-    listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
+    fp = PackArchiveFileFromZipFile(infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, None, False, True)
+    listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, None, seektoend, returnfp)
     return listarrayfiles


@@ -9972,9 +10635,8 @@ if(rarfile_support):
         if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
             formatspecs = formatspecs[checkcompressfile]
         fp = MkTempFile()
-        fp = PackArchiveFileFromRarFile(
-
-        listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
+        fp = PackArchiveFileFromRarFile(infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, None, False, True)
+        listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, None, seektoend, returnfp)
         return listarrayfiles

 if(not py7zr_support):

@@ -9987,13 +10649,12 @@ if(py7zr_support):
         if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
             formatspecs = formatspecs[checkcompressfile]
         fp = MkTempFile()
-        fp = PackArchiveFileFromSevenZipFile(
-
-        listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
+        fp = PackArchiveFileFromSevenZipFile(infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, None, False, True)
+        listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, None, seektoend, returnfp)
         return listarrayfiles


-def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
     checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
     if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
         formatspecs = formatspecs[checkcompressfile]
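Annotation: the four foreign-format readers above (tar/zip/rar/7z) now spell out the full `PackArchiveFileFrom*File` argument list, with `None` in the new salt-key slot, instead of the previously truncated calls. A hedged usage sketch; the tarball path is hypothetical and `listonly` follows the signature in the hunk header:

```python
import pyarchivefile

# Re-encode a tarball into the library's own format in memory, then list it.
entries = pyarchivefile.TarFileToArray("backup.tar", listonly=True)
if entries is not False:
    for fentry in entries["ffilelist"]:
        print(fentry["fname"])
```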
@@ -10006,17 +10667,16 @@ def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, c
     elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
         return SevenZipFileToArray(infile, seekstart, seekend, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
     elif(checkcompressfile == formatspecs['format_magic']):
-        return ArchiveFileToArray(infile, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
+        return ArchiveFileToArray(infile, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
     else:
         return False
     return False


-def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, skipchecksum=False, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
+def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, saltkey=None, skipchecksum=False, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
     outarray = MkTempFile()
-    packform = PackArchiveFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
-
-    listarrayfiles = ArchiveFileToArray(outarray, "auto", filestart, seekstart, seekend, listonly, True, True, skipchecksum, formatspecs, seektoend, returnfp)
+    packform = PackArchiveFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, followlink, checksumtype, extradata, formatspecs, saltkey, verbose, True)
+    listarrayfiles = ArchiveFileToArray(outarray, "auto", filestart, seekstart, seekend, listonly, True, True, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
     return listarrayfiles


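Annotation: `ListDirToArray` gains `saltkey` and, as the rewritten call shows, forwards it to both the pack step and the re-read. Sketch, with a hypothetical directory path:

```python
import pyarchivefile

# Pack a directory straight into the parsed-array form; the same saltkey is
# used to write the temporary archive and to read it back.
arr = pyarchivefile.ListDirToArray(["./mydir"], saltkey=None)
print(arr is not False and len(arr["ffilelist"]))
```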
@@ -10138,12 +10798,12 @@ def ArchiveFileArrayToArrayIndex(inarray, returnfp=False):
     return out


-def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=
+def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, seektoend=False, verbose=False, returnfp=False):
     # ---------- Safe defaults ----------
     if compressionuselist is None:
         compressionuselist = compressionlistalt
     if checksumtype is None:
-        checksumtype = ["md5", "md5", "md5", "md5"]
+        checksumtype = ["md5", "md5", "md5", "md5", "md5"]
     if extradata is None:
         extradata = []
     if jsondata is None:
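Annotation: the checksum-type default grows from four entries to five. Judging from the call sites later in this function, slots 0-1 feed `AppendFileHeader` and slots 2-4 feed `AppendFileHeaderWithContent`, so each record now carries one more digest; treat that slot mapping as an inference from this hunk rather than documented behavior. A hedged call sketch (paths hypothetical):

```python
import pyarchivefile

pyarchivefile.RePackArchiveFile(
    "in.cat", "out.cat",
    checksumtype=["md5", "md5", "md5", "md5", "md5"],  # five slots now
    insaltkey=None,   # salt of the archive being read
    outsaltkey=None,  # salt for the archive being written
)
```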
@@ -10162,7 +10822,7 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
     infile = RemoveWindowsPath(infile)
     listarrayfileslist = ArchiveFileToArray(
         infile, "auto", filestart, seekstart, seekend,
-        False, True, True, skipchecksum, formatspecs, seektoend, False
+        False, True, True, skipchecksum, formatspecs, insaltkey, seektoend, False
     )

     # ---------- Format specs selection ----------
@@ -10229,9 +10889,6 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
     if (compression is None) or (compressionuselist and compression not in compressionuselist):
         compression = "auto"

-    if verbose:
-        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
-
     # No files?
     if not listarrayfiles.get('ffilelist'):
         return False
@@ -10244,7 +10901,7 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
     if lenlist != fnumfiles:
         fnumfiles = lenlist

-    AppendFileHeader(fp, fnumfiles, listarrayfiles.get('fencoding', 'utf-8'), [], checksumtype[0], formatspecs)
+    AppendFileHeader(fp, fnumfiles, listarrayfiles.get('fencoding', 'utf-8'), listarrayfiles['fextradata'], listarrayfiles['fjsondata'], [checksumtype[0], checksumtype[1]], formatspecs, outsaltkey)

     # loop counters
     lcfi = 0
@@ -10274,6 +10931,9 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
         # fields (hex-encoded where expected)
         fheadersize = format(int(cur_entry['fheadersize']), 'x').lower()
         fsize = format(int(cur_entry['fsize']), 'x').lower()
+        fblksize = format(int(cur_entry['fblksize']), 'x').lower()
+        fblocks = format(int(cur_entry['fblocks']), 'x').lower()
+        fflags = format(int(cur_entry['fflags']), 'x').lower()
         flinkname = cur_entry['flinkname']
         fatime = format(int(cur_entry['fatime']), 'x').lower()
         fmtime = format(int(cur_entry['fmtime']), 'x').lower()
@@ -10292,8 +10952,7 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
         fcompression = cur_entry['fcompression']
         fcsize = format(int(cur_entry['fcsize']), 'x').lower()
         fdev = format(int(cur_entry['fdev']), 'x').lower()
-
-        fdev_major = format(int(cur_entry['fmajor']), 'x').lower()
+        frdev = format(int(cur_entry['frdev']), 'x').lower()
         fseeknextfile = cur_entry['fseeknextfile']

         # extra fields sizing
@@ -10304,6 +10963,12 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
         # extradata/jsondata defaults per file
         if not followlink and len(extradata) <= 0:
             extradata = cur_entry['fextradata']
+
+        fvendorfields = cur_entry['fvendorfields']
+        ffvendorfieldslist = []
+        if(fvendorfields>0):
+            ffvendorfieldslist = cur_entry['fvendorfieldslist']
+
         if not followlink and len(jsondata) <= 0:
             jsondata = cur_entry['fjsondata']

@@ -10339,7 +11004,11 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
             fcontents.seek(0, 0)
             cfcontents.seek(0, 0)
             cfcontents = CompressOpenFileAlt(
-                cfcontents,
+                cfcontents,
+                compressionuselist[ilmin],
+                compressionlevel,
+                compressionuselist,
+                formatspecs
             )
             if cfcontents:
                 cfcontents.seek(0, 2)
@@ -10347,7 +11016,7 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
                 cfcontents.close()
             else:
                 ilcsize.append(float("inf"))
-            ilmin
+            ilmin = ilmin + 1
         ilcmin = ilcsize.index(min(ilcsize))
         curcompression = compressionuselist[ilcmin]

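Annotation: the one-line fix above restores the `ilmin` counter increment, without which the codec-probing loop would retry the same candidate forever. The pattern in isolation, as a generic sketch (not the library's exact code):

```python
def pick_smallest(data, codecs, compress):
    # compress(name, data) -> bytes; failures score as infinity, mirroring
    # ilcsize.append(float("inf")) in the loop above.
    sizes = []
    for name in codecs:
        try:
            sizes.append(len(compress(name, data)))
        except Exception:
            sizes.append(float("inf"))
    return codecs[sizes.index(min(sizes))]
```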
@@ -10356,16 +11025,24 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
             shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
             cfcontents.seek(0, 0)
             cfcontents = CompressOpenFileAlt(
-                cfcontents,
+                cfcontents,
+                curcompression,
+                compressionlevel,
+                compressionuselist,
+                formatspecs
             )
             cfcontents.seek(0, 2)
-
-            if ucfsize >
-            fcsize = format(int(
+            cfsize = cfcontents.tell()
+            if ucfsize > cfsize:
+                fcsize = format(int(cfsize), 'x').lower()
                 fcompression = curcompression
                 fcontents.close()
                 fcontents = cfcontents

+        if fcompression == "none":
+            fcompression = ""
+        fcontents.seek(0, 0)
+
         # link following (fixed: use listarrayfiles, not prelistarrayfiles)
         if followlink:
             if (cur_entry['ftype'] == 1 or cur_entry['ftype'] == 2):
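Annotation: the rebuilt block above completes a previously truncated comparison: the compressed stream replaces the original only when `ucfsize > cfsize`, and `"none"` is normalized to the empty string before the header is written. The decision, reduced to a standalone sketch:

```python
import zlib

def maybe_compress(payload):
    # Keep the compressed form only when it is strictly smaller.
    packed = zlib.compress(payload)
    if len(payload) > len(packed):
        return packed, "zlib"
    return payload, ""  # "" plays the role of fcompression == "" (stored)
```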
@@ -10374,6 +11051,9 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
             flinkinfo = listarrayfiles['ffilelist'][flinkid]
             fheadersize = format(int(flinkinfo['fheadersize']), 'x').lower()
             fsize = format(int(flinkinfo['fsize']), 'x').lower()
+            fblksize = format(int(flinkinfo['fblksize']), 'x').lower()
+            fblocks = format(int(flinkinfo['fblocks']), 'x').lower()
+            fflags = format(int(flinkinfo['fflags']), 'x').lower()
             flinkname = flinkinfo['flinkname']
             fatime = format(int(flinkinfo['fatime']), 'x').lower()
             fmtime = format(int(flinkinfo['fmtime']), 'x').lower()
@@ -10392,14 +11072,19 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
             fcompression = flinkinfo['fcompression']
             fcsize = format(int(flinkinfo['fcsize']), 'x').lower()
             fdev = format(int(flinkinfo['fdev']), 'x').lower()
-
-            fdev_major = format(int(flinkinfo['fmajor']), 'x').lower()
+            frdev = format(int(flinkinfo['frdev']), 'x').lower()
             fseeknextfile = flinkinfo['fseeknextfile']
             if (len(flinkinfo['fextradata']) > flinkinfo['fextrafields']
                     and len(flinkinfo['fextradata']) > 0):
                 flinkinfo['fextrafields'] = len(flinkinfo['fextradata'])
             if len(extradata) < 0:
                 extradata = flinkinfo['fextradata']
+
+            fvendorfields = flinkinfo['fvendorfields']
+            ffvendorfieldslist = []
+            if(fvendorfields>0):
+                ffvendorfieldslist = flinkinfo['fvendorfieldslist']
+
             if len(jsondata) < 0:
                 jsondata = flinkinfo['fjsondata']
             fcontents = flinkinfo['fcontents']
@@ -10428,15 +11113,15 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
             fcompression = ""

         tmpoutlist = [
-            ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime,
+            ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime,
             fctime, fbtime, fmode, fwinattributes, fcompression, fcsize, fuid, funame,
-            fgid, fgname, fcurfid, fcurinode, flinkcount, fdev,
+            fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, fseeknextfile
         ]

-
-
-
-        )
+        if(fvendorfields>0 and len(ffvendorfieldslist)>0):
+            extradata.extend(fvendorfields)
+
+        AppendFileHeaderWithContent(fp, tmpoutlist, extradata, jsondata, fcontents.read(),[checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs, outsaltkey)
         try:
             fcontents.close()
         except Exception:
@@ -10481,12 +11166,12 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
         pass
     return True

-def RePackMultipleArchiveFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=
+def RePackMultipleArchiveFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, seektoend=False, verbose=False, returnfp=False):
     if not isinstance(infiles, list):
         infiles = [infiles]
     returnout = False
     for infileslist in infiles:
-        returnout = RePackArchiveFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend, checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, True)
+        returnout = RePackArchiveFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend, checksumtype, skipchecksum, extradata, jsondata, formatspecs, insaltkey, outsaltkey, seektoend, verbose, True)
         if(not returnout):
             break
         else:
@@ -10496,33 +11181,28 @@ def RePackMultipleArchiveFile(infiles, outfile, fmttype="auto", compression="aut
             return True
     return returnout

-def RePackArchiveFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=
+def RePackArchiveFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, seektoend=False, verbose=False, returnfp=False):
     fp = MkTempFile(instr)
-    listarrayfiles = RePackArchiveFile(fp, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
-                                       checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
+    listarrayfiles = RePackArchiveFile(fp, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend, checksumtype, skipchecksum, extradata, jsondata, formatspecs, insaltkey, outsaltkey, seektoend, verbose, returnfp)
     return listarrayfiles


-def PackArchiveFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
+def PackArchiveFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
     outarray = MkTempFile()
-    packform = PackArchiveFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
-
-    listarrayfiles = RePackArchiveFile(outarray, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
-                                       checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
+    packform = PackArchiveFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, saltkey, verbose, True)
+    listarrayfiles = RePackArchiveFile(outarray, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend, checksumtype, skipchecksum, extradata, jsondata, formatspecs, saltkey, seektoend, verbose, returnfp)
     return listarrayfiles


-def UnPackArchiveFile(infile, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, preservepermissions=True, preservetime=True, seektoend=False, verbose=False, returnfp=False):
+def UnPackArchiveFile(infile, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, preservepermissions=True, preservetime=True, seektoend=False, verbose=False, returnfp=False):
     if(outdir is not None):
         outdir = RemoveWindowsPath(outdir)
-    if(verbose):
-        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     if(isinstance(infile, dict)):
         listarrayfiles = infile
     else:
         if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
             infile = RemoveWindowsPath(infile)
-        listarrayfiles = ArchiveFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, True, skipchecksum, formatspecs, seektoend, returnfp)
+        listarrayfiles = ArchiveFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, True, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
     if(not listarrayfiles):
         return False
     lenlist = len(listarrayfiles['ffilelist'])
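Annotation: `UnPackArchiveFile` also drops its per-call `logging.basicConfig` shim and gains `saltkey`. Extraction sketch (paths hypothetical):

```python
import pyarchivefile

ok = pyarchivefile.UnPackArchiveFile(
    "backup.cat", outdir="./restore", saltkey=None,
    preservepermissions=True, preservetime=True)
print("extracted" if ok else "failed")
```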
@@ -10758,9 +11438,9 @@ def UnPackArchiveFile(infile, outdir=None, followlink=False, filestart=0, seekst
     return True


-def UnPackArchiveFileString(instr, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
+def UnPackArchiveFileString(instr, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, returnfp=False):
     fp = MkTempFile(instr)
-    listarrayfiles = UnPackArchiveFile(fp, outdir, followlink, filestart, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
+    listarrayfiles = UnPackArchiveFile(fp, outdir, followlink, filestart, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend, verbose, returnfp)
     return listarrayfiles

 def ftype_to_str(ftype):
@@ -10778,9 +11458,7 @@ def ftype_to_str(ftype):
     # Default to "file" if unknown
     return mapping.get(ftype, "file")

-def ArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
-    if(verbose):
-        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
+def ArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, newstyle=False, returnfp=False):
     if(isinstance(infile, dict)):
         listarrayfileslist = [infile]
     if(isinstance(infile, list)):
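Annotation: with the in-function `logging.basicConfig` calls removed throughout this file, verbose listings now depend on whatever logging configuration the caller has set up. Listing sketch (archive path hypothetical):

```python
import logging
import pyarchivefile

# Configure logging once up front; ArchiveFileListFiles no longer does it.
logging.basicConfig(format="%(message)s", level=logging.DEBUG)
pyarchivefile.ArchiveFileListFiles("backup.cat", verbose=True, newstyle=True)
```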
@@ -10788,7 +11466,7 @@ def ArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seeke
     else:
         if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
             infile = RemoveWindowsPath(infile)
-        listarrayfileslist = ArchiveFileToArray(infile, fmttype, filestart, seekstart, seekend, True, False, False, skipchecksum, formatspecs, seektoend, returnfp)
+        listarrayfileslist = ArchiveFileToArray(infile, fmttype, filestart, seekstart, seekend, True, False, False, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
     if(not listarrayfileslist):
         return False
     for listarrayfiles in listarrayfileslist:
@@ -10825,8 +11503,11 @@ def ArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seeke
             VerbosePrintOut(ftype_to_str(listarrayfiles['ffilelist'][lcfi]['ftype']) + "\t" + listarrayfiles['ffilelist'][lcfi]['fcompression'] + "\t" + str(
                 listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + "\t" + printfname)
         else:
+            ts_ns = listarrayfiles['ffilelist'][lcfi]['fmtime']
+            sec, ns = divmod(int(ts_ns), 10**9)
+            dt = datetime.datetime.utcfromtimestamp(sec).replace(microsecond=ns // 1000)
             VerbosePrintOut(PrintPermissionString(listarrayfiles['ffilelist'][lcfi]['fmode'], listarrayfiles['ffilelist'][lcfi]['ftype']) + " " + str(fuprint) + "/" + str(fgprint) + " " + str(
-                listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + " " +
+                listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + " " + dt.strftime('%Y-%m-%d %H:%M') + " " + printfname)
         lcfi = lcfi + 1
     if(returnfp):
         return listarrayfiles['fp']
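Annotation: the new long-format branch interprets `fmtime` as nanoseconds since the epoch: `divmod(ts, 10**9)` splits whole seconds from the nanosecond remainder, which is then truncated to microseconds for `datetime`. The same conversion, standalone:

```python
import datetime

def ns_epoch_to_datetime(ts_ns):
    # Split a nanosecond timestamp into (seconds, leftover ns), then keep
    # microsecond precision, matching the listing code above.
    sec, ns = divmod(int(ts_ns), 10**9)
    return datetime.datetime.utcfromtimestamp(sec).replace(microsecond=ns // 1000)

print(ns_epoch_to_datetime(1700000000123456789).strftime('%Y-%m-%d %H:%M'))
```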
@@ -10834,25 +11515,25 @@ def ArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seeke
     return True


-def MultipleArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+def MultipleArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
     if(isinstance(infile, (list, tuple, ))):
         pass
     else:
         infile = [infile]
     outretval = {}
     for curfname in infile:
-        outretval[curfname] = ArchiveFileListFiles(infile, fmttype, filestart, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, returnfp)
+        outretval[curfname] = ArchiveFileListFiles(infile, fmttype, filestart, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend, verbose, newstyle, returnfp)
     return outretval


-def StackedArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
+def StackedArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, newstyle=False, returnfp=False):
     outretval = []
     outstartfile = filestart
     outfsize = float('inf')
     while True:
         if outstartfile >= outfsize:  # stop when function signals False
             break
-        list_file_retu = ArchiveFileListFiles(infile, fmttype, outstartfile, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, True)
+        list_file_retu = ArchiveFileListFiles(infile, fmttype, outstartfile, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend, verbose, newstyle, True)
         if list_file_retu is False:  # stop when function signals False
             outretval.append(list_file_retu)
         else:
@@ -10868,30 +11549,31 @@ def StackedArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0
     if(returnfp):
         return infile
     else:
-
+        try:
+            infile.close()
+        except AttributeError:
+            return False
     return outretval


-def MultipleStackedArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+def MultipleStackedArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, returnfp=False):
     if(isinstance(infile, (list, tuple, ))):
         pass
     else:
         infile = [infile]
     outretval = {}
     for curfname in infile:
-        outretval[curfname] = StackedArchiveFileListFiles(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
+        outretval[curfname] = StackedArchiveFileListFiles(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend, returnfp)
     return outretval


-def ArchiveFileStringListFiles(instr, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
+def ArchiveFileStringListFiles(instr, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, saltkey=None, seektoend=False, verbose=False, newstyle=False, returnfp=False):
     fp = MkTempFile(instr)
-    listarrayfiles = ArchiveFileListFiles(instr, "auto", filestart, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, returnfp)
+    listarrayfiles = ArchiveFileListFiles(instr, "auto", filestart, seekstart, seekend, skipchecksum, formatspecs, saltkey, seektoend, verbose, newstyle, returnfp)
     return listarrayfiles


 def TarFileListFiles(infile, verbose=False, returnfp=False):
-    if(verbose):
-        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     if(infile == "-"):
         infile = MkTempFile()
         shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
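Annotation: `StackedArchiveFileListFiles` consumes archives concatenated back to back in one stream; each pass returns a file pointer whose offset seeds the next `filestart`, and the loop stops once the cursor reaches the end (or a pass returns False). The new `try`/`except AttributeError` above closes that pointer when the caller did not ask for it back. The advance-until-exhausted shape, as a generic sketch rather than the library's exact code:

```python
def walk_stacked(read_one, total_size):
    # read_one(offset) -> (result, next_offset), or False at a bad record.
    results, offset = [], 0
    while offset < total_size:
        out = read_one(offset)
        if out is False:
            results.append(False)
            break
        result, offset = out
        results.append(result)
    return results
```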
@@ -10927,10 +11609,8 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         if(compresscheck=="zstd"):
-            if '
-                infile = ZstdFile(
-            elif 'pyzstd' in sys.modules:
-                infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+            if 'zstd' in compressionsupport:
+                infile = zstd.ZstdFile(infile, mode="rb")
             tarfp = tarfile.open(fileobj=infile, mode="r")
         else:
             tarfp = tarfile.open(fileobj=infile, mode="r")
@@ -10939,10 +11619,8 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         if(compresscheck=="zstd"):
-            if '
-                infile = ZstdFile(
-            elif 'pyzstd' in sys.modules:
-                infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+            if 'zstd' in compressionsupport:
+                infile = zstd.ZstdFile(infile, mode="rb")
             tarfp = tarfile.open(fileobj=infile, mode="r")
         else:
             tarfp = tarfile.open(infile, "r")
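Annotation: both zstd branches collapse to one code path: gate on `'zstd' in compressionsupport` and wrap the stream in `zstd.ZstdFile` before handing it to `tarfile`. A sketch of the same shape; the `zstd` object stands for whichever backend module the library bound to that name, so it is passed in rather than imported here:

```python
import tarfile

def open_tar_maybe_zstd(fileobj, compresscheck, compressionsupport, zstd):
    # tarfile handles gz/bz2/xz natively; zstd needs an explicit wrapper.
    if compresscheck == "zstd" and 'zstd' in compressionsupport:
        fileobj = zstd.ZstdFile(fileobj, mode="rb")
    return tarfile.open(fileobj=fileobj, mode="r")
```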
@@ -11012,8 +11690,6 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):


 def ZipFileListFiles(infile, verbose=False, returnfp=False):
-    if(verbose):
-        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     if(infile == "-"):
         infile = MkTempFile()
         shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
@@ -11139,8 +11815,6 @@ if(not rarfile_support):

 if(rarfile_support):
     def RarFileListFiles(infile, verbose=False, returnfp=False):
-        if(verbose):
-            logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
         if(not os.path.exists(infile) or not os.path.isfile(infile)):
             return False
         if(not rarfile.is_rarfile(infile) and not rarfile.is_rarfile_sfx(infile)):
@@ -11268,14 +11942,15 @@ if(not py7zr_support):

 if(py7zr_support):
     def SevenZipFileListFiles(infile, verbose=False, returnfp=False):
-        if(verbose):
-            logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
         if(not os.path.exists(infile) or not os.path.isfile(infile)):
             return False
         lcfi = 0
         returnval = {}
         szpfp = py7zr.SevenZipFile(infile, mode="r")
-
+        try:
+            file_content = szpfp.readall()
+        except AttributeError:
+            file_content = sevenzip_readall(infile)
         #sztest = szpfp.testzip()
         sztestalt = szpfp.test()
         if(sztestalt):
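Annotation: the 7z path now tries `SevenZipFile.readall()` first and falls back to the library's `sevenzip_readall` helper when the installed py7zr predates that method. The same guard, standalone (`readall()` returning a name-to-stream mapping is real py7zr behavior; the fallback callable is supplied by the caller):

```python
import py7zr

def read_all_members(path, fallback_reader):
    archive = py7zr.SevenZipFile(path, mode="r")
    try:
        return archive.readall()      # {filename: file-like object}
    except AttributeError:
        return fallback_reader(path)  # e.g. the library's sevenzip_readall
```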
@@ -11319,7 +11994,10 @@ if(py7zr_support):
             printfname = member.filename
             if(ftype == 0):
                 fsize = len(file_content[member.filename].read())
-
+                try:
+                    file_content[member.filename].close()
+                except AttributeError:
+                    pass
             try:
                 fuid = int(os.getuid())
             except (KeyError, AttributeError):
@@ -11363,8 +12041,6 @@ if(py7zr_support):


 def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict__, seektoend=False, newstyle=False, returnfp=False):
-    if(verbose):
-        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
     if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
         formatspecs = formatspecs[checkcompressfile]
@@ -11391,44 +12067,6 @@ def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compress
         outarray, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
     return listarrayfiles

-"""
-PyNeoFile compatibility layer
-"""
-
-def make_empty_file_pointer_neo(fp, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
-    return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)
-
-def make_empty_archive_file_pointer_neo(fp, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
-    return make_empty_file_pointer_neo(fp, fmttype, checksumtype, formatspecs, encoding)
-
-def make_empty_file_neo(outfile=None, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
-    return MakeEmptyFile(outfile, fmttype, "auto", False, None, compressionlistalt, checksumtype, formatspecs, returnfp)
-
-def make_empty_archive_file_neo(outfile=None, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
-    return make_empty_file_neo(outfile, fmttype, checksumtype, formatspecs, encoding, returnfp)
-
-def pack_neo(infiles, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["md5", "md5", "md5", "md5"], encoding="UTF-8", compression="auto", compression_level=None, returnfp=False):
-    return PackArchiveFile(infiles, outfile, False, "auto", compression, False, compression_level, compressionlistalt, False, checksumtypes, [], {}, formatspecs, False, returnfp)
-
-def archive_to_array_neo(infile, formatspecs=__file_format_multi_dict__, listonly=False, skipchecksum=False, uncompress=True, returnfp=False):
-    return ArchiveFileToArray(infile, "auto", 0, 0, 0, listonly, True, uncompress, skipchecksum, formatspecs, False, returnfp)
-
-def unpack_neo(infile, outdir='.', formatspecs=__file_format_multi_dict__, skipchecksum=False, uncompress=True, returnfp=False):
-    return UnPackArchiveFile(infile, outdir, False, 0, 0, skipchecksum, formatspecs, True, True, False, False, returnfp)
-
-def repack_neo(infile, outfile=None, formatspecs=__file_format_dict__, checksumtypes=["md5", "md5", "md5", "md5"], compression="auto", compression_level=None, returnfp=False):
-    return RePackArchiveFile(infile, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)
-
-def validate_neo(infile, formatspecs=__file_format_multi_dict__, verbose=False, return_details=False, returnfp=False):
-    return ArchiveFileValidate(infile, "auto", formatspecs, False, verbose, returnfp)
-
-def listfiles_neo(infile, formatspecs=__file_format_multi_dict__, advanced=False, include_dirs=True, returnfp=False):
-    return ArchiveFileListFiles(infile, "auto", 0, 0, False, formatspecs, False, True, advanced, returnfp)
-
-def convert_foreign_to_neo(infile, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["md5", "md5", "md5", "md5"], compression="auto", compression_level=None, returnfp=False):
-    intmp = InFileToArray(infile, 0, 0, 0, False, True, False, formatspecs, False, False)
-    return RePackArchiveFile(intmp, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)
-
 def detect_cwd(ftp, file_dir):
     """
     Test whether cwd into file_dir works. Returns True if it does,
@@ -13494,7 +14132,6 @@ def run_http_file_server(fileobj, url, on_progress=None, backlog=5):
         if not ah or not ah.strip().lower().startswith("basic "):
             return False
         try:
-            import base64
             b64 = ah.strip().split(" ", 1)[1]
             raw = base64.b64decode(_to_bytes(b64))
             try: raw_txt = raw.decode("utf-8")