PyCatFile 0.21.4__py3-none-any.whl → 0.22.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pycatfile-0.21.4.data → pycatfile-0.22.4.data}/scripts/catfile.py +15 -15
- {pycatfile-0.21.4.dist-info → pycatfile-0.22.4.dist-info}/METADATA +1 -1
- pycatfile-0.22.4.dist-info/RECORD +10 -0
- pycatfile.py +906 -1238
- pycatfile-0.21.4.dist-info/RECORD +0 -10
- {pycatfile-0.21.4.data → pycatfile-0.22.4.data}/scripts/catneofile.py +0 -0
- {pycatfile-0.21.4.data → pycatfile-0.22.4.data}/scripts/neocatfile.py +0 -0
- {pycatfile-0.21.4.dist-info → pycatfile-0.22.4.dist-info}/WHEEL +0 -0
- {pycatfile-0.21.4.dist-info → pycatfile-0.22.4.dist-info}/licenses/LICENSE +0 -0
- {pycatfile-0.21.4.dist-info → pycatfile-0.22.4.dist-info}/top_level.txt +0 -0
- {pycatfile-0.21.4.dist-info → pycatfile-0.22.4.dist-info}/zip-safe +0 -0
pycatfile.py
CHANGED
@@ -14,7 +14,7 @@
     Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
     Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski
 
-    $FileInfo: pycatfile.py - Last Update: 8/
+    $FileInfo: pycatfile.py - Last Update: 8/29/2025 Ver. 0.22.4 RC 1 - Author: cooldude2k $
 '''
 
 from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -38,6 +38,7 @@ import zipfile
 import binascii
 import platform
 from io import StringIO, BytesIO
+import posixpath as pp  # POSIX-safe joins/normpaths
 try:
     from backports import tempfile
 except ImportError:
@@ -45,10 +46,10 @@ except ImportError:
 # FTP Support
 ftpssl = True
 try:
-    from ftplib import FTP, FTP_TLS
+    from ftplib import FTP, FTP_TLS, all_errors
 except ImportError:
     ftpssl = False
-    from ftplib import FTP
+    from ftplib import FTP, all_errors
 
 try:
     import ujson as json
@@ -102,9 +103,13 @@ baseint = tuple(baseint)
 
 # URL Parsing
 try:
-
+    # Python 3
+    from urllib.parse import urlparse, urlunparse, unquote
+    from urllib.request import url2pathname
 except ImportError:
+    # Python 2
     from urlparse import urlparse, urlunparse
+    from urllib import unquote, url2pathname
 
 # Windows-specific setup
 if os.name == "nt":
@@ -266,6 +271,8 @@ def get_default_threads():
 
 
 __use_pysftp__ = False
+__upload_proto_support__ = "^(ftp|ftps|sftp|scp)://"
+__download_proto_support__ = "^(http|https|ftp|ftps|sftp|scp)://"
 if(not havepysftp):
     __use_pysftp__ = False
 __use_http_lib__ = "httpx"
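The two module-level patterns introduced here centralize which URL schemes the writer and reader treat as remote endpoints; later hunks swap them in for the previously inlined regex literals. A quick illustration of the intended use, assuming nothing beyond the regexes exactly as defined above:

    import re
    __upload_proto_support__ = "^(ftp|ftps|sftp|scp)://"
    __download_proto_support__ = "^(http|https|ftp|ftps|sftp|scp)://"
    re.findall(__download_proto_support__, "https://example.com/archive.cat")  # match: readable via download
    re.findall(__upload_proto_support__, "https://example.com/archive.cat")    # no match: HTTP(S) is download-only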
@@ -385,12 +392,12 @@ __file_format_extension__ = __file_format_multi_dict__[__file_format_default__][
 __file_format_dict__ = __file_format_multi_dict__[__file_format_default__]
 __project__ = __program_name__
 __project_url__ = "https://github.com/GameMaker2k/PyCatFile"
-__version_info__ = (0,
-__version_date_info__ = (2025, 9,
+__version_info__ = (0, 22, 4, "RC 1", 1)
+__version_date_info__ = (2025, 9, 29, "RC 1", 1)
 __version_date__ = str(__version_date_info__[0]) + "." + str(
     __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
 __revision__ = __version_info__[3]
-__revision_id__ = "$Id:
+__revision_id__ = "$Id: 3f8512dda0c87f8cea428942ed9299d7b298439a $"
 if(__version_info__[4] is not None):
     __version_date_plusrc__ = __version_date__ + \
         "-" + str(__version_date_info__[4])
@@ -620,6 +627,182 @@ def _normalize_initial_data(data, isbytes, encoding):
     return str(data)
 
 
+def _split_posix(path_text):
+    """Split POSIX paths regardless of OS; return list of components."""
+    # Normalize leading './'
+    if path_text.startswith(u'./'):
+        path_text = path_text[2:]
+    # Strip redundant slashes
+    path_text = re.sub(u'/+', u'/', path_text)
+    # Drop trailing '/' so 'dir/' -> ['dir']
+    if path_text.endswith(u'/'):
+        path_text = path_text[:-1]
+    return path_text.split(u'/') if path_text else []
+
+def _is_abs_like(s):
+    """Absolute targets (POSIX or Windows-drive style)."""
+    return s.startswith(u'/') or s.startswith(u'\\') or re.match(u'^[A-Za-z]:[/\\\\]', s)
+
+def _resolves_outside(base_rel, target_rel):
+    """
+    Given a base directory (relative, POSIX) and a target (relative),
+    return True if base/target resolves outside of base.
+    We anchor under '/' so normpath is root-anchored and portable.
+    """
+    base_clean = u'/'.join(_split_posix(base_rel))
+    target_clean = u'/'.join(_split_posix(target_rel))
+    base_abs = u'/' + base_clean if base_clean else u'/'
+    combined = pp.normpath(pp.join(base_abs, target_clean))
+    if combined == base_abs or combined.startswith(base_abs + u'/'):
+        return False
+    return True
+
+
+def DetectTarBombCatFileArray(listarrayfiles,
+                              top_file_ratio_threshold=0.6,
+                              min_members_for_ratio=4,
+                              symlink_policy="escape-only",  # 'escape-only' | 'deny' | 'single-folder-only'
+                              to_text=to_text):
+    """
+    Detect 'tarbomb-like' archives from CatFileToArray/TarFileToArray dicts.
+
+    Parameters:
+      listarrayfiles: dict with key 'ffilelist' -> list of entries (requires 'fname')
+      top_file_ratio_threshold: float, fraction of root files considered tarbomb
+      min_members_for_ratio: int, minimum members before ratio heuristic applies
+      symlink_policy:
+        - 'escape-only': only symlinks that escape parent/are absolute are unsafe
+        - 'deny': any symlink is unsafe
+        - 'single-folder-only': symlinks allowed only if archive has a single top-level folder
+      to_text: normalization function (your provided to_text)
+
+    Returns dict with:
+      - is_tarbomb, reasons, total_members, top_level_entries, top_level_files_count,
+        has_absolute_paths, has_parent_traversal,
+        symlink_escapes_root (bool), symlink_issues (list[{entry,target,reason}])
+    """
+    files = listarrayfiles or {}
+    members = files.get('ffilelist') or []
+
+    names = []
+    has_abs = False
+    has_parent = False
+
+    # Symlink tracking
+    has_any_symlink = False
+    symlink_issues = []
+    any_symlink_escape = False
+
+    for m in members:
+        m = m or {}
+        name = to_text(m.get('fname', u""))
+
+        if _is_abs_like(name):
+            has_abs = True
+
+        parts = _split_posix(name)
+        if u'..' in parts:
+            has_parent = True
+
+        if not parts:
+            continue
+
+        norm_name = u'/'.join(parts)
+        names.append(norm_name)
+
+        # ---- Symlink detection ----
+        ftype = m.get('ftype')
+        is_symlink = (ftype == 2) or (to_text(ftype).lower() == u'symlink' if ftype is not None else False)
+        if is_symlink:
+            has_any_symlink = True
+            target = to_text(m.get('flinkname', u""))
+            # Absolute symlink target is unsafe
+            if _is_abs_like(target):
+                any_symlink_escape = True
+                symlink_issues.append({'entry': norm_name, 'target': target, 'reason': 'absolute symlink target'})
+            else:
+                parent = u'/'.join(parts[:-1])  # may be ''
+                if _resolves_outside(parent, target):
+                    any_symlink_escape = True
+                    symlink_issues.append({'entry': norm_name, 'target': target, 'reason': 'symlink escapes parent directory'})
+
+    total = len(names)
+    reasons = []
+    if total == 0:
+        return {
+            "is_tarbomb": False,
+            "reasons": ["archive contains no members"],
+            "total_members": 0,
+            "top_level_entries": [],
+            "top_level_files_count": 0,
+            "has_absolute_paths": has_abs,
+            "has_parent_traversal": has_parent,
+            "symlink_escapes_root": any_symlink_escape,
+            "symlink_issues": symlink_issues,
+        }
+
+    # Layout counts
+    top_counts = {}
+    top_level_files_count = 0
+    for name in names:
+        parts = name.split(u'/')
+        first = parts[0]
+        top_counts[first] = top_counts.get(first, 0) + 1
+        if len(parts) == 1:  # directly at archive root
+            top_level_files_count += 1
+
+    top_keys = sorted(top_counts.keys())
+    is_tarbomb = False
+
+    # Path-based dangers
+    if has_abs:
+        is_tarbomb = True
+        reasons.append("contains absolute paths (dangerous)")
+    if has_parent:
+        is_tarbomb = True
+        reasons.append("contains parent-traversal ('..') entries (dangerous)")
+    if any_symlink_escape:
+        is_tarbomb = True
+        reasons.append("contains symlinks that escape their parent directory")
+
+    # Symlink policy enforcement
+    if symlink_policy == "deny" and has_any_symlink:
+        is_tarbomb = True
+        reasons.append("symlinks present and policy is 'deny'")
+    elif symlink_policy == "single-folder-only" and has_any_symlink and len(top_keys) != 1:
+        is_tarbomb = True
+        reasons.append("symlinks present but archive lacks a single top-level folder")
+
+    # Tarbomb layout heuristics
+    if len(top_keys) == 1:
+        reasons.append("single top-level entry '{0}'".format(top_keys[0]))
+    else:
+        ratio = float(top_level_files_count) / float(total)
+        if total >= min_members_for_ratio and ratio > float(top_file_ratio_threshold):
+            is_tarbomb = True
+            reasons.append("high fraction of members ({0:.0%}) at archive root".format(ratio))
+        else:
+            max_bucket = max(top_counts.values()) if top_counts else 0
+            if max_bucket < total * 0.9:
+                is_tarbomb = True
+                reasons.append("multiple top-level entries with no dominant folder: {0}".format(
+                    u", ".join(top_keys[:10])))
+            else:
+                reasons.append("multiple top-level entries but one dominates")
+
+    return {
+        "is_tarbomb": bool(is_tarbomb),
+        "reasons": reasons,
+        "total_members": total,
+        "top_level_entries": top_keys,
+        "top_level_files_count": top_level_files_count,
+        "has_absolute_paths": has_abs,
+        "has_parent_traversal": has_parent,
+        "symlink_escapes_root": any_symlink_escape,
+        "symlink_issues": symlink_issues,
+    }
+
+
 def MkTempFile(data=None, inmem=__use_inmemfile__, isbytes=True, prefix=__project__,
                delete=True, encoding="utf-8"):
     """
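The DetectTarBombCatFileArray helper added above works purely on the in-memory dict produced by the archive readers, so it can gate extraction before anything touches the filesystem. A minimal sketch of the intended call pattern (the reader call and its arguments are assumptions; only the 'ffilelist'/'fname'/'ftype'/'flinkname' shape is documented in the docstring):

    # Hypothetical pre-extraction safety gate.
    listarrayfiles = ReadInFileWithContentToArray("backup.cat", listonly=True)  # assumed to return the CatFileToArray-style dict
    report = DetectTarBombCatFileArray(listarrayfiles, symlink_policy="deny")
    if report["is_tarbomb"]:
        raise RuntimeError("refusing to extract: " + "; ".join(report["reasons"]))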
@@ -686,6 +869,13 @@ def RemoveWindowsPath(dpath):
     """
     if not dpath:
         return ""
+    if re.match("^file://", dpath, re.IGNORECASE):
+        # Normalize to file:/// if it's a local path (no host)
+        if dpath.lower().startswith("file://") and not dpath.lower().startswith("file:///"):
+            # insert the extra slash
+            dpath = "file:///" + dpath[7:]
+        dparsed = urlparse(dpath)
+        dpath = url2pathname(dparsed.path)
     # Accept bytes and decode safely
     if isinstance(dpath, (bytes, bytearray)):
         dpath = dpath.decode("utf-8", "ignore")
@@ -701,6 +891,13 @@ def NormalizeRelativePath(inpath):
     """
     Ensures the path is relative unless it is absolute. Prepares consistent relative paths.
     """
+    if re.match("^file://", inpath, re.IGNORECASE):
+        # Normalize to file:/// if it's a local path (no host)
+        if inpath.lower().startswith("file://") and not inpath.lower().startswith("file:///"):
+            # insert the extra slash
+            inpath = "file:///" + inpath[7:]
+        dparsed = urlparse(inpath)
+        inpath = url2pathname(dparsed.path)
     inpath = RemoveWindowsPath(inpath)
     if os.path.isabs(inpath):
         outpath = inpath
@@ -757,6 +954,13 @@ def ListDir(dirpath, followlink=False, duplicates=False, include_regex=None, exc
     include_pattern = re.compile(include_regex) if include_regex else None
     exclude_pattern = re.compile(exclude_regex) if exclude_regex else None
     for mydirfile in dirpath:
+        if re.match("^file://", mydirfile, re.IGNORECASE):
+            # Normalize to file:/// if it's a local path (no host)
+            if mydirfile.lower().startswith("file://") and not mydirfile.lower().startswith("file:///"):
+                # insert the extra slash
+                mydirfile = "file:///" + mydirfile[7:]
+            dparsed = urlparse(mydirfile)
+            mydirfile = url2pathname(dparsed.path)
         if not os.path.exists(mydirfile):
             return False
         mydirfile = NormalizeRelativePath(mydirfile)
@@ -827,6 +1031,13 @@ def ListDirAdvanced(dirpath, followlink=False, duplicates=False, include_regex=N
     include_pattern = re.compile(include_regex) if include_regex else None
     exclude_pattern = re.compile(exclude_regex) if exclude_regex else None
     for mydirfile in dirpath:
+        if re.match("^file://", mydirfile, re.IGNORECASE):
+            # Normalize to file:/// if it's a local path (no host)
+            if mydirfile.lower().startswith("file://") and not mydirfile.lower().startswith("file:///"):
+                # insert the extra slash
+                mydirfile = "file:///" + mydirfile[7:]
+            dparsed = urlparse(mydirfile)
+            mydirfile = url2pathname(dparsed.path)
         if not os.path.exists(mydirfile):
             return False
         mydirfile = NormalizeRelativePath(mydirfile)
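RemoveWindowsPath, NormalizeRelativePath, ListDir, and ListDirAdvanced now all accept file:// URLs and collapse them to local filesystem paths with urlparse plus url2pathname, inserting the third slash when the host part is missing. A standalone sketch of that normalization (illustrative; it mirrors the inserted branch rather than calling the library):

    from urllib.parse import urlparse
    from urllib.request import url2pathname

    p = "file://C:/data/archive.cat"               # host-less form occasionally produced on Windows
    if p.lower().startswith("file://") and not p.lower().startswith("file:///"):
        p = "file:///" + p[7:]                     # file:// -> file:///
    local = url2pathname(urlparse(p).path)         # 'C:\\data\\archive.cat' on Windows, '/C:/data/archive.cat' elsewhere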
@@ -2036,7 +2247,7 @@ def ReadFileHeaderDataWoSize(fp, delimiter=__file_format_dict__['format_delimite
     if(headersize <= 0 or headernumfields <= 0):
         return []
     headerdata = ReadTillNullByteByNum(fp, delimiter, headernumfields)
-    #headerdata = ReadFileHeaderData(fp, headernumfields, delimiter)
+    #headerdata = ReadFileHeaderData(fp, headernumfields, delimiter)
     HeaderOut = preheaderdata + headerdata
     return HeaderOut
 
@@ -2523,22 +2734,20 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
     return outlist
 
 
-def ReadFileDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
     if(not hasattr(fp, "read")):
         return False
     delimiter = formatspecs['format_delimiter']
-    curloc =
+    curloc = filestart
     try:
-        fp.seek(0, 2)
+        fp.seek(0, 2)
     except OSError:
-        SeekToEndOfFile(fp)
+        SeekToEndOfFile(fp)
     except ValueError:
-        SeekToEndOfFile(fp)
-    CatSize = fp.tell()
-    CatSizeEnd = CatSize
+        SeekToEndOfFile(fp)
+    CatSize = fp.tell()
+    CatSizeEnd = CatSize
     fp.seek(curloc, 0)
-    if(curloc > 0):
-        fp.seek(0, 0)
     inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
     formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
     formdelszie = len(formatspecs['format_delimiter'])
@@ -2553,8 +2762,6 @@ def ReadFileDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=Fa
     else:
         inheader = ReadFileHeaderDataWoSize(
             fp, formatspecs['format_delimiter'])
-    if(curloc > 0):
-        fp.seek(curloc, 0)
     fprechecksumtype = inheader[-2]
     fprechecksum = inheader[-1]
     headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
@@ -2578,22 +2785,20 @@ def ReadFileDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=Fa
     return flist
 
 
-def ReadFileDataWithContentToArray(fp, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
+def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
     if(not hasattr(fp, "read")):
         return False
     delimiter = formatspecs['format_delimiter']
-    curloc =
+    curloc = filestart
    try:
-        fp.seek(0, 2)
+        fp.seek(0, 2)
     except OSError:
-        SeekToEndOfFile(fp)
+        SeekToEndOfFile(fp)
     except ValueError:
-        SeekToEndOfFile(fp)
-    CatSize = fp.tell()
-    CatSizeEnd = CatSize
+        SeekToEndOfFile(fp)
+    CatSize = fp.tell()
+    CatSizeEnd = CatSize
     fp.seek(curloc, 0)
-    if(curloc > 0):
-        fp.seek(0, 0)
     inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
     formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
     formdelszie = len(formatspecs['format_delimiter'])
@@ -2625,8 +2830,6 @@ def ReadFileDataWithContentToArray(fp, seekstart=0, seekend=0, listonly=False, c
         fextrafieldslist = json.loads(fextrafieldslist[0])
     except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
         pass
-    if(curloc > 0):
-        fp.seek(curloc, 0)
     formversion = re.findall("([\\d]+)", formstring)
     fheadsize = int(inheader[0], 16)
     fnumfields = int(inheader[1], 16)
@@ -2645,7 +2848,7 @@ def ReadFileDataWithContentToArray(fp, seekstart=0, seekend=0, listonly=False, c
         return False
     formversions = re.search('(.*?)(\\d+)', formstring).groups()
     fcompresstype = ""
-    outlist = {'fnumfiles': fnumfiles, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
+    outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
     if (seekstart < 0) or (seekstart > fnumfiles):
         seekstart = 0
     if (seekend == 0) or (seekend > fnumfiles) or (seekend < seekstart):
@@ -2738,22 +2941,20 @@ def ReadFileDataWithContentToArray(fp, seekstart=0, seekend=0, listonly=False, c
     return outlist
 
 
-def ReadFileDataWithContentToList(fp, seekstart=0, seekend=0, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
+def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
     if(not hasattr(fp, "read")):
         return False
     delimiter = formatspecs['format_delimiter']
-    curloc =
+    curloc = filestart
     try:
-        fp.seek(0, 2)
+        fp.seek(0, 2)
     except OSError:
-        SeekToEndOfFile(fp)
+        SeekToEndOfFile(fp)
     except ValueError:
-        SeekToEndOfFile(fp)
-    CatSize = fp.tell()
-    CatSizeEnd = CatSize
+        SeekToEndOfFile(fp)
+    CatSize = fp.tell()
+    CatSizeEnd = CatSize
     fp.seek(curloc, 0)
-    if(curloc > 0):
-        fp.seek(0, 0)
     inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
     formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
     formdelszie = len(formatspecs['format_delimiter'])
@@ -2785,8 +2986,6 @@ def ReadFileDataWithContentToList(fp, seekstart=0, seekend=0, listonly=False, co
         fextrafieldslist = json.loads(fextrafieldslist[0])
     except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
         pass
-    if(curloc > 0):
-        fp.seek(curloc, 0)
     formversion = re.findall("([\\d]+)", formstring)
     fheadsize = int(inheader[0], 16)
     fnumfields = int(inheader[1], 16)
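The new leading filestart parameter on the ReadFileDataWithContent* family replaces the old assumption that the archive header sits at offset 0, so an archive embedded at a known byte offset inside a larger stream can now be parsed in place. Illustrative call, leaving every argument not shown in the signatures above at its default:

    with open("bundle.bin", "rb") as fp:
        toc = ReadFileDataWithContentToArray(fp, filestart=1024, listonly=True)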
@@ -2904,25 +3103,25 @@ def ReadFileDataWithContentToList(fp, seekstart=0, seekend=0, listonly=False, co
     return outlist
 
 
-def ReadInFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
     if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
         formatspecs = formatspecs[fmttype]
     elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
         fmttype = "auto"
     if(hasattr(infile, "read") or hasattr(infile, "write")):
         fp = infile
-        fp.seek(
-        compresscheck = CheckCompressionType(fp, formatspecs, False)
+        fp.seek(filestart, 0)
+        compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         else:
-            fp.seek(
-            checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
+            fp.seek(filestart, 0)
+            checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
             if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
                 formatspecs = formatspecs[checkcompressfile]
-        fp.seek(
-        fp = UncompressFileAlt(fp, formatspecs)
-        checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
+        fp.seek(filestart, 0)
+        fp = UncompressFileAlt(fp, formatspecs, filestart)
+        checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
         if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
             return TarFileToArray(infile, seekstart, seekend, listonly, contentasfile, skipchecksum, formatspecs, seektoend, True)
         elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
@@ -2957,58 +3156,58 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0,
                 compresscheck = "zlib"
             else:
                 return False
-        fp.seek(
+        fp.seek(filestart, 0)
     elif(infile == "-"):
         fp = MkTempFile()
         if(hasattr(sys.stdin, "buffer")):
             shutil.copyfileobj(sys.stdin.buffer, fp)
         else:
             shutil.copyfileobj(sys.stdin, fp)
-        fp.seek(
-        fp = UncompressFileAlt(fp, formatspecs)
-        fp.seek(
-        compresscheck = CheckCompressionType(fp, formatspecs, False)
+        fp.seek(filestart, 0)
+        fp = UncompressFileAlt(fp, formatspecs, filestart)
+        fp.seek(filestart, 0)
+        compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         else:
-            fp.seek(
-            checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
+            fp.seek(filestart, 0)
+            checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
             formatspecs = formatspecs[checkcompressfile]
-            fp.seek(
+            fp.seek(filestart, 0)
         if(not fp):
             return False
-        fp.seek(
+        fp.seek(filestart, 0)
     elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
         fp = MkTempFile()
         fp.write(infile)
-        fp.seek(
-        fp = UncompressFileAlt(fp, formatspecs)
-        fp.seek(
-        compresscheck = CheckCompressionType(fp, formatspecs, False)
+        fp.seek(filestart, 0)
+        fp = UncompressFileAlt(fp, formatspecs, filestart)
+        fp.seek(filestart, 0)
+        compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         else:
-            fp.seek(
-            checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
+            fp.seek(filestart, 0)
+            checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
             formatspecs = formatspecs[checkcompressfile]
-            fp.seek(
+            fp.seek(filestart, 0)
         if(not fp):
             return False
-        fp.seek(
-    elif(re.findall(
+        fp.seek(filestart, 0)
+    elif(re.findall(__download_proto_support__, infile)):
         fp = download_file_from_internet_file(infile)
-        fp.seek(
-        compresscheck = CheckCompressionType(fp, formatspecs, False)
+        fp.seek(filestart, 0)
+        compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         else:
-            fp.seek(
-            checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
+            fp.seek(filestart, 0)
+            checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
             formatspecs = formatspecs[checkcompressfile]
-            fp.seek(
+            fp.seek(filestart, 0)
         if(not compresscheck):
             fextname = os.path.splitext(infile)[1]
             if(fextname == ".gz"):
@@ -3029,14 +3228,14 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0,
                 compresscheck = "zlib"
             else:
                 return False
-        fp.seek(
-        fp = UncompressFileAlt(fp, formatspecs)
+        fp.seek(filestart, 0)
+        fp = UncompressFileAlt(fp, formatspecs, filestart)
         if(not fp):
             return False
-        fp.seek(
+        fp.seek(filestart, 0)
     else:
         infile = RemoveWindowsPath(infile)
-        checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
+        checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
         if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
             formatspecs = formatspecs[checkcompressfile]
         if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
@@ -3051,7 +3250,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0,
             return False
         elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
             return False
-        compresscheck = CheckCompressionType(infile, formatspecs, True)
+        compresscheck = CheckCompressionType(infile, formatspecs, filestart, True)
         if(not compresscheck):
             fextname = os.path.splitext(infile)[1]
             if(fextname == ".gz"):
@@ -3074,43 +3273,43 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0,
                 return False
         if(not compresscheck):
             return False
-        fp = UncompressFile(infile, formatspecs, "rb")
-    return ReadFileDataWithContentToArray(fp, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+        fp = UncompressFile(infile, formatspecs, "rb", filestart)
+    return ReadFileDataWithContentToArray(fp, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
 
 
-def ReadInMultipleFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+def ReadInMultipleFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
     if(isinstance(infile, (list, tuple, ))):
         pass
     else:
         infile = [infile]
     outretval = {}
     for curfname in infile:
-        outretval[curfname] = ReadInFileWithContentToArray(curfname, fmttype, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+        outretval[curfname] = ReadInFileWithContentToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
     return outretval
 
-def ReadInMultipleFilesWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
-    return ReadInMultipleFileWithContentToArray(infile, fmttype, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+def ReadInMultipleFilesWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+    return ReadInMultipleFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
 
 
-def ReadInFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
     if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
         formatspecs = formatspecs[fmttype]
     elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
         fmttype = "auto"
     if(hasattr(infile, "read") or hasattr(infile, "write")):
         fp = infile
-        fp.seek(
-        compresscheck = CheckCompressionType(fp, formatspecs, False)
+        fp.seek(filestart, 0)
+        compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         else:
-            fp.seek(
-            checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
+            fp.seek(filestart, 0)
+            checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
             formatspecs = formatspecs[checkcompressfile]
-        fp.seek(
-        fp = UncompressFileAlt(fp, formatspecs)
-        checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
+        fp.seek(filestart, 0)
+        fp = UncompressFileAlt(fp, formatspecs, filestart)
+        checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
         if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
             return TarFileToArray(infile, seekstart, seekend, listonly, contentasfile, skipchecksum, formatspecs, seektoend, True)
         elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
@@ -3145,58 +3344,58 @@ def ReadInFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0,
                 compresscheck = "zlib"
             else:
                 return False
-        fp.seek(
+        fp.seek(filestart, 0)
     elif(infile == "-"):
         fp = MkTempFile()
         if(hasattr(sys.stdin, "buffer")):
             shutil.copyfileobj(sys.stdin.buffer, fp)
         else:
             shutil.copyfileobj(sys.stdin, fp)
-        fp.seek(
-        fp = UncompressFileAlt(fp, formatspecs)
-        fp.seek(
-        compresscheck = CheckCompressionType(fp, formatspecs, False)
+        fp.seek(filestart, 0)
+        fp = UncompressFileAlt(fp, formatspecs, filestart)
+        fp.seek(filestart, 0)
+        compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         else:
-            fp.seek(
-            checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
+            fp.seek(filestart, 0)
+            checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
             formatspecs = formatspecs[checkcompressfile]
-            fp.seek(
+            fp.seek(filestart, 0)
         if(not fp):
             return False
-        fp.seek(
+        fp.seek(filestart, 0)
     elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
         fp = MkTempFile()
         fp.write(infile)
-        fp.seek(
-        fp = UncompressFileAlt(fp, formatspecs)
-        fp.seek(
-        compresscheck = CheckCompressionType(fp, formatspecs, False)
+        fp.seek(filestart, 0)
+        fp = UncompressFileAlt(fp, formatspecs, filestart)
+        fp.seek(filestart, 0)
+        compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         else:
-            fp.seek(
-            checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
+            fp.seek(filestart, 0)
+            checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
             formatspecs = formatspecs[checkcompressfile]
-            fp.seek(
+            fp.seek(filestart, 0)
        if(not fp):
            return False
-        fp.seek(
-    elif(re.findall(
+        fp.seek(filestart, 0)
+    elif(re.findall(__download_proto_support__, infile)):
         fp = download_file_from_internet_file(infile)
-        fp.seek(
-        compresscheck = CheckCompressionType(fp, formatspecs, False)
+        fp.seek(filestart, 0)
+        compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         else:
-            fp.seek(
-            checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
+            fp.seek(filestart, 0)
+            checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
             formatspecs = formatspecs[checkcompressfile]
-            fp.seek(
+            fp.seek(filestart, 0)
         if(not compresscheck):
             fextname = os.path.splitext(infile)[1]
             if(fextname == ".gz"):
@@ -3217,14 +3416,14 @@ def ReadInFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0,
                 compresscheck = "zlib"
             else:
                 return False
-        fp.seek(
-        fp = UncompressFileAlt(fp, formatspecs)
+        fp.seek(filestart, 0)
+        fp = UncompressFileAlt(fp, formatspecs, filestart)
         if(not fp):
             return False
-        fp.seek(
+        fp.seek(filestart, 0)
     else:
         infile = RemoveWindowsPath(infile)
-        checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
+        checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
         if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
             formatspecs = formatspecs[checkcompressfile]
         if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
@@ -3239,7 +3438,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0,
             return False
         elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
             return False
-        compresscheck = CheckCompressionType(infile, formatspecs, True)
+        compresscheck = CheckCompressionType(infile, formatspecs, filestart, True)
         if(not compresscheck):
             fextname = os.path.splitext(infile)[1]
             if(fextname == ".gz"):
@@ -3262,22 +3461,22 @@ def ReadInFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0,
                 return False
         if(not compresscheck):
             return False
-        fp = UncompressFile(infile, formatspecs, "rb")
-    return ReadFileDataWithContentToList(fp, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+        fp = UncompressFile(infile, formatspecs, "rb", filestart)
+    return ReadFileDataWithContentToList(fp, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
 
 
-def ReadInMultipleFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+def ReadInMultipleFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
     if(isinstance(infile, (list, tuple, ))):
         pass
     else:
         infile = [infile]
     outretval = {}
     for curfname in infile:
-        curretfile[curfname] = ReadInFileWithContentToList(curfname, fmttype, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+        curretfile[curfname] = ReadInFileWithContentToList(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
     return outretval
 
-def ReadInMultipleFilesWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
-    return ReadInMultipleFileWithContentToList(infile, fmttype, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+def ReadInMultipleFilesWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+    return ReadInMultipleFileWithContentToList(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
 
 
 def AppendNullByte(indata, delimiter=__file_format_dict__['format_delimiter']):
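With the protocol regexes and filestart threaded through, the high-level ReadInFile* entry points accept local paths, open file objects, "-" for stdin, raw bytes, and remote URLs matched by __download_proto_support__, each with an optional starting offset. Hedged example calls (the actual remote fetch goes through download_file_from_internet_file, whose behavior is not shown in this diff):

    ReadInFileWithContentToArray("ftp://example.com/pub/archive.cat", listonly=True)
    ReadInFileWithContentToArray("archive.cat", fmttype="auto", filestart=0, listonly=True)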
@@ -3406,7 +3605,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
         fp = MkTempFile()
     elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
         fp = outfile
-    elif(re.findall(
+    elif(re.findall(__upload_proto_support__, outfile)):
         fp = MkTempFile()
     else:
         fbasename = os.path.splitext(outfile)[0]
@@ -3442,7 +3641,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
         outvar = fp.read()
         fp.close()
         return outvar
-    elif(re.findall(
+    elif(re.findall(__upload_proto_support__, outfile)):
         fp = CompressOpenFileAlt(
             fp, compression, compressionlevel, compressionuselist, formatspecs)
         fp.seek(0, 0)
@@ -3746,9 +3945,9 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, filevalues=[], ext
         if not followlink and ftype in data_types:
             with open(fname, "rb") as fpc:
                 shutil.copyfileobj(fpc, fcontents)
-            typechecktest = CheckCompressionType(fcontents, closefp=False)
+            typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
             fcontents.seek(0, 0)
-            fcencoding = GetFileEncoding(fcontents, False)
+            fcencoding = GetFileEncoding(fcontents, 0, False)
             if(typechecktest is False and not compresswholefile):
                 fcontents.seek(0, 2)
                 ucfsize = fcontents.tell()
@@ -3793,9 +3992,9 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, filevalues=[], ext
             flstatinfo = os.stat(flinkname)
             with open(flinkname, "rb") as fpc:
                 shutil.copyfileobj(fpc, fcontents)
-            typechecktest = CheckCompressionType(fcontents, closefp=False)
+            typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
             fcontents.seek(0, 0)
-            fcencoding = GetFileEncoding(fcontents, False)
+            fcencoding = GetFileEncoding(fcontents, 0, False)
             if(typechecktest is False and not compresswholefile):
                 fcontents.seek(0, 2)
                 ucfsize = fcontents.tell()
@@ -3905,7 +4104,7 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, filevalues=[], extr
         fheaderchecksumtype = curfname[26]
         fcontentchecksumtype = curfname[27]
         fcontents = curfname[28]
-        fencoding = GetFileEncoding(fcontents, False)
+        fencoding = GetFileEncoding(fcontents, 0, False)
         tmpoutlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize,
                       fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev, fdev_minor, fdev_major, fseeknextfile]
         fcontents.seek(0, 0)
@@ -3955,7 +4154,7 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
         fp = MkTempFile()
     elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
         fp = outfile
-    elif(re.findall(
+    elif(re.findall(__upload_proto_support__, outfile)):
         fp = MkTempFile()
     else:
         fbasename = os.path.splitext(outfile)[0]
@@ -3992,7 +4191,7 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
         outvar = fp.read()
         fp.close()
         return outvar
-    elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
+    elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
         fp = CompressOpenFileAlt(
             fp, compression, compressionlevel, compressionuselist, formatspecs)
         fp.seek(0, 0)
@@ -4035,7 +4234,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
         fp = MkTempFile()
     elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
         fp = outfile
-    elif(re.findall(
+    elif(re.findall(__upload_proto_support__, outfile)):
         fp = MkTempFile()
     else:
         fbasename = os.path.splitext(outfile)[0]
@@ -4072,7 +4271,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
         outvar = fp.read()
         fp.close()
         return outvar
-    elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
+    elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
         fp = CompressOpenFileAlt(
             fp, compression, compressionlevel, compressionuselist, formatspecs)
         fp.seek(0, 0)
@@ -4156,7 +4355,8 @@ def GzipCompressData(data, compresslevel=9):
     out = MkTempFile()
     with gzip.GzipFile(fileobj=out, mode="wb", compresslevel=compresslevel) as f:
         f.write(data)
-
+    out.seek(0, 0)
+    compressed_data = out.read()
     return compressed_data
 
 
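The GzipCompressData fix rewinds the temporary buffer and reads it back before returning, so the helper now actually returns the compressed bytes. A quick round-trip check using only the standard library (assertion for illustration):

    import gzip
    blob = GzipCompressData(b"hello world", compresslevel=9)
    assert gzip.decompress(blob) == b"hello world"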
@@ -4254,7 +4454,7 @@ def IsSingleDict(variable):
     return True
 
 
-def GetFileEncoding(infile, closefp=True):
+def GetFileEncoding(infile, filestart=0, closefp=True):
     if(hasattr(infile, "read") or hasattr(infile, "write")):
         fp = infile
     else:
@@ -4263,19 +4463,19 @@ def GetFileEncoding(infile, closefp=True):
         except FileNotFoundError:
             return False
     file_encoding = "UTF-8"
-    fp.seek(
+    fp.seek(filestart, 0)
     prefp = fp.read(2)
     if(prefp == binascii.unhexlify("fffe")):
         file_encoding = "UTF-16LE"
     elif(prefp == binascii.unhexlify("feff")):
         file_encoding = "UTF-16BE"
-    fp.seek(
+    fp.seek(filestart, 0)
     prefp = fp.read(3)
     if(prefp == binascii.unhexlify("efbbbf")):
         file_encoding = "UTF-8"
     elif(prefp == binascii.unhexlify("0efeff")):
         file_encoding = "SCSU"
-    fp.seek(
+    fp.seek(filestart, 0)
     prefp = fp.read(4)
     if(prefp == binascii.unhexlify("fffe0000")):
         file_encoding = "UTF-32LE"
@@ -4291,21 +4491,21 @@ def GetFileEncoding(infile, closefp=True):
         file_encoding = "UTF-7"
     elif(prefp == binascii.unhexlify("2b2f762f")):
         file_encoding = "UTF-7"
-    fp.seek(
+    fp.seek(filestart, 0)
     if(closefp):
         fp.close()
     return file_encoding
 
 
-def GetFileEncodingFromString(instring, closefp=True):
+def GetFileEncodingFromString(instring, filestart=0, closefp=True):
     try:
         instringsfile = MkTempFile(instring)
     except TypeError:
         instringsfile = MkTempFile(instring.encode("UTF-8"))
-    return GetFileEncoding(instringsfile, closefp)
+    return GetFileEncoding(instringsfile, filestart, closefp)
 
 
-def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp=True):
+def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, filestart=0, closefp=True):
     if(hasattr(infile, "read") or hasattr(infile, "write")):
         fp = infile
     else:
@@ -4314,7 +4514,8 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
         except FileNotFoundError:
             return False
     filetype = False
-
+    curloc = filestart
+    fp.seek(filestart, 0)
     prefp = fp.read(2)
     if(prefp == binascii.unhexlify("1f8b")):
         filetype = "gzip"
@@ -4330,13 +4531,13 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
         filetype = "zlib"
     elif(prefp == binascii.unhexlify("1f9d")):
         filetype = "zcompress"
-    fp.seek(
+    fp.seek(curloc, 0)
     prefp = fp.read(3)
     if(prefp == binascii.unhexlify("425a68")):
         filetype = "bzip2"
     elif(prefp == binascii.unhexlify("5d0000")):
         filetype = "lzma"
-    fp.seek(
+    fp.seek(curloc, 0)
     prefp = fp.read(4)
     if(prefp == binascii.unhexlify("28b52ffd")):
         filetype = "zstd"
@@ -4348,29 +4549,29 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
         filetype = "zipfile"
     elif(prefp == binascii.unhexlify("504b0708")):
         filetype = "zipfile"
-    fp.seek(
+    fp.seek(curloc, 0)
     prefp = fp.read(5)
     if(prefp == binascii.unhexlify("7573746172")):
         filetype = "tarfile"
     if(prefp == binascii.unhexlify("7573746172")):
         filetype = "tarfile"
-    fp.seek(
+    fp.seek(curloc, 0)
     prefp = fp.read(6)
     if(prefp == binascii.unhexlify("fd377a585a00")):
         filetype = "xz"
     elif(prefp == binascii.unhexlify("377abcaf271c")):
         filetype = "7zipfile"
-    fp.seek(
+    fp.seek(curloc, 0)
     prefp = fp.read(7)
     if(prefp == binascii.unhexlify("526172211a0700")):
         filetype = "rarfile"
     elif(prefp == binascii.unhexlify("2a2a4143452a2a")):
         filetype = "ace"
-    fp.seek(
+    fp.seek(curloc, 0)
     prefp = fp.read(7)
     if(prefp == binascii.unhexlify("894c5a4f0d0a1a")):
         filetype = "lzo"
-    fp.seek(
+    fp.seek(curloc, 0)
     prefp = fp.read(8)
     if(prefp == binascii.unhexlify("7573746172003030")):
         filetype = "tarfile"
@@ -4378,7 +4579,7 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
         filetype = "tarfile"
     if(prefp == binascii.unhexlify("526172211a070100")):
         filetype = "rarfile"
-    fp.seek(
+    fp.seek(curloc, 0)
     if(IsNestedDict(formatspecs)):
         for key, value in formatspecs.items():
             prefp = fp.read(formatspecs[key]['format_len'])
@@ -4394,7 +4595,7 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
             if(formstring == inheaderver and formdel == formatspecs[key]['format_delimiter']):
                 filetype = formatspecs[key]['format_magic']
                 continue
-            fp.seek(
+            fp.seek(curloc, 0)
     elif(IsSingleDict(formatspecs)):
         prefp = fp.read(formatspecs['format_len'])
         if(prefp == binascii.unhexlify(formatspecs['format_hex'])):
@@ -4409,15 +4610,15 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
             filetype = formatspecs['format_magic']
         else:
             pass
-    fp.seek(
+    fp.seek(curloc, 0)
     prefp = fp.read(9)
     if(prefp == binascii.unhexlify("894c5a4f000d0a1a0a")):
         filetype = "lzo"
-    fp.seek(
+    fp.seek(curloc, 0)
     prefp = fp.read(10)
     if(prefp == binascii.unhexlify("7061785f676c6f62616c")):
         filetype = "tarfile"
-    fp.seek(
+    fp.seek(curloc, 0)
     if(filetype == "gzip" or filetype == "bzip2" or filetype == "lzma" or filetype == "zstd" or filetype == "lz4" or filetype == "zlib"):
         if(TarFileCheck(fp)):
             filetype = "tarfile"
@@ -4432,14 +4633,15 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
             return "7zipfile"
     else:
         filetype = False
-    fp.seek(
+    fp.seek(curloc, 0)
     if(closefp):
         fp.close()
     return filetype
 
 
-def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, closefp=True):
-    compresscheck = CheckCompressionType(infile, formatspecs, False)
+def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, filestart=0, closefp=True):
+    compresscheck = CheckCompressionType(infile, formatspecs, filestart, False)
+    curloc = filestart
     if(not compresscheck):
         fextname = os.path.splitext(infile)[1]
         if(fextname == ".gz"):
@@ -4488,7 +4690,7 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, clos
     elif(py7zr_support and compresscheck == "7zipfile" and py7zr.is_7zfile(infile)):
         return "7zipfile"
     if(hasattr(infile, "read") or hasattr(infile, "write")):
-        fp = UncompressFileAlt(infile, formatspecs)
+        fp = UncompressFileAlt(infile, formatspecs, filestart)
     else:
         try:
             if(compresscheck == "gzip" and compresscheck in compressionsupport):
@@ -4518,10 +4720,11 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, clos
         except FileNotFoundError:
             return False
     filetype = False
+    fp.seek(filestart, 0)
     prefp = fp.read(5)
     if(prefp == binascii.unhexlify("7573746172")):
         filetype = "tarfile"
-    fp.seek(
+    fp.seek(curloc, 0)
     if(IsNestedDict(formatspecs)):
         for key, value in formatspecs.items():
             prefp = fp.read(formatspecs[key]['format_len'])
@@ -4537,7 +4740,7 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, clos
             if(formstring == inheaderver and formdel == formatspecs[key]['format_delimiter']):
                 filetype = formatspecs[key]['format_magic']
                 continue
-            fp.seek(
+            fp.seek(curloc, 0)
     elif(IsSingleDict(formatspecs)):
         prefp = fp.read(formatspecs['format_len'])
         if(prefp == binascii.unhexlify(formatspecs['format_hex'])):
@@ -4552,36 +4755,36 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, clos
             filetype = formatspecs['format_magic']
         else:
             pass
-    fp.seek(
+    fp.seek(curloc, 0)
     prefp = fp.read(10)
     if(prefp == binascii.unhexlify("7061785f676c6f62616c")):
         filetype = "tarfile"
-    fp.seek(
+    fp.seek(curloc, 0)
     if(closefp):
         fp.close()
     return filetype
 
 
-def CheckCompressionTypeFromString(instring, formatspecs=__file_format_multi_dict__, closefp=True):
+def CheckCompressionTypeFromString(instring, formatspecs=__file_format_multi_dict__, filestart=0, closefp=True):
     try:
         instringsfile = MkTempFile(instring)
     except TypeError:
         instringsfile = MkTempFile(instring.encode("UTF-8"))
-    return CheckCompressionType(instringsfile, formatspecs, closefp)
|
|
4773
|
+
return CheckCompressionType(instringsfile, formatspecs, filestart, closefp)
|
|
4571
4774
|
|
|
4572
4775
|
|
|
4573
|
-
def CheckCompressionTypeFromBytes(instring, formatspecs=__file_format_multi_dict__, closefp=True):
|
|
4776
|
+
def CheckCompressionTypeFromBytes(instring, formatspecs=__file_format_multi_dict__, filestart=0, closefp=True):
|
|
4574
4777
|
try:
|
|
4575
4778
|
instringsfile = MkTempFile(instring)
|
|
4576
4779
|
except TypeError:
|
|
4577
4780
|
instringsfile = MkTempFile(instring.decode("UTF-8"))
|
|
4578
|
-
return CheckCompressionType(instringsfile, formatspecs, closefp)
|
|
4781
|
+
return CheckCompressionType(instringsfile, formatspecs, filestart, closefp)
|
|
4579
4782
|
|
|
4580
4783
|
|
|
4581
|
-
def UncompressFileAlt(fp, formatspecs=__file_format_multi_dict__):
|
|
4784
|
+
def UncompressFileAlt(fp, formatspecs=__file_format_multi_dict__, filestart=0):
|
|
4582
4785
|
if(not hasattr(fp, "read")):
|
|
4583
4786
|
return False
|
|
4584
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
4787
|
+
compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
|
|
4585
4788
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
4586
4789
|
formatspecs = formatspecs[compresscheck]
|
|
4587
4790
|
if(compresscheck == "gzip" and compresscheck in compressionsupport):
|
|
@@ -4615,8 +4818,8 @@ def UncompressFileAlt(fp, formatspecs=__file_format_multi_dict__):
|
|
|
4615
4818
|
return fp
|
|
4616
4819
|
|
|
4617
4820
|
|
|
4618
|
-
def UncompressFile(infile, formatspecs=__file_format_multi_dict__, mode="rb"):
|
|
4619
|
-
compresscheck = CheckCompressionType(infile, formatspecs, False)
|
|
4821
|
+
def UncompressFile(infile, formatspecs=__file_format_multi_dict__, mode="rb", filestart=0):
|
|
4822
|
+
compresscheck = CheckCompressionType(infile, formatspecs, filestart, False)
|
|
4620
4823
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
4621
4824
|
formatspecs = formatspecs[compresscheck]
|
|
4622
4825
|
if(sys.version_info[0] == 2 and compresscheck):
|
|
@@ -4662,8 +4865,8 @@ def UncompressFile(infile, formatspecs=__file_format_multi_dict__, mode="rb"):
|
|
|
4662
4865
|
return filefp
|
|
4663
4866
|
|
|
4664
4867
|
|
|
4665
|
-
def UncompressString(infile, formatspecs=__file_format_multi_dict__):
|
|
4666
|
-
compresscheck = CheckCompressionTypeFromString(infile, formatspecs, False)
|
|
4868
|
+
def UncompressString(infile, formatspecs=__file_format_multi_dict__, filestart=0):
|
|
4869
|
+
compresscheck = CheckCompressionTypeFromString(infile, formatspecs, filestart, False)
|
|
4667
4870
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
4668
4871
|
formatspecs = formatspecs[compresscheck]
|
|
4669
4872
|
if(compresscheck == "gzip" and compresscheck in compressionsupport):
|
|
@@ -4690,32 +4893,32 @@ def UncompressString(infile, formatspecs=__file_format_multi_dict__):
|
|
|
4690
4893
|
return fileuz
|
|
4691
4894
|
|
|
4692
4895
|
|
|
4693
|
-
def UncompressStringAlt(instring, formatspecs=__file_format_multi_dict__):
|
|
4896
|
+
def UncompressStringAlt(instring, formatspecs=__file_format_multi_dict__, filestart=0):
|
|
4694
4897
|
filefp = StringIO()
|
|
4695
|
-
outstring = UncompressString(instring, formatspecs)
|
|
4898
|
+
outstring = UncompressString(instring, formatspecs, filestart)
|
|
4696
4899
|
filefp.write(outstring)
|
|
4697
4900
|
filefp.seek(0, 0)
|
|
4698
4901
|
return filefp
|
|
4699
4902
|
|
|
4700
|
-
def UncompressStringAltFP(fp, formatspecs=__file_format_multi_dict__):
|
|
4903
|
+
def UncompressStringAltFP(fp, formatspecs=__file_format_multi_dict__, filestart=0):
|
|
4701
4904
|
if(not hasattr(fp, "read")):
|
|
4702
4905
|
return False
|
|
4703
|
-
prechck = CheckCompressionType(fp, formatspecs, False)
|
|
4906
|
+
prechck = CheckCompressionType(fp, formatspecs, filestart, False)
|
|
4704
4907
|
if(IsNestedDict(formatspecs) and prechck in formatspecs):
|
|
4705
4908
|
formatspecs = formatspecs[prechck]
|
|
4706
|
-
fp.seek(
|
|
4909
|
+
fp.seek(filestart, 0)
|
|
4707
4910
|
if(prechck!="zstd"):
|
|
4708
|
-
return UncompressFileAlt(fp, formatspecs)
|
|
4911
|
+
return UncompressFileAlt(fp, formatspecs, filestart)
|
|
4709
4912
|
filefp = StringIO()
|
|
4710
|
-
fp.seek(
|
|
4711
|
-
outstring = UncompressString(fp.read(), formatspecs)
|
|
4913
|
+
fp.seek(filestart, 0)
|
|
4914
|
+
outstring = UncompressString(fp.read(), formatspecs, 0)
|
|
4712
4915
|
filefp.write(outstring)
|
|
4713
4916
|
filefp.seek(0, 0)
|
|
4714
4917
|
return filefp
|
|
4715
4918
|
|
|
4716
4919
|
|
|
4717
|
-
def UncompressBytes(infile, formatspecs=__file_format_multi_dict__):
|
|
4718
|
-
compresscheck = CheckCompressionTypeFromBytes(infile, formatspecs, False)
|
|
4920
|
+
def UncompressBytes(infile, formatspecs=__file_format_multi_dict__, filestart=0):
|
|
4921
|
+
compresscheck = CheckCompressionTypeFromBytes(infile, formatspecs, filestart, False)
|
|
4719
4922
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
4720
4923
|
formatspecs = formatspecs[compresscheck]
|
|
4721
4924
|
if(compresscheck == "gzip" and compresscheck in compressionsupport):
|
|
@@ -4740,26 +4943,26 @@ def UncompressBytes(infile, formatspecs=__file_format_multi_dict__):
|
|
|
4740
4943
|
return fileuz
|
|
4741
4944
|
|
|
4742
4945
|
|
|
4743
|
-
def UncompressBytesAlt(inbytes, formatspecs=__file_format_multi_dict__):
|
|
4946
|
+
def UncompressBytesAlt(inbytes, formatspecs=__file_format_multi_dict__, filestart=0):
|
|
4744
4947
|
filefp = MkTempFile()
|
|
4745
|
-
outstring = UncompressBytes(inbytes, formatspecs)
|
|
4948
|
+
outstring = UncompressBytes(inbytes, formatspecs, filestart)
|
|
4746
4949
|
filefp.write(outstring)
|
|
4747
4950
|
filefp.seek(0, 0)
|
|
4748
4951
|
return filefp
|
|
4749
4952
|
|
|
4750
4953
|
|
|
4751
|
-
def UncompressBytesAltFP(fp, formatspecs=__file_format_multi_dict__):
|
|
4954
|
+
def UncompressBytesAltFP(fp, formatspecs=__file_format_multi_dict__, filestart=0):
|
|
4752
4955
|
if(not hasattr(fp, "read")):
|
|
4753
4956
|
return False
|
|
4754
|
-
prechck = CheckCompressionType(fp, formatspecs, False)
|
|
4957
|
+
prechck = CheckCompressionType(fp, formatspecs, filestart, False)
|
|
4755
4958
|
if(IsNestedDict(formatspecs) and prechck in formatspecs):
|
|
4756
4959
|
formatspecs = formatspecs[prechck]
|
|
4757
|
-
fp.seek(
|
|
4960
|
+
fp.seek(filestart, 0)
|
|
4758
4961
|
if(prechck!="zstd"):
|
|
4759
|
-
return UncompressFileAlt(fp, formatspecs)
|
|
4962
|
+
return UncompressFileAlt(fp, formatspecs, filestart)
|
|
4760
4963
|
filefp = MkTempFile()
|
|
4761
|
-
fp.seek(
|
|
4762
|
-
outstring = UncompressBytes(fp.read(), formatspecs)
|
|
4964
|
+
fp.seek(filestart, 0)
|
|
4965
|
+
outstring = UncompressBytes(fp.read(), formatspecs, 0)
|
|
4763
4966
|
filefp.write(outstring)
|
|
4764
4967
|
filefp.seek(0, 0)
|
|
4765
4968
|
return filefp
|
|
@@ -5006,7 +5209,7 @@ def PackCatFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compress
|
|
|
5006
5209
|
fp = MkTempFile()
|
|
5007
5210
|
elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
|
|
5008
5211
|
fp = outfile
|
|
5009
|
-
elif(re.findall(
|
|
5212
|
+
elif(re.findall(__upload_proto_support__, outfile)):
|
|
5010
5213
|
fp = MkTempFile()
|
|
5011
5214
|
else:
|
|
5012
5215
|
fbasename = os.path.splitext(outfile)[0]
|
|
@@ -5208,9 +5411,9 @@ def PackCatFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compress
|
|
|
5208
5411
|
if not followlink and ftype in data_types:
|
|
5209
5412
|
with open(fname, "rb") as fpc:
|
|
5210
5413
|
shutil.copyfileobj(fpc, fcontents)
|
|
5211
|
-
typechecktest = CheckCompressionType(fcontents, closefp=False)
|
|
5414
|
+
typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
|
|
5212
5415
|
fcontents.seek(0, 0)
|
|
5213
|
-
fcencoding = GetFileEncoding(fcontents, False)
|
|
5416
|
+
fcencoding = GetFileEncoding(fcontents, 0, False)
|
|
5214
5417
|
if(typechecktest is False and not compresswholefile):
|
|
5215
5418
|
fcontents.seek(0, 2)
|
|
5216
5419
|
ucfsize = fcontents.tell()
|
|
@@ -5255,9 +5458,9 @@ def PackCatFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compress
|
|
|
5255
5458
|
flstatinfo = os.stat(flinkname)
|
|
5256
5459
|
with open(flinkname, "rb") as fpc:
|
|
5257
5460
|
shutil.copyfileobj(fpc, fcontents)
|
|
5258
|
-
typechecktest = CheckCompressionType(fcontents, closefp=False)
|
|
5461
|
+
typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
|
|
5259
5462
|
fcontents.seek(0, 0)
|
|
5260
|
-
fcencoding = GetFileEncoding(fcontents, False)
|
|
5463
|
+
fcencoding = GetFileEncoding(fcontents, 0, False)
|
|
5261
5464
|
if(typechecktest is False and not compresswholefile):
|
|
5262
5465
|
fcontents.seek(0, 2)
|
|
5263
5466
|
ucfsize = fcontents.tell()
|
|
@@ -5335,7 +5538,7 @@ def PackCatFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compress
|
|
|
5335
5538
|
outvar = fp.read()
|
|
5336
5539
|
fp.close()
|
|
5337
5540
|
return outvar
|
|
5338
|
-
elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
|
|
5541
|
+
elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
|
|
5339
5542
|
fp = CompressOpenFileAlt(
|
|
5340
5543
|
fp, compression, compressionlevel, compressionuselist, formatspecs)
|
|
5341
5544
|
fp.seek(0, 0)
|
|
@@ -5390,7 +5593,7 @@ def PackCatFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
|
|
|
5390
5593
|
fp = MkTempFile()
|
|
5391
5594
|
elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
|
|
5392
5595
|
fp = outfile
|
|
5393
|
-
elif(re.findall(
|
|
5596
|
+
elif(re.findall(__upload_proto_support__, outfile)):
|
|
5394
5597
|
fp = MkTempFile()
|
|
5395
5598
|
else:
|
|
5396
5599
|
fbasename = os.path.splitext(outfile)[0]
|
|
@@ -5419,7 +5622,7 @@ def PackCatFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
|
|
|
5419
5622
|
if(not infile):
|
|
5420
5623
|
return False
|
|
5421
5624
|
infile.seek(0, 0)
|
|
5422
|
-
elif(re.findall(
|
|
5625
|
+
elif(re.findall(__download_proto_support__, infile)):
|
|
5423
5626
|
infile = download_file_from_internet_file(infile)
|
|
5424
5627
|
infile.seek(0, 0)
|
|
5425
5628
|
if(not infile):
|
|
@@ -5443,7 +5646,7 @@ def PackCatFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
|
|
|
5443
5646
|
return False
|
|
5444
5647
|
try:
|
|
5445
5648
|
if(hasattr(infile, "read") or hasattr(infile, "write")):
|
|
5446
|
-
compresscheck = CheckCompressionType(infile, formatspecs, False)
|
|
5649
|
+
compresscheck = CheckCompressionType(infile, formatspecs, 0, False)
|
|
5447
5650
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
5448
5651
|
formatspecs = formatspecs[compresscheck]
|
|
5449
5652
|
if(compresscheck=="zstd"):
|
|
@@ -5455,7 +5658,7 @@ def PackCatFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
|
|
|
5455
5658
|
else:
|
|
5456
5659
|
tarfp = tarfile.open(fileobj=infile, mode="r")
|
|
5457
5660
|
else:
|
|
5458
|
-
compresscheck = CheckCompressionType(infile, formatspecs, True)
|
|
5661
|
+
compresscheck = CheckCompressionType(infile, formatspecs, 0, True)
|
|
5459
5662
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
5460
5663
|
formatspecs = formatspecs[compresscheck]
|
|
5461
5664
|
if(compresscheck=="zstd"):
|
|
@@ -5556,9 +5759,9 @@ def PackCatFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
|
|
|
5556
5759
|
fpc = tarfp.extractfile(member)
|
|
5557
5760
|
shutil.copyfileobj(fpc, fcontents)
|
|
5558
5761
|
fpc.close()
|
|
5559
|
-
typechecktest = CheckCompressionType(fcontents, closefp=False)
|
|
5762
|
+
typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
|
|
5560
5763
|
fcontents.seek(0, 0)
|
|
5561
|
-
fcencoding = GetFileEncoding(fcontents, False)
|
|
5764
|
+
fcencoding = GetFileEncoding(fcontents, 0, False)
|
|
5562
5765
|
if(typechecktest is False and not compresswholefile):
|
|
5563
5766
|
fcontents.seek(0, 2)
|
|
5564
5767
|
ucfsize = fcontents.tell()
|
|
@@ -5636,7 +5839,7 @@ def PackCatFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
|
|
|
5636
5839
|
outvar = fp.read()
|
|
5637
5840
|
fp.close()
|
|
5638
5841
|
return outvar
|
|
5639
|
-
elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
|
|
5842
|
+
elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
|
|
5640
5843
|
fp = CompressOpenFileAlt(
|
|
5641
5844
|
fp, compression, compressionlevel, compressionuselist, formatspecs)
|
|
5642
5845
|
fp.seek(0, 0)
|
|
@@ -5687,7 +5890,7 @@ def PackCatFileFromZipFile(infile, outfile, fmttype="auto", compression="auto",
|
|
|
5687
5890
|
fp = MkTempFile()
|
|
5688
5891
|
elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
|
|
5689
5892
|
fp = outfile
|
|
5690
|
-
elif(re.findall(
|
|
5893
|
+
elif(re.findall(__upload_proto_support__, outfile)):
|
|
5691
5894
|
fp = MkTempFile()
|
|
5692
5895
|
else:
|
|
5693
5896
|
fbasename = os.path.splitext(outfile)[0]
|
|
@@ -5716,7 +5919,7 @@ def PackCatFileFromZipFile(infile, outfile, fmttype="auto", compression="auto",
|
|
|
5716
5919
|
if(not infile):
|
|
5717
5920
|
return False
|
|
5718
5921
|
infile.seek(0, 0)
|
|
5719
|
-
elif(re.findall(
|
|
5922
|
+
elif(re.findall(__download_proto_support__, infile)):
|
|
5720
5923
|
infile = download_file_from_internet_file(infile)
|
|
5721
5924
|
infile.seek(0, 0)
|
|
5722
5925
|
if(not infile):
|
|
@@ -5853,9 +6056,9 @@ def PackCatFileFromZipFile(infile, outfile, fmttype="auto", compression="auto",
|
|
|
5853
6056
|
curcompression = "none"
|
|
5854
6057
|
if ftype == 0:
|
|
5855
6058
|
fcontents.write(zipfp.read(member.filename))
|
|
5856
|
-
typechecktest = CheckCompressionType(fcontents, closefp=False)
|
|
6059
|
+
typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
|
|
5857
6060
|
fcontents.seek(0, 0)
|
|
5858
|
-
fcencoding = GetFileEncoding(fcontents, False)
|
|
6061
|
+
fcencoding = GetFileEncoding(fcontents, 0, False)
|
|
5859
6062
|
if(typechecktest is False and not compresswholefile):
|
|
5860
6063
|
fcontents.seek(0, 2)
|
|
5861
6064
|
ucfsize = fcontents.tell()
|
|
@@ -5930,7 +6133,7 @@ def PackCatFileFromZipFile(infile, outfile, fmttype="auto", compression="auto",
|
|
|
5930
6133
|
outvar = fp.read()
|
|
5931
6134
|
fp.close()
|
|
5932
6135
|
return outvar
|
|
5933
|
-
elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
|
|
6136
|
+
elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
|
|
5934
6137
|
fp = CompressOpenFileAlt(
|
|
5935
6138
|
fp, compression, compressionlevel, compressionuselist, formatspecs)
|
|
5936
6139
|
fp.seek(0, 0)
|
|
@@ -5986,7 +6189,7 @@ if(rarfile_support):
|
|
|
5986
6189
|
fp = MkTempFile()
|
|
5987
6190
|
elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
|
|
5988
6191
|
fp = outfile
|
|
5989
|
-
elif(re.findall(
|
|
6192
|
+
elif(re.findall(__upload_proto_support__, outfile)):
|
|
5990
6193
|
fp = MkTempFile()
|
|
5991
6194
|
else:
|
|
5992
6195
|
fbasename = os.path.splitext(outfile)[0]
|
|
@@ -6170,9 +6373,9 @@ if(rarfile_support):
|
|
|
6170
6373
|
curcompression = "none"
|
|
6171
6374
|
if ftype == 0:
|
|
6172
6375
|
fcontents.write(rarfp.read(member.filename))
|
|
6173
|
-
typechecktest = CheckCompressionType(fcontents, closefp=False)
|
|
6376
|
+
typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
|
|
6174
6377
|
fcontents.seek(0, 0)
|
|
6175
|
-
fcencoding = GetFileEncoding(fcontents, False)
|
|
6378
|
+
fcencoding = GetFileEncoding(fcontents, 0, False)
|
|
6176
6379
|
if(typechecktest is False and not compresswholefile):
|
|
6177
6380
|
fcontents.seek(0, 2)
|
|
6178
6381
|
ucfsize = fcontents.tell()
|
|
@@ -6250,7 +6453,7 @@ if(rarfile_support):
|
|
|
6250
6453
|
outvar = fp.read()
|
|
6251
6454
|
fp.close()
|
|
6252
6455
|
return outvar
|
|
6253
|
-
elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
|
|
6456
|
+
elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
|
|
6254
6457
|
fp = CompressOpenFileAlt(
|
|
6255
6458
|
fp, compression, compressionlevel, compressionuselist, formatspecs)
|
|
6256
6459
|
fp.seek(0, 0)
|
|
@@ -6306,7 +6509,7 @@ if(py7zr_support):
|
|
|
6306
6509
|
fp = MkTempFile()
|
|
6307
6510
|
elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
|
|
6308
6511
|
fp = outfile
|
|
6309
|
-
elif(re.findall(
|
|
6512
|
+
elif(re.findall(__upload_proto_support__, outfile)):
|
|
6310
6513
|
fp = MkTempFile()
|
|
6311
6514
|
else:
|
|
6312
6515
|
fbasename = os.path.splitext(outfile)[0]
|
|
@@ -6329,7 +6532,7 @@ if(py7zr_support):
|
|
|
6329
6532
|
return False
|
|
6330
6533
|
szpfp = py7zr.SevenZipFile(infile, mode="r")
|
|
6331
6534
|
file_content = szpfp.readall()
|
|
6332
|
-
#sztest = szpfp.testzip()
|
|
6535
|
+
#sztest = szpfp.testzip()
|
|
6333
6536
|
sztestalt = szpfp.test()
|
|
6334
6537
|
if(sztestalt):
|
|
6335
6538
|
VerbosePrintOut("Bad file found!")
|
|
@@ -6423,9 +6626,9 @@ if(py7zr_support):
|
|
|
6423
6626
|
fcontents.write(file_content[member.filename].read())
|
|
6424
6627
|
fsize = format(fcontents.tell(), 'x').lower()
|
|
6425
6628
|
fcontents.seek(0, 0)
|
|
6426
|
-
typechecktest = CheckCompressionType(fcontents, closefp=False)
|
|
6629
|
+
typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
|
|
6427
6630
|
fcontents.seek(0, 0)
|
|
6428
|
-
fcencoding = GetFileEncoding(fcontents, False)
|
|
6631
|
+
fcencoding = GetFileEncoding(fcontents, 0, False)
|
|
6429
6632
|
file_content[member.filename].close()
|
|
6430
6633
|
if(typechecktest is False and not compresswholefile):
|
|
6431
6634
|
fcontents.seek(0, 2)
|
|
@@ -6504,7 +6707,7 @@ if(py7zr_support):
|
|
|
6504
6707
|
outvar = fp.read()
|
|
6505
6708
|
fp.close()
|
|
6506
6709
|
return outvar
|
|
6507
|
-
elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
|
|
6710
|
+
elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
|
|
6508
6711
|
fp = CompressOpenFileAlt(
|
|
6509
6712
|
fp, compression, compressionlevel, compressionuselist, formatspecs)
|
|
6510
6713
|
fp.seek(0, 0)
|
|
@@ -6518,7 +6721,7 @@ if(py7zr_support):
|
|
|
6518
6721
|
|
|
6519
6722
|
|
|
6520
6723
|
def PackCatFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
|
|
6521
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
6724
|
+
checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
|
|
6522
6725
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
6523
6726
|
formatspecs = formatspecs[checkcompressfile]
|
|
6524
6727
|
if(verbose):
|
|
@@ -6538,18 +6741,20 @@ def PackCatFileFromInFile(infile, outfile, fmttype="auto", compression="auto", c
|
|
|
6538
6741
|
return False
|
|
6539
6742
|
|
|
6540
6743
|
|
|
6541
|
-
def
|
|
6744
|
+
def CatFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
|
|
6745
|
+
if(verbose):
|
|
6746
|
+
logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
|
|
6542
6747
|
if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
|
|
6543
6748
|
formatspecs = formatspecs[fmttype]
|
|
6544
6749
|
elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
|
|
6545
6750
|
fmttype = "auto"
|
|
6546
|
-
curloc =
|
|
6751
|
+
curloc = filestart
|
|
6547
6752
|
if(hasattr(infile, "read") or hasattr(infile, "write")):
|
|
6548
6753
|
curloc = infile.tell()
|
|
6549
6754
|
fp = infile
|
|
6550
|
-
fp.seek(
|
|
6551
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
6552
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
|
|
6755
|
+
fp.seek(filestart, 0)
|
|
6756
|
+
fp = UncompressFileAlt(fp, formatspecs, filestart)
|
|
6757
|
+
checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
|
|
6553
6758
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
6554
6759
|
formatspecs = formatspecs[checkcompressfile]
|
|
6555
6760
|
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
@@ -6566,45 +6771,45 @@ def CatFileSeekToFileNum(infile, fmttype="auto", seekto=0, listonly=False, conte
|
|
|
6566
6771
|
return False
|
|
6567
6772
|
if(not fp):
|
|
6568
6773
|
return False
|
|
6569
|
-
fp.seek(
|
|
6774
|
+
fp.seek(filestart, 0)
|
|
6570
6775
|
elif(infile == "-"):
|
|
6571
6776
|
fp = MkTempFile()
|
|
6572
6777
|
if(hasattr(sys.stdin, "buffer")):
|
|
6573
6778
|
shutil.copyfileobj(sys.stdin.buffer, fp)
|
|
6574
6779
|
else:
|
|
6575
6780
|
shutil.copyfileobj(sys.stdin, fp)
|
|
6576
|
-
fp.seek(
|
|
6577
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
6578
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
|
|
6781
|
+
fp.seek(filestart, 0)
|
|
6782
|
+
fp = UncompressFileAlt(fp, formatspecs, filestart)
|
|
6783
|
+
checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
|
|
6579
6784
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
6580
6785
|
formatspecs = formatspecs[checkcompressfile]
|
|
6581
6786
|
if(not fp):
|
|
6582
6787
|
return False
|
|
6583
|
-
fp.seek(
|
|
6788
|
+
fp.seek(filestart, 0)
|
|
6584
6789
|
elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
|
|
6585
6790
|
fp = MkTempFile()
|
|
6586
6791
|
fp.write(infile)
|
|
6587
|
-
fp.seek(
|
|
6588
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
6589
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
6792
|
+
fp.seek(filestart, 0)
|
|
6793
|
+
fp = UncompressFileAlt(fp, formatspecs, filestart)
|
|
6794
|
+
compresscheck = CheckCompressionType(fp, formatspecs, 0, False)
|
|
6590
6795
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
6591
6796
|
formatspecs = formatspecs[compresscheck]
|
|
6592
6797
|
if(not fp):
|
|
6593
6798
|
return False
|
|
6594
|
-
fp.seek(
|
|
6595
|
-
elif(re.findall(
|
|
6799
|
+
fp.seek(filestart, 0)
|
|
6800
|
+
elif(re.findall(__download_proto_support__, infile)):
|
|
6596
6801
|
fp = download_file_from_internet_file(infile)
|
|
6597
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
6598
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
6802
|
+
fp = UncompressFileAlt(fp, formatspecs, filestart)
|
|
6803
|
+
compresscheck = CheckCompressionType(fp, formatspecs, 0, False)
|
|
6599
6804
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
6600
6805
|
formatspecs = formatspecs[compresscheck]
|
|
6601
|
-
fp.seek(
|
|
6806
|
+
fp.seek(filestart, 0)
|
|
6602
6807
|
if(not fp):
|
|
6603
6808
|
return False
|
|
6604
|
-
fp.seek(
|
|
6809
|
+
fp.seek(filestart, 0)
|
|
6605
6810
|
else:
|
|
6606
6811
|
infile = RemoveWindowsPath(infile)
|
|
6607
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
6812
|
+
checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
|
|
6608
6813
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
6609
6814
|
formatspecs = formatspecs[checkcompressfile]
|
|
6610
6815
|
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
@@ -6619,7 +6824,7 @@ def CatFileSeekToFileNum(infile, fmttype="auto", seekto=0, listonly=False, conte
|
|
|
6619
6824
|
return False
|
|
6620
6825
|
elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
|
|
6621
6826
|
return False
|
|
6622
|
-
compresscheck = CheckCompressionType(infile, formatspecs, True)
|
|
6827
|
+
compresscheck = CheckCompressionType(infile, formatspecs, filestart, True)
|
|
6623
6828
|
if(not compresscheck):
|
|
6624
6829
|
fextname = os.path.splitext(infile)[1]
|
|
6625
6830
|
if(fextname == ".gz"):
|
|
@@ -6642,26 +6847,23 @@ def CatFileSeekToFileNum(infile, fmttype="auto", seekto=0, listonly=False, conte
|
|
|
6642
6847
|
return False
|
|
6643
6848
|
if(not compresscheck):
|
|
6644
6849
|
return False
|
|
6645
|
-
fp = UncompressFile(infile, formatspecs, "rb")
|
|
6850
|
+
fp = UncompressFile(infile, formatspecs, "rb", filestart)
|
|
6646
6851
|
try:
|
|
6647
|
-
fp.seek(0, 2)
|
|
6852
|
+
fp.seek(0, 2)
|
|
6648
6853
|
except OSError:
|
|
6649
|
-
SeekToEndOfFile(fp)
|
|
6854
|
+
SeekToEndOfFile(fp)
|
|
6650
6855
|
except ValueError:
|
|
6651
|
-
SeekToEndOfFile(fp)
|
|
6652
|
-
CatSize = fp.tell()
|
|
6653
|
-
CatSizeEnd = CatSize
|
|
6856
|
+
SeekToEndOfFile(fp)
|
|
6857
|
+
CatSize = fp.tell()
|
|
6858
|
+
CatSizeEnd = CatSize
|
|
6654
6859
|
fp.seek(curloc, 0)
|
|
6655
|
-
if(curloc > 0):
|
|
6656
|
-
fp.seek(0, 0)
|
|
6657
6860
|
if(IsNestedDict(formatspecs)):
|
|
6658
|
-
compresschecking = CheckCompressionType(fp, formatspecs, False)
|
|
6861
|
+
compresschecking = CheckCompressionType(fp, formatspecs, filestart, False)
|
|
6659
6862
|
if(compresschecking not in formatspecs):
|
|
6660
|
-
fp.seek(0, 0)
|
|
6661
6863
|
return False
|
|
6662
6864
|
else:
|
|
6663
6865
|
formatspecs = formatspecs[compresschecking]
|
|
6664
|
-
fp.seek(
|
|
6866
|
+
fp.seek(filestart, 0)
|
|
6665
6867
|
inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
|
|
6666
6868
|
formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
|
|
6667
6869
|
formdelszie = len(formatspecs['format_delimiter'])
|
|
@@ -6678,23 +6880,8 @@ def CatFileSeekToFileNum(infile, fmttype="auto", seekto=0, listonly=False, conte
|
|
|
6678
6880
|
fp, formatspecs['format_delimiter'])
|
|
6679
6881
|
fnumextrafieldsize = int(inheader[5], 16)
|
|
6680
6882
|
fnumextrafields = int(inheader[6], 16)
|
|
6681
|
-
fextrafieldslist = []
|
|
6682
6883
|
extrastart = 7
|
|
6683
6884
|
extraend = extrastart + fnumextrafields
|
|
6684
|
-
while(extrastart < extraend):
|
|
6685
|
-
fextrafieldslist.append(inheader[extrastart])
|
|
6686
|
-
extrastart = extrastart + 1
|
|
6687
|
-
if(fnumextrafields==1):
|
|
6688
|
-
try:
|
|
6689
|
-
fextrafieldslist = json.loads(base64.b64decode(fextrafieldslist[0]).decode("UTF-8"))
|
|
6690
|
-
fnumextrafields = len(fextrafieldslist)
|
|
6691
|
-
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
|
|
6692
|
-
try:
|
|
6693
|
-
fextrafieldslist = json.loads(fextrafieldslist[0])
|
|
6694
|
-
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
|
|
6695
|
-
pass
|
|
6696
|
-
if(curloc > 0):
|
|
6697
|
-
fp.seek(curloc, 0)
|
|
6698
6885
|
formversion = re.findall("([\\d]+)", formstring)
|
|
6699
6886
|
fheadsize = int(inheader[0], 16)
|
|
6700
6887
|
fnumfields = int(inheader[1], 16)
|
|
@@ -6703,649 +6890,27 @@ def CatFileSeekToFileNum(infile, fmttype="auto", seekto=0, listonly=False, conte
|
|
|
6703
6890
|
fnumfiles = int(inheader[4], 16)
|
|
6704
6891
|
fprechecksumtype = inheader[-2]
|
|
6705
6892
|
fprechecksum = inheader[-1]
|
|
6893
|
+
il = 0
|
|
6706
6894
|
headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
|
|
6707
6895
|
newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
|
|
6708
|
-
|
|
6709
|
-
|
|
6710
|
-
|
|
6711
|
-
|
|
6712
|
-
|
|
6713
|
-
|
|
6714
|
-
|
|
6715
|
-
|
|
6716
|
-
|
|
6717
|
-
|
|
6718
|
-
|
|
6719
|
-
|
|
6720
|
-
|
|
6721
|
-
if(
|
|
6722
|
-
|
|
6723
|
-
|
|
6724
|
-
|
|
6725
|
-
|
|
6726
|
-
prefhstart = fp.tell()
|
|
6727
|
-
if(formatspecs['new_style']):
|
|
6728
|
-
preheaderdata = ReadFileHeaderDataBySize(
|
|
6729
|
-
fp, formatspecs['format_delimiter'])
|
|
6730
|
-
else:
|
|
6731
|
-
preheaderdata = ReadFileHeaderDataWoSize(
|
|
6732
|
-
fp, formatspecs['format_delimiter'])
|
|
6733
|
-
if(len(preheaderdata) == 0):
|
|
6734
|
-
break
|
|
6735
|
-
prefheadsize = int(preheaderdata[0], 16)
|
|
6736
|
-
prefnumfields = int(preheaderdata[1], 16)
|
|
6737
|
-
preftype = int(preheaderdata[2], 16)
|
|
6738
|
-
prefencoding = preheaderdata[3]
|
|
6739
|
-
prefcencoding = preheaderdata[4]
|
|
6740
|
-
if(re.findall("^[.|/]", preheaderdata[5])):
|
|
6741
|
-
prefname = preheaderdata[5]
|
|
6742
|
-
else:
|
|
6743
|
-
prefname = "./"+preheaderdata[5]
|
|
6744
|
-
prefbasedir = os.path.dirname(prefname)
|
|
6745
|
-
preflinkname = preheaderdata[6]
|
|
6746
|
-
prefsize = int(preheaderdata[7], 16)
|
|
6747
|
-
prefatime = int(preheaderdata[8], 16)
|
|
6748
|
-
prefmtime = int(preheaderdata[9], 16)
|
|
6749
|
-
prefctime = int(preheaderdata[10], 16)
|
|
6750
|
-
prefbtime = int(preheaderdata[11], 16)
|
|
6751
|
-
prefmode = int(preheaderdata[12], 16)
|
|
6752
|
-
prefchmode = stat.S_IMODE(prefmode)
|
|
6753
|
-
preftypemod = stat.S_IFMT(prefmode)
|
|
6754
|
-
prefwinattributes = int(preheaderdata[13], 16)
|
|
6755
|
-
prefcompression = preheaderdata[14]
|
|
6756
|
-
prefcsize = int(preheaderdata[15], 16)
|
|
6757
|
-
prefuid = int(preheaderdata[16], 16)
|
|
6758
|
-
prefuname = preheaderdata[17]
|
|
6759
|
-
prefgid = int(preheaderdata[18], 16)
|
|
6760
|
-
prefgname = preheaderdata[19]
|
|
6761
|
-
fid = int(preheaderdata[20], 16)
|
|
6762
|
-
finode = int(preheaderdata[21], 16)
|
|
6763
|
-
flinkcount = int(preheaderdata[22], 16)
|
|
6764
|
-
prefdev = int(preheaderdata[23], 16)
|
|
6765
|
-
prefdev_minor = int(preheaderdata[24], 16)
|
|
6766
|
-
prefdev_major = int(preheaderdata[25], 16)
|
|
6767
|
-
prefseeknextfile = preheaderdata[26]
|
|
6768
|
-
prefjsontype = preheaderdata[27]
|
|
6769
|
-
prefjsonlen = int(preheaderdata[28], 16)
|
|
6770
|
-
prefjsonsize = int(preheaderdata[29], 16)
|
|
6771
|
-
prefjsonchecksumtype = preheaderdata[30]
|
|
6772
|
-
prefjsonchecksum = preheaderdata[31]
|
|
6773
|
-
prefhend = fp.tell() - 1
|
|
6774
|
-
prefjstart = fp.tell()
|
|
6775
|
-
prefjoutfprejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
|
|
6776
|
-
prefjend = fp.tell()
|
|
6777
|
-
fp.seek(len(formatspecs['format_delimiter']), 1)
|
|
6778
|
-
prejsonfcs = GetFileChecksum(prefjoutfprejsoncontent, prefjsonchecksumtype, True, formatspecs)
|
|
6779
|
-
prefextrasize = int(preheaderdata[32], 16)
|
|
6780
|
-
prefextrafields = int(preheaderdata[33], 16)
|
|
6781
|
-
extrastart = 34
|
|
6782
|
-
extraend = extrastart + prefextrafields
|
|
6783
|
-
prefcs = preheaderdata[-2].lower()
|
|
6784
|
-
prenewfcs = preheaderdata[-1].lower()
|
|
6785
|
-
prenewfcs = GetHeaderChecksum(
|
|
6786
|
-
preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
|
|
6787
|
-
if(prefcs != prenewfcs and not skipchecksum):
|
|
6788
|
-
VerbosePrintOut("File Header Checksum Error with file " +
|
|
6789
|
-
prefname + " at offset " + str(prefhstart))
|
|
6790
|
-
VerbosePrintOut("'" + prefcs + "' != " +
|
|
6791
|
-
"'" + prenewfcs + "'")
|
|
6792
|
-
return False
|
|
6793
|
-
if(prefjsonsize > 0):
|
|
6794
|
-
if(prejsonfcs != prefjsonchecksum and not skipchecksum):
|
|
6795
|
-
VerbosePrintOut("File JSON Data Checksum Error with file " +
|
|
6796
|
-
prefname + " at offset " + str(prefjstart))
|
|
6797
|
-
VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
|
|
6798
|
-
return False
|
|
6799
|
-
prefcontentstart = fp.tell()
|
|
6800
|
-
prefcontents = ""
|
|
6801
|
-
pyhascontents = False
|
|
6802
|
-
if(prefsize > 0):
|
|
6803
|
-
if(prefcompression):
|
|
6804
|
-
prefcontents = fp.read(prefsize)
|
|
6805
|
-
else:
|
|
6806
|
-
prefcontents = fp.read(prefcsize)
|
|
6807
|
-
prenewfccs = GetFileChecksum(
|
|
6808
|
-
prefcontents, preheaderdata[-3].lower(), False, formatspecs)
|
|
6809
|
-
pyhascontents = True
|
|
6810
|
-
if(prefccs != prenewfccs and not skipchecksum):
|
|
6811
|
-
VerbosePrintOut("File Content Checksum Error with file " +
|
|
6812
|
-
prefname + " at offset " + str(prefcontentstart))
|
|
6813
|
-
VerbosePrintOut("'" + prefccs +
|
|
6814
|
-
"' != " + "'" + prenewfccs + "'")
|
|
6815
|
-
return False
|
|
6816
|
-
if(re.findall("^\\+([0-9]+)", prefseeknextfile)):
|
|
6817
|
-
fseeknextasnum = int(prefseeknextfile.replace("+", ""))
|
|
6818
|
-
if(abs(fseeknextasnum) == 0):
|
|
6819
|
-
pass
|
|
6820
|
-
fp.seek(fseeknextasnum, 1)
|
|
6821
|
-
elif(re.findall("^\\-([0-9]+)", prefseeknextfile)):
|
|
6822
|
-
fseeknextasnum = int(prefseeknextfile)
|
|
6823
|
-
if(abs(fseeknextasnum) == 0):
|
|
6824
|
-
pass
|
|
6825
|
-
fp.seek(fseeknextasnum, 1)
|
|
6826
|
-
elif(re.findall("^([0-9]+)", prefseeknextfile)):
|
|
6827
|
-
fseeknextasnum = int(prefseeknextfile)
|
|
6828
|
-
if(abs(fseeknextasnum) == 0):
|
|
6829
|
-
pass
|
|
6830
|
-
fp.seek(fseeknextasnum, 0)
|
|
6831
|
-
else:
|
|
6832
|
-
return False
|
|
6833
|
-
il = il + 1
|
|
6834
|
-
fp.seek(seekstart, 0)
|
|
6835
|
-
fileidnum = il
|
|
6836
|
-
outfheadsize = int(preheaderdata[0], 16)
|
|
6837
|
-
outfnumfields = int(preheaderdata[1], 16)
|
|
6838
|
-
outftype = int(preheaderdata[2], 16)
|
|
6839
|
-
outfencoding = preheaderdata[3]
|
|
6840
|
-
if(re.findall("^[.|/]", preheaderdata[4])):
|
|
6841
|
-
outfname = preheaderdata[4]
|
|
6842
|
-
else:
|
|
6843
|
-
outfname = "./"+preheaderdata[4]
|
|
6844
|
-
outflinkname = preheaderdata[5]
|
|
6845
|
-
outfsize = int(preheaderdata[6], 16)
|
|
6846
|
-
outfbasedir = os.path.dirname(outfname)
|
|
6847
|
-
outlist = {'fid': fileidnum, 'foffset': fp.tell(), 'ftype': outftype, 'fencoding': outfencoding, 'fname': outfname,
|
|
6848
|
-
'fbasedir': outfbasedir, 'flinkname': outflinkname, 'fsize': outfsize}
|
|
6849
|
-
if(returnfp):
|
|
6850
|
-
outlist.update({'fp': fp})
|
|
6851
|
-
else:
|
|
6852
|
-
fp.close()
|
|
6853
|
-
return outlist
|
|
6854
|
-
|
|
6855
|
-
|
|
6856
|
-
def CatFileSeekToFileName(infile, fmttype="auto", seekfile=None, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
6857
|
-
if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
|
|
6858
|
-
formatspecs = formatspecs[fmttype]
|
|
6859
|
-
elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
|
|
6860
|
-
fmttype = "auto"
|
|
6861
|
-
curloc = 0
|
|
6862
|
-
if(hasattr(infile, "read") or hasattr(infile, "write")):
|
|
6863
|
-
curloc = infile.tell()
|
|
6864
|
-
fp = infile
|
|
6865
|
-
fp.seek(0, 0)
|
|
6866
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
6867
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
|
|
6868
|
-
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
6869
|
-
formatspecs = formatspecs[checkcompressfile]
|
|
6870
|
-
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
6871
|
-
return TarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6872
|
-
elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
|
|
6873
|
-
return ZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6874
|
-
elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
|
|
6875
|
-
return RarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6876
|
-
elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
|
|
6877
|
-
return SevenZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6878
|
-
elif(IsSingleDict(formatspecs) and checkcompressfile != formatspecs['format_magic']):
|
|
6879
|
-
return False
|
|
6880
|
-
elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
|
|
6881
|
-
return False
|
|
6882
|
-
if(not fp):
|
|
6883
|
-
return False
|
|
6884
|
-
fp.seek(0, 0)
|
|
6885
|
-
elif(infile == "-"):
|
|
6886
|
-
fp = MkTempFile()
|
|
6887
|
-
if(hasattr(sys.stdin, "buffer")):
|
|
6888
|
-
shutil.copyfileobj(sys.stdin.buffer, fp)
|
|
6889
|
-
else:
|
|
6890
|
-
shutil.copyfileobj(sys.stdin, fp)
|
|
6891
|
-
fp.seek(0, 0)
|
|
6892
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
6893
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
|
|
6894
|
-
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
6895
|
-
formatspecs = formatspecs[checkcompressfile]
|
|
6896
|
-
if(not fp):
|
|
6897
|
-
return False
|
|
6898
|
-
fp.seek(0, 0)
|
|
6899
|
-
elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
|
|
6900
|
-
fp = MkTempFile()
|
|
6901
|
-
fp.write(infile)
|
|
6902
|
-
fp.seek(0, 0)
|
|
6903
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
6904
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
6905
|
-
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
6906
|
-
formatspecs = formatspecs[compresscheck]
|
|
6907
|
-
if(not fp):
|
|
6908
|
-
return False
|
|
6909
|
-
fp.seek(0, 0)
|
|
6910
|
-
elif(re.findall("^(http|https|ftp|ftps|sftp):\\/\\/", infile)):
|
|
6911
|
-
fp = download_file_from_internet_file(infile)
|
|
6912
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
6913
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
6914
|
-
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
6915
|
-
formatspecs = formatspecs[compresscheck]
|
|
6916
|
-
fp.seek(0, 0)
|
|
6917
|
-
if(not fp):
|
|
6918
|
-
return False
|
|
6919
|
-
fp.seek(0, 0)
|
|
6920
|
-
else:
|
|
6921
|
-
infile = RemoveWindowsPath(infile)
|
|
6922
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
6923
|
-
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
6924
|
-
formatspecs = formatspecs[checkcompressfile]
|
|
6925
|
-
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
6926
|
-
return TarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6927
|
-
elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
|
|
6928
|
-
return ZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6929
|
-
elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
|
|
6930
|
-
return RarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6931
|
-
elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
|
|
6932
|
-
return SevenZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6933
|
-
elif(IsSingleDict(formatspecs) and checkcompressfile != formatspecs['format_magic']):
|
|
6934
|
-
return False
|
|
6935
|
-
elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
|
|
6936
|
-
return False
|
|
6937
|
-
compresscheck = CheckCompressionType(infile, formatspecs, True)
|
|
6938
|
-
if(not compresscheck):
|
|
6939
|
-
fextname = os.path.splitext(infile)[1]
|
|
6940
|
-
if(fextname == ".gz"):
|
|
6941
|
-
compresscheck = "gzip"
|
|
6942
|
-
elif(fextname == ".bz2"):
|
|
6943
|
-
compresscheck = "bzip2"
|
|
6944
|
-
elif(fextname == ".zst"):
|
|
6945
|
-
compresscheck = "zstd"
|
|
6946
|
-
elif(fextname == ".lz4" or fextname == ".clz4"):
|
|
6947
|
-
compresscheck = "lz4"
|
|
6948
|
-
elif(fextname == ".lzo" or fextname == ".lzop"):
|
|
6949
|
-
compresscheck = "lzo"
|
|
6950
|
-
elif(fextname == ".lzma"):
|
|
6951
|
-
compresscheck = "lzma"
|
|
6952
|
-
elif(fextname == ".xz"):
|
|
6953
|
-
compresscheck = "xz"
|
|
6954
|
-
elif(fextname == ".zz" or fextname == ".zl" or fextname == ".zlib"):
|
|
6955
|
-
compresscheck = "zlib"
|
|
6956
|
-
else:
|
|
6957
|
-
return False
|
|
6958
|
-
if(not compresscheck):
|
|
6959
|
-
return False
|
|
6960
|
-
fp = UncompressFile(infile, formatspecs, "rb")
|
|
6961
|
-
try:
|
|
6962
|
-
fp.seek(0, 2);
|
|
6963
|
-
except OSError:
|
|
6964
|
-
SeekToEndOfFile(fp);
|
|
6965
|
-
except ValueError:
|
|
6966
|
-
SeekToEndOfFile(fp);
|
|
6967
|
-
CatSize = fp.tell();
|
|
6968
|
-
CatSizeEnd = CatSize;
|
|
6969
|
-
fp.seek(curloc, 0)
|
|
6970
|
-
if(curloc > 0):
|
|
6971
|
-
fp.seek(0, 0)
|
|
6972
|
-
if(IsNestedDict(formatspecs)):
|
|
6973
|
-
compresschecking = CheckCompressionType(fp, formatspecs, False)
|
|
6974
|
-
if(compresschecking not in formatspecs):
|
|
6975
|
-
return False
|
|
6976
|
-
else:
|
|
6977
|
-
formatspecs = formatspecs[compresschecking]
|
|
6978
|
-
fp.seek(0, 0)
|
|
6979
|
-
inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
|
|
6980
|
-
formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
|
|
6981
|
-
formdelszie = len(formatspecs['format_delimiter'])
|
|
6982
|
-
formdel = fp.read(formdelszie).decode("UTF-8")
|
|
6983
|
-
if(formstring != formatspecs['format_magic']+inheaderver):
|
|
6984
|
-
return False
|
|
6985
|
-
if(formdel != formatspecs['format_delimiter']):
|
|
6986
|
-
return False
|
|
6987
|
-
if(formatspecs['new_style']):
|
|
6988
|
-
inheader = ReadFileHeaderDataBySize(
|
|
6989
|
-
fp, formatspecs['format_delimiter'])
|
|
6990
|
-
else:
|
|
6991
|
-
inheader = ReadFileHeaderDataWoSize(
|
|
6992
|
-
fp, formatspecs['format_delimiter'])
|
|
6993
|
-
fnumextrafieldsize = int(inheader[5], 16)
|
|
6994
|
-
fnumextrafields = int(inheader[6], 16)
|
|
6995
|
-
fextrafieldslist = []
|
|
6996
|
-
extrastart = 7
|
|
6997
|
-
extraend = extrastart + fnumextrafields
|
|
6998
|
-
while(extrastart < extraend):
|
|
6999
|
-
fextrafieldslist.append(inheader[extrastart])
|
|
7000
|
-
extrastart = extrastart + 1
|
|
7001
|
-
if(fnumextrafields==1):
|
|
7002
|
-
try:
|
|
7003
|
-
fextrafieldslist = json.loads(base64.b64decode(fextrafieldslist[0]).decode("UTF-8"))
|
|
7004
|
-
fnumextrafields = len(fextrafieldslist)
|
|
7005
|
-
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
|
|
7006
|
-
try:
|
|
7007
|
-
fextrafieldslist = json.loads(fextrafieldslist[0])
|
|
7008
|
-
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
|
|
7009
|
-
pass
|
|
7010
|
-
if(curloc > 0):
|
|
7011
|
-
fp.seek(curloc, 0)
|
|
7012
|
-
formversion = re.findall("([\\d]+)", formstring)
|
|
7013
|
-
fheadsize = int(inheader[0], 16)
|
|
7014
|
-
fnumfields = int(inheader[1], 16)
|
|
7015
|
-
fhencoding = inheader[2]
|
|
7016
|
-
fostype = inheader[3]
|
|
7017
|
-
fnumfiles = int(inheader[4], 16)
|
|
7018
|
-
fprechecksumtype = inheader[-2]
|
|
7019
|
-
fprechecksum = inheader[-1]
|
|
7020
|
-
headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
|
|
7021
|
-
newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
|
|
7022
|
-
if(not headercheck and not skipchecksum):
|
|
7023
|
-
VerbosePrintOut(
|
|
7024
|
-
"File Header Checksum Error with file at offset " + str(0))
|
|
7025
|
-
VerbosePrintOut("'" + fprechecksum + "' != " +
|
|
7026
|
-
"'" + newfcs + "'")
|
|
7027
|
-
return False
|
|
7028
|
-
formversions = re.search('(.*?)(\\d+)', formstring).groups()
|
|
7029
|
-
fcompresstype = compresscheck
|
|
7030
|
-
if(fcompresstype==formatspecs['format_magic']):
|
|
7031
|
-
fcompresstype = ""
|
|
7032
|
-
outlist = {'fnumfiles': fnumfiles, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
|
|
7033
|
-
seekto = fnumfiles - 1
|
|
7034
|
-
filefound = False
|
|
7035
|
-
if(seekto >= 0):
|
|
7036
|
-
il = -1
|
|
7037
|
-
while (fp.tell() < CatSizeEnd) if seektoend else (il < seekto):
|
|
7038
|
-
prefhstart = fp.tell()
|
|
7039
|
-
if(formatspecs['new_style']):
|
|
7040
|
-
preheaderdata = ReadFileHeaderDataBySize(
|
|
7041
|
-
fp, formatspecs['format_delimiter'])
|
|
7042
|
-
else:
|
|
7043
|
-
preheaderdata = ReadFileHeaderDataWoSize(
|
|
7044
|
-
fp, formatspecs['format_delimiter'])
|
|
7045
|
-
if(len(preheaderdata) == 0):
|
|
7046
|
-
break
|
|
7047
|
-
prefheadsize = int(preheaderdata[0], 16)
|
|
7048
|
-
prefnumfields = int(preheaderdata[1], 16)
|
|
7049
|
-
preftype = int(preheaderdata[2], 16)
|
|
7050
|
-
prefencoding = preheaderdata[3]
|
|
7051
|
-
prefencoding = preheaderdata[4]
|
|
7052
|
-
if(re.findall("^[.|/]", preheaderdata[5])):
|
|
7053
|
-
prefname = preheaderdata[5]
|
|
7054
|
-
else:
|
|
7055
|
-
prefname = "./"+preheaderdata[5]
|
|
7056
|
-
prefbasedir = os.path.dirname(prefname)
|
|
7057
|
-
preflinkname = preheaderdata[6]
|
|
7058
|
-
prefsize = int(preheaderdata[7], 16)
|
|
7059
|
-
prefatime = int(preheaderdata[8], 16)
|
|
7060
|
-
prefmtime = int(preheaderdata[9], 16)
|
|
7061
|
-
prefctime = int(preheaderdata[10], 16)
|
|
7062
|
-
prefbtime = int(preheaderdata[11], 16)
|
|
7063
|
-
prefmode = int(preheaderdata[12], 16)
|
|
7064
|
-
prefchmode = stat.S_IMODE(prefmode)
|
|
7065
|
-
preftypemod = stat.S_IFMT(prefmode)
|
|
7066
|
-
prefwinattributes = int(preheaderdata[13], 16)
|
|
7067
|
-
prefcompression = preheaderdata[14]
|
|
7068
|
-
prefcsize = int(preheaderdata[15], 16)
|
|
7069
|
-
prefuid = int(preheaderdata[16], 16)
|
|
7070
|
-
prefuname = preheaderdata[17]
|
|
7071
|
-
prefgid = int(preheaderdata[18], 16)
|
|
7072
|
-
prefgname = preheaderdata[19]
|
|
7073
|
-
fid = int(preheaderdata[20], 16)
|
|
7074
|
-
finode = int(preheaderdata[21], 16)
|
|
7075
|
-
flinkcount = int(preheaderdata[22], 16)
|
|
7076
|
-
prefdev = int(preheaderdata[23], 16)
|
|
7077
|
-
prefdev_minor = int(preheaderdata[24], 16)
|
|
7078
|
-
prefdev_major = int(preheaderdata[25], 16)
|
|
7079
|
-
prefseeknextfile = preheaderdata[26]
|
|
7080
|
-
prefjsontype = preheaderdata[27]
|
|
7081
|
-
prefjsonlen = int(preheaderdata[28], 16)
|
|
7082
|
-
prefjsonsize = int(preheaderdata[29], 16)
|
|
7083
|
-
prefjsonchecksumtype = preheaderdata[30]
|
|
7084
|
-
prefjsonchecksum = preheaderdata[31]
|
|
7085
|
-
prefhend = fp.tell() - 1
|
|
7086
|
-
prefjstart = fp.tell()
|
|
7087
|
-
prefjoutfprejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
|
|
7088
|
-
prefjend = fp.tell()
|
|
7089
|
-
fp.seek(len(formatspecs['format_delimiter']), 1)
|
|
7090
|
-
prejsonfcs = GetFileChecksum(prefjoutfprejsoncontent, prefjsonchecksumtype, True, formatspecs)
|
|
7091
|
-
prefextrasize = int(preheaderdata[32], 16)
|
|
7092
|
-
prefextrafields = int(preheaderdata[33], 16)
|
|
7093
|
-
extrastart = 34
|
|
7094
|
-
extraend = extrastart + prefextrafields
|
|
7095
|
-
prefcs = preheaderdata[-2].lower()
|
|
7096
|
-
prenewfcs = preheaderdata[-1].lower()
|
|
7097
|
-
prenewfcs = GetHeaderChecksum(
|
|
7098
|
-
preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
|
|
7099
|
-
if(prefcs != prenewfcs and not skipchecksum):
|
|
7100
|
-
VerbosePrintOut("File Header Checksum Error with file " +
|
|
7101
|
-
prefname + " at offset " + str(prefhstart))
|
|
7102
|
-
VerbosePrintOut("'" + prefcs + "' != " +
|
|
7103
|
-
"'" + prenewfcs + "'")
|
|
7104
|
-
return False
|
|
7105
|
-
if(prefjsonsize > 0):
|
|
7106
|
-
if(prejsonfcs != prefjsonchecksum and not skipchecksum):
|
|
7107
|
-
VerbosePrintOut("File JSON Data Checksum Error with file " +
|
|
7108
|
-
prefname + " at offset " + str(prefjstart))
|
|
7109
|
-
VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
|
|
7110
|
-
return False
|
|
7111
|
-
prefcontentstart = fp.tell()
|
|
7112
|
-
prefcontents = ""
|
|
7113
|
-
pyhascontents = False
|
|
7114
|
-
if(prefsize > 0):
|
|
7115
|
-
if(prefcompression):
|
|
7116
|
-
prefcontents = fp.read(prefsize)
|
|
7117
|
-
else:
|
|
7118
|
-
prefcontents = fp.read(prefcsize)
|
|
7119
|
-
prenewfccs = GetFileChecksum(
|
|
7120
|
-
prefcontents, preheaderdata[-3].lower(), False, formatspecs)
|
|
7121
|
-
pyhascontents = True
|
|
7122
|
-
if(prefccs != prenewfccs and not skipchecksum):
|
|
7123
|
-
VerbosePrintOut("File Content Checksum Error with file " +
|
|
7124
|
-
prefname + " at offset " + str(prefcontentstart))
|
|
7125
|
-
VerbosePrintOut("'" + prefccs +
|
|
7126
|
-
"' != " + "'" + prenewfccs + "'")
|
|
7127
|
-
return False
|
|
7128
|
-
if(re.findall("^\\+([0-9]+)", prefseeknextfile)):
|
|
7129
|
-
fseeknextasnum = int(prefseeknextfile.replace("+", ""))
|
|
7130
|
-
if(abs(fseeknextasnum) == 0):
|
|
7131
|
-
pass
|
|
7132
|
-
fp.seek(fseeknextasnum, 1)
|
|
7133
|
-
elif(re.findall("^\\-([0-9]+)", prefseeknextfile)):
|
|
7134
|
-
fseeknextasnum = int(prefseeknextfile)
|
|
7135
|
-
if(abs(fseeknextasnum) == 0):
|
|
7136
|
-
pass
|
|
7137
|
-
fp.seek(fseeknextasnum, 1)
|
|
7138
|
-
elif(re.findall("^([0-9]+)", prefseeknextfile)):
|
|
7139
|
-
fseeknextasnum = int(prefseeknextfile)
|
|
7140
|
-
if(abs(fseeknextasnum) == 0):
|
|
7141
|
-
pass
|
|
7142
|
-
fp.seek(fseeknextasnum, 0)
|
|
7143
|
-
else:
|
|
7144
|
-
return False
|
|
7145
|
-
il = il + 1
|
|
7146
|
-
filefound = False
|
|
7147
|
-
if(prefname == seekfile):
|
|
7148
|
-
filefound = True
|
|
7149
|
-
break
|
|
7150
|
-
fp.seek(seekstart, 0)
|
|
7151
|
-
fileidnum = il
|
|
7152
|
-
outfheadsize = int(preheaderdata[0], 16)
|
|
7153
|
-
outfnumfields = int(preheaderdata[1], 16)
|
|
7154
|
-
outftype = int(preheaderdata[2], 16)
|
|
7155
|
-
outfencoding = preheaderdata[3]
|
|
7156
|
-
if(re.findall("^[.|/]", preheaderdata[4])):
|
|
7157
|
-
outfname = preheaderdata[4]
|
|
7158
|
-
else:
|
|
7159
|
-
outfname = "./"+preheaderdata[4]
|
|
7160
|
-
outflinkname = preheaderdata[5]
|
|
7161
|
-
outfsize = int(preheaderdata[6], 16)
|
|
7162
|
-
outfbasedir = os.path.dirname(outfname)
|
|
7163
|
-
if(filefound):
|
|
7164
|
-
outlist = {'fid': fileidnum, 'foffset': fp.tell(), 'ftype': outftype, 'fencoding': outfencoding, 'fname': outfname,
|
|
7165
|
-
'fbasedir': outfbasedir, 'flinkname': outflinkname, 'fsize': outfsize}
|
|
7166
|
-
else:
|
|
7167
|
-
return False
|
|
7168
|
-
if(returnfp):
|
|
7169
|
-
outlist.update({'fp': fp})
|
|
7170
|
-
else:
|
|
7171
|
-
fp.close()
|
|
7172
|
-
return outlist
|
|
7173
|
-
|
|
7174
|
-
|
|
7175
|
-
def CatFileValidate(infile, fmttype="auto", formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
|
|
7176
|
-
if(verbose):
|
|
7177
|
-
logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
|
|
7178
|
-
if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
|
|
7179
|
-
formatspecs = formatspecs[fmttype]
|
|
7180
|
-
elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
|
|
7181
|
-
fmttype = "auto"
|
|
7182
|
-
curloc = 0
|
|
7183
|
-
if(hasattr(infile, "read") or hasattr(infile, "write")):
|
|
7184
|
-
curloc = infile.tell()
|
|
7185
|
-
fp = infile
|
|
7186
|
-
fp.seek(0, 0)
|
|
7187
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
7188
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
|
|
7189
|
-
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
7190
|
-
formatspecs = formatspecs[checkcompressfile]
|
|
7191
|
-
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
7192
|
-
return TarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7193
|
-
elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
|
|
7194
|
-
return ZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7195
|
-
elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
|
|
7196
|
-
return RarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7197
|
-
elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
|
|
7198
|
-
return SevenZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7199
|
-
elif(IsSingleDict(formatspecs) and checkcompressfile != formatspecs['format_magic']):
|
|
7200
|
-
return False
|
|
7201
|
-
elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
|
|
7202
|
-
return False
|
|
7203
|
-
if(not fp):
|
|
7204
|
-
return False
|
|
7205
|
-
fp.seek(0, 0)
|
|
7206
|
-
elif(infile == "-"):
|
|
7207
|
-
fp = MkTempFile()
|
|
7208
|
-
if(hasattr(sys.stdin, "buffer")):
|
|
7209
|
-
shutil.copyfileobj(sys.stdin.buffer, fp)
|
|
7210
|
-
else:
|
|
7211
|
-
shutil.copyfileobj(sys.stdin, fp)
|
|
7212
|
-
fp.seek(0, 0)
|
|
7213
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
7214
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
|
|
7215
|
-
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
7216
|
-
formatspecs = formatspecs[checkcompressfile]
|
|
7217
|
-
if(not fp):
|
|
7218
|
-
return False
|
|
7219
|
-
fp.seek(0, 0)
|
|
7220
|
-
elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
|
|
7221
|
-
fp = MkTempFile()
|
|
7222
|
-
fp.write(infile)
|
|
7223
|
-
fp.seek(0, 0)
|
|
7224
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
7225
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
7226
|
-
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
7227
|
-
formatspecs = formatspecs[compresscheck]
|
|
7228
|
-
if(not fp):
|
|
7229
|
-
return False
|
|
7230
|
-
fp.seek(0, 0)
|
|
7231
|
-
elif(re.findall("^(http|https|ftp|ftps|sftp):\\/\\/", infile)):
|
|
7232
|
-
fp = download_file_from_internet_file(infile)
|
|
7233
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
7234
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
7235
|
-
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
7236
|
-
formatspecs = formatspecs[compresscheck]
|
|
7237
|
-
fp.seek(0, 0)
|
|
7238
|
-
if(not fp):
|
|
7239
|
-
return False
|
|
7240
|
-
fp.seek(0, 0)
|
|
7241
|
-
else:
|
|
7242
|
-
infile = RemoveWindowsPath(infile)
|
|
7243
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
7244
|
-
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
7245
|
-
formatspecs = formatspecs[checkcompressfile]
|
|
7246
|
-
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
7247
|
-
return TarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7248
|
-
elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
|
|
7249
|
-
return ZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7250
|
-
elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
|
|
7251
|
-
return RarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7252
|
-
elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
|
|
7253
|
-
return SevenZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7254
|
-
elif(IsSingleDict(formatspecs) and checkcompressfile != formatspecs['format_magic']):
|
|
7255
|
-
return False
|
|
7256
|
-
elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
|
|
7257
|
-
return False
|
|
7258
|
-
compresscheck = CheckCompressionType(infile, formatspecs, True)
|
|
7259
|
-
if(not compresscheck):
|
|
7260
|
-
fextname = os.path.splitext(infile)[1]
|
|
7261
|
-
if(fextname == ".gz"):
|
|
7262
|
-
compresscheck = "gzip"
|
|
7263
|
-
elif(fextname == ".bz2"):
|
|
7264
|
-
compresscheck = "bzip2"
|
|
7265
|
-
elif(fextname == ".zst"):
|
|
7266
|
-
compresscheck = "zstd"
|
|
7267
|
-
elif(fextname == ".lz4" or fextname == ".clz4"):
|
|
7268
|
-
compresscheck = "lz4"
|
|
7269
|
-
elif(fextname == ".lzo" or fextname == ".lzop"):
|
|
7270
|
-
compresscheck = "lzo"
|
|
7271
|
-
elif(fextname == ".lzma"):
|
|
7272
|
-
compresscheck = "lzma"
|
|
7273
|
-
elif(fextname == ".xz"):
|
|
7274
|
-
compresscheck = "xz"
|
|
7275
|
-
elif(fextname == ".zz" or fextname == ".zl" or fextname == ".zlib"):
|
|
7276
|
-
compresscheck = "zlib"
|
|
7277
|
-
else:
|
|
7278
|
-
return False
|
|
7279
|
-
if(not compresscheck):
|
|
7280
|
-
return False
|
|
7281
|
-
fp = UncompressFile(infile, formatspecs, "rb")
|
|
7282
|
-
try:
|
|
7283
|
-
fp.seek(0, 2);
|
|
7284
|
-
except OSError:
|
|
7285
|
-
SeekToEndOfFile(fp);
|
|
7286
|
-
except ValueError:
|
|
7287
|
-
SeekToEndOfFile(fp);
|
|
7288
|
-
CatSize = fp.tell();
|
|
7289
|
-
CatSizeEnd = CatSize;
|
|
7290
|
-
fp.seek(curloc, 0)
|
|
7291
|
-
if(curloc > 0):
|
|
7292
|
-
fp.seek(0, 0)
|
|
7293
|
-
if(IsNestedDict(formatspecs)):
|
|
7294
|
-
compresschecking = CheckCompressionType(fp, formatspecs, False)
|
|
7295
|
-
if(compresschecking not in formatspecs):
|
|
7296
|
-
return False
|
|
7297
|
-
else:
|
|
7298
|
-
formatspecs = formatspecs[compresschecking]
|
|
7299
|
-
fp.seek(0, 0)
|
|
7300
|
-
inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
|
|
7301
|
-
formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
|
|
7302
|
-
formdelszie = len(formatspecs['format_delimiter'])
|
|
7303
|
-
formdel = fp.read(formdelszie).decode("UTF-8")
|
|
7304
|
-
if(formstring != formatspecs['format_magic']+inheaderver):
|
|
7305
|
-
return False
|
|
7306
|
-
if(formdel != formatspecs['format_delimiter']):
|
|
7307
|
-
return False
|
|
7308
|
-
if(formatspecs['new_style']):
|
|
7309
|
-
inheader = ReadFileHeaderDataBySize(
|
|
7310
|
-
fp, formatspecs['format_delimiter'])
|
|
7311
|
-
else:
|
|
7312
|
-
inheader = ReadFileHeaderDataWoSize(
|
|
7313
|
-
fp, formatspecs['format_delimiter'])
|
|
7314
|
-
fnumextrafieldsize = int(inheader[5], 16)
|
|
7315
|
-
fnumextrafields = int(inheader[6], 16)
|
|
7316
|
-
extrastart = 7
|
|
7317
|
-
extraend = extrastart + fnumextrafields
|
|
7318
|
-
if(curloc > 0):
|
|
7319
|
-
fp.seek(curloc, 0)
|
|
7320
|
-
formversion = re.findall("([\\d]+)", formstring)
|
|
7321
|
-
fheadsize = int(inheader[0], 16)
|
|
7322
|
-
fnumfields = int(inheader[1], 16)
|
|
7323
|
-
fhencoding = inheader[2]
|
|
7324
|
-
fostype = inheader[3]
|
|
7325
|
-
fnumfiles = int(inheader[4], 16)
|
|
7326
|
-
fprechecksumtype = inheader[-2]
|
|
7327
|
-
fprechecksum = inheader[-1]
|
|
7328
|
-
il = 0
|
|
7329
|
-
headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
|
|
7330
|
-
newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
|
|
7331
|
-
valid_archive = True
|
|
7332
|
-
invalid_archive = False
|
|
7333
|
-
if(verbose):
|
|
7334
|
-
if(hasattr(infile, "read") or hasattr(infile, "write")):
|
|
7335
|
-
try:
|
|
7336
|
-
VerbosePrintOut(infile.name)
|
|
7337
|
-
except AttributeError:
|
|
7338
|
-
pass
|
|
7339
|
-
elif(sys.version_info[0] >= 3 and isinstance(infile, bytes)):
|
|
7340
|
-
pass
|
|
7341
|
-
else:
|
|
7342
|
-
VerbosePrintOut(infile)
|
|
7343
|
-
VerbosePrintOut("Number of Records " + str(fnumfiles))
|
|
7344
|
-
if(headercheck):
|
|
7345
|
-
if(verbose):
|
|
7346
|
-
VerbosePrintOut("File Header Checksum Passed at offset " + str(0))
|
|
7347
|
-
VerbosePrintOut("'" + fprechecksum + "' == " +
|
|
7348
|
-
"'" + newfcs + "'")
|
|
6896
|
+
valid_archive = True
|
|
6897
|
+
invalid_archive = False
|
|
6898
|
+
if(verbose):
|
|
6899
|
+
if(hasattr(infile, "read") or hasattr(infile, "write")):
|
|
6900
|
+
try:
|
|
6901
|
+
VerbosePrintOut(infile.name)
|
|
6902
|
+
except AttributeError:
|
|
6903
|
+
pass
|
|
6904
|
+
elif(sys.version_info[0] >= 3 and isinstance(infile, bytes)):
|
|
6905
|
+
pass
|
|
6906
|
+
else:
|
|
6907
|
+
VerbosePrintOut(infile)
|
|
6908
|
+
VerbosePrintOut("Number of Records " + str(fnumfiles))
|
|
6909
|
+
if(headercheck):
|
|
6910
|
+
if(verbose):
|
|
6911
|
+
VerbosePrintOut("File Header Checksum Passed at offset " + str(0))
|
|
6912
|
+
VerbosePrintOut("'" + fprechecksum + "' == " +
|
|
6913
|
+
"'" + newfcs + "'")
|
|
7349 6914    else:
7350 6915    if(verbose):
7351 6916    VerbosePrintOut("File Header Checksum Failed at offset " + str(0))
@@ -7526,18 +7091,18 @@ def CatFileValidateMultiple(infile, fmttype="auto", formatspecs=__file_format_mu
7526 7091    def CatFileValidateMultipleFiles(infile, fmttype="auto", formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7527 7092    return CatFileValidateMultiple(infile, fmttype, formatspecs, verbose, returnfp)
7528 7093
7529      -  def CatFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
     7094 +  def CatFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
7530 7095    if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
7531 7096    formatspecs = formatspecs[fmttype]
7532 7097    elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
7533 7098    fmttype = "auto"
7534      -  curloc =
     7099 +  curloc = filestart
7535 7100    if(hasattr(infile, "read") or hasattr(infile, "write")):
7536 7101    curloc = infile.tell()
7537 7102    fp = infile
7538      -  fp.seek(
7539      -  fp = UncompressFileAlt(fp, formatspecs)
7540      -  checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
     7103 +  fp.seek(filestart, 0)
     7104 +  fp = UncompressFileAlt(fp, formatspecs, filestart)
     7105 +  checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
7541 7106    if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
7542 7107    formatspecs = formatspecs[checkcompressfile]
7543 7108    if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
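The 0.22.4 CatFileToArray() adds a filestart offset and threads it through every input branch (open file objects, stdin, in-memory bytes, URLs and plain paths), seeking there before probing the format instead of always rewinding to byte 0. A minimal usage sketch; the module import name, the container file name and the 4096-byte offset are assumptions for illustration, not taken from the diff:

    import pycatfile  # assumes the 0.22.4 module is importable under this name

    ARCHIVE_OFFSET = 4096  # hypothetical: archive begins 4 KiB into the container

    entries = pycatfile.CatFileToArray(
        "container.bin",           # hypothetical input file
        fmttype="auto",
        filestart=ARCHIVE_OFFSET,  # new in 0.22.4: byte offset where the archive starts
        listonly=True,             # headers only, skip member contents
    )
    if entries:
        print(entries.get("fnumfiles"), "records found")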
|
|
@@ -7554,45 +7119,45 @@ def CatFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
7554 7119    return False
7555 7120    if(not fp):
7556 7121    return False
7557      -  fp.seek(
     7122 +  fp.seek(filestart, 0)
7558 7123    elif(infile == "-"):
7559 7124    fp = MkTempFile()
7560 7125    if(hasattr(sys.stdin, "buffer")):
7561 7126    shutil.copyfileobj(sys.stdin.buffer, fp)
7562 7127    else:
7563 7128    shutil.copyfileobj(sys.stdin, fp)
7564      -  fp.seek(
7565      -  fp = UncompressFileAlt(fp, formatspecs)
7566      -  checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
     7129 +  fp.seek(filestart, 0)
     7130 +  fp = UncompressFileAlt(fp, formatspecs, filestart)
     7131 +  checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
7567 7132    if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
7568 7133    formatspecs = formatspecs[checkcompressfile]
7569 7134    if(not fp):
7570 7135    return False
7571      -  fp.seek(
     7136 +  fp.seek(filestart, 0)
7572 7137    elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
7573 7138    fp = MkTempFile()
7574 7139    fp.write(infile)
7575      -  fp.seek(
7576      -  fp = UncompressFileAlt(fp, formatspecs)
7577      -  compresscheck = CheckCompressionType(fp, formatspecs, False)
     7140 +  fp.seek(filestart, 0)
     7141 +  fp = UncompressFileAlt(fp, formatspecs, filestart)
     7142 +  compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
7578 7143    if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
7579 7144    formatspecs = formatspecs[compresscheck]
7580 7145    if(not fp):
7581 7146    return False
7582      -  fp.seek(
7583      -  elif(re.findall(
     7147 +  fp.seek(filestart, 0)
     7148 +  elif(re.findall(__download_proto_support__, infile)):
7584 7149    fp = download_file_from_internet_file(infile)
7585      -  fp = UncompressFileAlt(fp, formatspecs)
7586      -  compresscheck = CheckCompressionType(fp, formatspecs, False)
     7150 +  fp = UncompressFileAlt(fp, formatspecs, filestart)
     7151 +  compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
7587 7152    if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
7588 7153    formatspecs = formatspecs[compresscheck]
7589      -  fp.seek(
     7154 +  fp.seek(filestart, 0)
7590 7155    if(not fp):
7591 7156    return False
7592      -  fp.seek(
     7157 +  fp.seek(filestart, 0)
7593 7158    else:
7594 7159    infile = RemoveWindowsPath(infile)
7595      -  checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
     7160 +  checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
7596 7161    if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
7597 7162    formatspecs = formatspecs[checkcompressfile]
7598 7163    if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
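The inline scheme regex from 0.21.4 (visible above as the truncated elif(re.findall( line) is replaced by the module-level __download_proto_support__ constant, so remote inputs are detected in one place and downloaded before parsing. A rough sketch of that dispatch; the pattern below copies the old inline one from this diff and may not list every scheme the new constant supports:

    import re

    # Assumed shape of __download_proto_support__; schemes taken from the 0.21.4 inline regex.
    DOWNLOAD_PROTO_SUPPORT = "^(http|https|ftp|ftps|sftp)://"

    def is_remote_input(infile):
        # Same truthiness test the library uses before calling download_file_from_internet_file().
        return bool(re.findall(DOWNLOAD_PROTO_SUPPORT, infile))

    print(is_remote_input("https://example.com/archive.cat"))  # True
    print(is_remote_input("./archive.cat"))                    # False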
|
|
@@ -7607,7 +7172,7 @@ def CatFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
7607 7172    return False
7608 7173    elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
7609 7174    return False
7610      -  compresscheck = CheckCompressionType(infile, formatspecs, True)
     7175 +  compresscheck = CheckCompressionType(infile, formatspecs, filestart, True)
7611 7176    if(not compresscheck):
7612 7177    fextname = os.path.splitext(infile)[1]
7613 7178    if(fextname == ".gz"):
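When magic-byte detection fails for a plain path, the code that follows falls back to the file extension to pick a decompressor. A condensed sketch of that fallback table, using only the suffix-to-name pairs that appear in this diff:

    import os

    # Extension fallback used when CheckCompressionType() returns nothing.
    EXT_TO_COMPRESSION = {
        ".gz": "gzip", ".bz2": "bzip2", ".zst": "zstd",
        ".lz4": "lz4", ".clz4": "lz4", ".lzo": "lzo", ".lzop": "lzo",
        ".lzma": "lzma", ".xz": "xz", ".zz": "zlib", ".zl": "zlib", ".zlib": "zlib",
    }

    def guess_compression(path):
        ext = os.path.splitext(path)[1]
        # None mirrors the "return False" branch for unknown extensions.
        return EXT_TO_COMPRESSION.get(ext)

    print(guess_compression("backup.cat.xz"))  # xz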
|
|
@@ -7630,25 +7195,23 @@ def CatFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
7630 7195    return False
7631 7196    if(not compresscheck):
7632 7197    return False
7633      -  fp = UncompressFile(infile, formatspecs, "rb")
     7198 +  fp = UncompressFile(infile, formatspecs, "rb", filestart)
7634 7199    try:
7635      -  fp.seek(0, 2)
     7200 +  fp.seek(0, 2)
7636 7201    except OSError:
7637      -  SeekToEndOfFile(fp)
     7202 +  SeekToEndOfFile(fp)
7638 7203    except ValueError:
7639      -  SeekToEndOfFile(fp)
7640      -  CatSize = fp.tell()
     7204 +  SeekToEndOfFile(fp)
     7205 +  CatSize = fp.tell()
7641 7206    CatSizeEnd = CatSize;
7642 7207    fp.seek(curloc, 0)
7643      -  if(curloc > 0):
7644      -  fp.seek(0, 0)
7645 7208    if(IsNestedDict(formatspecs)):
7646      -  compresschecking = CheckCompressionType(fp, formatspecs, False)
     7209 +  compresschecking = CheckCompressionType(fp, formatspecs, filestart, False)
7647 7210    if(compresschecking not in formatspecs):
7648 7211    return False
7649 7212    else:
7650 7213    formatspecs = formatspecs[compresschecking]
7651      -  fp.seek(
     7214 +  fp.seek(filestart, 0)
7652 7215    inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
7653 7216    formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
7654 7217    formdelszie = len(formatspecs['format_delimiter'])
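After the optional decompression step the reader validates the archive header: it reads the magic string plus the packed version digits, then the delimiter, and gives up if either does not match the selected format spec. A simplified sketch of that check, assuming a formatspecs dict with the keys used in the lines above (format_ver, format_len, format_magic, format_delimiter):

    def check_archive_header(fp, formatspecs):
        # Version digits with the dot stripped, e.g. "1.0" -> "10", as in the diff.
        inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
        formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
        formdel = fp.read(len(formatspecs['format_delimiter'])).decode("UTF-8")
        if formstring != formatspecs['format_magic'] + inheaderver:
            return False
        if formdel != formatspecs['format_delimiter']:
            return False
        return True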
|
|
@@ -7680,8 +7243,6 @@ def CatFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
7680 7243    fextrafieldslist = json.loads(fextrafieldslist[0])
7681 7244    except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
7682 7245    pass
7683      -  if(curloc > 0):
7684      -  fp.seek(curloc, 0)
7685 7246    formversion = re.findall("([\\d]+)", formstring)
7686 7247    fheadsize = int(inheader[0], 16)
7687 7248    fnumfields = int(inheader[1], 16)
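The inheader list unpacked here follows a fixed layout: size and field count first, then encoding, OS type and record count, with the checksum type and value in the last two slots (the extra-field size and count sit at indexes 5 and 6 in the older code shown earlier in this diff). A small sketch of that unpacking as a dict, assuming inheader is the list produced by ReadFileHeaderDataBySize or ReadFileHeaderDataWoSize:

    def parse_archive_header(inheader):
        # Field positions as used in the surrounding diff; numeric fields are hex strings.
        return {
            "fheadsize": int(inheader[0], 16),
            "fnumfields": int(inheader[1], 16),
            "fhencoding": inheader[2],
            "fostype": inheader[3],
            "fnumfiles": int(inheader[4], 16),
            "fnumextrafieldsize": int(inheader[5], 16),
            "fnumextrafields": int(inheader[6], 16),
            "fprechecksumtype": inheader[-2],
            "fprechecksum": inheader[-1],
        }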
|
|
@@ -7702,7 +7263,7 @@ def CatFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
7702 7263    fcompresstype = compresscheck
7703 7264    if(fcompresstype==formatspecs['format_magic']):
7704 7265    fcompresstype = ""
7705      -  outlist = {'fnumfiles': fnumfiles, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
     7266 +  outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
7706 7267    if (seekstart < 0) or (seekstart > fnumfiles):
7707 7268    seekstart = 0
7708 7269    if (seekend == 0) or (seekend > fnumfiles) or (seekend < seekstart):
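The dict built here is what CatFileToArray() ultimately returns; 0.22.4 adds 'ffilestart' next to the existing header fields so callers can tell where in the input the archive began. A small consumer sketch, assuming result is a truthy return value from CatFileToArray():

    def summarize(result):
        # Keys taken from the outlist constructed above; 'ffilestart' is new in 0.22.4.
        return {
            "format": result["fformat"],
            "version": result["fversion"],
            "compression": result["fcompression"],
            "starts_at": result["ffilestart"],
            "records": result["fnumfiles"],
            "members": [entry["fname"] for entry in result["ffilelist"]],
        }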
|
|
@@ -7946,7 +7507,7 @@ def CatFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
7946 7507    outfcontents.seek(0, 0)
7947 7508    if(uncompress):
7948 7509    cfcontents = UncompressFileAlt(
7949      -  outfcontents, formatspecs)
     7510 +  outfcontents, formatspecs, 0)
7950 7511    cfcontents.seek(0, 0)
7951 7512    outfcontents = MkTempFile()
7952 7513    shutil.copyfileobj(cfcontents, outfcontents)
|
|
@@ -7991,49 +7552,49 @@ def CatFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
|
|
|
7991
7552
|
return outlist
|
|
7992
7553
|
|
|
7993
7554
|
|
|
7994
|
-
def MultipleCatFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
7555
|
+
def MultipleCatFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
7995
7556
|
if(isinstance(infile, (list, tuple, ))):
|
|
7996
7557
|
pass
|
|
7997
7558
|
else:
|
|
7998
7559
|
infile = [infile]
|
|
7999
7560
|
outretval = {}
|
|
8000
7561
|
for curfname in infile:
|
|
8001
|
-
curretfile[curfname] = CatFileToArray(curfname, fmttype, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7562
|
+
curretfile[curfname] = CatFileToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
|
|
8002
7563
|
return outretval
|
|
8003
7564
|
|
|
8004
|
-
def MultipleCatFilesToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
8005
|
-
return MultipleCatFileToArray(infile, fmttype, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7565
|
+
def MultipleCatFilesToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
7566
|
+
return MultipleCatFileToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
|
|
8006
7567
|
|
|
8007
7568
|
|
|
8008
|
-
def CatFileStringToArray(instr, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
8009
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
7569
|
+
def CatFileStringToArray(instr, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
7570
|
+
checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
|
|
8010
7571
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
8011
7572
|
formatspecs = formatspecs[checkcompressfile]
|
|
8012
7573
|
fp = MkTempFile(instr)
|
|
8013
|
-
|
|
8014
|
-
return
|
|
7574
|
+
listarrayfiles = CatFileToArray(fp, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7575
|
+
return listarrayfiles
|
|
8015
7576
|
|
|
8016
7577
|
|
|
8017
7578
|
def TarFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False, returnfp=False):
|
|
8018
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
7579
|
+
checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
|
|
8019
7580
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
8020
7581
|
formatspecs = formatspecs[checkcompressfile]
|
|
8021
7582
|
fp = MkTempFile()
|
|
8022
7583
|
fp = PackCatFileFromTarFile(
|
|
8023
7584
|
infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
|
|
8024
|
-
|
|
8025
|
-
return
|
|
7585
|
+
listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7586
|
+
return listarrayfiles
|
|
8026
7587
|
|
|
8027
7588
|
|
|
8028
7589
|
def ZipFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False, returnfp=False):
|
|
8029
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
7590
|
+
checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
|
|
8030
7591
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
8031
7592
|
formatspecs = formatspecs[checkcompressfile]
|
|
8032
7593
|
fp = MkTempFile()
|
|
8033
7594
|
fp = PackCatFileFromZipFile(
|
|
8034
7595
|
infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
|
|
8035
|
-
|
|
8036
|
-
return
|
|
7596
|
+
listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7597
|
+
return listarrayfiles
|
|
8037
7598
|
|
|
8038
7599
|
|
|
8039
7600
|
if(not rarfile_support):
|
|
@@ -8042,14 +7603,14 @@ if(not rarfile_support):
|
|
|
8042
7603
|
|
|
8043
7604
|
if(rarfile_support):
|
|
8044
7605
|
def RarFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False, returnfp=False):
|
|
8045
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
7606
|
+
checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
|
|
8046
7607
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
8047
7608
|
formatspecs = formatspecs[checkcompressfile]
|
|
8048
7609
|
fp = MkTempFile()
|
|
8049
7610
|
fp = PackCatFileFromRarFile(
|
|
8050
7611
|
infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
|
|
8051
|
-
|
|
8052
|
-
return
|
|
7612
|
+
listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7613
|
+
return listarrayfiles
|
|
8053
7614
|
|
|
8054
7615
|
if(not py7zr_support):
|
|
8055
7616
|
def SevenZipFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False, returnfp=False):
|
|
@@ -8057,18 +7618,18 @@ if(not py7zr_support):
|
|
|
8057
7618
|
|
|
8058
7619
|
if(py7zr_support):
|
|
8059
7620
|
def SevenZipFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False, returnfp=False):
|
|
8060
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
7621
|
+
checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
|
|
8061
7622
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
8062
7623
|
formatspecs = formatspecs[checkcompressfile]
|
|
8063
7624
|
fp = MkTempFile()
|
|
8064
7625
|
fp = PackCatFileFromSevenZipFile(
|
|
8065
7626
|
infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
|
|
8066
|
-
|
|
8067
|
-
return
|
|
7627
|
+
listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7628
|
+
return listarrayfiles
|
|
8068
7629
|
|
|
8069
7630
|
|
|
8070
|
-
def InFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
8071
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
7631
|
+
def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
7632
|
+
checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
|
|
8072
7633
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
8073
7634
|
formatspecs = formatspecs[checkcompressfile]
|
|
8074
7635
|
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
@@ -8080,78 +7641,78 @@ def InFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=
|
|
|
8080
7641
|
elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
|
|
8081
7642
|
return SevenZipFileToArray(infile, seekstart, seekend, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
8082
7643
|
elif(checkcompressfile == formatspecs['format_magic']):
|
|
8083
|
-
return CatFileToArray(infile, "auto", seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7644
|
+
return CatFileToArray(infile, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
8084
7645
|
else:
|
|
8085
7646
|
return False
|
|
8086
7647
|
return False
|
|
8087
7648
|
|
|
8088
7649
|
|
|
8089
|
-
def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, seekstart=0, seekend=0, listonly=False, skipchecksum=False, checksumtype=["crc32", "crc32", "crc32"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
|
|
7650
|
+
def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, skipchecksum=False, checksumtype=["crc32", "crc32", "crc32"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
|
|
8090
7651
|
outarray = MkTempFile()
|
|
8091
7652
|
packform = PackCatFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
|
|
8092
7653
|
compressionlevel, followlink, checksumtype, extradata, formatspecs, verbose, True)
|
|
8093
|
-
|
|
8094
|
-
return
|
|
7654
|
+
listarrayfiles = CatFileToArray(outarray, "auto", filestart, seekstart, seekend, listonly, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7655
|
+
return listarrayfiles
|
|
8095
7656
|
|
|
8096
7657
|
|
|
8097
7658
|
def CatFileArrayToArrayIndex(inarray, returnfp=False):
|
|
8098
7659
|
if(isinstance(inarray, dict)):
|
|
8099
|
-
|
|
7660
|
+
listarrayfiles = inarray
|
|
8100
7661
|
else:
|
|
8101
7662
|
return False
|
|
8102
|
-
if(not
|
|
7663
|
+
if(not listarrayfiles):
|
|
8103
7664
|
return False
|
|
8104
|
-
outarray = {'list':
|
|
7665
|
+
outarray = {'list': listarrayfiles, 'filetoid': {}, 'idtofile': {}, 'filetypes': {'directories': {'filetoid': {}, 'idtofile': {}}, 'files': {'filetoid': {}, 'idtofile': {}}, 'links': {'filetoid': {}, 'idtofile': {}}, 'symlinks': {'filetoid': {
|
|
8105
7666
|
}, 'idtofile': {}}, 'hardlinks': {'filetoid': {}, 'idtofile': {}}, 'character': {'filetoid': {}, 'idtofile': {}}, 'block': {'filetoid': {}, 'idtofile': {}}, 'fifo': {'filetoid': {}, 'idtofile': {}}, 'devices': {'filetoid': {}, 'idtofile': {}}}}
|
|
8106
7667
|
if(returnfp):
|
|
8107
|
-
outarray.update({'fp':
|
|
8108
|
-
lenlist = len(
|
|
7668
|
+
outarray.update({'fp': listarrayfiles['fp']})
|
|
7669
|
+
lenlist = len(listarrayfiles['ffilelist'])
|
|
8109
7670
|
lcfi = 0
|
|
8110
|
-
lcfx = int(
|
|
8111
|
-
if(lenlist >
|
|
7671
|
+
lcfx = int(listarrayfiles['fnumfiles'])
|
|
7672
|
+
if(lenlist > listarrayfiles['fnumfiles'] or lenlist < listarrayfiles['fnumfiles']):
|
|
8112
7673
|
lcfx = int(lenlist)
|
|
8113
7674
|
else:
|
|
8114
|
-
lcfx = int(
|
|
7675
|
+
lcfx = int(listarrayfiles['fnumfiles'])
|
|
8115
7676
|
while(lcfi < lcfx):
|
|
8116
|
-
filetoidarray = {
|
|
8117
|
-
['fname']:
|
|
8118
|
-
idtofilearray = {
|
|
8119
|
-
['fid']:
|
|
7677
|
+
filetoidarray = {listarrayfiles['ffilelist'][lcfi]
|
|
7678
|
+
['fname']: listarrayfiles['ffilelist'][lcfi]['fid']}
|
|
7679
|
+
idtofilearray = {listarrayfiles['ffilelist'][lcfi]
|
|
7680
|
+
['fid']: listarrayfiles['ffilelist'][lcfi]['fname']}
|
|
8120
7681
|
outarray['filetoid'].update(filetoidarray)
|
|
8121
7682
|
outarray['idtofile'].update(idtofilearray)
|
|
8122
|
-
if(
|
|
7683
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 0 or listarrayfiles['ffilelist'][lcfi]['ftype'] == 7):
|
|
8123
7684
|
outarray['filetypes']['files']['filetoid'].update(filetoidarray)
|
|
8124
7685
|
outarray['filetypes']['files']['idtofile'].update(idtofilearray)
|
|
8125
|
-
if(
|
|
7686
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 1):
|
|
8126
7687
|
outarray['filetypes']['hardlinks']['filetoid'].update(
|
|
8127
7688
|
filetoidarray)
|
|
8128
7689
|
outarray['filetypes']['hardlinks']['idtofile'].update(
|
|
8129
7690
|
idtofilearray)
|
|
8130
7691
|
outarray['filetypes']['links']['filetoid'].update(filetoidarray)
|
|
8131
7692
|
outarray['filetypes']['links']['idtofile'].update(idtofilearray)
|
|
8132
|
-
if(
|
|
7693
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 2):
|
|
8133
7694
|
outarray['filetypes']['symlinks']['filetoid'].update(filetoidarray)
|
|
8134
7695
|
outarray['filetypes']['symlinks']['idtofile'].update(idtofilearray)
|
|
8135
7696
|
outarray['filetypes']['links']['filetoid'].update(filetoidarray)
|
|
8136
7697
|
outarray['filetypes']['links']['idtofile'].update(idtofilearray)
|
|
8137
|
-
if(
|
|
7698
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 3):
|
|
8138
7699
|
outarray['filetypes']['character']['filetoid'].update(
|
|
8139
7700
|
filetoidarray)
|
|
8140
7701
|
outarray['filetypes']['character']['idtofile'].update(
|
|
8141
7702
|
idtofilearray)
|
|
8142
7703
|
outarray['filetypes']['devices']['filetoid'].update(filetoidarray)
|
|
8143
7704
|
outarray['filetypes']['devices']['idtofile'].update(idtofilearray)
|
|
8144
|
-
if(
|
|
7705
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 4):
|
|
8145
7706
|
outarray['filetypes']['block']['filetoid'].update(filetoidarray)
|
|
8146
7707
|
outarray['filetypes']['block']['idtofile'].update(idtofilearray)
|
|
8147
7708
|
outarray['filetypes']['devices']['filetoid'].update(filetoidarray)
|
|
8148
7709
|
outarray['filetypes']['devices']['idtofile'].update(idtofilearray)
|
|
8149
|
-
if(
|
|
7710
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 5):
|
|
8150
7711
|
outarray['filetypes']['directories']['filetoid'].update(
|
|
8151
7712
|
filetoidarray)
|
|
8152
7713
|
outarray['filetypes']['directories']['idtofile'].update(
|
|
8153
7714
|
idtofilearray)
|
|
8154
|
-
if(
|
|
7715
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 6):
|
|
8155
7716
|
outarray['filetypes']['symlinks']['filetoid'].update(filetoidarray)
|
|
8156
7717
|
outarray['filetypes']['symlinks']['idtofile'].update(idtofilearray)
|
|
8157
7718
|
outarray['filetypes']['devices']['filetoid'].update(filetoidarray)
|
|
@@ -8160,13 +7721,13 @@ def CatFileArrayToArrayIndex(inarray, returnfp=False):
|
|
|
8160
7721
|
return outarray
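CatFileArrayToArrayIndex() turns the flat ffilelist into lookup tables: a global name-to-id and id-to-name map plus per-type buckets (files, directories, links, symlinks, hardlinks, character, block, fifo, devices) selected by each entry's ftype. A stripped-down sketch of the same indexing idea; only the two global maps are built here:

    # ftype codes as used in the checks above: 0/7 file, 1 hardlink, 2 symlink,
    # 3 character device, 4 block device, 5 directory, 6 fifo.
    def build_name_index(listarrayfiles):
        index = {"filetoid": {}, "idtofile": {}}
        for entry in listarrayfiles["ffilelist"]:
            index["filetoid"][entry["fname"]] = entry["fid"]
            index["idtofile"][entry["fid"]] = entry["fname"]
        return index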
|
|
8161
7722
|
|
|
8162
7723
|
|
|
8163
|
-
def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
|
|
7724
|
+
def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
|
|
8164
7725
|
if(isinstance(infile, dict)):
|
|
8165
|
-
|
|
7726
|
+
listarrayfiles = infile
|
|
8166
7727
|
else:
|
|
8167
7728
|
if(infile != "-" and not isinstance(infile, bytes) and not hasattr(infile, "read") and not hasattr(infile, "write")):
|
|
8168
7729
|
infile = RemoveWindowsPath(infile)
|
|
8169
|
-
|
|
7730
|
+
listarrayfiles = CatFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
8170
7731
|
if(IsNestedDict(formatspecs) and fmttype in formatspecs):
|
|
8171
7732
|
formatspecs = formatspecs[fmttype]
|
|
8172
7733
|
elif(IsNestedDict(formatspecs) and fmttype not in formatspecs):
|
|
@@ -8192,14 +7753,14 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8192
7753
|
os.unlink(outfile)
|
|
8193
7754
|
except OSError:
|
|
8194
7755
|
pass
|
|
8195
|
-
if(not
|
|
7756
|
+
if(not listarrayfiles):
|
|
8196
7757
|
return False
|
|
8197
7758
|
if(outfile == "-" or outfile is None):
|
|
8198
7759
|
verbose = False
|
|
8199
7760
|
fp = MkTempFile()
|
|
8200
7761
|
elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
|
|
8201
7762
|
fp = outfile
|
|
8202
|
-
elif(re.findall(
|
|
7763
|
+
elif(re.findall(__upload_proto_support__, outfile)):
|
|
8203
7764
|
fp = MkTempFile()
|
|
8204
7765
|
else:
|
|
8205
7766
|
fbasename = os.path.splitext(outfile)[0]
|
|
@@ -8212,19 +7773,19 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8212
7773
|
return False
|
|
8213
7774
|
formver = formatspecs['format_ver']
|
|
8214
7775
|
fileheaderver = str(int(formver.replace(".", "")))
|
|
8215
|
-
lenlist = len(
|
|
8216
|
-
fnumfiles = int(
|
|
7776
|
+
lenlist = len(listarrayfiles['ffilelist'])
|
|
7777
|
+
fnumfiles = int(listarrayfiles['fnumfiles'])
|
|
8217
7778
|
if(lenlist > fnumfiles or lenlist < fnumfiles):
|
|
8218
7779
|
fnumfiles = lenlist
|
|
8219
|
-
AppendFileHeader(fp, fnumfiles,
|
|
8220
|
-
lenlist = len(
|
|
8221
|
-
fnumfiles = int(
|
|
7780
|
+
AppendFileHeader(fp, fnumfiles, listarrayfiles['fencoding'], [], checksumtype[0], formatspecs)
|
|
7781
|
+
lenlist = len(listarrayfiles['ffilelist'])
|
|
7782
|
+
fnumfiles = int(listarrayfiles['fnumfiles'])
|
|
8222
7783
|
lcfi = 0
|
|
8223
|
-
lcfx = int(
|
|
8224
|
-
if(lenlist >
|
|
7784
|
+
lcfx = int(listarrayfiles['fnumfiles'])
|
|
7785
|
+
if(lenlist > listarrayfiles['fnumfiles'] or lenlist < listarrayfiles['fnumfiles']):
|
|
8225
7786
|
lcfx = int(lenlist)
|
|
8226
7787
|
else:
|
|
8227
|
-
lcfx = int(
|
|
7788
|
+
lcfx = int(listarrayfiles['fnumfiles'])
|
|
8228
7789
|
curinode = 0
|
|
8229
7790
|
curfid = 0
|
|
8230
7791
|
inodelist = []
|
|
@@ -8232,66 +7793,66 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8232
7793
|
filetoinode = {}
|
|
8233
7794
|
reallcfi = 0
|
|
8234
7795
|
while(lcfi < lcfx):
|
|
8235
|
-
fencoding =
|
|
8236
|
-
fcencoding =
|
|
8237
|
-
if(re.findall("^[.|/]",
|
|
8238
|
-
fname =
|
|
7796
|
+
fencoding = listarrayfiles['ffilelist'][reallcfi]['fencoding']
|
|
7797
|
+
fcencoding = listarrayfiles['ffilelist'][reallcfi]['fencoding']
|
|
7798
|
+
if(re.findall("^[.|/]", listarrayfiles['ffilelist'][reallcfi]['fname'])):
|
|
7799
|
+
fname = listarrayfiles['ffilelist'][reallcfi]['fname']
|
|
8239
7800
|
else:
|
|
8240
|
-
fname = "./"+
|
|
7801
|
+
fname = "./"+listarrayfiles['ffilelist'][reallcfi]['fname']
|
|
8241
7802
|
if(verbose):
|
|
8242
7803
|
VerbosePrintOut(fname)
|
|
8243
7804
|
fheadersize = format(
|
|
8244
|
-
int(
|
|
7805
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fheadersize']), 'x').lower()
|
|
8245
7806
|
fsize = format(
|
|
8246
|
-
int(
|
|
8247
|
-
flinkname =
|
|
7807
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fsize']), 'x').lower()
|
|
7808
|
+
flinkname = listarrayfiles['ffilelist'][reallcfi]['flinkname']
|
|
8248
7809
|
fatime = format(
|
|
8249
|
-
int(
|
|
7810
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fatime']), 'x').lower()
|
|
8250
7811
|
fmtime = format(
|
|
8251
|
-
int(
|
|
7812
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fmtime']), 'x').lower()
|
|
8252
7813
|
fctime = format(
|
|
8253
|
-
int(
|
|
7814
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fctime']), 'x').lower()
|
|
8254
7815
|
fbtime = format(
|
|
8255
|
-
int(
|
|
7816
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fbtime']), 'x').lower()
|
|
8256
7817
|
fmode = format(
|
|
8257
|
-
int(
|
|
7818
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fmode']), 'x').lower()
|
|
8258
7819
|
fchmode = format(
|
|
8259
|
-
int(
|
|
7820
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fchmode']), 'x').lower()
|
|
8260
7821
|
fuid = format(
|
|
8261
|
-
int(
|
|
8262
|
-
funame =
|
|
7822
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fuid']), 'x').lower()
|
|
7823
|
+
funame = listarrayfiles['ffilelist'][reallcfi]['funame']
|
|
8263
7824
|
fgid = format(
|
|
8264
|
-
int(
|
|
8265
|
-
fgname =
|
|
7825
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fgid']), 'x').lower()
|
|
7826
|
+
fgname = listarrayfiles['ffilelist'][reallcfi]['fgname']
|
|
8266
7827
|
finode = format(
|
|
8267
|
-
int(
|
|
7828
|
+
int(listarrayfiles['ffilelist'][reallcfi]['finode']), 'x').lower()
|
|
8268
7829
|
flinkcount = format(
|
|
8269
|
-
int(
|
|
7830
|
+
int(listarrayfiles['ffilelist'][reallcfi]['flinkcount']), 'x').lower()
|
|
8270
7831
|
fwinattributes = format(
|
|
8271
|
-
int(
|
|
8272
|
-
fcompression =
|
|
7832
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fwinattributes']), 'x').lower()
|
|
7833
|
+
fcompression = listarrayfiles['ffilelist'][reallcfi]['fcompression']
|
|
8273
7834
|
fcsize = format(
|
|
8274
|
-
int(
|
|
7835
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fcsize']), 'x').lower()
|
|
8275
7836
|
fdev = format(
|
|
8276
|
-
int(
|
|
7837
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fdev']), 'x').lower()
|
|
8277
7838
|
fdev_minor = format(
|
|
8278
|
-
int(
|
|
7839
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fminor']), 'x').lower()
|
|
8279
7840
|
fdev_major = format(
|
|
8280
|
-
int(
|
|
8281
|
-
fseeknextfile =
|
|
8282
|
-
if(len(
|
|
8283
|
-
|
|
8284
|
-
|
|
7841
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fmajor']), 'x').lower()
|
|
7842
|
+
fseeknextfile = listarrayfiles['ffilelist'][reallcfi]['fseeknextfile']
|
|
7843
|
+
if(len(listarrayfiles['ffilelist'][reallcfi]['fextralist']) > listarrayfiles['ffilelist'][reallcfi]['fextrafields'] and len(listarrayfiles['ffilelist'][reallcfi]['fextralist']) > 0):
|
|
7844
|
+
listarrayfiles['ffilelist'][reallcfi]['fextrafields'] = len(
|
|
7845
|
+
listarrayfiles['ffilelist'][reallcfi]['fextralist'])
|
|
8285
7846
|
if(not followlink and len(extradata) <= 0):
|
|
8286
|
-
extradata =
|
|
7847
|
+
extradata = listarrayfiles['ffilelist'][reallcfi]['fextralist']
|
|
8287
7848
|
if(not followlink and len(jsondata) <= 0):
|
|
8288
|
-
jsondata =
|
|
8289
|
-
fcontents =
|
|
8290
|
-
if(not
|
|
7849
|
+
jsondata = listarrayfiles['ffilelist'][reallcfi]['fjsondata']
|
|
7850
|
+
fcontents = listarrayfiles['ffilelist'][reallcfi]['fcontents']
|
|
7851
|
+
if(not listarrayfiles['ffilelist'][reallcfi]['fcontentasfile']):
|
|
8291
7852
|
fcontents = MkTempFile(fcontents)
|
|
8292
|
-
typechecktest = CheckCompressionType(fcontents, closefp=False)
|
|
7853
|
+
typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
|
|
8293
7854
|
fcontents.seek(0, 0)
|
|
8294
|
-
fcencoding = GetFileEncoding(fcontents, False)
|
|
7855
|
+
fcencoding = GetFileEncoding(fcontents, 0, False)
|
|
8295
7856
|
fcompression = ""
|
|
8296
7857
|
fcsize = format(int(0), 'x').lower()
|
|
8297
7858
|
curcompression = "none"
|
|
@@ -8334,10 +7895,10 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8334
7895
|
fcontents.close()
|
|
8335
7896
|
fcontents = cfcontents
|
|
8336
7897
|
if followlink:
|
|
8337
|
-
if(
|
|
8338
|
-
getflinkpath =
|
|
8339
|
-
flinkid =
|
|
8340
|
-
flinkinfo =
|
|
7898
|
+
if(listarrayfiles['ffilelist'][reallcfi]['ftype'] == 1 or listarrayfiles['ffilelist'][reallcfi]['ftype'] == 2):
|
|
7899
|
+
getflinkpath = listarrayfiles['ffilelist'][reallcfi]['flinkname']
|
|
7900
|
+
flinkid = prelistarrayfiles['filetoid'][getflinkpath]
|
|
7901
|
+
flinkinfo = listarrayfiles['ffilelist'][flinkid]
|
|
8341
7902
|
fheadersize = format(
|
|
8342
7903
|
int(flinkinfo['fheadersize']), 'x').lower()
|
|
8343
7904
|
fsize = format(int(flinkinfo['fsize']), 'x').lower()
|
|
@@ -8374,10 +7935,10 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8374
7935
|
ftypehex = format(flinkinfo['ftype'], 'x').lower()
|
|
8375
7936
|
else:
|
|
8376
7937
|
ftypehex = format(
|
|
8377
|
-
|
|
7938
|
+
listarrayfiles['ffilelist'][reallcfi]['ftype'], 'x').lower()
|
|
8378
7939
|
fcurfid = format(curfid, 'x').lower()
|
|
8379
7940
|
if(not followlink and finode != 0):
|
|
8380
|
-
if(
|
|
7941
|
+
if(listarrayfiles['ffilelist'][reallcfi]['ftype'] != 1):
|
|
8381
7942
|
fcurinode = format(int(curinode), 'x').lower()
|
|
8382
7943
|
inodetofile.update({curinode: fname})
|
|
8383
7944
|
filetoinode.update({fname: curinode})
|
|
@@ -8427,7 +7988,7 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8427
7988
|
outvar = fp.read()
|
|
8428
7989
|
fp.close()
|
|
8429
7990
|
return outvar
|
|
8430
|
-
elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
|
|
7991
|
+
elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
|
|
8431
7992
|
fp = CompressOpenFileAlt(
|
|
8432
7993
|
fp, compression, compressionlevel, compressionuselist, formatspecs)
|
|
8433
7994
|
fp.seek(0, 0)
|
|
@@ -8440,50 +8001,50 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8440
8001
|
return True
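RePackCatFile() now accepts the same filestart/seekstart/seekend trio, so an existing archive, or a slice of its members, can be rewritten with different compression without unpacking to disk; RePackCatFileFromString() and PackCatFileFromListDir() below forward the identical argument list. A hedged usage sketch; the file names, the zstd choice and the idea of repacking only the first ten members are assumptions, not taken from the diff:

    import pycatfile  # assumes the module is importable under this name

    ok = pycatfile.RePackCatFile(
        "old-archive.cat",      # hypothetical existing archive
        "new-archive.cat.zst",  # hypothetical output path
        fmttype="auto",
        compression="zstd",     # assumed to be in the supported compression list
        seekstart=0,
        seekend=10,             # assumed: repack only members 0..9
    )
    print("repacked" if ok else "repack failed")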
|
|
8441
8002
|
|
|
8442
8003
|
|
|
8443
|
-
def RePackCatFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
|
|
8004
|
+
def RePackCatFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
|
|
8444
8005
|
fp = MkTempFile(instr)
|
|
8445
|
-
|
|
8446
|
-
checksumtype, skipchecksum, extradata, formatspecs, verbose, returnfp)
|
|
8447
|
-
return
|
|
8006
|
+
listarrayfiles = RePackCatFile(fp, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
|
|
8007
|
+
checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
|
|
8008
|
+
return listarrayfiles
|
|
8448
8009
|
|
|
8449
8010
|
|
|
8450
|
-
def PackCatFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False,
|
|
8011
|
+
def PackCatFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
|
|
8451
8012
|
outarray = MkTempFile()
|
|
8452
8013
|
packform = PackCatFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
|
|
8453
8014
|
compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, True)
|
|
8454
|
-
|
|
8455
|
-
|
|
8456
|
-
return
|
|
8015
|
+
listarrayfiles = RePackCatFile(outarray, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
|
|
8016
|
+
checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
|
|
8017
|
+
return listarrayfiles
|
|
8457
8018
|
|
|
8458
8019
|
|
|
8459
|
-
def UnPackCatFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, preservepermissions=True, preservetime=True, seektoend=False, verbose=False, returnfp=False):
|
|
8020
|
+
def UnPackCatFile(infile, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, preservepermissions=True, preservetime=True, seektoend=False, verbose=False, returnfp=False):
|
|
8460
8021
|
if(outdir is not None):
|
|
8461
8022
|
outdir = RemoveWindowsPath(outdir)
|
|
8462
8023
|
if(verbose):
|
|
8463
8024
|
logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
|
|
8464
8025
|
if(isinstance(infile, dict)):
|
|
8465
|
-
|
|
8026
|
+
listarrayfiles = infile
|
|
8466
8027
|
else:
|
|
8467
8028
|
if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
|
|
8468
8029
|
infile = RemoveWindowsPath(infile)
|
|
8469
|
-
|
|
8470
|
-
if(not
|
|
8030
|
+
listarrayfiles = CatFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
8031
|
+
if(not listarrayfiles):
|
|
8471
8032
|
return False
|
|
8472
|
-
lenlist = len(
|
|
8473
|
-
fnumfiles = int(
|
|
8033
|
+
lenlist = len(listarrayfiles['ffilelist'])
|
|
8034
|
+
fnumfiles = int(listarrayfiles['fnumfiles'])
|
|
8474
8035
|
lcfi = 0
|
|
8475
|
-
lcfx = int(
|
|
8476
|
-
if(lenlist >
|
|
8036
|
+
lcfx = int(listarrayfiles['fnumfiles'])
|
|
8037
|
+
if(lenlist > listarrayfiles['fnumfiles'] or lenlist < listarrayfiles['fnumfiles']):
|
|
8477
8038
|
lcfx = int(lenlist)
|
|
8478
8039
|
else:
|
|
8479
|
-
lcfx = int(
|
|
8040
|
+
lcfx = int(listarrayfiles['fnumfiles'])
|
|
8480
8041
|
while(lcfi < lcfx):
|
|
8481
8042
|
funame = ""
|
|
8482
8043
|
try:
|
|
8483
8044
|
import pwd
|
|
8484
8045
|
try:
|
|
8485
8046
|
userinfo = pwd.getpwuid(
|
|
8486
|
-
|
|
8047
|
+
listarrayfiles['ffilelist'][lcfi]['fuid'])
|
|
8487
8048
|
funame = userinfo.pw_name
|
|
8488
8049
|
except KeyError:
|
|
8489
8050
|
funame = ""
|
|
@@ -8494,7 +8055,7 @@ def UnPackCatFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0,
|
|
|
8494
8055
|
import grp
|
|
8495
8056
|
try:
|
|
8496
8057
|
groupinfo = grp.getgrgid(
|
|
8497
|
-
|
|
8058
|
+
listarrayfiles['ffilelist'][lcfi]['fgid'])
|
|
8498
8059
|
fgname = groupinfo.gr_name
|
|
8499
8060
|
except KeyError:
|
|
8500
8061
|
fgname = ""
|
|
@@ -8502,15 +8063,15 @@ def UnPackCatFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0,
|
|
|
8502
8063
|
fgname = ""
|
|
8503
8064
|
if(verbose):
|
|
8504
8065
|
VerbosePrintOut(PrependPath(
|
|
8505
|
-
outdir,
|
|
8506
|
-
if(
|
|
8507
|
-
with open(PrependPath(outdir,
|
|
8508
|
-
if(not
|
|
8509
|
-
|
|
8510
|
-
|
|
8511
|
-
|
|
8066
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
|
|
8067
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 0 or listarrayfiles['ffilelist'][lcfi]['ftype'] == 7):
|
|
8068
|
+
with open(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), "wb") as fpc:
|
|
8069
|
+
if(not listarrayfiles['ffilelist'][lcfi]['fcontentasfile']):
|
|
8070
|
+
listarrayfiles['ffilelist'][lcfi]['fcontents'] = MkTempFile(
|
|
8071
|
+
listarrayfiles['ffilelist'][lcfi]['fcontents'])
|
|
8072
|
+
listarrayfiles['ffilelist'][lcfi]['fcontents'].seek(0, 0)
|
|
8512
8073
|
shutil.copyfileobj(
|
|
8513
|
-
|
|
8074
|
+
listarrayfiles['ffilelist'][lcfi]['fcontents'], fpc)
|
|
8514
8075
|
try:
|
|
8515
8076
|
fpc.flush()
|
|
8516
8077
|
if(hasattr(os, "sync")):
|
|
@@ -8521,20 +8082,20 @@ def UnPackCatFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0,
|
|
|
8521
8082
|
pass
|
|
8522
8083
|
except OSError:
|
|
8523
8084
|
pass
|
|
8524
|
-
if(hasattr(os, "chown") and funame ==
|
|
8525
|
-
os.chown(PrependPath(outdir,
|
|
8526
|
-
|
|
8085
|
+
if(hasattr(os, "chown") and funame == listarrayfiles['ffilelist'][lcfi]['funame'] and fgname == listarrayfiles['ffilelist'][lcfi]['fgname'] and preservepermissions):
|
|
8086
|
+
os.chown(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']),
|
|
8087
|
+
listarrayfiles['ffilelist'][lcfi]['fuid'], listarrayfiles['ffilelist'][lcfi]['fgid'])
|
|
8527
8088
|
if(preservepermissions):
|
|
8528
8089
|
os.chmod(PrependPath(
|
|
8529
|
-
outdir,
|
|
8090
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), listarrayfiles['ffilelist'][lcfi]['fchmode'])
|
|
8530
8091
|
if(preservetime):
|
|
8531
|
-
os.utime(PrependPath(outdir,
|
|
8532
|
-
|
|
8533
|
-
if(
|
|
8092
|
+
os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
|
|
8093
|
+
listarrayfiles['ffilelist'][lcfi]['fatime'], listarrayfiles['ffilelist'][lcfi]['fmtime']))
|
|
8094
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 1):
|
|
8534
8095
|
if(followlink):
|
|
8535
|
-
getflinkpath =
|
|
8536
|
-
flinkid =
|
|
8537
|
-
flinkinfo =
|
|
8096
|
+
getflinkpath = listarrayfiles['ffilelist'][lcfi]['flinkname']
|
|
8097
|
+
flinkid = prelistarrayfiles['filetoid'][getflinkpath]
|
|
8098
|
+
flinkinfo = listarrayfiles['ffilelist'][flinkid]
|
|
8538
8099
|
funame = ""
|
|
8539
8100
|
try:
|
|
8540
8101
|
import pwd
|
|
@@ -8556,7 +8117,7 @@ def UnPackCatFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0,
|
|
|
8556
8117
|
except ImportError:
|
|
8557
8118
|
fgname = ""
|
|
8558
8119
|
if(flinkinfo['ftype'] == 0 or flinkinfo['ftype'] == 7):
|
|
8559
|
-
with open(PrependPath(outdir,
|
|
8120
|
+
with open(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), "wb") as fpc:
|
|
8560
8121
|
if(not flinkinfo['fcontentasfile']):
|
|
8561
8122
|
flinkinfo['fcontents'] = MkTempFile(
|
|
8562
8123
|
flinkinfo['fcontents'])
|
|
@@ -8574,46 +8135,46 @@ def UnPackCatFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0,
|
|
|
8574
8135
|
pass
|
|
8575
8136
|
if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
|
|
8576
8137
|
os.chown(PrependPath(
|
|
8577
|
-
outdir,
|
|
8138
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fuid'], flinkinfo['fgid'])
|
|
8578
8139
|
if(preservepermissions):
|
|
8579
8140
|
os.chmod(PrependPath(
|
|
8580
|
-
outdir,
|
|
8141
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
|
|
8581
8142
|
if(preservetime):
|
|
8582
|
-
os.utime(PrependPath(outdir,
|
|
8143
|
+
os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
|
|
8583
8144
|
flinkinfo['fatime'], flinkinfo['fmtime']))
|
|
8584
8145
|
if(flinkinfo['ftype'] == 1):
|
|
8585
8146
|
os.link(flinkinfo['flinkname'], PrependPath(
|
|
8586
|
-
outdir,
|
|
8147
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
|
|
8587
8148
|
if(flinkinfo['ftype'] == 2):
|
|
8588
8149
|
os.symlink(flinkinfo['flinkname'], PrependPath(
|
|
8589
|
-
outdir,
|
|
8150
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
|
|
8590
8151
|
if(flinkinfo['ftype'] == 5):
|
|
8591
8152
|
if(preservepermissions):
|
|
8592
8153
|
os.mkdir(PrependPath(
|
|
8593
|
-
outdir,
|
|
8154
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
|
|
8594
8155
|
else:
|
|
8595
8156
|
os.mkdir(PrependPath(
|
|
8596
|
-
outdir,
|
|
8157
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
|
|
8597
8158
|
if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
|
|
8598
8159
|
os.chown(PrependPath(
|
|
8599
|
-
outdir,
|
|
8160
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fuid'], flinkinfo['fgid'])
|
|
8600
8161
|
if(preservepermissions):
|
|
8601
8162
|
os.chmod(PrependPath(
|
|
8602
|
-
outdir,
|
|
8163
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
|
|
8603
8164
|
if(preservetime):
|
|
8604
|
-
os.utime(PrependPath(outdir,
|
|
8165
|
+
os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
|
|
8605
8166
|
 flinkinfo['fatime'], flinkinfo['fmtime']))
 if(flinkinfo['ftype'] == 6 and hasattr(os, "mkfifo")):
 os.mkfifo(PrependPath(
-outdir,
+outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
 else:
-os.link(
-outdir,
-if(
+os.link(listarrayfiles['ffilelist'][lcfi]['flinkname'], PrependPath(
+outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
+if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 2):
 if(followlink):
-getflinkpath =
-flinkid =
-flinkinfo =
+getflinkpath = listarrayfiles['ffilelist'][lcfi]['flinkname']
+flinkid = prelistarrayfiles['filetoid'][getflinkpath]
+flinkinfo = listarrayfiles['ffilelist'][flinkid]
 funame = ""
 try:
 import pwd
@@ -8635,7 +8196,7 @@ def UnPackCatFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0,
 except ImportError:
 fgname = ""
 if(flinkinfo['ftype'] == 0 or flinkinfo['ftype'] == 7):
-with open(PrependPath(outdir,
+with open(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), "wb") as fpc:
 if(not flinkinfo['fcontentasfile']):
 flinkinfo['fcontents'] = MkTempFile(
 flinkinfo['fcontents'])
@@ -8653,71 +8214,71 @@ def UnPackCatFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0,
 pass
 if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
 os.chown(PrependPath(
-outdir,
+outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fuid'], flinkinfo['fgid'])
 if(preservepermissions):
 os.chmod(PrependPath(
-outdir,
+outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
 if(preservetime):
-os.utime(PrependPath(outdir,
+os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
 flinkinfo['fatime'], flinkinfo['fmtime']))
 if(flinkinfo['ftype'] == 1):
 os.link(flinkinfo['flinkname'], PrependPath(
-outdir,
+outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
 if(flinkinfo['ftype'] == 2):
 os.symlink(flinkinfo['flinkname'], PrependPath(
-outdir,
+outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
 if(flinkinfo['ftype'] == 5):
 if(preservepermissions):
 os.mkdir(PrependPath(
-outdir,
+outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
 else:
 os.mkdir(PrependPath(
-outdir,
+outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
 if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
 os.chown(PrependPath(
-outdir,
+outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fuid'], flinkinfo['fgid'])
 if(preservepermissions):
 os.chmod(PrependPath(
-outdir,
+outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
 if(preservetime):
-os.utime(PrependPath(outdir,
+os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
 flinkinfo['fatime'], flinkinfo['fmtime']))
 if(flinkinfo['ftype'] == 6 and hasattr(os, "mkfifo")):
 os.mkfifo(PrependPath(
-outdir,
+outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
 else:
-os.symlink(
-outdir,
-if(
+os.symlink(listarrayfiles['ffilelist'][lcfi]['flinkname'], PrependPath(
+outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
+if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 5):
 if(preservepermissions):
 os.mkdir(PrependPath(
-outdir,
+outdir, listarrayfiles['ffilelist'][lcfi]['fname']), listarrayfiles['ffilelist'][lcfi]['fchmode'])
 else:
 os.mkdir(PrependPath(
-outdir,
-if(hasattr(os, "chown") and funame ==
-os.chown(PrependPath(outdir,
-
+outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
+if(hasattr(os, "chown") and funame == listarrayfiles['ffilelist'][lcfi]['funame'] and fgname == listarrayfiles['ffilelist'][lcfi]['fgname'] and preservepermissions):
+os.chown(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']),
+listarrayfiles['ffilelist'][lcfi]['fuid'], listarrayfiles['ffilelist'][lcfi]['fgid'])
 if(preservepermissions):
 os.chmod(PrependPath(
-outdir,
+outdir, listarrayfiles['ffilelist'][lcfi]['fname']), listarrayfiles['ffilelist'][lcfi]['fchmode'])
 if(preservetime):
-os.utime(PrependPath(outdir,
-
-if(
+os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
+listarrayfiles['ffilelist'][lcfi]['fatime'], listarrayfiles['ffilelist'][lcfi]['fmtime']))
+if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 6 and hasattr(os, "mkfifo")):
 os.mkfifo(PrependPath(
-outdir,
+outdir, listarrayfiles['ffilelist'][lcfi]['fname']), listarrayfiles['ffilelist'][lcfi]['fchmode'])
 lcfi = lcfi + 1
 if(returnfp):
-return
+return listarrayfiles['ffilelist']['fp']
 else:
 return True


 def UnPackCatFileString(instr, outdir=None, followlink=False, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
 fp = MkTempFile(instr)
-
-return
+listarrayfiles = UnPackCatFile(fp, outdir, followlink, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
+return listarrayfiles

 def ftype_to_str(ftype):
 mapping = {
@@ -8734,64 +8295,64 @@ def ftype_to_str(ftype):
 # Default to "file" if unknown
 return mapping.get(ftype, "file")

-def CatFileListFiles(infile, fmttype="auto", seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
+def CatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
 if(verbose):
 logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
 if(isinstance(infile, dict)):
-
+listarrayfiles = infile
 else:
 if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
 infile = RemoveWindowsPath(infile)
-
-if(not
+listarrayfiles = CatFileToArray(infile, fmttype, filestart, seekstart, seekend, True, False, False, skipchecksum, formatspecs, seektoend, returnfp)
+if(not listarrayfiles):
 return False
-lenlist = len(
-fnumfiles = int(
+lenlist = len(listarrayfiles['ffilelist'])
+fnumfiles = int(listarrayfiles['fnumfiles'])
 lcfi = 0
-lcfx = int(
-if(lenlist >
+lcfx = int(listarrayfiles['fnumfiles'])
+if(lenlist > listarrayfiles['fnumfiles'] or lenlist < listarrayfiles['fnumfiles']):
 lcfx = int(lenlist)
 else:
-lcfx = int(
+lcfx = int(listarrayfiles['fnumfiles'])
 returnval = {}
 while(lcfi < lcfx):
-returnval.update({lcfi:
+returnval.update({lcfi: listarrayfiles['ffilelist'][lcfi]['fname']})
 if(not verbose):
-VerbosePrintOut(
+VerbosePrintOut(listarrayfiles['ffilelist'][lcfi]['fname'])
 if(verbose):
 permissions = {'access': {'0': ('---'), '1': ('--x'), '2': ('-w-'), '3': ('-wx'), '4': (
 'r--'), '5': ('r-x'), '6': ('rw-'), '7': ('rwx')}, 'roles': {0: 'owner', 1: 'group', 2: 'other'}}
-printfname =
-if(
-printfname =
-" link to " +
-if(
-printfname =
-" -> " +
-fuprint =
+printfname = listarrayfiles['ffilelist'][lcfi]['fname']
+if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 1):
+printfname = listarrayfiles['ffilelist'][lcfi]['fname'] + \
+" link to " + listarrayfiles['ffilelist'][lcfi]['flinkname']
+if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 2):
+printfname = listarrayfiles['ffilelist'][lcfi]['fname'] + \
+" -> " + listarrayfiles['ffilelist'][lcfi]['flinkname']
+fuprint = listarrayfiles['ffilelist'][lcfi]['funame']
 if(len(fuprint) <= 0):
-fuprint =
-fgprint =
+fuprint = listarrayfiles['ffilelist'][lcfi]['fuid']
+fgprint = listarrayfiles['ffilelist'][lcfi]['fgname']
 if(len(fgprint) <= 0):
-fgprint =
+fgprint = listarrayfiles['ffilelist'][lcfi]['fgid']
 if(newstyle):
-VerbosePrintOut(ftype_to_str(
-
+VerbosePrintOut(ftype_to_str(listarrayfiles['ffilelist'][lcfi]['ftype']) + "\t" + listarrayfiles['ffilelist'][lcfi]['fcompression'] + "\t" + str(
+listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + "\t" + printfname)
 else:
-VerbosePrintOut(PrintPermissionString(
-
+VerbosePrintOut(PrintPermissionString(listarrayfiles['ffilelist'][lcfi]['fmode'], listarrayfiles['ffilelist'][lcfi]['ftype']) + " " + str(fuprint) + "/" + str(fgprint) + " " + str(
+listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + " " + datetime.datetime.utcfromtimestamp(listarrayfiles['ffilelist'][lcfi]['fmtime']).strftime('%Y-%m-%d %H:%M') + " " + printfname)
 lcfi = lcfi + 1
 if(returnfp):
-return
+return listarrayfiles['fp']
 else:
 return True


 def CatFileStringListFiles(instr, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
 fp = MkTempFile(instr)
-
+listarrayfiles = CatFileListFiles(
 instr, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, returnfp)
-return
+return listarrayfiles


 def TarFileListFiles(infile, verbose=False, returnfp=False):
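The hunk above adds a filestart offset parameter to CatFileListFiles(), inserted ahead of seekstart, and passes it through to CatFileToArray(). A minimal usage sketch, assuming the pycatfile module is importable and an archive named example.cat exists (both are assumptions, not part of the diff):

    import pycatfile

    # filestart is the new leading-offset argument; the remaining keyword
    # arguments keep their previous meaning.
    pycatfile.CatFileListFiles("example.cat", fmttype="auto", filestart=0,
                               seekstart=0, seekend=0, verbose=True)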
@@ -8807,7 +8368,7 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
 if(not infile):
 return False
 infile.seek(0, 0)
-elif(re.findall(
+elif(re.findall(__download_proto_support__, infile)):
 infile = download_file_from_internet_file(infile)
 infile.seek(0, 0)
 if(not infile):
@@ -8831,7 +8392,7 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
 return False
 try:
 if(hasattr(infile, "read") or hasattr(infile, "write")):
-compresscheck = CheckCompressionType(infile, formatspecs, False)
+compresscheck = CheckCompressionType(infile, formatspecs, 0, False)
 if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
 formatspecs = formatspecs[compresscheck]
 if(compresscheck=="zstd"):
@@ -8843,7 +8404,7 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
 else:
 tarfp = tarfile.open(fileobj=infile, mode="r")
 else:
-compresscheck = CheckCompressionType(infile, formatspecs, True)
+compresscheck = CheckCompressionType(infile, formatspecs, 0, True)
 if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
 formatspecs = formatspecs[compresscheck]
 if(compresscheck=="zstd"):
@@ -8914,7 +8475,7 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
 member.size).rjust(15) + " " + datetime.datetime.utcfromtimestamp(member.mtime).strftime('%Y-%m-%d %H:%M') + " " + printfname)
 lcfi = lcfi + 1
 if(returnfp):
-return
+return listarrayfiles['fp']
 else:
 return True

@@ -8932,7 +8493,7 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
 if(not infile):
 return False
 infile.seek(0, 0)
-elif(re.findall(
+elif(re.findall(__download_proto_support__, infile)):
 infile = download_file_from_internet_file(infile)
 infile.seek(0, 0)
 if(not infile):
@@ -9047,7 +8608,7 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
 15) + " " + datetime.datetime.utcfromtimestamp(int(time.mktime(member.date_time + (0, 0, -1)))).strftime('%Y-%m-%d %H:%M') + " " + printfname)
 lcfi = lcfi + 1
 if(returnfp):
-return
+return listarrayfiles['fp']
 else:
 return True

@@ -9185,7 +8746,7 @@ if(rarfile_support):
 member.file_size).rjust(15) + " " + member.mtime.strftime('%Y-%m-%d %H:%M') + " " + printfname)
 lcfi = lcfi + 1
 if(returnfp):
-return
+return listarrayfiles['fp']
 else:
 return True

@@ -9203,7 +8764,7 @@ if(py7zr_support):
 returnval = {}
 szpfp = py7zr.SevenZipFile(infile, mode="r")
 file_content = szpfp.readall()
-#sztest = szpfp.testzip()
+#sztest = szpfp.testzip()
 sztestalt = szpfp.test()
 if(sztestalt):
 VerbosePrintOut("Bad file found!")
@@ -9292,7 +8853,7 @@ if(py7zr_support):
 fsize).rjust(15) + " " + member.creationtime.strftime('%Y-%m-%d %H:%M') + " " + printfname)
 lcfi = lcfi + 1
 if(returnfp):
-return
+return listarrayfiles['fp']
 else:
 return True

@@ -9300,7 +8861,7 @@ if(py7zr_support):
 def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict__, seektoend=False, newstyle=False, returnfp=False):
 if(verbose):
 logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
-checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
+checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
 if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
 formatspecs = formatspecs[checkcompressfile]
 if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
@@ -9322,9 +8883,9 @@ def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compress
 outarray = MkTempFile()
 packform = PackCatFile(infiles, outarray, dirlistfromtxt, compression, compresswholefile,
 compressionlevel, followlink, checksumtype, formatspecs, False, True)
-
+listarrayfiles = CatFileListFiles(
 outarray, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
-return
+return listarrayfiles

 """
 PyNeoFile compatibility layer
@@ -9343,31 +8904,44 @@ def make_empty_archive_file_neo(outfile=None, fmttype=None, checksumtype='crc32'
 return make_empty_file_neo(outfile, fmttype, checksumtype, formatspecs, encoding, returnfp)

 def pack_neo(infiles, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["crc32", "crc32", "crc32", "crc32"], encoding="UTF-8", compression="auto", compression_level=None, returnfp=False):
-return
+return PackCatFile(infiles, outfile, False, "auto", compression, False, compression_level, compressionlistalt, False, checksumtypes, [], {}, formatspecs, False, returnfp)

 def archive_to_array_neo(infile, formatspecs=__file_format_multi_dict__, listonly=False, skipchecksum=False, uncompress=True, returnfp=False):
-return
+return CatFileToArray(infile, "auto", 0, 0, 0, listonly, True, uncompress, skipchecksum, formatspecs, False, returnfp)

 def unpack_neo(infile, outdir='.', formatspecs=__file_format_multi_dict__, skipchecksum=False, uncompress=True, returnfp=False):
-return
+return UnPackCatFile(infile, outdir, False, 0, 0, skipchecksum, formatspecs, True, True, False, False, returnfp)

 def repack_neo(infile, outfile=None, formatspecs=__file_format_dict__, checksumtypes=["crc32", "crc32", "crc32", "crc32"], compression="auto", compression_level=None, returnfp=False):
-return
+return RePackCatFile(infile, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)

 def archivefilevalidate_neo(infile, formatspecs=__file_format_multi_dict__, verbose=False, return_details=False, returnfp=False):
-return
+return CatFileValidate(infile, "auto", formatspecs, False, verbose, returnfp)

 def archivefilelistfiles_neo(infile, formatspecs=__file_format_multi_dict__, advanced=False, include_dirs=True, returnfp=False):
-return
+return CatFileListFiles(infile, "auto", 0, 0, False, formatspecs, False, True, advanced, returnfp)

 def convert_foreign_to_neo(infile, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["crc32", "crc32", "crc32", "crc32"], compression="auto", compression_level=None, returnfp=False):
-intmp = InFileToArray(infile, 0, 0, False, True, False, formatspecs, False, False)
-return
+intmp = InFileToArray(infile, 0, 0, 0, False, True, False, formatspecs, False, False)
+return RePackCatFile(intmp, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)
+
+def detect_cwd(ftp, file_dir):
+"""
+Test whether cwd into file_dir works. Returns True if it does,
+False if not (so absolute paths should be used).
+"""
+if not file_dir or file_dir in ("/", ""):
+return False  # nothing to cwd into
+try:
+ftp.cwd(file_dir)
+return True
+except all_errors:
+return False

 def download_file_from_ftp_file(url):
 urlparts = urlparse(url)
-file_name = os.path.basename(urlparts.path)
-file_dir = os.path.dirname(urlparts.path)
+file_name = os.path.basename(unquote(urlparts.path))
+file_dir = os.path.dirname(unquote(urlparts.path))
 if(urlparts.username is not None):
 ftp_username = urlparts.username
 else:
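In the hunk above, the PyNeoFile compatibility wrappers delegate directly to the PackCatFile/UnPackCatFile/CatFile* implementations, and a detect_cwd() helper is introduced for the FTP code. A minimal sketch of what the delegation means in practice, assuming pycatfile is importable and backup.cat exists (both are assumptions); the second call spells out the defaults that archive_to_array_neo() forwards:

    import pycatfile

    # Thin wrapper call and its expanded equivalent per the diff above.
    pycatfile.archive_to_array_neo("backup.cat")
    pycatfile.CatFileToArray("backup.cat", "auto", 0, 0, 0, False, True, True,
                             False, pycatfile.__file_format_multi_dict__, False, False)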
@@ -9384,7 +8958,7 @@ def download_file_from_ftp_file(url):
 ftp = FTP_TLS()
 else:
 return False
-if(urlparts.scheme == "sftp"):
+if(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
 if(__use_pysftp__):
 return download_file_from_pysftp_file(url)
 else:
@@ -9402,26 +8976,70 @@ def download_file_from_ftp_file(url):
 except socket.timeout:
 log.info("Error With URL "+url)
 return False
-
-
-
+if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
+try:
+ftp.auth()
+except all_errors:
+pass
+ftp.login(ftp_username, ftp_password)
+if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
+try:
+ftp.prot_p()
+except all_errors:
+ftp.prot_c()
+# UTF-8 filenames if supported
+try:
+ftp.sendcmd("OPTS UTF8 ON")
+ftp.encoding = "utf-8"
+except all_errors:
+pass
+is_cwd_allowed = detect_cwd(ftp, file_dir)
 ftpfile = MkTempFile()
-
-
+# Try EPSV first, then fall back
+try:
+ftp.force_epsv = True
+ftp.sendcmd("EPSV")  # request extended passive
+if(is_cwd_allowed):
+ftp.retrbinary("RETR "+file_name, ftpfile.write)
+else:
+ftp.retrbinary("RETR "+unquote(urlparts.path), ftpfile.write)
+except all_errors:
+try:
+ftp.set_pasv(True)
+if(is_cwd_allowed):
+ftp.retrbinary("RETR "+file_name, ftpfile.write)
+else:
+ftp.retrbinary("RETR "+unquote(urlparts.path), ftpfile.write)
+except all_errors:
+ftp.set_pasv(False)
+if(is_cwd_allowed):
+ftp.retrbinary("RETR "+file_name, ftpfile.write)
+else:
+ftp.retrbinary("RETR "+unquote(urlparts.path), ftpfile.write)
 ftp.close()
 ftpfile.seek(0, 0)
 return ftpfile


+def download_file_from_ftps_file(url):
+return download_file_from_ftp_file(url)
+
+
 def download_file_from_ftp_string(url):
 ftpfile = download_file_from_ftp_file(url)
-
+ftpout = ftpfile.read()
+ftpfile.close()
+return ftpout
+
+
+def download_file_from_ftps_string(url):
+return download_file_from_ftp_string(url)


 def upload_file_to_ftp_file(ftpfile, url):
 urlparts = urlparse(url)
-file_name = os.path.basename(urlparts.path)
-file_dir = os.path.dirname(urlparts.path)
+file_name = os.path.basename(unquote(urlparts.path))
+file_dir = os.path.dirname(unquote(urlparts.path))
 if(urlparts.username is not None):
 ftp_username = urlparts.username
 else:
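The rewritten download path above logs in (upgrading to TLS for ftps), switches the connection to UTF-8 file names when the server supports it, asks detect_cwd() whether it can change into the file's directory, and then retries the RETR transfer in extended passive, plain passive, and finally active mode. A standalone sketch of that retry pattern using only the standard library; the host and path are placeholders, not values from the package:

    from ftplib import FTP, all_errors
    from io import BytesIO

    buf = BytesIO()
    ftp = FTP()
    ftp.connect("ftp.example.com", 21)   # placeholder host
    ftp.login()                          # anonymous login
    try:
        ftp.sendcmd("EPSV")              # extended passive first
        ftp.retrbinary("RETR /pub/example.txt", buf.write)
    except all_errors:
        try:
            ftp.set_pasv(True)           # plain passive
            ftp.retrbinary("RETR /pub/example.txt", buf.write)
        except all_errors:
            ftp.set_pasv(False)          # active mode as a last resort
            ftp.retrbinary("RETR /pub/example.txt", buf.write)
    ftp.close()
    buf.seek(0)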
@@ -9438,7 +9056,7 @@ def upload_file_to_ftp_file(ftpfile, url):
 ftp = FTP_TLS()
 else:
 return False
-if(urlparts.scheme == "sftp"):
+if(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
 if(__use_pysftp__):
 return upload_file_to_pysftp_file(url)
 else:
@@ -9456,15 +9074,55 @@ def upload_file_to_ftp_file(ftpfile, url):
 except socket.timeout:
 log.info("Error With URL "+url)
 return False
-
-
-
-
+if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
+try:
+ftp.auth()
+except all_errors:
+pass
+ftp.login(ftp_username, ftp_password)
+if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
+try:
+ftp.prot_p()
+except all_errors:
+ftp.prot_c()
+# UTF-8 filenames if supported
+try:
+ftp.sendcmd("OPTS UTF8 ON")
+ftp.encoding = "utf-8"
+except all_errors:
+pass
+is_cwd_allowed = detect_cwd(ftp, file_dir)
+ftpfile.seek(0, 0)
+# Try EPSV first, then fall back
+try:
+ftp.force_epsv = True
+ftp.sendcmd("EPSV")  # request extended passive
+if(is_cwd_allowed):
+ftp.storbinary("STOR "+file_name, ftpfile)
+else:
+ftp.storbinary("STOR "+unquote(urlparts.path), ftpfile)
+except all_errors:
+try:
+ftp.set_pasv(True)
+if(is_cwd_allowed):
+ftp.storbinary("STOR "+file_name, ftpfile)
+else:
+ftp.storbinary("STOR "+unquote(urlparts.path), ftpfile)
+except all_errors:
+ftp.set_pasv(False)
+if(is_cwd_allowed):
+ftp.storbinary("STOR "+file_name, ftpfile)
+else:
+ftp.storbinary("STOR "+unquote(urlparts.path), ftpfile)
 ftp.close()
 ftpfile.seek(0, 0)
 return ftpfile


+def upload_file_to_ftps_file(ftpfile, url):
+return upload_file_to_ftp_file(ftpfile, url)
+
+
 def upload_file_to_ftp_string(ftpstring, url):
 ftpfileo = MkTempFile(ftpstring)
 ftpfile = upload_file_to_ftp_file(ftpfileo, url)
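upload_file_to_ftp_file() mirrors the download path above, issuing STOR instead of RETR with the same EPSV/PASV/active fallbacks, and the new upload_file_to_ftps_file() simply forwards to it. A minimal usage sketch; the URL, credentials, and payload are placeholders and pycatfile is assumed importable:

    from io import BytesIO
    import pycatfile

    payload = BytesIO(b"hello world\n")
    # Returns the file object on success, or False on failure.
    result = pycatfile.upload_file_to_ftp_file(
        payload, "ftp://user:pass@ftp.example.com/upload/hello.txt")
    print(bool(result))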
@@ -9472,6 +9130,10 @@ def upload_file_to_ftp_string(ftpstring, url):
 return ftpfile


+def upload_file_to_ftps_string(ftpstring, url):
+return upload_file_to_ftp_string(ftpstring, url)
+
+
 class RawIteratorWrapper:
 def __init__(self, iterator):
 self.iterator = iterator
@@ -9509,7 +9171,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
 urlparts.params, urlparts.query, urlparts.fragment))

 # Handle SFTP/FTP
-if urlparts.scheme == "sftp":
+if urlparts.scheme == "sftp" or urlparts.scheme == "scp":
 if __use_pysftp__:
 return download_file_from_pysftp_file(url)
 else:
@@ -9582,14 +9244,16 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):

 def download_file_from_http_string(url, headers=geturls_headers_pyfile_python_alt, usehttp=__use_http_lib__):
 httpfile = download_file_from_http_file(url, headers, usehttp)
-
+httpout = httpfile.read()
+httpfile.close()
+return httpout


 if(haveparamiko):
 def download_file_from_sftp_file(url):
 urlparts = urlparse(url)
-file_name = os.path.basename(urlparts.path)
-file_dir = os.path.dirname(urlparts.path)
+file_name = os.path.basename(unquote(urlparts.path))
+file_dir = os.path.dirname(unquote(urlparts.path))
 sftp_port = urlparts.port
 if(urlparts.port is None):
 sftp_port = 22
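A detail that recurs through these hunks: remote paths are now passed through unquote() before being split into file_dir and file_name, so percent-encoded characters in an FTP/SFTP URL are decoded before they reach the server. A small standard-library illustration; the URL is a placeholder:

    from urllib.parse import unquote, urlparse

    parts = urlparse("sftp://host.example.com/backups/my%20archive.cat")
    print(unquote(parts.path))   # -> /backups/my archive.cat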
@@ -9609,14 +9273,14 @@ if(haveparamiko):
 return download_file_from_ftp_file(url)
 elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
 return download_file_from_http_file(url)
-if(urlparts.scheme != "sftp"):
+if(urlparts.scheme != "sftp" and urlparts.scheme != "scp"):
 return False
 ssh = paramiko.SSHClient()
 ssh.load_system_host_keys()
 ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
 try:
 ssh.connect(urlparts.hostname, port=sftp_port,
-username=
+username=sftp_username, password=urlparts.password)
 except paramiko.ssh_exception.SSHException:
 return False
 except socket.gaierror:
@@ -9627,7 +9291,7 @@ if(haveparamiko):
 return False
 sftp = ssh.open_sftp()
 sftpfile = MkTempFile()
-sftp.getfo(urlparts.path, sftpfile)
+sftp.getfo(unquote(urlparts.path), sftpfile)
 sftp.close()
 ssh.close()
 sftpfile.seek(0, 0)
@@ -9639,7 +9303,9 @@ else:
 if(haveparamiko):
 def download_file_from_sftp_string(url):
 sftpfile = download_file_from_sftp_file(url)
-
+sftpout = sftpfile.read()
+sftpfile.close()
+return sftpout
 else:
 def download_file_from_sftp_string(url):
 return False
@@ -9647,8 +9313,8 @@ else:
 if(haveparamiko):
 def upload_file_to_sftp_file(sftpfile, url):
 urlparts = urlparse(url)
-file_name = os.path.basename(urlparts.path)
-file_dir = os.path.dirname(urlparts.path)
+file_name = os.path.basename(unquote(urlparts.path))
+file_dir = os.path.dirname(unquote(urlparts.path))
 sftp_port = urlparts.port
 if(urlparts.port is None):
 sftp_port = 22
@@ -9665,17 +9331,17 @@ if(haveparamiko):
 else:
 sftp_password = ""
 if(urlparts.scheme == "ftp"):
-return upload_file_to_ftp_file(url)
+return upload_file_to_ftp_file(sftpfile, url)
 elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
 return False
-if(urlparts.scheme != "sftp"):
+if(urlparts.scheme != "sftp" and urlparts.scheme != "scp"):
 return False
 ssh = paramiko.SSHClient()
 ssh.load_system_host_keys()
 ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
 try:
 ssh.connect(urlparts.hostname, port=sftp_port,
-username=
+username=sftp_username, password=sftp_password)
 except paramiko.ssh_exception.SSHException:
 return False
 except socket.gaierror:
@@ -9685,7 +9351,8 @@ if(haveparamiko):
 log.info("Error With URL "+url)
 return False
 sftp = ssh.open_sftp()
-
+sftpfile.seek(0, 0)
+sftp.putfo(sftpfile, unquote(urlparts.path))
 sftp.close()
 ssh.close()
 sftpfile.seek(0, 0)
@@ -9707,8 +9374,8 @@ else:
 if(havepysftp):
 def download_file_from_pysftp_file(url):
 urlparts = urlparse(url)
-file_name = os.path.basename(urlparts.path)
-file_dir = os.path.dirname(urlparts.path)
+file_name = os.path.basename(unquote(urlparts.path))
+file_dir = os.path.dirname(unquote(urlparts.path))
 sftp_port = urlparts.port
 if(urlparts.port is None):
 sftp_port = 22
@@ -9728,11 +9395,11 @@ if(havepysftp):
 return download_file_from_ftp_file(url)
 elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
 return download_file_from_http_file(url)
-if(urlparts.scheme != "sftp"):
+if(urlparts.scheme != "sftp" and urlparts.scheme != "scp"):
 return False
 try:
-pysftp.Connection(urlparts.hostname, port=sftp_port,
-username=
+sftp = pysftp.Connection(urlparts.hostname, port=sftp_port,
+username=sftp_username, password=sftp_password)
 except paramiko.ssh_exception.SSHException:
 return False
 except socket.gaierror:
@@ -9741,9 +9408,8 @@ if(havepysftp):
 except socket.timeout:
 log.info("Error With URL "+url)
 return False
-sftp = ssh.open_sftp()
 sftpfile = MkTempFile()
-sftp.getfo(urlparts.path, sftpfile)
+sftp.getfo(unquote(urlparts.path), sftpfile)
 sftp.close()
 ssh.close()
 sftpfile.seek(0, 0)
@@ -9755,7 +9421,9 @@ else:
 if(havepysftp):
 def download_file_from_pysftp_string(url):
 sftpfile = download_file_from_pysftp_file(url)
-
+sftpout = sftpfile.read()
+sftpfile.close()
+return sftpout
 else:
 def download_file_from_pysftp_string(url):
 return False
@@ -9763,8 +9431,8 @@ else:
 if(havepysftp):
 def upload_file_to_pysftp_file(sftpfile, url):
 urlparts = urlparse(url)
-file_name = os.path.basename(urlparts.path)
-file_dir = os.path.dirname(urlparts.path)
+file_name = os.path.basename(unquote(urlparts.path))
+file_dir = os.path.dirname(unquote(urlparts.path))
 sftp_port = urlparts.port
 if(urlparts.port is None):
 sftp_port = 22
@@ -9781,14 +9449,14 @@ if(havepysftp):
 else:
 sftp_password = ""
 if(urlparts.scheme == "ftp"):
-return upload_file_to_ftp_file(url)
+return upload_file_to_ftp_file(sftpfile, url)
 elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
 return False
-if(urlparts.scheme != "sftp"):
+if(urlparts.scheme != "sftp" and urlparts.scheme != "scp"):
 return False
 try:
-pysftp.Connection(urlparts.hostname, port=sftp_port,
-username=
+sftp = pysftp.Connection(urlparts.hostname, port=sftp_port,
+username=sftp_username, password=sftp_password)
 except paramiko.ssh_exception.SSHException:
 return False
 except socket.gaierror:
@@ -9797,8 +9465,8 @@ if(havepysftp):
 except socket.timeout:
 log.info("Error With URL "+url)
 return False
-
-sftp.putfo(sftpfile, urlparts.path)
+sftpfile.seek(0, 0)
+sftp.putfo(sftpfile, unquote(urlparts.path))
 sftp.close()
 ssh.close()
 sftpfile.seek(0, 0)
@@ -9824,7 +9492,7 @@ def download_file_from_internet_file(url, headers=geturls_headers_pyfile_python_
 return download_file_from_http_file(url, headers, usehttp)
 elif(urlparts.scheme == "ftp" or urlparts.scheme == "ftps"):
 return download_file_from_ftp_file(url)
-elif(urlparts.scheme == "sftp"):
+elif(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
 if(__use_pysftp__ and havepysftp):
 return download_file_from_pysftp_file(url)
 else:
@@ -9834,9 +9502,9 @@ def download_file_from_internet_file(url, headers=geturls_headers_pyfile_python_
 return False


-def download_file_from_internet_uncompress_file(url, headers=geturls_headers_pyfile_python_alt, formatspecs=__file_format_dict__):
+def download_file_from_internet_uncompress_file(url, headers=geturls_headers_pyfile_python_alt, filestart=0, formatspecs=__file_format_dict__):
 fp = download_file_from_internet_file(url)
-fp = UncompressFileAlt(fp, formatspecs)
+fp = UncompressFileAlt(fp, formatspecs, filestart)
 fp.seek(0, 0)
 if(not fp):
 return False
@@ -9849,7 +9517,7 @@ def download_file_from_internet_string(url, headers=geturls_headers_pyfile_pytho
 return download_file_from_http_string(url, headers)
 elif(urlparts.scheme == "ftp" or urlparts.scheme == "ftps"):
 return download_file_from_ftp_string(url)
-elif(urlparts.scheme == "sftp"):
+elif(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
 if(__use_pysftp__ and havepysftp):
 return download_file_from_pysftp_string(url)
 else:
@@ -9859,13 +9527,15 @@ def download_file_from_internet_string(url, headers=geturls_headers_pyfile_pytho
 return False


-def download_file_from_internet_uncompress_string(url, headers=geturls_headers_pyfile_python_alt, formatspecs=__file_format_dict__):
+def download_file_from_internet_uncompress_string(url, headers=geturls_headers_pyfile_python_alt, filestart=0, formatspecs=__file_format_dict__):
 fp = download_file_from_internet_string(url)
-fp = UncompressFileAlt(fp, formatspecs)
-fp.seek(0, 0)
+fp = UncompressFileAlt(fp, formatspecs, filestart)
 if(not fp):
 return False
-
+fp.seek(0, 0)
+fpout = fp.read()
+fp.close()
+return fpout


 def upload_file_to_internet_file(ifp, url):
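The string variant above now takes the filestart pass-through and returns the uncompressed bytes rather than falling through without a return. A minimal usage sketch; the URL is a placeholder and pycatfile is assumed importable:

    import pycatfile

    data = pycatfile.download_file_from_internet_uncompress_string(
        "https://www.example.com/archive.cat")
    if data is not False:
        print(len(data), "bytes downloaded and uncompressed")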
@@ -9874,7 +9544,7 @@ def upload_file_to_internet_file(ifp, url):
 return False
 elif(urlparts.scheme == "ftp" or urlparts.scheme == "ftps"):
 return upload_file_to_ftp_file(ifp, url)
-elif(urlparts.scheme == "sftp"):
+elif(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
 if(__use_pysftp__ and havepysftp):
 return upload_file_to_pysftp_file(ifp, url)
 else:
@@ -9890,8 +9560,7 @@ def upload_file_to_internet_compress_file(ifp, url, compression="auto", compress
 if(not catfileout):
 return False
 fp.seek(0, 0)
-upload_file_to_internet_file(fp, outfile)
-return True
+return upload_file_to_internet_file(fp, outfile)


 def upload_file_to_internet_string(ifp, url):
@@ -9900,7 +9569,7 @@ def upload_file_to_internet_string(ifp, url):
 return False
 elif(urlparts.scheme == "ftp" or urlparts.scheme == "ftps"):
 return upload_file_to_ftp_string(ifp, url)
-elif(urlparts.scheme == "sftp"):
+elif(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
 if(__use_pysftp__ and havepysftp):
 return upload_file_to_pysftp_string(ifp, url)
 else:
@@ -9917,5 +9586,4 @@ def upload_file_to_internet_compress_string(ifp, url, compression="auto", compre
 if(not catfileout):
 return False
 fp.seek(0, 0)
-upload_file_to_internet_file(fp, outfile)
-return True
+return upload_file_to_internet_file(fp, outfile)
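With the change above, both compress-and-upload helpers return whatever upload_file_to_internet_file() returns (a file object on success, False on failure) instead of a hard-coded True, so callers can check the outcome. A minimal sketch of that check; the URL and payload are placeholders and pycatfile is assumed importable:

    from io import BytesIO
    import pycatfile

    result = pycatfile.upload_file_to_internet_compress_file(
        BytesIO(b"example data"), "ftp://user:pass@ftp.example.com/out.cat")
    if result is False:
        print("upload failed")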