PyFoxFile 0.21.4-py3-none-any.whl → 0.22.4-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pyfoxfile-0.21.4.data → pyfoxfile-0.22.4.data}/scripts/foxfile.py +9 -9
- {pyfoxfile-0.21.4.dist-info → pyfoxfile-0.22.4.dist-info}/METADATA +1 -1
- pyfoxfile-0.22.4.dist-info/RECORD +10 -0
- pyfoxfile.py +907 -1239
- pyfoxfile-0.21.4.dist-info/RECORD +0 -10
- {pyfoxfile-0.21.4.data → pyfoxfile-0.22.4.data}/scripts/foxneofile.py +0 -0
- {pyfoxfile-0.21.4.data → pyfoxfile-0.22.4.data}/scripts/neofoxfile.py +0 -0
- {pyfoxfile-0.21.4.dist-info → pyfoxfile-0.22.4.dist-info}/WHEEL +0 -0
- {pyfoxfile-0.21.4.dist-info → pyfoxfile-0.22.4.dist-info}/licenses/LICENSE +0 -0
- {pyfoxfile-0.21.4.dist-info → pyfoxfile-0.22.4.dist-info}/top_level.txt +0 -0
- {pyfoxfile-0.21.4.dist-info → pyfoxfile-0.22.4.dist-info}/zip-safe +0 -0
pyfoxfile.py CHANGED

@@ -14,7 +14,7 @@
 Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
 Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski
 
-$FileInfo: pyfoxfile.py - Last Update: 8/
+$FileInfo: pyfoxfile.py - Last Update: 8/29/2025 Ver. 0.22.4 RC 1 - Author: cooldude2k $
 '''
 
 from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -38,6 +38,7 @@ import zipfile
 import binascii
 import platform
 from io import StringIO, BytesIO
+import posixpath as pp # POSIX-safe joins/normpaths
 try:
     from backports import tempfile
 except ImportError:
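The new posixpath alias is what the tarbomb helpers added further down rely on: unlike os.path, it joins and normalizes with forward slashes on every platform. A minimal standalone sketch (not from the package) of the behaviour being relied on:

import posixpath as pp

# posixpath always treats '/' as the separator, even on Windows,
# so archive member paths normalize the same way everywhere.
print(pp.join("/base/dir", "../etc/passwd"))   # /base/dir/../etc/passwd
print(pp.normpath("/base/dir/../etc/passwd"))  # /base/etc/passwd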
@@ -45,10 +46,10 @@ except ImportError:
 # FTP Support
 ftpssl = True
 try:
-    from ftplib import FTP, FTP_TLS
+    from ftplib import FTP, FTP_TLS, all_errors
 except ImportError:
     ftpssl = False
-    from ftplib import FTP
+    from ftplib import FTP, all_errors
 
 try:
     import ujson as json
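Importing ftplib.all_errors on both branches lets callers catch every FTP-related failure with one handler, whether or not TLS support is available. A hedged sketch of the intended usage (the host, credentials, and helper name here are placeholders, not from the package):

from ftplib import FTP, all_errors

def try_ftp_listing(host, user, password):
    # all_errors bundles ftplib's exception classes plus OSError/EOFError,
    # so one except clause covers connection, login, and transfer failures.
    try:
        ftp = FTP(host)
        ftp.login(user, password)
        names = ftp.nlst()
        ftp.quit()
        return names
    except all_errors as exc:
        print("FTP operation failed:", exc)
        return None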
@@ -102,9 +103,13 @@ baseint = tuple(baseint)
 
 # URL Parsing
 try:
-
+    # Python 3
+    from urllib.parse import urlparse, urlunparse, unquote
+    from urllib.request import url2pathname
 except ImportError:
+    # Python 2
     from urlparse import urlparse, urlunparse
+    from urllib import unquote, url2pathname
 
 # Windows-specific setup
 if os.name == "nt":
@@ -266,6 +271,8 @@ def get_default_threads():
 
 
 __use_pysftp__ = False
+__upload_proto_support__ = "^(ftp|ftps|sftp|scp)://"
+__download_proto_support__ = "^(http|https|ftp|ftps|sftp|scp)://"
 if(not havepysftp):
     __use_pysftp__ = False
 __use_http_lib__ = "httpx"
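These two module-level patterns replace the URL-scheme regex literals that were previously spelled out at each call site; the reader paths match the download variant to decide whether to fetch before parsing, and the writer paths match the upload variant. A small sketch of how such a check works (the helper name is illustrative, not part of the module):

import re

__upload_proto_support__ = "^(ftp|ftps|sftp|scp)://"
__download_proto_support__ = "^(http|https|ftp|ftps|sftp|scp)://"

def is_remote_source(path):
    # re.findall() returns a non-empty list when the URL scheme matches,
    # which is how the archive readers decide to download first.
    return bool(re.findall(__download_proto_support__, path))

print(is_remote_source("https://example.com/archive.fox"))  # True
print(is_remote_source("./local/archive.fox"))               # False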
@@ -383,13 +390,13 @@ __file_format_extension__ = __file_format_multi_dict__[__file_format_default__][
 __file_format_dict__ = __file_format_multi_dict__[__file_format_default__]
 __project__ = __program_name__
 __project_url__ = "https://github.com/GameMaker2k/PyFoxFile"
-__version_info__ = (0,
-__version_info__ = (0,
-__version_date_info__ = (2025, 9,
+__version_info__ = (0, 22, 4, "RC 1", 1)
+__version_info__ = (0, 22, 4, "RC 1", 1)
+__version_date_info__ = (2025, 9, 29, "RC 1", 1)
 __version_date__ = str(__version_date_info__[0]) + "." + str(
     __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
 __revision__ = __version_info__[3]
-__revision_id__ = "$Id:
+__revision_id__ = "$Id: c5f2e77a91df1432f3fb4213ee32b80c79427e3a $"
 if(__version_info__[4] is not None):
     __version_date_plusrc__ = __version_date__ + \
         "-" + str(__version_date_info__[4])
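For reference, the date string derived from __version_date_info__ zero-pads the month and day, so the new tuple renders as shown in this standalone sketch:

__version_date_info__ = (2025, 9, 29, "RC 1", 1)
__version_date__ = str(__version_date_info__[0]) + "." + str(
    __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
print(__version_date__)  # 2025.09.29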
@@ -619,6 +626,182 @@ def _normalize_initial_data(data, isbytes, encoding):
     return str(data)
 
 
+def _split_posix(path_text):
+    """Split POSIX paths regardless of OS; return list of components."""
+    # Normalize leading './'
+    if path_text.startswith(u'./'):
+        path_text = path_text[2:]
+    # Strip redundant slashes
+    path_text = re.sub(u'/+', u'/', path_text)
+    # Drop trailing '/' so 'dir/' -> ['dir']
+    if path_text.endswith(u'/'):
+        path_text = path_text[:-1]
+    return path_text.split(u'/') if path_text else []
+
+def _is_abs_like(s):
+    """Absolute targets (POSIX or Windows-drive style)."""
+    return s.startswith(u'/') or s.startswith(u'\\') or re.match(u'^[A-Za-z]:[/\\\\]', s)
+
+def _resolves_outside(base_rel, target_rel):
+    """
+    Given a base directory (relative, POSIX) and a target (relative),
+    return True if base/target resolves outside of base.
+    We anchor under '/' so normpath is root-anchored and portable.
+    """
+    base_clean = u'/'.join(_split_posix(base_rel))
+    target_clean = u'/'.join(_split_posix(target_rel))
+    base_abs = u'/' + base_clean if base_clean else u'/'
+    combined = pp.normpath(pp.join(base_abs, target_clean))
+    if combined == base_abs or combined.startswith(base_abs + u'/'):
+        return False
+    return True
+
+
+def DetectTarBombFoxFileArray(listarrayfiles,
+                              top_file_ratio_threshold=0.6,
+                              min_members_for_ratio=4,
+                              symlink_policy="escape-only", # 'escape-only' | 'deny' | 'single-folder-only'
+                              to_text=to_text):
+    """
+    Detect 'tarbomb-like' archives from FoxFileToArray/TarFileToArray dicts.
+
+    Parameters:
+      listarrayfiles: dict with key 'ffilelist' -> list of entries (requires 'fname')
+      top_file_ratio_threshold: float, fraction of root files considered tarbomb
+      min_members_for_ratio: int, minimum members before ratio heuristic applies
+      symlink_policy:
+         - 'escape-only': only symlinks that escape parent/are absolute are unsafe
+         - 'deny': any symlink is unsafe
+         - 'single-folder-only': symlinks allowed only if archive has a single top-level folder
+      to_text: normalization function (your provided to_text)
+
+    Returns dict with:
+      - is_tarbomb, reasons, total_members, top_level_entries, top_level_files_count,
+        has_absolute_paths, has_parent_traversal,
+        symlink_escapes_root (bool), symlink_issues (list[{entry,target,reason}])
+    """
+    files = listarrayfiles or {}
+    members = files.get('ffilelist') or []
+
+    names = []
+    has_abs = False
+    has_parent = False
+
+    # Symlink tracking
+    has_any_symlink = False
+    symlink_issues = []
+    any_symlink_escape = False
+
+    for m in members:
+        m = m or {}
+        name = to_text(m.get('fname', u""))
+
+        if _is_abs_like(name):
+            has_abs = True
+
+        parts = _split_posix(name)
+        if u'..' in parts:
+            has_parent = True
+
+        if not parts:
+            continue
+
+        norm_name = u'/'.join(parts)
+        names.append(norm_name)
+
+        # ---- Symlink detection ----
+        ftype = m.get('ftype')
+        is_symlink = (ftype == 2) or (to_text(ftype).lower() == u'symlink' if ftype is not None else False)
+        if is_symlink:
+            has_any_symlink = True
+            target = to_text(m.get('flinkname', u""))
+            # Absolute symlink target is unsafe
+            if _is_abs_like(target):
+                any_symlink_escape = True
+                symlink_issues.append({'entry': norm_name, 'target': target, 'reason': 'absolute symlink target'})
+            else:
+                parent = u'/'.join(parts[:-1]) # may be ''
+                if _resolves_outside(parent, target):
+                    any_symlink_escape = True
+                    symlink_issues.append({'entry': norm_name, 'target': target, 'reason': 'symlink escapes parent directory'})
+
+    total = len(names)
+    reasons = []
+    if total == 0:
+        return {
+            "is_tarbomb": False,
+            "reasons": ["archive contains no members"],
+            "total_members": 0,
+            "top_level_entries": [],
+            "top_level_files_count": 0,
+            "has_absolute_paths": has_abs,
+            "has_parent_traversal": has_parent,
+            "symlink_escapes_root": any_symlink_escape,
+            "symlink_issues": symlink_issues,
+        }
+
+    # Layout counts
+    top_counts = {}
+    top_level_files_count = 0
+    for name in names:
+        parts = name.split(u'/')
+        first = parts[0]
+        top_counts[first] = top_counts.get(first, 0) + 1
+        if len(parts) == 1: # directly at archive root
+            top_level_files_count += 1
+
+    top_keys = sorted(top_counts.keys())
+    is_tarbomb = False
+
+    # Path-based dangers
+    if has_abs:
+        is_tarbomb = True
+        reasons.append("contains absolute paths (dangerous)")
+    if has_parent:
+        is_tarbomb = True
+        reasons.append("contains parent-traversal ('..') entries (dangerous)")
+    if any_symlink_escape:
+        is_tarbomb = True
+        reasons.append("contains symlinks that escape their parent directory")
+
+    # Symlink policy enforcement
+    if symlink_policy == "deny" and has_any_symlink:
+        is_tarbomb = True
+        reasons.append("symlinks present and policy is 'deny'")
+    elif symlink_policy == "single-folder-only" and has_any_symlink and len(top_keys) != 1:
+        is_tarbomb = True
+        reasons.append("symlinks present but archive lacks a single top-level folder")
+
+    # Tarbomb layout heuristics
+    if len(top_keys) == 1:
+        reasons.append("single top-level entry '{0}'".format(top_keys[0]))
+    else:
+        ratio = float(top_level_files_count) / float(total)
+        if total >= min_members_for_ratio and ratio > float(top_file_ratio_threshold):
+            is_tarbomb = True
+            reasons.append("high fraction of members ({0:.0%}) at archive root".format(ratio))
+        else:
+            max_bucket = max(top_counts.values()) if top_counts else 0
+            if max_bucket < total * 0.9:
+                is_tarbomb = True
+                reasons.append("multiple top-level entries with no dominant folder: {0}".format(
+                    u", ".join(top_keys[:10])))
+            else:
+                reasons.append("multiple top-level entries but one dominates")
+
+    return {
+        "is_tarbomb": bool(is_tarbomb),
+        "reasons": reasons,
+        "total_members": total,
+        "top_level_entries": top_keys,
+        "top_level_files_count": top_level_files_count,
+        "has_absolute_paths": has_abs,
+        "has_parent_traversal": has_parent,
+        "symlink_escapes_root": any_symlink_escape,
+        "symlink_issues": symlink_issues,
+    }
+
+
 def MkTempFile(data=None, inmem=__use_inmemfile__, isbytes=True, prefix=__project__,
                delete=True, encoding="utf-8"):
     """
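A hedged usage sketch for the new detector; the member dicts below are hand-made stand-ins for what FoxFileToArray/TarFileToArray would return, the import path assumes the installed top-level module name, and to_text is replaced with str since the real normalizer lives elsewhere in the module:

from pyfoxfile import DetectTarBombFoxFileArray

listing = {
    'ffilelist': [
        {'fname': 'README.txt', 'ftype': 0},
        {'fname': 'setup.py', 'ftype': 0},
        {'fname': 'src/main.py', 'ftype': 0},
        # ftype 2 marks a symlink in the detector's check
        {'fname': 'evil', 'ftype': 2, 'flinkname': '../../etc/passwd'},
    ]
}

report = DetectTarBombFoxFileArray(listing, to_text=str)
print(report['is_tarbomb'])  # True: root-heavy layout plus an escaping symlink
for issue in report['symlink_issues']:
    print(issue['entry'], '->', issue['target'], '(', issue['reason'], ')')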
@@ -685,6 +868,13 @@ def RemoveWindowsPath(dpath):
     """
     if not dpath:
         return ""
+    if re.match("^file://", dpath, re.IGNORECASE):
+        # Normalize to file:/// if it's a local path (no host)
+        if dpath.lower().startswith("file://") and not dpath.lower().startswith("file:///"):
+            # insert the extra slash
+            dpath = "file:///" + dpath[7:]
+        dparsed = urlparse(dpath)
+        dpath = url2pathname(dparsed.path)
     # Accept bytes and decode safely
     if isinstance(dpath, (bytes, bytearray)):
         dpath = dpath.decode("utf-8", "ignore")
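The same seven-line block is also added to NormalizeRelativePath, ListDir, and ListDirAdvanced in the hunks that follow. The core conversion it performs can be seen in isolation in this sketch, assuming Python 3 stdlib behaviour:

import re
from urllib.parse import urlparse
from urllib.request import url2pathname

dpath = "file://home/user/archive.fox"      # two slashes: host-less local URL
if re.match("^file://", dpath, re.IGNORECASE):
    if not dpath.lower().startswith("file:///"):
        dpath = "file:///" + dpath[7:]       # promote to the canonical file:/// form
    dpath = url2pathname(urlparse(dpath).path)
print(dpath)                                  # /home/user/archive.fox (on POSIX)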
@@ -700,6 +890,13 @@ def NormalizeRelativePath(inpath):
     """
     Ensures the path is relative unless it is absolute. Prepares consistent relative paths.
     """
+    if re.match("^file://", inpath, re.IGNORECASE):
+        # Normalize to file:/// if it's a local path (no host)
+        if inpath.lower().startswith("file://") and not inpath.lower().startswith("file:///"):
+            # insert the extra slash
+            inpath = "file:///" + inpath[7:]
+        dparsed = urlparse(inpath)
+        inpath = url2pathname(dparsed.path)
     inpath = RemoveWindowsPath(inpath)
     if os.path.isabs(inpath):
         outpath = inpath
@@ -756,6 +953,13 @@ def ListDir(dirpath, followlink=False, duplicates=False, include_regex=None, exc
     include_pattern = re.compile(include_regex) if include_regex else None
     exclude_pattern = re.compile(exclude_regex) if exclude_regex else None
     for mydirfile in dirpath:
+        if re.match("^file://", mydirfile, re.IGNORECASE):
+            # Normalize to file:/// if it's a local path (no host)
+            if mydirfile.lower().startswith("file://") and not mydirfile.lower().startswith("file:///"):
+                # insert the extra slash
+                mydirfile = "file:///" + mydirfile[7:]
+            dparsed = urlparse(mydirfile)
+            mydirfile = url2pathname(dparsed.path)
         if not os.path.exists(mydirfile):
             return False
         mydirfile = NormalizeRelativePath(mydirfile)
@@ -826,6 +1030,13 @@ def ListDirAdvanced(dirpath, followlink=False, duplicates=False, include_regex=N
     include_pattern = re.compile(include_regex) if include_regex else None
     exclude_pattern = re.compile(exclude_regex) if exclude_regex else None
     for mydirfile in dirpath:
+        if re.match("^file://", mydirfile, re.IGNORECASE):
+            # Normalize to file:/// if it's a local path (no host)
+            if mydirfile.lower().startswith("file://") and not mydirfile.lower().startswith("file:///"):
+                # insert the extra slash
+                mydirfile = "file:///" + mydirfile[7:]
+            dparsed = urlparse(mydirfile)
+            mydirfile = url2pathname(dparsed.path)
         if not os.path.exists(mydirfile):
             return False
         mydirfile = NormalizeRelativePath(mydirfile)
@@ -2035,7 +2246,7 @@ def ReadFileHeaderDataWoSize(fp, delimiter=__file_format_dict__['format_delimite
     if(headersize <= 0 or headernumfields <= 0):
         return []
     headerdata = ReadTillNullByteByNum(fp, delimiter, headernumfields)
-    #headerdata = ReadFileHeaderData(fp, headernumfields, delimiter)
+    #headerdata = ReadFileHeaderData(fp, headernumfields, delimiter)
     HeaderOut = preheaderdata + headerdata
     return HeaderOut
 
@@ -2522,22 +2733,20 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
     return outlist
 
 
-def ReadFileDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
     if(not hasattr(fp, "read")):
         return False
     delimiter = formatspecs['format_delimiter']
-    curloc =
+    curloc = filestart
     try:
-        fp.seek(0, 2)
+        fp.seek(0, 2)
     except OSError:
-        SeekToEndOfFile(fp)
+        SeekToEndOfFile(fp)
     except ValueError:
-        SeekToEndOfFile(fp)
-    CatSize = fp.tell()
-    CatSizeEnd = CatSize
+        SeekToEndOfFile(fp)
+    CatSize = fp.tell()
+    CatSizeEnd = CatSize
     fp.seek(curloc, 0)
-    if(curloc > 0):
-        fp.seek(0, 0)
     inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
     formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
     formdelszie = len(formatspecs['format_delimiter'])
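The new filestart parameter, threaded through this function and the readers that follow, lets the parser treat an archive beginning at an arbitrary byte offset (for example one appended to another file) as if it started there, instead of always rewinding to offset 0. A hedged sketch of the calling pattern, with a made-up container file and offset:

# Hypothetical: a FoxFile archive embedded 512 bytes into a container file.
with open("container.bin", "rb") as fp:
    entries = ReadFileDataWithContentToArray(fp, filestart=512, listonly=True)
    if entries is not False:
        print(entries['fnumfiles'], "members, archive starts at", entries['ffilestart'])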
@@ -2552,8 +2761,6 @@ def ReadFileDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=Fa
     else:
         inheader = ReadFileHeaderDataWoSize(
             fp, formatspecs['format_delimiter'])
-    if(curloc > 0):
-        fp.seek(curloc, 0)
     fprechecksumtype = inheader[-2]
     fprechecksum = inheader[-1]
     headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
@@ -2577,22 +2784,20 @@ def ReadFileDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=Fa
|
|
|
2577
2784
|
return flist
|
|
2578
2785
|
|
|
2579
2786
|
|
|
2580
|
-
def ReadFileDataWithContentToArray(fp, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
|
|
2787
|
+
def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
|
|
2581
2788
|
if(not hasattr(fp, "read")):
|
|
2582
2789
|
return False
|
|
2583
2790
|
delimiter = formatspecs['format_delimiter']
|
|
2584
|
-
curloc =
|
|
2791
|
+
curloc = filestart
|
|
2585
2792
|
try:
|
|
2586
|
-
fp.seek(0, 2)
|
|
2793
|
+
fp.seek(0, 2)
|
|
2587
2794
|
except OSError:
|
|
2588
|
-
SeekToEndOfFile(fp)
|
|
2795
|
+
SeekToEndOfFile(fp)
|
|
2589
2796
|
except ValueError:
|
|
2590
|
-
SeekToEndOfFile(fp)
|
|
2591
|
-
CatSize = fp.tell()
|
|
2592
|
-
CatSizeEnd = CatSize
|
|
2797
|
+
SeekToEndOfFile(fp)
|
|
2798
|
+
CatSize = fp.tell()
|
|
2799
|
+
CatSizeEnd = CatSize
|
|
2593
2800
|
fp.seek(curloc, 0)
|
|
2594
|
-
if(curloc > 0):
|
|
2595
|
-
fp.seek(0, 0)
|
|
2596
2801
|
inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
|
|
2597
2802
|
formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
|
|
2598
2803
|
formdelszie = len(formatspecs['format_delimiter'])
|
|
@@ -2624,8 +2829,6 @@ def ReadFileDataWithContentToArray(fp, seekstart=0, seekend=0, listonly=False, c
|
|
|
2624
2829
|
fextrafieldslist = json.loads(fextrafieldslist[0])
|
|
2625
2830
|
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
|
|
2626
2831
|
pass
|
|
2627
|
-
if(curloc > 0):
|
|
2628
|
-
fp.seek(curloc, 0)
|
|
2629
2832
|
formversion = re.findall("([\\d]+)", formstring)
|
|
2630
2833
|
fheadsize = int(inheader[0], 16)
|
|
2631
2834
|
fnumfields = int(inheader[1], 16)
|
|
@@ -2644,7 +2847,7 @@ def ReadFileDataWithContentToArray(fp, seekstart=0, seekend=0, listonly=False, c
|
|
|
2644
2847
|
return False
|
|
2645
2848
|
formversions = re.search('(.*?)(\\d+)', formstring).groups()
|
|
2646
2849
|
fcompresstype = ""
|
|
2647
|
-
outlist = {'fnumfiles': fnumfiles, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
|
|
2850
|
+
outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
|
|
2648
2851
|
if (seekstart < 0) or (seekstart > fnumfiles):
|
|
2649
2852
|
seekstart = 0
|
|
2650
2853
|
if (seekend == 0) or (seekend > fnumfiles) or (seekend < seekstart):
|
|
@@ -2737,22 +2940,20 @@ def ReadFileDataWithContentToArray(fp, seekstart=0, seekend=0, listonly=False, c
|
|
|
2737
2940
|
return outlist
|
|
2738
2941
|
|
|
2739
2942
|
|
|
2740
|
-
def ReadFileDataWithContentToList(fp, seekstart=0, seekend=0, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
|
|
2943
|
+
def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
|
|
2741
2944
|
if(not hasattr(fp, "read")):
|
|
2742
2945
|
return False
|
|
2743
2946
|
delimiter = formatspecs['format_delimiter']
|
|
2744
|
-
curloc =
|
|
2947
|
+
curloc = filestart
|
|
2745
2948
|
try:
|
|
2746
|
-
fp.seek(0, 2)
|
|
2949
|
+
fp.seek(0, 2)
|
|
2747
2950
|
except OSError:
|
|
2748
|
-
SeekToEndOfFile(fp)
|
|
2951
|
+
SeekToEndOfFile(fp)
|
|
2749
2952
|
except ValueError:
|
|
2750
|
-
SeekToEndOfFile(fp)
|
|
2751
|
-
CatSize = fp.tell()
|
|
2752
|
-
CatSizeEnd = CatSize
|
|
2953
|
+
SeekToEndOfFile(fp)
|
|
2954
|
+
CatSize = fp.tell()
|
|
2955
|
+
CatSizeEnd = CatSize
|
|
2753
2956
|
fp.seek(curloc, 0)
|
|
2754
|
-
if(curloc > 0):
|
|
2755
|
-
fp.seek(0, 0)
|
|
2756
2957
|
inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
|
|
2757
2958
|
formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
|
|
2758
2959
|
formdelszie = len(formatspecs['format_delimiter'])
|
|
@@ -2784,8 +2985,6 @@ def ReadFileDataWithContentToList(fp, seekstart=0, seekend=0, listonly=False, co
|
|
|
2784
2985
|
fextrafieldslist = json.loads(fextrafieldslist[0])
|
|
2785
2986
|
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
|
|
2786
2987
|
pass
|
|
2787
|
-
if(curloc > 0):
|
|
2788
|
-
fp.seek(curloc, 0)
|
|
2789
2988
|
formversion = re.findall("([\\d]+)", formstring)
|
|
2790
2989
|
fheadsize = int(inheader[0], 16)
|
|
2791
2990
|
fnumfields = int(inheader[1], 16)
|
|
@@ -2903,25 +3102,25 @@ def ReadFileDataWithContentToList(fp, seekstart=0, seekend=0, listonly=False, co
|
|
|
2903
3102
|
return outlist
|
|
2904
3103
|
|
|
2905
3104
|
|
|
2906
|
-
def ReadInFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
|
|
3105
|
+
def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
|
|
2907
3106
|
if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
|
|
2908
3107
|
formatspecs = formatspecs[fmttype]
|
|
2909
3108
|
elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
|
|
2910
3109
|
fmttype = "auto"
|
|
2911
3110
|
if(hasattr(infile, "read") or hasattr(infile, "write")):
|
|
2912
3111
|
fp = infile
|
|
2913
|
-
fp.seek(
|
|
2914
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
3112
|
+
fp.seek(filestart, 0)
|
|
3113
|
+
compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
|
|
2915
3114
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
2916
3115
|
formatspecs = formatspecs[compresscheck]
|
|
2917
3116
|
else:
|
|
2918
|
-
fp.seek(
|
|
2919
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
|
|
3117
|
+
fp.seek(filestart, 0)
|
|
3118
|
+
checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
|
|
2920
3119
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
2921
3120
|
formatspecs = formatspecs[checkcompressfile]
|
|
2922
|
-
fp.seek(
|
|
2923
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
2924
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
|
|
3121
|
+
fp.seek(filestart, 0)
|
|
3122
|
+
fp = UncompressFileAlt(fp, formatspecs, filestart)
|
|
3123
|
+
checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
|
|
2925
3124
|
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
2926
3125
|
return TarFileToArray(infile, seekstart, seekend, listonly, contentasfile, skipchecksum, formatspecs, seektoend, True)
|
|
2927
3126
|
elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
|
|
@@ -2956,58 +3155,58 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0,
|
|
|
2956
3155
|
compresscheck = "zlib"
|
|
2957
3156
|
else:
|
|
2958
3157
|
return False
|
|
2959
|
-
fp.seek(
|
|
3158
|
+
fp.seek(filestart, 0)
|
|
2960
3159
|
elif(infile == "-"):
|
|
2961
3160
|
fp = MkTempFile()
|
|
2962
3161
|
if(hasattr(sys.stdin, "buffer")):
|
|
2963
3162
|
shutil.copyfileobj(sys.stdin.buffer, fp)
|
|
2964
3163
|
else:
|
|
2965
3164
|
shutil.copyfileobj(sys.stdin, fp)
|
|
2966
|
-
fp.seek(
|
|
2967
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
2968
|
-
fp.seek(
|
|
2969
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
3165
|
+
fp.seek(filestart, 0)
|
|
3166
|
+
fp = UncompressFileAlt(fp, formatspecs, filestart)
|
|
3167
|
+
fp.seek(filestart, 0)
|
|
3168
|
+
compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
|
|
2970
3169
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
2971
3170
|
formatspecs = formatspecs[compresscheck]
|
|
2972
3171
|
else:
|
|
2973
|
-
fp.seek(
|
|
2974
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
|
|
3172
|
+
fp.seek(filestart, 0)
|
|
3173
|
+
checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
|
|
2975
3174
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
2976
3175
|
formatspecs = formatspecs[checkcompressfile]
|
|
2977
|
-
fp.seek(
|
|
3176
|
+
fp.seek(filestart, 0)
|
|
2978
3177
|
if(not fp):
|
|
2979
3178
|
return False
|
|
2980
|
-
fp.seek(
|
|
3179
|
+
fp.seek(filestart, 0)
|
|
2981
3180
|
elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
|
|
2982
3181
|
fp = MkTempFile()
|
|
2983
3182
|
fp.write(infile)
|
|
2984
|
-
fp.seek(
|
|
2985
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
2986
|
-
fp.seek(
|
|
2987
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
3183
|
+
fp.seek(filestart, 0)
|
|
3184
|
+
fp = UncompressFileAlt(fp, formatspecs, filestart)
|
|
3185
|
+
fp.seek(filestart, 0)
|
|
3186
|
+
compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
|
|
2988
3187
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
2989
3188
|
formatspecs = formatspecs[compresscheck]
|
|
2990
3189
|
else:
|
|
2991
|
-
fp.seek(
|
|
2992
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
|
|
3190
|
+
fp.seek(filestart, 0)
|
|
3191
|
+
checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
|
|
2993
3192
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
2994
3193
|
formatspecs = formatspecs[checkcompressfile]
|
|
2995
|
-
fp.seek(
|
|
3194
|
+
fp.seek(filestart, 0)
|
|
2996
3195
|
if(not fp):
|
|
2997
3196
|
return False
|
|
2998
|
-
fp.seek(
|
|
2999
|
-
elif(re.findall(
|
|
3197
|
+
fp.seek(filestart, 0)
|
|
3198
|
+
elif(re.findall(__download_proto_support__, infile)):
|
|
3000
3199
|
fp = download_file_from_internet_file(infile)
|
|
3001
|
-
fp.seek(
|
|
3002
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
3200
|
+
fp.seek(filestart, 0)
|
|
3201
|
+
compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
|
|
3003
3202
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
3004
3203
|
formatspecs = formatspecs[compresscheck]
|
|
3005
3204
|
else:
|
|
3006
|
-
fp.seek(
|
|
3007
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
|
|
3205
|
+
fp.seek(filestart, 0)
|
|
3206
|
+
checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
|
|
3008
3207
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
3009
3208
|
formatspecs = formatspecs[checkcompressfile]
|
|
3010
|
-
fp.seek(
|
|
3209
|
+
fp.seek(filestart, 0)
|
|
3011
3210
|
if(not compresscheck):
|
|
3012
3211
|
fextname = os.path.splitext(infile)[1]
|
|
3013
3212
|
if(fextname == ".gz"):
|
|
@@ -3028,14 +3227,14 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0,
|
|
|
3028
3227
|
compresscheck = "zlib"
|
|
3029
3228
|
else:
|
|
3030
3229
|
return False
|
|
3031
|
-
fp.seek(
|
|
3032
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
3230
|
+
fp.seek(filestart, 0)
|
|
3231
|
+
fp = UncompressFileAlt(fp, formatspecs, filestart)
|
|
3033
3232
|
if(not fp):
|
|
3034
3233
|
return False
|
|
3035
|
-
fp.seek(
|
|
3234
|
+
fp.seek(filestart, 0)
|
|
3036
3235
|
else:
|
|
3037
3236
|
infile = RemoveWindowsPath(infile)
|
|
3038
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
3237
|
+
checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
|
|
3039
3238
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
3040
3239
|
formatspecs = formatspecs[checkcompressfile]
|
|
3041
3240
|
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
@@ -3050,7 +3249,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0,
|
|
|
3050
3249
|
return False
|
|
3051
3250
|
elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
|
|
3052
3251
|
return False
|
|
3053
|
-
compresscheck = CheckCompressionType(infile, formatspecs, True)
|
|
3252
|
+
compresscheck = CheckCompressionType(infile, formatspecs, filestart, True)
|
|
3054
3253
|
if(not compresscheck):
|
|
3055
3254
|
fextname = os.path.splitext(infile)[1]
|
|
3056
3255
|
if(fextname == ".gz"):
|
|
@@ -3073,43 +3272,43 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0,
|
|
|
3073
3272
|
return False
|
|
3074
3273
|
if(not compresscheck):
|
|
3075
3274
|
return False
|
|
3076
|
-
fp = UncompressFile(infile, formatspecs, "rb")
|
|
3077
|
-
return ReadFileDataWithContentToArray(fp, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
|
|
3275
|
+
fp = UncompressFile(infile, formatspecs, "rb", filestart)
|
|
3276
|
+
return ReadFileDataWithContentToArray(fp, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
|
|
3078
3277
|
|
|
3079
3278
|
|
|
3080
|
-
def ReadInMultipleFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
|
|
3279
|
+
def ReadInMultipleFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
|
|
3081
3280
|
if(isinstance(infile, (list, tuple, ))):
|
|
3082
3281
|
pass
|
|
3083
3282
|
else:
|
|
3084
3283
|
infile = [infile]
|
|
3085
3284
|
outretval = {}
|
|
3086
3285
|
for curfname in infile:
|
|
3087
|
-
outretval[curfname] = ReadInFileWithContentToArray(curfname, fmttype, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
|
|
3286
|
+
outretval[curfname] = ReadInFileWithContentToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
|
|
3088
3287
|
return outretval
|
|
3089
3288
|
|
|
3090
|
-
def ReadInMultipleFilesWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
|
|
3091
|
-
return ReadInMultipleFileWithContentToArray(infile, fmttype, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
|
|
3289
|
+
def ReadInMultipleFilesWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
|
|
3290
|
+
return ReadInMultipleFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
|
|
3092
3291
|
|
|
3093
3292
|
|
|
3094
|
-
def ReadInFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
|
|
3293
|
+
def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
|
|
3095
3294
|
if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
|
|
3096
3295
|
formatspecs = formatspecs[fmttype]
|
|
3097
3296
|
elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
|
|
3098
3297
|
fmttype = "auto"
|
|
3099
3298
|
if(hasattr(infile, "read") or hasattr(infile, "write")):
|
|
3100
3299
|
fp = infile
|
|
3101
|
-
fp.seek(
|
|
3102
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
3300
|
+
fp.seek(filestart, 0)
|
|
3301
|
+
compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
|
|
3103
3302
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
3104
3303
|
formatspecs = formatspecs[compresscheck]
|
|
3105
3304
|
else:
|
|
3106
|
-
fp.seek(
|
|
3107
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
|
|
3305
|
+
fp.seek(filestart, 0)
|
|
3306
|
+
checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
|
|
3108
3307
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
3109
3308
|
formatspecs = formatspecs[checkcompressfile]
|
|
3110
|
-
fp.seek(
|
|
3111
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
3112
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
|
|
3309
|
+
fp.seek(filestart, 0)
|
|
3310
|
+
fp = UncompressFileAlt(fp, formatspecs, filestart)
|
|
3311
|
+
checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
|
|
3113
3312
|
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
3114
3313
|
return TarFileToArray(infile, seekstart, seekend, listonly, contentasfile, skipchecksum, formatspecs, seektoend, True)
|
|
3115
3314
|
elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
|
|
@@ -3144,58 +3343,58 @@ def ReadInFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0,
|
|
|
3144
3343
|
compresscheck = "zlib"
|
|
3145
3344
|
else:
|
|
3146
3345
|
return False
|
|
3147
|
-
fp.seek(
|
|
3346
|
+
fp.seek(filestart, 0)
|
|
3148
3347
|
elif(infile == "-"):
|
|
3149
3348
|
fp = MkTempFile()
|
|
3150
3349
|
if(hasattr(sys.stdin, "buffer")):
|
|
3151
3350
|
shutil.copyfileobj(sys.stdin.buffer, fp)
|
|
3152
3351
|
else:
|
|
3153
3352
|
shutil.copyfileobj(sys.stdin, fp)
|
|
3154
|
-
fp.seek(
|
|
3155
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
3156
|
-
fp.seek(
|
|
3157
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
3353
|
+
fp.seek(filestart, 0)
|
|
3354
|
+
fp = UncompressFileAlt(fp, formatspecs, filestart)
|
|
3355
|
+
fp.seek(filestart, 0)
|
|
3356
|
+
compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
|
|
3158
3357
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
3159
3358
|
formatspecs = formatspecs[compresscheck]
|
|
3160
3359
|
else:
|
|
3161
|
-
fp.seek(
|
|
3162
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
|
|
3360
|
+
fp.seek(filestart, 0)
|
|
3361
|
+
checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
|
|
3163
3362
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
3164
3363
|
formatspecs = formatspecs[checkcompressfile]
|
|
3165
|
-
fp.seek(
|
|
3364
|
+
fp.seek(filestart, 0)
|
|
3166
3365
|
if(not fp):
|
|
3167
3366
|
return False
|
|
3168
|
-
fp.seek(
|
|
3367
|
+
fp.seek(filestart, 0)
|
|
3169
3368
|
elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
|
|
3170
3369
|
fp = MkTempFile()
|
|
3171
3370
|
fp.write(infile)
|
|
3172
|
-
fp.seek(
|
|
3173
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
3174
|
-
fp.seek(
|
|
3175
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
3371
|
+
fp.seek(filestart, 0)
|
|
3372
|
+
fp = UncompressFileAlt(fp, formatspecs, filestart)
|
|
3373
|
+
fp.seek(filestart, 0)
|
|
3374
|
+
compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
|
|
3176
3375
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
3177
3376
|
formatspecs = formatspecs[compresscheck]
|
|
3178
3377
|
else:
|
|
3179
|
-
fp.seek(
|
|
3180
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
|
|
3378
|
+
fp.seek(filestart, 0)
|
|
3379
|
+
checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
|
|
3181
3380
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
3182
3381
|
formatspecs = formatspecs[checkcompressfile]
|
|
3183
|
-
fp.seek(
|
|
3382
|
+
fp.seek(filestart, 0)
|
|
3184
3383
|
if(not fp):
|
|
3185
3384
|
return False
|
|
3186
|
-
fp.seek(
|
|
3187
|
-
elif(re.findall(
|
|
3385
|
+
fp.seek(filestart, 0)
|
|
3386
|
+
elif(re.findall(__download_proto_support__, infile)):
|
|
3188
3387
|
fp = download_file_from_internet_file(infile)
|
|
3189
|
-
fp.seek(
|
|
3190
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
3388
|
+
fp.seek(filestart, 0)
|
|
3389
|
+
compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
|
|
3191
3390
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
3192
3391
|
formatspecs = formatspecs[compresscheck]
|
|
3193
3392
|
else:
|
|
3194
|
-
fp.seek(
|
|
3195
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
|
|
3393
|
+
fp.seek(filestart, 0)
|
|
3394
|
+
checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
|
|
3196
3395
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
3197
3396
|
formatspecs = formatspecs[checkcompressfile]
|
|
3198
|
-
fp.seek(
|
|
3397
|
+
fp.seek(filestart, 0)
|
|
3199
3398
|
if(not compresscheck):
|
|
3200
3399
|
fextname = os.path.splitext(infile)[1]
|
|
3201
3400
|
if(fextname == ".gz"):
|
|
@@ -3216,14 +3415,14 @@ def ReadInFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0,
|
|
|
3216
3415
|
compresscheck = "zlib"
|
|
3217
3416
|
else:
|
|
3218
3417
|
return False
|
|
3219
|
-
fp.seek(
|
|
3220
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
3418
|
+
fp.seek(filestart, 0)
|
|
3419
|
+
fp = UncompressFileAlt(fp, formatspecs, filestart)
|
|
3221
3420
|
if(not fp):
|
|
3222
3421
|
return False
|
|
3223
|
-
fp.seek(
|
|
3422
|
+
fp.seek(filestart, 0)
|
|
3224
3423
|
else:
|
|
3225
3424
|
infile = RemoveWindowsPath(infile)
|
|
3226
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
3425
|
+
checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
|
|
3227
3426
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
3228
3427
|
formatspecs = formatspecs[checkcompressfile]
|
|
3229
3428
|
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
@@ -3238,7 +3437,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0,
|
|
|
3238
3437
|
return False
|
|
3239
3438
|
elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
|
|
3240
3439
|
return False
|
|
3241
|
-
compresscheck = CheckCompressionType(infile, formatspecs, True)
|
|
3440
|
+
compresscheck = CheckCompressionType(infile, formatspecs, filestart, True)
|
|
3242
3441
|
if(not compresscheck):
|
|
3243
3442
|
fextname = os.path.splitext(infile)[1]
|
|
3244
3443
|
if(fextname == ".gz"):
|
|
@@ -3261,22 +3460,22 @@ def ReadInFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0,
|
|
|
3261
3460
|
return False
|
|
3262
3461
|
if(not compresscheck):
|
|
3263
3462
|
return False
|
|
3264
|
-
fp = UncompressFile(infile, formatspecs, "rb")
|
|
3265
|
-
return ReadFileDataWithContentToList(fp, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
|
|
3463
|
+
fp = UncompressFile(infile, formatspecs, "rb", filestart)
|
|
3464
|
+
return ReadFileDataWithContentToList(fp, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
|
|
3266
3465
|
|
|
3267
3466
|
|
|
3268
|
-
def ReadInMultipleFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
|
|
3467
|
+
def ReadInMultipleFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
|
|
3269
3468
|
if(isinstance(infile, (list, tuple, ))):
|
|
3270
3469
|
pass
|
|
3271
3470
|
else:
|
|
3272
3471
|
infile = [infile]
|
|
3273
3472
|
outretval = {}
|
|
3274
3473
|
for curfname in infile:
|
|
3275
|
-
curretfile[curfname] = ReadInFileWithContentToList(curfname, fmttype, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
|
|
3474
|
+
curretfile[curfname] = ReadInFileWithContentToList(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
|
|
3276
3475
|
return outretval
|
|
3277
3476
|
|
|
3278
|
-
def ReadInMultipleFilesWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
|
|
3279
|
-
return ReadInMultipleFileWithContentToList(infile, fmttype, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
|
|
3477
|
+
def ReadInMultipleFilesWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
|
|
3478
|
+
return ReadInMultipleFileWithContentToList(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
|
|
3280
3479
|
|
|
3281
3480
|
|
|
3282
3481
|
def AppendNullByte(indata, delimiter=__file_format_dict__['format_delimiter']):
|
|
@@ -3405,7 +3604,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
|
|
|
3405
3604
|
fp = MkTempFile()
|
|
3406
3605
|
elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
|
|
3407
3606
|
fp = outfile
|
|
3408
|
-
elif(re.findall(
|
|
3607
|
+
elif(re.findall(__upload_proto_support__, outfile)):
|
|
3409
3608
|
fp = MkTempFile()
|
|
3410
3609
|
else:
|
|
3411
3610
|
fbasename = os.path.splitext(outfile)[0]
|
|
@@ -3441,7 +3640,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
|
|
|
3441
3640
|
outvar = fp.read()
|
|
3442
3641
|
fp.close()
|
|
3443
3642
|
return outvar
|
|
3444
|
-
elif(re.findall(
|
|
3643
|
+
elif(re.findall(__upload_proto_support__, outfile)):
|
|
3445
3644
|
fp = CompressOpenFileAlt(
|
|
3446
3645
|
fp, compression, compressionlevel, compressionuselist, formatspecs)
|
|
3447
3646
|
fp.seek(0, 0)
|
|
@@ -3745,9 +3944,9 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, filevalues=[], ext
|
|
|
3745
3944
|
if not followlink and ftype in data_types:
|
|
3746
3945
|
with open(fname, "rb") as fpc:
|
|
3747
3946
|
shutil.copyfileobj(fpc, fcontents)
|
|
3748
|
-
typechecktest = CheckCompressionType(fcontents, closefp=False)
|
|
3947
|
+
typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
|
|
3749
3948
|
fcontents.seek(0, 0)
|
|
3750
|
-
fcencoding = GetFileEncoding(fcontents, False)
|
|
3949
|
+
fcencoding = GetFileEncoding(fcontents, 0, False)
|
|
3751
3950
|
if(typechecktest is False and not compresswholefile):
|
|
3752
3951
|
fcontents.seek(0, 2)
|
|
3753
3952
|
ucfsize = fcontents.tell()
|
|
@@ -3792,9 +3991,9 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, filevalues=[], ext
|
|
|
3792
3991
|
flstatinfo = os.stat(flinkname)
|
|
3793
3992
|
with open(flinkname, "rb") as fpc:
|
|
3794
3993
|
shutil.copyfileobj(fpc, fcontents)
|
|
3795
|
-
typechecktest = CheckCompressionType(fcontents, closefp=False)
|
|
3994
|
+
typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
|
|
3796
3995
|
fcontents.seek(0, 0)
|
|
3797
|
-
fcencoding = GetFileEncoding(fcontents, False)
|
|
3996
|
+
fcencoding = GetFileEncoding(fcontents, 0, False)
|
|
3798
3997
|
if(typechecktest is False and not compresswholefile):
|
|
3799
3998
|
fcontents.seek(0, 2)
|
|
3800
3999
|
ucfsize = fcontents.tell()
|
|
@@ -3904,7 +4103,7 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, filevalues=[], extr
|
|
|
3904
4103
|
fheaderchecksumtype = curfname[26]
|
|
3905
4104
|
fcontentchecksumtype = curfname[27]
|
|
3906
4105
|
fcontents = curfname[28]
|
|
3907
|
-
fencoding = GetFileEncoding(fcontents, False)
|
|
4106
|
+
fencoding = GetFileEncoding(fcontents, 0, False)
|
|
3908
4107
|
tmpoutlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize,
|
|
3909
4108
|
fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev, fdev_minor, fdev_major, fseeknextfile]
|
|
3910
4109
|
fcontents.seek(0, 0)
|
|
@@ -3954,7 +4153,7 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
|
|
|
3954
4153
|
fp = MkTempFile()
|
|
3955
4154
|
elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
|
|
3956
4155
|
fp = outfile
|
|
3957
|
-
elif(re.findall(
|
|
4156
|
+
elif(re.findall(__upload_proto_support__, outfile)):
|
|
3958
4157
|
fp = MkTempFile()
|
|
3959
4158
|
else:
|
|
3960
4159
|
fbasename = os.path.splitext(outfile)[0]
|
|
@@ -3991,7 +4190,7 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
|
|
|
3991
4190
|
outvar = fp.read()
|
|
3992
4191
|
fp.close()
|
|
3993
4192
|
return outvar
|
|
3994
|
-
elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
|
|
4193
|
+
elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
|
|
3995
4194
|
fp = CompressOpenFileAlt(
|
|
3996
4195
|
fp, compression, compressionlevel, compressionuselist, formatspecs)
|
|
3997
4196
|
fp.seek(0, 0)
|
|
@@ -4034,7 +4233,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
|
|
|
4034
4233
|
fp = MkTempFile()
|
|
4035
4234
|
elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
|
|
4036
4235
|
fp = outfile
|
|
4037
|
-
elif(re.findall(
|
|
4236
|
+
elif(re.findall(__upload_proto_support__, outfile)):
|
|
4038
4237
|
fp = MkTempFile()
|
|
4039
4238
|
else:
|
|
4040
4239
|
fbasename = os.path.splitext(outfile)[0]
|
|
@@ -4071,7 +4270,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
|
|
|
4071
4270
|
outvar = fp.read()
|
|
4072
4271
|
fp.close()
|
|
4073
4272
|
return outvar
|
|
4074
|
-
elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
|
|
4273
|
+
elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
|
|
4075
4274
|
fp = CompressOpenFileAlt(
|
|
4076
4275
|
fp, compression, compressionlevel, compressionuselist, formatspecs)
|
|
4077
4276
|
fp.seek(0, 0)
|
|
@@ -4155,7 +4354,8 @@ def GzipCompressData(data, compresslevel=9):
     out = MkTempFile()
     with gzip.GzipFile(fileobj=out, mode="wb", compresslevel=compresslevel) as f:
         f.write(data)
-
+    out.seek(0, 0)
+    compressed_data = out.read()
     return compressed_data
 
 
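The added seek/read pair ensures the finished gzip stream is rewound and read back before the function returns its bytes. A standalone sketch of the fixed flow, using io.BytesIO in place of the module's MkTempFile, with a round-trip check:

import gzip
from io import BytesIO

def gzip_compress(data, compresslevel=9):
    out = BytesIO()
    with gzip.GzipFile(fileobj=out, mode="wb", compresslevel=compresslevel) as f:
        f.write(data)
    out.seek(0, 0)            # rewind before reading the finished stream
    return out.read()

payload = b"hello world" * 100
assert gzip.decompress(gzip_compress(payload)) == payload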
@@ -4253,7 +4453,7 @@ def IsSingleDict(variable):
     return True
 
 
-def GetFileEncoding(infile, closefp=True):
+def GetFileEncoding(infile, filestart=0, closefp=True):
     if(hasattr(infile, "read") or hasattr(infile, "write")):
         fp = infile
     else:
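GetFileEncoding now sniffs the byte-order mark starting at filestart rather than at the beginning of the stream, which matters when the text sits at an offset inside a larger file. A minimal sketch of the same BOM check, independent of the module:

import binascii

def sniff_utf16_bom(fp, filestart=0):
    # Read two bytes at the requested offset and compare against the UTF-16 BOMs.
    fp.seek(filestart, 0)
    prefix = fp.read(2)
    if prefix == binascii.unhexlify("fffe"):
        return "UTF-16LE"
    if prefix == binascii.unhexlify("feff"):
        return "UTF-16BE"
    return "UTF-8"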
@@ -4262,19 +4462,19 @@ def GetFileEncoding(infile, closefp=True):
|
|
|
4262
4462
|
except FileNotFoundError:
|
|
4263
4463
|
return False
|
|
4264
4464
|
file_encoding = "UTF-8"
|
|
4265
|
-
fp.seek(
|
|
4465
|
+
fp.seek(filestart, 0)
|
|
4266
4466
|
prefp = fp.read(2)
|
|
4267
4467
|
if(prefp == binascii.unhexlify("fffe")):
|
|
4268
4468
|
file_encoding = "UTF-16LE"
|
|
4269
4469
|
elif(prefp == binascii.unhexlify("feff")):
|
|
4270
4470
|
file_encoding = "UTF-16BE"
|
|
4271
|
-
fp.seek(
|
|
4471
|
+
fp.seek(filestart, 0)
|
|
4272
4472
|
prefp = fp.read(3)
|
|
4273
4473
|
if(prefp == binascii.unhexlify("efbbbf")):
|
|
4274
4474
|
file_encoding = "UTF-8"
|
|
4275
4475
|
elif(prefp == binascii.unhexlify("0efeff")):
|
|
4276
4476
|
file_encoding = "SCSU"
|
|
4277
|
-
fp.seek(
|
|
4477
|
+
fp.seek(filestart, 0)
|
|
4278
4478
|
prefp = fp.read(4)
|
|
4279
4479
|
if(prefp == binascii.unhexlify("fffe0000")):
|
|
4280
4480
|
file_encoding = "UTF-32LE"
|
|
@@ -4290,21 +4490,21 @@ def GetFileEncoding(infile, closefp=True):
|
|
|
4290
4490
|
file_encoding = "UTF-7"
|
|
4291
4491
|
elif(prefp == binascii.unhexlify("2b2f762f")):
|
|
4292
4492
|
file_encoding = "UTF-7"
|
|
4293
|
-
fp.seek(
|
|
4493
|
+
fp.seek(filestart, 0)
|
|
4294
4494
|
if(closefp):
|
|
4295
4495
|
fp.close()
|
|
4296
4496
|
return file_encoding
|
|
4297
4497
|
|
|
4298
4498
|
|
|
4299
|
-
def GetFileEncodingFromString(instring, closefp=True):
|
|
4499
|
+
def GetFileEncodingFromString(instring, filestart=0, closefp=True):
|
|
4300
4500
|
try:
|
|
4301
4501
|
instringsfile = MkTempFile(instring)
|
|
4302
4502
|
except TypeError:
|
|
4303
4503
|
instringsfile = MkTempFile(instring.encode("UTF-8"))
|
|
4304
|
-
return GetFileEncoding(instringsfile, closefp)
|
|
4504
|
+
return GetFileEncoding(instringsfile, filestart, closefp)
|
|
4305
4505
|
|
|
4306
4506
|
|
|
4307
|
-
def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp=True):
|
|
4507
|
+
def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, filestart=0, closefp=True):
|
|
4308
4508
|
if(hasattr(infile, "read") or hasattr(infile, "write")):
|
|
4309
4509
|
fp = infile
|
|
4310
4510
|
else:
|
|
@@ -4313,7 +4513,8 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
|
|
|
4313
4513
|
except FileNotFoundError:
|
|
4314
4514
|
return False
|
|
4315
4515
|
filetype = False
|
|
4316
|
-
|
|
4516
|
+
curloc = filestart
|
|
4517
|
+
fp.seek(filestart, 0)
|
|
4317
4518
|
prefp = fp.read(2)
|
|
4318
4519
|
if(prefp == binascii.unhexlify("1f8b")):
|
|
4319
4520
|
filetype = "gzip"
|
|
@@ -4329,13 +4530,13 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
|
|
|
4329
4530
|
filetype = "zlib"
|
|
4330
4531
|
elif(prefp == binascii.unhexlify("1f9d")):
|
|
4331
4532
|
filetype = "zcompress"
|
|
4332
|
-
fp.seek(
|
|
4533
|
+
fp.seek(curloc, 0)
|
|
4333
4534
|
prefp = fp.read(3)
|
|
4334
4535
|
if(prefp == binascii.unhexlify("425a68")):
|
|
4335
4536
|
filetype = "bzip2"
|
|
4336
4537
|
elif(prefp == binascii.unhexlify("5d0000")):
|
|
4337
4538
|
filetype = "lzma"
|
|
4338
|
-
fp.seek(
|
|
4539
|
+
fp.seek(curloc, 0)
|
|
4339
4540
|
prefp = fp.read(4)
|
|
4340
4541
|
if(prefp == binascii.unhexlify("28b52ffd")):
|
|
4341
4542
|
filetype = "zstd"
|
|
@@ -4347,29 +4548,29 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
|
|
|
4347
4548
|
filetype = "zipfile"
|
|
4348
4549
|
elif(prefp == binascii.unhexlify("504b0708")):
|
|
4349
4550
|
filetype = "zipfile"
|
|
4350
|
-
fp.seek(
|
|
4551
|
+
fp.seek(curloc, 0)
|
|
4351
4552
|
prefp = fp.read(5)
|
|
4352
4553
|
if(prefp == binascii.unhexlify("7573746172")):
|
|
4353
4554
|
filetype = "tarfile"
|
|
4354
4555
|
if(prefp == binascii.unhexlify("7573746172")):
|
|
4355
4556
|
filetype = "tarfile"
|
|
4356
|
-
fp.seek(
|
|
4557
|
+
fp.seek(curloc, 0)
|
|
4357
4558
|
prefp = fp.read(6)
|
|
4358
4559
|
if(prefp == binascii.unhexlify("fd377a585a00")):
|
|
4359
4560
|
filetype = "xz"
|
|
4360
4561
|
elif(prefp == binascii.unhexlify("377abcaf271c")):
|
|
4361
4562
|
filetype = "7zipfile"
|
|
4362
|
-
fp.seek(
|
|
4563
|
+
fp.seek(curloc, 0)
|
|
4363
4564
|
prefp = fp.read(7)
|
|
4364
4565
|
if(prefp == binascii.unhexlify("526172211a0700")):
|
|
4365
4566
|
filetype = "rarfile"
|
|
4366
4567
|
elif(prefp == binascii.unhexlify("2a2a4143452a2a")):
|
|
4367
4568
|
filetype = "ace"
|
|
4368
|
-
fp.seek(
|
|
4569
|
+
fp.seek(curloc, 0)
|
|
4369
4570
|
prefp = fp.read(7)
|
|
4370
4571
|
if(prefp == binascii.unhexlify("894c5a4f0d0a1a")):
|
|
4371
4572
|
filetype = "lzo"
|
|
4372
|
-
fp.seek(
|
|
4573
|
+
fp.seek(curloc, 0)
|
|
4373
4574
|
prefp = fp.read(8)
|
|
4374
4575
|
if(prefp == binascii.unhexlify("7573746172003030")):
|
|
4375
4576
|
filetype = "tarfile"
|
|
@@ -4377,7 +4578,7 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
|
|
|
4377
4578
|
filetype = "tarfile"
|
|
4378
4579
|
if(prefp == binascii.unhexlify("526172211a070100")):
|
|
4379
4580
|
filetype = "rarfile"
|
|
4380
|
-
fp.seek(
|
|
4581
|
+
fp.seek(curloc, 0)
|
|
4381
4582
|
if(IsNestedDict(formatspecs)):
|
|
4382
4583
|
for key, value in formatspecs.items():
|
|
4383
4584
|
prefp = fp.read(formatspecs[key]['format_len'])
|
|
@@ -4393,7 +4594,7 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
|
|
|
4393
4594
|
if(formstring == inheaderver and formdel == formatspecs[key]['format_delimiter']):
|
|
4394
4595
|
filetype = formatspecs[key]['format_magic']
|
|
4395
4596
|
continue
|
|
4396
|
-
fp.seek(
|
|
4597
|
+
fp.seek(curloc, 0)
|
|
4397
4598
|
elif(IsSingleDict(formatspecs)):
|
|
4398
4599
|
prefp = fp.read(formatspecs['format_len'])
|
|
4399
4600
|
if(prefp == binascii.unhexlify(formatspecs['format_hex'])):
|
|
@@ -4408,15 +4609,15 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
             filetype = formatspecs['format_magic']
         else:
             pass
-    fp.seek(
+    fp.seek(curloc, 0)
     prefp = fp.read(9)
     if(prefp == binascii.unhexlify("894c5a4f000d0a1a0a")):
         filetype = "lzo"
-    fp.seek(
+    fp.seek(curloc, 0)
     prefp = fp.read(10)
     if(prefp == binascii.unhexlify("7061785f676c6f62616c")):
         filetype = "tarfile"
-    fp.seek(
+    fp.seek(curloc, 0)
     if(filetype == "gzip" or filetype == "bzip2" or filetype == "lzma" or filetype == "zstd" or filetype == "lz4" or filetype == "zlib"):
         if(TarFileCheck(fp)):
             filetype = "tarfile"
@@ -4431,14 +4632,15 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
             return "7zipfile"
     else:
         filetype = False
-    fp.seek(
+    fp.seek(curloc, 0)
     if(closefp):
         fp.close()
     return filetype


-def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, closefp=True):
-    compresscheck = CheckCompressionType(infile, formatspecs, False)
+def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, filestart=0, closefp=True):
+    compresscheck = CheckCompressionType(infile, formatspecs, filestart, False)
+    curloc = filestart
     if(not compresscheck):
         fextname = os.path.splitext(infile)[1]
         if(fextname == ".gz"):
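CheckCompressionSubType() now accepts a filestart offset and forwards it to CheckCompressionType(), so detection can begin at an arbitrary position inside the input. A minimal usage sketch, assuming pyfoxfile 0.22.4 is installed and the signatures match this diff (the expected result is an assumption, not a guarantee):

import gzip
import io
import pyfoxfile  # assumes the pyfoxfile module is importable

# Archive data embedded 8 bytes into a larger stream ("8 bytes!" is filler).
blob = io.BytesIO(b"8 bytes!" + gzip.compress(b"hello"))

# New in 0.22.4: probe at filestart instead of offset 0 and keep the stream open.
kind = pyfoxfile.CheckCompressionType(blob, filestart=8, closefp=False)
print(kind)  # expected: "gzip"; the fp.seek(curloc, 0) calls above restore the offset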
@@ -4487,7 +4689,7 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, clos
     elif(py7zr_support and compresscheck == "7zipfile" and py7zr.is_7zfile(infile)):
         return "7zipfile"
     if(hasattr(infile, "read") or hasattr(infile, "write")):
-        fp = UncompressFileAlt(infile, formatspecs)
+        fp = UncompressFileAlt(infile, formatspecs, filestart)
     else:
         try:
             if(compresscheck == "gzip" and compresscheck in compressionsupport):
@@ -4517,10 +4719,11 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, clos
         except FileNotFoundError:
             return False
     filetype = False
+    fp.seek(filestart, 0)
     prefp = fp.read(5)
     if(prefp == binascii.unhexlify("7573746172")):
         filetype = "tarfile"
-    fp.seek(
+    fp.seek(curloc, 0)
     if(IsNestedDict(formatspecs)):
         for key, value in formatspecs.items():
             prefp = fp.read(formatspecs[key]['format_len'])
@@ -4536,7 +4739,7 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, clos
             if(formstring == inheaderver and formdel == formatspecs[key]['format_delimiter']):
                 filetype = formatspecs[key]['format_magic']
                 continue
-            fp.seek(
+            fp.seek(curloc, 0)
     elif(IsSingleDict(formatspecs)):
         prefp = fp.read(formatspecs['format_len'])
         if(prefp == binascii.unhexlify(formatspecs['format_hex'])):
@@ -4551,36 +4754,36 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, clos
             filetype = formatspecs['format_magic']
         else:
             pass
-    fp.seek(
+    fp.seek(curloc, 0)
     prefp = fp.read(10)
     if(prefp == binascii.unhexlify("7061785f676c6f62616c")):
         filetype = "tarfile"
-    fp.seek(
+    fp.seek(curloc, 0)
     if(closefp):
         fp.close()
     return filetype


-def CheckCompressionTypeFromString(instring, formatspecs=__file_format_multi_dict__, closefp=True):
+def CheckCompressionTypeFromString(instring, formatspecs=__file_format_multi_dict__, filestart=0, closefp=True):
     try:
         instringsfile = MkTempFile(instring)
     except TypeError:
         instringsfile = MkTempFile(instring.encode("UTF-8"))
-    return CheckCompressionType(instringsfile, formatspecs, closefp)
+    return CheckCompressionType(instringsfile, formatspecs, filestart, closefp)


-def CheckCompressionTypeFromBytes(instring, formatspecs=__file_format_multi_dict__, closefp=True):
+def CheckCompressionTypeFromBytes(instring, formatspecs=__file_format_multi_dict__, filestart=0, closefp=True):
     try:
         instringsfile = MkTempFile(instring)
     except TypeError:
         instringsfile = MkTempFile(instring.decode("UTF-8"))
-    return CheckCompressionType(instringsfile, formatspecs, closefp)
+    return CheckCompressionType(instringsfile, formatspecs, filestart, closefp)


-def UncompressFileAlt(fp, formatspecs=__file_format_multi_dict__):
+def UncompressFileAlt(fp, formatspecs=__file_format_multi_dict__, filestart=0):
     if(not hasattr(fp, "read")):
         return False
-    compresscheck = CheckCompressionType(fp, formatspecs, False)
+    compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
     if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
         formatspecs = formatspecs[compresscheck]
     if(compresscheck == "gzip" and compresscheck in compressionsupport):
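The string/bytes front-ends gain the same filestart pass-through. A small hedged example, assuming the signatures shown above; the "HDR:" prefix and the 4-byte offset are invented for the demo:

import bz2
import pyfoxfile  # assumed importable; signatures taken from this diff

data = b"HDR:" + bz2.compress(b"payload")
print(pyfoxfile.CheckCompressionTypeFromBytes(data, filestart=4))  # expected: "bzip2"
print(pyfoxfile.CheckCompressionTypeFromBytes(data))               # probed at offset 0 instead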
@@ -4614,8 +4817,8 @@ def UncompressFileAlt(fp, formatspecs=__file_format_multi_dict__):
     return fp


-def UncompressFile(infile, formatspecs=__file_format_multi_dict__, mode="rb"):
-    compresscheck = CheckCompressionType(infile, formatspecs, False)
+def UncompressFile(infile, formatspecs=__file_format_multi_dict__, mode="rb", filestart=0):
+    compresscheck = CheckCompressionType(infile, formatspecs, filestart, False)
     if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
         formatspecs = formatspecs[compresscheck]
     if(sys.version_info[0] == 2 and compresscheck):
@@ -4661,8 +4864,8 @@ def UncompressFile(infile, formatspecs=__file_format_multi_dict__, mode="rb"):
|
|
|
4661
4864
|
return filefp
|
|
4662
4865
|
|
|
4663
4866
|
|
|
4664
|
-
def UncompressString(infile, formatspecs=__file_format_multi_dict__):
|
|
4665
|
-
compresscheck = CheckCompressionTypeFromString(infile, formatspecs, False)
|
|
4867
|
+
def UncompressString(infile, formatspecs=__file_format_multi_dict__, filestart=0):
|
|
4868
|
+
compresscheck = CheckCompressionTypeFromString(infile, formatspecs, filestart, False)
|
|
4666
4869
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
4667
4870
|
formatspecs = formatspecs[compresscheck]
|
|
4668
4871
|
if(compresscheck == "gzip" and compresscheck in compressionsupport):
|
|
@@ -4689,32 +4892,32 @@ def UncompressString(infile, formatspecs=__file_format_multi_dict__):
     return fileuz


-def UncompressStringAlt(instring, formatspecs=__file_format_multi_dict__):
+def UncompressStringAlt(instring, formatspecs=__file_format_multi_dict__, filestart=0):
     filefp = StringIO()
-    outstring = UncompressString(instring, formatspecs)
+    outstring = UncompressString(instring, formatspecs, filestart)
     filefp.write(outstring)
     filefp.seek(0, 0)
     return filefp

-def UncompressStringAltFP(fp, formatspecs=__file_format_multi_dict__):
+def UncompressStringAltFP(fp, formatspecs=__file_format_multi_dict__, filestart=0):
     if(not hasattr(fp, "read")):
         return False
-    prechck = CheckCompressionType(fp, formatspecs, False)
+    prechck = CheckCompressionType(fp, formatspecs, filestart, False)
     if(IsNestedDict(formatspecs) and prechck in formatspecs):
         formatspecs = formatspecs[prechck]
-    fp.seek(
+    fp.seek(filestart, 0)
     if(prechck!="zstd"):
-        return UncompressFileAlt(fp, formatspecs)
+        return UncompressFileAlt(fp, formatspecs, filestart)
     filefp = StringIO()
-    fp.seek(
-    outstring = UncompressString(fp.read(), formatspecs)
+    fp.seek(filestart, 0)
+    outstring = UncompressString(fp.read(), formatspecs, 0)
     filefp.write(outstring)
     filefp.seek(0, 0)
     return filefp


-def UncompressBytes(infile, formatspecs=__file_format_multi_dict__):
-    compresscheck = CheckCompressionTypeFromBytes(infile, formatspecs, False)
+def UncompressBytes(infile, formatspecs=__file_format_multi_dict__, filestart=0):
+    compresscheck = CheckCompressionTypeFromBytes(infile, formatspecs, filestart, False)
     if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
         formatspecs = formatspecs[compresscheck]
     if(compresscheck == "gzip" and compresscheck in compressionsupport):
@@ -4739,26 +4942,26 @@ def UncompressBytes(infile, formatspecs=__file_format_multi_dict__):
     return fileuz


-def UncompressBytesAlt(inbytes, formatspecs=__file_format_multi_dict__):
+def UncompressBytesAlt(inbytes, formatspecs=__file_format_multi_dict__, filestart=0):
     filefp = MkTempFile()
-    outstring = UncompressBytes(inbytes, formatspecs)
+    outstring = UncompressBytes(inbytes, formatspecs, filestart)
     filefp.write(outstring)
     filefp.seek(0, 0)
     return filefp


-def UncompressBytesAltFP(fp, formatspecs=__file_format_multi_dict__):
+def UncompressBytesAltFP(fp, formatspecs=__file_format_multi_dict__, filestart=0):
     if(not hasattr(fp, "read")):
         return False
-    prechck = CheckCompressionType(fp, formatspecs, False)
+    prechck = CheckCompressionType(fp, formatspecs, filestart, False)
     if(IsNestedDict(formatspecs) and prechck in formatspecs):
         formatspecs = formatspecs[prechck]
-    fp.seek(
+    fp.seek(filestart, 0)
     if(prechck!="zstd"):
-        return UncompressFileAlt(fp, formatspecs)
+        return UncompressFileAlt(fp, formatspecs, filestart)
     filefp = MkTempFile()
-    fp.seek(
-    outstring = UncompressBytes(fp.read(), formatspecs)
+    fp.seek(filestart, 0)
+    outstring = UncompressBytes(fp.read(), formatspecs, 0)
     filefp.write(outstring)
     filefp.seek(0, 0)
     return filefp
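A sketch of how the *AltFP helpers are expected to behave with filestart, based only on the control flow shown above (non-zstd input is delegated to UncompressFileAlt(); zstd input is read from filestart into a fresh buffer). The input layout and offset are invented for the demo, and the decompressed output is not guaranteed:

import gzip
import io
import pyfoxfile  # assumed importable; behaviour inferred from this diff

fp = io.BytesIO(b"\x00" * 16 + gzip.compress(b"tail data"))  # made-up layout
out = pyfoxfile.UncompressBytesAltFP(fp, filestart=16)
if out is not False and hasattr(out, "read"):
    print(out.read())  # decompressed bytes if the gzip branch behaves as sketched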
@@ -5005,7 +5208,7 @@ def PackFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compress
|
|
|
5005
5208
|
fp = MkTempFile()
|
|
5006
5209
|
elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
|
|
5007
5210
|
fp = outfile
|
|
5008
|
-
elif(re.findall(
|
|
5211
|
+
elif(re.findall(__upload_proto_support__, outfile)):
|
|
5009
5212
|
fp = MkTempFile()
|
|
5010
5213
|
else:
|
|
5011
5214
|
fbasename = os.path.splitext(outfile)[0]
|
|
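The inline regex previously embedded here is replaced by the module-level __upload_proto_support__ constant. The illustration below uses a stand-in pattern (an assumption, not the real constant) to show how such a gate separates remote upload targets from local output paths:

import re

# Assumed stand-in for __upload_proto_support__ (the real pattern lives in pyfoxfile).
UPLOAD_PROTO = r"^(ftp|ftps|sftp)://"

for outfile in ("sftp://user@host/backups/out.fox", "./out.fox"):
    if re.findall(UPLOAD_PROTO, outfile):
        print(outfile, "-> staged in a temporary file and uploaded afterwards")
    else:
        print(outfile, "-> written directly to the local path")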
@@ -5207,9 +5410,9 @@ def PackFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compress
|
|
|
5207
5410
|
if not followlink and ftype in data_types:
|
|
5208
5411
|
with open(fname, "rb") as fpc:
|
|
5209
5412
|
shutil.copyfileobj(fpc, fcontents)
|
|
5210
|
-
typechecktest = CheckCompressionType(fcontents, closefp=False)
|
|
5413
|
+
typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
|
|
5211
5414
|
fcontents.seek(0, 0)
|
|
5212
|
-
fcencoding = GetFileEncoding(fcontents, False)
|
|
5415
|
+
fcencoding = GetFileEncoding(fcontents, 0, False)
|
|
5213
5416
|
if(typechecktest is False and not compresswholefile):
|
|
5214
5417
|
fcontents.seek(0, 2)
|
|
5215
5418
|
ucfsize = fcontents.tell()
|
|
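The buffer-then-probe pattern above now spells out filestart=0 and closefp=False explicitly. A hedged standalone version of the same idea; io.BytesIO stands in for MkTempFile(), and "somefile.bin" is a hypothetical input path:

import io
import shutil
import pyfoxfile  # assumed importable

fcontents = io.BytesIO()  # stands in for MkTempFile()
with open("somefile.bin", "rb") as fpc:  # hypothetical input path
    shutil.copyfileobj(fpc, fcontents)
typechecktest = pyfoxfile.CheckCompressionType(fcontents, filestart=0, closefp=False)
fcontents.seek(0, 0)
print(typechecktest)  # a compression name such as "gzip", or False for plain data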
@@ -5254,9 +5457,9 @@ def PackFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compress
|
|
|
5254
5457
|
flstatinfo = os.stat(flinkname)
|
|
5255
5458
|
with open(flinkname, "rb") as fpc:
|
|
5256
5459
|
shutil.copyfileobj(fpc, fcontents)
|
|
5257
|
-
typechecktest = CheckCompressionType(fcontents, closefp=False)
|
|
5460
|
+
typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
|
|
5258
5461
|
fcontents.seek(0, 0)
|
|
5259
|
-
fcencoding = GetFileEncoding(fcontents, False)
|
|
5462
|
+
fcencoding = GetFileEncoding(fcontents, 0, False)
|
|
5260
5463
|
if(typechecktest is False and not compresswholefile):
|
|
5261
5464
|
fcontents.seek(0, 2)
|
|
5262
5465
|
ucfsize = fcontents.tell()
|
|
@@ -5334,7 +5537,7 @@ def PackFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compress
|
|
|
5334
5537
|
outvar = fp.read()
|
|
5335
5538
|
fp.close()
|
|
5336
5539
|
return outvar
|
|
5337
|
-
elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
|
|
5540
|
+
elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
|
|
5338
5541
|
fp = CompressOpenFileAlt(
|
|
5339
5542
|
fp, compression, compressionlevel, compressionuselist, formatspecs)
|
|
5340
5543
|
fp.seek(0, 0)
|
|
@@ -5389,7 +5592,7 @@ def PackFoxFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
|
|
|
5389
5592
|
fp = MkTempFile()
|
|
5390
5593
|
elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
|
|
5391
5594
|
fp = outfile
|
|
5392
|
-
elif(re.findall(
|
|
5595
|
+
elif(re.findall(__upload_proto_support__, outfile)):
|
|
5393
5596
|
fp = MkTempFile()
|
|
5394
5597
|
else:
|
|
5395
5598
|
fbasename = os.path.splitext(outfile)[0]
|
|
@@ -5418,7 +5621,7 @@ def PackFoxFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
|
|
|
5418
5621
|
if(not infile):
|
|
5419
5622
|
return False
|
|
5420
5623
|
infile.seek(0, 0)
|
|
5421
|
-
elif(re.findall(
|
|
5624
|
+
elif(re.findall(__download_proto_support__, infile)):
|
|
5422
5625
|
infile = download_file_from_internet_file(infile)
|
|
5423
5626
|
infile.seek(0, 0)
|
|
5424
5627
|
if(not infile):
|
|
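Remote inputs are matched against __download_proto_support__ and fetched with download_file_from_internet_file() before packing. The pattern below is an assumed stand-in used only to illustrate the gate:

import re

# Assumed stand-in for __download_proto_support__ (the real pattern lives in pyfoxfile).
DOWNLOAD_PROTO = r"^(http|https|ftp|ftps|sftp)://"

infile = "https://example.com/archive.tar.gz"  # hypothetical URL
if re.findall(DOWNLOAD_PROTO, infile):
    print("remote input: fetched via download_file_from_internet_file(), then rewound")
else:
    print("local input: opened from the filesystem")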
@@ -5442,7 +5645,7 @@ def PackFoxFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
|
|
|
5442
5645
|
return False
|
|
5443
5646
|
try:
|
|
5444
5647
|
if(hasattr(infile, "read") or hasattr(infile, "write")):
|
|
5445
|
-
compresscheck = CheckCompressionType(infile, formatspecs, False)
|
|
5648
|
+
compresscheck = CheckCompressionType(infile, formatspecs, 0, False)
|
|
5446
5649
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
5447
5650
|
formatspecs = formatspecs[compresscheck]
|
|
5448
5651
|
if(compresscheck=="zstd"):
|
|
@@ -5454,7 +5657,7 @@ def PackFoxFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
|
|
|
5454
5657
|
else:
|
|
5455
5658
|
tarfp = tarfile.open(fileobj=infile, mode="r")
|
|
5456
5659
|
else:
|
|
5457
|
-
compresscheck = CheckCompressionType(infile, formatspecs, True)
|
|
5660
|
+
compresscheck = CheckCompressionType(infile, formatspecs, 0, True)
|
|
5458
5661
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
5459
5662
|
formatspecs = formatspecs[compresscheck]
|
|
5460
5663
|
if(compresscheck=="zstd"):
|
|
@@ -5555,9 +5758,9 @@ def PackFoxFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
|
|
|
5555
5758
|
fpc = tarfp.extractfile(member)
|
|
5556
5759
|
shutil.copyfileobj(fpc, fcontents)
|
|
5557
5760
|
fpc.close()
|
|
5558
|
-
typechecktest = CheckCompressionType(fcontents, closefp=False)
|
|
5761
|
+
typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
|
|
5559
5762
|
fcontents.seek(0, 0)
|
|
5560
|
-
fcencoding = GetFileEncoding(fcontents, False)
|
|
5763
|
+
fcencoding = GetFileEncoding(fcontents, 0, False)
|
|
5561
5764
|
if(typechecktest is False and not compresswholefile):
|
|
5562
5765
|
fcontents.seek(0, 2)
|
|
5563
5766
|
ucfsize = fcontents.tell()
|
|
@@ -5635,7 +5838,7 @@ def PackFoxFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
|
|
|
5635
5838
|
outvar = fp.read()
|
|
5636
5839
|
fp.close()
|
|
5637
5840
|
return outvar
|
|
5638
|
-
elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
|
|
5841
|
+
elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
|
|
5639
5842
|
fp = CompressOpenFileAlt(
|
|
5640
5843
|
fp, compression, compressionlevel, compressionuselist, formatspecs)
|
|
5641
5844
|
fp.seek(0, 0)
|
|
@@ -5686,7 +5889,7 @@ def PackFoxFileFromZipFile(infile, outfile, fmttype="auto", compression="auto",
|
|
|
5686
5889
|
fp = MkTempFile()
|
|
5687
5890
|
elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
|
|
5688
5891
|
fp = outfile
|
|
5689
|
-
elif(re.findall(
|
|
5892
|
+
elif(re.findall(__upload_proto_support__, outfile)):
|
|
5690
5893
|
fp = MkTempFile()
|
|
5691
5894
|
else:
|
|
5692
5895
|
fbasename = os.path.splitext(outfile)[0]
|
|
@@ -5715,7 +5918,7 @@ def PackFoxFileFromZipFile(infile, outfile, fmttype="auto", compression="auto",
|
|
|
5715
5918
|
if(not infile):
|
|
5716
5919
|
return False
|
|
5717
5920
|
infile.seek(0, 0)
|
|
5718
|
-
elif(re.findall(
|
|
5921
|
+
elif(re.findall(__download_proto_support__, infile)):
|
|
5719
5922
|
infile = download_file_from_internet_file(infile)
|
|
5720
5923
|
infile.seek(0, 0)
|
|
5721
5924
|
if(not infile):
|
|
@@ -5852,9 +6055,9 @@ def PackFoxFileFromZipFile(infile, outfile, fmttype="auto", compression="auto",
|
|
|
5852
6055
|
curcompression = "none"
|
|
5853
6056
|
if ftype == 0:
|
|
5854
6057
|
fcontents.write(zipfp.read(member.filename))
|
|
5855
|
-
typechecktest = CheckCompressionType(fcontents, closefp=False)
|
|
6058
|
+
typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
|
|
5856
6059
|
fcontents.seek(0, 0)
|
|
5857
|
-
fcencoding = GetFileEncoding(fcontents, False)
|
|
6060
|
+
fcencoding = GetFileEncoding(fcontents, 0, False)
|
|
5858
6061
|
if(typechecktest is False and not compresswholefile):
|
|
5859
6062
|
fcontents.seek(0, 2)
|
|
5860
6063
|
ucfsize = fcontents.tell()
|
|
@@ -5929,7 +6132,7 @@ def PackFoxFileFromZipFile(infile, outfile, fmttype="auto", compression="auto",
|
|
|
5929
6132
|
outvar = fp.read()
|
|
5930
6133
|
fp.close()
|
|
5931
6134
|
return outvar
|
|
5932
|
-
elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
|
|
6135
|
+
elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
|
|
5933
6136
|
fp = CompressOpenFileAlt(
|
|
5934
6137
|
fp, compression, compressionlevel, compressionuselist, formatspecs)
|
|
5935
6138
|
fp.seek(0, 0)
|
|
@@ -5985,7 +6188,7 @@ if(rarfile_support):
            fp = MkTempFile()
        elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
            fp = outfile
-       elif(re.findall(
+       elif(re.findall(__upload_proto_support__, outfile)):
            fp = MkTempFile()
        else:
            fbasename = os.path.splitext(outfile)[0]
@@ -6169,9 +6372,9 @@ if(rarfile_support):
            curcompression = "none"
            if ftype == 0:
                fcontents.write(rarfp.read(member.filename))
-               typechecktest = CheckCompressionType(fcontents, closefp=False)
+               typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
                fcontents.seek(0, 0)
-               fcencoding = GetFileEncoding(fcontents, False)
+               fcencoding = GetFileEncoding(fcontents, 0, False)
            if(typechecktest is False and not compresswholefile):
                fcontents.seek(0, 2)
                ucfsize = fcontents.tell()
@@ -6249,7 +6452,7 @@ if(rarfile_support):
            outvar = fp.read()
            fp.close()
            return outvar
-       elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
+       elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
            fp = CompressOpenFileAlt(
                fp, compression, compressionlevel, compressionuselist, formatspecs)
            fp.seek(0, 0)
@@ -6305,7 +6508,7 @@ if(py7zr_support):
            fp = MkTempFile()
        elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
            fp = outfile
-       elif(re.findall(
+       elif(re.findall(__upload_proto_support__, outfile)):
            fp = MkTempFile()
        else:
            fbasename = os.path.splitext(outfile)[0]
@@ -6328,7 +6531,7 @@ if(py7zr_support):
            return False
        szpfp = py7zr.SevenZipFile(infile, mode="r")
        file_content = szpfp.readall()
-       #sztest = szpfp.testzip()
+       #sztest = szpfp.testzip()
        sztestalt = szpfp.test()
        if(sztestalt):
            VerbosePrintOut("Bad file found!")
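For reference, the py7zr calls used above (SevenZipFile, readall, test) come from the optional py7zr dependency. A minimal standalone sketch with a hypothetical archive path:

import py7zr  # optional dependency used by this code path

# "example.7z" is a hypothetical path used only for the sketch.
with py7zr.SevenZipFile("example.7z", mode="r") as szpfp:
    file_content = szpfp.readall()  # dict mapping member names to file-like objects
    for name, fobj in file_content.items():
        print(name, len(fobj.read()))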
@@ -6422,9 +6625,9 @@ if(py7zr_support):
            fcontents.write(file_content[member.filename].read())
            fsize = format(fcontents.tell(), 'x').lower()
            fcontents.seek(0, 0)
-           typechecktest = CheckCompressionType(fcontents, closefp=False)
+           typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
            fcontents.seek(0, 0)
-           fcencoding = GetFileEncoding(fcontents, False)
+           fcencoding = GetFileEncoding(fcontents, 0, False)
            file_content[member.filename].close()
            if(typechecktest is False and not compresswholefile):
                fcontents.seek(0, 2)
@@ -6503,7 +6706,7 @@ if(py7zr_support):
            outvar = fp.read()
            fp.close()
            return outvar
-       elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
+       elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
            fp = CompressOpenFileAlt(
                fp, compression, compressionlevel, compressionuselist, formatspecs)
            fp.seek(0, 0)
@@ -6517,7 +6720,7 @@ if(py7zr_support):


 def PackFoxFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
-    checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
+    checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
     if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
         formatspecs = formatspecs[checkcompressfile]
     if(verbose):
@@ -6537,18 +6740,20 @@ def PackFoxFileFromInFile(infile, outfile, fmttype="auto", compression="auto", c
|
|
|
6537
6740
|
return False
|
|
6538
6741
|
|
|
6539
6742
|
|
|
6540
|
-
def
|
|
6743
|
+
def FoxFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
|
|
6744
|
+
if(verbose):
|
|
6745
|
+
logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
|
|
6541
6746
|
if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
|
|
6542
6747
|
formatspecs = formatspecs[fmttype]
|
|
6543
6748
|
elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
|
|
6544
6749
|
fmttype = "auto"
|
|
6545
|
-
curloc =
|
|
6750
|
+
curloc = filestart
|
|
6546
6751
|
if(hasattr(infile, "read") or hasattr(infile, "write")):
|
|
6547
6752
|
curloc = infile.tell()
|
|
6548
6753
|
fp = infile
|
|
6549
|
-
fp.seek(
|
|
6550
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
6551
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
|
|
6754
|
+
fp.seek(filestart, 0)
|
|
6755
|
+
fp = UncompressFileAlt(fp, formatspecs, filestart)
|
|
6756
|
+
checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
|
|
6552
6757
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
6553
6758
|
formatspecs = formatspecs[checkcompressfile]
|
|
6554
6759
|
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
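The new FoxFileValidate() signature adds filestart so validation can start at an arbitrary offset inside the input. A hypothetical call matching the signature shown above; the archive path is made up and the boolean interpretation of the result is an assumption:

import pyfoxfile  # assumed importable

# "backup.fox" and the offset are made up; keyword names follow the new signature.
result = pyfoxfile.FoxFileValidate("backup.fox", fmttype="auto", filestart=0, verbose=True)
print("archive validated" if result else "validation failed or unsupported input")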
@@ -6565,45 +6770,45 @@ def FoxFileSeekToFileNum(infile, fmttype="auto", seekto=0, listonly=False, conte
|
|
|
6565
6770
|
return False
|
|
6566
6771
|
if(not fp):
|
|
6567
6772
|
return False
|
|
6568
|
-
fp.seek(
|
|
6773
|
+
fp.seek(filestart, 0)
|
|
6569
6774
|
elif(infile == "-"):
|
|
6570
6775
|
fp = MkTempFile()
|
|
6571
6776
|
if(hasattr(sys.stdin, "buffer")):
|
|
6572
6777
|
shutil.copyfileobj(sys.stdin.buffer, fp)
|
|
6573
6778
|
else:
|
|
6574
6779
|
shutil.copyfileobj(sys.stdin, fp)
|
|
6575
|
-
fp.seek(
|
|
6576
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
6577
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
|
|
6780
|
+
fp.seek(filestart, 0)
|
|
6781
|
+
fp = UncompressFileAlt(fp, formatspecs, filestart)
|
|
6782
|
+
checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
|
|
6578
6783
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
6579
6784
|
formatspecs = formatspecs[checkcompressfile]
|
|
6580
6785
|
if(not fp):
|
|
6581
6786
|
return False
|
|
6582
|
-
fp.seek(
|
|
6787
|
+
fp.seek(filestart, 0)
|
|
6583
6788
|
elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
|
|
6584
6789
|
fp = MkTempFile()
|
|
6585
6790
|
fp.write(infile)
|
|
6586
|
-
fp.seek(
|
|
6587
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
6588
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
6791
|
+
fp.seek(filestart, 0)
|
|
6792
|
+
fp = UncompressFileAlt(fp, formatspecs, filestart)
|
|
6793
|
+
compresscheck = CheckCompressionType(fp, formatspecs, 0, False)
|
|
6589
6794
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
6590
6795
|
formatspecs = formatspecs[compresscheck]
|
|
6591
6796
|
if(not fp):
|
|
6592
6797
|
return False
|
|
6593
|
-
fp.seek(
|
|
6594
|
-
elif(re.findall(
|
|
6798
|
+
fp.seek(filestart, 0)
|
|
6799
|
+
elif(re.findall(__download_proto_support__, infile)):
|
|
6595
6800
|
fp = download_file_from_internet_file(infile)
|
|
6596
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
6597
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
6801
|
+
fp = UncompressFileAlt(fp, formatspecs, filestart)
|
|
6802
|
+
compresscheck = CheckCompressionType(fp, formatspecs, 0, False)
|
|
6598
6803
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
6599
6804
|
formatspecs = formatspecs[compresscheck]
|
|
6600
|
-
fp.seek(
|
|
6805
|
+
fp.seek(filestart, 0)
|
|
6601
6806
|
if(not fp):
|
|
6602
6807
|
return False
|
|
6603
|
-
fp.seek(
|
|
6808
|
+
fp.seek(filestart, 0)
|
|
6604
6809
|
else:
|
|
6605
6810
|
infile = RemoveWindowsPath(infile)
|
|
6606
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
6811
|
+
checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
|
|
6607
6812
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
6608
6813
|
formatspecs = formatspecs[checkcompressfile]
|
|
6609
6814
|
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
@@ -6618,7 +6823,7 @@ def FoxFileSeekToFileNum(infile, fmttype="auto", seekto=0, listonly=False, conte
|
|
|
6618
6823
|
return False
|
|
6619
6824
|
elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
|
|
6620
6825
|
return False
|
|
6621
|
-
compresscheck = CheckCompressionType(infile, formatspecs, True)
|
|
6826
|
+
compresscheck = CheckCompressionType(infile, formatspecs, filestart, True)
|
|
6622
6827
|
if(not compresscheck):
|
|
6623
6828
|
fextname = os.path.splitext(infile)[1]
|
|
6624
6829
|
if(fextname == ".gz"):
|
|
@@ -6641,26 +6846,23 @@ def FoxFileSeekToFileNum(infile, fmttype="auto", seekto=0, listonly=False, conte
|
|
|
6641
6846
|
return False
|
|
6642
6847
|
if(not compresscheck):
|
|
6643
6848
|
return False
|
|
6644
|
-
fp = UncompressFile(infile, formatspecs, "rb")
|
|
6849
|
+
fp = UncompressFile(infile, formatspecs, "rb", filestart)
|
|
6645
6850
|
try:
|
|
6646
|
-
fp.seek(0, 2)
|
|
6851
|
+
fp.seek(0, 2)
|
|
6647
6852
|
except OSError:
|
|
6648
|
-
SeekToEndOfFile(fp)
|
|
6853
|
+
SeekToEndOfFile(fp)
|
|
6649
6854
|
except ValueError:
|
|
6650
|
-
SeekToEndOfFile(fp)
|
|
6651
|
-
CatSize = fp.tell()
|
|
6652
|
-
CatSizeEnd = CatSize
|
|
6855
|
+
SeekToEndOfFile(fp)
|
|
6856
|
+
CatSize = fp.tell()
|
|
6857
|
+
CatSizeEnd = CatSize
|
|
6653
6858
|
fp.seek(curloc, 0)
|
|
6654
|
-
if(curloc > 0):
|
|
6655
|
-
fp.seek(0, 0)
|
|
6656
6859
|
if(IsNestedDict(formatspecs)):
|
|
6657
|
-
compresschecking = CheckCompressionType(fp, formatspecs, False)
|
|
6860
|
+
compresschecking = CheckCompressionType(fp, formatspecs, filestart, False)
|
|
6658
6861
|
if(compresschecking not in formatspecs):
|
|
6659
|
-
fp.seek(0, 0)
|
|
6660
6862
|
return False
|
|
6661
6863
|
else:
|
|
6662
6864
|
formatspecs = formatspecs[compresschecking]
|
|
6663
|
-
fp.seek(
|
|
6865
|
+
fp.seek(filestart, 0)
|
|
6664
6866
|
inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
|
|
6665
6867
|
formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
|
|
6666
6868
|
formdelszie = len(formatspecs['format_delimiter'])
|
|
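The sizing logic above measures the end of the data with seek(0, 2)/tell() and then restores the previous position instead of unconditionally rewinding to 0. A tiny self-contained version of that bookkeeping (names are illustrative, not pyfoxfile's):

import io

def measure_end(fp):
    curloc = fp.tell()
    fp.seek(0, 2)             # jump to the end of the stream
    cat_size_end = fp.tell()  # absolute end offset, used as the read limit
    fp.seek(curloc, 0)        # restore the caller's position
    return cat_size_end

buf = io.BytesIO(b"x" * 100)
buf.seek(10)
print(measure_end(buf), buf.tell())  # 100 10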
@@ -6677,23 +6879,8 @@ def FoxFileSeekToFileNum(infile, fmttype="auto", seekto=0, listonly=False, conte
|
|
|
6677
6879
|
fp, formatspecs['format_delimiter'])
|
|
6678
6880
|
fnumextrafieldsize = int(inheader[5], 16)
|
|
6679
6881
|
fnumextrafields = int(inheader[6], 16)
|
|
6680
|
-
fextrafieldslist = []
|
|
6681
6882
|
extrastart = 7
|
|
6682
6883
|
extraend = extrastart + fnumextrafields
|
|
6683
|
-
while(extrastart < extraend):
|
|
6684
|
-
fextrafieldslist.append(inheader[extrastart])
|
|
6685
|
-
extrastart = extrastart + 1
|
|
6686
|
-
if(fnumextrafields==1):
|
|
6687
|
-
try:
|
|
6688
|
-
fextrafieldslist = json.loads(base64.b64decode(fextrafieldslist[0]).decode("UTF-8"))
|
|
6689
|
-
fnumextrafields = len(fextrafieldslist)
|
|
6690
|
-
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
|
|
6691
|
-
try:
|
|
6692
|
-
fextrafieldslist = json.loads(fextrafieldslist[0])
|
|
6693
|
-
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
|
|
6694
|
-
pass
|
|
6695
|
-
if(curloc > 0):
|
|
6696
|
-
fp.seek(curloc, 0)
|
|
6697
6884
|
formversion = re.findall("([\\d]+)", formstring)
|
|
6698
6885
|
fheadsize = int(inheader[0], 16)
|
|
6699
6886
|
fnumfields = int(inheader[1], 16)
|
|
@@ -6702,649 +6889,27 @@ def FoxFileSeekToFileNum(infile, fmttype="auto", seekto=0, listonly=False, conte
|
|
|
6702
6889
|
fnumfiles = int(inheader[4], 16)
|
|
6703
6890
|
fprechecksumtype = inheader[-2]
|
|
6704
6891
|
fprechecksum = inheader[-1]
|
|
6892
|
+
il = 0
|
|
6705
6893
|
headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
|
|
6706
6894
|
newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
|
|
6707
|
-
|
|
6708
|
-
|
|
6709
|
-
|
|
6710
|
-
|
|
6711
|
-
|
|
6712
|
-
|
|
6713
|
-
|
|
6714
|
-
|
|
6715
|
-
|
|
6716
|
-
|
|
6717
|
-
|
|
6718
|
-
|
|
6719
|
-
|
|
6720
|
-
if(
|
|
6721
|
-
|
|
6722
|
-
|
|
6723
|
-
|
|
6724
|
-
|
|
6725
|
-
prefhstart = fp.tell()
|
|
6726
|
-
if(formatspecs['new_style']):
|
|
6727
|
-
preheaderdata = ReadFileHeaderDataBySize(
|
|
6728
|
-
fp, formatspecs['format_delimiter'])
|
|
6729
|
-
else:
|
|
6730
|
-
preheaderdata = ReadFileHeaderDataWoSize(
|
|
6731
|
-
fp, formatspecs['format_delimiter'])
|
|
6732
|
-
if(len(preheaderdata) == 0):
|
|
6733
|
-
break
|
|
6734
|
-
prefheadsize = int(preheaderdata[0], 16)
|
|
6735
|
-
prefnumfields = int(preheaderdata[1], 16)
|
|
6736
|
-
preftype = int(preheaderdata[2], 16)
|
|
6737
|
-
prefencoding = preheaderdata[3]
|
|
6738
|
-
prefcencoding = preheaderdata[4]
|
|
6739
|
-
if(re.findall("^[.|/]", preheaderdata[5])):
|
|
6740
|
-
prefname = preheaderdata[5]
|
|
6741
|
-
else:
|
|
6742
|
-
prefname = "./"+preheaderdata[5]
|
|
6743
|
-
prefbasedir = os.path.dirname(prefname)
|
|
6744
|
-
preflinkname = preheaderdata[6]
|
|
6745
|
-
prefsize = int(preheaderdata[7], 16)
|
|
6746
|
-
prefatime = int(preheaderdata[8], 16)
|
|
6747
|
-
prefmtime = int(preheaderdata[9], 16)
|
|
6748
|
-
prefctime = int(preheaderdata[10], 16)
|
|
6749
|
-
prefbtime = int(preheaderdata[11], 16)
|
|
6750
|
-
prefmode = int(preheaderdata[12], 16)
|
|
6751
|
-
prefchmode = stat.S_IMODE(prefmode)
|
|
6752
|
-
preftypemod = stat.S_IFMT(prefmode)
|
|
6753
|
-
prefwinattributes = int(preheaderdata[13], 16)
|
|
6754
|
-
prefcompression = preheaderdata[14]
|
|
6755
|
-
prefcsize = int(preheaderdata[15], 16)
|
|
6756
|
-
prefuid = int(preheaderdata[16], 16)
|
|
6757
|
-
prefuname = preheaderdata[17]
|
|
6758
|
-
prefgid = int(preheaderdata[18], 16)
|
|
6759
|
-
prefgname = preheaderdata[19]
|
|
6760
|
-
fid = int(preheaderdata[20], 16)
|
|
6761
|
-
finode = int(preheaderdata[21], 16)
|
|
6762
|
-
flinkcount = int(preheaderdata[22], 16)
|
|
6763
|
-
prefdev = int(preheaderdata[23], 16)
|
|
6764
|
-
prefdev_minor = int(preheaderdata[24], 16)
|
|
6765
|
-
prefdev_major = int(preheaderdata[25], 16)
|
|
6766
|
-
prefseeknextfile = preheaderdata[26]
|
|
6767
|
-
prefjsontype = preheaderdata[27]
|
|
6768
|
-
prefjsonlen = int(preheaderdata[28], 16)
|
|
6769
|
-
prefjsonsize = int(preheaderdata[29], 16)
|
|
6770
|
-
prefjsonchecksumtype = preheaderdata[30]
|
|
6771
|
-
prefjsonchecksum = preheaderdata[31]
|
|
6772
|
-
prefhend = fp.tell() - 1
|
|
6773
|
-
prefjstart = fp.tell()
|
|
6774
|
-
prefjoutfprejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
|
|
6775
|
-
prefjend = fp.tell()
|
|
6776
|
-
fp.seek(len(formatspecs['format_delimiter']), 1)
|
|
6777
|
-
prejsonfcs = GetFileChecksum(prefjoutfprejsoncontent, prefjsonchecksumtype, True, formatspecs)
|
|
6778
|
-
prefextrasize = int(preheaderdata[32], 16)
|
|
6779
|
-
prefextrafields = int(preheaderdata[33], 16)
|
|
6780
|
-
extrastart = 34
|
|
6781
|
-
extraend = extrastart + prefextrafields
|
|
6782
|
-
prefcs = preheaderdata[-2].lower()
|
|
6783
|
-
prenewfcs = preheaderdata[-1].lower()
|
|
6784
|
-
prenewfcs = GetHeaderChecksum(
|
|
6785
|
-
preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
|
|
6786
|
-
if(prefcs != prenewfcs and not skipchecksum):
|
|
6787
|
-
VerbosePrintOut("File Header Checksum Error with file " +
|
|
6788
|
-
prefname + " at offset " + str(prefhstart))
|
|
6789
|
-
VerbosePrintOut("'" + prefcs + "' != " +
|
|
6790
|
-
"'" + prenewfcs + "'")
|
|
6791
|
-
return False
|
|
6792
|
-
if(prefjsonsize > 0):
|
|
6793
|
-
if(prejsonfcs != prefjsonchecksum and not skipchecksum):
|
|
6794
|
-
VerbosePrintOut("File JSON Data Checksum Error with file " +
|
|
6795
|
-
prefname + " at offset " + str(prefjstart))
|
|
6796
|
-
VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
|
|
6797
|
-
return False
|
|
6798
|
-
prefcontentstart = fp.tell()
|
|
6799
|
-
prefcontents = ""
|
|
6800
|
-
pyhascontents = False
|
|
6801
|
-
if(prefsize > 0):
|
|
6802
|
-
if(prefcompression):
|
|
6803
|
-
prefcontents = fp.read(prefsize)
|
|
6804
|
-
else:
|
|
6805
|
-
prefcontents = fp.read(prefcsize)
|
|
6806
|
-
prenewfccs = GetFileChecksum(
|
|
6807
|
-
prefcontents, preheaderdata[-3].lower(), False, formatspecs)
|
|
6808
|
-
pyhascontents = True
|
|
6809
|
-
if(prefccs != prenewfccs and not skipchecksum):
|
|
6810
|
-
VerbosePrintOut("File Content Checksum Error with file " +
|
|
6811
|
-
prefname + " at offset " + str(prefcontentstart))
|
|
6812
|
-
VerbosePrintOut("'" + prefccs +
|
|
6813
|
-
"' != " + "'" + prenewfccs + "'")
|
|
6814
|
-
return False
|
|
6815
|
-
if(re.findall("^\\+([0-9]+)", prefseeknextfile)):
|
|
6816
|
-
fseeknextasnum = int(prefseeknextfile.replace("+", ""))
|
|
6817
|
-
if(abs(fseeknextasnum) == 0):
|
|
6818
|
-
pass
|
|
6819
|
-
fp.seek(fseeknextasnum, 1)
|
|
6820
|
-
elif(re.findall("^\\-([0-9]+)", prefseeknextfile)):
|
|
6821
|
-
fseeknextasnum = int(prefseeknextfile)
|
|
6822
|
-
if(abs(fseeknextasnum) == 0):
|
|
6823
|
-
pass
|
|
6824
|
-
fp.seek(fseeknextasnum, 1)
|
|
6825
|
-
elif(re.findall("^([0-9]+)", prefseeknextfile)):
|
|
6826
|
-
fseeknextasnum = int(prefseeknextfile)
|
|
6827
|
-
if(abs(fseeknextasnum) == 0):
|
|
6828
|
-
pass
|
|
6829
|
-
fp.seek(fseeknextasnum, 0)
|
|
6830
|
-
else:
|
|
6831
|
-
return False
|
|
6832
|
-
il = il + 1
|
|
6833
|
-
fp.seek(seekstart, 0)
|
|
6834
|
-
fileidnum = il
|
|
6835
|
-
outfheadsize = int(preheaderdata[0], 16)
|
|
6836
|
-
outfnumfields = int(preheaderdata[1], 16)
|
|
6837
|
-
outftype = int(preheaderdata[2], 16)
|
|
6838
|
-
outfencoding = preheaderdata[3]
|
|
6839
|
-
if(re.findall("^[.|/]", preheaderdata[4])):
|
|
6840
|
-
outfname = preheaderdata[4]
|
|
6841
|
-
else:
|
|
6842
|
-
outfname = "./"+preheaderdata[4]
|
|
6843
|
-
outflinkname = preheaderdata[5]
|
|
6844
|
-
outfsize = int(preheaderdata[6], 16)
|
|
6845
|
-
outfbasedir = os.path.dirname(outfname)
|
|
6846
|
-
outlist = {'fid': fileidnum, 'foffset': fp.tell(), 'ftype': outftype, 'fencoding': outfencoding, 'fname': outfname,
|
|
6847
|
-
'fbasedir': outfbasedir, 'flinkname': outflinkname, 'fsize': outfsize}
|
|
6848
|
-
if(returnfp):
|
|
6849
|
-
outlist.update({'fp': fp})
|
|
6850
|
-
else:
|
|
6851
|
-
fp.close()
|
|
6852
|
-
return outlist
|
|
6853
|
-
|
|
6854
|
-
|
|
6855
|
-
def FoxFileSeekToFileName(infile, fmttype="auto", seekfile=None, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
6856
|
-
if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
|
|
6857
|
-
formatspecs = formatspecs[fmttype]
|
|
6858
|
-
elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
|
|
6859
|
-
fmttype = "auto"
|
|
6860
|
-
curloc = 0
|
|
6861
|
-
if(hasattr(infile, "read") or hasattr(infile, "write")):
|
|
6862
|
-
curloc = infile.tell()
|
|
6863
|
-
fp = infile
|
|
6864
|
-
fp.seek(0, 0)
|
|
6865
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
6866
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
|
|
6867
|
-
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
6868
|
-
formatspecs = formatspecs[checkcompressfile]
|
|
6869
|
-
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
6870
|
-
return TarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6871
|
-
elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
|
|
6872
|
-
return ZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6873
|
-
elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
|
|
6874
|
-
return RarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6875
|
-
elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
|
|
6876
|
-
return SevenZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6877
|
-
elif(IsSingleDict(formatspecs) and checkcompressfile != formatspecs['format_magic']):
|
|
6878
|
-
return False
|
|
6879
|
-
elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
|
|
6880
|
-
return False
|
|
6881
|
-
if(not fp):
|
|
6882
|
-
return False
|
|
6883
|
-
fp.seek(0, 0)
|
|
6884
|
-
elif(infile == "-"):
|
|
6885
|
-
fp = MkTempFile()
|
|
6886
|
-
if(hasattr(sys.stdin, "buffer")):
|
|
6887
|
-
shutil.copyfileobj(sys.stdin.buffer, fp)
|
|
6888
|
-
else:
|
|
6889
|
-
shutil.copyfileobj(sys.stdin, fp)
|
|
6890
|
-
fp.seek(0, 0)
|
|
6891
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
6892
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
|
|
6893
|
-
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
6894
|
-
formatspecs = formatspecs[checkcompressfile]
|
|
6895
|
-
if(not fp):
|
|
6896
|
-
return False
|
|
6897
|
-
fp.seek(0, 0)
|
|
6898
|
-
elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
|
|
6899
|
-
fp = MkTempFile()
|
|
6900
|
-
fp.write(infile)
|
|
6901
|
-
fp.seek(0, 0)
|
|
6902
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
6903
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
6904
|
-
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
6905
|
-
formatspecs = formatspecs[compresscheck]
|
|
6906
|
-
if(not fp):
|
|
6907
|
-
return False
|
|
6908
|
-
fp.seek(0, 0)
|
|
6909
|
-
elif(re.findall("^(http|https|ftp|ftps|sftp):\\/\\/", infile)):
|
|
6910
|
-
fp = download_file_from_internet_file(infile)
|
|
6911
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
6912
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
6913
|
-
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
6914
|
-
formatspecs = formatspecs[compresscheck]
|
|
6915
|
-
fp.seek(0, 0)
|
|
6916
|
-
if(not fp):
|
|
6917
|
-
return False
|
|
6918
|
-
fp.seek(0, 0)
|
|
6919
|
-
else:
|
|
6920
|
-
infile = RemoveWindowsPath(infile)
|
|
6921
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
6922
|
-
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
6923
|
-
formatspecs = formatspecs[checkcompressfile]
|
|
6924
|
-
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
6925
|
-
return TarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6926
|
-
elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
|
|
6927
|
-
return ZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6928
|
-
elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
|
|
6929
|
-
return RarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6930
|
-
elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
|
|
6931
|
-
return SevenZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6932
|
-
elif(IsSingleDict(formatspecs) and checkcompressfile != formatspecs['format_magic']):
|
|
6933
|
-
return False
|
|
6934
|
-
elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
|
|
6935
|
-
return False
|
|
6936
|
-
compresscheck = CheckCompressionType(infile, formatspecs, True)
|
|
6937
|
-
if(not compresscheck):
|
|
6938
|
-
fextname = os.path.splitext(infile)[1]
|
|
6939
|
-
if(fextname == ".gz"):
|
|
6940
|
-
compresscheck = "gzip"
|
|
6941
|
-
elif(fextname == ".bz2"):
|
|
6942
|
-
compresscheck = "bzip2"
|
|
6943
|
-
elif(fextname == ".zst"):
|
|
6944
|
-
compresscheck = "zstd"
|
|
6945
|
-
elif(fextname == ".lz4" or fextname == ".clz4"):
|
|
6946
|
-
compresscheck = "lz4"
|
|
6947
|
-
elif(fextname == ".lzo" or fextname == ".lzop"):
|
|
6948
|
-
compresscheck = "lzo"
|
|
6949
|
-
elif(fextname == ".lzma"):
|
|
6950
|
-
compresscheck = "lzma"
|
|
6951
|
-
elif(fextname == ".xz"):
|
|
6952
|
-
compresscheck = "xz"
|
|
6953
|
-
elif(fextname == ".zz" or fextname == ".zl" or fextname == ".zlib"):
|
|
6954
|
-
compresscheck = "zlib"
|
|
6955
|
-
else:
|
|
6956
|
-
return False
|
|
6957
|
-
if(not compresscheck):
|
|
6958
|
-
return False
|
|
6959
|
-
fp = UncompressFile(infile, formatspecs, "rb")
|
|
6960
|
-
try:
|
|
6961
|
-
fp.seek(0, 2);
|
|
6962
|
-
except OSError:
|
|
6963
|
-
SeekToEndOfFile(fp);
|
|
6964
|
-
except ValueError:
|
|
6965
|
-
SeekToEndOfFile(fp);
|
|
6966
|
-
CatSize = fp.tell();
|
|
6967
|
-
CatSizeEnd = CatSize;
|
|
6968
|
-
fp.seek(curloc, 0)
|
|
6969
|
-
if(curloc > 0):
|
|
6970
|
-
fp.seek(0, 0)
|
|
6971
|
-
if(IsNestedDict(formatspecs)):
|
|
6972
|
-
compresschecking = CheckCompressionType(fp, formatspecs, False)
|
|
6973
|
-
if(compresschecking not in formatspecs):
|
|
6974
|
-
return False
|
|
6975
|
-
else:
|
|
6976
|
-
formatspecs = formatspecs[compresschecking]
|
|
6977
|
-
fp.seek(0, 0)
|
|
6978
|
-
inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
|
|
6979
|
-
formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
|
|
6980
|
-
formdelszie = len(formatspecs['format_delimiter'])
|
|
6981
|
-
formdel = fp.read(formdelszie).decode("UTF-8")
|
|
6982
|
-
if(formstring != formatspecs['format_magic']+inheaderver):
|
|
6983
|
-
return False
|
|
6984
|
-
if(formdel != formatspecs['format_delimiter']):
|
|
6985
|
-
return False
|
|
6986
|
-
if(formatspecs['new_style']):
|
|
6987
|
-
inheader = ReadFileHeaderDataBySize(
|
|
6988
|
-
fp, formatspecs['format_delimiter'])
|
|
6989
|
-
else:
|
|
6990
|
-
inheader = ReadFileHeaderDataWoSize(
|
|
6991
|
-
fp, formatspecs['format_delimiter'])
|
|
6992
|
-
fnumextrafieldsize = int(inheader[5], 16)
|
|
6993
|
-
fnumextrafields = int(inheader[6], 16)
|
|
6994
|
-
fextrafieldslist = []
|
|
6995
|
-
extrastart = 7
|
|
6996
|
-
extraend = extrastart + fnumextrafields
|
|
6997
|
-
while(extrastart < extraend):
|
|
6998
|
-
fextrafieldslist.append(inheader[extrastart])
|
|
6999
|
-
extrastart = extrastart + 1
|
|
7000
|
-
if(fnumextrafields==1):
|
|
7001
|
-
try:
|
|
7002
|
-
fextrafieldslist = json.loads(base64.b64decode(fextrafieldslist[0]).decode("UTF-8"))
|
|
7003
|
-
fnumextrafields = len(fextrafieldslist)
|
|
7004
|
-
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
|
|
7005
|
-
try:
|
|
7006
|
-
fextrafieldslist = json.loads(fextrafieldslist[0])
|
|
7007
|
-
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
|
|
7008
|
-
pass
|
|
7009
|
-
if(curloc > 0):
|
|
7010
|
-
fp.seek(curloc, 0)
|
|
7011
|
-
formversion = re.findall("([\\d]+)", formstring)
|
|
7012
|
-
fheadsize = int(inheader[0], 16)
|
|
7013
|
-
fnumfields = int(inheader[1], 16)
|
|
7014
|
-
fhencoding = inheader[2]
|
|
7015
|
-
fostype = inheader[3]
|
|
7016
|
-
fnumfiles = int(inheader[4], 16)
|
|
7017
|
-
fprechecksumtype = inheader[-2]
|
|
7018
|
-
fprechecksum = inheader[-1]
|
|
7019
|
-
headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
|
|
7020
|
-
newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
|
|
7021
|
-
if(not headercheck and not skipchecksum):
|
|
7022
|
-
VerbosePrintOut(
|
|
7023
|
-
"File Header Checksum Error with file at offset " + str(0))
|
|
7024
|
-
VerbosePrintOut("'" + fprechecksum + "' != " +
|
|
7025
|
-
"'" + newfcs + "'")
|
|
7026
|
-
return False
|
|
7027
|
-
formversions = re.search('(.*?)(\\d+)', formstring).groups()
|
|
7028
|
-
fcompresstype = compresscheck
|
|
7029
|
-
if(fcompresstype==formatspecs['format_magic']):
|
|
7030
|
-
fcompresstype = ""
|
|
7031
|
-
outlist = {'fnumfiles': fnumfiles, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
|
|
7032
|
-
seekto = fnumfiles - 1
|
|
7033
|
-
filefound = False
|
|
7034
|
-
if(seekto >= 0):
|
|
7035
|
-
il = -1
|
|
7036
|
-
while (fp.tell() < CatSizeEnd) if seektoend else (il < seekto):
|
|
7037
|
-
prefhstart = fp.tell()
|
|
7038
|
-
if(formatspecs['new_style']):
|
|
7039
|
-
preheaderdata = ReadFileHeaderDataBySize(
|
|
7040
|
-
fp, formatspecs['format_delimiter'])
|
|
7041
|
-
else:
|
|
7042
|
-
preheaderdata = ReadFileHeaderDataWoSize(
|
|
7043
|
-
fp, formatspecs['format_delimiter'])
|
|
7044
|
-
if(len(preheaderdata) == 0):
|
|
7045
|
-
break
|
|
7046
|
-
prefheadsize = int(preheaderdata[0], 16)
|
|
7047
|
-
prefnumfields = int(preheaderdata[1], 16)
|
|
7048
|
-
preftype = int(preheaderdata[2], 16)
|
|
7049
|
-
prefencoding = preheaderdata[3]
|
|
7050
|
-
prefencoding = preheaderdata[4]
|
|
7051
|
-
if(re.findall("^[.|/]", preheaderdata[5])):
|
|
7052
|
-
prefname = preheaderdata[5]
|
|
7053
|
-
else:
|
|
7054
|
-
prefname = "./"+preheaderdata[5]
|
|
7055
|
-
prefbasedir = os.path.dirname(prefname)
|
|
7056
|
-
preflinkname = preheaderdata[6]
|
|
7057
|
-
prefsize = int(preheaderdata[7], 16)
|
|
7058
|
-
prefatime = int(preheaderdata[8], 16)
|
|
7059
|
-
prefmtime = int(preheaderdata[9], 16)
|
|
7060
|
-
prefctime = int(preheaderdata[10], 16)
|
|
7061
|
-
prefbtime = int(preheaderdata[11], 16)
|
|
7062
|
-
prefmode = int(preheaderdata[12], 16)
|
|
7063
|
-
prefchmode = stat.S_IMODE(prefmode)
|
|
7064
|
-
preftypemod = stat.S_IFMT(prefmode)
|
|
7065
|
-
prefwinattributes = int(preheaderdata[13], 16)
|
|
7066
|
-
prefcompression = preheaderdata[14]
|
|
7067
|
-
prefcsize = int(preheaderdata[15], 16)
|
|
7068
|
-
prefuid = int(preheaderdata[16], 16)
|
|
7069
|
-
prefuname = preheaderdata[17]
|
|
7070
|
-
prefgid = int(preheaderdata[18], 16)
|
|
7071
|
-
prefgname = preheaderdata[19]
|
|
7072
|
-
fid = int(preheaderdata[20], 16)
|
|
7073
|
-
finode = int(preheaderdata[21], 16)
|
|
7074
|
-
flinkcount = int(preheaderdata[22], 16)
|
|
7075
|
-
prefdev = int(preheaderdata[23], 16)
|
|
7076
|
-
prefdev_minor = int(preheaderdata[24], 16)
|
|
7077
|
-
prefdev_major = int(preheaderdata[25], 16)
|
|
7078
|
-
prefseeknextfile = preheaderdata[26]
|
|
7079
|
-
prefjsontype = preheaderdata[27]
|
|
7080
|
-
prefjsonlen = int(preheaderdata[28], 16)
|
|
7081
|
-
prefjsonsize = int(preheaderdata[29], 16)
|
|
7082
|
-
prefjsonchecksumtype = preheaderdata[30]
|
|
7083
|
-
prefjsonchecksum = preheaderdata[31]
|
|
7084
|
-
prefhend = fp.tell() - 1
|
|
7085
|
-
prefjstart = fp.tell()
|
|
7086
|
-
prefjoutfprejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
|
|
7087
|
-
prefjend = fp.tell()
|
|
7088
|
-
fp.seek(len(formatspecs['format_delimiter']), 1)
|
|
7089
|
-
prejsonfcs = GetFileChecksum(prefjoutfprejsoncontent, prefjsonchecksumtype, True, formatspecs)
|
|
7090
|
-
prefextrasize = int(preheaderdata[32], 16)
|
|
7091
|
-
prefextrafields = int(preheaderdata[33], 16)
|
|
7092
|
-
extrastart = 34
|
|
7093
|
-
extraend = extrastart + prefextrafields
|
|
7094
|
-
prefcs = preheaderdata[-2].lower()
|
|
7095
|
-
prenewfcs = preheaderdata[-1].lower()
|
|
7096
|
-
prenewfcs = GetHeaderChecksum(
|
|
7097
|
-
preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
|
|
7098
|
-
if(prefcs != prenewfcs and not skipchecksum):
|
|
7099
|
-
VerbosePrintOut("File Header Checksum Error with file " +
|
|
7100
|
-
prefname + " at offset " + str(prefhstart))
|
|
7101
|
-
VerbosePrintOut("'" + prefcs + "' != " +
|
|
7102
|
-
"'" + prenewfcs + "'")
|
|
7103
|
-
return False
|
|
7104
|
-
if(prefjsonsize > 0):
|
|
7105
|
-
if(prejsonfcs != prefjsonchecksum and not skipchecksum):
|
|
7106
|
-
VerbosePrintOut("File JSON Data Checksum Error with file " +
|
|
7107
|
-
prefname + " at offset " + str(prefjstart))
|
|
7108
|
-
VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
|
|
7109
|
-
return False
|
|
7110
|
-
prefcontentstart = fp.tell()
|
|
7111
|
-
prefcontents = ""
|
|
7112
|
-
pyhascontents = False
|
|
7113
|
-
if(prefsize > 0):
|
|
7114
|
-
if(prefcompression):
|
|
7115
|
-
prefcontents = fp.read(prefsize)
|
|
7116
|
-
else:
|
|
7117
|
-
prefcontents = fp.read(prefcsize)
|
|
7118
|
-
prenewfccs = GetFileChecksum(
|
|
7119
|
-
prefcontents, preheaderdata[-3].lower(), False, formatspecs)
|
|
7120
|
-
pyhascontents = True
|
|
7121
|
-
if(prefccs != prenewfccs and not skipchecksum):
|
|
7122
|
-
VerbosePrintOut("File Content Checksum Error with file " +
|
|
7123
|
-
prefname + " at offset " + str(prefcontentstart))
|
|
7124
|
-
VerbosePrintOut("'" + prefccs +
|
|
7125
|
-
"' != " + "'" + prenewfccs + "'")
|
|
7126
|
-
return False
|
|
7127
|
-
if(re.findall("^\\+([0-9]+)", prefseeknextfile)):
|
|
7128
|
-
fseeknextasnum = int(prefseeknextfile.replace("+", ""))
|
|
7129
|
-
if(abs(fseeknextasnum) == 0):
|
|
7130
|
-
pass
|
|
7131
|
-
fp.seek(fseeknextasnum, 1)
|
|
7132
|
-
elif(re.findall("^\\-([0-9]+)", prefseeknextfile)):
|
|
7133
|
-
fseeknextasnum = int(prefseeknextfile)
|
|
7134
|
-
if(abs(fseeknextasnum) == 0):
|
|
7135
|
-
pass
|
|
7136
|
-
fp.seek(fseeknextasnum, 1)
|
|
7137
|
-
elif(re.findall("^([0-9]+)", prefseeknextfile)):
|
|
7138
|
-
fseeknextasnum = int(prefseeknextfile)
|
|
7139
|
-
if(abs(fseeknextasnum) == 0):
|
|
7140
|
-
pass
|
|
7141
|
-
fp.seek(fseeknextasnum, 0)
|
|
7142
|
-
else:
|
|
7143
|
-
return False
|
|
7144
|
-
il = il + 1
|
|
7145
|
-
filefound = False
|
|
7146
|
-
if(prefname == seekfile):
|
|
7147
|
-
filefound = True
|
|
7148
|
-
break
|
|
7149
|
-
fp.seek(seekstart, 0)
|
|
7150
|
-
fileidnum = il
|
|
7151
|
-
outfheadsize = int(preheaderdata[0], 16)
|
|
7152
|
-
- outfnumfields = int(preheaderdata[1], 16)
- outftype = int(preheaderdata[2], 16)
- outfencoding = preheaderdata[3]
- if(re.findall("^[.|/]", preheaderdata[4])):
- outfname = preheaderdata[4]
- else:
- outfname = "./"+preheaderdata[4]
- outflinkname = preheaderdata[5]
- outfsize = int(preheaderdata[6], 16)
- outfbasedir = os.path.dirname(outfname)
- if(filefound):
- outlist = {'fid': fileidnum, 'foffset': fp.tell(), 'ftype': outftype, 'fencoding': outfencoding, 'fname': outfname,
- 'fbasedir': outfbasedir, 'flinkname': outflinkname, 'fsize': outfsize}
- else:
- return False
- if(returnfp):
- outlist.update({'fp': fp})
- else:
- fp.close()
- return outlist
-
-
- def FoxFileValidate(infile, fmttype="auto", formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
- if(verbose):
- logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
- if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
- formatspecs = formatspecs[fmttype]
- elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
- fmttype = "auto"
- curloc = 0
- if(hasattr(infile, "read") or hasattr(infile, "write")):
- curloc = infile.tell()
- fp = infile
- fp.seek(0, 0)
- fp = UncompressFileAlt(fp, formatspecs)
- checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
- if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
- formatspecs = formatspecs[checkcompressfile]
- if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
- return TarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
- elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
- return ZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
- elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
- return RarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
- elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
- return SevenZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
- elif(IsSingleDict(formatspecs) and checkcompressfile != formatspecs['format_magic']):
- return False
- elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
- return False
- if(not fp):
- return False
- fp.seek(0, 0)
- elif(infile == "-"):
- fp = MkTempFile()
- if(hasattr(sys.stdin, "buffer")):
- shutil.copyfileobj(sys.stdin.buffer, fp)
- else:
- shutil.copyfileobj(sys.stdin, fp)
- fp.seek(0, 0)
- fp = UncompressFileAlt(fp, formatspecs)
- checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
- if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
- formatspecs = formatspecs[checkcompressfile]
- if(not fp):
- return False
- fp.seek(0, 0)
- elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
- fp = MkTempFile()
- fp.write(infile)
- fp.seek(0, 0)
- fp = UncompressFileAlt(fp, formatspecs)
- compresscheck = CheckCompressionType(fp, formatspecs, False)
- if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
- formatspecs = formatspecs[compresscheck]
- if(not fp):
- return False
- fp.seek(0, 0)
- elif(re.findall("^(http|https|ftp|ftps|sftp):\\/\\/", infile)):
- fp = download_file_from_internet_file(infile)
- fp = UncompressFileAlt(fp, formatspecs)
- compresscheck = CheckCompressionType(fp, formatspecs, False)
- if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
- formatspecs = formatspecs[compresscheck]
- fp.seek(0, 0)
- if(not fp):
- return False
- fp.seek(0, 0)
- else:
- infile = RemoveWindowsPath(infile)
- checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
- if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
- formatspecs = formatspecs[checkcompressfile]
- if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
- return TarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
- elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
- return ZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
- elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
- return RarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
- elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
- return SevenZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
- elif(IsSingleDict(formatspecs) and checkcompressfile != formatspecs['format_magic']):
- return False
- elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
- return False
- compresscheck = CheckCompressionType(infile, formatspecs, True)
- if(not compresscheck):
- fextname = os.path.splitext(infile)[1]
- if(fextname == ".gz"):
- compresscheck = "gzip"
- elif(fextname == ".bz2"):
- compresscheck = "bzip2"
- elif(fextname == ".zst"):
- compresscheck = "zstd"
- elif(fextname == ".lz4" or fextname == ".clz4"):
- compresscheck = "lz4"
- elif(fextname == ".lzo" or fextname == ".lzop"):
- compresscheck = "lzo"
- elif(fextname == ".lzma"):
- compresscheck = "lzma"
- elif(fextname == ".xz"):
- compresscheck = "xz"
- elif(fextname == ".zz" or fextname == ".zl" or fextname == ".zlib"):
- compresscheck = "zlib"
- else:
- return False
- if(not compresscheck):
- return False
- fp = UncompressFile(infile, formatspecs, "rb")
- try:
- fp.seek(0, 2);
- except OSError:
- SeekToEndOfFile(fp);
- except ValueError:
- SeekToEndOfFile(fp);
- CatSize = fp.tell();
- CatSizeEnd = CatSize;
- fp.seek(curloc, 0)
- if(curloc > 0):
- fp.seek(0, 0)
- if(IsNestedDict(formatspecs)):
- compresschecking = CheckCompressionType(fp, formatspecs, False)
- if(compresschecking not in formatspecs):
- return False
- else:
- formatspecs = formatspecs[compresschecking]
- fp.seek(0, 0)
- inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
- formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
- formdelszie = len(formatspecs['format_delimiter'])
- formdel = fp.read(formdelszie).decode("UTF-8")
- if(formstring != formatspecs['format_magic']+inheaderver):
- return False
- if(formdel != formatspecs['format_delimiter']):
- return False
- if(formatspecs['new_style']):
- inheader = ReadFileHeaderDataBySize(
- fp, formatspecs['format_delimiter'])
- else:
- inheader = ReadFileHeaderDataWoSize(
- fp, formatspecs['format_delimiter'])
- fnumextrafieldsize = int(inheader[5], 16)
- fnumextrafields = int(inheader[6], 16)
- extrastart = 7
- extraend = extrastart + fnumextrafields
- if(curloc > 0):
- fp.seek(curloc, 0)
- formversion = re.findall("([\\d]+)", formstring)
- fheadsize = int(inheader[0], 16)
- fnumfields = int(inheader[1], 16)
- fhencoding = inheader[2]
- fostype = inheader[3]
- fnumfiles = int(inheader[4], 16)
- fprechecksumtype = inheader[-2]
- fprechecksum = inheader[-1]
- il = 0
- headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
- newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
- valid_archive = True
- invalid_archive = False
- if(verbose):
- if(hasattr(infile, "read") or hasattr(infile, "write")):
- try:
- VerbosePrintOut(infile.name)
- except AttributeError:
- pass
- elif(sys.version_info[0] >= 3 and isinstance(infile, bytes)):
- pass
- else:
- VerbosePrintOut(infile)
- VerbosePrintOut("Number of Records " + str(fnumfiles))
- if(headercheck):
- if(verbose):
- VerbosePrintOut("File Header Checksum Passed at offset " + str(0))
- VerbosePrintOut("'" + fprechecksum + "' == " +
- "'" + newfcs + "'")
+ valid_archive = True
+ invalid_archive = False
+ if(verbose):
+ if(hasattr(infile, "read") or hasattr(infile, "write")):
+ try:
+ VerbosePrintOut(infile.name)
+ except AttributeError:
+ pass
+ elif(sys.version_info[0] >= 3 and isinstance(infile, bytes)):
+ pass
+ else:
+ VerbosePrintOut(infile)
+ VerbosePrintOut("Number of Records " + str(fnumfiles))
+ if(headercheck):
+ if(verbose):
+ VerbosePrintOut("File Header Checksum Passed at offset " + str(0))
+ VerbosePrintOut("'" + fprechecksum + "' == " +
+ "'" + newfcs + "'")
else:
if(verbose):
VerbosePrintOut("File Header Checksum Failed at offset " + str(0))
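The hunks that follow thread a new `filestart` offset through the reader functions so an archive embedded at a byte offset inside a larger file can be read in place. A minimal usage sketch, assuming the reworked `FoxFileToArray` signature shown in the next hunk; the file name and offset below are made-up examples, not values from this package.

```python
import pyfoxfile

# Hypothetical container: the archive is assumed to start 512 bytes into
# container.bin rather than at the beginning of the file.
archive = pyfoxfile.FoxFileToArray(
    "container.bin",        # infile: path, file object, bytes, or URL
    fmttype="auto",          # let the library detect the container format
    filestart=512,           # new in 0.22.x: byte offset where the archive begins
    seekstart=0, seekend=0,  # read every record
    listonly=False,          # keep file contents, not just metadata
)
if archive:
    print(archive['fnumfiles'], "records, archive starts at", archive['ffilestart'])
```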
@@ -7525,18 +7090,18 @@ def FoxFileValidateMultiple(infile, fmttype="auto", formatspecs=__file_format_mu
|
|
|
7525
7090
|
def FoxFileValidateMultipleFiles(infile, fmttype="auto", formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
7526
7091
|
return FoxFileValidateMultiple(infile, fmttype, formatspecs, verbose, returnfp)
|
|
7527
7092
|
|
|
7528
|
-
def FoxFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
7093
|
+
def FoxFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
7529
7094
|
if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
|
|
7530
7095
|
formatspecs = formatspecs[fmttype]
|
|
7531
7096
|
elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
|
|
7532
7097
|
fmttype = "auto"
|
|
7533
|
-
curloc =
|
|
7098
|
+
curloc = filestart
|
|
7534
7099
|
if(hasattr(infile, "read") or hasattr(infile, "write")):
|
|
7535
7100
|
curloc = infile.tell()
|
|
7536
7101
|
fp = infile
|
|
7537
|
-
fp.seek(
|
|
7538
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
7539
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
|
|
7102
|
+
fp.seek(filestart, 0)
|
|
7103
|
+
fp = UncompressFileAlt(fp, formatspecs, filestart)
|
|
7104
|
+
checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
|
|
7540
7105
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
7541
7106
|
formatspecs = formatspecs[checkcompressfile]
|
|
7542
7107
|
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
@@ -7553,45 +7118,45 @@ def FoxFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
|
|
|
7553
7118
|
return False
|
|
7554
7119
|
if(not fp):
|
|
7555
7120
|
return False
|
|
7556
|
-
fp.seek(
|
|
7121
|
+
fp.seek(filestart, 0)
|
|
7557
7122
|
elif(infile == "-"):
|
|
7558
7123
|
fp = MkTempFile()
|
|
7559
7124
|
if(hasattr(sys.stdin, "buffer")):
|
|
7560
7125
|
shutil.copyfileobj(sys.stdin.buffer, fp)
|
|
7561
7126
|
else:
|
|
7562
7127
|
shutil.copyfileobj(sys.stdin, fp)
|
|
7563
|
-
fp.seek(
|
|
7564
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
7565
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
|
|
7128
|
+
fp.seek(filestart, 0)
|
|
7129
|
+
fp = UncompressFileAlt(fp, formatspecs, filestart)
|
|
7130
|
+
checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
|
|
7566
7131
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
7567
7132
|
formatspecs = formatspecs[checkcompressfile]
|
|
7568
7133
|
if(not fp):
|
|
7569
7134
|
return False
|
|
7570
|
-
fp.seek(
|
|
7135
|
+
fp.seek(filestart, 0)
|
|
7571
7136
|
elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
|
|
7572
7137
|
fp = MkTempFile()
|
|
7573
7138
|
fp.write(infile)
|
|
7574
|
-
fp.seek(
|
|
7575
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
7576
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
7139
|
+
fp.seek(filestart, 0)
|
|
7140
|
+
fp = UncompressFileAlt(fp, formatspecs, filestart)
|
|
7141
|
+
compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
|
|
7577
7142
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
7578
7143
|
formatspecs = formatspecs[compresscheck]
|
|
7579
7144
|
if(not fp):
|
|
7580
7145
|
return False
|
|
7581
|
-
fp.seek(
|
|
7582
|
-
elif(re.findall(
|
|
7146
|
+
fp.seek(filestart, 0)
|
|
7147
|
+
elif(re.findall(__download_proto_support__, infile)):
|
|
7583
7148
|
fp = download_file_from_internet_file(infile)
|
|
7584
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
7585
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
7149
|
+
fp = UncompressFileAlt(fp, formatspecs, filestart)
|
|
7150
|
+
compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
|
|
7586
7151
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
7587
7152
|
formatspecs = formatspecs[compresscheck]
|
|
7588
|
-
fp.seek(
|
|
7153
|
+
fp.seek(filestart, 0)
|
|
7589
7154
|
if(not fp):
|
|
7590
7155
|
return False
|
|
7591
|
-
fp.seek(
|
|
7156
|
+
fp.seek(filestart, 0)
|
|
7592
7157
|
else:
|
|
7593
7158
|
infile = RemoveWindowsPath(infile)
|
|
7594
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
7159
|
+
checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
|
|
7595
7160
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
7596
7161
|
formatspecs = formatspecs[checkcompressfile]
|
|
7597
7162
|
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
@@ -7606,7 +7171,7 @@ def FoxFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
|
|
|
7606
7171
|
return False
|
|
7607
7172
|
elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
|
|
7608
7173
|
return False
|
|
7609
|
-
compresscheck = CheckCompressionType(infile, formatspecs, True)
|
|
7174
|
+
compresscheck = CheckCompressionType(infile, formatspecs, filestart, True)
|
|
7610
7175
|
if(not compresscheck):
|
|
7611
7176
|
fextname = os.path.splitext(infile)[1]
|
|
7612
7177
|
if(fextname == ".gz"):
|
|
@@ -7629,25 +7194,23 @@ def FoxFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
|
|
|
7629
7194
|
return False
|
|
7630
7195
|
if(not compresscheck):
|
|
7631
7196
|
return False
|
|
7632
|
-
fp = UncompressFile(infile, formatspecs, "rb")
|
|
7197
|
+
fp = UncompressFile(infile, formatspecs, "rb", filestart)
|
|
7633
7198
|
try:
|
|
7634
|
-
fp.seek(0, 2)
|
|
7199
|
+
fp.seek(0, 2)
|
|
7635
7200
|
except OSError:
|
|
7636
|
-
SeekToEndOfFile(fp)
|
|
7201
|
+
SeekToEndOfFile(fp)
|
|
7637
7202
|
except ValueError:
|
|
7638
|
-
SeekToEndOfFile(fp)
|
|
7639
|
-
CatSize = fp.tell()
|
|
7203
|
+
SeekToEndOfFile(fp)
|
|
7204
|
+
CatSize = fp.tell()
|
|
7640
7205
|
CatSizeEnd = CatSize;
|
|
7641
7206
|
fp.seek(curloc, 0)
|
|
7642
|
-
if(curloc > 0):
|
|
7643
|
-
fp.seek(0, 0)
|
|
7644
7207
|
if(IsNestedDict(formatspecs)):
|
|
7645
|
-
compresschecking = CheckCompressionType(fp, formatspecs, False)
|
|
7208
|
+
compresschecking = CheckCompressionType(fp, formatspecs, filestart, False)
|
|
7646
7209
|
if(compresschecking not in formatspecs):
|
|
7647
7210
|
return False
|
|
7648
7211
|
else:
|
|
7649
7212
|
formatspecs = formatspecs[compresschecking]
|
|
7650
|
-
fp.seek(
|
|
7213
|
+
fp.seek(filestart, 0)
|
|
7651
7214
|
inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
|
|
7652
7215
|
formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
|
|
7653
7216
|
formdelszie = len(formatspecs['format_delimiter'])
|
|
@@ -7679,8 +7242,6 @@ def FoxFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
|
|
|
7679
7242
|
fextrafieldslist = json.loads(fextrafieldslist[0])
|
|
7680
7243
|
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
|
|
7681
7244
|
pass
|
|
7682
|
-
if(curloc > 0):
|
|
7683
|
-
fp.seek(curloc, 0)
|
|
7684
7245
|
formversion = re.findall("([\\d]+)", formstring)
|
|
7685
7246
|
fheadsize = int(inheader[0], 16)
|
|
7686
7247
|
fnumfields = int(inheader[1], 16)
|
|
@@ -7701,7 +7262,7 @@ def FoxFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
|
|
|
7701
7262
|
fcompresstype = compresscheck
|
|
7702
7263
|
if(fcompresstype==formatspecs['format_magic']):
|
|
7703
7264
|
fcompresstype = ""
|
|
7704
|
-
outlist = {'fnumfiles': fnumfiles, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
|
|
7265
|
+
outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
|
|
7705
7266
|
if (seekstart < 0) or (seekstart > fnumfiles):
|
|
7706
7267
|
seekstart = 0
|
|
7707
7268
|
if (seekend == 0) or (seekend > fnumfiles) or (seekend < seekstart):
|
|
@@ -7945,7 +7506,7 @@ def FoxFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
|
|
|
7945
7506
|
outfcontents.seek(0, 0)
|
|
7946
7507
|
if(uncompress):
|
|
7947
7508
|
cfcontents = UncompressFileAlt(
|
|
7948
|
-
outfcontents, formatspecs)
|
|
7509
|
+
outfcontents, formatspecs, 0)
|
|
7949
7510
|
cfcontents.seek(0, 0)
|
|
7950
7511
|
outfcontents = MkTempFile()
|
|
7951
7512
|
shutil.copyfileobj(cfcontents, outfcontents)
|
|
@@ -7990,49 +7551,49 @@ def FoxFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
|
|
|
7990
7551
|
return outlist
|
|
7991
7552
|
|
|
7992
7553
|
|
|
7993
|
-
def MultipleFoxFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
7554
|
+
def MultipleFoxFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
7994
7555
|
if(isinstance(infile, (list, tuple, ))):
|
|
7995
7556
|
pass
|
|
7996
7557
|
else:
|
|
7997
7558
|
infile = [infile]
|
|
7998
7559
|
outretval = {}
|
|
7999
7560
|
for curfname in infile:
|
|
8000
|
-
curretfile[curfname] = FoxFileToArray(curfname, fmttype, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7561
|
+
curretfile[curfname] = FoxFileToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
|
|
8001
7562
|
return outretval
|
|
8002
7563
|
|
|
8003
|
-
def MultipleFoxFilesToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
8004
|
-
return MultipleFoxFileToArray(infile, fmttype, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7564
|
+
def MultipleFoxFilesToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
7565
|
+
return MultipleFoxFileToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
|
|
8005
7566
|
|
|
8006
7567
|
|
|
8007
|
-
def FoxFileStringToArray(instr, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
8008
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
7568
|
+
def FoxFileStringToArray(instr, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
7569
|
+
checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
|
|
8009
7570
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
8010
7571
|
formatspecs = formatspecs[checkcompressfile]
|
|
8011
7572
|
fp = MkTempFile(instr)
|
|
8012
|
-
|
|
8013
|
-
return
|
|
7573
|
+
listarrayfiles = FoxFileToArray(fp, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7574
|
+
return listarrayfiles
|
|
8014
7575
|
|
|
8015
7576
|
|
|
8016
7577
|
def TarFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False, returnfp=False):
|
|
8017
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
7578
|
+
checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
|
|
8018
7579
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
8019
7580
|
formatspecs = formatspecs[checkcompressfile]
|
|
8020
7581
|
fp = MkTempFile()
|
|
8021
7582
|
fp = PackFoxFileFromTarFile(
|
|
8022
7583
|
infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
|
|
8023
|
-
|
|
8024
|
-
return
|
|
7584
|
+
listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7585
|
+
return listarrayfiles
|
|
8025
7586
|
|
|
8026
7587
|
|
|
8027
7588
|
def ZipFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False, returnfp=False):
|
|
8028
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
7589
|
+
checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
|
|
8029
7590
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
8030
7591
|
formatspecs = formatspecs[checkcompressfile]
|
|
8031
7592
|
fp = MkTempFile()
|
|
8032
7593
|
fp = PackFoxFileFromZipFile(
|
|
8033
7594
|
infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
|
|
8034
|
-
|
|
8035
|
-
return
|
|
7595
|
+
listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7596
|
+
return listarrayfiles
|
|
8036
7597
|
|
|
8037
7598
|
|
|
8038
7599
|
if(not rarfile_support):
|
|
@@ -8041,14 +7602,14 @@ if(not rarfile_support):
|
|
|
8041
7602
|
|
|
8042
7603
|
if(rarfile_support):
|
|
8043
7604
|
def RarFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False, returnfp=False):
|
|
8044
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
7605
|
+
checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
|
|
8045
7606
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
8046
7607
|
formatspecs = formatspecs[checkcompressfile]
|
|
8047
7608
|
fp = MkTempFile()
|
|
8048
7609
|
fp = PackFoxFileFromRarFile(
|
|
8049
7610
|
infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
|
|
8050
|
-
|
|
8051
|
-
return
|
|
7611
|
+
listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7612
|
+
return listarrayfiles
|
|
8052
7613
|
|
|
8053
7614
|
if(not py7zr_support):
|
|
8054
7615
|
def SevenZipFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False, returnfp=False):
|
|
@@ -8056,18 +7617,18 @@ if(not py7zr_support):
|
|
|
8056
7617
|
|
|
8057
7618
|
if(py7zr_support):
|
|
8058
7619
|
def SevenZipFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False, returnfp=False):
|
|
8059
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
7620
|
+
checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
|
|
8060
7621
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
8061
7622
|
formatspecs = formatspecs[checkcompressfile]
|
|
8062
7623
|
fp = MkTempFile()
|
|
8063
7624
|
fp = PackFoxFileFromSevenZipFile(
|
|
8064
7625
|
infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
|
|
8065
|
-
|
|
8066
|
-
return
|
|
7626
|
+
listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7627
|
+
return listarrayfiles
|
|
8067
7628
|
|
|
8068
7629
|
|
|
8069
|
-
def InFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
8070
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
7630
|
+
def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
7631
|
+
checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
|
|
8071
7632
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
8072
7633
|
formatspecs = formatspecs[checkcompressfile]
|
|
8073
7634
|
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
@@ -8079,78 +7640,78 @@ def InFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=
|
|
|
8079
7640
|
elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
|
|
8080
7641
|
return SevenZipFileToArray(infile, seekstart, seekend, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
8081
7642
|
elif(checkcompressfile == formatspecs['format_magic']):
|
|
8082
|
-
return FoxFileToArray(infile, "auto", seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7643
|
+
return FoxFileToArray(infile, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
8083
7644
|
else:
|
|
8084
7645
|
return False
|
|
8085
7646
|
return False
|
|
8086
7647
|
|
|
8087
7648
|
|
|
8088
|
-
def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, seekstart=0, seekend=0, listonly=False, skipchecksum=False, checksumtype=["crc32", "crc32", "crc32"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
|
|
7649
|
+
def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, skipchecksum=False, checksumtype=["crc32", "crc32", "crc32"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
|
|
8089
7650
|
outarray = MkTempFile()
|
|
8090
7651
|
packform = PackFoxFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
|
|
8091
7652
|
compressionlevel, followlink, checksumtype, extradata, formatspecs, verbose, True)
|
|
8092
|
-
|
|
8093
|
-
return
|
|
7653
|
+
listarrayfiles = FoxFileToArray(outarray, "auto", filestart, seekstart, seekend, listonly, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7654
|
+
return listarrayfiles
|
|
8094
7655
|
|
|
8095
7656
|
|
|
8096
7657
|
def FoxFileArrayToArrayIndex(inarray, returnfp=False):
|
|
8097
7658
|
if(isinstance(inarray, dict)):
|
|
8098
|
-
|
|
7659
|
+
listarrayfiles = inarray
|
|
8099
7660
|
else:
|
|
8100
7661
|
return False
|
|
8101
|
-
if(not
|
|
7662
|
+
if(not listarrayfiles):
|
|
8102
7663
|
return False
|
|
8103
|
-
outarray = {'list':
|
|
7664
|
+
outarray = {'list': listarrayfiles, 'filetoid': {}, 'idtofile': {}, 'filetypes': {'directories': {'filetoid': {}, 'idtofile': {}}, 'files': {'filetoid': {}, 'idtofile': {}}, 'links': {'filetoid': {}, 'idtofile': {}}, 'symlinks': {'filetoid': {
|
|
8104
7665
|
}, 'idtofile': {}}, 'hardlinks': {'filetoid': {}, 'idtofile': {}}, 'character': {'filetoid': {}, 'idtofile': {}}, 'block': {'filetoid': {}, 'idtofile': {}}, 'fifo': {'filetoid': {}, 'idtofile': {}}, 'devices': {'filetoid': {}, 'idtofile': {}}}}
|
|
8105
7666
|
if(returnfp):
|
|
8106
|
-
outarray.update({'fp':
|
|
8107
|
-
lenlist = len(
|
|
7667
|
+
outarray.update({'fp': listarrayfiles['fp']})
|
|
7668
|
+
lenlist = len(listarrayfiles['ffilelist'])
|
|
8108
7669
|
lcfi = 0
|
|
8109
|
-
lcfx = int(
|
|
8110
|
-
if(lenlist >
|
|
7670
|
+
lcfx = int(listarrayfiles['fnumfiles'])
|
|
7671
|
+
if(lenlist > listarrayfiles['fnumfiles'] or lenlist < listarrayfiles['fnumfiles']):
|
|
8111
7672
|
lcfx = int(lenlist)
|
|
8112
7673
|
else:
|
|
8113
|
-
lcfx = int(
|
|
7674
|
+
lcfx = int(listarrayfiles['fnumfiles'])
|
|
8114
7675
|
while(lcfi < lcfx):
|
|
8115
|
-
filetoidarray = {
|
|
8116
|
-
['fname']:
|
|
8117
|
-
idtofilearray = {
|
|
8118
|
-
['fid']:
|
|
7676
|
+
filetoidarray = {listarrayfiles['ffilelist'][lcfi]
|
|
7677
|
+
['fname']: listarrayfiles['ffilelist'][lcfi]['fid']}
|
|
7678
|
+
idtofilearray = {listarrayfiles['ffilelist'][lcfi]
|
|
7679
|
+
['fid']: listarrayfiles['ffilelist'][lcfi]['fname']}
|
|
8119
7680
|
outarray['filetoid'].update(filetoidarray)
|
|
8120
7681
|
outarray['idtofile'].update(idtofilearray)
|
|
8121
|
-
if(
|
|
7682
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 0 or listarrayfiles['ffilelist'][lcfi]['ftype'] == 7):
|
|
8122
7683
|
outarray['filetypes']['files']['filetoid'].update(filetoidarray)
|
|
8123
7684
|
outarray['filetypes']['files']['idtofile'].update(idtofilearray)
|
|
8124
|
-
if(
|
|
7685
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 1):
|
|
8125
7686
|
outarray['filetypes']['hardlinks']['filetoid'].update(
|
|
8126
7687
|
filetoidarray)
|
|
8127
7688
|
outarray['filetypes']['hardlinks']['idtofile'].update(
|
|
8128
7689
|
idtofilearray)
|
|
8129
7690
|
outarray['filetypes']['links']['filetoid'].update(filetoidarray)
|
|
8130
7691
|
outarray['filetypes']['links']['idtofile'].update(idtofilearray)
|
|
8131
|
-
if(
|
|
7692
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 2):
|
|
8132
7693
|
outarray['filetypes']['symlinks']['filetoid'].update(filetoidarray)
|
|
8133
7694
|
outarray['filetypes']['symlinks']['idtofile'].update(idtofilearray)
|
|
8134
7695
|
outarray['filetypes']['links']['filetoid'].update(filetoidarray)
|
|
8135
7696
|
outarray['filetypes']['links']['idtofile'].update(idtofilearray)
|
|
8136
|
-
if(
|
|
7697
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 3):
|
|
8137
7698
|
outarray['filetypes']['character']['filetoid'].update(
|
|
8138
7699
|
filetoidarray)
|
|
8139
7700
|
outarray['filetypes']['character']['idtofile'].update(
|
|
8140
7701
|
idtofilearray)
|
|
8141
7702
|
outarray['filetypes']['devices']['filetoid'].update(filetoidarray)
|
|
8142
7703
|
outarray['filetypes']['devices']['idtofile'].update(idtofilearray)
|
|
8143
|
-
if(
|
|
7704
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 4):
|
|
8144
7705
|
outarray['filetypes']['block']['filetoid'].update(filetoidarray)
|
|
8145
7706
|
outarray['filetypes']['block']['idtofile'].update(idtofilearray)
|
|
8146
7707
|
outarray['filetypes']['devices']['filetoid'].update(filetoidarray)
|
|
8147
7708
|
outarray['filetypes']['devices']['idtofile'].update(idtofilearray)
|
|
8148
|
-
if(
|
|
7709
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 5):
|
|
8149
7710
|
outarray['filetypes']['directories']['filetoid'].update(
|
|
8150
7711
|
filetoidarray)
|
|
8151
7712
|
outarray['filetypes']['directories']['idtofile'].update(
|
|
8152
7713
|
idtofilearray)
|
|
8153
|
-
if(
|
|
7714
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 6):
|
|
8154
7715
|
outarray['filetypes']['symlinks']['filetoid'].update(filetoidarray)
|
|
8155
7716
|
outarray['filetypes']['symlinks']['idtofile'].update(idtofilearray)
|
|
8156
7717
|
outarray['filetypes']['devices']['filetoid'].update(filetoidarray)
|
|
@@ -8159,13 +7720,13 @@ def FoxFileArrayToArrayIndex(inarray, returnfp=False):
|
|
|
8159
7720
|
return outarray
|
|
8160
7721
|
|
|
8161
7722
|
|
|
8162
|
-
def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
|
|
7723
|
+
def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
|
|
8163
7724
|
if(isinstance(infile, dict)):
|
|
8164
|
-
|
|
7725
|
+
listarrayfiles = infile
|
|
8165
7726
|
else:
|
|
8166
7727
|
if(infile != "-" and not isinstance(infile, bytes) and not hasattr(infile, "read") and not hasattr(infile, "write")):
|
|
8167
7728
|
infile = RemoveWindowsPath(infile)
|
|
8168
|
-
|
|
7729
|
+
listarrayfiles = FoxFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
8169
7730
|
if(IsNestedDict(formatspecs) and fmttype in formatspecs):
|
|
8170
7731
|
formatspecs = formatspecs[fmttype]
|
|
8171
7732
|
elif(IsNestedDict(formatspecs) and fmttype not in formatspecs):
|
|
@@ -8191,14 +7752,14 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8191
7752
|
os.unlink(outfile)
|
|
8192
7753
|
except OSError:
|
|
8193
7754
|
pass
|
|
8194
|
-
if(not
|
|
7755
|
+
if(not listarrayfiles):
|
|
8195
7756
|
return False
|
|
8196
7757
|
if(outfile == "-" or outfile is None):
|
|
8197
7758
|
verbose = False
|
|
8198
7759
|
fp = MkTempFile()
|
|
8199
7760
|
elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
|
|
8200
7761
|
fp = outfile
|
|
8201
|
-
elif(re.findall(
|
|
7762
|
+
elif(re.findall(__upload_proto_support__, outfile)):
|
|
8202
7763
|
fp = MkTempFile()
|
|
8203
7764
|
else:
|
|
8204
7765
|
fbasename = os.path.splitext(outfile)[0]
|
|
@@ -8211,19 +7772,19 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8211
7772
|
return False
|
|
8212
7773
|
formver = formatspecs['format_ver']
|
|
8213
7774
|
fileheaderver = str(int(formver.replace(".", "")))
|
|
8214
|
-
lenlist = len(
|
|
8215
|
-
fnumfiles = int(
|
|
7775
|
+
lenlist = len(listarrayfiles['ffilelist'])
|
|
7776
|
+
fnumfiles = int(listarrayfiles['fnumfiles'])
|
|
8216
7777
|
if(lenlist > fnumfiles or lenlist < fnumfiles):
|
|
8217
7778
|
fnumfiles = lenlist
|
|
8218
|
-
AppendFileHeader(fp, fnumfiles,
|
|
8219
|
-
lenlist = len(
|
|
8220
|
-
fnumfiles = int(
|
|
7779
|
+
AppendFileHeader(fp, fnumfiles, listarrayfiles['fencoding'], [], checksumtype[0], formatspecs)
|
|
7780
|
+
lenlist = len(listarrayfiles['ffilelist'])
|
|
7781
|
+
fnumfiles = int(listarrayfiles['fnumfiles'])
|
|
8221
7782
|
lcfi = 0
|
|
8222
|
-
lcfx = int(
|
|
8223
|
-
if(lenlist >
|
|
7783
|
+
lcfx = int(listarrayfiles['fnumfiles'])
|
|
7784
|
+
if(lenlist > listarrayfiles['fnumfiles'] or lenlist < listarrayfiles['fnumfiles']):
|
|
8224
7785
|
lcfx = int(lenlist)
|
|
8225
7786
|
else:
|
|
8226
|
-
lcfx = int(
|
|
7787
|
+
lcfx = int(listarrayfiles['fnumfiles'])
|
|
8227
7788
|
curinode = 0
|
|
8228
7789
|
curfid = 0
|
|
8229
7790
|
inodelist = []
|
|
@@ -8231,66 +7792,66 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8231
7792
|
filetoinode = {}
|
|
8232
7793
|
reallcfi = 0
|
|
8233
7794
|
while(lcfi < lcfx):
|
|
8234
|
-
fencoding =
|
|
8235
|
-
fcencoding =
|
|
8236
|
-
if(re.findall("^[.|/]",
|
|
8237
|
-
fname =
|
|
7795
|
+
fencoding = listarrayfiles['ffilelist'][reallcfi]['fencoding']
|
|
7796
|
+
fcencoding = listarrayfiles['ffilelist'][reallcfi]['fencoding']
|
|
7797
|
+
if(re.findall("^[.|/]", listarrayfiles['ffilelist'][reallcfi]['fname'])):
|
|
7798
|
+
fname = listarrayfiles['ffilelist'][reallcfi]['fname']
|
|
8238
7799
|
else:
|
|
8239
|
-
fname = "./"+
|
|
7800
|
+
fname = "./"+listarrayfiles['ffilelist'][reallcfi]['fname']
|
|
8240
7801
|
if(verbose):
|
|
8241
7802
|
VerbosePrintOut(fname)
|
|
8242
7803
|
fheadersize = format(
|
|
8243
|
-
int(
|
|
7804
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fheadersize']), 'x').lower()
|
|
8244
7805
|
fsize = format(
|
|
8245
|
-
int(
|
|
8246
|
-
flinkname =
|
|
7806
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fsize']), 'x').lower()
|
|
7807
|
+
flinkname = listarrayfiles['ffilelist'][reallcfi]['flinkname']
|
|
8247
7808
|
fatime = format(
|
|
8248
|
-
int(
|
|
7809
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fatime']), 'x').lower()
|
|
8249
7810
|
fmtime = format(
|
|
8250
|
-
int(
|
|
7811
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fmtime']), 'x').lower()
|
|
8251
7812
|
fctime = format(
|
|
8252
|
-
int(
|
|
7813
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fctime']), 'x').lower()
|
|
8253
7814
|
fbtime = format(
|
|
8254
|
-
int(
|
|
7815
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fbtime']), 'x').lower()
|
|
8255
7816
|
fmode = format(
|
|
8256
|
-
int(
|
|
7817
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fmode']), 'x').lower()
|
|
8257
7818
|
fchmode = format(
|
|
8258
|
-
int(
|
|
7819
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fchmode']), 'x').lower()
|
|
8259
7820
|
fuid = format(
|
|
8260
|
-
int(
|
|
8261
|
-
funame =
|
|
7821
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fuid']), 'x').lower()
|
|
7822
|
+
funame = listarrayfiles['ffilelist'][reallcfi]['funame']
|
|
8262
7823
|
fgid = format(
|
|
8263
|
-
int(
|
|
8264
|
-
fgname =
|
|
7824
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fgid']), 'x').lower()
|
|
7825
|
+
fgname = listarrayfiles['ffilelist'][reallcfi]['fgname']
|
|
8265
7826
|
finode = format(
|
|
8266
|
-
int(
|
|
7827
|
+
int(listarrayfiles['ffilelist'][reallcfi]['finode']), 'x').lower()
|
|
8267
7828
|
flinkcount = format(
|
|
8268
|
-
int(
|
|
7829
|
+
int(listarrayfiles['ffilelist'][reallcfi]['flinkcount']), 'x').lower()
|
|
8269
7830
|
fwinattributes = format(
|
|
8270
|
-
int(
|
|
8271
|
-
fcompression =
|
|
7831
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fwinattributes']), 'x').lower()
|
|
7832
|
+
fcompression = listarrayfiles['ffilelist'][reallcfi]['fcompression']
|
|
8272
7833
|
fcsize = format(
|
|
8273
|
-
int(
|
|
7834
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fcsize']), 'x').lower()
|
|
8274
7835
|
fdev = format(
|
|
8275
|
-
int(
|
|
7836
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fdev']), 'x').lower()
|
|
8276
7837
|
fdev_minor = format(
|
|
8277
|
-
int(
|
|
7838
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fminor']), 'x').lower()
|
|
8278
7839
|
fdev_major = format(
|
|
8279
|
-
int(
|
|
8280
|
-
fseeknextfile =
|
|
8281
|
-
if(len(
|
|
8282
|
-
|
|
8283
|
-
|
|
7840
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fmajor']), 'x').lower()
|
|
7841
|
+
fseeknextfile = listarrayfiles['ffilelist'][reallcfi]['fseeknextfile']
|
|
7842
|
+
if(len(listarrayfiles['ffilelist'][reallcfi]['fextralist']) > listarrayfiles['ffilelist'][reallcfi]['fextrafields'] and len(listarrayfiles['ffilelist'][reallcfi]['fextralist']) > 0):
|
|
7843
|
+
listarrayfiles['ffilelist'][reallcfi]['fextrafields'] = len(
|
|
7844
|
+
listarrayfiles['ffilelist'][reallcfi]['fextralist'])
|
|
8284
7845
|
if(not followlink and len(extradata) <= 0):
|
|
8285
|
-
extradata =
|
|
7846
|
+
extradata = listarrayfiles['ffilelist'][reallcfi]['fextralist']
|
|
8286
7847
|
if(not followlink and len(jsondata) <= 0):
|
|
8287
|
-
jsondata =
|
|
8288
|
-
fcontents =
|
|
8289
|
-
if(not
|
|
7848
|
+
jsondata = listarrayfiles['ffilelist'][reallcfi]['fjsondata']
|
|
7849
|
+
fcontents = listarrayfiles['ffilelist'][reallcfi]['fcontents']
|
|
7850
|
+
if(not listarrayfiles['ffilelist'][reallcfi]['fcontentasfile']):
|
|
8290
7851
|
fcontents = MkTempFile(fcontents)
|
|
8291
|
-
typechecktest = CheckCompressionType(fcontents, closefp=False)
|
|
7852
|
+
typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
|
|
8292
7853
|
fcontents.seek(0, 0)
|
|
8293
|
-
fcencoding = GetFileEncoding(fcontents, False)
|
|
7854
|
+
fcencoding = GetFileEncoding(fcontents, 0, False)
|
|
8294
7855
|
fcompression = ""
|
|
8295
7856
|
fcsize = format(int(0), 'x').lower()
|
|
8296
7857
|
curcompression = "none"
|
|
@@ -8333,10 +7894,10 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8333
7894
|
fcontents.close()
|
|
8334
7895
|
fcontents = cfcontents
|
|
8335
7896
|
if followlink:
|
|
8336
|
-
if(
|
|
8337
|
-
getflinkpath =
|
|
8338
|
-
flinkid =
|
|
8339
|
-
flinkinfo =
|
|
7897
|
+
if(listarrayfiles['ffilelist'][reallcfi]['ftype'] == 1 or listarrayfiles['ffilelist'][reallcfi]['ftype'] == 2):
|
|
7898
|
+
getflinkpath = listarrayfiles['ffilelist'][reallcfi]['flinkname']
|
|
7899
|
+
flinkid = prelistarrayfiles['filetoid'][getflinkpath]
|
|
7900
|
+
flinkinfo = listarrayfiles['ffilelist'][flinkid]
|
|
8340
7901
|
fheadersize = format(
|
|
8341
7902
|
int(flinkinfo['fheadersize']), 'x').lower()
|
|
8342
7903
|
fsize = format(int(flinkinfo['fsize']), 'x').lower()
|
|
@@ -8373,10 +7934,10 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8373
7934
|
ftypehex = format(flinkinfo['ftype'], 'x').lower()
|
|
8374
7935
|
else:
|
|
8375
7936
|
ftypehex = format(
|
|
8376
|
-
|
|
7937
|
+
listarrayfiles['ffilelist'][reallcfi]['ftype'], 'x').lower()
|
|
8377
7938
|
fcurfid = format(curfid, 'x').lower()
|
|
8378
7939
|
if(not followlink and finode != 0):
|
|
8379
|
-
if(
|
|
7940
|
+
if(listarrayfiles['ffilelist'][reallcfi]['ftype'] != 1):
|
|
8380
7941
|
fcurinode = format(int(curinode), 'x').lower()
|
|
8381
7942
|
inodetofile.update({curinode: fname})
|
|
8382
7943
|
filetoinode.update({fname: curinode})
|
|
@@ -8426,7 +7987,7 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8426
7987
|
outvar = fp.read()
|
|
8427
7988
|
fp.close()
|
|
8428
7989
|
return outvar
|
|
8429
|
-
elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
|
|
7990
|
+
elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
|
|
8430
7991
|
fp = CompressOpenFileAlt(
|
|
8431
7992
|
fp, compression, compressionlevel, compressionuselist, formatspecs)
|
|
8432
7993
|
fp.seek(0, 0)
|
|
@@ -8439,50 +8000,50 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8439
8000
|
return True
|
|
8440
8001
|
|
|
8441
8002
|
|
|
8442
|
-
def RePackFoxFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
|
|
8003
|
+
def RePackFoxFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
|
|
8443
8004
|
fp = MkTempFile(instr)
|
|
8444
|
-
|
|
8445
|
-
checksumtype, skipchecksum, extradata, formatspecs, verbose, returnfp)
|
|
8446
|
-
return
|
|
8005
|
+
listarrayfiles = RePackFoxFile(fp, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
|
|
8006
|
+
checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
|
|
8007
|
+
return listarrayfiles
|
|
8447
8008
|
|
|
8448
8009
|
|
|
8449
|
-
def PackFoxFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False,
|
|
8010
|
+
def PackFoxFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
|
|
8450
8011
|
outarray = MkTempFile()
|
|
8451
8012
|
packform = PackFoxFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
|
|
8452
8013
|
compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, True)
|
|
8453
|
-
|
|
8454
|
-
|
|
8455
|
-
return
|
|
8014
|
+
listarrayfiles = RePackFoxFile(outarray, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
|
|
8015
|
+
checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
|
|
8016
|
+
return listarrayfiles
|
|
8456
8017
|
|
|
8457
8018
|
|
|
8458
|
-
def UnPackFoxFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, preservepermissions=True, preservetime=True, seektoend=False, verbose=False, returnfp=False):
|
|
8019
|
+
def UnPackFoxFile(infile, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, preservepermissions=True, preservetime=True, seektoend=False, verbose=False, returnfp=False):
|
|
8459
8020
|
if(outdir is not None):
|
|
8460
8021
|
outdir = RemoveWindowsPath(outdir)
|
|
8461
8022
|
if(verbose):
|
|
8462
8023
|
logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
|
|
8463
8024
|
if(isinstance(infile, dict)):
|
|
8464
|
-
|
|
8025
|
+
listarrayfiles = infile
|
|
8465
8026
|
else:
|
|
8466
8027
|
if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
|
|
8467
8028
|
infile = RemoveWindowsPath(infile)
|
|
8468
|
-
|
|
8469
|
-
if(not
|
|
8029
|
+
listarrayfiles = FoxFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
8030
|
+
if(not listarrayfiles):
|
|
8470
8031
|
return False
|
|
8471
|
-
lenlist = len(
|
|
8472
|
-
fnumfiles = int(
|
|
8032
|
+
lenlist = len(listarrayfiles['ffilelist'])
|
|
8033
|
+
fnumfiles = int(listarrayfiles['fnumfiles'])
|
|
8473
8034
|
lcfi = 0
|
|
8474
|
-
lcfx = int(
|
|
8475
|
-
if(lenlist >
|
|
8035
|
+
lcfx = int(listarrayfiles['fnumfiles'])
|
|
8036
|
+
if(lenlist > listarrayfiles['fnumfiles'] or lenlist < listarrayfiles['fnumfiles']):
|
|
8476
8037
|
lcfx = int(lenlist)
|
|
8477
8038
|
else:
|
|
8478
|
-
lcfx = int(
|
|
8039
|
+
lcfx = int(listarrayfiles['fnumfiles'])
|
|
8479
8040
|
while(lcfi < lcfx):
|
|
8480
8041
|
funame = ""
|
|
8481
8042
|
try:
|
|
8482
8043
|
import pwd
|
|
8483
8044
|
try:
|
|
8484
8045
|
userinfo = pwd.getpwuid(
|
|
8485
|
-
|
|
8046
|
+
listarrayfiles['ffilelist'][lcfi]['fuid'])
|
|
8486
8047
|
funame = userinfo.pw_name
|
|
8487
8048
|
except KeyError:
|
|
8488
8049
|
funame = ""
|
|
@@ -8493,7 +8054,7 @@ def UnPackFoxFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0,
|
|
|
8493
8054
|
import grp
|
|
8494
8055
|
try:
|
|
8495
8056
|
groupinfo = grp.getgrgid(
|
|
8496
|
-
|
|
8057
|
+
listarrayfiles['ffilelist'][lcfi]['fgid'])
|
|
8497
8058
|
fgname = groupinfo.gr_name
|
|
8498
8059
|
except KeyError:
|
|
8499
8060
|
fgname = ""
|
|
@@ -8501,15 +8062,15 @@ def UnPackFoxFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0,
|
|
|
8501
8062
|
fgname = ""
|
|
8502
8063
|
if(verbose):
|
|
8503
8064
|
VerbosePrintOut(PrependPath(
|
|
8504
|
-
outdir,
|
|
8505
|
-
if(
|
|
8506
|
-
with open(PrependPath(outdir,
|
|
8507
|
-
if(not
|
|
8508
|
-
|
|
8509
|
-
|
|
8510
|
-
|
|
8065
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
|
|
8066
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 0 or listarrayfiles['ffilelist'][lcfi]['ftype'] == 7):
|
|
8067
|
+
with open(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), "wb") as fpc:
|
|
8068
|
+
if(not listarrayfiles['ffilelist'][lcfi]['fcontentasfile']):
|
|
8069
|
+
listarrayfiles['ffilelist'][lcfi]['fcontents'] = MkTempFile(
|
|
8070
|
+
listarrayfiles['ffilelist'][lcfi]['fcontents'])
|
|
8071
|
+
listarrayfiles['ffilelist'][lcfi]['fcontents'].seek(0, 0)
|
|
8511
8072
|
shutil.copyfileobj(
|
|
8512
|
-
|
|
8073
|
+
listarrayfiles['ffilelist'][lcfi]['fcontents'], fpc)
|
|
8513
8074
|
try:
|
|
8514
8075
|
fpc.flush()
|
|
8515
8076
|
if(hasattr(os, "sync")):
|
|
@@ -8520,20 +8081,20 @@ def UnPackFoxFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0,
|
|
|
8520
8081
|
pass
|
|
8521
8082
|
except OSError:
|
|
8522
8083
|
pass
|
|
8523
|
-
if(hasattr(os, "chown") and funame ==
|
|
8524
|
-
os.chown(PrependPath(outdir,
|
|
8525
|
-
|
|
8084
|
+
if(hasattr(os, "chown") and funame == listarrayfiles['ffilelist'][lcfi]['funame'] and fgname == listarrayfiles['ffilelist'][lcfi]['fgname'] and preservepermissions):
|
|
8085
|
+
os.chown(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']),
|
|
8086
|
+
listarrayfiles['ffilelist'][lcfi]['fuid'], listarrayfiles['ffilelist'][lcfi]['fgid'])
|
|
8526
8087
|
if(preservepermissions):
|
|
8527
8088
|
os.chmod(PrependPath(
|
|
8528
|
-
outdir,
|
|
8089
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), listarrayfiles['ffilelist'][lcfi]['fchmode'])
|
|
8529
8090
|
if(preservetime):
|
|
8530
|
-
os.utime(PrependPath(outdir,
|
|
8531
|
-
|
|
8532
|
-
if(
|
|
8091
|
+
os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
|
|
8092
|
+
listarrayfiles['ffilelist'][lcfi]['fatime'], listarrayfiles['ffilelist'][lcfi]['fmtime']))
|
|
8093
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 1):
|
|
8533
8094
|
if(followlink):
|
|
8534
|
-
getflinkpath =
|
|
8535
|
-
flinkid =
|
|
8536
|
-
flinkinfo =
|
|
8095
|
+
getflinkpath = listarrayfiles['ffilelist'][lcfi]['flinkname']
|
|
8096
|
+
flinkid = prelistarrayfiles['filetoid'][getflinkpath]
|
|
8097
|
+
flinkinfo = listarrayfiles['ffilelist'][flinkid]
|
|
8537
8098
|
funame = ""
|
|
8538
8099
|
try:
|
|
8539
8100
|
import pwd
|
|
@@ -8555,7 +8116,7 @@ def UnPackFoxFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0,
|
|
|
8555
8116
|
except ImportError:
|
|
8556
8117
|
fgname = ""
|
|
8557
8118
|
if(flinkinfo['ftype'] == 0 or flinkinfo['ftype'] == 7):
|
|
8558
|
-
with open(PrependPath(outdir,
|
|
8119
|
+
with open(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), "wb") as fpc:
|
|
8559
8120
|
if(not flinkinfo['fcontentasfile']):
|
|
8560
8121
|
flinkinfo['fcontents'] = MkTempFile(
|
|
8561
8122
|
flinkinfo['fcontents'])
|
|
@@ -8573,46 +8134,46 @@ def UnPackFoxFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0,
|
|
|
8573
8134
|
pass
|
|
8574
8135
|
if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
|
|
8575
8136
|
os.chown(PrependPath(
|
|
8576
|
-
outdir,
|
|
8137
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fuid'], flinkinfo['fgid'])
|
|
8577
8138
|
if(preservepermissions):
|
|
8578
8139
|
os.chmod(PrependPath(
|
|
8579
|
-
outdir,
|
|
8140
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
|
|
8580
8141
|
if(preservetime):
|
|
8581
|
-
os.utime(PrependPath(outdir,
|
|
8142
|
+
os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
|
|
8582
8143
|
flinkinfo['fatime'], flinkinfo['fmtime']))
|
|
8583
8144
|
if(flinkinfo['ftype'] == 1):
|
|
8584
8145
|
os.link(flinkinfo['flinkname'], PrependPath(
|
|
8585
|
-
outdir,
|
|
8146
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
|
|
8586
8147
|
if(flinkinfo['ftype'] == 2):
|
|
8587
8148
|
os.symlink(flinkinfo['flinkname'], PrependPath(
|
|
8588
|
-
outdir,
|
|
8149
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
|
|
8589
8150
|
if(flinkinfo['ftype'] == 5):
|
|
8590
8151
|
if(preservepermissions):
|
|
8591
8152
|
os.mkdir(PrependPath(
|
|
8592
|
-
outdir,
|
|
8153
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
|
|
8593
8154
|
else:
|
|
8594
8155
|
os.mkdir(PrependPath(
|
|
8595
|
-
outdir,
|
|
8156
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
|
|
8596
8157
|
if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
|
|
8597
8158
|
os.chown(PrependPath(
|
|
8598
|
-
outdir,
|
|
8159
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fuid'], flinkinfo['fgid'])
|
|
8599
8160
|
if(preservepermissions):
|
|
8600
8161
|
os.chmod(PrependPath(
|
|
8601
|
-
outdir,
|
|
8162
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
|
|
8602
8163
|
if(preservetime):
|
|
8603
|
-
os.utime(PrependPath(outdir,
|
|
8164
|
+
os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
|
|
8604
8165
|
flinkinfo['fatime'], flinkinfo['fmtime']))
|
|
8605
8166
|
if(flinkinfo['ftype'] == 6 and hasattr(os, "mkfifo")):
|
|
8606
8167
|
os.mkfifo(PrependPath(
|
|
8607
|
-
outdir,
|
|
8168
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
|
|
8608
8169
|
else:
|
|
8609
|
-
os.link(
|
|
8610
|
-
outdir,
|
|
8611
|
-
if(
|
|
8170
|
+
os.link(listarrayfiles['ffilelist'][lcfi]['flinkname'], PrependPath(
|
|
8171
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
|
|
8172
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 2):
|
|
8612
8173
|
if(followlink):
|
|
8613
|
-
getflinkpath =
|
|
8614
|
-
flinkid =
|
|
8615
|
-
flinkinfo =
|
|
8174
|
+
getflinkpath = listarrayfiles['ffilelist'][lcfi]['flinkname']
|
|
8175
|
+
flinkid = prelistarrayfiles['filetoid'][getflinkpath]
|
|
8176
|
+
flinkinfo = listarrayfiles['ffilelist'][flinkid]
|
|
8616
8177
|
funame = ""
|
|
8617
8178
|
try:
|
|
8618
8179
|
import pwd
|
|
@@ -8634,7 +8195,7 @@ def UnPackFoxFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0,
|
|
|
8634
8195
|
except ImportError:
|
|
8635
8196
|
fgname = ""
|
|
8636
8197
|
if(flinkinfo['ftype'] == 0 or flinkinfo['ftype'] == 7):
|
|
8637
|
-
with open(PrependPath(outdir,
|
|
8198
|
+
with open(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), "wb") as fpc:
|
|
8638
8199
|
if(not flinkinfo['fcontentasfile']):
|
|
8639
8200
|
flinkinfo['fcontents'] = MkTempFile(
|
|
8640
8201
|
flinkinfo['fcontents'])
|
|
@@ -8652,71 +8213,71 @@ def UnPackFoxFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0,
  pass
  if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
  os.chown(PrependPath(
- outdir,
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fuid'], flinkinfo['fgid'])
  if(preservepermissions):
  os.chmod(PrependPath(
- outdir,
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
  if(preservetime):
- os.utime(PrependPath(outdir,
+ os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
  flinkinfo['fatime'], flinkinfo['fmtime']))
  if(flinkinfo['ftype'] == 1):
  os.link(flinkinfo['flinkname'], PrependPath(
- outdir,
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
  if(flinkinfo['ftype'] == 2):
  os.symlink(flinkinfo['flinkname'], PrependPath(
- outdir,
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
  if(flinkinfo['ftype'] == 5):
  if(preservepermissions):
  os.mkdir(PrependPath(
- outdir,
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
  else:
  os.mkdir(PrependPath(
- outdir,
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
  if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
  os.chown(PrependPath(
- outdir,
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fuid'], flinkinfo['fgid'])
  if(preservepermissions):
  os.chmod(PrependPath(
- outdir,
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
  if(preservetime):
- os.utime(PrependPath(outdir,
+ os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
  flinkinfo['fatime'], flinkinfo['fmtime']))
  if(flinkinfo['ftype'] == 6 and hasattr(os, "mkfifo")):
  os.mkfifo(PrependPath(
- outdir,
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
  else:
- os.symlink(
- outdir,
- if(
+ os.symlink(listarrayfiles['ffilelist'][lcfi]['flinkname'], PrependPath(
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
+ if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 5):
  if(preservepermissions):
  os.mkdir(PrependPath(
- outdir,
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']), listarrayfiles['ffilelist'][lcfi]['fchmode'])
  else:
  os.mkdir(PrependPath(
- outdir,
- if(hasattr(os, "chown") and funame ==
- os.chown(PrependPath(outdir,
-
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
+ if(hasattr(os, "chown") and funame == listarrayfiles['ffilelist'][lcfi]['funame'] and fgname == listarrayfiles['ffilelist'][lcfi]['fgname'] and preservepermissions):
+ os.chown(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']),
+ listarrayfiles['ffilelist'][lcfi]['fuid'], listarrayfiles['ffilelist'][lcfi]['fgid'])
  if(preservepermissions):
  os.chmod(PrependPath(
- outdir,
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']), listarrayfiles['ffilelist'][lcfi]['fchmode'])
  if(preservetime):
- os.utime(PrependPath(outdir,
-
- if(
+ os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
+ listarrayfiles['ffilelist'][lcfi]['fatime'], listarrayfiles['ffilelist'][lcfi]['fmtime']))
+ if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 6 and hasattr(os, "mkfifo")):
  os.mkfifo(PrependPath(
- outdir,
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']), listarrayfiles['ffilelist'][lcfi]['fchmode'])
  lcfi = lcfi + 1
  if(returnfp):
- return
+ return listarrayfiles['ffilelist']['fp']
  else:
  return True


  def UnPackFoxFileString(instr, outdir=None, followlink=False, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
  fp = MkTempFile(instr)
-
- return
+ listarrayfiles = UnPackFoxFile(fp, outdir, followlink, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
+ return listarrayfiles

  def ftype_to_str(ftype):
  mapping = {
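In the rewritten hunk above, UnPackFoxFileString now wraps the input string in an in-memory file via MkTempFile and delegates to UnPackFoxFile instead of returning nothing. A minimal standalone sketch of that wrap-and-delegate pattern, using io.BytesIO in place of this module's MkTempFile helper, might look like:

import io

def unpack_from_bytes(data, unpack_func, *args, **kwargs):
    # Wrap the raw archive bytes in a seekable file object and hand it
    # to the file-based unpacker, passing its return value straight through.
    fp = io.BytesIO(data)
    return unpack_func(fp, *args, **kwargs)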
@@ -8733,64 +8294,64 @@ def ftype_to_str(ftype):
  # Default to "file" if unknown
  return mapping.get(ftype, "file")

- def FoxFileListFiles(infile, fmttype="auto", seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
+ def FoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
  if(verbose):
  logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
  if(isinstance(infile, dict)):
-
+ listarrayfiles = infile
  else:
  if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
  infile = RemoveWindowsPath(infile)
-
- if(not
+ listarrayfiles = FoxFileToArray(infile, fmttype, filestart, seekstart, seekend, True, False, False, skipchecksum, formatspecs, seektoend, returnfp)
+ if(not listarrayfiles):
  return False
- lenlist = len(
- fnumfiles = int(
+ lenlist = len(listarrayfiles['ffilelist'])
+ fnumfiles = int(listarrayfiles['fnumfiles'])
  lcfi = 0
- lcfx = int(
- if(lenlist >
+ lcfx = int(listarrayfiles['fnumfiles'])
+ if(lenlist > listarrayfiles['fnumfiles'] or lenlist < listarrayfiles['fnumfiles']):
  lcfx = int(lenlist)
  else:
- lcfx = int(
+ lcfx = int(listarrayfiles['fnumfiles'])
  returnval = {}
  while(lcfi < lcfx):
- returnval.update({lcfi:
+ returnval.update({lcfi: listarrayfiles['ffilelist'][lcfi]['fname']})
  if(not verbose):
- VerbosePrintOut(
+ VerbosePrintOut(listarrayfiles['ffilelist'][lcfi]['fname'])
  if(verbose):
  permissions = {'access': {'0': ('---'), '1': ('--x'), '2': ('-w-'), '3': ('-wx'), '4': (
  'r--'), '5': ('r-x'), '6': ('rw-'), '7': ('rwx')}, 'roles': {0: 'owner', 1: 'group', 2: 'other'}}
- printfname =
- if(
- printfname =
- " link to " +
- if(
- printfname =
- " -> " +
- fuprint =
+ printfname = listarrayfiles['ffilelist'][lcfi]['fname']
+ if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 1):
+ printfname = listarrayfiles['ffilelist'][lcfi]['fname'] + \
+ " link to " + listarrayfiles['ffilelist'][lcfi]['flinkname']
+ if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 2):
+ printfname = listarrayfiles['ffilelist'][lcfi]['fname'] + \
+ " -> " + listarrayfiles['ffilelist'][lcfi]['flinkname']
+ fuprint = listarrayfiles['ffilelist'][lcfi]['funame']
  if(len(fuprint) <= 0):
- fuprint =
- fgprint =
+ fuprint = listarrayfiles['ffilelist'][lcfi]['fuid']
+ fgprint = listarrayfiles['ffilelist'][lcfi]['fgname']
  if(len(fgprint) <= 0):
- fgprint =
+ fgprint = listarrayfiles['ffilelist'][lcfi]['fgid']
  if(newstyle):
- VerbosePrintOut(ftype_to_str(
-
+ VerbosePrintOut(ftype_to_str(listarrayfiles['ffilelist'][lcfi]['ftype']) + "\t" + listarrayfiles['ffilelist'][lcfi]['fcompression'] + "\t" + str(
+ listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + "\t" + printfname)
  else:
- VerbosePrintOut(PrintPermissionString(
-
+ VerbosePrintOut(PrintPermissionString(listarrayfiles['ffilelist'][lcfi]['fmode'], listarrayfiles['ffilelist'][lcfi]['ftype']) + " " + str(fuprint) + "/" + str(fgprint) + " " + str(
+ listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + " " + datetime.datetime.utcfromtimestamp(listarrayfiles['ffilelist'][lcfi]['fmtime']).strftime('%Y-%m-%d %H:%M') + " " + printfname)
  lcfi = lcfi + 1
  if(returnfp):
- return
+ return listarrayfiles['fp']
  else:
  return True


  def FoxFileStringListFiles(instr, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
  fp = MkTempFile(instr)
-
+ listarrayfiles = FoxFileListFiles(
  instr, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, returnfp)
- return
+ return listarrayfiles


  def TarFileListFiles(infile, verbose=False, returnfp=False):
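FoxFileListFiles gains a filestart argument ahead of seekstart and now fills its listing lines from each ffilelist record. The rjust(15)-padded line it builds can be reproduced in isolation roughly as follows (the entry dict is a made-up stand-in for a real record, not output from this module):

import datetime

def format_listing_line(entry, newstyle=False):
    # entry mirrors the keys the diff reads: fname, ftype, fsize,
    # fmtime and fcompression.
    if newstyle:
        return "\t".join([str(entry['ftype']), entry['fcompression'],
                          str(entry['fsize']).rjust(15), entry['fname']])
    mtime = datetime.datetime.utcfromtimestamp(entry['fmtime']).strftime('%Y-%m-%d %H:%M')
    return " ".join([str(entry['fsize']).rjust(15), mtime, entry['fname']])

print(format_listing_line({'ftype': 0, 'fcompression': 'none', 'fsize': 1024,
                           'fmtime': 0, 'fname': 'example.txt'}, newstyle=True))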
@@ -8806,7 +8367,7 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
  if(not infile):
  return False
  infile.seek(0, 0)
- elif(re.findall(
+ elif(re.findall(__download_proto_support__, infile)):
  infile = download_file_from_internet_file(infile)
  infile.seek(0, 0)
  if(not infile):
@@ -8830,7 +8391,7 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
  return False
  try:
  if(hasattr(infile, "read") or hasattr(infile, "write")):
- compresscheck = CheckCompressionType(infile, formatspecs, False)
+ compresscheck = CheckCompressionType(infile, formatspecs, 0, False)
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
  formatspecs = formatspecs[compresscheck]
  if(compresscheck=="zstd"):
@@ -8842,7 +8403,7 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
  else:
  tarfp = tarfile.open(fileobj=infile, mode="r")
  else:
- compresscheck = CheckCompressionType(infile, formatspecs, True)
+ compresscheck = CheckCompressionType(infile, formatspecs, 0, True)
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
  formatspecs = formatspecs[compresscheck]
  if(compresscheck=="zstd"):
@@ -8913,7 +8474,7 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
  member.size).rjust(15) + " " + datetime.datetime.utcfromtimestamp(member.mtime).strftime('%Y-%m-%d %H:%M') + " " + printfname)
  lcfi = lcfi + 1
  if(returnfp):
- return
+ return listarrayfiles['fp']
  else:
  return True
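Both CheckCompressionType call sites above now pass an extra positional 0 before the final boolean, matching the filestart offset that other updated helpers accept. As a hedged illustration of a detector that sniffs magic bytes at a caller-supplied offset (not this module's actual implementation), one could write:

def sniff_compression(fileobj, offset=0):
    # Read a few magic bytes starting at the requested offset and map
    # them to a compression name; None means "unknown".
    magics = {
        b"\x1f\x8b": "gzip",
        b"BZh": "bzip2",
        b"\xfd7zXZ\x00": "xz",
        b"\x28\xb5\x2f\xfd": "zstd",
    }
    pos = fileobj.tell()
    fileobj.seek(offset)
    header = fileobj.read(8)
    fileobj.seek(pos)
    for magic, name in magics.items():
        if header.startswith(magic):
            return name
    return None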
@@ -8931,7 +8492,7 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
  if(not infile):
  return False
  infile.seek(0, 0)
- elif(re.findall(
+ elif(re.findall(__download_proto_support__, infile)):
  infile = download_file_from_internet_file(infile)
  infile.seek(0, 0)
  if(not infile):
@@ -9046,7 +8607,7 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
  15) + " " + datetime.datetime.utcfromtimestamp(int(time.mktime(member.date_time + (0, 0, -1)))).strftime('%Y-%m-%d %H:%M') + " " + printfname)
  lcfi = lcfi + 1
  if(returnfp):
- return
+ return listarrayfiles['fp']
  else:
  return True
@@ -9184,7 +8745,7 @@ if(rarfile_support):
  member.file_size).rjust(15) + " " + member.mtime.strftime('%Y-%m-%d %H:%M') + " " + printfname)
  lcfi = lcfi + 1
  if(returnfp):
- return
+ return listarrayfiles['fp']
  else:
  return True
@@ -9202,7 +8763,7 @@ if(py7zr_support):
  returnval = {}
  szpfp = py7zr.SevenZipFile(infile, mode="r")
  file_content = szpfp.readall()
- #sztest = szpfp.testzip()
+ #sztest = szpfp.testzip()
  sztestalt = szpfp.test()
  if(sztestalt):
  VerbosePrintOut("Bad file found!")
@@ -9291,7 +8852,7 @@ if(py7zr_support):
  fsize).rjust(15) + " " + member.creationtime.strftime('%Y-%m-%d %H:%M') + " " + printfname)
  lcfi = lcfi + 1
  if(returnfp):
- return
+ return listarrayfiles['fp']
  else:
  return True
@@ -9299,7 +8860,7 @@ if(py7zr_support):
  def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict__, seektoend=False, newstyle=False, returnfp=False):
  if(verbose):
  logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
- checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
+ checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
  formatspecs = formatspecs[checkcompressfile]
  if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
@@ -9321,9 +8882,9 @@ def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compress
  outarray = MkTempFile()
  packform = PackFoxFile(infiles, outarray, dirlistfromtxt, compression, compresswholefile,
  compressionlevel, followlink, checksumtype, formatspecs, False, True)
-
+ listarrayfiles = FoxFileListFiles(
  outarray, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
- return
+ return listarrayfiles

  """
  PyNeoFile compatibility layer
@@ -9342,31 +8903,44 @@ def make_empty_archive_file_neo(outfile=None, fmttype=None, checksumtype='crc32'
  return make_empty_file_neo(outfile, fmttype, checksumtype, formatspecs, encoding, returnfp)

  def pack_neo(infiles, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["crc32", "crc32", "crc32", "crc32"], encoding="UTF-8", compression="auto", compression_level=None, returnfp=False):
- return
+ return PackFoxFile(infiles, outfile, False, "auto", compression, False, compression_level, compressionlistalt, False, checksumtypes, [], {}, formatspecs, False, returnfp)

  def archive_to_array_neo(infile, formatspecs=__file_format_multi_dict__, listonly=False, skipchecksum=False, uncompress=True, returnfp=False):
- return
+ return FoxFileToArray(infile, "auto", 0, 0, 0, listonly, True, uncompress, skipchecksum, formatspecs, False, returnfp)

  def unpack_neo(infile, outdir='.', formatspecs=__file_format_multi_dict__, skipchecksum=False, uncompress=True, returnfp=False):
- return
+ return UnPackFoxFile(infile, outdir, False, 0, 0, skipchecksum, formatspecs, True, True, False, False, returnfp)

  def repack_neo(infile, outfile=None, formatspecs=__file_format_dict__, checksumtypes=["crc32", "crc32", "crc32", "crc32"], compression="auto", compression_level=None, returnfp=False):
- return
+ return RePackFoxFile(infile, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)

  def archivefilevalidate_neo(infile, formatspecs=__file_format_multi_dict__, verbose=False, return_details=False, returnfp=False):
- return
+ return FoxFileValidate(infile, "auto", formatspecs, False, verbose, returnfp)

  def archivefilelistfiles_neo(infile, formatspecs=__file_format_multi_dict__, advanced=False, include_dirs=True, returnfp=False):
- return
+ return FoxFileListFiles(infile, "auto", 0, 0, False, formatspecs, False, True, advanced, returnfp)

  def convert_foreign_to_neo(infile, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["crc32", "crc32", "crc32", "crc32"], compression="auto", compression_level=None, returnfp=False):
- intmp = InFileToArray(infile, 0, 0, False, True, False, formatspecs, False, False)
- return
+ intmp = InFileToArray(infile, 0, 0, 0, False, True, False, formatspecs, False, False)
+ return RePackFoxFile(intmp, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)
+
+ def detect_cwd(ftp, file_dir):
+ """
+ Test whether cwd into file_dir works. Returns True if it does,
+ False if not (so absolute paths should be used).
+ """
+ if not file_dir or file_dir in ("/", ""):
+ return False  # nothing to cwd into
+ try:
+ ftp.cwd(file_dir)
+ return True
+ except all_errors:
+ return False

  def download_file_from_ftp_file(url):
  urlparts = urlparse(url)
- file_name = os.path.basename(urlparts.path)
- file_dir = os.path.dirname(urlparts.path)
+ file_name = os.path.basename(unquote(urlparts.path))
+ file_dir = os.path.dirname(unquote(urlparts.path))
  if(urlparts.username is not None):
  ftp_username = urlparts.username
  else:
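The new detect_cwd helper probes whether the server lets the client change into the target directory before choosing between relative and absolute RETR/STOR paths, and URL path components are now percent-decoded with unquote. The same probe can be exercised on its own like this (host and directory in the usage comment are placeholders):

from ftplib import FTP, all_errors

def can_cwd(ftp, directory):
    # Same idea as detect_cwd: try to change into the directory and report
    # False on any FTP error so callers fall back to absolute paths.
    if not directory or directory == "/":
        return False
    try:
        ftp.cwd(directory)
        return True
    except all_errors:
        return False

# Example usage (placeholder host):
# ftp = FTP("ftp.example.com"); ftp.login()
# print(can_cwd(ftp, "/pub"))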
@@ -9383,7 +8957,7 @@ def download_file_from_ftp_file(url):
  ftp = FTP_TLS()
  else:
  return False
- if(urlparts.scheme == "sftp"):
+ if(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
  if(__use_pysftp__):
  return download_file_from_pysftp_file(url)
  else:
@@ -9401,26 +8975,70 @@ def download_file_from_ftp_file(url):
  except socket.timeout:
  log.info("Error With URL "+url)
  return False
-
-
-
+ if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
+ try:
+ ftp.auth()
+ except all_errors:
+ pass
+ ftp.login(ftp_username, ftp_password)
+ if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
+ try:
+ ftp.prot_p()
+ except all_errors:
+ ftp.prot_c()
+ # UTF-8 filenames if supported
+ try:
+ ftp.sendcmd("OPTS UTF8 ON")
+ ftp.encoding = "utf-8"
+ except all_errors:
+ pass
+ is_cwd_allowed = detect_cwd(ftp, file_dir)
  ftpfile = MkTempFile()
-
-
+ # Try EPSV first, then fall back
+ try:
+ ftp.force_epsv = True
+ ftp.sendcmd("EPSV")  # request extended passive
+ if(is_cwd_allowed):
+ ftp.retrbinary("RETR "+file_name, ftpfile.write)
+ else:
+ ftp.retrbinary("RETR "+unquote(urlparts.path), ftpfile.write)
+ except all_errors:
+ try:
+ ftp.set_pasv(True)
+ if(is_cwd_allowed):
+ ftp.retrbinary("RETR "+file_name, ftpfile.write)
+ else:
+ ftp.retrbinary("RETR "+unquote(urlparts.path), ftpfile.write)
+ except all_errors:
+ ftp.set_pasv(False)
+ if(is_cwd_allowed):
+ ftp.retrbinary("RETR "+file_name, ftpfile.write)
+ else:
+ ftp.retrbinary("RETR "+unquote(urlparts.path), ftpfile.write)
  ftp.close()
  ftpfile.seek(0, 0)
  return ftpfile


+ def download_file_from_ftps_file(url):
+ return download_file_from_ftp_file(url)
+
+
  def download_file_from_ftp_string(url):
  ftpfile = download_file_from_ftp_file(url)
-
+ ftpout = ftpfile.read()
+ ftpfile.close()
+ return ftpout
+
+
+ def download_file_from_ftps_string(url):
+ return download_file_from_ftp_string(url)


  def upload_file_to_ftp_file(ftpfile, url):
  urlparts = urlparse(url)
- file_name = os.path.basename(urlparts.path)
- file_dir = os.path.dirname(urlparts.path)
+ file_name = os.path.basename(unquote(urlparts.path))
+ file_dir = os.path.dirname(unquote(urlparts.path))
  if(urlparts.username is not None):
  ftp_username = urlparts.username
  else:
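The download body above now tries extended passive mode first, then plain passive, then active mode, re-issuing the RETR on each failure. A compact standalone version of that fallback chain, independent of this module's temp-file helpers, could read:

import io
from ftplib import all_errors

def retrieve_with_fallback(ftp, remote_path):
    # Try EPSV, then PASV, then active mode, restarting the RETR each time.
    buf = io.BytesIO()
    for mode in ("epsv", "pasv", "active"):
        try:
            if mode == "epsv":
                ftp.sendcmd("EPSV")
            elif mode == "pasv":
                ftp.set_pasv(True)
            else:
                ftp.set_pasv(False)
            buf.seek(0)
            buf.truncate()
            ftp.retrbinary("RETR " + remote_path, buf.write)
            buf.seek(0)
            return buf
        except all_errors:
            continue
    return None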
@@ -9437,7 +9055,7 @@ def upload_file_to_ftp_file(ftpfile, url):
  ftp = FTP_TLS()
  else:
  return False
- if(urlparts.scheme == "sftp"):
+ if(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
  if(__use_pysftp__):
  return upload_file_to_pysftp_file(url)
  else:
@@ -9455,15 +9073,55 @@ def upload_file_to_ftp_file(ftpfile, url):
  except socket.timeout:
  log.info("Error With URL "+url)
  return False
-
-
-
-
+ if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
+ try:
+ ftp.auth()
+ except all_errors:
+ pass
+ ftp.login(ftp_username, ftp_password)
+ if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
+ try:
+ ftp.prot_p()
+ except all_errors:
+ ftp.prot_c()
+ # UTF-8 filenames if supported
+ try:
+ ftp.sendcmd("OPTS UTF8 ON")
+ ftp.encoding = "utf-8"
+ except all_errors:
+ pass
+ is_cwd_allowed = detect_cwd(ftp, file_dir)
+ ftpfile.seek(0, 0)
+ # Try EPSV first, then fall back
+ try:
+ ftp.force_epsv = True
+ ftp.sendcmd("EPSV")  # request extended passive
+ if(is_cwd_allowed):
+ ftp.storbinary("STOR "+file_name, ftpfile)
+ else:
+ ftp.storbinary("STOR "+unquote(urlparts.path), ftpfile)
+ except all_errors:
+ try:
+ ftp.set_pasv(True)
+ if(is_cwd_allowed):
+ ftp.storbinary("STOR "+file_name, ftpfile)
+ else:
+ ftp.storbinary("STOR "+unquote(urlparts.path), ftpfile)
+ except all_errors:
+ ftp.set_pasv(False)
+ if(is_cwd_allowed):
+ ftp.storbinary("STOR "+file_name, ftpfile)
+ else:
+ ftp.storbinary("STOR "+unquote(urlparts.path), ftpfile)
  ftp.close()
  ftpfile.seek(0, 0)
  return ftpfile


+ def upload_file_to_ftps_file(ftpfile, url):
+ return upload_file_to_ftp_file(ftpfile, url)
+
+
  def upload_file_to_ftp_string(ftpstring, url):
  ftpfileo = MkTempFile(ftpstring)
  ftpfile = upload_file_to_ftp_file(ftpfileo, url)
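The upload side mirrors the download logic: rewind the payload, consult detect_cwd, then STOR with the same EPSV/passive/active fallbacks, while the new upload_file_to_ftps_* wrappers simply delegate to their ftp counterparts. A hedged sketch of the STOR fallback on its own:

from ftplib import all_errors

def store_with_fallback(ftp, remote_path, fileobj):
    # Rewind the payload and try EPSV, then PASV, then active mode for the STOR.
    for mode in ("epsv", "pasv", "active"):
        try:
            if mode == "epsv":
                ftp.sendcmd("EPSV")
            elif mode == "pasv":
                ftp.set_pasv(True)
            else:
                ftp.set_pasv(False)
            fileobj.seek(0)
            ftp.storbinary("STOR " + remote_path, fileobj)
            return True
        except all_errors:
            continue
    return False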
@@ -9471,6 +9129,10 @@ def upload_file_to_ftp_string(ftpstring, url):
  return ftpfile


+ def upload_file_to_ftps_string(ftpstring, url):
+ return upload_file_to_ftp_string(ftpstring, url)
+
+
  class RawIteratorWrapper:
  def __init__(self, iterator):
  self.iterator = iterator
@@ -9508,7 +9170,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
  urlparts.params, urlparts.query, urlparts.fragment))

  # Handle SFTP/FTP
- if urlparts.scheme == "sftp":
+ if urlparts.scheme == "sftp" or urlparts.scheme == "scp":
  if __use_pysftp__:
  return download_file_from_pysftp_file(url)
  else:
@@ -9581,14 +9243,16 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):

  def download_file_from_http_string(url, headers=geturls_headers_pyfile_python_alt, usehttp=__use_http_lib__):
  httpfile = download_file_from_http_file(url, headers, usehttp)
-
+ httpout = httpfile.read()
+ httpfile.close()
+ return httpout


  if(haveparamiko):
  def download_file_from_sftp_file(url):
  urlparts = urlparse(url)
- file_name = os.path.basename(urlparts.path)
- file_dir = os.path.dirname(urlparts.path)
+ file_name = os.path.basename(unquote(urlparts.path))
+ file_dir = os.path.dirname(unquote(urlparts.path))
  sftp_port = urlparts.port
  if(urlparts.port is None):
  sftp_port = 22
@@ -9608,14 +9272,14 @@ if(haveparamiko):
  return download_file_from_ftp_file(url)
  elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
  return download_file_from_http_file(url)
- if(urlparts.scheme != "sftp"):
+ if(urlparts.scheme != "sftp" and urlparts.scheme != "scp"):
  return False
  ssh = paramiko.SSHClient()
  ssh.load_system_host_keys()
  ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
  try:
  ssh.connect(urlparts.hostname, port=sftp_port,
- username=
+ username=sftp_username, password=urlparts.password)
  except paramiko.ssh_exception.SSHException:
  return False
  except socket.gaierror:
@@ -9626,7 +9290,7 @@ if(haveparamiko):
  return False
  sftp = ssh.open_sftp()
  sftpfile = MkTempFile()
- sftp.getfo(urlparts.path, sftpfile)
+ sftp.getfo(unquote(urlparts.path), sftpfile)
  sftp.close()
  ssh.close()
  sftpfile.seek(0, 0)
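For paramiko-based SFTP the change is narrower: connect now receives the parsed username and password explicitly and getfo gets the percent-decoded path. A self-contained sketch of that flow (the URL in the usage comment is a placeholder, not a value from this module):

import io
from urllib.parse import urlparse, unquote
import paramiko

def fetch_over_sftp(url):
    # Parse the URL, open an SSH session, and stream the remote file
    # into an in-memory buffer over SFTP.
    parts = urlparse(url)
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    ssh.connect(parts.hostname, port=parts.port or 22,
                username=parts.username, password=parts.password)
    sftp = ssh.open_sftp()
    buf = io.BytesIO()
    sftp.getfo(unquote(parts.path), buf)
    sftp.close()
    ssh.close()
    buf.seek(0)
    return buf

# Example usage (placeholder URL):
# data = fetch_over_sftp("sftp://user:pass@host.example/path/to/file").read()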
@@ -9638,7 +9302,9 @@ else:
  if(haveparamiko):
  def download_file_from_sftp_string(url):
  sftpfile = download_file_from_sftp_file(url)
-
+ sftpout = sftpfile.read()
+ sftpfile.close()
+ return sftpout
  else:
  def download_file_from_sftp_string(url):
  return False
@@ -9646,8 +9312,8 @@ else:
  if(haveparamiko):
  def upload_file_to_sftp_file(sftpfile, url):
  urlparts = urlparse(url)
- file_name = os.path.basename(urlparts.path)
- file_dir = os.path.dirname(urlparts.path)
+ file_name = os.path.basename(unquote(urlparts.path))
+ file_dir = os.path.dirname(unquote(urlparts.path))
  sftp_port = urlparts.port
  if(urlparts.port is None):
  sftp_port = 22
@@ -9664,17 +9330,17 @@ if(haveparamiko):
  else:
  sftp_password = ""
  if(urlparts.scheme == "ftp"):
- return upload_file_to_ftp_file(url)
+ return upload_file_to_ftp_file(sftpfile, url)
  elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
  return False
- if(urlparts.scheme != "sftp"):
+ if(urlparts.scheme != "sftp" and urlparts.scheme != "scp"):
  return False
  ssh = paramiko.SSHClient()
  ssh.load_system_host_keys()
  ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
  try:
  ssh.connect(urlparts.hostname, port=sftp_port,
- username=
+ username=sftp_username, password=sftp_password)
  except paramiko.ssh_exception.SSHException:
  return False
  except socket.gaierror:
@@ -9684,7 +9350,8 @@ if(haveparamiko):
  log.info("Error With URL "+url)
  return False
  sftp = ssh.open_sftp()
-
+ sftpfile.seek(0, 0)
+ sftp.putfo(sftpfile, unquote(urlparts.path))
  sftp.close()
  ssh.close()
  sftpfile.seek(0, 0)
@@ -9706,8 +9373,8 @@ else:
  if(havepysftp):
  def download_file_from_pysftp_file(url):
  urlparts = urlparse(url)
- file_name = os.path.basename(urlparts.path)
- file_dir = os.path.dirname(urlparts.path)
+ file_name = os.path.basename(unquote(urlparts.path))
+ file_dir = os.path.dirname(unquote(urlparts.path))
  sftp_port = urlparts.port
  if(urlparts.port is None):
  sftp_port = 22
@@ -9727,11 +9394,11 @@ if(havepysftp):
  return download_file_from_ftp_file(url)
  elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
  return download_file_from_http_file(url)
- if(urlparts.scheme != "sftp"):
+ if(urlparts.scheme != "sftp" and urlparts.scheme != "scp"):
  return False
  try:
- pysftp.Connection(urlparts.hostname, port=sftp_port,
- username=
+ sftp = pysftp.Connection(urlparts.hostname, port=sftp_port,
+ username=sftp_username, password=sftp_password)
  except paramiko.ssh_exception.SSHException:
  return False
  except socket.gaierror:
@@ -9740,9 +9407,8 @@ if(havepysftp):
  except socket.timeout:
  log.info("Error With URL "+url)
  return False
- sftp = ssh.open_sftp()
  sftpfile = MkTempFile()
- sftp.getfo(urlparts.path, sftpfile)
+ sftp.getfo(unquote(urlparts.path), sftpfile)
  sftp.close()
  ssh.close()
  sftpfile.seek(0, 0)
@@ -9754,7 +9420,9 @@ else:
  if(havepysftp):
  def download_file_from_pysftp_string(url):
  sftpfile = download_file_from_pysftp_file(url)
-
+ sftpout = sftpfile.read()
+ sftpfile.close()
+ return sftpout
  else:
  def download_file_from_pysftp_string(url):
  return False
@@ -9762,8 +9430,8 @@ else:
  if(havepysftp):
  def upload_file_to_pysftp_file(sftpfile, url):
  urlparts = urlparse(url)
- file_name = os.path.basename(urlparts.path)
- file_dir = os.path.dirname(urlparts.path)
+ file_name = os.path.basename(unquote(urlparts.path))
+ file_dir = os.path.dirname(unquote(urlparts.path))
  sftp_port = urlparts.port
  if(urlparts.port is None):
  sftp_port = 22
@@ -9780,14 +9448,14 @@ if(havepysftp):
  else:
  sftp_password = ""
  if(urlparts.scheme == "ftp"):
- return upload_file_to_ftp_file(url)
+ return upload_file_to_ftp_file(sftpfile, url)
  elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
  return False
- if(urlparts.scheme != "sftp"):
+ if(urlparts.scheme != "sftp" and urlparts.scheme != "scp"):
  return False
  try:
- pysftp.Connection(urlparts.hostname, port=sftp_port,
- username=
+ sftp = pysftp.Connection(urlparts.hostname, port=sftp_port,
+ username=sftp_username, password=sftp_password)
  except paramiko.ssh_exception.SSHException:
  return False
  except socket.gaierror:
@@ -9796,8 +9464,8 @@ if(havepysftp):
  except socket.timeout:
  log.info("Error With URL "+url)
  return False
-
- sftp.putfo(sftpfile, urlparts.path)
+ sftpfile.seek(0, 0)
+ sftp.putfo(sftpfile, unquote(urlparts.path))
  sftp.close()
  ssh.close()
  sftpfile.seek(0, 0)
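The pysftp branches now keep the Connection object (previously its result was discarded), rewind the buffer, and call putfo with the decoded path. Roughly, assuming pysftp is installed and host-key checking is already configured (a sketch, not this module's helper):

import io
from urllib.parse import urlparse, unquote
import pysftp

def push_over_pysftp(url, payload):
    # payload is a bytes object; the buffer is rewound before putfo.
    parts = urlparse(url)
    sftp = pysftp.Connection(parts.hostname, port=parts.port or 22,
                             username=parts.username, password=parts.password)
    buf = io.BytesIO(payload)
    buf.seek(0)
    sftp.putfo(buf, unquote(parts.path))
    sftp.close()
    return True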
@@ -9823,7 +9491,7 @@ def download_file_from_internet_file(url, headers=geturls_headers_pyfile_python_
  return download_file_from_http_file(url, headers, usehttp)
  elif(urlparts.scheme == "ftp" or urlparts.scheme == "ftps"):
  return download_file_from_ftp_file(url)
- elif(urlparts.scheme == "sftp"):
+ elif(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
  if(__use_pysftp__ and havepysftp):
  return download_file_from_pysftp_file(url)
  else:
@@ -9833,9 +9501,9 @@ def download_file_from_internet_file(url, headers=geturls_headers_pyfile_python_
  return False


- def download_file_from_internet_uncompress_file(url, headers=geturls_headers_pyfile_python_alt, formatspecs=__file_format_dict__):
+ def download_file_from_internet_uncompress_file(url, headers=geturls_headers_pyfile_python_alt, filestart=0, formatspecs=__file_format_dict__):
  fp = download_file_from_internet_file(url)
- fp = UncompressFileAlt(fp, formatspecs)
+ fp = UncompressFileAlt(fp, formatspecs, filestart)
  fp.seek(0, 0)
  if(not fp):
  return False
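The internet-facing wrappers now route scp:// URLs through the SFTP code paths and thread a filestart offset into UncompressFileAlt. The dispatch itself is just a chain on urlparse(url).scheme; a simplified, hypothetical router (not the module's actual one) looks like:

from urllib.parse import urlparse

def pick_transport(url):
    # Map a URL scheme to the family of helpers that should handle it.
    scheme = urlparse(url).scheme
    if scheme in ("http", "https"):
        return "http"
    if scheme in ("ftp", "ftps"):
        return "ftp"
    if scheme in ("sftp", "scp"):
        return "sftp"
    return None

print(pick_transport("scp://host.example/archive.fox"))  # -> "sftp"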
@@ -9848,7 +9516,7 @@ def download_file_from_internet_string(url, headers=geturls_headers_pyfile_pytho
  return download_file_from_http_string(url, headers)
  elif(urlparts.scheme == "ftp" or urlparts.scheme == "ftps"):
  return download_file_from_ftp_string(url)
- elif(urlparts.scheme == "sftp"):
+ elif(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
  if(__use_pysftp__ and havepysftp):
  return download_file_from_pysftp_string(url)
  else:
@@ -9858,13 +9526,15 @@ def download_file_from_internet_string(url, headers=geturls_headers_pyfile_pytho
  return False


- def download_file_from_internet_uncompress_string(url, headers=geturls_headers_pyfile_python_alt, formatspecs=__file_format_dict__):
+ def download_file_from_internet_uncompress_string(url, headers=geturls_headers_pyfile_python_alt, filestart=0, formatspecs=__file_format_dict__):
  fp = download_file_from_internet_string(url)
- fp = UncompressFileAlt(fp, formatspecs)
- fp.seek(0, 0)
+ fp = UncompressFileAlt(fp, formatspecs, filestart)
  if(not fp):
  return False
-
+ fp.seek(0, 0)
+ fpout = fp.read()
+ fp.close
+ return fpout


  def upload_file_to_internet_file(ifp, url):
@@ -9873,7 +9543,7 @@ def upload_file_to_internet_file(ifp, url):
  return False
  elif(urlparts.scheme == "ftp" or urlparts.scheme == "ftps"):
  return upload_file_to_ftp_file(ifp, url)
- elif(urlparts.scheme == "sftp"):
+ elif(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
  if(__use_pysftp__ and havepysftp):
  return upload_file_to_pysftp_file(ifp, url)
  else:
@@ -9889,8 +9559,7 @@ def upload_file_to_internet_compress_file(ifp, url, compression="auto", compress
  if(not foxfileout):
  return False
  fp.seek(0, 0)
- upload_file_to_internet_file(fp, outfile)
- return True
+ return upload_file_to_internet_file(fp, outfile)


  def upload_file_to_internet_string(ifp, url):
@@ -9899,7 +9568,7 @@ def upload_file_to_internet_string(ifp, url):
  return False
  elif(urlparts.scheme == "ftp" or urlparts.scheme == "ftps"):
  return upload_file_to_ftp_string(ifp, url)
- elif(urlparts.scheme == "sftp"):
+ elif(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
  if(__use_pysftp__ and havepysftp):
  return upload_file_to_pysftp_string(ifp, url)
  else:
@@ -9916,5 +9585,4 @@ def upload_file_to_internet_compress_string(ifp, url, compression="auto", compre
  if(not foxfileout):
  return False
  fp.seek(0, 0)
- upload_file_to_internet_file(fp, outfile)
- return True
+ return upload_file_to_internet_file(fp, outfile)