PyFoxFile: pyfoxfile-0.21.2-py3-none-any.whl → pyfoxfile-0.22.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pyfoxfile-0.21.2.data → pyfoxfile-0.22.2.data}/scripts/foxfile.py +9 -9
- {pyfoxfile-0.21.2.data → pyfoxfile-0.22.2.data}/scripts/neofoxfile.py +1 -1
- {pyfoxfile-0.21.2.dist-info → pyfoxfile-0.22.2.dist-info}/METADATA +4 -4
- pyfoxfile-0.22.2.dist-info/RECORD +10 -0
- pyfoxfile.py +1114 -1351
- pyfoxfile-0.21.2.dist-info/RECORD +0 -10
- {pyfoxfile-0.21.2.data → pyfoxfile-0.22.2.data}/scripts/foxneofile.py +0 -0
- {pyfoxfile-0.21.2.dist-info → pyfoxfile-0.22.2.dist-info}/WHEEL +0 -0
- {pyfoxfile-0.21.2.dist-info → pyfoxfile-0.22.2.dist-info}/licenses/LICENSE +0 -0
- {pyfoxfile-0.21.2.dist-info → pyfoxfile-0.22.2.dist-info}/top_level.txt +0 -0
- {pyfoxfile-0.21.2.dist-info → pyfoxfile-0.22.2.dist-info}/zip-safe +0 -0
pyfoxfile.py
CHANGED
@@ -14,7 +14,7 @@
 Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
 Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski

-$FileInfo: pyfoxfile.py - Last Update: 8/
+$FileInfo: pyfoxfile.py - Last Update: 8/29/2025 Ver. 0.22.2 RC 1 - Author: cooldude2k $
 '''

 from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -32,10 +32,13 @@ import socket
 import hashlib
 import inspect
 import datetime
+import tempfile
 import logging
 import zipfile
 import binascii
 import platform
+from io import StringIO, BytesIO
+import posixpath as pp # POSIX-safe joins/normpaths
 try:
     from backports import tempfile
 except ImportError:
@@ -43,10 +46,10 @@ except ImportError:
 # FTP Support
 ftpssl = True
 try:
-    from ftplib import FTP, FTP_TLS
+    from ftplib import FTP, FTP_TLS, all_errors
 except ImportError:
     ftpssl = False
-    from ftplib import FTP
+    from ftplib import FTP, all_errors

 try:
     import ujson as json
@@ -100,9 +103,13 @@ baseint = tuple(baseint)

 # URL Parsing
 try:
-
+    # Python 3
+    from urllib.parse import urlparse, urlunparse, unquote
+    from urllib.request import url2pathname
 except ImportError:
+    # Python 2
     from urlparse import urlparse, urlunparse
+    from urllib import unquote, url2pathname

 # Windows-specific setup
 if os.name == "nt":
@@ -243,17 +250,6 @@ except ImportError:
     from urllib2 import Request, build_opener, HTTPBasicAuthHandler
     from urlparse import urlparse

-# StringIO and BytesIO
-try:
-    from io import StringIO, BytesIO
-except ImportError:
-    try:
-        from cStringIO import StringIO
-        from cStringIO import StringIO as BytesIO
-    except ImportError:
-        from StringIO import StringIO
-        from StringIO import StringIO as BytesIO
-
 def get_importing_script_path():
     # Inspect the stack and get the frame of the caller
     stack = inspect.stack()
@@ -275,22 +271,8 @@ def get_default_threads():


 __use_pysftp__ = False
-
-
-__use_ini_file__ = True
-__use_ini_name__ = "foxfile.ini"
-if('PYARCHIVEFILE_CONFIG_FILE' in os.environ and os.path.exists(os.environ['PYARCHIVEFILE_CONFIG_FILE']) and __use_env_file__):
-    scriptconf = os.environ['PYARCHIVEFILE_CONFIG_FILE']
-else:
-    prescriptpath = get_importing_script_path()
-    if(prescriptpath is not None):
-        scriptconf = os.path.join(os.path.dirname(prescriptpath), __use_ini_name__)
-    else:
-        scriptconf = ""
-if os.path.exists(scriptconf):
-    __config_file__ = scriptconf
-else:
-    __config_file__ = os.path.join(os.path.dirname(os.path.realpath(__file__)), __use_ini_name__)
+__upload_proto_support__ = "^(ftp|ftps|sftp|scp)://"
+__download_proto_support__ = "^(http|https|ftp|ftps|sftp|scp)://"
 if(not havepysftp):
     __use_pysftp__ = False
 __use_http_lib__ = "httpx"
@@ -328,7 +310,25 @@ def is_only_nonprintable(var):
 __file_format_multi_dict__ = {}
 __file_format_default__ = "FoxFile"
 __include_defaults__ = True
+__use_inmemfile__ = False
 __program_name__ = "Py"+__file_format_default__
+__use_env_file__ = True
+__use_ini_file__ = True
+__use_ini_name__ = "foxfile.ini"
+__use_json_file__ = False
+__use_json_name__ = "foxfile.json"
+if('PYARCHIVEFILE_CONFIG_FILE' in os.environ and os.path.exists(os.environ['PYARCHIVEFILE_CONFIG_FILE']) and __use_env_file__):
+    scriptconf = os.environ['PYARCHIVEFILE_CONFIG_FILE']
+else:
+    prescriptpath = get_importing_script_path()
+    if(prescriptpath is not None):
+        scriptconf = os.path.join(os.path.dirname(prescriptpath), __use_ini_name__)
+    else:
+        scriptconf = ""
+if os.path.exists(scriptconf):
+    __config_file__ = scriptconf
+else:
+    __config_file__ = os.path.join(os.path.dirname(os.path.realpath(__file__)), __use_ini_name__)
 if __use_ini_file__ and os.path.exists(__config_file__):
     config = configparser.ConfigParser()
     config.read(__config_file__)
@@ -340,6 +340,7 @@ if __use_ini_file__ and os.path.exists(__config_file__):
     __file_format_default__ = decode_unicode_escape(config.get('config', 'default'))
     __program_name__ = decode_unicode_escape(config.get('config', 'proname'))
     __include_defaults__ = config.getboolean('config', 'includedef')
+    __use_inmemfile__ = config.getboolean('config', 'inmemfile')
 # Loop through all sections
 for section in config.sections():
     required_keys = [
@@ -389,12 +390,13 @@ __file_format_extension__ = __file_format_multi_dict__[__file_format_default__][
 __file_format_dict__ = __file_format_multi_dict__[__file_format_default__]
 __project__ = __program_name__
 __project_url__ = "https://github.com/GameMaker2k/PyFoxFile"
-__version_info__ = (0,
-
+__version_info__ = (0, 22, 2, "RC 1", 1)
+__version_info__ = (0, 22, 2, "RC 1", 1)
+__version_date_info__ = (2025, 9, 29, "RC 1", 1)
 __version_date__ = str(__version_date_info__[0]) + "." + str(
     __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
 __revision__ = __version_info__[3]
-__revision_id__ = "$Id:
+__revision_id__ = "$Id: fe60b5bb8a36aa1216d0e100c5dcfd68f8b3459f $"
 if(__version_info__[4] is not None):
     __version_date_plusrc__ = __version_date__ + \
         "-" + str(__version_date_info__[4])
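Note: the two new module-level patterns introduced above (`__upload_proto_support__` and `__download_proto_support__`) are later used with `re.findall()` to decide whether an `infile`/`outfile` argument is a remote URL rather than a local path. A minimal standalone sketch of that check, using only the regex strings shown in the diff:

```python
import re

# Patterns copied verbatim from the 0.22.2 diff above.
upload_proto_support = "^(ftp|ftps|sftp|scp)://"
download_proto_support = "^(http|https|ftp|ftps|sftp|scp)://"

def is_remote_download(target):
    # re.findall() returns a non-empty list when the scheme matches;
    # the library's own branches treat that result as truthy/falsy.
    return bool(re.findall(download_proto_support, target))

print(is_remote_download("https://example.com/archive.bin"))  # True
print(is_remote_download("/tmp/archive.bin"))                  # False
```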
@@ -584,6 +586,281 @@ def VerbosePrintOutReturn(dbgtxt, outtype="log", dbgenable=True, dgblevel=20):
     return dbgtxt


+# --- Helpers ---
+def _normalize_initial_data(data, isbytes, encoding):
+    """Return data in the correct type for write(): bytes (if isbytes) or text (if not)."""
+    if data is None:
+        return None
+
+    if isbytes:
+        # Want bytes
+        if isinstance(data, bytes):
+            return data
+        # Py2: str is already bytes, unicode needs encode
+        if sys.version_info[0] == 2:
+            try:
+                unicode  # noqa: F821
+            except NameError:
+                pass
+            else:
+                if isinstance(data, unicode):  # noqa: F821
+                    return data.encode(encoding)
+        # Py3 str -> encode
+        return str(data).encode(encoding)
+    else:
+        # Want text (unicode/str)
+        if sys.version_info[0] == 2:
+            try:
+                unicode  # noqa: F821
+                if isinstance(data, unicode):  # noqa: F821
+                    return data
+                # bytes/str -> decode
+                return data.decode(encoding) if isinstance(data, str) else unicode(data)  # noqa: F821
+            except NameError:
+                # Very defensive; shouldn't happen
+                return data
+        else:
+            # Py3: want str
+            if isinstance(data, bytes):
+                return data.decode(encoding)
+            return str(data)
+
+
+def _split_posix(path_text):
+    """Split POSIX paths regardless of OS; return list of components."""
+    # Normalize leading './'
+    if path_text.startswith(u'./'):
+        path_text = path_text[2:]
+    # Strip redundant slashes
+    path_text = re.sub(u'/+', u'/', path_text)
+    # Drop trailing '/' so 'dir/' -> ['dir']
+    if path_text.endswith(u'/'):
+        path_text = path_text[:-1]
+    return path_text.split(u'/') if path_text else []
+
+def _is_abs_like(s):
+    """Absolute targets (POSIX or Windows-drive style)."""
+    return s.startswith(u'/') or s.startswith(u'\\') or re.match(u'^[A-Za-z]:[/\\\\]', s)
+
+def _resolves_outside(base_rel, target_rel):
+    """
+    Given a base directory (relative, POSIX) and a target (relative),
+    return True if base/target resolves outside of base.
+    We anchor under '/' so normpath is root-anchored and portable.
+    """
+    base_clean = u'/'.join(_split_posix(base_rel))
+    target_clean = u'/'.join(_split_posix(target_rel))
+    base_abs = u'/' + base_clean if base_clean else u'/'
+    combined = pp.normpath(pp.join(base_abs, target_clean))
+    if combined == base_abs or combined.startswith(base_abs + u'/'):
+        return False
+    return True
+
+
+def DetectTarBombFoxFileArray(listarrayfiles,
+                              top_file_ratio_threshold=0.6,
+                              min_members_for_ratio=4,
+                              symlink_policy="escape-only",  # 'escape-only' | 'deny' | 'single-folder-only'
+                              to_text=to_text):
+    """
+    Detect 'tarbomb-like' archives from FoxFileToArray/TarFileToArray dicts.
+
+    Parameters:
+      listarrayfiles: dict with key 'ffilelist' -> list of entries (requires 'fname')
+      top_file_ratio_threshold: float, fraction of root files considered tarbomb
+      min_members_for_ratio: int, minimum members before ratio heuristic applies
+      symlink_policy:
+        - 'escape-only': only symlinks that escape parent/are absolute are unsafe
+        - 'deny': any symlink is unsafe
+        - 'single-folder-only': symlinks allowed only if archive has a single top-level folder
+      to_text: normalization function (your provided to_text)
+
+    Returns dict with:
+      - is_tarbomb, reasons, total_members, top_level_entries, top_level_files_count,
+        has_absolute_paths, has_parent_traversal,
+        symlink_escapes_root (bool), symlink_issues (list[{entry,target,reason}])
+    """
+    files = listarrayfiles or {}
+    members = files.get('ffilelist') or []
+
+    names = []
+    has_abs = False
+    has_parent = False
+
+    # Symlink tracking
+    has_any_symlink = False
+    symlink_issues = []
+    any_symlink_escape = False
+
+    for m in members:
+        m = m or {}
+        name = to_text(m.get('fname', u""))
+
+        if _is_abs_like(name):
+            has_abs = True
+
+        parts = _split_posix(name)
+        if u'..' in parts:
+            has_parent = True
+
+        if not parts:
+            continue
+
+        norm_name = u'/'.join(parts)
+        names.append(norm_name)
+
+        # ---- Symlink detection ----
+        ftype = m.get('ftype')
+        is_symlink = (ftype == 2) or (to_text(ftype).lower() == u'symlink' if ftype is not None else False)
+        if is_symlink:
+            has_any_symlink = True
+            target = to_text(m.get('flinkname', u""))
+            # Absolute symlink target is unsafe
+            if _is_abs_like(target):
+                any_symlink_escape = True
+                symlink_issues.append({'entry': norm_name, 'target': target, 'reason': 'absolute symlink target'})
+            else:
+                parent = u'/'.join(parts[:-1])  # may be ''
+                if _resolves_outside(parent, target):
+                    any_symlink_escape = True
+                    symlink_issues.append({'entry': norm_name, 'target': target, 'reason': 'symlink escapes parent directory'})
+
+    total = len(names)
+    reasons = []
+    if total == 0:
+        return {
+            "is_tarbomb": False,
+            "reasons": ["archive contains no members"],
+            "total_members": 0,
+            "top_level_entries": [],
+            "top_level_files_count": 0,
+            "has_absolute_paths": has_abs,
+            "has_parent_traversal": has_parent,
+            "symlink_escapes_root": any_symlink_escape,
+            "symlink_issues": symlink_issues,
+        }
+
+    # Layout counts
+    top_counts = {}
+    top_level_files_count = 0
+    for name in names:
+        parts = name.split(u'/')
+        first = parts[0]
+        top_counts[first] = top_counts.get(first, 0) + 1
+        if len(parts) == 1:  # directly at archive root
+            top_level_files_count += 1
+
+    top_keys = sorted(top_counts.keys())
+    is_tarbomb = False
+
+    # Path-based dangers
+    if has_abs:
+        is_tarbomb = True
+        reasons.append("contains absolute paths (dangerous)")
+    if has_parent:
+        is_tarbomb = True
+        reasons.append("contains parent-traversal ('..') entries (dangerous)")
+    if any_symlink_escape:
+        is_tarbomb = True
+        reasons.append("contains symlinks that escape their parent directory")
+
+    # Symlink policy enforcement
+    if symlink_policy == "deny" and has_any_symlink:
+        is_tarbomb = True
+        reasons.append("symlinks present and policy is 'deny'")
+    elif symlink_policy == "single-folder-only" and has_any_symlink and len(top_keys) != 1:
+        is_tarbomb = True
+        reasons.append("symlinks present but archive lacks a single top-level folder")
+
+    # Tarbomb layout heuristics
+    if len(top_keys) == 1:
+        reasons.append("single top-level entry '{0}'".format(top_keys[0]))
+    else:
+        ratio = float(top_level_files_count) / float(total)
+        if total >= min_members_for_ratio and ratio > float(top_file_ratio_threshold):
+            is_tarbomb = True
+            reasons.append("high fraction of members ({0:.0%}) at archive root".format(ratio))
+        else:
+            max_bucket = max(top_counts.values()) if top_counts else 0
+            if max_bucket < total * 0.9:
+                is_tarbomb = True
+                reasons.append("multiple top-level entries with no dominant folder: {0}".format(
+                    u", ".join(top_keys[:10])))
+            else:
+                reasons.append("multiple top-level entries but one dominates")
+
+    return {
+        "is_tarbomb": bool(is_tarbomb),
+        "reasons": reasons,
+        "total_members": total,
+        "top_level_entries": top_keys,
+        "top_level_files_count": top_level_files_count,
+        "has_absolute_paths": has_abs,
+        "has_parent_traversal": has_parent,
+        "symlink_escapes_root": any_symlink_escape,
+        "symlink_issues": symlink_issues,
+    }
+
+
+def MkTempFile(data=None, inmem=__use_inmemfile__, isbytes=True, prefix=__project__,
+               delete=True, encoding="utf-8"):
+    """
+    Return a file-like handle.
+    - If inmem=True: returns StringIO (text) or BytesIO (bytes).
+    - If inmem=False: returns a NamedTemporaryFile opened in text or binary mode.
+    Args:
+      data: optional initial content; if provided, it's written and the handle is seek(0)
+      inmem: bool — return in-memory handle if True
+      isbytes: bool — choose bytes (True) or text (False)
+      prefix: str — tempfile prefix
+      delete: bool — whether the tempfile is deleted on close (NamedTemporaryFile)
+      encoding: str — used for text mode (and for conversions when needed)
+    """
+    init = _normalize_initial_data(data, isbytes, encoding)
+
+    if inmem:
+        buf = BytesIO() if isbytes else StringIO()
+        if init is not None:
+            buf.write(init)
+            buf.seek(0)
+        return buf
+
+    mode = "wb+" if isbytes else "w+"
+    kwargs = {"prefix": prefix or "", "delete": delete, "mode": mode}
+
+    # Only Python 3's text-mode files accept encoding/newline explicitly
+    if not isbytes and sys.version_info[0] >= 3:
+        kwargs["encoding"] = encoding
+        kwargs["newline"] = ""
+
+    f = tempfile.NamedTemporaryFile(**kwargs)
+
+    if init is not None:
+        f.write(init)
+        f.seek(0)
+    return f
+
+
+def MkTempFileSmart(data=None, isbytes=True, prefix=__project__, max_mem=1024*1024, encoding="utf-8"):
+    """
+    Spooled temp file: starts in memory and spills to disk past max_mem.
+    Behaves like BytesIO/StringIO for small data, with the same preload+seek(0) behavior.
+    """
+    mode = "wb+" if isbytes else "w+"
+    kwargs = {"mode": mode, "max_size": max_mem, "prefix": prefix or ""}
+    if not isbytes and sys.version_info[0] >= 3:
+        kwargs["encoding"] = encoding
+        kwargs["newline"] = ""
+
+    f = tempfile.SpooledTemporaryFile(**kwargs)
+
+    init = _normalize_initial_data(data, isbytes, encoding)
+    if init is not None:
+        f.write(init)
+        f.seek(0)
+    return f
+
+
 def RemoveWindowsPath(dpath):
     """
     Normalize a path by converting backslashes to forward slashes
@@ -591,6 +868,13 @@ def RemoveWindowsPath(dpath):
     """
     if not dpath:
         return ""
+    if re.match("^file://", dpath, re.IGNORECASE):
+        # Normalize to file:/// if it's a local path (no host)
+        if dpath.lower().startswith("file://") and not dpath.lower().startswith("file:///"):
+            # insert the extra slash
+            dpath = "file:///" + dpath[7:]
+        dparsed = urlparse(dpath)
+        dpath = url2pathname(dparsed.path)
     # Accept bytes and decode safely
     if isinstance(dpath, (bytes, bytearray)):
         dpath = dpath.decode("utf-8", "ignore")
@@ -606,6 +890,13 @@ def NormalizeRelativePath(inpath):
     """
     Ensures the path is relative unless it is absolute. Prepares consistent relative paths.
     """
+    if re.match("^file://", inpath, re.IGNORECASE):
+        # Normalize to file:/// if it's a local path (no host)
+        if inpath.lower().startswith("file://") and not inpath.lower().startswith("file:///"):
+            # insert the extra slash
+            inpath = "file:///" + inpath[7:]
+        dparsed = urlparse(inpath)
+        inpath = url2pathname(dparsed.path)
     inpath = RemoveWindowsPath(inpath)
     if os.path.isabs(inpath):
         outpath = inpath
@@ -662,6 +953,13 @@ def ListDir(dirpath, followlink=False, duplicates=False, include_regex=None, exc
     include_pattern = re.compile(include_regex) if include_regex else None
     exclude_pattern = re.compile(exclude_regex) if exclude_regex else None
     for mydirfile in dirpath:
+        if re.match("^file://", mydirfile, re.IGNORECASE):
+            # Normalize to file:/// if it's a local path (no host)
+            if mydirfile.lower().startswith("file://") and not mydirfile.lower().startswith("file:///"):
+                # insert the extra slash
+                mydirfile = "file:///" + mydirfile[7:]
+            dparsed = urlparse(mydirfile)
+            mydirfile = url2pathname(dparsed.path)
         if not os.path.exists(mydirfile):
             return False
         mydirfile = NormalizeRelativePath(mydirfile)
@@ -732,6 +1030,13 @@ def ListDirAdvanced(dirpath, followlink=False, duplicates=False, include_regex=N
     include_pattern = re.compile(include_regex) if include_regex else None
     exclude_pattern = re.compile(exclude_regex) if exclude_regex else None
     for mydirfile in dirpath:
+        if re.match("^file://", mydirfile, re.IGNORECASE):
+            # Normalize to file:/// if it's a local path (no host)
+            if mydirfile.lower().startswith("file://") and not mydirfile.lower().startswith("file:///"):
+                # insert the extra slash
+                mydirfile = "file:///" + mydirfile[7:]
+            dparsed = urlparse(mydirfile)
+            mydirfile = url2pathname(dparsed.path)
         if not os.path.exists(mydirfile):
            return False
         mydirfile = NormalizeRelativePath(mydirfile)
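The same `file://` handling is repeated in `RemoveWindowsPath`, `NormalizeRelativePath`, `ListDir`, and `ListDirAdvanced` above. The core of it, shown standalone as a Python 3-only sketch with just the standard library (the module itself keeps Python 2 fallback imports):

```python
import re
from urllib.parse import urlparse
from urllib.request import url2pathname

def file_url_to_path(path):
    """Turn file:// URLs into local paths; leave anything else untouched."""
    if re.match("^file://", path, re.IGNORECASE):
        # A two-slash form like file://tmp/x is treated as host-less here,
        # so normalize it to the three-slash local form first.
        if path.lower().startswith("file://") and not path.lower().startswith("file:///"):
            path = "file:///" + path[7:]
        return url2pathname(urlparse(path).path)
    return path

print(file_url_to_path("file:///tmp/archive.bin"))  # /tmp/archive.bin on POSIX
print(file_url_to_path("archive.bin"))              # archive.bin (unchanged)
```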
@@ -1920,7 +2225,7 @@ def ReadFileHeaderDataBySize(fp, delimiter=__file_format_dict__['format_delimite
     headersize = int(preheaderdata[0], 16)
     if(headersize <= 0):
         return []
-    subfp =
+    subfp = MkTempFile()
     subfp.write(fp.read(headersize))
     fp.seek(len(delimiter), 1)
     subfp.seek(0, 0)
@@ -1941,7 +2246,7 @@ def ReadFileHeaderDataWoSize(fp, delimiter=__file_format_dict__['format_delimite
     if(headersize <= 0 or headernumfields <= 0):
         return []
     headerdata = ReadTillNullByteByNum(fp, delimiter, headernumfields)
-    #headerdata = ReadFileHeaderData(fp, headernumfields, delimiter)
+    #headerdata = ReadFileHeaderData(fp, headernumfields, delimiter)
     HeaderOut = preheaderdata + headerdata
     return HeaderOut

@@ -2004,7 +2309,7 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
         return False
     fhend = fp.tell() - 1
     fcontentstart = fp.tell()
-    fcontents =
+    fcontents = MkTempFile()
     if(fsize > 0 and not listonly):
         if(fcompression == "none" or fcompression == "" or fcompression == "auto"):
             fcontents.write(fp.read(fsize))
@@ -2031,7 +2336,7 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
         if(uncompress):
             cfcontents = UncompressFileAlt(fcontents, formatspecs)
             cfcontents.seek(0, 0)
-            fcontents =
+            fcontents = MkTempFile()
             shutil.copyfileobj(cfcontents, fcontents)
             cfcontents.close()
             fcontents.seek(0, 0)
@@ -2145,7 +2450,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
         fjsoncontent = {}
     elif(fjsontype=="list"):
         fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
-        flisttmp =
+        flisttmp = MkTempFile()
         flisttmp.write(fprejsoncontent.encode())
         flisttmp.seek(0)
         fjsoncontent = ReadFileHeaderData(flisttmp, fjsonlen, delimiter)
@@ -2181,7 +2486,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
         return False
     fhend = fp.tell() - 1
     fcontentstart = fp.tell()
-    fcontents =
+    fcontents = MkTempFile()
     pyhascontents = False
     if(fsize > 0 and not listonly):
         if(fcompression == "none" or fcompression == "" or fcompression == "auto"):
@@ -2212,7 +2517,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
                 cfcontents = UncompressFileAlt(
                     fcontents, formatspecs)
                 cfcontents.seek(0, 0)
-                fcontents =
+                fcontents = MkTempFile()
                 shutil.copyfileobj(cfcontents, fcontents)
                 cfcontents.close()
                 fcontents.seek(0, 0)
@@ -2331,7 +2636,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
         fjsoncontent = {}
     elif(fjsontype=="list"):
         fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
-        flisttmp =
+        flisttmp = MkTempFile()
         flisttmp.write(fprejsoncontent.encode())
         flisttmp.seek(0)
         fjsoncontent = ReadFileHeaderData(flisttmp, fjsonlen, delimiter)
@@ -2366,7 +2671,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
         return False
     fhend = fp.tell() - 1
     fcontentstart = fp.tell()
-    fcontents =
+    fcontents = MkTempFile()
     pyhascontents = False
     if(fsize > 0 and not listonly):
         if(fcompression == "none" or fcompression == "" or fcompression == "auto"):
@@ -2396,7 +2701,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
                 cfcontents = UncompressFileAlt(
                     fcontents, formatspecs)
                 cfcontents.seek(0, 0)
-                fcontents =
+                fcontents = MkTempFile()
                 shutil.copyfileobj(cfcontents, fcontents)
                 cfcontents.close()
                 fcontents.seek(0, 0)
@@ -2428,22 +2733,20 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
     return outlist


-def ReadFileDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
     if(not hasattr(fp, "read")):
         return False
     delimiter = formatspecs['format_delimiter']
-    curloc =
+    curloc = filestart
     try:
-        fp.seek(0, 2)
+        fp.seek(0, 2)
     except OSError:
-        SeekToEndOfFile(fp)
+        SeekToEndOfFile(fp)
     except ValueError:
-        SeekToEndOfFile(fp)
-    CatSize = fp.tell()
-    CatSizeEnd = CatSize
+        SeekToEndOfFile(fp)
+    CatSize = fp.tell()
+    CatSizeEnd = CatSize
     fp.seek(curloc, 0)
-    if(curloc > 0):
-        fp.seek(0, 0)
     inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
     formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
     formdelszie = len(formatspecs['format_delimiter'])
@@ -2458,8 +2761,6 @@ def ReadFileDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=Fa
     else:
         inheader = ReadFileHeaderDataWoSize(
             fp, formatspecs['format_delimiter'])
-    if(curloc > 0):
-        fp.seek(curloc, 0)
     fprechecksumtype = inheader[-2]
     fprechecksum = inheader[-1]
     headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
@@ -2483,22 +2784,20 @@ def ReadFileDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=Fa
     return flist


-def ReadFileDataWithContentToArray(fp, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
+def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
     if(not hasattr(fp, "read")):
         return False
     delimiter = formatspecs['format_delimiter']
-    curloc =
+    curloc = filestart
     try:
-        fp.seek(0, 2)
+        fp.seek(0, 2)
     except OSError:
-        SeekToEndOfFile(fp)
+        SeekToEndOfFile(fp)
     except ValueError:
-        SeekToEndOfFile(fp)
-    CatSize = fp.tell()
-    CatSizeEnd = CatSize
+        SeekToEndOfFile(fp)
+    CatSize = fp.tell()
+    CatSizeEnd = CatSize
     fp.seek(curloc, 0)
-    if(curloc > 0):
-        fp.seek(0, 0)
     inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
     formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
     formdelszie = len(formatspecs['format_delimiter'])
@@ -2530,8 +2829,6 @@ def ReadFileDataWithContentToArray(fp, seekstart=0, seekend=0, listonly=False, c
         fextrafieldslist = json.loads(fextrafieldslist[0])
     except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
         pass
-    if(curloc > 0):
-        fp.seek(curloc, 0)
     formversion = re.findall("([\\d]+)", formstring)
     fheadsize = int(inheader[0], 16)
     fnumfields = int(inheader[1], 16)
@@ -2596,7 +2893,7 @@ def ReadFileDataWithContentToArray(fp, seekstart=0, seekend=0, listonly=False, c
             invalid_archive = True
         prefhend = fp.tell() - 1
         prefcontentstart = fp.tell()
-        prefcontents =
+        prefcontents = MkTempFile()
         pyhascontents = False
         if(prefsize > 0):
             prefcontents.write(fp.read(prefsize))
@@ -2643,22 +2940,20 @@ def ReadFileDataWithContentToArray(fp, seekstart=0, seekend=0, listonly=False, c
     return outlist


-def ReadFileDataWithContentToList(fp, seekstart=0, seekend=0, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
+def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
     if(not hasattr(fp, "read")):
         return False
     delimiter = formatspecs['format_delimiter']
-    curloc =
+    curloc = filestart
     try:
-        fp.seek(0, 2)
+        fp.seek(0, 2)
     except OSError:
-        SeekToEndOfFile(fp)
+        SeekToEndOfFile(fp)
     except ValueError:
-        SeekToEndOfFile(fp)
-    CatSize = fp.tell()
-    CatSizeEnd = CatSize
+        SeekToEndOfFile(fp)
+    CatSize = fp.tell()
+    CatSizeEnd = CatSize
     fp.seek(curloc, 0)
-    if(curloc > 0):
-        fp.seek(0, 0)
     inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
     formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
     formdelszie = len(formatspecs['format_delimiter'])
@@ -2690,8 +2985,6 @@ def ReadFileDataWithContentToList(fp, seekstart=0, seekend=0, listonly=False, co
         fextrafieldslist = json.loads(fextrafieldslist[0])
     except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
         pass
-    if(curloc > 0):
-        fp.seek(curloc, 0)
     formversion = re.findall("([\\d]+)", formstring)
     fheadsize = int(inheader[0], 16)
     fnumfields = int(inheader[1], 16)
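The new leading `filestart` parameter threaded through the readers above gives the offset in the stream where the archive begins, replacing the old behavior of rewinding to offset 0. A hedged caller-side sketch, assuming the module is importable as `pyfoxfile` and that `data.bin` (a hypothetical file) carries a FoxFile archive 512 bytes into the stream:

```python
import pyfoxfile

with open("data.bin", "rb") as fp:
    # Parse the archive that starts 512 bytes into the stream; internal seeks
    # are now relative to filestart rather than to offset 0.
    result = pyfoxfile.ReadFileDataWithContentToArray(fp, filestart=512, listonly=True)

if result is not False:
    print(result)  # parsed archive structure (listing only)
```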
@@ -2809,25 +3102,25 @@ def ReadFileDataWithContentToList(fp, seekstart=0, seekend=0, listonly=False, co
     return outlist


-def ReadInFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
     if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
         formatspecs = formatspecs[fmttype]
     elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
         fmttype = "auto"
     if(hasattr(infile, "read") or hasattr(infile, "write")):
         fp = infile
-        fp.seek(
-        compresscheck = CheckCompressionType(fp, formatspecs, False)
+        fp.seek(filestart, 0)
+        compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         else:
-            fp.seek(
-            checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
+            fp.seek(filestart, 0)
+            checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
             if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
                 formatspecs = formatspecs[checkcompressfile]
-        fp.seek(
-        fp = UncompressFileAlt(fp, formatspecs)
-        checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
+        fp.seek(filestart, 0)
+        fp = UncompressFileAlt(fp, formatspecs, filestart)
+        checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
         if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
             return TarFileToArray(infile, seekstart, seekend, listonly, contentasfile, skipchecksum, formatspecs, seektoend, True)
         elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
@@ -2862,58 +3155,58 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0,
                 compresscheck = "zlib"
             else:
                 return False
-        fp.seek(
+        fp.seek(filestart, 0)
     elif(infile == "-"):
-        fp =
+        fp = MkTempFile()
         if(hasattr(sys.stdin, "buffer")):
             shutil.copyfileobj(sys.stdin.buffer, fp)
         else:
             shutil.copyfileobj(sys.stdin, fp)
-        fp.seek(
-        fp = UncompressFileAlt(fp, formatspecs)
-        fp.seek(
-        compresscheck = CheckCompressionType(fp, formatspecs, False)
+        fp.seek(filestart, 0)
+        fp = UncompressFileAlt(fp, formatspecs, filestart)
+        fp.seek(filestart, 0)
+        compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         else:
-            fp.seek(
-            checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
+            fp.seek(filestart, 0)
+            checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
             if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
                 formatspecs = formatspecs[checkcompressfile]
-            fp.seek(
+            fp.seek(filestart, 0)
         if(not fp):
             return False
-        fp.seek(
+        fp.seek(filestart, 0)
     elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
-        fp =
+        fp = MkTempFile()
         fp.write(infile)
-        fp.seek(
-        fp = UncompressFileAlt(fp, formatspecs)
-        fp.seek(
-        compresscheck = CheckCompressionType(fp, formatspecs, False)
+        fp.seek(filestart, 0)
+        fp = UncompressFileAlt(fp, formatspecs, filestart)
+        fp.seek(filestart, 0)
+        compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         else:
-            fp.seek(
-            checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
+            fp.seek(filestart, 0)
+            checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
             if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
                 formatspecs = formatspecs[checkcompressfile]
-            fp.seek(
+            fp.seek(filestart, 0)
         if(not fp):
             return False
-        fp.seek(
-    elif(re.findall(
+        fp.seek(filestart, 0)
+    elif(re.findall(__download_proto_support__, infile)):
         fp = download_file_from_internet_file(infile)
-        fp.seek(
-        compresscheck = CheckCompressionType(fp, formatspecs, False)
+        fp.seek(filestart, 0)
+        compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         else:
-            fp.seek(
-            checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
+            fp.seek(filestart, 0)
+            checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
             if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
                 formatspecs = formatspecs[checkcompressfile]
-            fp.seek(
+            fp.seek(filestart, 0)
         if(not compresscheck):
             fextname = os.path.splitext(infile)[1]
             if(fextname == ".gz"):
@@ -2934,14 +3227,14 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0,
                 compresscheck = "zlib"
             else:
                 return False
-        fp.seek(
-        fp = UncompressFileAlt(fp, formatspecs)
+        fp.seek(filestart, 0)
+        fp = UncompressFileAlt(fp, formatspecs, filestart)
         if(not fp):
             return False
-        fp.seek(
+        fp.seek(filestart, 0)
     else:
         infile = RemoveWindowsPath(infile)
-        checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
+        checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
         if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
             formatspecs = formatspecs[checkcompressfile]
         if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
@@ -2956,7 +3249,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0,
             return False
         elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
             return False
-        compresscheck = CheckCompressionType(infile, formatspecs, True)
+        compresscheck = CheckCompressionType(infile, formatspecs, filestart, True)
         if(not compresscheck):
             fextname = os.path.splitext(infile)[1]
             if(fextname == ".gz"):
@@ -2979,43 +3272,43 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0,
                 return False
         if(not compresscheck):
             return False
-    fp = UncompressFile(infile, formatspecs, "rb")
-    return ReadFileDataWithContentToArray(fp, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+    fp = UncompressFile(infile, formatspecs, "rb", filestart)
+    return ReadFileDataWithContentToArray(fp, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)


-def ReadInMultipleFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+def ReadInMultipleFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
     if(isinstance(infile, (list, tuple, ))):
         pass
     else:
         infile = [infile]
     outretval = {}
     for curfname in infile:
-        outretval[curfname] = ReadInFileWithContentToArray(curfname, fmttype, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+        outretval[curfname] = ReadInFileWithContentToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
     return outretval

-def ReadInMultipleFilesWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
-    return ReadInMultipleFileWithContentToArray(infile, fmttype, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+def ReadInMultipleFilesWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+    return ReadInMultipleFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)


-def ReadInFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
     if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
         formatspecs = formatspecs[fmttype]
     elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
         fmttype = "auto"
     if(hasattr(infile, "read") or hasattr(infile, "write")):
         fp = infile
-        fp.seek(
-        compresscheck = CheckCompressionType(fp, formatspecs, False)
+        fp.seek(filestart, 0)
+        compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         else:
-            fp.seek(
-            checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
+            fp.seek(filestart, 0)
+            checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
             if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
                 formatspecs = formatspecs[checkcompressfile]
-        fp.seek(
-        fp = UncompressFileAlt(fp, formatspecs)
-        checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
+        fp.seek(filestart, 0)
+        fp = UncompressFileAlt(fp, formatspecs, filestart)
+        checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
         if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
             return TarFileToArray(infile, seekstart, seekend, listonly, contentasfile, skipchecksum, formatspecs, seektoend, True)
         elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
@@ -3050,58 +3343,58 @@ def ReadInFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0,
                 compresscheck = "zlib"
             else:
                 return False
-        fp.seek(
+        fp.seek(filestart, 0)
     elif(infile == "-"):
-        fp =
+        fp = MkTempFile()
        if(hasattr(sys.stdin, "buffer")):
            shutil.copyfileobj(sys.stdin.buffer, fp)
        else:
            shutil.copyfileobj(sys.stdin, fp)
-        fp.seek(
-        fp = UncompressFileAlt(fp, formatspecs)
-        fp.seek(
-        compresscheck = CheckCompressionType(fp, formatspecs, False)
+        fp.seek(filestart, 0)
+        fp = UncompressFileAlt(fp, formatspecs, filestart)
+        fp.seek(filestart, 0)
+        compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         else:
-            fp.seek(
-            checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
+            fp.seek(filestart, 0)
+            checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
             if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
                 formatspecs = formatspecs[checkcompressfile]
-            fp.seek(
+            fp.seek(filestart, 0)
         if(not fp):
             return False
-        fp.seek(
+        fp.seek(filestart, 0)
     elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
-        fp =
+        fp = MkTempFile()
         fp.write(infile)
-        fp.seek(
-        fp = UncompressFileAlt(fp, formatspecs)
-        fp.seek(
-        compresscheck = CheckCompressionType(fp, formatspecs, False)
+        fp.seek(filestart, 0)
+        fp = UncompressFileAlt(fp, formatspecs, filestart)
+        fp.seek(filestart, 0)
+        compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         else:
-            fp.seek(
-            checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
+            fp.seek(filestart, 0)
+            checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
             if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
                 formatspecs = formatspecs[checkcompressfile]
-            fp.seek(
+            fp.seek(filestart, 0)
         if(not fp):
             return False
-        fp.seek(
-    elif(re.findall(
+        fp.seek(filestart, 0)
+    elif(re.findall(__download_proto_support__, infile)):
         fp = download_file_from_internet_file(infile)
-        fp.seek(
-        compresscheck = CheckCompressionType(fp, formatspecs, False)
+        fp.seek(filestart, 0)
+        compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         else:
-            fp.seek(
-            checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
+            fp.seek(filestart, 0)
+            checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
             if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
                 formatspecs = formatspecs[checkcompressfile]
-            fp.seek(
+            fp.seek(filestart, 0)
         if(not compresscheck):
             fextname = os.path.splitext(infile)[1]
             if(fextname == ".gz"):
@@ -3122,14 +3415,14 @@ def ReadInFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0,
                 compresscheck = "zlib"
             else:
                 return False
-        fp.seek(
-        fp = UncompressFileAlt(fp, formatspecs)
+        fp.seek(filestart, 0)
+        fp = UncompressFileAlt(fp, formatspecs, filestart)
         if(not fp):
             return False
-        fp.seek(
+        fp.seek(filestart, 0)
     else:
         infile = RemoveWindowsPath(infile)
-        checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
+        checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
         if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
             formatspecs = formatspecs[checkcompressfile]
         if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
@@ -3144,7 +3437,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0,
             return False
         elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
             return False
-        compresscheck = CheckCompressionType(infile, formatspecs, True)
+        compresscheck = CheckCompressionType(infile, formatspecs, filestart, True)
         if(not compresscheck):
             fextname = os.path.splitext(infile)[1]
             if(fextname == ".gz"):
@@ -3167,22 +3460,22 @@ def ReadInFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0,
                 return False
         if(not compresscheck):
             return False
-    fp = UncompressFile(infile, formatspecs, "rb")
-    return ReadFileDataWithContentToList(fp, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+    fp = UncompressFile(infile, formatspecs, "rb", filestart)
+    return ReadFileDataWithContentToList(fp, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)


-def ReadInMultipleFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+def ReadInMultipleFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
     if(isinstance(infile, (list, tuple, ))):
         pass
     else:
         infile = [infile]
     outretval = {}
     for curfname in infile:
-        curretfile[curfname] = ReadInFileWithContentToList(curfname, fmttype, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+        curretfile[curfname] = ReadInFileWithContentToList(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
     return outretval

-def ReadInMultipleFilesWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
-    return ReadInMultipleFileWithContentToList(infile, fmttype, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+def ReadInMultipleFilesWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+    return ReadInMultipleFileWithContentToList(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)


 def AppendNullByte(indata, delimiter=__file_format_dict__['format_delimiter']):
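`ReadInFileWithContentToArray`/`...ToList` accept a file object, `-` for stdin, raw bytes, a local path, or (per the `__download_proto_support__` check above) an http/https/ftp/ftps/sftp/scp URL, and now forward `filestart` throughout. A hedged sketch with hypothetical archive names:

```python
import pyfoxfile

# Single local archive, metadata only.
one = pyfoxfile.ReadInFileWithContentToArray("backup.fox", listonly=True)

# Several archives at once; the result is a dict keyed by input name.
many = pyfoxfile.ReadInMultipleFilesWithContentToArray(["a.fox", "b.fox"], listonly=True)
for name, parsed in many.items():
    print(name, bool(parsed))  # False means the input could not be parsed
```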
@@ -3277,7 +3570,7 @@ def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype="crc3
     return fp


-def
+def MakeEmptyFoxFilePointer(fp, fmttype=__file_format_default__, checksumtype="crc32", formatspecs=__file_format_multi_dict__):
     return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)


@@ -3308,11 +3601,11 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
         pass
     if(outfile == "-" or outfile is None):
         verbose = False
-        fp =
+        fp = MkTempFile()
     elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
         fp = outfile
-    elif(re.findall(
-        fp =
+    elif(re.findall(__upload_proto_support__, outfile)):
+        fp = MkTempFile()
     else:
         fbasename = os.path.splitext(outfile)[0]
         fextname = os.path.splitext(outfile)[1]
@@ -3347,7 +3640,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
         outvar = fp.read()
         fp.close()
         return outvar
-    elif(re.findall(
+    elif(re.findall(__upload_proto_support__, outfile)):
         fp = CompressOpenFileAlt(
             fp, compression, compressionlevel, compressionuselist, formatspecs)
         fp.seek(0, 0)
@@ -3360,7 +3653,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
     return True


-def
+def MakeEmptyFoxFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="crc32", formatspecs=__file_format_dict__, returnfp=False):
     return MakeEmptyFile(outfile, "auto", compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, returnfp)


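The previously truncated wrapper names are spelled out in this version: `MakeEmptyFoxFilePointer` and `MakeEmptyFoxFile` are thin aliases over `MakeEmptyFilePointer`/`MakeEmptyFile`. A small sketch (the output name is hypothetical, and the behavior for a `None` target is inferred from the `MakeEmptyFile` branch visible above, which builds the archive in an in-memory `MkTempFile()` buffer and returns its contents):

```python
import pyfoxfile

# Write an empty, uncompressed FoxFile archive to a (hypothetical) output path.
pyfoxfile.MakeEmptyFoxFile("empty.fox", compression="none", checksumtype="crc32")

# Passing None (or "-") appears to route through an in-memory buffer instead
# of a file on disk; see the MakeEmptyFile branch in the hunk above.
blob = pyfoxfile.MakeEmptyFoxFile(None, compression="none")
```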
@@ -3644,16 +3937,16 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, filevalues=[], ext
 fwinattributes = format(int(0), 'x').lower()
 fcompression = ""
 fcsize = format(int(0), 'x').lower()
-fcontents =
+fcontents = MkTempFile()
 chunk_size = 1024
 fcencoding = "UTF-8"
 curcompression = "none"
 if not followlink and ftype in data_types:
 with open(fname, "rb") as fpc:
 shutil.copyfileobj(fpc, fcontents)
-typechecktest = CheckCompressionType(fcontents, closefp=False)
+typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
 fcontents.seek(0, 0)
-fcencoding = GetFileEncoding(fcontents, False)
+fcencoding = GetFileEncoding(fcontents, 0, False)
 if(typechecktest is False and not compresswholefile):
 fcontents.seek(0, 2)
 ucfsize = fcontents.tell()
@@ -3663,7 +3956,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, filevalues=[], ext
 ilmin = 0
 ilcsize = []
 while(ilmin < ilsize):
-cfcontents =
+cfcontents = MkTempFile()
 fcontents.seek(0, 0)
 shutil.copyfileobj(fcontents, cfcontents)
 fcontents.seek(0, 0)
@@ -3680,7 +3973,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, filevalues=[], ext
 ilcmin = ilcsize.index(min(ilcsize))
 curcompression = compressionuselist[ilcmin]
 fcontents.seek(0, 0)
-cfcontents =
+cfcontents = MkTempFile()
 shutil.copyfileobj(fcontents, cfcontents)
 cfcontents.seek(0, 0)
 cfcontents = CompressOpenFileAlt(
@@ -3698,9 +3991,9 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, filevalues=[], ext
 flstatinfo = os.stat(flinkname)
 with open(flinkname, "rb") as fpc:
 shutil.copyfileobj(fpc, fcontents)
-typechecktest = CheckCompressionType(fcontents, closefp=False)
+typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
 fcontents.seek(0, 0)
-fcencoding = GetFileEncoding(fcontents, False)
+fcencoding = GetFileEncoding(fcontents, 0, False)
 if(typechecktest is False and not compresswholefile):
 fcontents.seek(0, 2)
 ucfsize = fcontents.tell()
@@ -3710,7 +4003,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, filevalues=[], ext
 ilmin = 0
 ilcsize = []
 while(ilmin < ilsize):
-cfcontents =
+cfcontents = MkTempFile()
 fcontents.seek(0, 0)
 shutil.copyfileobj(fcontents, cfcontents)
 fcontents.seek(0, 0)
@@ -3727,7 +4020,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, filevalues=[], ext
 ilcmin = ilcsize.index(min(ilcsize))
 curcompression = compressionuselist[ilcmin]
 fcontents.seek(0, 0)
-cfcontents =
+cfcontents = MkTempFile()
 shutil.copyfileobj(fcontents, cfcontents)
 cfcontents.seek(0, 0)
 cfcontents = CompressOpenFileAlt(
@@ -3810,7 +4103,7 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, filevalues=[], extr
 fheaderchecksumtype = curfname[26]
 fcontentchecksumtype = curfname[27]
 fcontents = curfname[28]
-fencoding = GetFileEncoding(fcontents, False)
+fencoding = GetFileEncoding(fcontents, 0, False)
 tmpoutlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize,
 fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev, fdev_minor, fdev_major, fseeknextfile]
 fcontents.seek(0, 0)
@@ -3857,11 +4150,11 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
 pass
 if(outfile == "-" or outfile is None):
 verbose = False
-fp =
+fp = MkTempFile()
 elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
 fp = outfile
-elif(re.findall(
-fp =
+elif(re.findall(__upload_proto_support__, outfile)):
+fp = MkTempFile()
 else:
 fbasename = os.path.splitext(outfile)[0]
 fextname = os.path.splitext(outfile)[1]
@@ -3897,7 +4190,7 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
 outvar = fp.read()
 fp.close()
 return outvar
-elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
+elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
 fp = CompressOpenFileAlt(
 fp, compression, compressionlevel, compressionuselist, formatspecs)
 fp.seek(0, 0)
@@ -3937,11 +4230,11 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
 pass
 if(outfile == "-" or outfile is None):
 verbose = False
-fp =
+fp = MkTempFile()
 elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
 fp = outfile
-elif(re.findall(
-fp =
+elif(re.findall(__upload_proto_support__, outfile)):
+fp = MkTempFile()
 else:
 fbasename = os.path.splitext(outfile)[0]
 fextname = os.path.splitext(outfile)[1]
@@ -3977,7 +4270,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
 outvar = fp.read()
 fp.close()
 return outvar
-elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
+elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
 fp = CompressOpenFileAlt(
 fp, compression, compressionlevel, compressionuselist, formatspecs)
 fp.seek(0, 0)
@@ -4058,10 +4351,11 @@ def GzipCompressData(data, compresslevel=9):
 compressed_data = gzip.compress(data, compresslevel=compresslevel)
 except AttributeError:
 # Fallback to older method for Python 2.x and older 3.x versions
-out =
+out = MkTempFile()
 with gzip.GzipFile(fileobj=out, mode="wb", compresslevel=compresslevel) as f:
 f.write(data)
-
+out.seek(0, 0)
+compressed_data = out.read()
 return compressed_data


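The fallback above buffers the gzip stream in a temporary file object and reads it back after rewinding. A minimal standalone sketch of the same pattern, assuming only the standard library (MkTempFile in this diff is a project helper; io.BytesIO stands in for it here):

    import gzip
    import io

    def gzip_compress_fallback(data, compresslevel=9):
        # Emulate gzip.compress() by writing through gzip.GzipFile into an
        # in-memory buffer, then rewinding and reading the compressed bytes.
        out = io.BytesIO()  # stand-in for the project's MkTempFile() helper
        with gzip.GzipFile(fileobj=out, mode="wb", compresslevel=compresslevel) as f:
            f.write(data)
        out.seek(0, 0)
        return out.read()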
@@ -4071,7 +4365,7 @@ def GzipDecompressData(compressed_data):
 decompressed_data = gzip.decompress(compressed_data)
 except AttributeError:
 # Fallback to older method for Python 2.x and older 3.x versions
-inp =
+inp = MkTempFile(compressed_data)
 with gzip.GzipFile(fileobj=inp, mode="rb") as f:
 decompressed_data = f.read()
 return decompressed_data
@@ -4159,7 +4453,7 @@ def IsSingleDict(variable):
 return True


-def GetFileEncoding(infile, closefp=True):
+def GetFileEncoding(infile, filestart=0, closefp=True):
 if(hasattr(infile, "read") or hasattr(infile, "write")):
 fp = infile
 else:
@@ -4168,19 +4462,19 @@ def GetFileEncoding(infile, closefp=True):
 except FileNotFoundError:
 return False
 file_encoding = "UTF-8"
-fp.seek(
+fp.seek(filestart, 0)
 prefp = fp.read(2)
 if(prefp == binascii.unhexlify("fffe")):
 file_encoding = "UTF-16LE"
 elif(prefp == binascii.unhexlify("feff")):
 file_encoding = "UTF-16BE"
-fp.seek(
+fp.seek(filestart, 0)
 prefp = fp.read(3)
 if(prefp == binascii.unhexlify("efbbbf")):
 file_encoding = "UTF-8"
 elif(prefp == binascii.unhexlify("0efeff")):
 file_encoding = "SCSU"
-fp.seek(
+fp.seek(filestart, 0)
 prefp = fp.read(4)
 if(prefp == binascii.unhexlify("fffe0000")):
 file_encoding = "UTF-32LE"
@@ -4196,21 +4490,21 @@ def GetFileEncoding(infile, closefp=True):
 file_encoding = "UTF-7"
 elif(prefp == binascii.unhexlify("2b2f762f")):
 file_encoding = "UTF-7"
-fp.seek(
+fp.seek(filestart, 0)
 if(closefp):
 fp.close()
 return file_encoding


-def GetFileEncodingFromString(instring, closefp=True):
+def GetFileEncodingFromString(instring, filestart=0, closefp=True):
 try:
-instringsfile =
+instringsfile = MkTempFile(instring)
 except TypeError:
-instringsfile =
-return GetFileEncoding(instringsfile, closefp)
+instringsfile = MkTempFile(instring.encode("UTF-8"))
+return GetFileEncoding(instringsfile, filestart, closefp)


-def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp=True):
+def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, filestart=0, closefp=True):
 if(hasattr(infile, "read") or hasattr(infile, "write")):
 fp = infile
 else:
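The readers above gain a filestart offset in 0.22.2, so encoding and compression detection can be run against a stream that does not begin at byte 0. A hedged usage sketch, assuming the module from this wheel imports as pyfoxfile and that the signatures match the ones shown in this diff (the file name is hypothetical):

    import pyfoxfile  # assumption: the wheel's top-level module

    with open("archive.fox", "rb") as fp:  # hypothetical input file
        # Probe at offset 0; a nonzero filestart would probe an embedded
        # payload further into the same file object.
        enc = pyfoxfile.GetFileEncoding(fp, filestart=0, closefp=False)
        kind = pyfoxfile.CheckCompressionType(fp, filestart=0, closefp=False)
        print(enc, kind)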
@@ -4219,7 +4513,8 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
 except FileNotFoundError:
 return False
 filetype = False
-
+curloc = filestart
+fp.seek(filestart, 0)
 prefp = fp.read(2)
 if(prefp == binascii.unhexlify("1f8b")):
 filetype = "gzip"
@@ -4235,13 +4530,13 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
 filetype = "zlib"
 elif(prefp == binascii.unhexlify("1f9d")):
 filetype = "zcompress"
-fp.seek(
+fp.seek(curloc, 0)
 prefp = fp.read(3)
 if(prefp == binascii.unhexlify("425a68")):
 filetype = "bzip2"
 elif(prefp == binascii.unhexlify("5d0000")):
 filetype = "lzma"
-fp.seek(
+fp.seek(curloc, 0)
 prefp = fp.read(4)
 if(prefp == binascii.unhexlify("28b52ffd")):
 filetype = "zstd"
@@ -4253,29 +4548,29 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
 filetype = "zipfile"
 elif(prefp == binascii.unhexlify("504b0708")):
 filetype = "zipfile"
-fp.seek(
+fp.seek(curloc, 0)
 prefp = fp.read(5)
 if(prefp == binascii.unhexlify("7573746172")):
 filetype = "tarfile"
 if(prefp == binascii.unhexlify("7573746172")):
 filetype = "tarfile"
-fp.seek(
+fp.seek(curloc, 0)
 prefp = fp.read(6)
 if(prefp == binascii.unhexlify("fd377a585a00")):
 filetype = "xz"
 elif(prefp == binascii.unhexlify("377abcaf271c")):
 filetype = "7zipfile"
-fp.seek(
+fp.seek(curloc, 0)
 prefp = fp.read(7)
 if(prefp == binascii.unhexlify("526172211a0700")):
 filetype = "rarfile"
 elif(prefp == binascii.unhexlify("2a2a4143452a2a")):
 filetype = "ace"
-fp.seek(
+fp.seek(curloc, 0)
 prefp = fp.read(7)
 if(prefp == binascii.unhexlify("894c5a4f0d0a1a")):
 filetype = "lzo"
-fp.seek(
+fp.seek(curloc, 0)
 prefp = fp.read(8)
 if(prefp == binascii.unhexlify("7573746172003030")):
 filetype = "tarfile"
@@ -4283,7 +4578,7 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
 filetype = "tarfile"
 if(prefp == binascii.unhexlify("526172211a070100")):
 filetype = "rarfile"
-fp.seek(
+fp.seek(curloc, 0)
 if(IsNestedDict(formatspecs)):
 for key, value in formatspecs.items():
 prefp = fp.read(formatspecs[key]['format_len'])
@@ -4299,7 +4594,7 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
 if(formstring == inheaderver and formdel == formatspecs[key]['format_delimiter']):
 filetype = formatspecs[key]['format_magic']
 continue
-fp.seek(
+fp.seek(curloc, 0)
 elif(IsSingleDict(formatspecs)):
 prefp = fp.read(formatspecs['format_len'])
 if(prefp == binascii.unhexlify(formatspecs['format_hex'])):
@@ -4314,15 +4609,15 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
 filetype = formatspecs['format_magic']
 else:
 pass
-fp.seek(
+fp.seek(curloc, 0)
 prefp = fp.read(9)
 if(prefp == binascii.unhexlify("894c5a4f000d0a1a0a")):
 filetype = "lzo"
-fp.seek(
+fp.seek(curloc, 0)
 prefp = fp.read(10)
 if(prefp == binascii.unhexlify("7061785f676c6f62616c")):
 filetype = "tarfile"
-fp.seek(
+fp.seek(curloc, 0)
 if(filetype == "gzip" or filetype == "bzip2" or filetype == "lzma" or filetype == "zstd" or filetype == "lz4" or filetype == "zlib"):
 if(TarFileCheck(fp)):
 filetype = "tarfile"
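CheckCompressionType above remembers the probe offset in curloc (initialised from filestart) and rewinds to it before each longer magic-byte read. A minimal standalone sketch of that rewind-and-probe pattern, using a small invented magic table rather than the full list in the diff:

    import io

    # Illustrative subset of magic prefixes; the real function checks many more.
    _MAGIC = [
        (b"\x1f\x8b", "gzip"),
        (b"BZh", "bzip2"),
        (b"\xfd7zXZ\x00", "xz"),
    ]

    def probe_magic(fp, filestart=0):
        # Rewind to the same offset before each read so every prefix is
        # compared against the probe position, mirroring the curloc seeks.
        curloc = filestart
        filetype = False
        for magic, name in _MAGIC:
            fp.seek(curloc, 0)
            if fp.read(len(magic)) == magic:
                filetype = name
        fp.seek(curloc, 0)
        return filetype

    print(probe_magic(io.BytesIO(b"\x1f\x8b\x08rest-of-stream")))  # -> gzip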
@@ -4337,14 +4632,15 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
 return "7zipfile"
 else:
 filetype = False
-fp.seek(
+fp.seek(curloc, 0)
 if(closefp):
 fp.close()
 return filetype


-def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, closefp=True):
-compresscheck = CheckCompressionType(infile, formatspecs, False)
+def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, filestart=0, closefp=True):
+compresscheck = CheckCompressionType(infile, formatspecs, filestart, False)
+curloc = filestart
 if(not compresscheck):
 fextname = os.path.splitext(infile)[1]
 if(fextname == ".gz"):
@@ -4393,7 +4689,7 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, clos
 elif(py7zr_support and compresscheck == "7zipfile" and py7zr.is_7zfile(infile)):
 return "7zipfile"
 if(hasattr(infile, "read") or hasattr(infile, "write")):
-fp = UncompressFileAlt(infile, formatspecs)
+fp = UncompressFileAlt(infile, formatspecs, filestart)
 else:
 try:
 if(compresscheck == "gzip" and compresscheck in compressionsupport):
@@ -4423,10 +4719,11 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, clos
 except FileNotFoundError:
 return False
 filetype = False
+fp.seek(filestart, 0)
 prefp = fp.read(5)
 if(prefp == binascii.unhexlify("7573746172")):
 filetype = "tarfile"
-fp.seek(
+fp.seek(curloc, 0)
 if(IsNestedDict(formatspecs)):
 for key, value in formatspecs.items():
 prefp = fp.read(formatspecs[key]['format_len'])
@@ -4442,7 +4739,7 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, clos
 if(formstring == inheaderver and formdel == formatspecs[key]['format_delimiter']):
 filetype = formatspecs[key]['format_magic']
 continue
-fp.seek(
+fp.seek(curloc, 0)
 elif(IsSingleDict(formatspecs)):
 prefp = fp.read(formatspecs['format_len'])
 if(prefp == binascii.unhexlify(formatspecs['format_hex'])):
@@ -4457,36 +4754,36 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, clos
 filetype = formatspecs['format_magic']
 else:
 pass
-fp.seek(
+fp.seek(curloc, 0)
 prefp = fp.read(10)
 if(prefp == binascii.unhexlify("7061785f676c6f62616c")):
 filetype = "tarfile"
-fp.seek(
+fp.seek(curloc, 0)
 if(closefp):
 fp.close()
 return filetype


-def CheckCompressionTypeFromString(instring, formatspecs=__file_format_multi_dict__, closefp=True):
+def CheckCompressionTypeFromString(instring, formatspecs=__file_format_multi_dict__, filestart=0, closefp=True):
 try:
-instringsfile =
+instringsfile = MkTempFile(instring)
 except TypeError:
-instringsfile =
-return CheckCompressionType(instringsfile, formatspecs, closefp)
+instringsfile = MkTempFile(instring.encode("UTF-8"))
+return CheckCompressionType(instringsfile, formatspecs, filestart, closefp)


-def CheckCompressionTypeFromBytes(instring, formatspecs=__file_format_multi_dict__, closefp=True):
+def CheckCompressionTypeFromBytes(instring, formatspecs=__file_format_multi_dict__, filestart=0, closefp=True):
 try:
-instringsfile =
+instringsfile = MkTempFile(instring)
 except TypeError:
-instringsfile =
-return CheckCompressionType(instringsfile, formatspecs, closefp)
+instringsfile = MkTempFile(instring.decode("UTF-8"))
+return CheckCompressionType(instringsfile, formatspecs, filestart, closefp)


-def UncompressFileAlt(fp, formatspecs=__file_format_multi_dict__):
+def UncompressFileAlt(fp, formatspecs=__file_format_multi_dict__, filestart=0):
 if(not hasattr(fp, "read")):
 return False
-compresscheck = CheckCompressionType(fp, formatspecs, False)
+compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
 if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
 formatspecs = formatspecs[compresscheck]
 if(compresscheck == "gzip" and compresscheck in compressionsupport):
@@ -4520,8 +4817,8 @@ def UncompressFileAlt(fp, formatspecs=__file_format_multi_dict__):
 return fp


-def UncompressFile(infile, formatspecs=__file_format_multi_dict__, mode="rb"):
-compresscheck = CheckCompressionType(infile, formatspecs, False)
+def UncompressFile(infile, formatspecs=__file_format_multi_dict__, mode="rb", filestart=0):
+compresscheck = CheckCompressionType(infile, formatspecs, filestart, False)
 if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
 formatspecs = formatspecs[compresscheck]
 if(sys.version_info[0] == 2 and compresscheck):
@@ -4567,8 +4864,8 @@ def UncompressFile(infile, formatspecs=__file_format_multi_dict__, mode="rb"):
 return filefp


-def UncompressString(infile, formatspecs=__file_format_multi_dict__):
-compresscheck = CheckCompressionTypeFromString(infile, formatspecs, False)
+def UncompressString(infile, formatspecs=__file_format_multi_dict__, filestart=0):
+compresscheck = CheckCompressionTypeFromString(infile, formatspecs, filestart, False)
 if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
 formatspecs = formatspecs[compresscheck]
 if(compresscheck == "gzip" and compresscheck in compressionsupport):
@@ -4595,32 +4892,32 @@ def UncompressString(infile, formatspecs=__file_format_multi_dict__):
 return fileuz


-def UncompressStringAlt(instring, formatspecs=__file_format_multi_dict__):
+def UncompressStringAlt(instring, formatspecs=__file_format_multi_dict__, filestart=0):
 filefp = StringIO()
-outstring = UncompressString(instring, formatspecs)
+outstring = UncompressString(instring, formatspecs, filestart)
 filefp.write(outstring)
 filefp.seek(0, 0)
 return filefp

-def UncompressStringAltFP(fp, formatspecs=__file_format_multi_dict__):
+def UncompressStringAltFP(fp, formatspecs=__file_format_multi_dict__, filestart=0):
 if(not hasattr(fp, "read")):
 return False
-prechck = CheckCompressionType(fp, formatspecs, False)
+prechck = CheckCompressionType(fp, formatspecs, filestart, False)
 if(IsNestedDict(formatspecs) and prechck in formatspecs):
 formatspecs = formatspecs[prechck]
-fp.seek(
+fp.seek(filestart, 0)
 if(prechck!="zstd"):
-return UncompressFileAlt(fp, formatspecs)
+return UncompressFileAlt(fp, formatspecs, filestart)
 filefp = StringIO()
-fp.seek(
-outstring = UncompressString(fp.read(), formatspecs)
+fp.seek(filestart, 0)
+outstring = UncompressString(fp.read(), formatspecs, 0)
 filefp.write(outstring)
 filefp.seek(0, 0)
 return filefp


-def UncompressBytes(infile, formatspecs=__file_format_multi_dict__):
-compresscheck = CheckCompressionTypeFromBytes(infile, formatspecs, False)
+def UncompressBytes(infile, formatspecs=__file_format_multi_dict__, filestart=0):
+compresscheck = CheckCompressionTypeFromBytes(infile, formatspecs, filestart, False)
 if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
 formatspecs = formatspecs[compresscheck]
 if(compresscheck == "gzip" and compresscheck in compressionsupport):
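With filestart threaded through the Uncompress* helpers, a caller can decompress a payload that is embedded at a known offset inside a larger file. A hedged sketch, assuming the 0.22.2 signatures shown in this diff (the path and offset are hypothetical):

    import pyfoxfile  # assumption: the wheel's top-level module

    def uncompress_embedded(path, offset):
        # Open the container and hand the file object to the filestart-aware
        # reader; it detects the compression at `offset` and returns a
        # file-like object holding the decompressed stream.
        fp = open(path, "rb")
        return pyfoxfile.UncompressFileAlt(fp, filestart=offset)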
@@ -4645,26 +4942,26 @@ def UncompressBytes(infile, formatspecs=__file_format_multi_dict__):
 return fileuz


-def UncompressBytesAlt(inbytes, formatspecs=__file_format_multi_dict__):
-filefp =
-outstring = UncompressBytes(inbytes, formatspecs)
+def UncompressBytesAlt(inbytes, formatspecs=__file_format_multi_dict__, filestart=0):
+filefp = MkTempFile()
+outstring = UncompressBytes(inbytes, formatspecs, filestart)
 filefp.write(outstring)
 filefp.seek(0, 0)
 return filefp


-def UncompressBytesAltFP(fp, formatspecs=__file_format_multi_dict__):
+def UncompressBytesAltFP(fp, formatspecs=__file_format_multi_dict__, filestart=0):
 if(not hasattr(fp, "read")):
 return False
-prechck = CheckCompressionType(fp, formatspecs, False)
+prechck = CheckCompressionType(fp, formatspecs, filestart, False)
 if(IsNestedDict(formatspecs) and prechck in formatspecs):
 formatspecs = formatspecs[prechck]
-fp.seek(
+fp.seek(filestart, 0)
 if(prechck!="zstd"):
-return UncompressFileAlt(fp, formatspecs)
-filefp =
-fp.seek(
-outstring = UncompressBytes(fp.read(), formatspecs)
+return UncompressFileAlt(fp, formatspecs, filestart)
+filefp = MkTempFile()
+fp.seek(filestart, 0)
+outstring = UncompressBytes(fp.read(), formatspecs, 0)
 filefp.write(outstring)
 filefp.seek(0, 0)
 return filefp
@@ -4679,7 +4976,7 @@ def CompressOpenFileAlt(fp, compression="auto", compressionlevel=None, compressi
 if(compression not in compressionuselist and compression is None):
 compression = "auto"
 if(compression == "gzip" and compression in compressionsupport):
-bytesfp =
+bytesfp = MkTempFile()
 if(compressionlevel is None):
 compressionlevel = 9
 else:
@@ -4687,7 +4984,7 @@ def CompressOpenFileAlt(fp, compression="auto", compressionlevel=None, compressi
 bytesfp.write(GzipCompressData(
 fp.read(), compresslevel=compressionlevel))
 elif(compression == "bzip2" and compression in compressionsupport):
-bytesfp =
+bytesfp = MkTempFile()
 if(compressionlevel is None):
 compressionlevel = 9
 else:
@@ -4695,7 +4992,7 @@ def CompressOpenFileAlt(fp, compression="auto", compressionlevel=None, compressi
 bytesfp.write(BzipCompressData(
 fp.read(), compresslevel=compressionlevel))
 elif(compression == "lz4" and compression in compressionsupport):
-bytesfp =
+bytesfp = MkTempFile()
 if(compressionlevel is None):
 compressionlevel = 9
 else:
@@ -4703,14 +5000,14 @@ def CompressOpenFileAlt(fp, compression="auto", compressionlevel=None, compressi
 bytesfp.write(lz4.frame.compress(
 fp.read(), compression_level=compressionlevel))
 elif((compression == "lzo" or compression == "lzop") and compression in compressionsupport):
-bytesfp =
+bytesfp = MkTempFile()
 if(compressionlevel is None):
 compressionlevel = 9
 else:
 compressionlevel = int(compressionlevel)
 bytesfp.write(lzo.compress(fp.read(), compressionlevel))
 elif(compression == "zstd" and compression in compressionsupport):
-bytesfp =
+bytesfp = MkTempFile()
 if(compressionlevel is None):
 compressionlevel = 9
 else:
@@ -4718,7 +5015,7 @@ def CompressOpenFileAlt(fp, compression="auto", compressionlevel=None, compressi
 compressor = zstandard.ZstdCompressor(compressionlevel, threads=get_default_threads())
 bytesfp.write(compressor.compress(fp.read()))
 elif(compression == "lzma" and compression in compressionsupport):
-bytesfp =
+bytesfp = MkTempFile()
 if(compressionlevel is None):
 compressionlevel = 9
 else:
@@ -4728,7 +5025,7 @@ def CompressOpenFileAlt(fp, compression="auto", compressionlevel=None, compressi
 except (NotImplementedError, lzma.LZMAError):
 bytesfp.write(lzma.compress(fp.read(), format=lzma.FORMAT_ALONE))
 elif(compression == "xz" and compression in compressionsupport):
-bytesfp =
+bytesfp = MkTempFile()
 if(compressionlevel is None):
 compressionlevel = 9
 else:
@@ -4738,7 +5035,7 @@ def CompressOpenFileAlt(fp, compression="auto", compressionlevel=None, compressi
 except (NotImplementedError, lzma.LZMAError):
 bytesfp.write(lzma.compress(fp.read(), format=lzma.FORMAT_XZ))
 elif(compression == "zlib" and compression in compressionsupport):
-bytesfp =
+bytesfp = MkTempFile()
 if(compressionlevel is None):
 compressionlevel = 9
 else:
@@ -4908,11 +5205,11 @@ def PackFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compress
 pass
 if(outfile == "-" or outfile is None):
 verbose = False
-fp =
+fp = MkTempFile()
 elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
 fp = outfile
-elif(re.findall(
-fp =
+elif(re.findall(__upload_proto_support__, outfile)):
+fp = MkTempFile()
 else:
 fbasename = os.path.splitext(outfile)[0]
 fextname = os.path.splitext(outfile)[1]
@@ -5107,15 +5404,15 @@ def PackFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compress
 fwinattributes = format(int(0), 'x').lower()
 fcompression = ""
 fcsize = format(int(0), 'x').lower()
-fcontents =
+fcontents = MkTempFile()
 fcencoding = "UTF-8"
 curcompression = "none"
 if not followlink and ftype in data_types:
 with open(fname, "rb") as fpc:
 shutil.copyfileobj(fpc, fcontents)
-typechecktest = CheckCompressionType(fcontents, closefp=False)
+typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
 fcontents.seek(0, 0)
-fcencoding = GetFileEncoding(fcontents, False)
+fcencoding = GetFileEncoding(fcontents, 0, False)
 if(typechecktest is False and not compresswholefile):
 fcontents.seek(0, 2)
 ucfsize = fcontents.tell()
@@ -5125,7 +5422,7 @@ def PackFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compress
 ilmin = 0
 ilcsize = []
 while(ilmin < ilsize):
-cfcontents =
+cfcontents = MkTempFile()
 fcontents.seek(0, 0)
 shutil.copyfileobj(fcontents, cfcontents)
 fcontents.seek(0, 0)
@@ -5142,7 +5439,7 @@ def PackFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compress
 ilcmin = ilcsize.index(min(ilcsize))
 curcompression = compressionuselist[ilcmin]
 fcontents.seek(0, 0)
-cfcontents =
+cfcontents = MkTempFile()
 shutil.copyfileobj(fcontents, cfcontents)
 cfcontents.seek(0, 0)
 cfcontents = CompressOpenFileAlt(
@@ -5160,9 +5457,9 @@ def PackFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compress
 flstatinfo = os.stat(flinkname)
 with open(flinkname, "rb") as fpc:
 shutil.copyfileobj(fpc, fcontents)
-typechecktest = CheckCompressionType(fcontents, closefp=False)
+typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
 fcontents.seek(0, 0)
-fcencoding = GetFileEncoding(fcontents, False)
+fcencoding = GetFileEncoding(fcontents, 0, False)
 if(typechecktest is False and not compresswholefile):
 fcontents.seek(0, 2)
 ucfsize = fcontents.tell()
@@ -5172,7 +5469,7 @@ def PackFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compress
 ilmin = 0
 ilcsize = []
 while(ilmin < ilsize):
-cfcontents =
+cfcontents = MkTempFile()
 fcontents.seek(0, 0)
 shutil.copyfileobj(fcontents, cfcontents)
 fcontents.seek(0, 0)
@@ -5189,7 +5486,7 @@ def PackFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compress
 ilcmin = ilcsize.index(min(ilcsize))
 curcompression = compressionuselist[ilcmin]
 fcontents.seek(0, 0)
-cfcontents =
+cfcontents = MkTempFile()
 shutil.copyfileobj(fcontents, cfcontents)
 cfcontents.seek(0, 0)
 cfcontents = CompressOpenFileAlt(
@@ -5240,7 +5537,7 @@ def PackFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compress
 outvar = fp.read()
 fp.close()
 return outvar
-elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
+elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
 fp = CompressOpenFileAlt(
 fp, compression, compressionlevel, compressionuselist, formatspecs)
 fp.seek(0, 0)
@@ -5292,11 +5589,11 @@ def PackFoxFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
 pass
 if(outfile == "-" or outfile is None):
 verbose = False
-fp =
+fp = MkTempFile()
 elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
 fp = outfile
-elif(re.findall(
-fp =
+elif(re.findall(__upload_proto_support__, outfile)):
+fp = MkTempFile()
 else:
 fbasename = os.path.splitext(outfile)[0]
 fextname = os.path.splitext(outfile)[1]
@@ -5315,7 +5612,7 @@ def PackFoxFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
 filetoinode = {}
 inodetoforminode = {}
 if(infile == "-"):
-infile =
+infile = MkTempFile()
 if(hasattr(sys.stdin, "buffer")):
 shutil.copyfileobj(sys.stdin.buffer, infile)
 else:
@@ -5324,7 +5621,7 @@ def PackFoxFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
 if(not infile):
 return False
 infile.seek(0, 0)
-elif(re.findall(
+elif(re.findall(__download_proto_support__, infile)):
 infile = download_file_from_internet_file(infile)
 infile.seek(0, 0)
 if(not infile):
@@ -5348,7 +5645,7 @@ def PackFoxFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
 return False
 try:
 if(hasattr(infile, "read") or hasattr(infile, "write")):
-compresscheck = CheckCompressionType(infile, formatspecs, False)
+compresscheck = CheckCompressionType(infile, formatspecs, 0, False)
 if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
 formatspecs = formatspecs[compresscheck]
 if(compresscheck=="zstd"):
@@ -5360,7 +5657,7 @@ def PackFoxFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
 else:
 tarfp = tarfile.open(fileobj=infile, mode="r")
 else:
-compresscheck = CheckCompressionType(infile, formatspecs, True)
+compresscheck = CheckCompressionType(infile, formatspecs, 0, True)
 if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
 formatspecs = formatspecs[compresscheck]
 if(compresscheck=="zstd"):
@@ -5454,16 +5751,16 @@ def PackFoxFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
 fwinattributes = format(int(0), 'x').lower()
 fcompression = ""
 fcsize = format(int(0), 'x').lower()
-fcontents =
+fcontents = MkTempFile()
 fcencoding = "UTF-8"
 curcompression = "none"
 if ftype in data_types:
 fpc = tarfp.extractfile(member)
-fpc.close()
 shutil.copyfileobj(fpc, fcontents)
-
+fpc.close()
+typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
 fcontents.seek(0, 0)
-fcencoding = GetFileEncoding(fcontents, False)
+fcencoding = GetFileEncoding(fcontents, 0, False)
 if(typechecktest is False and not compresswholefile):
 fcontents.seek(0, 2)
 ucfsize = fcontents.tell()
@@ -5473,7 +5770,7 @@ def PackFoxFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
 ilmin = 0
 ilcsize = []
 while(ilmin < ilsize):
-cfcontents =
+cfcontents = MkTempFile()
 fcontents.seek(0, 0)
 shutil.copyfileobj(fcontents, cfcontents)
 fcontents.seek(0, 0)
@@ -5490,7 +5787,7 @@ def PackFoxFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
 ilcmin = ilcsize.index(min(ilcsize))
 curcompression = compressionuselist[ilcmin]
 fcontents.seek(0, 0)
-cfcontents =
+cfcontents = MkTempFile()
 shutil.copyfileobj(fcontents, cfcontents)
 cfcontents.seek(0, 0)
 cfcontents = CompressOpenFileAlt(
@@ -5541,7 +5838,7 @@ def PackFoxFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
 outvar = fp.read()
 fp.close()
 return outvar
-elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
+elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
 fp = CompressOpenFileAlt(
 fp, compression, compressionlevel, compressionuselist, formatspecs)
 fp.seek(0, 0)
@@ -5589,11 +5886,11 @@ def PackFoxFileFromZipFile(infile, outfile, fmttype="auto", compression="auto",
 pass
 if(outfile == "-" or outfile is None):
 verbose = False
-fp =
+fp = MkTempFile()
 elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
 fp = outfile
-elif(re.findall(
-fp =
+elif(re.findall(__upload_proto_support__, outfile)):
+fp = MkTempFile()
 else:
 fbasename = os.path.splitext(outfile)[0]
 fextname = os.path.splitext(outfile)[1]
@@ -5612,7 +5909,7 @@ def PackFoxFileFromZipFile(infile, outfile, fmttype="auto", compression="auto",
 filetoinode = {}
 inodetoforminode = {}
 if(infile == "-"):
-infile =
+infile = MkTempFile()
 if(hasattr(sys.stdin, "buffer")):
 shutil.copyfileobj(sys.stdin.buffer, infile)
 else:
@@ -5621,7 +5918,7 @@ def PackFoxFileFromZipFile(infile, outfile, fmttype="auto", compression="auto",
 if(not infile):
 return False
 infile.seek(0, 0)
-elif(re.findall(
+elif(re.findall(__download_proto_support__, infile)):
 infile = download_file_from_internet_file(infile)
 infile.seek(0, 0)
 if(not infile):
@@ -5753,14 +6050,14 @@ def PackFoxFileFromZipFile(infile, outfile, fmttype="auto", compression="auto",
 fgname = ""
 except ImportError:
 fgname = ""
-fcontents =
+fcontents = MkTempFile()
 fcencoding = "UTF-8"
 curcompression = "none"
 if ftype == 0:
 fcontents.write(zipfp.read(member.filename))
-typechecktest = CheckCompressionType(fcontents, closefp=False)
+typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
 fcontents.seek(0, 0)
-fcencoding = GetFileEncoding(fcontents, False)
+fcencoding = GetFileEncoding(fcontents, 0, False)
 if(typechecktest is False and not compresswholefile):
 fcontents.seek(0, 2)
 ucfsize = fcontents.tell()
@@ -5770,7 +6067,7 @@ def PackFoxFileFromZipFile(infile, outfile, fmttype="auto", compression="auto",
 ilmin = 0
 ilcsize = []
 while(ilmin < ilsize):
-cfcontents =
+cfcontents = MkTempFile()
 fcontents.seek(0, 0)
 shutil.copyfileobj(fcontents, cfcontents)
 fcontents.seek(0, 0)
@@ -5784,7 +6081,7 @@ def PackFoxFileFromZipFile(infile, outfile, fmttype="auto", compression="auto",
 ilcmin = ilcsize.index(min(ilcsize))
 curcompression = compressionuselist[ilcmin]
 fcontents.seek(0, 0)
-cfcontents =
+cfcontents = MkTempFile()
 shutil.copyfileobj(fcontents, cfcontents)
 cfcontents.seek(0, 0)
 cfcontents = CompressOpenFileAlt(
@@ -5835,7 +6132,7 @@ def PackFoxFileFromZipFile(infile, outfile, fmttype="auto", compression="auto",
 outvar = fp.read()
 fp.close()
 return outvar
-elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
+elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
 fp = CompressOpenFileAlt(
 fp, compression, compressionlevel, compressionuselist, formatspecs)
 fp.seek(0, 0)
@@ -5888,11 +6185,11 @@ if(rarfile_support):
 pass
 if(outfile == "-" or outfile is None):
 verbose = False
-fp =
+fp = MkTempFile()
 elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
 fp = outfile
-elif(re.findall(
-fp =
+elif(re.findall(__upload_proto_support__, outfile)):
+fp = MkTempFile()
 else:
 fbasename = os.path.splitext(outfile)[0]
 fextname = os.path.splitext(outfile)[1]
@@ -6070,14 +6367,14 @@ if(rarfile_support):
 fgname = ""
 except ImportError:
 fgname = ""
-fcontents =
+fcontents = MkTempFile()
 fcencoding = "UTF-8"
 curcompression = "none"
 if ftype == 0:
 fcontents.write(rarfp.read(member.filename))
-typechecktest = CheckCompressionType(fcontents, closefp=False)
+typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
 fcontents.seek(0, 0)
-fcencoding = GetFileEncoding(fcontents, False)
+fcencoding = GetFileEncoding(fcontents, 0, False)
 if(typechecktest is False and not compresswholefile):
 fcontents.seek(0, 2)
 ucfsize = fcontents.tell()
@@ -6087,7 +6384,7 @@ if(rarfile_support):
 ilmin = 0
 ilcsize = []
 while(ilmin < ilsize):
-cfcontents =
+cfcontents = MkTempFile()
 fcontents.seek(0, 0)
 shutil.copyfileobj(fcontents, cfcontents)
 fcontents.seek(0, 0)
@@ -6104,7 +6401,7 @@ if(rarfile_support):
 ilcmin = ilcsize.index(min(ilcsize))
 curcompression = compressionuselist[ilcmin]
 fcontents.seek(0, 0)
-cfcontents =
+cfcontents = MkTempFile()
 shutil.copyfileobj(fcontents, cfcontents)
 cfcontents.seek(0, 0)
 cfcontents = CompressOpenFileAlt(
@@ -6155,7 +6452,7 @@ if(rarfile_support):
 outvar = fp.read()
 fp.close()
 return outvar
-elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
+elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
 fp = CompressOpenFileAlt(
 fp, compression, compressionlevel, compressionuselist, formatspecs)
 fp.seek(0, 0)
@@ -6208,11 +6505,11 @@ if(py7zr_support):
 pass
 if(outfile == "-" or outfile is None):
 verbose = False
-fp =
+fp = MkTempFile()
 elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
 fp = outfile
-elif(re.findall(
-fp =
+elif(re.findall(__upload_proto_support__, outfile)):
+fp = MkTempFile()
 else:
 fbasename = os.path.splitext(outfile)[0]
 fextname = os.path.splitext(outfile)[1]
@@ -6234,7 +6531,7 @@ if(py7zr_support):
 return False
 szpfp = py7zr.SevenZipFile(infile, mode="r")
 file_content = szpfp.readall()
-#sztest = szpfp.testzip()
+#sztest = szpfp.testzip()
 sztestalt = szpfp.test()
 if(sztestalt):
 VerbosePrintOut("Bad file found!")
@@ -6321,16 +6618,16 @@ if(py7zr_support):
 fgname = ""
 except ImportError:
 fgname = ""
-fcontents =
+fcontents = MkTempFile()
 fcencoding = "UTF-8"
 curcompression = "none"
 if ftype == 0:
 fcontents.write(file_content[member.filename].read())
 fsize = format(fcontents.tell(), 'x').lower()
 fcontents.seek(0, 0)
-typechecktest = CheckCompressionType(fcontents, closefp=False)
+typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
 fcontents.seek(0, 0)
-fcencoding = GetFileEncoding(fcontents, False)
+fcencoding = GetFileEncoding(fcontents, 0, False)
 file_content[member.filename].close()
 if(typechecktest is False and not compresswholefile):
 fcontents.seek(0, 2)
@@ -6341,7 +6638,7 @@ if(py7zr_support):
 ilmin = 0
 ilcsize = []
 while(ilmin < ilsize):
-cfcontents =
+cfcontents = MkTempFile()
 fcontents.seek(0, 0)
 shutil.copyfileobj(fcontents, cfcontents)
 fcontents.seek(0, 0)
@@ -6358,7 +6655,7 @@ if(py7zr_support):
 ilcmin = ilcsize.index(min(ilcsize))
 curcompression = compressionuselist[ilcmin]
 fcontents.seek(0, 0)
-cfcontents =
+cfcontents = MkTempFile()
 shutil.copyfileobj(fcontents, cfcontents)
 cfcontents.seek(0, 0)
 cfcontents = CompressOpenFileAlt(
@@ -6409,7 +6706,7 @@ if(py7zr_support):
 outvar = fp.read()
 fp.close()
 return outvar
-elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
+elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
 fp = CompressOpenFileAlt(
 fp, compression, compressionlevel, compressionuselist, formatspecs)
 fp.seek(0, 0)
@@ -6423,7 +6720,7 @@ if(py7zr_support):


 def PackFoxFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
-checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
+checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
 if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
 formatspecs = formatspecs[checkcompressfile]
 if(verbose):
@@ -6443,18 +6740,20 @@ def PackFoxFileFromInFile(infile, outfile, fmttype="auto", compression="auto", c
 return False


-def
+def FoxFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
+if(verbose):
+logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
 if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
 formatspecs = formatspecs[fmttype]
 elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
 fmttype = "auto"
-curloc =
+curloc = filestart
 if(hasattr(infile, "read") or hasattr(infile, "write")):
 curloc = infile.tell()
 fp = infile
-fp.seek(
-fp = UncompressFileAlt(fp, formatspecs)
-checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
+fp.seek(filestart, 0)
+fp = UncompressFileAlt(fp, formatspecs, filestart)
+checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
 if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
 formatspecs = formatspecs[checkcompressfile]
 if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
@@ -6471,45 +6770,45 @@ def FoxFileSeekToFileNum(infile, fmttype="auto", seekto=0, listonly=False, conte
 return False
 if(not fp):
 return False
-fp.seek(
+fp.seek(filestart, 0)
 elif(infile == "-"):
-fp =
+fp = MkTempFile()
 if(hasattr(sys.stdin, "buffer")):
 shutil.copyfileobj(sys.stdin.buffer, fp)
 else:
 shutil.copyfileobj(sys.stdin, fp)
-fp.seek(
-fp = UncompressFileAlt(fp, formatspecs)
-checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
+fp.seek(filestart, 0)
+fp = UncompressFileAlt(fp, formatspecs, filestart)
+checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
 if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
 formatspecs = formatspecs[checkcompressfile]
 if(not fp):
 return False
-fp.seek(
+fp.seek(filestart, 0)
 elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
-fp =
+fp = MkTempFile()
 fp.write(infile)
-fp.seek(
-fp = UncompressFileAlt(fp, formatspecs)
-compresscheck = CheckCompressionType(fp, formatspecs, False)
+fp.seek(filestart, 0)
+fp = UncompressFileAlt(fp, formatspecs, filestart)
+compresscheck = CheckCompressionType(fp, formatspecs, 0, False)
 if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
 formatspecs = formatspecs[compresscheck]
 if(not fp):
 return False
-fp.seek(
-elif(re.findall(
+fp.seek(filestart, 0)
+elif(re.findall(__download_proto_support__, infile)):
 fp = download_file_from_internet_file(infile)
-fp = UncompressFileAlt(fp, formatspecs)
-compresscheck = CheckCompressionType(fp, formatspecs, False)
+fp = UncompressFileAlt(fp, formatspecs, filestart)
+compresscheck = CheckCompressionType(fp, formatspecs, 0, False)
 if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
 formatspecs = formatspecs[compresscheck]
-fp.seek(
+fp.seek(filestart, 0)
 if(not fp):
 return False
-fp.seek(
+fp.seek(filestart, 0)
 else:
 infile = RemoveWindowsPath(infile)
-checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
+checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
 if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
 formatspecs = formatspecs[checkcompressfile]
 if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
@@ -6524,7 +6823,7 @@ def FoxFileSeekToFileNum(infile, fmttype="auto", seekto=0, listonly=False, conte
 return False
 elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
 return False
-compresscheck = CheckCompressionType(infile, formatspecs, True)
+compresscheck = CheckCompressionType(infile, formatspecs, filestart, True)
 if(not compresscheck):
 fextname = os.path.splitext(infile)[1]
 if(fextname == ".gz"):
@@ -6547,26 +6846,23 @@ def FoxFileSeekToFileNum(infile, fmttype="auto", seekto=0, listonly=False, conte
|
|
|
6547
6846
|
return False
|
|
6548
6847
|
if(not compresscheck):
|
|
6549
6848
|
return False
|
|
6550
|
-
fp = UncompressFile(infile, formatspecs, "rb")
|
|
6849
|
+
fp = UncompressFile(infile, formatspecs, "rb", filestart)
|
|
6551
6850
|
try:
|
|
6552
|
-
fp.seek(0, 2)
|
|
6851
|
+
fp.seek(0, 2)
|
|
6553
6852
|
except OSError:
|
|
6554
|
-
SeekToEndOfFile(fp)
|
|
6853
|
+
SeekToEndOfFile(fp)
|
|
6555
6854
|
except ValueError:
|
|
6556
|
-
SeekToEndOfFile(fp)
|
|
6557
|
-
CatSize = fp.tell()
|
|
6558
|
-
CatSizeEnd = CatSize
|
|
6855
|
+
SeekToEndOfFile(fp)
|
|
6856
|
+
CatSize = fp.tell()
|
|
6857
|
+
CatSizeEnd = CatSize
|
|
6559
6858
|
fp.seek(curloc, 0)
|
|
6560
|
-
if(curloc > 0):
|
|
6561
|
-
fp.seek(0, 0)
|
|
6562
6859
|
if(IsNestedDict(formatspecs)):
|
|
6563
|
-
compresschecking = CheckCompressionType(fp, formatspecs, False)
|
|
6860
|
+
compresschecking = CheckCompressionType(fp, formatspecs, filestart, False)
|
|
6564
6861
|
if(compresschecking not in formatspecs):
|
|
6565
|
-
fp.seek(0, 0)
|
|
6566
6862
|
return False
|
|
6567
6863
|
else:
|
|
6568
6864
|
formatspecs = formatspecs[compresschecking]
|
|
6569
|
-
fp.seek(
|
|
6865
|
+
fp.seek(filestart, 0)
|
|
6570
6866
|
inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
|
|
6571
6867
|
formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
|
|
6572
6868
|
formdelszie = len(formatspecs['format_delimiter'])
|
|
@@ -6583,23 +6879,8 @@ def FoxFileSeekToFileNum(infile, fmttype="auto", seekto=0, listonly=False, conte
|
|
|
6583
6879
|
fp, formatspecs['format_delimiter'])
|
|
6584
6880
|
fnumextrafieldsize = int(inheader[5], 16)
|
|
6585
6881
|
fnumextrafields = int(inheader[6], 16)
|
|
6586
|
-
fextrafieldslist = []
|
|
6587
6882
|
extrastart = 7
|
|
6588
6883
|
extraend = extrastart + fnumextrafields
|
|
6589
|
-
while(extrastart < extraend):
|
|
6590
|
-
fextrafieldslist.append(inheader[extrastart])
|
|
6591
|
-
extrastart = extrastart + 1
|
|
6592
|
-
if(fnumextrafields==1):
|
|
6593
|
-
try:
|
|
6594
|
-
fextrafieldslist = json.loads(base64.b64decode(fextrafieldslist[0]).decode("UTF-8"))
|
|
6595
|
-
fnumextrafields = len(fextrafieldslist)
|
|
6596
|
-
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
|
|
6597
|
-
try:
|
|
6598
|
-
fextrafieldslist = json.loads(fextrafieldslist[0])
|
|
6599
|
-
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
|
|
6600
|
-
pass
|
|
6601
|
-
if(curloc > 0):
|
|
6602
|
-
fp.seek(curloc, 0)
|
|
6603
6884
|
formversion = re.findall("([\\d]+)", formstring)
|
|
6604
6885
|
fheadsize = int(inheader[0], 16)
|
|
6605
6886
|
fnumfields = int(inheader[1], 16)
|
|
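The loop removed just above (and kept in `FoxFileToArray` later in this diff) interprets the archive's extra fields either as a base64-encoded JSON document or as plain JSON, falling back to the raw field list when neither parse succeeds. A self-contained sketch of that decoding step, exercised with made-up sample data:

```python
import base64
import binascii
import json

def decode_extra_fields(fextrafieldslist, fnumextrafields):
    # A single extra field may carry a JSON blob, optionally base64-encoded.
    if fnumextrafields == 1:
        try:
            fextrafieldslist = json.loads(base64.b64decode(fextrafieldslist[0]).decode("UTF-8"))
            fnumextrafields = len(fextrafieldslist)
        except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
            try:
                fextrafieldslist = json.loads(fextrafieldslist[0])
            except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
                pass
    return fextrafieldslist, fnumextrafields

# Example: one field holding base64-encoded JSON.
payload = base64.b64encode(json.dumps({"comment": "demo"}).encode("UTF-8")).decode("ascii")
print(decode_extra_fields([payload], 1))  # -> ({'comment': 'demo'}, 1)
```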
@@ -6608,629 +6889,7 @@ def FoxFileSeekToFileNum(infile, fmttype="auto", seekto=0, listonly=False, conte
|
|
|
6608
6889
|
fnumfiles = int(inheader[4], 16)
|
|
6609
6890
|
fprechecksumtype = inheader[-2]
|
|
6610
6891
|
fprechecksum = inheader[-1]
|
|
6611
|
-
|
|
6612
|
-
newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
|
|
6613
|
-
if(not headercheck and not skipchecksum):
|
|
6614
|
-
VerbosePrintOut(
|
|
6615
|
-
"File Header Checksum Error with file at offset " + str(0))
|
|
6616
|
-
VerbosePrintOut("'" + fprechecksum + "' != " +
|
|
6617
|
-
"'" + newfcs + "'")
|
|
6618
|
-
return False
|
|
6619
|
-
formversions = re.search('(.*?)(\\d+)', formstring).groups()
|
|
6620
|
-
fcompresstype = compresscheck
|
|
6621
|
-
if(fcompresstype==formatspecs['format_magic']):
|
|
6622
|
-
fcompresstype = ""
|
|
6623
|
-
outlist = {'fnumfiles': fnumfiles, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
|
|
6624
|
-
if(seekto >= fnumfiles):
|
|
6625
|
-
seekto = fnumfiles - 1
|
|
6626
|
-
if(seekto < 0):
|
|
6627
|
-
seekto = 0
|
|
6628
|
-
if(seekto >= 0):
|
|
6629
|
-
il = -1
|
|
6630
|
-
while (fp.tell() < CatSizeEnd) if seektoend else (il < seekto):
|
|
6631
|
-
prefhstart = fp.tell()
|
|
6632
|
-
if(formatspecs['new_style']):
|
|
6633
|
-
preheaderdata = ReadFileHeaderDataBySize(
|
|
6634
|
-
fp, formatspecs['format_delimiter'])
|
|
6635
|
-
else:
|
|
6636
|
-
preheaderdata = ReadFileHeaderDataWoSize(
|
|
6637
|
-
fp, formatspecs['format_delimiter'])
|
|
6638
|
-
if(len(preheaderdata) == 0):
|
|
6639
|
-
break
|
|
6640
|
-
prefheadsize = int(preheaderdata[0], 16)
|
|
6641
|
-
prefnumfields = int(preheaderdata[1], 16)
|
|
6642
|
-
preftype = int(preheaderdata[2], 16)
|
|
6643
|
-
prefencoding = preheaderdata[3]
|
|
6644
|
-
prefcencoding = preheaderdata[4]
|
|
6645
|
-
if(re.findall("^[.|/]", preheaderdata[5])):
|
|
6646
|
-
prefname = preheaderdata[5]
|
|
6647
|
-
else:
|
|
6648
|
-
prefname = "./"+preheaderdata[5]
|
|
6649
|
-
prefbasedir = os.path.dirname(prefname)
|
|
6650
|
-
preflinkname = preheaderdata[6]
|
|
6651
|
-
prefsize = int(preheaderdata[7], 16)
|
|
6652
|
-
prefatime = int(preheaderdata[8], 16)
|
|
6653
|
-
prefmtime = int(preheaderdata[9], 16)
|
|
6654
|
-
prefctime = int(preheaderdata[10], 16)
|
|
6655
|
-
prefbtime = int(preheaderdata[11], 16)
|
|
6656
|
-
prefmode = int(preheaderdata[12], 16)
|
|
6657
|
-
prefchmode = stat.S_IMODE(prefmode)
|
|
6658
|
-
preftypemod = stat.S_IFMT(prefmode)
|
|
6659
|
-
prefwinattributes = int(preheaderdata[13], 16)
|
|
6660
|
-
prefcompression = preheaderdata[14]
|
|
6661
|
-
prefcsize = int(preheaderdata[15], 16)
|
|
6662
|
-
prefuid = int(preheaderdata[16], 16)
|
|
6663
|
-
prefuname = preheaderdata[17]
|
|
6664
|
-
prefgid = int(preheaderdata[18], 16)
|
|
6665
|
-
prefgname = preheaderdata[19]
|
|
6666
|
-
fid = int(preheaderdata[20], 16)
|
|
6667
|
-
finode = int(preheaderdata[21], 16)
|
|
6668
|
-
flinkcount = int(preheaderdata[22], 16)
|
|
6669
|
-
prefdev = int(preheaderdata[23], 16)
|
|
6670
|
-
prefdev_minor = int(preheaderdata[24], 16)
|
|
6671
|
-
prefdev_major = int(preheaderdata[25], 16)
|
|
6672
|
-
prefseeknextfile = preheaderdata[26]
|
|
6673
|
-
prefjsontype = preheaderdata[27]
|
|
6674
|
-
prefjsonlen = int(preheaderdata[28], 16)
|
|
6675
|
-
prefjsonsize = int(preheaderdata[29], 16)
|
|
6676
|
-
prefjsonchecksumtype = preheaderdata[30]
|
|
6677
|
-
prefjsonchecksum = preheaderdata[31]
|
|
6678
|
-
prefhend = fp.tell() - 1
|
|
6679
|
-
prefjstart = fp.tell()
|
|
6680
|
-
prefjoutfprejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
|
|
6681
|
-
prefjend = fp.tell()
|
|
6682
|
-
fp.seek(len(formatspecs['format_delimiter']), 1)
|
|
6683
|
-
prejsonfcs = GetFileChecksum(prefjoutfprejsoncontent, prefjsonchecksumtype, True, formatspecs)
|
|
6684
|
-
prefextrasize = int(preheaderdata[32], 16)
|
|
6685
|
-
prefextrafields = int(preheaderdata[33], 16)
|
|
6686
|
-
extrastart = 34
|
|
6687
|
-
extraend = extrastart + prefextrafields
|
|
6688
|
-
prefcs = preheaderdata[-2].lower()
|
|
6689
|
-
prenewfcs = preheaderdata[-1].lower()
|
|
6690
|
-
prenewfcs = GetHeaderChecksum(
|
|
6691
|
-
preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
|
|
6692
|
-
if(prefcs != prenewfcs and not skipchecksum):
|
|
6693
|
-
VerbosePrintOut("File Header Checksum Error with file " +
|
|
6694
|
-
prefname + " at offset " + str(prefhstart))
|
|
6695
|
-
VerbosePrintOut("'" + prefcs + "' != " +
|
|
6696
|
-
"'" + prenewfcs + "'")
|
|
6697
|
-
return False
|
|
6698
|
-
if(prefjsonsize > 0):
|
|
6699
|
-
if(prejsonfcs != prefjsonchecksum and not skipchecksum):
|
|
6700
|
-
VerbosePrintOut("File JSON Data Checksum Error with file " +
|
|
6701
|
-
prefname + " at offset " + str(prefjstart))
|
|
6702
|
-
VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
|
|
6703
|
-
return False
|
|
6704
|
-
prefcontentstart = fp.tell()
|
|
6705
|
-
prefcontents = ""
|
|
6706
|
-
pyhascontents = False
|
|
6707
|
-
if(prefsize > 0):
|
|
6708
|
-
if(prefcompression):
|
|
6709
|
-
prefcontents = fp.read(prefsize)
|
|
6710
|
-
else:
|
|
6711
|
-
prefcontents = fp.read(prefcsize)
|
|
6712
|
-
prenewfccs = GetFileChecksum(
|
|
6713
|
-
prefcontents, preheaderdata[-3].lower(), False, formatspecs)
|
|
6714
|
-
pyhascontents = True
|
|
6715
|
-
if(prefccs != prenewfccs and not skipchecksum):
|
|
6716
|
-
VerbosePrintOut("File Content Checksum Error with file " +
|
|
6717
|
-
prefname + " at offset " + str(prefcontentstart))
|
|
6718
|
-
VerbosePrintOut("'" + prefccs +
|
|
6719
|
-
"' != " + "'" + prenewfccs + "'")
|
|
6720
|
-
return False
|
|
6721
|
-
if(re.findall("^\\+([0-9]+)", prefseeknextfile)):
|
|
6722
|
-
fseeknextasnum = int(prefseeknextfile.replace("+", ""))
|
|
6723
|
-
if(abs(fseeknextasnum) == 0):
|
|
6724
|
-
pass
|
|
6725
|
-
fp.seek(fseeknextasnum, 1)
|
|
6726
|
-
elif(re.findall("^\\-([0-9]+)", prefseeknextfile)):
|
|
6727
|
-
fseeknextasnum = int(prefseeknextfile)
|
|
6728
|
-
if(abs(fseeknextasnum) == 0):
|
|
6729
|
-
pass
|
|
6730
|
-
fp.seek(fseeknextasnum, 1)
|
|
6731
|
-
elif(re.findall("^([0-9]+)", prefseeknextfile)):
|
|
6732
|
-
fseeknextasnum = int(prefseeknextfile)
|
|
6733
|
-
if(abs(fseeknextasnum) == 0):
|
|
6734
|
-
pass
|
|
6735
|
-
fp.seek(fseeknextasnum, 0)
|
|
6736
|
-
else:
|
|
6737
|
-
return False
|
|
6738
|
-
il = il + 1
|
|
6739
|
-
fp.seek(seekstart, 0)
|
|
6740
|
-
fileidnum = il
|
|
6741
|
-
outfheadsize = int(preheaderdata[0], 16)
|
|
6742
|
-
outfnumfields = int(preheaderdata[1], 16)
|
|
6743
|
-
outftype = int(preheaderdata[2], 16)
|
|
6744
|
-
outfencoding = preheaderdata[3]
|
|
6745
|
-
if(re.findall("^[.|/]", preheaderdata[4])):
|
|
6746
|
-
outfname = preheaderdata[4]
|
|
6747
|
-
else:
|
|
6748
|
-
outfname = "./"+preheaderdata[4]
|
|
6749
|
-
outflinkname = preheaderdata[5]
|
|
6750
|
-
outfsize = int(preheaderdata[6], 16)
|
|
6751
|
-
outfbasedir = os.path.dirname(outfname)
|
|
6752
|
-
outlist = {'fid': fileidnum, 'foffset': fp.tell(), 'ftype': outftype, 'fencoding': outfencoding, 'fname': outfname,
|
|
6753
|
-
'fbasedir': outfbasedir, 'flinkname': outflinkname, 'fsize': outfsize}
|
|
6754
|
-
if(returnfp):
|
|
6755
|
-
outlist.update({'fp': fp})
|
|
6756
|
-
else:
|
|
6757
|
-
fp.close()
|
|
6758
|
-
return outlist
|
|
6759
|
-
|
|
6760
|
-
|
|
6761
|
-
def FoxFileSeekToFileName(infile, fmttype="auto", seekfile=None, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
6762
|
-
if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
|
|
6763
|
-
formatspecs = formatspecs[fmttype]
|
|
6764
|
-
elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
|
|
6765
|
-
fmttype = "auto"
|
|
6766
|
-
curloc = 0
|
|
6767
|
-
if(hasattr(infile, "read") or hasattr(infile, "write")):
|
|
6768
|
-
curloc = infile.tell()
|
|
6769
|
-
fp = infile
|
|
6770
|
-
fp.seek(0, 0)
|
|
6771
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
6772
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
|
|
6773
|
-
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
6774
|
-
formatspecs = formatspecs[checkcompressfile]
|
|
6775
|
-
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
6776
|
-
return TarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6777
|
-
elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
|
|
6778
|
-
return ZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6779
|
-
elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
|
|
6780
|
-
return RarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6781
|
-
elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
|
|
6782
|
-
return SevenZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6783
|
-
elif(IsSingleDict(formatspecs) and checkcompressfile != formatspecs['format_magic']):
|
|
6784
|
-
return False
|
|
6785
|
-
elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
|
|
6786
|
-
return False
|
|
6787
|
-
if(not fp):
|
|
6788
|
-
return False
|
|
6789
|
-
fp.seek(0, 0)
|
|
6790
|
-
elif(infile == "-"):
|
|
6791
|
-
fp = BytesIO()
|
|
6792
|
-
if(hasattr(sys.stdin, "buffer")):
|
|
6793
|
-
shutil.copyfileobj(sys.stdin.buffer, fp)
|
|
6794
|
-
else:
|
|
6795
|
-
shutil.copyfileobj(sys.stdin, fp)
|
|
6796
|
-
fp.seek(0, 0)
|
|
6797
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
6798
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
|
|
6799
|
-
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
6800
|
-
formatspecs = formatspecs[checkcompressfile]
|
|
6801
|
-
if(not fp):
|
|
6802
|
-
return False
|
|
6803
|
-
fp.seek(0, 0)
|
|
6804
|
-
elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
|
|
6805
|
-
fp = BytesIO()
|
|
6806
|
-
fp.write(infile)
|
|
6807
|
-
fp.seek(0, 0)
|
|
6808
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
6809
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
6810
|
-
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
6811
|
-
formatspecs = formatspecs[compresscheck]
|
|
6812
|
-
if(not fp):
|
|
6813
|
-
return False
|
|
6814
|
-
fp.seek(0, 0)
|
|
6815
|
-
elif(re.findall("^(http|https|ftp|ftps|sftp):\\/\\/", infile)):
|
|
6816
|
-
fp = download_file_from_internet_file(infile)
|
|
6817
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
6818
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
6819
|
-
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
6820
|
-
formatspecs = formatspecs[compresscheck]
|
|
6821
|
-
fp.seek(0, 0)
|
|
6822
|
-
if(not fp):
|
|
6823
|
-
return False
|
|
6824
|
-
fp.seek(0, 0)
|
|
6825
|
-
else:
|
|
6826
|
-
infile = RemoveWindowsPath(infile)
|
|
6827
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
6828
|
-
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
6829
|
-
formatspecs = formatspecs[checkcompressfile]
|
|
6830
|
-
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
6831
|
-
return TarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6832
|
-
elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
|
|
6833
|
-
return ZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6834
|
-
elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
|
|
6835
|
-
return RarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6836
|
-
elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
|
|
6837
|
-
return SevenZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6838
|
-
elif(IsSingleDict(formatspecs) and checkcompressfile != formatspecs['format_magic']):
|
|
6839
|
-
return False
|
|
6840
|
-
elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
|
|
6841
|
-
return False
|
|
6842
|
-
compresscheck = CheckCompressionType(infile, formatspecs, True)
|
|
6843
|
-
if(not compresscheck):
|
|
6844
|
-
fextname = os.path.splitext(infile)[1]
|
|
6845
|
-
if(fextname == ".gz"):
|
|
6846
|
-
compresscheck = "gzip"
|
|
6847
|
-
elif(fextname == ".bz2"):
|
|
6848
|
-
compresscheck = "bzip2"
|
|
6849
|
-
elif(fextname == ".zst"):
|
|
6850
|
-
compresscheck = "zstd"
|
|
6851
|
-
elif(fextname == ".lz4" or fextname == ".clz4"):
|
|
6852
|
-
compresscheck = "lz4"
|
|
6853
|
-
elif(fextname == ".lzo" or fextname == ".lzop"):
|
|
6854
|
-
compresscheck = "lzo"
|
|
6855
|
-
elif(fextname == ".lzma"):
|
|
6856
|
-
compresscheck = "lzma"
|
|
6857
|
-
elif(fextname == ".xz"):
|
|
6858
|
-
compresscheck = "xz"
|
|
6859
|
-
elif(fextname == ".zz" or fextname == ".zl" or fextname == ".zlib"):
|
|
6860
|
-
compresscheck = "zlib"
|
|
6861
|
-
else:
|
|
6862
|
-
return False
|
|
6863
|
-
if(not compresscheck):
|
|
6864
|
-
return False
|
|
6865
|
-
fp = UncompressFile(infile, formatspecs, "rb")
|
|
6866
|
-
try:
|
|
6867
|
-
fp.seek(0, 2);
|
|
6868
|
-
except OSError:
|
|
6869
|
-
SeekToEndOfFile(fp);
|
|
6870
|
-
except ValueError:
|
|
6871
|
-
SeekToEndOfFile(fp);
|
|
6872
|
-
CatSize = fp.tell();
|
|
6873
|
-
CatSizeEnd = CatSize;
|
|
6874
|
-
fp.seek(curloc, 0)
|
|
6875
|
-
if(curloc > 0):
|
|
6876
|
-
fp.seek(0, 0)
|
|
6877
|
-
if(IsNestedDict(formatspecs)):
|
|
6878
|
-
compresschecking = CheckCompressionType(fp, formatspecs, False)
|
|
6879
|
-
if(compresschecking not in formatspecs):
|
|
6880
|
-
return False
|
|
6881
|
-
else:
|
|
6882
|
-
formatspecs = formatspecs[compresschecking]
|
|
6883
|
-
fp.seek(0, 0)
|
|
6884
|
-
inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
|
|
6885
|
-
formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
|
|
6886
|
-
formdelszie = len(formatspecs['format_delimiter'])
|
|
6887
|
-
formdel = fp.read(formdelszie).decode("UTF-8")
|
|
6888
|
-
if(formstring != formatspecs['format_magic']+inheaderver):
|
|
6889
|
-
return False
|
|
6890
|
-
if(formdel != formatspecs['format_delimiter']):
|
|
6891
|
-
return False
|
|
6892
|
-
if(formatspecs['new_style']):
|
|
6893
|
-
inheader = ReadFileHeaderDataBySize(
|
|
6894
|
-
fp, formatspecs['format_delimiter'])
|
|
6895
|
-
else:
|
|
6896
|
-
inheader = ReadFileHeaderDataWoSize(
|
|
6897
|
-
fp, formatspecs['format_delimiter'])
|
|
6898
|
-
fnumextrafieldsize = int(inheader[5], 16)
|
|
6899
|
-
fnumextrafields = int(inheader[6], 16)
|
|
6900
|
-
fextrafieldslist = []
|
|
6901
|
-
extrastart = 7
|
|
6902
|
-
extraend = extrastart + fnumextrafields
|
|
6903
|
-
while(extrastart < extraend):
|
|
6904
|
-
fextrafieldslist.append(inheader[extrastart])
|
|
6905
|
-
extrastart = extrastart + 1
|
|
6906
|
-
if(fnumextrafields==1):
|
|
6907
|
-
try:
|
|
6908
|
-
fextrafieldslist = json.loads(base64.b64decode(fextrafieldslist[0]).decode("UTF-8"))
|
|
6909
|
-
fnumextrafields = len(fextrafieldslist)
|
|
6910
|
-
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
|
|
6911
|
-
try:
|
|
6912
|
-
fextrafieldslist = json.loads(fextrafieldslist[0])
|
|
6913
|
-
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
|
|
6914
|
-
pass
|
|
6915
|
-
if(curloc > 0):
|
|
6916
|
-
fp.seek(curloc, 0)
|
|
6917
|
-
formversion = re.findall("([\\d]+)", formstring)
|
|
6918
|
-
fheadsize = int(inheader[0], 16)
|
|
6919
|
-
fnumfields = int(inheader[1], 16)
|
|
6920
|
-
fhencoding = inheader[2]
|
|
6921
|
-
fostype = inheader[3]
|
|
6922
|
-
fnumfiles = int(inheader[4], 16)
|
|
6923
|
-
fprechecksumtype = inheader[-2]
|
|
6924
|
-
fprechecksum = inheader[-1]
|
|
6925
|
-
headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
|
|
6926
|
-
newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
|
|
6927
|
-
if(not headercheck and not skipchecksum):
|
|
6928
|
-
VerbosePrintOut(
|
|
6929
|
-
"File Header Checksum Error with file at offset " + str(0))
|
|
6930
|
-
VerbosePrintOut("'" + fprechecksum + "' != " +
|
|
6931
|
-
"'" + newfcs + "'")
|
|
6932
|
-
return False
|
|
6933
|
-
formversions = re.search('(.*?)(\\d+)', formstring).groups()
|
|
6934
|
-
fcompresstype = compresscheck
|
|
6935
|
-
if(fcompresstype==formatspecs['format_magic']):
|
|
6936
|
-
fcompresstype = ""
|
|
6937
|
-
outlist = {'fnumfiles': fnumfiles, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
|
|
6938
|
-
seekto = fnumfiles - 1
|
|
6939
|
-
filefound = False
|
|
6940
|
-
if(seekto >= 0):
|
|
6941
|
-
il = -1
|
|
6942
|
-
while (fp.tell() < CatSizeEnd) if seektoend else (il < seekto):
|
|
6943
|
-
prefhstart = fp.tell()
|
|
6944
|
-
if(formatspecs['new_style']):
|
|
6945
|
-
preheaderdata = ReadFileHeaderDataBySize(
|
|
6946
|
-
fp, formatspecs['format_delimiter'])
|
|
6947
|
-
else:
|
|
6948
|
-
preheaderdata = ReadFileHeaderDataWoSize(
|
|
6949
|
-
fp, formatspecs['format_delimiter'])
|
|
6950
|
-
if(len(preheaderdata) == 0):
|
|
6951
|
-
break
|
|
6952
|
-
prefheadsize = int(preheaderdata[0], 16)
|
|
6953
|
-
prefnumfields = int(preheaderdata[1], 16)
|
|
6954
|
-
preftype = int(preheaderdata[2], 16)
|
|
6955
|
-
prefencoding = preheaderdata[3]
|
|
6956
|
-
prefencoding = preheaderdata[4]
|
|
6957
|
-
if(re.findall("^[.|/]", preheaderdata[5])):
|
|
6958
|
-
prefname = preheaderdata[5]
|
|
6959
|
-
else:
|
|
6960
|
-
prefname = "./"+preheaderdata[5]
|
|
6961
|
-
prefbasedir = os.path.dirname(prefname)
|
|
6962
|
-
preflinkname = preheaderdata[6]
|
|
6963
|
-
prefsize = int(preheaderdata[7], 16)
|
|
6964
|
-
prefatime = int(preheaderdata[8], 16)
|
|
6965
|
-
prefmtime = int(preheaderdata[9], 16)
|
|
6966
|
-
prefctime = int(preheaderdata[10], 16)
|
|
6967
|
-
prefbtime = int(preheaderdata[11], 16)
|
|
6968
|
-
prefmode = int(preheaderdata[12], 16)
|
|
6969
|
-
prefchmode = stat.S_IMODE(prefmode)
|
|
6970
|
-
preftypemod = stat.S_IFMT(prefmode)
|
|
6971
|
-
prefwinattributes = int(preheaderdata[13], 16)
|
|
6972
|
-
prefcompression = preheaderdata[14]
|
|
6973
|
-
prefcsize = int(preheaderdata[15], 16)
|
|
6974
|
-
prefuid = int(preheaderdata[16], 16)
|
|
6975
|
-
prefuname = preheaderdata[17]
|
|
6976
|
-
prefgid = int(preheaderdata[18], 16)
|
|
6977
|
-
prefgname = preheaderdata[19]
|
|
6978
|
-
fid = int(preheaderdata[20], 16)
|
|
6979
|
-
finode = int(preheaderdata[21], 16)
|
|
6980
|
-
flinkcount = int(preheaderdata[22], 16)
|
|
6981
|
-
prefdev = int(preheaderdata[23], 16)
|
|
6982
|
-
prefdev_minor = int(preheaderdata[24], 16)
|
|
6983
|
-
prefdev_major = int(preheaderdata[25], 16)
|
|
6984
|
-
prefseeknextfile = preheaderdata[26]
|
|
6985
|
-
prefjsontype = preheaderdata[27]
|
|
6986
|
-
prefjsonlen = int(preheaderdata[28], 16)
|
|
6987
|
-
prefjsonsize = int(preheaderdata[29], 16)
|
|
6988
|
-
prefjsonchecksumtype = preheaderdata[30]
|
|
6989
|
-
prefjsonchecksum = preheaderdata[31]
|
|
6990
|
-
prefhend = fp.tell() - 1
|
|
6991
|
-
prefjstart = fp.tell()
|
|
6992
|
-
prefjoutfprejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
|
|
6993
|
-
prefjend = fp.tell()
|
|
6994
|
-
fp.seek(len(formatspecs['format_delimiter']), 1)
|
|
6995
|
-
prejsonfcs = GetFileChecksum(prefjoutfprejsoncontent, prefjsonchecksumtype, True, formatspecs)
|
|
6996
|
-
prefextrasize = int(preheaderdata[32], 16)
|
|
6997
|
-
prefextrafields = int(preheaderdata[33], 16)
|
|
6998
|
-
extrastart = 34
|
|
6999
|
-
extraend = extrastart + prefextrafields
|
|
7000
|
-
prefcs = preheaderdata[-2].lower()
|
|
7001
|
-
prenewfcs = preheaderdata[-1].lower()
|
|
7002
|
-
prenewfcs = GetHeaderChecksum(
|
|
7003
|
-
preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
|
|
7004
|
-
if(prefcs != prenewfcs and not skipchecksum):
|
|
7005
|
-
VerbosePrintOut("File Header Checksum Error with file " +
|
|
7006
|
-
prefname + " at offset " + str(prefhstart))
|
|
7007
|
-
VerbosePrintOut("'" + prefcs + "' != " +
|
|
7008
|
-
"'" + prenewfcs + "'")
|
|
7009
|
-
return False
|
|
7010
|
-
if(prefjsonsize > 0):
|
|
7011
|
-
if(prejsonfcs != prefjsonchecksum and not skipchecksum):
|
|
7012
|
-
VerbosePrintOut("File JSON Data Checksum Error with file " +
|
|
7013
|
-
prefname + " at offset " + str(prefjstart))
|
|
7014
|
-
VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
|
|
7015
|
-
return False
|
|
7016
|
-
prefcontentstart = fp.tell()
|
|
7017
|
-
prefcontents = ""
|
|
7018
|
-
pyhascontents = False
|
|
7019
|
-
if(prefsize > 0):
|
|
7020
|
-
if(prefcompression):
|
|
7021
|
-
prefcontents = fp.read(prefsize)
|
|
7022
|
-
else:
|
|
7023
|
-
prefcontents = fp.read(prefcsize)
|
|
7024
|
-
prenewfccs = GetFileChecksum(
|
|
7025
|
-
prefcontents, preheaderdata[-3].lower(), False, formatspecs)
|
|
7026
|
-
pyhascontents = True
|
|
7027
|
-
if(prefccs != prenewfccs and not skipchecksum):
|
|
7028
|
-
VerbosePrintOut("File Content Checksum Error with file " +
|
|
7029
|
-
prefname + " at offset " + str(prefcontentstart))
|
|
7030
|
-
VerbosePrintOut("'" + prefccs +
|
|
7031
|
-
"' != " + "'" + prenewfccs + "'")
|
|
7032
|
-
return False
|
|
7033
|
-
if(re.findall("^\\+([0-9]+)", prefseeknextfile)):
|
|
7034
|
-
fseeknextasnum = int(prefseeknextfile.replace("+", ""))
|
|
7035
|
-
if(abs(fseeknextasnum) == 0):
|
|
7036
|
-
pass
|
|
7037
|
-
fp.seek(fseeknextasnum, 1)
|
|
7038
|
-
elif(re.findall("^\\-([0-9]+)", prefseeknextfile)):
|
|
7039
|
-
fseeknextasnum = int(prefseeknextfile)
|
|
7040
|
-
if(abs(fseeknextasnum) == 0):
|
|
7041
|
-
pass
|
|
7042
|
-
fp.seek(fseeknextasnum, 1)
|
|
7043
|
-
elif(re.findall("^([0-9]+)", prefseeknextfile)):
|
|
7044
|
-
fseeknextasnum = int(prefseeknextfile)
|
|
7045
|
-
if(abs(fseeknextasnum) == 0):
|
|
7046
|
-
pass
|
|
7047
|
-
fp.seek(fseeknextasnum, 0)
|
|
7048
|
-
else:
|
|
7049
|
-
return False
|
|
7050
|
-
il = il + 1
|
|
7051
|
-
filefound = False
|
|
7052
|
-
if(prefname == seekfile):
|
|
7053
|
-
filefound = True
|
|
7054
|
-
break
|
|
7055
|
-
fp.seek(seekstart, 0)
|
|
7056
|
-
fileidnum = il
|
|
7057
|
-
outfheadsize = int(preheaderdata[0], 16)
|
|
7058
|
-
outfnumfields = int(preheaderdata[1], 16)
|
|
7059
|
-
outftype = int(preheaderdata[2], 16)
|
|
7060
|
-
outfencoding = preheaderdata[3]
|
|
7061
|
-
if(re.findall("^[.|/]", preheaderdata[4])):
|
|
7062
|
-
outfname = preheaderdata[4]
|
|
7063
|
-
else:
|
|
7064
|
-
outfname = "./"+preheaderdata[4]
|
|
7065
|
-
outflinkname = preheaderdata[5]
|
|
7066
|
-
outfsize = int(preheaderdata[6], 16)
|
|
7067
|
-
outfbasedir = os.path.dirname(outfname)
|
|
7068
|
-
if(filefound):
|
|
7069
|
-
outlist = {'fid': fileidnum, 'foffset': fp.tell(), 'ftype': outftype, 'fencoding': outfencoding, 'fname': outfname,
|
|
7070
|
-
'fbasedir': outfbasedir, 'flinkname': outflinkname, 'fsize': outfsize}
|
|
7071
|
-
else:
|
|
7072
|
-
return False
|
|
7073
|
-
if(returnfp):
|
|
7074
|
-
outlist.update({'fp': fp})
|
|
7075
|
-
else:
|
|
7076
|
-
fp.close()
|
|
7077
|
-
return outlist
|
|
7078
|
-
|
|
7079
|
-
|
|
7080
|
-
def FoxFileValidate(infile, fmttype="auto", formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
|
|
7081
|
-
if(verbose):
|
|
7082
|
-
logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
|
|
7083
|
-
if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
|
|
7084
|
-
formatspecs = formatspecs[fmttype]
|
|
7085
|
-
elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
|
|
7086
|
-
fmttype = "auto"
|
|
7087
|
-
curloc = 0
|
|
7088
|
-
if(hasattr(infile, "read") or hasattr(infile, "write")):
|
|
7089
|
-
curloc = infile.tell()
|
|
7090
|
-
fp = infile
|
|
7091
|
-
fp.seek(0, 0)
|
|
7092
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
7093
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
|
|
7094
|
-
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
7095
|
-
formatspecs = formatspecs[checkcompressfile]
|
|
7096
|
-
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
7097
|
-
return TarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7098
|
-
elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
|
|
7099
|
-
return ZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7100
|
-
elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
|
|
7101
|
-
return RarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7102
|
-
elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
|
|
7103
|
-
return SevenZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7104
|
-
elif(IsSingleDict(formatspecs) and checkcompressfile != formatspecs['format_magic']):
|
|
7105
|
-
return False
|
|
7106
|
-
elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
|
|
7107
|
-
return False
|
|
7108
|
-
if(not fp):
|
|
7109
|
-
return False
|
|
7110
|
-
fp.seek(0, 0)
|
|
7111
|
-
elif(infile == "-"):
|
|
7112
|
-
fp = BytesIO()
|
|
7113
|
-
if(hasattr(sys.stdin, "buffer")):
|
|
7114
|
-
shutil.copyfileobj(sys.stdin.buffer, fp)
|
|
7115
|
-
else:
|
|
7116
|
-
shutil.copyfileobj(sys.stdin, fp)
|
|
7117
|
-
fp.seek(0, 0)
|
|
7118
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
7119
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
|
|
7120
|
-
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
7121
|
-
formatspecs = formatspecs[checkcompressfile]
|
|
7122
|
-
if(not fp):
|
|
7123
|
-
return False
|
|
7124
|
-
fp.seek(0, 0)
|
|
7125
|
-
elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
|
|
7126
|
-
fp = BytesIO()
|
|
7127
|
-
fp.write(infile)
|
|
7128
|
-
fp.seek(0, 0)
|
|
7129
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
7130
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
7131
|
-
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
7132
|
-
formatspecs = formatspecs[compresscheck]
|
|
7133
|
-
if(not fp):
|
|
7134
|
-
return False
|
|
7135
|
-
fp.seek(0, 0)
|
|
7136
|
-
elif(re.findall("^(http|https|ftp|ftps|sftp):\\/\\/", infile)):
|
|
7137
|
-
fp = download_file_from_internet_file(infile)
|
|
7138
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
7139
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
7140
|
-
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
7141
|
-
formatspecs = formatspecs[compresscheck]
|
|
7142
|
-
fp.seek(0, 0)
|
|
7143
|
-
if(not fp):
|
|
7144
|
-
return False
|
|
7145
|
-
fp.seek(0, 0)
|
|
7146
|
-
else:
|
|
7147
|
-
infile = RemoveWindowsPath(infile)
|
|
7148
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
7149
|
-
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
7150
|
-
formatspecs = formatspecs[checkcompressfile]
|
|
7151
|
-
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
7152
|
-
return TarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7153
|
-
elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
|
|
7154
|
-
return ZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7155
|
-
elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
|
|
7156
|
-
return RarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7157
|
-
elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
|
|
7158
|
-
return SevenZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7159
|
-
elif(IsSingleDict(formatspecs) and checkcompressfile != formatspecs['format_magic']):
|
|
7160
|
-
return False
|
|
7161
|
-
elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
|
|
7162
|
-
return False
|
|
7163
|
-
compresscheck = CheckCompressionType(infile, formatspecs, True)
|
|
7164
|
-
if(not compresscheck):
|
|
7165
|
-
fextname = os.path.splitext(infile)[1]
|
|
7166
|
-
if(fextname == ".gz"):
|
|
7167
|
-
compresscheck = "gzip"
|
|
7168
|
-
elif(fextname == ".bz2"):
|
|
7169
|
-
compresscheck = "bzip2"
|
|
7170
|
-
elif(fextname == ".zst"):
|
|
7171
|
-
compresscheck = "zstd"
|
|
7172
|
-
elif(fextname == ".lz4" or fextname == ".clz4"):
|
|
7173
|
-
compresscheck = "lz4"
|
|
7174
|
-
elif(fextname == ".lzo" or fextname == ".lzop"):
|
|
7175
|
-
compresscheck = "lzo"
|
|
7176
|
-
elif(fextname == ".lzma"):
|
|
7177
|
-
compresscheck = "lzma"
|
|
7178
|
-
elif(fextname == ".xz"):
|
|
7179
|
-
compresscheck = "xz"
|
|
7180
|
-
elif(fextname == ".zz" or fextname == ".zl" or fextname == ".zlib"):
|
|
7181
|
-
compresscheck = "zlib"
|
|
7182
|
-
else:
|
|
7183
|
-
return False
|
|
7184
|
-
if(not compresscheck):
|
|
7185
|
-
return False
|
|
7186
|
-
fp = UncompressFile(infile, formatspecs, "rb")
|
|
7187
|
-
try:
|
|
7188
|
-
fp.seek(0, 2);
|
|
7189
|
-
except OSError:
|
|
7190
|
-
SeekToEndOfFile(fp);
|
|
7191
|
-
except ValueError:
|
|
7192
|
-
SeekToEndOfFile(fp);
|
|
7193
|
-
CatSize = fp.tell();
|
|
7194
|
-
CatSizeEnd = CatSize;
|
|
7195
|
-
fp.seek(curloc, 0)
|
|
7196
|
-
if(curloc > 0):
|
|
7197
|
-
fp.seek(0, 0)
|
|
7198
|
-
if(IsNestedDict(formatspecs)):
|
|
7199
|
-
compresschecking = CheckCompressionType(fp, formatspecs, False)
|
|
7200
|
-
if(compresschecking not in formatspecs):
|
|
7201
|
-
return False
|
|
7202
|
-
else:
|
|
7203
|
-
formatspecs = formatspecs[compresschecking]
|
|
7204
|
-
fp.seek(0, 0)
|
|
7205
|
-
inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
|
|
7206
|
-
formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
|
|
7207
|
-
formdelszie = len(formatspecs['format_delimiter'])
|
|
7208
|
-
formdel = fp.read(formdelszie).decode("UTF-8")
|
|
7209
|
-
if(formstring != formatspecs['format_magic']+inheaderver):
|
|
7210
|
-
return False
|
|
7211
|
-
if(formdel != formatspecs['format_delimiter']):
|
|
7212
|
-
return False
|
|
7213
|
-
if(formatspecs['new_style']):
|
|
7214
|
-
inheader = ReadFileHeaderDataBySize(
|
|
7215
|
-
fp, formatspecs['format_delimiter'])
|
|
7216
|
-
else:
|
|
7217
|
-
inheader = ReadFileHeaderDataWoSize(
|
|
7218
|
-
fp, formatspecs['format_delimiter'])
|
|
7219
|
-
fnumextrafieldsize = int(inheader[5], 16)
|
|
7220
|
-
fnumextrafields = int(inheader[6], 16)
|
|
7221
|
-
extrastart = 7
|
|
7222
|
-
extraend = extrastart + fnumextrafields
|
|
7223
|
-
if(curloc > 0):
|
|
7224
|
-
fp.seek(curloc, 0)
|
|
7225
|
-
formversion = re.findall("([\\d]+)", formstring)
|
|
7226
|
-
fheadsize = int(inheader[0], 16)
|
|
7227
|
-
fnumfields = int(inheader[1], 16)
|
|
7228
|
-
fhencoding = inheader[2]
|
|
7229
|
-
fostype = inheader[3]
|
|
7230
|
-
fnumfiles = int(inheader[4], 16)
|
|
7231
|
-
fprechecksumtype = inheader[-2]
|
|
7232
|
-
fprechecksum = inheader[-1]
|
|
7233
|
-
il = 0
|
|
6892
|
+
il = 0
|
|
7234
6893
|
headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
|
|
7235
6894
|
newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
|
|
7236
6895
|
valid_archive = True
|
|
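The hunk below leaves the thin `FoxFileValidateMultipleFiles` wrapper untouched; it simply forwards to `FoxFileValidateMultiple`. A usage sketch, assuming the wrapper accepts a list of archive paths (the names are placeholders) and that `verbose=True` only switches on the module's logging output:

```python
import pyfoxfile

results = pyfoxfile.FoxFileValidateMultipleFiles(["first.fox", "second.fox"],
                                                 fmttype="auto", verbose=False)
print(results)
```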
@@ -7431,18 +7090,18 @@ def FoxFileValidateMultiple(infile, fmttype="auto", formatspecs=__file_format_mu
|
|
|
7431
7090
|
def FoxFileValidateMultipleFiles(infile, fmttype="auto", formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
7432
7091
|
return FoxFileValidateMultiple(infile, fmttype, formatspecs, verbose, returnfp)
|
|
7433
7092
|
|
|
7434
|
-
def FoxFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
7093
|
+
def FoxFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
7435
7094
|
if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
|
|
7436
7095
|
formatspecs = formatspecs[fmttype]
|
|
7437
7096
|
elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
|
|
7438
7097
|
fmttype = "auto"
|
|
7439
|
-
curloc =
|
|
7098
|
+
curloc = filestart
|
|
7440
7099
|
if(hasattr(infile, "read") or hasattr(infile, "write")):
|
|
7441
7100
|
curloc = infile.tell()
|
|
7442
7101
|
fp = infile
|
|
7443
|
-
fp.seek(
|
|
7444
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
7445
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
|
|
7102
|
+
fp.seek(filestart, 0)
|
|
7103
|
+
fp = UncompressFileAlt(fp, formatspecs, filestart)
|
|
7104
|
+
checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
|
|
7446
7105
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
7447
7106
|
formatspecs = formatspecs[checkcompressfile]
|
|
7448
7107
|
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
@@ -7459,45 +7118,45 @@ def FoxFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
|
|
|
7459
7118
|
return False
|
|
7460
7119
|
if(not fp):
|
|
7461
7120
|
return False
|
|
7462
|
-
fp.seek(
|
|
7121
|
+
fp.seek(filestart, 0)
|
|
7463
7122
|
elif(infile == "-"):
|
|
7464
|
-
fp =
|
|
7123
|
+
fp = MkTempFile()
|
|
7465
7124
|
if(hasattr(sys.stdin, "buffer")):
|
|
7466
7125
|
shutil.copyfileobj(sys.stdin.buffer, fp)
|
|
7467
7126
|
else:
|
|
7468
7127
|
shutil.copyfileobj(sys.stdin, fp)
|
|
7469
|
-
fp.seek(
|
|
7470
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
7471
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
|
|
7128
|
+
fp.seek(filestart, 0)
|
|
7129
|
+
fp = UncompressFileAlt(fp, formatspecs, filestart)
|
|
7130
|
+
checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
|
|
7472
7131
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
7473
7132
|
formatspecs = formatspecs[checkcompressfile]
|
|
7474
7133
|
if(not fp):
|
|
7475
7134
|
return False
|
|
7476
|
-
fp.seek(
|
|
7135
|
+
fp.seek(filestart, 0)
|
|
7477
7136
|
elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
|
|
7478
|
-
fp =
|
|
7137
|
+
fp = MkTempFile()
|
|
7479
7138
|
fp.write(infile)
|
|
7480
|
-
fp.seek(
|
|
7481
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
7482
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
7139
|
+
fp.seek(filestart, 0)
|
|
7140
|
+
fp = UncompressFileAlt(fp, formatspecs, filestart)
|
|
7141
|
+
compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
|
|
7483
7142
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
7484
7143
|
formatspecs = formatspecs[compresscheck]
|
|
7485
7144
|
if(not fp):
|
|
7486
7145
|
return False
|
|
7487
|
-
fp.seek(
|
|
7488
|
-
elif(re.findall(
|
|
7146
|
+
fp.seek(filestart, 0)
|
|
7147
|
+
elif(re.findall(__download_proto_support__, infile)):
|
|
7489
7148
|
fp = download_file_from_internet_file(infile)
|
|
7490
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
7491
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
7149
|
+
fp = UncompressFileAlt(fp, formatspecs, filestart)
|
|
7150
|
+
compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
|
|
7492
7151
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
7493
7152
|
formatspecs = formatspecs[compresscheck]
|
|
7494
|
-
fp.seek(
|
|
7153
|
+
fp.seek(filestart, 0)
|
|
7495
7154
|
if(not fp):
|
|
7496
7155
|
return False
|
|
7497
|
-
fp.seek(
|
|
7156
|
+
fp.seek(filestart, 0)
|
|
7498
7157
|
else:
|
|
7499
7158
|
infile = RemoveWindowsPath(infile)
|
|
7500
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
7159
|
+
checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
|
|
7501
7160
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
7502
7161
|
formatspecs = formatspecs[checkcompressfile]
|
|
7503
7162
|
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
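`FoxFileToArray` now takes `filestart` as its third positional parameter (between `fmttype` and `seekstart`), as shown in the new definition above. A call sketch using keyword arguments from that signature; the path is a placeholder and the result keys follow the structure consumed by `FoxFileArrayToArrayIndex` further down:

```python
import pyfoxfile

archive = pyfoxfile.FoxFileToArray("bundle.fox", fmttype="auto",
                                   filestart=0, listonly=True)
if archive:
    # 'ffilelist' holds one dict per member; 'fid' and 'fname' are the keys
    # later consumed by FoxFileArrayToArrayIndex.
    for entry in archive['ffilelist']:
        print(entry['fid'], entry['fname'])
```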
@@ -7512,7 +7171,7 @@ def FoxFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
|
|
|
7512
7171
|
return False
|
|
7513
7172
|
elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
|
|
7514
7173
|
return False
|
|
7515
|
-
compresscheck = CheckCompressionType(infile, formatspecs, True)
|
|
7174
|
+
compresscheck = CheckCompressionType(infile, formatspecs, filestart, True)
|
|
7516
7175
|
if(not compresscheck):
|
|
7517
7176
|
fextname = os.path.splitext(infile)[1]
|
|
7518
7177
|
if(fextname == ".gz"):
|
|
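When content sniffing fails, every reader in this file falls back to the same extension check that starts here (`.gz`, `.bz2`, `.zst`, `.lz4`/`.clz4`, `.lzo`/`.lzop`, `.lzma`, `.xz`, `.zz`/`.zl`/`.zlib`). The chain is equivalent to a small lookup table; a condensed standalone sketch:

```python
import os

# Extension-to-compression fallback, mirroring the if/elif chain in the diff.
_EXT_TO_COMPRESSION = {
    ".gz": "gzip", ".bz2": "bzip2", ".zst": "zstd",
    ".lz4": "lz4", ".clz4": "lz4", ".lzo": "lzo", ".lzop": "lzo",
    ".lzma": "lzma", ".xz": "xz", ".zz": "zlib", ".zl": "zlib", ".zlib": "zlib",
}

def guess_compression_from_name(infile):
    # Returns False when the extension is unknown, like the original chain.
    return _EXT_TO_COMPRESSION.get(os.path.splitext(infile)[1], False)

print(guess_compression_from_name("archive.fox.zst"))  # -> zstd
```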
@@ -7535,25 +7194,23 @@ def FoxFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
|
|
|
7535
7194
|
return False
|
|
7536
7195
|
if(not compresscheck):
|
|
7537
7196
|
return False
|
|
7538
|
-
fp = UncompressFile(infile, formatspecs, "rb")
|
|
7197
|
+
fp = UncompressFile(infile, formatspecs, "rb", filestart)
|
|
7539
7198
|
try:
|
|
7540
|
-
fp.seek(0, 2)
|
|
7199
|
+
fp.seek(0, 2)
|
|
7541
7200
|
except OSError:
|
|
7542
|
-
SeekToEndOfFile(fp)
|
|
7201
|
+
SeekToEndOfFile(fp)
|
|
7543
7202
|
except ValueError:
|
|
7544
|
-
SeekToEndOfFile(fp)
|
|
7545
|
-
CatSize = fp.tell()
|
|
7203
|
+
SeekToEndOfFile(fp)
|
|
7204
|
+
CatSize = fp.tell()
|
|
7546
7205
|
CatSizeEnd = CatSize;
|
|
7547
7206
|
fp.seek(curloc, 0)
|
|
7548
|
-
if(curloc > 0):
|
|
7549
|
-
fp.seek(0, 0)
|
|
7550
7207
|
if(IsNestedDict(formatspecs)):
|
|
7551
|
-
compresschecking = CheckCompressionType(fp, formatspecs, False)
|
|
7208
|
+
compresschecking = CheckCompressionType(fp, formatspecs, filestart, False)
|
|
7552
7209
|
if(compresschecking not in formatspecs):
|
|
7553
7210
|
return False
|
|
7554
7211
|
else:
|
|
7555
7212
|
formatspecs = formatspecs[compresschecking]
|
|
7556
|
-
fp.seek(
|
|
7213
|
+
fp.seek(filestart, 0)
|
|
7557
7214
|
inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
|
|
7558
7215
|
formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
|
|
7559
7216
|
formdelszie = len(formatspecs['format_delimiter'])
|
|
@@ -7585,8 +7242,6 @@ def FoxFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
|
|
|
7585
7242
|
fextrafieldslist = json.loads(fextrafieldslist[0])
|
|
7586
7243
|
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
|
|
7587
7244
|
pass
|
|
7588
|
-
if(curloc > 0):
|
|
7589
|
-
fp.seek(curloc, 0)
|
|
7590
7245
|
formversion = re.findall("([\\d]+)", formstring)
|
|
7591
7246
|
fheadsize = int(inheader[0], 16)
|
|
7592
7247
|
fnumfields = int(inheader[1], 16)
|
|
@@ -7775,7 +7430,7 @@ def FoxFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
|
|
|
7775
7430
|
outfjsoncontent = {}
|
|
7776
7431
|
elif(outfjsontype=="list"):
|
|
7777
7432
|
outfprejsoncontent = fp.read(outfjsonsize).decode("UTF-8")
|
|
7778
|
-
flisttmp =
|
|
7433
|
+
flisttmp = MkTempFile()
|
|
7779
7434
|
flisttmp.write(outfprejsoncontent.encode())
|
|
7780
7435
|
flisttmp.seek(0)
|
|
7781
7436
|
outfjsoncontent = ReadFileHeaderData(flisttmp, outfjsonlen, formatspecs['format_delimiter'])
|
|
@@ -7828,7 +7483,7 @@ def FoxFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
|
|
|
7828
7483
|
VerbosePrintOut("'" + outfjsonchecksum + "' != " + "'" + injsonfcs + "'")
|
|
7829
7484
|
return False
|
|
7830
7485
|
outfcontentstart = fp.tell()
|
|
7831
|
-
outfcontents =
|
|
7486
|
+
outfcontents = MkTempFile()
|
|
7832
7487
|
pyhascontents = False
|
|
7833
7488
|
if(outfsize > 0 and not listonly):
|
|
7834
7489
|
if(outfcompression == "none" or outfcompression == "" or outfcompression == "auto"):
|
|
@@ -7851,9 +7506,9 @@ def FoxFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
|
|
|
7851
7506
|
outfcontents.seek(0, 0)
|
|
7852
7507
|
if(uncompress):
|
|
7853
7508
|
cfcontents = UncompressFileAlt(
|
|
7854
|
-
outfcontents, formatspecs)
|
|
7509
|
+
outfcontents, formatspecs, 0)
|
|
7855
7510
|
cfcontents.seek(0, 0)
|
|
7856
|
-
outfcontents =
|
|
7511
|
+
outfcontents = MkTempFile()
|
|
7857
7512
|
shutil.copyfileobj(cfcontents, outfcontents)
|
|
7858
7513
|
cfcontents.close()
|
|
7859
7514
|
outfcontents.seek(0, 0)
|
|
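The rewritten call sites in this function (and in the wrappers below) replace ad hoc in-memory buffers with `MkTempFile()`, sometimes seeded with initial data. The helper itself is defined elsewhere in pyfoxfile.py; the following stand-in with a compatible surface is an assumption for illustration, not the package's actual implementation:

```python
import tempfile

def MkTempFile(initial_data=None):
    """Spill-to-disk binary buffer standing in for the former in-memory usage (sketch)."""
    fp = tempfile.SpooledTemporaryFile(max_size=16 * 1024 * 1024, mode="w+b")
    if initial_data is not None:
        # FoxFileStringToArray below passes the raw archive data straight in.
        if not isinstance(initial_data, bytes):
            initial_data = initial_data.encode("UTF-8")
        fp.write(initial_data)
        fp.seek(0, 0)
    return fp
```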
@@ -7896,49 +7551,49 @@ def FoxFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
|
|
|
7896
7551
|
return outlist
|
|
7897
7552
|
|
|
7898
7553
|
|
|
7899
|
-
def MultipleFoxFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
7554
|
+
def MultipleFoxFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
7900
7555
|
if(isinstance(infile, (list, tuple, ))):
|
|
7901
7556
|
pass
|
|
7902
7557
|
else:
|
|
7903
7558
|
infile = [infile]
|
|
7904
7559
|
outretval = {}
|
|
7905
7560
|
for curfname in infile:
|
|
7906
|
-
curretfile[curfname] =
|
|
7561
|
+
curretfile[curfname] = FoxFileToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7907
7562
|
return outretval
|
|
7908
7563
|
|
|
7909
|
-
def MultipleFoxFilesToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
7910
|
-
return MultipleFoxFileToArray(infile, fmttype, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7564
|
+
def MultipleFoxFilesToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
7565
|
+
return MultipleFoxFileToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7911
7566
|
|
|
7912
7567
|
|
|
7913
|
-
def FoxFileStringToArray(instr, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
7914
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
7568
|
+
def FoxFileStringToArray(instr, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
7569
|
+
checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
|
|
7915
7570
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
7916
7571
|
formatspecs = formatspecs[checkcompressfile]
|
|
7917
|
-
fp =
|
|
7918
|
-
|
|
7919
|
-
return
|
|
7572
|
+
fp = MkTempFile(instr)
|
|
7573
|
+
listarrayfiles = FoxFileToArray(fp, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7574
|
+
return listarrayfiles
|
|
7920
7575
|
|
|
7921
7576
|
|
|
7922
7577
|
def TarFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False, returnfp=False):
|
|
7923
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
7578
|
+
checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
|
|
7924
7579
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
7925
7580
|
formatspecs = formatspecs[checkcompressfile]
|
|
7926
|
-
fp =
|
|
7581
|
+
fp = MkTempFile()
|
|
7927
7582
|
fp = PackFoxFileFromTarFile(
|
|
7928
7583
|
infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
|
|
7929
|
-
|
|
7930
|
-
return
|
|
7584
|
+
listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7585
|
+
return listarrayfiles
|
|
7931
7586
|
|
|
7932
7587
|
|
|
7933
7588
|
def ZipFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False, returnfp=False):
|
|
7934
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
7589
|
+
checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
|
|
7935
7590
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
7936
7591
|
formatspecs = formatspecs[checkcompressfile]
|
|
7937
|
-
fp =
|
|
7592
|
+
fp = MkTempFile()
|
|
7938
7593
|
fp = PackFoxFileFromZipFile(
|
|
7939
7594
|
infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
|
|
7940
|
-
|
|
7941
|
-
return
|
|
7595
|
+
listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7596
|
+
return listarrayfiles
|
|
7942
7597
|
|
|
7943
7598
|
|
|
7944
7599
|
if(not rarfile_support):
|
|
@@ -7947,14 +7602,14 @@ if(not rarfile_support):
|
|
|
7947
7602
|
|
|
7948
7603
|
if(rarfile_support):
|
|
7949
7604
|
def RarFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False, returnfp=False):
|
|
7950
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
7605
|
+
checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
|
|
7951
7606
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
7952
7607
|
formatspecs = formatspecs[checkcompressfile]
|
|
7953
|
-
fp =
|
|
7608
|
+
fp = MkTempFile()
|
|
7954
7609
|
fp = PackFoxFileFromRarFile(
|
|
7955
7610
|
infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
|
|
7956
|
-
|
|
7957
|
-
return
|
|
7611
|
+
listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7612
|
+
return listarrayfiles
|
|
7958
7613
|
|
|
7959
7614
|
if(not py7zr_support):
|
|
7960
7615
|
def SevenZipFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False, returnfp=False):
|
|
@@ -7962,18 +7617,18 @@ if(not py7zr_support):
|
|
|
7962
7617
|
|
|
7963
7618
|
if(py7zr_support):
|
|
7964
7619
|
def SevenZipFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False, returnfp=False):
|
|
7965
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
7620
|
+
checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
|
|
7966
7621
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
7967
7622
|
formatspecs = formatspecs[checkcompressfile]
|
|
7968
|
-
fp =
|
|
7623
|
+
fp = MkTempFile()
|
|
7969
7624
|
fp = PackFoxFileFromSevenZipFile(
|
|
7970
7625
|
infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
|
|
7971
|
-
|
|
7972
|
-
return
|
|
7626
|
+
listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7627
|
+
return listarrayfiles
|
|
7973
7628
|
|
|
7974
7629
|
|
|
7975
|
-
def InFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
7976
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
7630
|
+
def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
7631
|
+
checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
|
|
7977
7632
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
7978
7633
|
formatspecs = formatspecs[checkcompressfile]
|
|
7979
7634
|
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
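`InFileToArray`, updated in the hunk below, gains the same leading `filestart` parameter and keeps its role as the dispatcher that hands tar, zip, rar, and 7z containers to their dedicated converters while native archives go through `FoxFileToArray`. A call sketch with a placeholder path:

```python
import pyfoxfile

listing = pyfoxfile.InFileToArray("unknown-container.bin", filestart=0, listonly=True)
print(listing if listing else "unsupported or unreadable container")
```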
@@ -7985,78 +7640,78 @@ def InFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=
|
|
|
7985
7640
|
elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
|
|
7986
7641
|
return SevenZipFileToArray(infile, seekstart, seekend, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7987
7642
|
elif(checkcompressfile == formatspecs['format_magic']):
|
|
7988
|
-
return FoxFileToArray(infile, "auto", seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7643
|
+
return FoxFileToArray(infile, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7989
7644
|
else:
|
|
7990
7645
|
return False
|
|
7991
7646
|
return False
|
|
7992
7647
|
|
|
7993
7648
|
|
|
7994
|
-
def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, seekstart=0, seekend=0, listonly=False, skipchecksum=False, checksumtype=["crc32", "crc32", "crc32"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
|
|
7995
|
-
outarray =
|
|
7649
|
+
def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, skipchecksum=False, checksumtype=["crc32", "crc32", "crc32"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
|
|
7650
|
+
outarray = MkTempFile()
|
|
7996
7651
|
packform = PackFoxFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
|
|
7997
7652
|
compressionlevel, followlink, checksumtype, extradata, formatspecs, verbose, True)
|
|
7998
|
-
|
|
7999
|
-
return
|
|
7653
|
+
listarrayfiles = FoxFileToArray(outarray, "auto", filestart, seekstart, seekend, listonly, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7654
|
+
return listarrayfiles
|
|
8000
7655
|
|
|
8001
7656
|
|
|
8002
7657
|
def FoxFileArrayToArrayIndex(inarray, returnfp=False):
|
|
8003
7658
|
if(isinstance(inarray, dict)):
|
|
8004
|
-
|
|
7659
|
+
listarrayfiles = inarray
|
|
8005
7660
|
else:
|
|
8006
7661
|
return False
|
|
8007
|
-
if(not
|
|
7662
|
+
if(not listarrayfiles):
|
|
8008
7663
|
return False
|
|
8009
|
-
outarray = {'list':
|
|
7664
|
+
outarray = {'list': listarrayfiles, 'filetoid': {}, 'idtofile': {}, 'filetypes': {'directories': {'filetoid': {}, 'idtofile': {}}, 'files': {'filetoid': {}, 'idtofile': {}}, 'links': {'filetoid': {}, 'idtofile': {}}, 'symlinks': {'filetoid': {
|
|
8010
7665
|
}, 'idtofile': {}}, 'hardlinks': {'filetoid': {}, 'idtofile': {}}, 'character': {'filetoid': {}, 'idtofile': {}}, 'block': {'filetoid': {}, 'idtofile': {}}, 'fifo': {'filetoid': {}, 'idtofile': {}}, 'devices': {'filetoid': {}, 'idtofile': {}}}}
|
|
8011
7666
|
if(returnfp):
|
|
8012
|
-
outarray.update({'fp':
|
|
8013
|
-
lenlist = len(
|
|
7667
|
+
outarray.update({'fp': listarrayfiles['fp']})
|
|
7668
|
+
lenlist = len(listarrayfiles['ffilelist'])
|
|
8014
7669
|
lcfi = 0
|
|
8015
|
-
lcfx = int(
|
|
8016
|
-
if(lenlist >
|
|
7670
|
+
lcfx = int(listarrayfiles['fnumfiles'])
|
|
7671
|
+
if(lenlist > listarrayfiles['fnumfiles'] or lenlist < listarrayfiles['fnumfiles']):
|
|
8017
7672
|
lcfx = int(lenlist)
|
|
8018
7673
|
else:
|
|
8019
|
-
lcfx = int(
|
|
7674
|
+
lcfx = int(listarrayfiles['fnumfiles'])
|
|
8020
7675
|
while(lcfi < lcfx):
|
|
8021
|
-
filetoidarray = {
|
|
8022
|
-
['fname']:
|
|
8023
|
-
idtofilearray = {
|
|
8024
|
-
['fid']:
|
|
7676
|
+
filetoidarray = {listarrayfiles['ffilelist'][lcfi]
|
|
7677
|
+
['fname']: listarrayfiles['ffilelist'][lcfi]['fid']}
|
|
7678
|
+
idtofilearray = {listarrayfiles['ffilelist'][lcfi]
|
|
7679
|
+
['fid']: listarrayfiles['ffilelist'][lcfi]['fname']}
|
|
8025
7680
|
outarray['filetoid'].update(filetoidarray)
|
|
8026
7681
|
outarray['idtofile'].update(idtofilearray)
|
|
8027
|
-
if(
|
|
7682
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 0 or listarrayfiles['ffilelist'][lcfi]['ftype'] == 7):
|
|
8028
7683
|
outarray['filetypes']['files']['filetoid'].update(filetoidarray)
|
|
8029
7684
|
outarray['filetypes']['files']['idtofile'].update(idtofilearray)
|
|
8030
|
-
if(
|
|
7685
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 1):
|
|
8031
7686
|
outarray['filetypes']['hardlinks']['filetoid'].update(
|
|
8032
7687
|
filetoidarray)
|
|
8033
7688
|
outarray['filetypes']['hardlinks']['idtofile'].update(
|
|
8034
7689
|
idtofilearray)
|
|
8035
7690
|
outarray['filetypes']['links']['filetoid'].update(filetoidarray)
|
|
8036
7691
|
outarray['filetypes']['links']['idtofile'].update(idtofilearray)
|
|
8037
|
-
if(
|
|
7692
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 2):
|
|
8038
7693
|
outarray['filetypes']['symlinks']['filetoid'].update(filetoidarray)
|
|
8039
7694
|
outarray['filetypes']['symlinks']['idtofile'].update(idtofilearray)
|
|
8040
7695
|
outarray['filetypes']['links']['filetoid'].update(filetoidarray)
|
|
8041
7696
|
outarray['filetypes']['links']['idtofile'].update(idtofilearray)
|
|
8042
|
-
if(
|
|
7697
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 3):
|
|
8043
7698
|
outarray['filetypes']['character']['filetoid'].update(
|
|
8044
7699
|
filetoidarray)
|
|
8045
7700
|
outarray['filetypes']['character']['idtofile'].update(
|
|
8046
7701
|
idtofilearray)
|
|
8047
7702
|
outarray['filetypes']['devices']['filetoid'].update(filetoidarray)
|
|
8048
7703
|
outarray['filetypes']['devices']['idtofile'].update(idtofilearray)
|
|
8049
|
-
if(
|
|
7704
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 4):
|
|
8050
7705
|
outarray['filetypes']['block']['filetoid'].update(filetoidarray)
|
|
8051
7706
|
outarray['filetypes']['block']['idtofile'].update(idtofilearray)
|
|
8052
7707
|
outarray['filetypes']['devices']['filetoid'].update(filetoidarray)
|
|
8053
7708
|
outarray['filetypes']['devices']['idtofile'].update(idtofilearray)
|
|
8054
|
-
if(
|
|
7709
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 5):
|
|
8055
7710
|
outarray['filetypes']['directories']['filetoid'].update(
|
|
8056
7711
|
filetoidarray)
|
|
8057
7712
|
outarray['filetypes']['directories']['idtofile'].update(
|
|
8058
7713
|
idtofilearray)
|
|
8059
|
-
if(
|
|
7714
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 6):
|
|
8060
7715
|
outarray['filetypes']['symlinks']['filetoid'].update(filetoidarray)
|
|
8061
7716
|
outarray['filetypes']['symlinks']['idtofile'].update(idtofilearray)
|
|
8062
7717
|
outarray['filetypes']['devices']['filetoid'].update(filetoidarray)
|
|
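
The classification loop above keys everything off the integer ftype codes: 0 and 7 land in 'files', 1 in 'hardlinks' and 'links', 2 in 'symlinks' and 'links', 3 in 'character' and 'devices', 4 in 'block' and 'devices', 5 in 'directories', and 6 in 'symlinks' and 'devices'. A minimal standalone sketch of that mapping follows; the helper and variable names are invented here for illustration and are not part of the module.

# Standalone sketch of the ftype -> category mapping in the loop above;
# category names mirror the outarray['filetypes'] keys.
FTYPE_CATEGORIES = {
    0: ("files",),
    7: ("files",),
    1: ("hardlinks", "links"),
    2: ("symlinks", "links"),
    3: ("character", "devices"),
    4: ("block", "devices"),
    5: ("directories",),
    6: ("symlinks", "devices"),  # ftype 6 is filed under these keys in the loop above
}

def classify_entry(ftype, fname, fid, index):
    """Record fname <-> fid under every category the ftype belongs to."""
    for cat in FTYPE_CATEGORIES.get(ftype, ("files",)):
        bucket = index.setdefault(cat, {"filetoid": {}, "idtofile": {}})
        bucket["filetoid"][fname] = fid
        bucket["idtofile"][fid] = fname

index = {}
classify_entry(5, "./docs", 0, index)
classify_entry(0, "./docs/readme.txt", 1, index)
print(index["directories"]["filetoid"])   # {'./docs': 0}
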
@@ -8065,13 +7720,13 @@ def FoxFileArrayToArrayIndex(inarray, returnfp=False):
|
|
|
8065
7720
|
return outarray
|
|
8066
7721
|
|
|
8067
7722
|
|
|
8068
|
-
def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
|
|
7723
|
+
def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
|
|
8069
7724
|
if(isinstance(infile, dict)):
|
|
8070
|
-
|
|
7725
|
+
listarrayfiles = infile
|
|
8071
7726
|
else:
|
|
8072
7727
|
if(infile != "-" and not isinstance(infile, bytes) and not hasattr(infile, "read") and not hasattr(infile, "write")):
|
|
8073
7728
|
infile = RemoveWindowsPath(infile)
|
|
8074
|
-
|
|
7729
|
+
listarrayfiles = FoxFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
8075
7730
|
if(IsNestedDict(formatspecs) and fmttype in formatspecs):
|
|
8076
7731
|
formatspecs = formatspecs[fmttype]
|
|
8077
7732
|
elif(IsNestedDict(formatspecs) and fmttype not in formatspecs):
|
|
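
RePackFoxFile now takes a filestart argument (inserted before seekstart/seekend) and forwards it to FoxFileToArray. A hedged usage sketch, assuming the module is importable as pyfoxfile and using placeholder archive names; keyword arguments avoid having to spell out the full positional signature.

# Usage sketch only. Assumes the module is installed as pyfoxfile and that
# "old.fox" / "new.fox" are placeholder archive names.
import pyfoxfile

ok = pyfoxfile.RePackFoxFile(
    "old.fox", "new.fox",
    fmttype="auto",
    compression="auto",
    filestart=0,            # new: byte offset at which the archive begins
    seekstart=0, seekend=0,
    verbose=True,
)
print("repacked" if ok else "repack failed")
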
@@ -8097,15 +7752,15 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8097
7752
|
os.unlink(outfile)
|
|
8098
7753
|
except OSError:
|
|
8099
7754
|
pass
|
|
8100
|
-
if(not
|
|
7755
|
+
if(not listarrayfiles):
|
|
8101
7756
|
return False
|
|
8102
7757
|
if(outfile == "-" or outfile is None):
|
|
8103
7758
|
verbose = False
|
|
8104
|
-
fp =
|
|
7759
|
+
fp = MkTempFile()
|
|
8105
7760
|
elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
|
|
8106
7761
|
fp = outfile
|
|
8107
|
-
elif(re.findall(
|
|
8108
|
-
fp =
|
|
7762
|
+
elif(re.findall(__upload_proto_support__, outfile)):
|
|
7763
|
+
fp = MkTempFile()
|
|
8109
7764
|
else:
|
|
8110
7765
|
fbasename = os.path.splitext(outfile)[0]
|
|
8111
7766
|
fextname = os.path.splitext(outfile)[1]
|
|
@@ -8117,19 +7772,19 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8117
7772
|
return False
|
|
8118
7773
|
formver = formatspecs['format_ver']
|
|
8119
7774
|
fileheaderver = str(int(formver.replace(".", "")))
|
|
8120
|
-
lenlist = len(
|
|
8121
|
-
fnumfiles = int(
|
|
7775
|
+
lenlist = len(listarrayfiles['ffilelist'])
|
|
7776
|
+
fnumfiles = int(listarrayfiles['fnumfiles'])
|
|
8122
7777
|
if(lenlist > fnumfiles or lenlist < fnumfiles):
|
|
8123
7778
|
fnumfiles = lenlist
|
|
8124
|
-
AppendFileHeader(fp, fnumfiles,
|
|
8125
|
-
lenlist = len(
|
|
8126
|
-
fnumfiles = int(
|
|
7779
|
+
AppendFileHeader(fp, fnumfiles, listarrayfiles['fencoding'], [], checksumtype[0], formatspecs)
|
|
7780
|
+
lenlist = len(listarrayfiles['ffilelist'])
|
|
7781
|
+
fnumfiles = int(listarrayfiles['fnumfiles'])
|
|
8127
7782
|
lcfi = 0
|
|
8128
|
-
lcfx = int(
|
|
8129
|
-
if(lenlist >
|
|
7783
|
+
lcfx = int(listarrayfiles['fnumfiles'])
|
|
7784
|
+
if(lenlist > listarrayfiles['fnumfiles'] or lenlist < listarrayfiles['fnumfiles']):
|
|
8130
7785
|
lcfx = int(lenlist)
|
|
8131
7786
|
else:
|
|
8132
|
-
lcfx = int(
|
|
7787
|
+
lcfx = int(listarrayfiles['fnumfiles'])
|
|
8133
7788
|
curinode = 0
|
|
8134
7789
|
curfid = 0
|
|
8135
7790
|
inodelist = []
|
|
@@ -8137,66 +7792,66 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8137
7792
|
filetoinode = {}
|
|
8138
7793
|
reallcfi = 0
|
|
8139
7794
|
while(lcfi < lcfx):
|
|
8140
|
-
fencoding =
|
|
8141
|
-
fcencoding =
|
|
8142
|
-
if(re.findall("^[.|/]",
|
|
8143
|
-
fname =
|
|
7795
|
+
fencoding = listarrayfiles['ffilelist'][reallcfi]['fencoding']
|
|
7796
|
+
fcencoding = listarrayfiles['ffilelist'][reallcfi]['fencoding']
|
|
7797
|
+
if(re.findall("^[.|/]", listarrayfiles['ffilelist'][reallcfi]['fname'])):
|
|
7798
|
+
fname = listarrayfiles['ffilelist'][reallcfi]['fname']
|
|
8144
7799
|
else:
|
|
8145
|
-
fname = "./"+
|
|
7800
|
+
fname = "./"+listarrayfiles['ffilelist'][reallcfi]['fname']
|
|
8146
7801
|
if(verbose):
|
|
8147
7802
|
VerbosePrintOut(fname)
|
|
8148
7803
|
fheadersize = format(
|
|
8149
|
-
int(
|
|
7804
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fheadersize']), 'x').lower()
|
|
8150
7805
|
fsize = format(
|
|
8151
|
-
int(
|
|
8152
|
-
flinkname =
|
|
7806
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fsize']), 'x').lower()
|
|
7807
|
+
flinkname = listarrayfiles['ffilelist'][reallcfi]['flinkname']
|
|
8153
7808
|
fatime = format(
|
|
8154
|
-
int(
|
|
7809
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fatime']), 'x').lower()
|
|
8155
7810
|
fmtime = format(
|
|
8156
|
-
int(
|
|
7811
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fmtime']), 'x').lower()
|
|
8157
7812
|
fctime = format(
|
|
8158
|
-
int(
|
|
7813
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fctime']), 'x').lower()
|
|
8159
7814
|
fbtime = format(
|
|
8160
|
-
int(
|
|
7815
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fbtime']), 'x').lower()
|
|
8161
7816
|
fmode = format(
|
|
8162
|
-
int(
|
|
7817
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fmode']), 'x').lower()
|
|
8163
7818
|
fchmode = format(
|
|
8164
|
-
int(
|
|
7819
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fchmode']), 'x').lower()
|
|
8165
7820
|
fuid = format(
|
|
8166
|
-
int(
|
|
8167
|
-
funame =
|
|
7821
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fuid']), 'x').lower()
|
|
7822
|
+
funame = listarrayfiles['ffilelist'][reallcfi]['funame']
|
|
8168
7823
|
fgid = format(
|
|
8169
|
-
int(
|
|
8170
|
-
fgname =
|
|
7824
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fgid']), 'x').lower()
|
|
7825
|
+
fgname = listarrayfiles['ffilelist'][reallcfi]['fgname']
|
|
8171
7826
|
finode = format(
|
|
8172
|
-
int(
|
|
7827
|
+
int(listarrayfiles['ffilelist'][reallcfi]['finode']), 'x').lower()
|
|
8173
7828
|
flinkcount = format(
|
|
8174
|
-
int(
|
|
7829
|
+
int(listarrayfiles['ffilelist'][reallcfi]['flinkcount']), 'x').lower()
|
|
8175
7830
|
fwinattributes = format(
|
|
8176
|
-
int(
|
|
8177
|
-
fcompression =
|
|
7831
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fwinattributes']), 'x').lower()
|
|
7832
|
+
fcompression = listarrayfiles['ffilelist'][reallcfi]['fcompression']
|
|
8178
7833
|
fcsize = format(
|
|
8179
|
-
int(
|
|
7834
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fcsize']), 'x').lower()
|
|
8180
7835
|
fdev = format(
|
|
8181
|
-
int(
|
|
7836
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fdev']), 'x').lower()
|
|
8182
7837
|
fdev_minor = format(
|
|
8183
|
-
int(
|
|
7838
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fminor']), 'x').lower()
|
|
8184
7839
|
fdev_major = format(
|
|
8185
|
-
int(
|
|
8186
|
-
fseeknextfile =
|
|
8187
|
-
if(len(
|
|
8188
|
-
|
|
8189
|
-
|
|
7840
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fmajor']), 'x').lower()
|
|
7841
|
+
fseeknextfile = listarrayfiles['ffilelist'][reallcfi]['fseeknextfile']
|
|
7842
|
+
if(len(listarrayfiles['ffilelist'][reallcfi]['fextralist']) > listarrayfiles['ffilelist'][reallcfi]['fextrafields'] and len(listarrayfiles['ffilelist'][reallcfi]['fextralist']) > 0):
|
|
7843
|
+
listarrayfiles['ffilelist'][reallcfi]['fextrafields'] = len(
|
|
7844
|
+
listarrayfiles['ffilelist'][reallcfi]['fextralist'])
|
|
8190
7845
|
if(not followlink and len(extradata) <= 0):
|
|
8191
|
-
extradata =
|
|
7846
|
+
extradata = listarrayfiles['ffilelist'][reallcfi]['fextralist']
|
|
8192
7847
|
if(not followlink and len(jsondata) <= 0):
|
|
8193
|
-
jsondata =
|
|
8194
|
-
fcontents =
|
|
8195
|
-
if(not
|
|
8196
|
-
fcontents =
|
|
8197
|
-
typechecktest = CheckCompressionType(fcontents, closefp=False)
|
|
7848
|
+
jsondata = listarrayfiles['ffilelist'][reallcfi]['fjsondata']
|
|
7849
|
+
fcontents = listarrayfiles['ffilelist'][reallcfi]['fcontents']
|
|
7850
|
+
if(not listarrayfiles['ffilelist'][reallcfi]['fcontentasfile']):
|
|
7851
|
+
fcontents = MkTempFile(fcontents)
|
|
7852
|
+
typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
|
|
8198
7853
|
fcontents.seek(0, 0)
|
|
8199
|
-
fcencoding = GetFileEncoding(fcontents, False)
|
|
7854
|
+
fcencoding = GetFileEncoding(fcontents, 0, False)
|
|
8200
7855
|
fcompression = ""
|
|
8201
7856
|
fcsize = format(int(0), 'x').lower()
|
|
8202
7857
|
curcompression = "none"
|
|
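
All of the numeric header fields here (sizes, timestamps, mode bits, uid/gid, inode and device numbers) are serialized as lowercase hexadecimal strings via format(int(value), 'x').lower(). A small round-trip sketch of that encoding, independent of the module:

# Round-trip sketch of the lowercase-hex field encoding used above.
def to_hex_field(value):
    return format(int(value), 'x').lower()

def from_hex_field(text):
    return int(text, 16)

fields = {"fsize": 4096, "fmode": 0o100644, "fuid": 1000}
encoded = {k: to_hex_field(v) for k, v in fields.items()}
decoded = {k: from_hex_field(v) for k, v in encoded.items()}

print(encoded)             # {'fsize': '1000', 'fmode': '81a4', 'fuid': '3e8'}
assert decoded == fields
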
@@ -8209,7 +7864,7 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8209
7864
|
ilmin = 0
|
|
8210
7865
|
ilcsize = []
|
|
8211
7866
|
while(ilmin < ilsize):
|
|
8212
|
-
cfcontents =
|
|
7867
|
+
cfcontents = MkTempFile()
|
|
8213
7868
|
fcontents.seek(0, 0)
|
|
8214
7869
|
shutil.copyfileobj(fcontents, cfcontents)
|
|
8215
7870
|
fcontents.seek(0, 0)
|
|
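
The loop that starts here, together with the selection a few lines further down, compresses each file once per candidate codec, records the resulting sizes in ilcsize, and keeps the codec that produced the smallest output. A generic in-memory sketch of that idea using only the standard library (the real code streams through temporary file objects via CompressOpenFileAlt):

# Generic sketch: compress once per candidate codec and keep the smallest
# result. Standard-library codecs stand in for the module's codec list.
import bz2
import lzma
import zlib

CODECS = {
    "zlib": lambda data: zlib.compress(data, 9),
    "bzip2": lambda data: bz2.compress(data, 9),
    "lzma": lambda data: lzma.compress(data),
}

def pick_smallest(data, codecs=CODECS):
    results = {name: fn(data) for name, fn in codecs.items()}
    best = min(results, key=lambda name: len(results[name]))
    return best, results[best]

name, blob = pick_smallest(b"example payload " * 1024)
print(name, len(blob))
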
@@ -8226,7 +7881,7 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8226
7881
|
ilcmin = ilcsize.index(min(ilcsize))
|
|
8227
7882
|
curcompression = compressionuselist[ilcmin]
|
|
8228
7883
|
fcontents.seek(0, 0)
|
|
8229
|
-
cfcontents =
|
|
7884
|
+
cfcontents = MkTempFile()
|
|
8230
7885
|
shutil.copyfileobj(fcontents, cfcontents)
|
|
8231
7886
|
cfcontents.seek(0, 0)
|
|
8232
7887
|
cfcontents = CompressOpenFileAlt(
|
|
@@ -8239,10 +7894,10 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8239
7894
|
fcontents.close()
|
|
8240
7895
|
fcontents = cfcontents
|
|
8241
7896
|
if followlink:
|
|
8242
|
-
if(
|
|
8243
|
-
getflinkpath =
|
|
8244
|
-
flinkid =
|
|
8245
|
-
flinkinfo =
|
|
7897
|
+
if(listarrayfiles['ffilelist'][reallcfi]['ftype'] == 1 or listarrayfiles['ffilelist'][reallcfi]['ftype'] == 2):
|
|
7898
|
+
getflinkpath = listarrayfiles['ffilelist'][reallcfi]['flinkname']
|
|
7899
|
+
flinkid = prelistarrayfiles['filetoid'][getflinkpath]
|
|
7900
|
+
flinkinfo = listarrayfiles['ffilelist'][flinkid]
|
|
8246
7901
|
fheadersize = format(
|
|
8247
7902
|
int(flinkinfo['fheadersize']), 'x').lower()
|
|
8248
7903
|
fsize = format(int(flinkinfo['fsize']), 'x').lower()
|
|
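
When followlink is set, hard links and symlinks (ftype 1 and 2) are resolved by looking the link target up in a name-to-id index and repacking the target's own entry. A minimal sketch of that lookup; the names here are illustrative rather than the module's:

# Sketch of resolving a link to its target entry through a name -> id index
# of the kind built by the ArrayIndex code earlier in this file.
def resolve_link(entry, filetoid, filelist):
    """Follow entry['flinkname'] and return the entry it points at."""
    target_id = filetoid[entry["flinkname"]]
    return filelist[target_id]

filelist = [
    {"fname": "./a.txt", "ftype": 0, "fsize": 12},
    {"fname": "./a-link", "ftype": 2, "flinkname": "./a.txt"},
]
filetoid = {e["fname"]: i for i, e in enumerate(filelist)}

target = resolve_link(filelist[1], filetoid, filelist)
print(target["fname"], target["fsize"])   # ./a.txt 12
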
@@ -8275,14 +7930,14 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8275
7930
|
extradata = flinkinfo['fjsondata']
|
|
8276
7931
|
fcontents = flinkinfo['fcontents']
|
|
8277
7932
|
if(not flinkinfo['fcontentasfile']):
|
|
8278
|
-
fcontents =
|
|
7933
|
+
fcontents = MkTempFile(fcontents)
|
|
8279
7934
|
ftypehex = format(flinkinfo['ftype'], 'x').lower()
|
|
8280
7935
|
else:
|
|
8281
7936
|
ftypehex = format(
|
|
8282
|
-
|
|
7937
|
+
listarrayfiles['ffilelist'][reallcfi]['ftype'], 'x').lower()
|
|
8283
7938
|
fcurfid = format(curfid, 'x').lower()
|
|
8284
7939
|
if(not followlink and finode != 0):
|
|
8285
|
-
if(
|
|
7940
|
+
if(listarrayfiles['ffilelist'][reallcfi]['ftype'] != 1):
|
|
8286
7941
|
fcurinode = format(int(curinode), 'x').lower()
|
|
8287
7942
|
inodetofile.update({curinode: fname})
|
|
8288
7943
|
filetoinode.update({fname: curinode})
|
|
@@ -8332,7 +7987,7 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8332
7987
|
outvar = fp.read()
|
|
8333
7988
|
fp.close()
|
|
8334
7989
|
return outvar
|
|
8335
|
-
elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
|
|
7990
|
+
elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
|
|
8336
7991
|
fp = CompressOpenFileAlt(
|
|
8337
7992
|
fp, compression, compressionlevel, compressionuselist, formatspecs)
|
|
8338
7993
|
fp.seek(0, 0)
|
|
@@ -8345,50 +8000,50 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8345
8000
|
return True
|
|
8346
8001
|
|
|
8347
8002
|
|
|
8348
|
-
def RePackFoxFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
|
|
8349
|
-
fp =
|
|
8350
|
-
|
|
8351
|
-
checksumtype, skipchecksum, extradata, formatspecs, verbose, returnfp)
|
|
8352
|
-
return
|
|
8003
|
+
def RePackFoxFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
|
|
8004
|
+
fp = MkTempFile(instr)
|
|
8005
|
+
listarrayfiles = RePackFoxFile(fp, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
|
|
8006
|
+
checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
|
|
8007
|
+
return listarrayfiles
|
|
8353
8008
|
|
|
8354
8009
|
|
|
8355
|
-
def PackFoxFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False,
|
|
8356
|
-
outarray =
|
|
8010
|
+
def PackFoxFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
|
|
8011
|
+
outarray = MkTempFile()
|
|
8357
8012
|
packform = PackFoxFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
|
|
8358
8013
|
compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, True)
|
|
8359
|
-
|
|
8360
|
-
|
|
8361
|
-
return
|
|
8014
|
+
listarrayfiles = RePackFoxFile(outarray, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
|
|
8015
|
+
checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
|
|
8016
|
+
return listarrayfiles
|
|
8362
8017
|
|
|
8363
8018
|
|
|
8364
|
-
def UnPackFoxFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, preservepermissions=True, preservetime=True, seektoend=False, verbose=False, returnfp=False):
|
|
8019
|
+
def UnPackFoxFile(infile, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, preservepermissions=True, preservetime=True, seektoend=False, verbose=False, returnfp=False):
|
|
8365
8020
|
if(outdir is not None):
|
|
8366
8021
|
outdir = RemoveWindowsPath(outdir)
|
|
8367
8022
|
if(verbose):
|
|
8368
8023
|
logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
|
|
8369
8024
|
if(isinstance(infile, dict)):
|
|
8370
|
-
|
|
8025
|
+
listarrayfiles = infile
|
|
8371
8026
|
else:
|
|
8372
8027
|
if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
|
|
8373
8028
|
infile = RemoveWindowsPath(infile)
|
|
8374
|
-
|
|
8375
|
-
if(not
|
|
8029
|
+
listarrayfiles = FoxFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
8030
|
+
if(not listarrayfiles):
|
|
8376
8031
|
return False
|
|
8377
|
-
lenlist = len(
|
|
8378
|
-
fnumfiles = int(
|
|
8032
|
+
lenlist = len(listarrayfiles['ffilelist'])
|
|
8033
|
+
fnumfiles = int(listarrayfiles['fnumfiles'])
|
|
8379
8034
|
lcfi = 0
|
|
8380
|
-
lcfx = int(
|
|
8381
|
-
if(lenlist >
|
|
8035
|
+
lcfx = int(listarrayfiles['fnumfiles'])
|
|
8036
|
+
if(lenlist > listarrayfiles['fnumfiles'] or lenlist < listarrayfiles['fnumfiles']):
|
|
8382
8037
|
lcfx = int(lenlist)
|
|
8383
8038
|
else:
|
|
8384
|
-
lcfx = int(
|
|
8039
|
+
lcfx = int(listarrayfiles['fnumfiles'])
|
|
8385
8040
|
while(lcfi < lcfx):
|
|
8386
8041
|
funame = ""
|
|
8387
8042
|
try:
|
|
8388
8043
|
import pwd
|
|
8389
8044
|
try:
|
|
8390
8045
|
userinfo = pwd.getpwuid(
|
|
8391
|
-
|
|
8046
|
+
listarrayfiles['ffilelist'][lcfi]['fuid'])
|
|
8392
8047
|
funame = userinfo.pw_name
|
|
8393
8048
|
except KeyError:
|
|
8394
8049
|
funame = ""
|
|
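
The extraction path resolves numeric uid/gid values to names through pwd and grp, falling back to empty strings when the id is unknown or the modules are unavailable (they only exist on POSIX). The same pattern as a standalone helper:

# Sketch of the uid/gid -> name lookups used above, with the same
# KeyError / ImportError fallbacks.
def lookup_owner(uid, gid):
    uname = gname = ""
    try:
        import pwd
        try:
            uname = pwd.getpwuid(uid).pw_name
        except KeyError:
            uname = ""
    except ImportError:
        uname = ""
    try:
        import grp
        try:
            gname = grp.getgrgid(gid).gr_name
        except KeyError:
            gname = ""
    except ImportError:
        gname = ""
    return uname, gname

print(lookup_owner(0, 0))   # typically ('root', 'root') on POSIX, ('', '') elsewhere
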
@@ -8399,7 +8054,7 @@ def UnPackFoxFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0,
|
|
|
8399
8054
|
import grp
|
|
8400
8055
|
try:
|
|
8401
8056
|
groupinfo = grp.getgrgid(
|
|
8402
|
-
|
|
8057
|
+
listarrayfiles['ffilelist'][lcfi]['fgid'])
|
|
8403
8058
|
fgname = groupinfo.gr_name
|
|
8404
8059
|
except KeyError:
|
|
8405
8060
|
fgname = ""
|
|
@@ -8407,15 +8062,15 @@ def UnPackFoxFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0,
|
|
|
8407
8062
|
fgname = ""
|
|
8408
8063
|
if(verbose):
|
|
8409
8064
|
VerbosePrintOut(PrependPath(
|
|
8410
|
-
outdir,
|
|
8411
|
-
if(
|
|
8412
|
-
with open(PrependPath(outdir,
|
|
8413
|
-
if(not
|
|
8414
|
-
|
|
8415
|
-
|
|
8416
|
-
|
|
8065
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
|
|
8066
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 0 or listarrayfiles['ffilelist'][lcfi]['ftype'] == 7):
|
|
8067
|
+
with open(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), "wb") as fpc:
|
|
8068
|
+
if(not listarrayfiles['ffilelist'][lcfi]['fcontentasfile']):
|
|
8069
|
+
listarrayfiles['ffilelist'][lcfi]['fcontents'] = MkTempFile(
|
|
8070
|
+
listarrayfiles['ffilelist'][lcfi]['fcontents'])
|
|
8071
|
+
listarrayfiles['ffilelist'][lcfi]['fcontents'].seek(0, 0)
|
|
8417
8072
|
shutil.copyfileobj(
|
|
8418
|
-
|
|
8073
|
+
listarrayfiles['ffilelist'][lcfi]['fcontents'], fpc)
|
|
8419
8074
|
try:
|
|
8420
8075
|
fpc.flush()
|
|
8421
8076
|
if(hasattr(os, "sync")):
|
|
@@ -8426,20 +8081,20 @@ def UnPackFoxFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0,
|
|
|
8426
8081
|
pass
|
|
8427
8082
|
except OSError:
|
|
8428
8083
|
pass
|
|
8429
|
-
if(hasattr(os, "chown") and funame ==
|
|
8430
|
-
os.chown(PrependPath(outdir,
|
|
8431
|
-
|
|
8084
|
+
if(hasattr(os, "chown") and funame == listarrayfiles['ffilelist'][lcfi]['funame'] and fgname == listarrayfiles['ffilelist'][lcfi]['fgname'] and preservepermissions):
|
|
8085
|
+
os.chown(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']),
|
|
8086
|
+
listarrayfiles['ffilelist'][lcfi]['fuid'], listarrayfiles['ffilelist'][lcfi]['fgid'])
|
|
8432
8087
|
if(preservepermissions):
|
|
8433
8088
|
os.chmod(PrependPath(
|
|
8434
|
-
outdir,
|
|
8089
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), listarrayfiles['ffilelist'][lcfi]['fchmode'])
|
|
8435
8090
|
if(preservetime):
|
|
8436
|
-
os.utime(PrependPath(outdir,
|
|
8437
|
-
|
|
8438
|
-
if(
|
|
8091
|
+
os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
|
|
8092
|
+
listarrayfiles['ffilelist'][lcfi]['fatime'], listarrayfiles['ffilelist'][lcfi]['fmtime']))
|
|
8093
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 1):
|
|
8439
8094
|
if(followlink):
|
|
8440
|
-
getflinkpath =
|
|
8441
|
-
flinkid =
|
|
8442
|
-
flinkinfo =
|
|
8095
|
+
getflinkpath = listarrayfiles['ffilelist'][lcfi]['flinkname']
|
|
8096
|
+
flinkid = prelistarrayfiles['filetoid'][getflinkpath]
|
|
8097
|
+
flinkinfo = listarrayfiles['ffilelist'][flinkid]
|
|
8443
8098
|
funame = ""
|
|
8444
8099
|
try:
|
|
8445
8100
|
import pwd
|
|
@@ -8461,9 +8116,9 @@ def UnPackFoxFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0,
|
|
|
8461
8116
|
except ImportError:
|
|
8462
8117
|
fgname = ""
|
|
8463
8118
|
if(flinkinfo['ftype'] == 0 or flinkinfo['ftype'] == 7):
|
|
8464
|
-
with open(PrependPath(outdir,
|
|
8119
|
+
with open(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), "wb") as fpc:
|
|
8465
8120
|
if(not flinkinfo['fcontentasfile']):
|
|
8466
|
-
flinkinfo['fcontents'] =
|
|
8121
|
+
flinkinfo['fcontents'] = MkTempFile(
|
|
8467
8122
|
flinkinfo['fcontents'])
|
|
8468
8123
|
flinkinfo['fcontents'].seek(0, 0)
|
|
8469
8124
|
shutil.copyfileobj(flinkinfo['fcontents'], fpc)
|
|
@@ -8479,46 +8134,46 @@ def UnPackFoxFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0,
|
|
|
8479
8134
|
pass
|
|
8480
8135
|
if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
|
|
8481
8136
|
os.chown(PrependPath(
|
|
8482
|
-
outdir,
|
|
8137
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fuid'], flinkinfo['fgid'])
|
|
8483
8138
|
if(preservepermissions):
|
|
8484
8139
|
os.chmod(PrependPath(
|
|
8485
|
-
outdir,
|
|
8140
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
|
|
8486
8141
|
if(preservetime):
|
|
8487
|
-
os.utime(PrependPath(outdir,
|
|
8142
|
+
os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
|
|
8488
8143
|
flinkinfo['fatime'], flinkinfo['fmtime']))
|
|
8489
8144
|
if(flinkinfo['ftype'] == 1):
|
|
8490
8145
|
os.link(flinkinfo['flinkname'], PrependPath(
|
|
8491
|
-
outdir,
|
|
8146
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
|
|
8492
8147
|
if(flinkinfo['ftype'] == 2):
|
|
8493
8148
|
os.symlink(flinkinfo['flinkname'], PrependPath(
|
|
8494
|
-
outdir,
|
|
8149
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
|
|
8495
8150
|
if(flinkinfo['ftype'] == 5):
|
|
8496
8151
|
if(preservepermissions):
|
|
8497
8152
|
os.mkdir(PrependPath(
|
|
8498
|
-
outdir,
|
|
8153
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
|
|
8499
8154
|
else:
|
|
8500
8155
|
os.mkdir(PrependPath(
|
|
8501
|
-
outdir,
|
|
8156
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
|
|
8502
8157
|
if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
|
|
8503
8158
|
os.chown(PrependPath(
|
|
8504
|
-
outdir,
|
|
8159
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fuid'], flinkinfo['fgid'])
|
|
8505
8160
|
if(preservepermissions):
|
|
8506
8161
|
os.chmod(PrependPath(
|
|
8507
|
-
outdir,
|
|
8162
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
|
|
8508
8163
|
if(preservetime):
|
|
8509
|
-
os.utime(PrependPath(outdir,
|
|
8164
|
+
os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
|
|
8510
8165
|
flinkinfo['fatime'], flinkinfo['fmtime']))
|
|
8511
8166
|
if(flinkinfo['ftype'] == 6 and hasattr(os, "mkfifo")):
|
|
8512
8167
|
os.mkfifo(PrependPath(
|
|
8513
|
-
outdir,
|
|
8168
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
|
|
8514
8169
|
else:
|
|
8515
|
-
os.link(
|
|
8516
|
-
outdir,
|
|
8517
|
-
if(
|
|
8170
|
+
os.link(listarrayfiles['ffilelist'][lcfi]['flinkname'], PrependPath(
|
|
8171
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
|
|
8172
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 2):
|
|
8518
8173
|
if(followlink):
|
|
8519
|
-
getflinkpath =
|
|
8520
|
-
flinkid =
|
|
8521
|
-
flinkinfo =
|
|
8174
|
+
getflinkpath = listarrayfiles['ffilelist'][lcfi]['flinkname']
|
|
8175
|
+
flinkid = prelistarrayfiles['filetoid'][getflinkpath]
|
|
8176
|
+
flinkinfo = listarrayfiles['ffilelist'][flinkid]
|
|
8522
8177
|
funame = ""
|
|
8523
8178
|
try:
|
|
8524
8179
|
import pwd
|
|
@@ -8540,9 +8195,9 @@ def UnPackFoxFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0,
|
|
|
8540
8195
|
except ImportError:
|
|
8541
8196
|
fgname = ""
|
|
8542
8197
|
if(flinkinfo['ftype'] == 0 or flinkinfo['ftype'] == 7):
|
|
8543
|
-
with open(PrependPath(outdir,
|
|
8198
|
+
with open(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), "wb") as fpc:
|
|
8544
8199
|
if(not flinkinfo['fcontentasfile']):
|
|
8545
|
-
flinkinfo['fcontents'] =
|
|
8200
|
+
flinkinfo['fcontents'] = MkTempFile(
|
|
8546
8201
|
flinkinfo['fcontents'])
|
|
8547
8202
|
flinkinfo['fcontents'].seek(0, 0)
|
|
8548
8203
|
shutil.copyfileobj(flinkinfo['fcontents'], fpc)
|
|
@@ -8558,71 +8213,71 @@ def UnPackFoxFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0,
|
|
|
8558
8213
|
pass
|
|
8559
8214
|
if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
|
|
8560
8215
|
os.chown(PrependPath(
|
|
8561
|
-
outdir,
|
|
8216
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fuid'], flinkinfo['fgid'])
|
|
8562
8217
|
if(preservepermissions):
|
|
8563
8218
|
os.chmod(PrependPath(
|
|
8564
|
-
outdir,
|
|
8219
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
|
|
8565
8220
|
if(preservetime):
|
|
8566
|
-
os.utime(PrependPath(outdir,
|
|
8221
|
+
os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
|
|
8567
8222
|
flinkinfo['fatime'], flinkinfo['fmtime']))
|
|
8568
8223
|
if(flinkinfo['ftype'] == 1):
|
|
8569
8224
|
os.link(flinkinfo['flinkname'], PrependPath(
|
|
8570
|
-
outdir,
|
|
8225
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
|
|
8571
8226
|
if(flinkinfo['ftype'] == 2):
|
|
8572
8227
|
os.symlink(flinkinfo['flinkname'], PrependPath(
|
|
8573
|
-
outdir,
|
|
8228
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
|
|
8574
8229
|
if(flinkinfo['ftype'] == 5):
|
|
8575
8230
|
if(preservepermissions):
|
|
8576
8231
|
os.mkdir(PrependPath(
|
|
8577
|
-
outdir,
|
|
8232
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
|
|
8578
8233
|
else:
|
|
8579
8234
|
os.mkdir(PrependPath(
|
|
8580
|
-
outdir,
|
|
8235
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
|
|
8581
8236
|
if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
|
|
8582
8237
|
os.chown(PrependPath(
|
|
8583
|
-
outdir,
|
|
8238
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fuid'], flinkinfo['fgid'])
|
|
8584
8239
|
if(preservepermissions):
|
|
8585
8240
|
os.chmod(PrependPath(
|
|
8586
|
-
outdir,
|
|
8241
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
|
|
8587
8242
|
if(preservetime):
|
|
8588
|
-
os.utime(PrependPath(outdir,
|
|
8243
|
+
os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
|
|
8589
8244
|
flinkinfo['fatime'], flinkinfo['fmtime']))
|
|
8590
8245
|
if(flinkinfo['ftype'] == 6 and hasattr(os, "mkfifo")):
|
|
8591
8246
|
os.mkfifo(PrependPath(
|
|
8592
|
-
outdir,
|
|
8247
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
|
|
8593
8248
|
else:
|
|
8594
|
-
os.symlink(
|
|
8595
|
-
outdir,
|
|
8596
|
-
if(
|
|
8249
|
+
os.symlink(listarrayfiles['ffilelist'][lcfi]['flinkname'], PrependPath(
|
|
8250
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
|
|
8251
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 5):
|
|
8597
8252
|
if(preservepermissions):
|
|
8598
8253
|
os.mkdir(PrependPath(
|
|
8599
|
-
outdir,
|
|
8254
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), listarrayfiles['ffilelist'][lcfi]['fchmode'])
|
|
8600
8255
|
else:
|
|
8601
8256
|
os.mkdir(PrependPath(
|
|
8602
|
-
outdir,
|
|
8603
|
-
if(hasattr(os, "chown") and funame ==
|
|
8604
|
-
os.chown(PrependPath(outdir,
|
|
8605
|
-
|
|
8257
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
|
|
8258
|
+
if(hasattr(os, "chown") and funame == listarrayfiles['ffilelist'][lcfi]['funame'] and fgname == listarrayfiles['ffilelist'][lcfi]['fgname'] and preservepermissions):
|
|
8259
|
+
os.chown(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']),
|
|
8260
|
+
listarrayfiles['ffilelist'][lcfi]['fuid'], listarrayfiles['ffilelist'][lcfi]['fgid'])
|
|
8606
8261
|
if(preservepermissions):
|
|
8607
8262
|
os.chmod(PrependPath(
|
|
8608
|
-
outdir,
|
|
8263
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), listarrayfiles['ffilelist'][lcfi]['fchmode'])
|
|
8609
8264
|
if(preservetime):
|
|
8610
|
-
os.utime(PrependPath(outdir,
|
|
8611
|
-
|
|
8612
|
-
if(
|
|
8265
|
+
os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
|
|
8266
|
+
listarrayfiles['ffilelist'][lcfi]['fatime'], listarrayfiles['ffilelist'][lcfi]['fmtime']))
|
|
8267
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 6 and hasattr(os, "mkfifo")):
|
|
8613
8268
|
os.mkfifo(PrependPath(
|
|
8614
|
-
outdir,
|
|
8269
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), listarrayfiles['ffilelist'][lcfi]['fchmode'])
|
|
8615
8270
|
lcfi = lcfi + 1
|
|
8616
8271
|
if(returnfp):
|
|
8617
|
-
return
|
|
8272
|
+
return listarrayfiles['ffilelist']['fp']
|
|
8618
8273
|
else:
|
|
8619
8274
|
return True
|
|
8620
8275
|
|
|
8621
8276
|
|
|
8622
8277
|
def UnPackFoxFileString(instr, outdir=None, followlink=False, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
|
|
8623
|
-
fp =
|
|
8624
|
-
|
|
8625
|
-
return
|
|
8278
|
+
fp = MkTempFile(instr)
|
|
8279
|
+
listarrayfiles = UnPackFoxFile(fp, outdir, followlink, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
|
|
8280
|
+
return listarrayfiles
|
|
8626
8281
|
|
|
8627
8282
|
def ftype_to_str(ftype):
|
|
8628
8283
|
mapping = {
|
|
@@ -8639,71 +8294,71 @@ def ftype_to_str(ftype):
|
|
|
8639
8294
|
# Default to "file" if unknown
|
|
8640
8295
|
return mapping.get(ftype, "file")
|
|
8641
8296
|
|
|
8642
|
-
def FoxFileListFiles(infile, fmttype="auto", seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
|
|
8297
|
+
def FoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
|
|
8643
8298
|
if(verbose):
|
|
8644
8299
|
logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
|
|
8645
8300
|
if(isinstance(infile, dict)):
|
|
8646
|
-
|
|
8301
|
+
listarrayfiles = infile
|
|
8647
8302
|
else:
|
|
8648
8303
|
if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
|
|
8649
8304
|
infile = RemoveWindowsPath(infile)
|
|
8650
|
-
|
|
8651
|
-
if(not
|
|
8305
|
+
listarrayfiles = FoxFileToArray(infile, fmttype, filestart, seekstart, seekend, True, False, False, skipchecksum, formatspecs, seektoend, returnfp)
|
|
8306
|
+
if(not listarrayfiles):
|
|
8652
8307
|
return False
|
|
8653
|
-
lenlist = len(
|
|
8654
|
-
fnumfiles = int(
|
|
8308
|
+
lenlist = len(listarrayfiles['ffilelist'])
|
|
8309
|
+
fnumfiles = int(listarrayfiles['fnumfiles'])
|
|
8655
8310
|
lcfi = 0
|
|
8656
|
-
lcfx = int(
|
|
8657
|
-
if(lenlist >
|
|
8311
|
+
lcfx = int(listarrayfiles['fnumfiles'])
|
|
8312
|
+
if(lenlist > listarrayfiles['fnumfiles'] or lenlist < listarrayfiles['fnumfiles']):
|
|
8658
8313
|
lcfx = int(lenlist)
|
|
8659
8314
|
else:
|
|
8660
|
-
lcfx = int(
|
|
8315
|
+
lcfx = int(listarrayfiles['fnumfiles'])
|
|
8661
8316
|
returnval = {}
|
|
8662
8317
|
while(lcfi < lcfx):
|
|
8663
|
-
returnval.update({lcfi:
|
|
8318
|
+
returnval.update({lcfi: listarrayfiles['ffilelist'][lcfi]['fname']})
|
|
8664
8319
|
if(not verbose):
|
|
8665
|
-
VerbosePrintOut(
|
|
8320
|
+
VerbosePrintOut(listarrayfiles['ffilelist'][lcfi]['fname'])
|
|
8666
8321
|
if(verbose):
|
|
8667
8322
|
permissions = {'access': {'0': ('---'), '1': ('--x'), '2': ('-w-'), '3': ('-wx'), '4': (
|
|
8668
8323
|
'r--'), '5': ('r-x'), '6': ('rw-'), '7': ('rwx')}, 'roles': {0: 'owner', 1: 'group', 2: 'other'}}
|
|
8669
|
-
printfname =
|
|
8670
|
-
if(
|
|
8671
|
-
printfname =
|
|
8672
|
-
" link to " +
|
|
8673
|
-
if(
|
|
8674
|
-
printfname =
|
|
8675
|
-
" -> " +
|
|
8676
|
-
fuprint =
|
|
8324
|
+
printfname = listarrayfiles['ffilelist'][lcfi]['fname']
|
|
8325
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 1):
|
|
8326
|
+
printfname = listarrayfiles['ffilelist'][lcfi]['fname'] + \
|
|
8327
|
+
" link to " + listarrayfiles['ffilelist'][lcfi]['flinkname']
|
|
8328
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 2):
|
|
8329
|
+
printfname = listarrayfiles['ffilelist'][lcfi]['fname'] + \
|
|
8330
|
+
" -> " + listarrayfiles['ffilelist'][lcfi]['flinkname']
|
|
8331
|
+
fuprint = listarrayfiles['ffilelist'][lcfi]['funame']
|
|
8677
8332
|
if(len(fuprint) <= 0):
|
|
8678
|
-
fuprint =
|
|
8679
|
-
fgprint =
|
|
8333
|
+
fuprint = listarrayfiles['ffilelist'][lcfi]['fuid']
|
|
8334
|
+
fgprint = listarrayfiles['ffilelist'][lcfi]['fgname']
|
|
8680
8335
|
if(len(fgprint) <= 0):
|
|
8681
|
-
fgprint =
|
|
8336
|
+
fgprint = listarrayfiles['ffilelist'][lcfi]['fgid']
|
|
8682
8337
|
if(newstyle):
|
|
8683
|
-
VerbosePrintOut(ftype_to_str(
|
|
8684
|
-
|
|
8338
|
+
VerbosePrintOut(ftype_to_str(listarrayfiles['ffilelist'][lcfi]['ftype']) + "\t" + listarrayfiles['ffilelist'][lcfi]['fcompression'] + "\t" + str(
|
|
8339
|
+
listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + "\t" + printfname)
|
|
8685
8340
|
else:
|
|
8686
|
-
VerbosePrintOut(PrintPermissionString(
|
|
8687
|
-
|
|
8341
|
+
VerbosePrintOut(PrintPermissionString(listarrayfiles['ffilelist'][lcfi]['fmode'], listarrayfiles['ffilelist'][lcfi]['ftype']) + " " + str(fuprint) + "/" + str(fgprint) + " " + str(
|
|
8342
|
+
listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + " " + datetime.datetime.utcfromtimestamp(listarrayfiles['ffilelist'][lcfi]['fmtime']).strftime('%Y-%m-%d %H:%M') + " " + printfname)
|
|
8688
8343
|
lcfi = lcfi + 1
|
|
8689
8344
|
if(returnfp):
|
|
8690
|
-
return
|
|
8345
|
+
return listarrayfiles['fp']
|
|
8691
8346
|
else:
|
|
8692
8347
|
return True
|
|
8693
8348
|
|
|
8694
8349
|
|
|
8695
8350
|
def FoxFileStringListFiles(instr, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
|
|
8696
|
-
fp =
|
|
8697
|
-
|
|
8351
|
+
fp = MkTempFile(instr)
|
|
8352
|
+
listarrayfiles = FoxFileListFiles(
|
|
8698
8353
|
instr, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, returnfp)
|
|
8699
|
-
return
|
|
8354
|
+
return listarrayfiles
|
|
8700
8355
|
|
|
8701
8356
|
|
|
8702
8357
|
def TarFileListFiles(infile, verbose=False, returnfp=False):
|
|
8703
8358
|
if(verbose):
|
|
8704
8359
|
logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
|
|
8705
8360
|
if(infile == "-"):
|
|
8706
|
-
infile =
|
|
8361
|
+
infile = MkTempFile()
|
|
8707
8362
|
if(hasattr(sys.stdin, "buffer")):
|
|
8708
8363
|
shutil.copyfileobj(sys.stdin.buffer, infile)
|
|
8709
8364
|
else:
|
|
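
FoxFileListFiles above prints either a 'newstyle' tab-separated line (type, compression, right-aligned size, name) or a classic ls-style line (permission string, owner/group, size, UTC mtime, name). A sketch of the two output formats with a made-up entry; the literal permission string stands in for PrintPermissionString():

# Sketch of the two listing styles; the entry is a stand-in for a real record.
import datetime

entry = {"fname": "./docs/readme.txt", "fcompression": "none", "fsize": 4096,
         "fmtime": 1724900000, "funame": "user", "fgname": "user"}

# newstyle: type, compression, right-aligned size, name
print("file" + "\t" + entry["fcompression"] + "\t" +
      str(entry["fsize"]).rjust(15) + "\t" + entry["fname"])

# classic ls-like line: permissions, owner/group, size, UTC mtime, name
stamp = datetime.datetime.utcfromtimestamp(entry["fmtime"]).strftime('%Y-%m-%d %H:%M')
print("-rw-r--r-- " + entry["funame"] + "/" + entry["fgname"] + " " +
      str(entry["fsize"]).rjust(15) + " " + stamp + " " + entry["fname"])
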
@@ -8712,7 +8367,7 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
|
|
|
8712
8367
|
if(not infile):
|
|
8713
8368
|
return False
|
|
8714
8369
|
infile.seek(0, 0)
|
|
8715
|
-
elif(re.findall(
|
|
8370
|
+
elif(re.findall(__download_proto_support__, infile)):
|
|
8716
8371
|
infile = download_file_from_internet_file(infile)
|
|
8717
8372
|
infile.seek(0, 0)
|
|
8718
8373
|
if(not infile):
|
|
@@ -8736,7 +8391,7 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
|
|
|
8736
8391
|
return False
|
|
8737
8392
|
try:
|
|
8738
8393
|
if(hasattr(infile, "read") or hasattr(infile, "write")):
|
|
8739
|
-
compresscheck = CheckCompressionType(infile, formatspecs, False)
|
|
8394
|
+
compresscheck = CheckCompressionType(infile, formatspecs, 0, False)
|
|
8740
8395
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
8741
8396
|
formatspecs = formatspecs[compresscheck]
|
|
8742
8397
|
if(compresscheck=="zstd"):
|
|
@@ -8748,7 +8403,7 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
|
|
|
8748
8403
|
else:
|
|
8749
8404
|
tarfp = tarfile.open(fileobj=infile, mode="r")
|
|
8750
8405
|
else:
|
|
8751
|
-
compresscheck = CheckCompressionType(infile, formatspecs, True)
|
|
8406
|
+
compresscheck = CheckCompressionType(infile, formatspecs, 0, True)
|
|
8752
8407
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
8753
8408
|
formatspecs = formatspecs[compresscheck]
|
|
8754
8409
|
if(compresscheck=="zstd"):
|
|
@@ -8819,7 +8474,7 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
|
|
|
8819
8474
|
member.size).rjust(15) + " " + datetime.datetime.utcfromtimestamp(member.mtime).strftime('%Y-%m-%d %H:%M') + " " + printfname)
|
|
8820
8475
|
lcfi = lcfi + 1
|
|
8821
8476
|
if(returnfp):
|
|
8822
|
-
return
|
|
8477
|
+
return listarrayfiles['fp']
|
|
8823
8478
|
else:
|
|
8824
8479
|
return True
|
|
8825
8480
|
|
|
@@ -8828,7 +8483,7 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
|
|
|
8828
8483
|
if(verbose):
|
|
8829
8484
|
logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
|
|
8830
8485
|
if(infile == "-"):
|
|
8831
|
-
infile =
|
|
8486
|
+
infile = MkTempFile()
|
|
8832
8487
|
if(hasattr(sys.stdin, "buffer")):
|
|
8833
8488
|
shutil.copyfileobj(sys.stdin.buffer, infile)
|
|
8834
8489
|
else:
|
|
@@ -8837,7 +8492,7 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
|
|
|
8837
8492
|
if(not infile):
|
|
8838
8493
|
return False
|
|
8839
8494
|
infile.seek(0, 0)
|
|
8840
|
-
elif(re.findall(
|
|
8495
|
+
elif(re.findall(__download_proto_support__, infile)):
|
|
8841
8496
|
infile = download_file_from_internet_file(infile)
|
|
8842
8497
|
infile.seek(0, 0)
|
|
8843
8498
|
if(not infile):
|
|
@@ -8952,7 +8607,7 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
|
|
|
8952
8607
|
15) + " " + datetime.datetime.utcfromtimestamp(int(time.mktime(member.date_time + (0, 0, -1)))).strftime('%Y-%m-%d %H:%M') + " " + printfname)
|
|
8953
8608
|
lcfi = lcfi + 1
|
|
8954
8609
|
if(returnfp):
|
|
8955
|
-
return
|
|
8610
|
+
return listarrayfiles['fp']
|
|
8956
8611
|
else:
|
|
8957
8612
|
return True
|
|
8958
8613
|
|
|
@@ -9090,7 +8745,7 @@ if(rarfile_support):
|
|
|
9090
8745
|
member.file_size).rjust(15) + " " + member.mtime.strftime('%Y-%m-%d %H:%M') + " " + printfname)
|
|
9091
8746
|
lcfi = lcfi + 1
|
|
9092
8747
|
if(returnfp):
|
|
9093
|
-
return
|
|
8748
|
+
return listarrayfiles['fp']
|
|
9094
8749
|
else:
|
|
9095
8750
|
return True
|
|
9096
8751
|
|
|
@@ -9108,7 +8763,7 @@ if(py7zr_support):
|
|
|
9108
8763
|
returnval = {}
|
|
9109
8764
|
szpfp = py7zr.SevenZipFile(infile, mode="r")
|
|
9110
8765
|
file_content = szpfp.readall()
|
|
9111
|
-
#sztest = szpfp.testzip()
|
|
8766
|
+
#sztest = szpfp.testzip()
|
|
9112
8767
|
sztestalt = szpfp.test()
|
|
9113
8768
|
if(sztestalt):
|
|
9114
8769
|
VerbosePrintOut("Bad file found!")
|
|
@@ -9197,7 +8852,7 @@ if(py7zr_support):
|
|
|
9197
8852
|
fsize).rjust(15) + " " + member.creationtime.strftime('%Y-%m-%d %H:%M') + " " + printfname)
|
|
9198
8853
|
lcfi = lcfi + 1
|
|
9199
8854
|
if(returnfp):
|
|
9200
|
-
return
|
|
8855
|
+
return listarrayfiles['fp']
|
|
9201
8856
|
else:
|
|
9202
8857
|
return True
|
|
9203
8858
|
|
|
@@ -9205,7 +8860,7 @@ if(py7zr_support):
|
|
|
9205
8860
|
def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict__, seektoend=False, newstyle=False, returnfp=False):
|
|
9206
8861
|
if(verbose):
|
|
9207
8862
|
logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
|
|
9208
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
8863
|
+
checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
|
|
9209
8864
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
9210
8865
|
formatspecs = formatspecs[checkcompressfile]
|
|
9211
8866
|
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
@@ -9224,12 +8879,12 @@ def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict_
|
|
|
9224
8879
|
|
|
9225
8880
|
|
|
9226
8881
|
def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, seekstart=0, seekend=0, skipchecksum=False, checksumtype=["crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
|
|
9227
|
-
outarray =
|
|
8882
|
+
outarray = MkTempFile()
|
|
9228
8883
|
packform = PackFoxFile(infiles, outarray, dirlistfromtxt, compression, compresswholefile,
|
|
9229
8884
|
compressionlevel, followlink, checksumtype, formatspecs, False, True)
|
|
9230
|
-
|
|
8885
|
+
listarrayfiles = FoxFileListFiles(
|
|
9231
8886
|
outarray, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
|
|
9232
|
-
return
|
|
8887
|
+
return listarrayfiles
|
|
9233
8888
|
|
|
9234
8889
|
"""
|
|
9235
8890
|
PyNeoFile compatibility layer
|
|
@@ -9242,7 +8897,7 @@ def make_empty_archive_file_pointer_neo(fp, fmttype=None, checksumtype='crc32',
|
|
|
9242
8897
|
return make_empty_file_pointer_neo(fp, fmttype, checksumtype, formatspecs, encoding)
|
|
9243
8898
|
|
|
9244
8899
|
def make_empty_file_neo(outfile=None, fmttype=None, checksumtype='crc32', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
|
|
9245
|
-
return MakeEmptyFile(outfile, fmttype, "auto", False, None, checksumtype, formatspecs, returnfp)
|
|
8900
|
+
return MakeEmptyFile(outfile, fmttype, "auto", False, None, compressionlistalt, checksumtype, formatspecs, returnfp)
|
|
9246
8901
|
|
|
9247
8902
|
def make_empty_archive_file_neo(outfile=None, fmttype=None, checksumtype='crc32', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
|
|
9248
8903
|
return make_empty_file_neo(outfile, fmttype, checksumtype, formatspecs, encoding, returnfp)
|
|
@@ -9251,7 +8906,7 @@ def pack_neo(infiles, outfile=None, formatspecs=__file_format_multi_dict__, chec
|
|
|
9251
8906
|
return PackFoxFile(infiles, outfile, False, "auto", compression, False, compression_level, compressionlistalt, False, checksumtypes, [], {}, formatspecs, False, returnfp)
|
|
9252
8907
|
|
|
9253
8908
|
def archive_to_array_neo(infile, formatspecs=__file_format_multi_dict__, listonly=False, skipchecksum=False, uncompress=True, returnfp=False):
|
|
9254
|
-
return FoxFileToArray(infile, "auto", 0, 0, listonly, True, uncompress, skipchecksum, formatspecs, False, returnfp)
|
|
8909
|
+
return FoxFileToArray(infile, "auto", 0, 0, 0, listonly, True, uncompress, skipchecksum, formatspecs, False, returnfp)
|
|
9255
8910
|
|
|
9256
8911
|
def unpack_neo(infile, outdir='.', formatspecs=__file_format_multi_dict__, skipchecksum=False, uncompress=True, returnfp=False):
|
|
9257
8912
|
return UnPackFoxFile(infile, outdir, False, 0, 0, skipchecksum, formatspecs, True, True, False, False, returnfp)
|
|
@@ -9266,13 +8921,26 @@ def archivefilelistfiles_neo(infile, formatspecs=__file_format_multi_dict__, adv
|
|
|
9266
8921
|
return FoxFileListFiles(infile, "auto", 0, 0, False, formatspecs, False, True, advanced, returnfp)
|
|
9267
8922
|
|
|
9268
8923
|
def convert_foreign_to_neo(infile, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["crc32", "crc32", "crc32", "crc32"], compression="auto", compression_level=None, returnfp=False):
|
|
9269
|
-
intmp = InFileToArray(infile, 0, 0, False, True, False, formatspecs, False, False)
|
|
8924
|
+
intmp = InFileToArray(infile, 0, 0, 0, False, True, False, formatspecs, False, False)
|
|
9270
8925
|
return RePackFoxFile(intmp, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)
|
|
9271
8926
|
|
|
8927
|
+
def detect_cwd(ftp, file_dir):
|
|
8928
|
+
"""
|
|
8929
|
+
Test whether cwd into file_dir works. Returns True if it does,
|
|
8930
|
+
False if not (so absolute paths should be used).
|
|
8931
|
+
"""
|
|
8932
|
+
if not file_dir or file_dir in ("/", ""):
|
|
8933
|
+
return False # nothing to cwd into
|
|
8934
|
+
try:
|
|
8935
|
+
ftp.cwd(file_dir)
|
|
8936
|
+
return True
|
|
8937
|
+
except all_errors:
|
|
8938
|
+
return False
|
|
8939
|
+
|
|
9272
8940
|
def download_file_from_ftp_file(url):
|
|
9273
8941
|
urlparts = urlparse(url)
|
|
9274
|
-
file_name = os.path.basename(urlparts.path)
|
|
9275
|
-
file_dir = os.path.dirname(urlparts.path)
|
|
8942
|
+
file_name = os.path.basename(unquote(urlparts.path))
|
|
8943
|
+
file_dir = os.path.dirname(unquote(urlparts.path))
|
|
9276
8944
|
if(urlparts.username is not None):
|
|
9277
8945
|
ftp_username = urlparts.username
|
|
9278
8946
|
else:
|
|
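
detect_cwd() above probes whether the server accepts CWD into the file's directory, so the caller can choose between a bare RETR by file name and a RETR with the absolute path. A usage sketch, assuming the helper is importable from pyfoxfile and using a placeholder host and path:

# Usage sketch for the new detect_cwd() helper; host and path are placeholders.
from ftplib import FTP
from pyfoxfile import detect_cwd

ftp = FTP("ftp.example.com", timeout=30)    # placeholder host
ftp.login()                                  # anonymous login for the sketch
with open("data.fox", "wb") as out:
    if detect_cwd(ftp, "/pub/archives"):
        # server accepted CWD, fetch by bare file name
        ftp.retrbinary("RETR data.fox", out.write)
    else:
        # fall back to the absolute path
        ftp.retrbinary("RETR /pub/archives/data.fox", out.write)
ftp.quit()
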
@@ -9289,7 +8957,7 @@ def download_file_from_ftp_file(url):
|
|
|
9289
8957
|
ftp = FTP_TLS()
|
|
9290
8958
|
else:
|
|
9291
8959
|
return False
|
|
9292
|
-
if(urlparts.scheme == "sftp"):
|
|
8960
|
+
if(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
|
|
9293
8961
|
if(__use_pysftp__):
|
|
9294
8962
|
return download_file_from_pysftp_file(url)
|
|
9295
8963
|
else:
|
|
@@ -9307,26 +8975,70 @@ def download_file_from_ftp_file(url):
|
|
|
9307
8975
|
except socket.timeout:
|
|
9308
8976
|
log.info("Error With URL "+url)
|
|
9309
8977
|
return False
|
|
9310
|
-
|
|
9311
|
-
|
|
9312
|
-
|
|
9313
|
-
|
|
9314
|
-
|
|
9315
|
-
|
|
8978
|
+
if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
|
|
8979
|
+
try:
|
|
8980
|
+
ftp.auth()
|
|
8981
|
+
except all_errors:
|
|
8982
|
+
pass
|
|
8983
|
+
ftp.login(ftp_username, ftp_password)
|
|
8984
|
+
if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
|
|
8985
|
+
try:
|
|
8986
|
+
ftp.prot_p()
|
|
8987
|
+
except all_errors:
|
|
8988
|
+
ftp.prot_c()
|
|
8989
|
+
# UTF-8 filenames if supported
|
|
8990
|
+
try:
|
|
8991
|
+
ftp.sendcmd("OPTS UTF8 ON")
|
|
8992
|
+
ftp.encoding = "utf-8"
|
|
8993
|
+
except all_errors:
|
|
8994
|
+
pass
|
|
8995
|
+
is_cwd_allowed = detect_cwd(ftp, file_dir)
|
|
8996
|
+
ftpfile = MkTempFile()
|
|
8997
|
+
# Try EPSV first, then fall back
|
|
8998
|
+
try:
|
|
8999
|
+
ftp.force_epsv = True
|
|
9000
|
+
ftp.sendcmd("EPSV") # request extended passive
|
|
9001
|
+
if(is_cwd_allowed):
|
|
9002
|
+
ftp.retrbinary("RETR "+file_name, ftpfile.write)
|
|
9003
|
+
else:
|
|
9004
|
+
ftp.retrbinary("RETR "+unquote(urlparts.path), ftpfile.write)
|
|
9005
|
+
except all_errors:
|
|
9006
|
+
try:
|
|
9007
|
+
ftp.set_pasv(True)
|
|
9008
|
+
if(is_cwd_allowed):
|
|
9009
|
+
ftp.retrbinary("RETR "+file_name, ftpfile.write)
|
|
9010
|
+
else:
|
|
9011
|
+
ftp.retrbinary("RETR "+unquote(urlparts.path), ftpfile.write)
|
|
9012
|
+
except all_errors:
|
|
9013
|
+
ftp.set_pasv(False)
|
|
9014
|
+
if(is_cwd_allowed):
|
|
9015
|
+
ftp.retrbinary("RETR "+file_name, ftpfile.write)
|
|
9016
|
+
else:
|
|
9017
|
+
ftp.retrbinary("RETR "+unquote(urlparts.path), ftpfile.write)
|
|
9316
9018
|
ftp.close()
|
|
9317
9019
|
ftpfile.seek(0, 0)
|
|
9318
9020
|
return ftpfile
|
|
9319
9021
|
|
|
9320
9022
|
|
|
9023
|
+
def download_file_from_ftps_file(url):
|
|
9024
|
+
return download_file_from_ftp_file(url)
|
|
9025
|
+
|
|
9026
|
+
|
|
9321
9027
|
def download_file_from_ftp_string(url):
|
|
9322
9028
|
ftpfile = download_file_from_ftp_file(url)
|
|
9323
|
-
|
|
9029
|
+
ftpout = ftpfile.read()
|
|
9030
|
+
ftpfile.close()
|
|
9031
|
+
return ftpout
|
|
9032
|
+
|
|
9033
|
+
|
|
9034
|
+
def download_file_from_ftps_string(url):
|
|
9035
|
+
return download_file_from_ftp_string(url)
|
|
9324
9036
|
|
|
9325
9037
|
|
|
9326
9038
|
def upload_file_to_ftp_file(ftpfile, url):
|
|
9327
9039
|
urlparts = urlparse(url)
|
|
9328
|
-
file_name = os.path.basename(urlparts.path)
|
|
9329
|
-
file_dir = os.path.dirname(urlparts.path)
|
|
9040
|
+
file_name = os.path.basename(unquote(urlparts.path))
|
|
9041
|
+
file_dir = os.path.dirname(unquote(urlparts.path))
|
|
9330
9042
|
if(urlparts.username is not None):
|
|
9331
9043
|
ftp_username = urlparts.username
|
|
9332
9044
|
else:
|
|
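
The download path above now tries extended passive mode first, then plain passive, then active mode, retrying the RETR after each failure (all_errors from ftplib is the catch-all). A standalone sketch of that fallback with a placeholder host and path:

# Standalone sketch of the EPSV -> passive -> active retrieval fallback.
import io
from ftplib import FTP, all_errors

def fetch_with_fallback(ftp, path):
    buf = io.BytesIO()
    def restart():
        buf.seek(0)
        buf.truncate(0)
    try:
        ftp.sendcmd("EPSV")               # ask for extended passive mode first
        ftp.retrbinary("RETR " + path, buf.write)
    except all_errors:
        try:
            restart()
            ftp.set_pasv(True)            # plain passive mode
            ftp.retrbinary("RETR " + path, buf.write)
        except all_errors:
            restart()
            ftp.set_pasv(False)           # last resort: active mode
            ftp.retrbinary("RETR " + path, buf.write)
    buf.seek(0)
    return buf

ftp = FTP("ftp.example.com", timeout=30)   # placeholder host
ftp.login()                                 # anonymous login for the sketch
data = fetch_with_fallback(ftp, "/pub/archives/data.fox")
ftp.quit()
print(len(data.getvalue()), "bytes downloaded")
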
@@ -9343,7 +9055,7 @@ def upload_file_to_ftp_file(ftpfile, url):
|
|
|
9343
9055
|
ftp = FTP_TLS()
|
|
9344
9056
|
else:
|
|
9345
9057
|
return False
|
|
9346
|
-
if(urlparts.scheme == "sftp"):
|
|
9058
|
+
if(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
|
|
9347
9059
|
if(__use_pysftp__):
|
|
9348
9060
|
return upload_file_to_pysftp_file(url)
|
|
9349
9061
|
else:
|
|
@@ -9361,22 +9073,66 @@ def upload_file_to_ftp_file(ftpfile, url):
|
|
|
9361
9073
|
except socket.timeout:
|
|
9362
9074
|
log.info("Error With URL "+url)
|
|
9363
9075
|
return False
|
|
9364
|
-
|
|
9365
|
-
|
|
9366
|
-
|
|
9367
|
-
|
|
9076
|
+
if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
|
|
9077
|
+
try:
|
|
9078
|
+
ftp.auth()
|
|
9079
|
+
except all_errors:
|
|
9080
|
+
pass
|
|
9081
|
+
ftp.login(ftp_username, ftp_password)
|
|
9082
|
+
if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
|
|
9083
|
+
try:
|
|
9084
|
+
ftp.prot_p()
|
|
9085
|
+
except all_errors:
|
|
9086
|
+
ftp.prot_c()
|
|
9087
|
+
# UTF-8 filenames if supported
|
|
9088
|
+
try:
|
|
9089
|
+
ftp.sendcmd("OPTS UTF8 ON")
|
|
9090
|
+
ftp.encoding = "utf-8"
|
|
9091
|
+
except all_errors:
|
|
9092
|
+
pass
|
|
9093
|
+
is_cwd_allowed = detect_cwd(ftp, file_dir)
|
|
9094
|
+
ftpfile.seek(0, 0)
|
|
9095
|
+
# Try EPSV first, then fall back
|
|
9096
|
+
try:
|
|
9097
|
+
ftp.force_epsv = True
|
|
9098
|
+
ftp.sendcmd("EPSV") # request extended passive
|
|
9099
|
+
if(is_cwd_allowed):
|
|
9100
|
+
ftp.storbinary("STOR "+file_name, ftpfile)
|
|
9101
|
+
else:
|
|
9102
|
+
ftp.storbinary("STOR "+unquote(urlparts.path), ftpfile)
|
|
9103
|
+
except all_errors:
|
|
9104
|
+
try:
|
|
9105
|
+
ftp.set_pasv(True)
|
|
9106
|
+
if(is_cwd_allowed):
|
|
9107
|
+
ftp.storbinary("STOR "+file_name, ftpfile)
|
|
9108
|
+
else:
|
|
9109
|
+
ftp.storbinary("STOR "+unquote(urlparts.path), ftpfile)
|
|
9110
|
+
except all_errors:
|
|
9111
|
+
ftp.set_pasv(False)
|
|
9112
|
+
if(is_cwd_allowed):
|
|
9113
|
+
ftp.storbinary("STOR "+file_name, ftpfile)
|
|
9114
|
+
else:
|
|
9115
|
+
ftp.storbinary("STOR "+unquote(urlparts.path), ftpfile)
|
|
9368
9116
|
ftp.close()
|
|
9369
9117
|
ftpfile.seek(0, 0)
|
|
9370
9118
|
return ftpfile
|
|
9371
9119
|
|
|
9372
9120
|
|
|
9121
|
+
def upload_file_to_ftps_file(ftpfile, url):
|
|
9122
|
+
return upload_file_to_ftp_file(ftpfile, url)
|
|
9123
|
+
|
|
9124
|
+
|
|
9373
9125
|
def upload_file_to_ftp_string(ftpstring, url):
|
|
9374
|
-
ftpfileo =
|
|
9126
|
+
ftpfileo = MkTempFile(ftpstring)
|
|
9375
9127
|
ftpfile = upload_file_to_ftp_file(ftpfileo, url)
|
|
9376
9128
|
ftpfileo.close()
|
|
9377
9129
|
return ftpfile
|
|
9378
9130
|
|
|
9379
9131
|
|
|
9132
|
+
def upload_file_to_ftps_string(ftpstring, url):
|
|
9133
|
+
return upload_file_to_ftp_string(ftpstring, url)
|
|
9134
|
+
|
|
9135
|
+
|
|
9380
9136
|
class RawIteratorWrapper:
|
|
9381
9137
|
def __init__(self, iterator):
|
|
9382
9138
|
self.iterator = iterator
|
|
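
Both the download and upload helpers now run the URL path through unquote() before splitting it, so percent-encoded characters (spaces and the like) are decoded before being handed to the server. A quick illustration with a placeholder URL:

# Why the paths are now passed through unquote() before basename/dirname.
import os
from urllib.parse import urlparse, unquote

url = "ftp://ftp.example.com/pub/My%20Archives/data%20set.fox"   # placeholder URL
path = unquote(urlparse(url).path)

print(path)                      # /pub/My Archives/data set.fox
print(os.path.basename(path))    # data set.fox
print(os.path.dirname(path))     # /pub/My Archives
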
@@ -9414,7 +9170,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
|
|
|
9414
9170
|
urlparts.params, urlparts.query, urlparts.fragment))
|
|
9415
9171
|
|
|
9416
9172
|
# Handle SFTP/FTP
|
|
9417
|
-
if urlparts.scheme == "sftp":
|
|
9173
|
+
if urlparts.scheme == "sftp" or urlparts.scheme == "scp":
|
|
9418
9174
|
if __use_pysftp__:
|
|
9419
9175
|
return download_file_from_pysftp_file(url)
|
|
9420
9176
|
else:
|
|
@@ -9423,7 +9179,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
|
|
|
9423
9179
|
return download_file_from_ftp_file(url)
|
|
9424
9180
|
|
|
9425
9181
|
# Create a temporary file object
|
|
9426
|
-
httpfile =
|
|
9182
|
+
httpfile = MkTempFile()
|
|
9427
9183
|
|
|
9428
9184
|
# 1) Requests branch
|
|
9429
9185
|
if usehttp == 'requests' and haverequests:
|
|
@@ -9487,14 +9243,16 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
|
|
|
9487
9243
|
|
|
9488
9244
|
def download_file_from_http_string(url, headers=geturls_headers_pyfile_python_alt, usehttp=__use_http_lib__):
|
|
9489
9245
|
httpfile = download_file_from_http_file(url, headers, usehttp)
|
|
9490
|
-
|
|
9246
|
+
httpout = httpfile.read()
|
|
9247
|
+
httpfile.close()
|
|
9248
|
+
return httpout
|
|
9491
9249
|
|
|
9492
9250
|
|
|
9493
9251
|
if(haveparamiko):
|
|
9494
9252
|
def download_file_from_sftp_file(url):
|
|
9495
9253
|
urlparts = urlparse(url)
|
|
9496
|
-
file_name = os.path.basename(urlparts.path)
|
|
9497
|
-
file_dir = os.path.dirname(urlparts.path)
|
|
9254
|
+
file_name = os.path.basename(unquote(urlparts.path))
|
|
9255
|
+
file_dir = os.path.dirname(unquote(urlparts.path))
|
|
9498
9256
|
sftp_port = urlparts.port
|
|
9499
9257
|
if(urlparts.port is None):
|
|
9500
9258
|
sftp_port = 22
|
|
@@ -9514,14 +9272,14 @@ if(haveparamiko):
|
|
|
9514
9272
|
return download_file_from_ftp_file(url)
|
|
9515
9273
|
elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
|
|
9516
9274
|
return download_file_from_http_file(url)
|
|
9517
|
-
if(urlparts.scheme != "sftp"):
|
|
9275
|
+
if(urlparts.scheme != "sftp" and urlparts.scheme != "scp"):
|
|
9518
9276
|
return False
|
|
9519
9277
|
ssh = paramiko.SSHClient()
|
|
9520
9278
|
ssh.load_system_host_keys()
|
|
9521
9279
|
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
|
|
9522
9280
|
try:
|
|
9523
9281
|
ssh.connect(urlparts.hostname, port=sftp_port,
|
|
9524
|
-
username=
|
|
9282
|
+
username=sftp_username, password=urlparts.password)
|
|
9525
9283
|
except paramiko.ssh_exception.SSHException:
|
|
9526
9284
|
return False
|
|
9527
9285
|
except socket.gaierror:
|
|
@@ -9531,8 +9289,8 @@ if(haveparamiko):
|
|
|
9531
9289
|
log.info("Error With URL "+url)
|
|
9532
9290
|
return False
|
|
9533
9291
|
sftp = ssh.open_sftp()
|
|
9534
|
-
sftpfile =
|
|
9535
|
-
sftp.getfo(urlparts.path, sftpfile)
|
|
9292
|
+
sftpfile = MkTempFile()
|
|
9293
|
+
sftp.getfo(unquote(urlparts.path), sftpfile)
|
|
9536
9294
|
sftp.close()
|
|
9537
9295
|
ssh.close()
|
|
9538
9296
|
sftpfile.seek(0, 0)
|
|
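
The paramiko branch fetches the remote file straight into an in-memory buffer with SFTPClient.getfo(), mirroring the temporary-file usage above. A standalone sketch with placeholder credentials, host and path:

# Standalone sketch of the paramiko download path: fetch a remote file
# into an in-memory buffer with getfo(). All connection details are placeholders.
import io
import paramiko

ssh = paramiko.SSHClient()
ssh.load_system_host_keys()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect("sftp.example.com", port=22, username="user", password="secret")  # placeholders

sftp = ssh.open_sftp()
buf = io.BytesIO()
sftp.getfo("/pub/archives/data.fox", buf)    # placeholder remote path
sftp.close()
ssh.close()

buf.seek(0)
print(len(buf.getvalue()), "bytes downloaded")
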
@@ -9544,7 +9302,9 @@ else:
|
|
|
9544
9302
|
if(haveparamiko):
|
|
9545
9303
|
def download_file_from_sftp_string(url):
|
|
9546
9304
|
sftpfile = download_file_from_sftp_file(url)
|
|
9547
|
-
|
|
9305
|
+
sftpout = sftpfile.read()
|
|
9306
|
+
sftpfile.close()
|
|
9307
|
+
return sftpout
|
|
9548
9308
|
else:
|
|
9549
9309
|
def download_file_from_sftp_string(url):
|
|
9550
9310
|
return False
|
|
@@ -9552,8 +9312,8 @@ else:
|
|
|
9552
9312
|
if(haveparamiko):
|
|
9553
9313
|
def upload_file_to_sftp_file(sftpfile, url):
|
|
9554
9314
|
urlparts = urlparse(url)
|
|
9555
|
-
file_name = os.path.basename(urlparts.path)
|
|
9556
|
-
file_dir = os.path.dirname(urlparts.path)
|
|
9315
|
+
file_name = os.path.basename(unquote(urlparts.path))
|
|
9316
|
+
file_dir = os.path.dirname(unquote(urlparts.path))
|
|
9557
9317
|
sftp_port = urlparts.port
|
|
9558
9318
|
if(urlparts.port is None):
|
|
9559
9319
|
sftp_port = 22
|
|
@@ -9570,17 +9330,17 @@ if(haveparamiko):
         else:
             sftp_password = ""
         if(urlparts.scheme == "ftp"):
-            return upload_file_to_ftp_file(url)
+            return upload_file_to_ftp_file(sftpfile, url)
         elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
             return False
-        if(urlparts.scheme != "sftp"):
+        if(urlparts.scheme != "sftp" and urlparts.scheme != "scp"):
             return False
         ssh = paramiko.SSHClient()
         ssh.load_system_host_keys()
         ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
         try:
             ssh.connect(urlparts.hostname, port=sftp_port,
-                        username=
+                        username=sftp_username, password=sftp_password)
         except paramiko.ssh_exception.SSHException:
             return False
         except socket.gaierror:
@@ -9590,7 +9350,8 @@ if(haveparamiko):
             log.info("Error With URL "+url)
             return False
         sftp = ssh.open_sftp()
-
+        sftpfile.seek(0, 0)
+        sftp.putfo(sftpfile, unquote(urlparts.path))
         sftp.close()
         ssh.close()
         sftpfile.seek(0, 0)
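The upload side mirrors the download: the buffer is rewound before paramiko's putfo streams it to the %-decoded remote path, and rewound again afterwards so the caller can keep using it. A minimal sketch under the same assumptions as above:

import paramiko
from urllib.parse import urlparse, unquote

def sftp_upload_sketch(fileobj, url):
    parts = urlparse(url)
    ssh = paramiko.SSHClient()
    ssh.load_system_host_keys()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    ssh.connect(parts.hostname, port=parts.port or 22,
                username=parts.username, password=parts.password)
    sftp = ssh.open_sftp()
    fileobj.seek(0, 0)                        # make sure the whole buffer is sent
    sftp.putfo(fileobj, unquote(parts.path))  # write to the decoded remote path
    sftp.close()
    ssh.close()
    fileobj.seek(0, 0)                        # leave the buffer rewound for the caller
    return fileobj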
@@ -9601,7 +9362,7 @@ else:
 
 if(haveparamiko):
     def upload_file_to_sftp_string(sftpstring, url):
-        sftpfileo =
+        sftpfileo = MkTempFile(sftpstring)
         sftpfile = upload_file_to_sftp_files(sftpfileo, url)
         sftpfileo.close()
         return sftpfile
@@ -9612,8 +9373,8 @@ else:
 if(havepysftp):
     def download_file_from_pysftp_file(url):
         urlparts = urlparse(url)
-        file_name = os.path.basename(urlparts.path)
-        file_dir = os.path.dirname(urlparts.path)
+        file_name = os.path.basename(unquote(urlparts.path))
+        file_dir = os.path.dirname(unquote(urlparts.path))
         sftp_port = urlparts.port
         if(urlparts.port is None):
             sftp_port = 22
@@ -9633,11 +9394,11 @@ if(havepysftp):
             return download_file_from_ftp_file(url)
         elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
             return download_file_from_http_file(url)
-        if(urlparts.scheme != "sftp"):
+        if(urlparts.scheme != "sftp" and urlparts.scheme != "scp"):
             return False
         try:
-            pysftp.Connection(urlparts.hostname, port=sftp_port,
-                              username=
+            sftp = pysftp.Connection(urlparts.hostname, port=sftp_port,
+                                     username=sftp_username, password=sftp_password)
         except paramiko.ssh_exception.SSHException:
             return False
         except socket.gaierror:
@@ -9646,9 +9407,8 @@ if(havepysftp):
         except socket.timeout:
             log.info("Error With URL "+url)
             return False
-
-
-        sftp.getfo(urlparts.path, sftpfile)
+        sftpfile = MkTempFile()
+        sftp.getfo(unquote(urlparts.path), sftpfile)
         sftp.close()
         ssh.close()
         sftpfile.seek(0, 0)
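In the pysftp branch the notable fix is that the Connection object is now actually assigned to sftp (the old code discarded the return value, so the later sftp.getfo call had nothing to work with), and the temporary buffer is created before the transfer. A sketch of the equivalent flow with pysftp, under the same URL assumptions; pysftp exposes getfo/putfo much like paramiko's SFTP client:

import tempfile
import pysftp
from urllib.parse import urlparse, unquote

def pysftp_download_sketch(url):
    parts = urlparse(url)
    # Connection is used as a context manager so it is always closed.
    with pysftp.Connection(parts.hostname, port=parts.port or 22,
                           username=parts.username, password=parts.password) as sftp:
        buf = tempfile.SpooledTemporaryFile()
        sftp.getfo(unquote(parts.path), buf)   # copy the remote file into the buffer
    buf.seek(0, 0)
    return buf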
@@ -9660,7 +9420,9 @@ else:
 if(havepysftp):
     def download_file_from_pysftp_string(url):
         sftpfile = download_file_from_pysftp_file(url)
-
+        sftpout = sftpfile.read()
+        sftpfile.close()
+        return sftpout
 else:
     def download_file_from_pysftp_string(url):
         return False
@@ -9668,8 +9430,8 @@ else:
 if(havepysftp):
     def upload_file_to_pysftp_file(sftpfile, url):
         urlparts = urlparse(url)
-        file_name = os.path.basename(urlparts.path)
-        file_dir = os.path.dirname(urlparts.path)
+        file_name = os.path.basename(unquote(urlparts.path))
+        file_dir = os.path.dirname(unquote(urlparts.path))
         sftp_port = urlparts.port
         if(urlparts.port is None):
             sftp_port = 22
@@ -9686,14 +9448,14 @@ if(havepysftp):
         else:
             sftp_password = ""
         if(urlparts.scheme == "ftp"):
-            return upload_file_to_ftp_file(url)
+            return upload_file_to_ftp_file(sftpfile, url)
         elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
             return False
-        if(urlparts.scheme != "sftp"):
+        if(urlparts.scheme != "sftp" and urlparts.scheme != "scp"):
             return False
         try:
-            pysftp.Connection(urlparts.hostname, port=sftp_port,
-                              username=
+            sftp = pysftp.Connection(urlparts.hostname, port=sftp_port,
+                                     username=sftp_username, password=sftp_password)
         except paramiko.ssh_exception.SSHException:
             return False
         except socket.gaierror:
@@ -9702,8 +9464,8 @@ if(havepysftp):
         except socket.timeout:
             log.info("Error With URL "+url)
             return False
-
-        sftp.putfo(sftpfile, urlparts.path)
+        sftpfile.seek(0, 0)
+        sftp.putfo(sftpfile, unquote(urlparts.path))
         sftp.close()
         ssh.close()
         sftpfile.seek(0, 0)
@@ -9714,7 +9476,7 @@ else:
 
 if(havepysftp):
     def upload_file_to_pysftp_string(sftpstring, url):
-        sftpfileo =
+        sftpfileo = MkTempFile(sftpstring)
         sftpfile = upload_file_to_pysftp_file(ftpfileo, url)
         sftpfileo.close()
         return sftpfile
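The string-upload wrappers now stage the caller's bytes in a temporary file before handing them to the matching *_file helper (note that the hunk above still passes ftpfileo rather than sftpfileo to upload_file_to_pysftp_file, a variable-name slip that is unchanged by this release). The staging step in isolation, with BytesIO standing in for MkTempFile:

from io import BytesIO

def upload_bytes_sketch(data, url, upload_file_func):
    # Wrap raw bytes in a seekable file object so the *_file helper can
    # rewind and stream it, then close the wrapper afterwards.
    fileobj = BytesIO(data)
    result = upload_file_func(fileobj, url)
    fileobj.close()
    return result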
@@ -9729,7 +9491,7 @@ def download_file_from_internet_file(url, headers=geturls_headers_pyfile_python_
         return download_file_from_http_file(url, headers, usehttp)
     elif(urlparts.scheme == "ftp" or urlparts.scheme == "ftps"):
         return download_file_from_ftp_file(url)
-    elif(urlparts.scheme == "sftp"):
+    elif(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
         if(__use_pysftp__ and havepysftp):
             return download_file_from_pysftp_file(url)
         else:
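download_file_from_internet_file dispatches purely on the URL scheme; the change here is that "scp" is now routed the same way as "sftp". A condensed, self-contained sketch of that dispatch shape (the transport labels are illustrative):

from urllib.parse import urlparse

def pick_transport(url):
    # Map URL schemes onto transport families the way the dispatcher does;
    # "scp" is treated as an alias for "sftp".
    scheme = urlparse(url).scheme
    if scheme in ("http", "https"):
        return "http"
    if scheme in ("ftp", "ftps"):
        return "ftp"
    if scheme in ("sftp", "scp"):
        return "sftp"
    return None

print(pick_transport("scp://host/some/file"))   # -> "sftp"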
@@ -9739,9 +9501,9 @@ def download_file_from_internet_file(url, headers=geturls_headers_pyfile_python_
         return False
 
 
-def download_file_from_internet_uncompress_file(url, headers=geturls_headers_pyfile_python_alt, formatspecs=__file_format_dict__):
+def download_file_from_internet_uncompress_file(url, headers=geturls_headers_pyfile_python_alt, filestart=0, formatspecs=__file_format_dict__):
     fp = download_file_from_internet_file(url)
-    fp = UncompressFileAlt(fp, formatspecs)
+    fp = UncompressFileAlt(fp, formatspecs, filestart)
     fp.seek(0, 0)
     if(not fp):
         return False
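Both uncompress helpers gain a filestart parameter that is forwarded to UncompressFileAlt, presumably so decompression can begin at a given offset inside the downloaded stream instead of at byte 0. A hedged sketch of that idea, using gzip as a stand-in for the package's own compression autodetection:

import gzip
import io

def uncompress_from_offset_sketch(fileobj, filestart=0):
    # Skip any leading bytes (e.g. a container header) before decompressing.
    fileobj.seek(filestart, 0)
    return gzip.GzipFile(fileobj=fileobj, mode="rb")

# Usage: wrap a downloaded stream whose gzip payload starts 128 bytes in.
payload = io.BytesIO(b"\x00" * 128 + gzip.compress(b"hello world"))
print(uncompress_from_offset_sketch(payload, filestart=128).read())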
@@ -9754,7 +9516,7 @@ def download_file_from_internet_string(url, headers=geturls_headers_pyfile_pytho
         return download_file_from_http_string(url, headers)
     elif(urlparts.scheme == "ftp" or urlparts.scheme == "ftps"):
         return download_file_from_ftp_string(url)
-    elif(urlparts.scheme == "sftp"):
+    elif(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
         if(__use_pysftp__ and havepysftp):
             return download_file_from_pysftp_string(url)
         else:
@@ -9764,13 +9526,15 @@ def download_file_from_internet_string(url, headers=geturls_headers_pyfile_pytho
         return False
 
 
-def download_file_from_internet_uncompress_string(url, headers=geturls_headers_pyfile_python_alt, formatspecs=__file_format_dict__):
+def download_file_from_internet_uncompress_string(url, headers=geturls_headers_pyfile_python_alt, filestart=0, formatspecs=__file_format_dict__):
     fp = download_file_from_internet_string(url)
-    fp = UncompressFileAlt(fp, formatspecs)
-    fp.seek(0, 0)
+    fp = UncompressFileAlt(fp, formatspecs, filestart)
     if(not fp):
         return False
-
+    fp.seek(0, 0)
+    fpout = fp.read()
+    fp.close
+    return fpout
 
 
 def upload_file_to_internet_file(ifp, url):
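The string variant now reads the decompressed stream and returns the bytes; note that the added line spells fp.close without parentheses, so as written it only references the method and leaves the handle to the garbage collector rather than closing it. A safer sketch of the same read-and-return step:

def drain_stream(fp):
    # Return the full decompressed payload and make sure the handle is closed.
    if not fp:
        return False
    fp.seek(0, 0)
    try:
        return fp.read()
    finally:
        fp.close()   # actually invoke close(), unlike a bare "fp.close"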
@@ -9779,7 +9543,7 @@ def upload_file_to_internet_file(ifp, url):
         return False
     elif(urlparts.scheme == "ftp" or urlparts.scheme == "ftps"):
         return upload_file_to_ftp_file(ifp, url)
-    elif(urlparts.scheme == "sftp"):
+    elif(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
         if(__use_pysftp__ and havepysftp):
             return upload_file_to_pysftp_file(ifp, url)
         else:
@@ -9795,8 +9559,7 @@ def upload_file_to_internet_compress_file(ifp, url, compression="auto", compress
     if(not foxfileout):
         return False
     fp.seek(0, 0)
-    upload_file_to_internet_file(fp, outfile)
-    return True
+    return upload_file_to_internet_file(fp, outfile)
 
 
 def upload_file_to_internet_string(ifp, url):
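Returning the result of upload_file_to_internet_file instead of an unconditional True lets a failed upload propagate to the caller. The same shape in a generic sketch, with the two callables standing in for the package's CompressOpenFileAlt and upload_file_to_internet_file helpers:

def compress_and_upload_sketch(fp, outfile, compress_func, upload_func):
    # Compress, rewind, then return whatever the uploader reports
    # instead of masking failures with a hard-coded True.
    fp = compress_func(fp)
    if not fp:
        return False
    fp.seek(0, 0)
    return upload_func(fp, outfile)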
@@ -9805,7 +9568,7 @@ def upload_file_to_internet_string(ifp, url):
         return False
     elif(urlparts.scheme == "ftp" or urlparts.scheme == "ftps"):
         return upload_file_to_ftp_string(ifp, url)
-    elif(urlparts.scheme == "sftp"):
+    elif(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
         if(__use_pysftp__ and havepysftp):
             return upload_file_to_pysftp_string(ifp, url)
         else:
@@ -9816,10 +9579,10 @@ def upload_file_to_internet_string(ifp, url):
 
 
 def upload_file_to_internet_compress_string(ifp, url, compression="auto", compressionlevel=None, compressionuselist=compressionlistalt, formatspecs=__file_format_dict__):
+    internetfileo = MkTempFile(ifp)
     fp = CompressOpenFileAlt(
-
+        internetfileo, compression, compressionlevel, compressionuselist, formatspecs)
     if(not foxfileout):
         return False
     fp.seek(0, 0)
-    upload_file_to_internet_file(fp, outfile)
-    return True
+    return upload_file_to_internet_file(fp, outfile)
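The string flavour now stages the caller's data in a temporary file (internetfileo) before passing it to the compressor and uploader. A closing sketch of that compress-then-upload round trip, with gzip and BytesIO standing in for the package's MkTempFile and CompressOpenFileAlt helpers:

import gzip
from io import BytesIO

def compress_string_and_upload_sketch(data, url, upload_func):
    # Stage the raw bytes in a seekable buffer (the role MkTempFile plays),
    # gzip-compress them into a second buffer, then hand that to the uploader.
    staged = BytesIO(data)
    compressed = BytesIO(gzip.compress(staged.read()))
    compressed.seek(0, 0)
    return upload_func(compressed, url)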