PyCatFile 0.21.2__py3-none-any.whl → 0.22.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pycatfile-0.21.2.data → pycatfile-0.22.2.data}/scripts/catfile.py +15 -15
- {pycatfile-0.21.2.data → pycatfile-0.22.2.data}/scripts/neocatfile.py +1 -1
- {pycatfile-0.21.2.dist-info → pycatfile-0.22.2.dist-info}/METADATA +4 -4
- pycatfile-0.22.2.dist-info/RECORD +10 -0
- pycatfile.py +1117 -1355
- pycatfile-0.21.2.dist-info/RECORD +0 -10
- {pycatfile-0.21.2.data → pycatfile-0.22.2.data}/scripts/catneofile.py +0 -0
- {pycatfile-0.21.2.dist-info → pycatfile-0.22.2.dist-info}/WHEEL +0 -0
- {pycatfile-0.21.2.dist-info → pycatfile-0.22.2.dist-info}/licenses/LICENSE +0 -0
- {pycatfile-0.21.2.dist-info → pycatfile-0.22.2.dist-info}/top_level.txt +0 -0
- {pycatfile-0.21.2.dist-info → pycatfile-0.22.2.dist-info}/zip-safe +0 -0
pycatfile.py
CHANGED
@@ -14,7 +14,7 @@
 Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
 Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski

-$FileInfo: pycatfile.py - Last Update: 8/
+$FileInfo: pycatfile.py - Last Update: 8/29/2025 Ver. 0.22.2 RC 1 - Author: cooldude2k $
 '''

 from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -32,10 +32,13 @@ import socket
 import hashlib
 import inspect
 import datetime
+import tempfile
 import logging
 import zipfile
 import binascii
 import platform
+from io import StringIO, BytesIO
+import posixpath as pp  # POSIX-safe joins/normpaths
 try:
     from backports import tempfile
 except ImportError:
@@ -43,10 +46,10 @@ except ImportError:
 # FTP Support
 ftpssl = True
 try:
-    from ftplib import FTP, FTP_TLS
+    from ftplib import FTP, FTP_TLS, all_errors
 except ImportError:
     ftpssl = False
-    from ftplib import FTP
+    from ftplib import FTP, all_errors

 try:
     import ujson as json
@@ -100,9 +103,13 @@ baseint = tuple(baseint)

 # URL Parsing
 try:
-
+    # Python 3
+    from urllib.parse import urlparse, urlunparse, unquote
+    from urllib.request import url2pathname
 except ImportError:
+    # Python 2
     from urlparse import urlparse, urlunparse
+    from urllib import unquote, url2pathname

 # Windows-specific setup
 if os.name == "nt":
@@ -243,17 +250,6 @@ except ImportError:
     from urllib2 import Request, build_opener, HTTPBasicAuthHandler
     from urlparse import urlparse

-# StringIO and BytesIO
-try:
-    from io import StringIO, BytesIO
-except ImportError:
-    try:
-        from cStringIO import StringIO
-        from cStringIO import StringIO as BytesIO
-    except ImportError:
-        from StringIO import StringIO
-        from StringIO import StringIO as BytesIO
-
 def get_importing_script_path():
     # Inspect the stack and get the frame of the caller
     stack = inspect.stack()
@@ -275,22 +271,8 @@ def get_default_threads():


 __use_pysftp__ = False
-
-
-__use_ini_file__ = True
-__use_ini_name__ = "catfile.ini"
-if('PYCATFILE_CONFIG_FILE' in os.environ and os.path.exists(os.environ['PYCATFILE_CONFIG_FILE']) and __use_env_file__):
-    scriptconf = os.environ['PYCATFILE_CONFIG_FILE']
-else:
-    prescriptpath = get_importing_script_path()
-    if(prescriptpath is not None):
-        scriptconf = os.path.join(os.path.dirname(prescriptpath), __use_ini_name__)
-    else:
-        scriptconf = ""
-if os.path.exists(scriptconf):
-    __config_file__ = scriptconf
-else:
-    __config_file__ = os.path.join(os.path.dirname(os.path.realpath(__file__)), __use_ini_name__)
+__upload_proto_support__ = "^(ftp|ftps|sftp|scp)://"
+__download_proto_support__ = "^(http|https|ftp|ftps|sftp|scp)://"
 if(not havepysftp):
     __use_pysftp__ = False
 __use_http_lib__ = "httpx"
@@ -328,7 +310,25 @@ def is_only_nonprintable(var):
 __file_format_multi_dict__ = {}
 __file_format_default__ = "CatFile"
 __include_defaults__ = True
+__use_inmemfile__ = False
 __program_name__ = "Py"+__file_format_default__
+__use_env_file__ = True
+__use_ini_file__ = True
+__use_ini_name__ = "catfile.ini"
+__use_json_file__ = False
+__use_json_name__ = "catfile.json"
+if('PYCATFILE_CONFIG_FILE' in os.environ and os.path.exists(os.environ['PYCATFILE_CONFIG_FILE']) and __use_env_file__):
+    scriptconf = os.environ['PYCATFILE_CONFIG_FILE']
+else:
+    prescriptpath = get_importing_script_path()
+    if(prescriptpath is not None):
+        scriptconf = os.path.join(os.path.dirname(prescriptpath), __use_ini_name__)
+    else:
+        scriptconf = ""
+if os.path.exists(scriptconf):
+    __config_file__ = scriptconf
+else:
+    __config_file__ = os.path.join(os.path.dirname(os.path.realpath(__file__)), __use_ini_name__)
 if __use_ini_file__ and os.path.exists(__config_file__):
     config = configparser.ConfigParser()
     config.read(__config_file__)
@@ -340,6 +340,7 @@ if __use_ini_file__ and os.path.exists(__config_file__):
         __file_format_default__ = decode_unicode_escape(config.get('config', 'default'))
         __program_name__ = decode_unicode_escape(config.get('config', 'proname'))
         __include_defaults__ = config.getboolean('config', 'includedef')
+        __use_inmemfile__ = config.getboolean('config', 'inmemfile')
     # Loop through all sections
     for section in config.sections():
         required_keys = [
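Note: the hunk above adds an `inmemfile` boolean to the `[config]` section handling, next to the existing `default`, `proname`, and `includedef` keys. A minimal sketch of how such a key is read (the ini text below is an assumed example for illustration, not a file shipped with the package):

    # Sketch (Python 3): reading the new 'inmemfile' boolean the way the module does.
    # The ini contents are an assumed example, not part of PyCatFile.
    import configparser

    ini_text = "[config]\ndefault = CatFile\nproname = PyCatFile\nincludedef = true\ninmemfile = true\n"
    config = configparser.ConfigParser()
    config.read_string(ini_text)  # pycatfile itself calls config.read(__config_file__)
    use_inmemfile = config.getboolean('config', 'inmemfile')
    print(use_inmemfile)  # True -> MkTempFile() defaults to in-memory buffers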
@@ -391,12 +392,12 @@ __file_format_extension__ = __file_format_multi_dict__[__file_format_default__][
 __file_format_dict__ = __file_format_multi_dict__[__file_format_default__]
 __project__ = __program_name__
 __project_url__ = "https://github.com/GameMaker2k/PyCatFile"
-__version_info__ = (0,
-__version_date_info__ = (2025, 9,
+__version_info__ = (0, 22, 2, "RC 1", 1)
+__version_date_info__ = (2025, 9, 29, "RC 1", 1)
 __version_date__ = str(__version_date_info__[0]) + "." + str(
     __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
 __revision__ = __version_info__[3]
-__revision_id__ = "$Id:
+__revision_id__ = "$Id: 5942aea043d080ea8842152fa874ad3c7bbb4cc4 $"
 if(__version_info__[4] is not None):
     __version_date_plusrc__ = __version_date__ + \
         "-" + str(__version_date_info__[4])
@@ -586,6 +587,281 @@ def VerbosePrintOutReturn(dbgtxt, outtype="log", dbgenable=True, dgblevel=20):
     return dbgtxt


+# --- Helpers ---
+def _normalize_initial_data(data, isbytes, encoding):
+    """Return data in the correct type for write(): bytes (if isbytes) or text (if not)."""
+    if data is None:
+        return None
+
+    if isbytes:
+        # Want bytes
+        if isinstance(data, bytes):
+            return data
+        # Py2: str is already bytes, unicode needs encode
+        if sys.version_info[0] == 2:
+            try:
+                unicode  # noqa: F821
+            except NameError:
+                pass
+            else:
+                if isinstance(data, unicode):  # noqa: F821
+                    return data.encode(encoding)
+        # Py3 str -> encode
+        return str(data).encode(encoding)
+    else:
+        # Want text (unicode/str)
+        if sys.version_info[0] == 2:
+            try:
+                unicode  # noqa: F821
+                if isinstance(data, unicode):  # noqa: F821
+                    return data
+                # bytes/str -> decode
+                return data.decode(encoding) if isinstance(data, str) else unicode(data)  # noqa: F821
+            except NameError:
+                # Very defensive; shouldn't happen
+                return data
+        else:
+            # Py3: want str
+            if isinstance(data, bytes):
+                return data.decode(encoding)
+            return str(data)
+
+
+def _split_posix(path_text):
+    """Split POSIX paths regardless of OS; return list of components."""
+    # Normalize leading './'
+    if path_text.startswith(u'./'):
+        path_text = path_text[2:]
+    # Strip redundant slashes
+    path_text = re.sub(u'/+', u'/', path_text)
+    # Drop trailing '/' so 'dir/' -> ['dir']
+    if path_text.endswith(u'/'):
+        path_text = path_text[:-1]
+    return path_text.split(u'/') if path_text else []
+
+def _is_abs_like(s):
+    """Absolute targets (POSIX or Windows-drive style)."""
+    return s.startswith(u'/') or s.startswith(u'\\') or re.match(u'^[A-Za-z]:[/\\\\]', s)
+
+def _resolves_outside(base_rel, target_rel):
+    """
+    Given a base directory (relative, POSIX) and a target (relative),
+    return True if base/target resolves outside of base.
+    We anchor under '/' so normpath is root-anchored and portable.
+    """
+    base_clean = u'/'.join(_split_posix(base_rel))
+    target_clean = u'/'.join(_split_posix(target_rel))
+    base_abs = u'/' + base_clean if base_clean else u'/'
+    combined = pp.normpath(pp.join(base_abs, target_clean))
+    if combined == base_abs or combined.startswith(base_abs + u'/'):
+        return False
+    return True
+
+
+def DetectTarBombCatFileArray(listarrayfiles,
+                              top_file_ratio_threshold=0.6,
+                              min_members_for_ratio=4,
+                              symlink_policy="escape-only",  # 'escape-only' | 'deny' | 'single-folder-only'
+                              to_text=to_text):
+    """
+    Detect 'tarbomb-like' archives from CatFileToArray/TarFileToArray dicts.
+
+    Parameters:
+      listarrayfiles: dict with key 'ffilelist' -> list of entries (requires 'fname')
+      top_file_ratio_threshold: float, fraction of root files considered tarbomb
+      min_members_for_ratio: int, minimum members before ratio heuristic applies
+      symlink_policy:
+        - 'escape-only': only symlinks that escape parent/are absolute are unsafe
+        - 'deny': any symlink is unsafe
+        - 'single-folder-only': symlinks allowed only if archive has a single top-level folder
+      to_text: normalization function (your provided to_text)
+
+    Returns dict with:
+      - is_tarbomb, reasons, total_members, top_level_entries, top_level_files_count,
+        has_absolute_paths, has_parent_traversal,
+        symlink_escapes_root (bool), symlink_issues (list[{entry,target,reason}])
+    """
+    files = listarrayfiles or {}
+    members = files.get('ffilelist') or []
+
+    names = []
+    has_abs = False
+    has_parent = False
+
+    # Symlink tracking
+    has_any_symlink = False
+    symlink_issues = []
+    any_symlink_escape = False
+
+    for m in members:
+        m = m or {}
+        name = to_text(m.get('fname', u""))
+
+        if _is_abs_like(name):
+            has_abs = True
+
+        parts = _split_posix(name)
+        if u'..' in parts:
+            has_parent = True
+
+        if not parts:
+            continue
+
+        norm_name = u'/'.join(parts)
+        names.append(norm_name)
+
+        # ---- Symlink detection ----
+        ftype = m.get('ftype')
+        is_symlink = (ftype == 2) or (to_text(ftype).lower() == u'symlink' if ftype is not None else False)
+        if is_symlink:
+            has_any_symlink = True
+            target = to_text(m.get('flinkname', u""))
+            # Absolute symlink target is unsafe
+            if _is_abs_like(target):
+                any_symlink_escape = True
+                symlink_issues.append({'entry': norm_name, 'target': target, 'reason': 'absolute symlink target'})
+            else:
+                parent = u'/'.join(parts[:-1])  # may be ''
+                if _resolves_outside(parent, target):
+                    any_symlink_escape = True
+                    symlink_issues.append({'entry': norm_name, 'target': target, 'reason': 'symlink escapes parent directory'})
+
+    total = len(names)
+    reasons = []
+    if total == 0:
+        return {
+            "is_tarbomb": False,
+            "reasons": ["archive contains no members"],
+            "total_members": 0,
+            "top_level_entries": [],
+            "top_level_files_count": 0,
+            "has_absolute_paths": has_abs,
+            "has_parent_traversal": has_parent,
+            "symlink_escapes_root": any_symlink_escape,
+            "symlink_issues": symlink_issues,
+        }
+
+    # Layout counts
+    top_counts = {}
+    top_level_files_count = 0
+    for name in names:
+        parts = name.split(u'/')
+        first = parts[0]
+        top_counts[first] = top_counts.get(first, 0) + 1
+        if len(parts) == 1:  # directly at archive root
+            top_level_files_count += 1
+
+    top_keys = sorted(top_counts.keys())
+    is_tarbomb = False
+
+    # Path-based dangers
+    if has_abs:
+        is_tarbomb = True
+        reasons.append("contains absolute paths (dangerous)")
+    if has_parent:
+        is_tarbomb = True
+        reasons.append("contains parent-traversal ('..') entries (dangerous)")
+    if any_symlink_escape:
+        is_tarbomb = True
+        reasons.append("contains symlinks that escape their parent directory")
+
+    # Symlink policy enforcement
+    if symlink_policy == "deny" and has_any_symlink:
+        is_tarbomb = True
+        reasons.append("symlinks present and policy is 'deny'")
+    elif symlink_policy == "single-folder-only" and has_any_symlink and len(top_keys) != 1:
+        is_tarbomb = True
+        reasons.append("symlinks present but archive lacks a single top-level folder")
+
+    # Tarbomb layout heuristics
+    if len(top_keys) == 1:
+        reasons.append("single top-level entry '{0}'".format(top_keys[0]))
+    else:
+        ratio = float(top_level_files_count) / float(total)
+        if total >= min_members_for_ratio and ratio > float(top_file_ratio_threshold):
+            is_tarbomb = True
+            reasons.append("high fraction of members ({0:.0%}) at archive root".format(ratio))
+        else:
+            max_bucket = max(top_counts.values()) if top_counts else 0
+            if max_bucket < total * 0.9:
+                is_tarbomb = True
+                reasons.append("multiple top-level entries with no dominant folder: {0}".format(
+                    u", ".join(top_keys[:10])))
+            else:
+                reasons.append("multiple top-level entries but one dominates")
+
+    return {
+        "is_tarbomb": bool(is_tarbomb),
+        "reasons": reasons,
+        "total_members": total,
+        "top_level_entries": top_keys,
+        "top_level_files_count": top_level_files_count,
+        "has_absolute_paths": has_abs,
+        "has_parent_traversal": has_parent,
+        "symlink_escapes_root": any_symlink_escape,
+        "symlink_issues": symlink_issues,
+    }
+
+
+def MkTempFile(data=None, inmem=__use_inmemfile__, isbytes=True, prefix=__project__,
+               delete=True, encoding="utf-8"):
+    """
+    Return a file-like handle.
+    - If inmem=True: returns StringIO (text) or BytesIO (bytes).
+    - If inmem=False: returns a NamedTemporaryFile opened in text or binary mode.
+    Args:
+      data: optional initial content; if provided, it's written and the handle is seek(0)
+      inmem: bool — return in-memory handle if True
+      isbytes: bool — choose bytes (True) or text (False)
+      prefix: str — tempfile prefix
+      delete: bool — whether the tempfile is deleted on close (NamedTemporaryFile)
+      encoding: str — used for text mode (and for conversions when needed)
+    """
+    init = _normalize_initial_data(data, isbytes, encoding)
+
+    if inmem:
+        buf = BytesIO() if isbytes else StringIO()
+        if init is not None:
+            buf.write(init)
+            buf.seek(0)
+        return buf
+
+    mode = "wb+" if isbytes else "w+"
+    kwargs = {"prefix": prefix or "", "delete": delete, "mode": mode}
+
+    # Only Python 3's text-mode files accept encoding/newline explicitly
+    if not isbytes and sys.version_info[0] >= 3:
+        kwargs["encoding"] = encoding
+        kwargs["newline"] = ""
+
+    f = tempfile.NamedTemporaryFile(**kwargs)
+
+    if init is not None:
+        f.write(init)
+        f.seek(0)
+    return f
+
+
+def MkTempFileSmart(data=None, isbytes=True, prefix=__project__, max_mem=1024*1024, encoding="utf-8"):
+    """
+    Spooled temp file: starts in memory and spills to disk past max_mem.
+    Behaves like BytesIO/StringIO for small data, with the same preload+seek(0) behavior.
+    """
+    mode = "wb+" if isbytes else "w+"
+    kwargs = {"mode": mode, "max_size": max_mem, "prefix": prefix or ""}
+    if not isbytes and sys.version_info[0] >= 3:
+        kwargs["encoding"] = encoding
+        kwargs["newline"] = ""
+
+    f = tempfile.SpooledTemporaryFile(**kwargs)
+
+    init = _normalize_initial_data(data, isbytes, encoding)
+    if init is not None:
+        f.write(init)
+        f.seek(0)
+    return f
+
+
 def RemoveWindowsPath(dpath):
     """
     Normalize a path by converting backslashes to forward slashes
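Note: the hunk above introduces the `MkTempFile`/`MkTempFileSmart` buffer factories and the `DetectTarBombCatFileArray` heuristic. A rough usage sketch based only on the signatures and docstrings shown above (the sample member list is invented for illustration):

    # Sketch: exercising the new helpers added in this hunk.
    from pycatfile import MkTempFile, MkTempFileSmart, DetectTarBombCatFileArray

    buf = MkTempFile(b"hello world", inmem=True, isbytes=True)   # BytesIO, preloaded and rewound
    print(buf.read())                                            # b'hello world'

    spooled = MkTempFileSmart(b"small payload", max_mem=1024 * 1024)
    print(spooled.read())                                        # stays in memory below max_mem

    # Invented member list shaped like the 'ffilelist' entries the docstring describes.
    listarrayfiles = {
        'ffilelist': [
            {'fname': 'pkg/readme.txt', 'ftype': 0},
            {'fname': '../escape.txt', 'ftype': 0},
            {'fname': 'pkg/link', 'ftype': 2, 'flinkname': '/etc/passwd'},
        ]
    }
    report = DetectTarBombCatFileArray(listarrayfiles)
    print(report['is_tarbomb'], report['reasons'])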
@@ -593,6 +869,13 @@ def RemoveWindowsPath(dpath):
     """
     if not dpath:
         return ""
+    if re.match("^file://", dpath, re.IGNORECASE):
+        # Normalize to file:/// if it's a local path (no host)
+        if dpath.lower().startswith("file://") and not dpath.lower().startswith("file:///"):
+            # insert the extra slash
+            dpath = "file:///" + dpath[7:]
+        dparsed = urlparse(dpath)
+        dpath = url2pathname(dparsed.path)
     # Accept bytes and decode safely
     if isinstance(dpath, (bytes, bytearray)):
         dpath = dpath.decode("utf-8", "ignore")
@@ -608,6 +891,13 @@ def NormalizeRelativePath(inpath):
     """
     Ensures the path is relative unless it is absolute. Prepares consistent relative paths.
     """
+    if re.match("^file://", inpath, re.IGNORECASE):
+        # Normalize to file:/// if it's a local path (no host)
+        if inpath.lower().startswith("file://") and not inpath.lower().startswith("file:///"):
+            # insert the extra slash
+            inpath = "file:///" + inpath[7:]
+        dparsed = urlparse(inpath)
+        inpath = url2pathname(dparsed.path)
     inpath = RemoveWindowsPath(inpath)
     if os.path.isabs(inpath):
         outpath = inpath
@@ -664,6 +954,13 @@ def ListDir(dirpath, followlink=False, duplicates=False, include_regex=None, exc
     include_pattern = re.compile(include_regex) if include_regex else None
     exclude_pattern = re.compile(exclude_regex) if exclude_regex else None
     for mydirfile in dirpath:
+        if re.match("^file://", mydirfile, re.IGNORECASE):
+            # Normalize to file:/// if it's a local path (no host)
+            if mydirfile.lower().startswith("file://") and not mydirfile.lower().startswith("file:///"):
+                # insert the extra slash
+                mydirfile = "file:///" + mydirfile[7:]
+            dparsed = urlparse(mydirfile)
+            mydirfile = url2pathname(dparsed.path)
         if not os.path.exists(mydirfile):
             return False
         mydirfile = NormalizeRelativePath(mydirfile)
@@ -734,6 +1031,13 @@ def ListDirAdvanced(dirpath, followlink=False, duplicates=False, include_regex=N
     include_pattern = re.compile(include_regex) if include_regex else None
     exclude_pattern = re.compile(exclude_regex) if exclude_regex else None
     for mydirfile in dirpath:
+        if re.match("^file://", mydirfile, re.IGNORECASE):
+            # Normalize to file:/// if it's a local path (no host)
+            if mydirfile.lower().startswith("file://") and not mydirfile.lower().startswith("file:///"):
+                # insert the extra slash
+                mydirfile = "file:///" + mydirfile[7:]
+            dparsed = urlparse(mydirfile)
+            mydirfile = url2pathname(dparsed.path)
         if not os.path.exists(mydirfile):
             return False
         mydirfile = NormalizeRelativePath(mydirfile)
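Note: the same `file://` block is repeated in `RemoveWindowsPath`, `NormalizeRelativePath`, `ListDir`, and `ListDirAdvanced`. Stripped of the surrounding functions, the pattern is plain `urlparse` plus `url2pathname`; a standalone sketch follows (the helper name is made up for illustration):

    # Standalone sketch of the file:// handling added above (standard library only).
    from urllib.parse import urlparse
    from urllib.request import url2pathname

    def file_url_to_path(path):  # hypothetical helper, not part of pycatfile
        if path.lower().startswith("file://") and not path.lower().startswith("file:///"):
            path = "file:///" + path[7:]  # normalize host-less file:// URLs
        return url2pathname(urlparse(path).path)

    print(file_url_to_path("file:///tmp/archive.cat"))  # /tmp/archive.cat on POSIX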
@@ -1722,9 +2026,9 @@ def GetFileChecksum(instr, checksumtype="crc32", encodedata=True, formatspecs=__


 def ValidateHeaderChecksum(inlist=[], checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
-
+    catfileheadercshex = GetHeaderChecksum(
         inlist, checksumtype, True, formatspecs).lower()
-    return inchecksum.lower() ==
+    return inchecksum.lower() == catfileheadercshex


 def ValidateFileChecksum(infile, checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
@@ -1922,7 +2226,7 @@ def ReadFileHeaderDataBySize(fp, delimiter=__file_format_dict__['format_delimite
     headersize = int(preheaderdata[0], 16)
     if(headersize <= 0):
         return []
-    subfp =
+    subfp = MkTempFile()
     subfp.write(fp.read(headersize))
     fp.seek(len(delimiter), 1)
     subfp.seek(0, 0)
@@ -1943,7 +2247,7 @@ def ReadFileHeaderDataWoSize(fp, delimiter=__file_format_dict__['format_delimite
     if(headersize <= 0 or headernumfields <= 0):
         return []
     headerdata = ReadTillNullByteByNum(fp, delimiter, headernumfields)
-    #headerdata = ReadFileHeaderData(fp, headernumfields, delimiter)
+    #headerdata = ReadFileHeaderData(fp, headernumfields, delimiter)
     HeaderOut = preheaderdata + headerdata
     return HeaderOut

@@ -2006,7 +2310,7 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
         return False
     fhend = fp.tell() - 1
     fcontentstart = fp.tell()
-    fcontents =
+    fcontents = MkTempFile()
     if(fsize > 0 and not listonly):
         if(fcompression == "none" or fcompression == "" or fcompression == "auto"):
             fcontents.write(fp.read(fsize))
@@ -2033,7 +2337,7 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
         if(uncompress):
             cfcontents = UncompressFileAlt(fcontents, formatspecs)
             cfcontents.seek(0, 0)
-            fcontents =
+            fcontents = MkTempFile()
             shutil.copyfileobj(cfcontents, fcontents)
             cfcontents.close()
             fcontents.seek(0, 0)
@@ -2147,7 +2451,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
            fjsoncontent = {}
    elif(fjsontype=="list"):
        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
-       flisttmp =
+       flisttmp = MkTempFile()
        flisttmp.write(fprejsoncontent.encode())
        flisttmp.seek(0)
        fjsoncontent = ReadFileHeaderData(flisttmp, fjsonlen, delimiter)
@@ -2183,7 +2487,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
         return False
     fhend = fp.tell() - 1
     fcontentstart = fp.tell()
-    fcontents =
+    fcontents = MkTempFile()
     pyhascontents = False
     if(fsize > 0 and not listonly):
         if(fcompression == "none" or fcompression == "" or fcompression == "auto"):
@@ -2214,7 +2518,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
             cfcontents = UncompressFileAlt(
                 fcontents, formatspecs)
             cfcontents.seek(0, 0)
-            fcontents =
+            fcontents = MkTempFile()
             shutil.copyfileobj(cfcontents, fcontents)
             cfcontents.close()
             fcontents.seek(0, 0)
@@ -2333,7 +2637,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
            fjsoncontent = {}
    elif(fjsontype=="list"):
        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
-       flisttmp =
+       flisttmp = MkTempFile()
        flisttmp.write(fprejsoncontent.encode())
        flisttmp.seek(0)
        fjsoncontent = ReadFileHeaderData(flisttmp, fjsonlen, delimiter)
@@ -2368,7 +2672,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
         return False
     fhend = fp.tell() - 1
     fcontentstart = fp.tell()
-    fcontents =
+    fcontents = MkTempFile()
     pyhascontents = False
     if(fsize > 0 and not listonly):
         if(fcompression == "none" or fcompression == "" or fcompression == "auto"):
@@ -2398,7 +2702,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
             cfcontents = UncompressFileAlt(
                 fcontents, formatspecs)
             cfcontents.seek(0, 0)
-            fcontents =
+            fcontents = MkTempFile()
             shutil.copyfileobj(cfcontents, fcontents)
             cfcontents.close()
             fcontents.seek(0, 0)
@@ -2430,22 +2734,20 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
     return outlist


-def ReadFileDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
+def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
     if(not hasattr(fp, "read")):
         return False
     delimiter = formatspecs['format_delimiter']
-    curloc =
+    curloc = filestart
     try:
-        fp.seek(0, 2)
+        fp.seek(0, 2)
     except OSError:
-        SeekToEndOfFile(fp)
+        SeekToEndOfFile(fp)
     except ValueError:
-        SeekToEndOfFile(fp)
-    CatSize = fp.tell()
-    CatSizeEnd = CatSize
+        SeekToEndOfFile(fp)
+    CatSize = fp.tell()
+    CatSizeEnd = CatSize
     fp.seek(curloc, 0)
-    if(curloc > 0):
-        fp.seek(0, 0)
     inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
     formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
     formdelszie = len(formatspecs['format_delimiter'])
@@ -2460,8 +2762,6 @@ def ReadFileDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=Fa
     else:
         inheader = ReadFileHeaderDataWoSize(
             fp, formatspecs['format_delimiter'])
-    if(curloc > 0):
-        fp.seek(curloc, 0)
     fprechecksumtype = inheader[-2]
     fprechecksum = inheader[-1]
     headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
@@ -2485,22 +2785,20 @@ def ReadFileDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=Fa
     return flist


-def ReadFileDataWithContentToArray(fp, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
+def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
     if(not hasattr(fp, "read")):
         return False
     delimiter = formatspecs['format_delimiter']
-    curloc =
+    curloc = filestart
     try:
-        fp.seek(0, 2)
+        fp.seek(0, 2)
     except OSError:
-        SeekToEndOfFile(fp)
+        SeekToEndOfFile(fp)
     except ValueError:
-        SeekToEndOfFile(fp)
-    CatSize = fp.tell()
-    CatSizeEnd = CatSize
+        SeekToEndOfFile(fp)
+    CatSize = fp.tell()
+    CatSizeEnd = CatSize
     fp.seek(curloc, 0)
-    if(curloc > 0):
-        fp.seek(0, 0)
     inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
     formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
     formdelszie = len(formatspecs['format_delimiter'])
@@ -2532,8 +2830,6 @@ def ReadFileDataWithContentToArray(fp, seekstart=0, seekend=0, listonly=False, c
            fextrafieldslist = json.loads(fextrafieldslist[0])
        except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
            pass
-    if(curloc > 0):
-        fp.seek(curloc, 0)
     formversion = re.findall("([\\d]+)", formstring)
     fheadsize = int(inheader[0], 16)
     fnumfields = int(inheader[1], 16)
@@ -2598,7 +2894,7 @@ def ReadFileDataWithContentToArray(fp, seekstart=0, seekend=0, listonly=False, c
                invalid_archive = True
        prefhend = fp.tell() - 1
        prefcontentstart = fp.tell()
-       prefcontents =
+       prefcontents = MkTempFile()
        pyhascontents = False
        if(prefsize > 0):
            prefcontents.write(fp.read(prefsize))
@@ -2645,22 +2941,20 @@ def ReadFileDataWithContentToArray(fp, seekstart=0, seekend=0, listonly=False, c
     return outlist


-def ReadFileDataWithContentToList(fp, seekstart=0, seekend=0, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
+def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
     if(not hasattr(fp, "read")):
         return False
     delimiter = formatspecs['format_delimiter']
-    curloc =
+    curloc = filestart
     try:
-        fp.seek(0, 2)
+        fp.seek(0, 2)
     except OSError:
-        SeekToEndOfFile(fp)
+        SeekToEndOfFile(fp)
     except ValueError:
-        SeekToEndOfFile(fp)
-    CatSize = fp.tell()
-    CatSizeEnd = CatSize
+        SeekToEndOfFile(fp)
+    CatSize = fp.tell()
+    CatSizeEnd = CatSize
     fp.seek(curloc, 0)
-    if(curloc > 0):
-        fp.seek(0, 0)
     inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
     formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
     formdelszie = len(formatspecs['format_delimiter'])
@@ -2692,8 +2986,6 @@ def ReadFileDataWithContentToList(fp, seekstart=0, seekend=0, listonly=False, co
            fextrafieldslist = json.loads(fextrafieldslist[0])
        except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
            pass
-    if(curloc > 0):
-        fp.seek(curloc, 0)
     formversion = re.findall("([\\d]+)", formstring)
     fheadsize = int(inheader[0], 16)
     fnumfields = int(inheader[1], 16)
@@ -2811,25 +3103,25 @@ def ReadFileDataWithContentToList(fp, seekstart=0, seekend=0, listonly=False, co
     return outlist


-def ReadInFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
     if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
         formatspecs = formatspecs[fmttype]
     elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
         fmttype = "auto"
     if(hasattr(infile, "read") or hasattr(infile, "write")):
         fp = infile
-        fp.seek(
-        compresscheck = CheckCompressionType(fp, formatspecs, False)
+        fp.seek(filestart, 0)
+        compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         else:
-            fp.seek(
-            checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
+            fp.seek(filestart, 0)
+            checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
             if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
                 formatspecs = formatspecs[checkcompressfile]
-            fp.seek(
-            fp = UncompressFileAlt(fp, formatspecs)
-            checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
+            fp.seek(filestart, 0)
+            fp = UncompressFileAlt(fp, formatspecs, filestart)
+            checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
         if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
             return TarFileToArray(infile, seekstart, seekend, listonly, contentasfile, skipchecksum, formatspecs, seektoend, True)
         elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
@@ -2864,58 +3156,58 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0,
                compresscheck = "zlib"
            else:
                return False
-        fp.seek(
+        fp.seek(filestart, 0)
     elif(infile == "-"):
-        fp =
+        fp = MkTempFile()
         if(hasattr(sys.stdin, "buffer")):
             shutil.copyfileobj(sys.stdin.buffer, fp)
         else:
             shutil.copyfileobj(sys.stdin, fp)
-        fp.seek(
-        fp = UncompressFileAlt(fp, formatspecs)
-        fp.seek(
-        compresscheck = CheckCompressionType(fp, formatspecs, False)
+        fp.seek(filestart, 0)
+        fp = UncompressFileAlt(fp, formatspecs, filestart)
+        fp.seek(filestart, 0)
+        compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         else:
-            fp.seek(
-            checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
+            fp.seek(filestart, 0)
+            checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
             if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
                 formatspecs = formatspecs[checkcompressfile]
-            fp.seek(
+            fp.seek(filestart, 0)
         if(not fp):
             return False
-        fp.seek(
+        fp.seek(filestart, 0)
     elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
-        fp =
+        fp = MkTempFile()
         fp.write(infile)
-        fp.seek(
-        fp = UncompressFileAlt(fp, formatspecs)
-        fp.seek(
-        compresscheck = CheckCompressionType(fp, formatspecs, False)
+        fp.seek(filestart, 0)
+        fp = UncompressFileAlt(fp, formatspecs, filestart)
+        fp.seek(filestart, 0)
+        compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         else:
-            fp.seek(
-            checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
+            fp.seek(filestart, 0)
+            checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
             if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
                 formatspecs = formatspecs[checkcompressfile]
-            fp.seek(
+            fp.seek(filestart, 0)
         if(not fp):
             return False
-        fp.seek(
-    elif(re.findall(
+        fp.seek(filestart, 0)
+    elif(re.findall(__download_proto_support__, infile)):
         fp = download_file_from_internet_file(infile)
-        fp.seek(
-        compresscheck = CheckCompressionType(fp, formatspecs, False)
+        fp.seek(filestart, 0)
+        compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         else:
-            fp.seek(
-            checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
+            fp.seek(filestart, 0)
+            checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
             if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
                 formatspecs = formatspecs[checkcompressfile]
-            fp.seek(
+            fp.seek(filestart, 0)
         if(not compresscheck):
             fextname = os.path.splitext(infile)[1]
             if(fextname == ".gz"):
@@ -2936,14 +3228,14 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0,
                compresscheck = "zlib"
            else:
                return False
-        fp.seek(
-        fp = UncompressFileAlt(fp, formatspecs)
+        fp.seek(filestart, 0)
+        fp = UncompressFileAlt(fp, formatspecs, filestart)
         if(not fp):
             return False
-        fp.seek(
+        fp.seek(filestart, 0)
     else:
         infile = RemoveWindowsPath(infile)
-        checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
+        checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
         if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
             formatspecs = formatspecs[checkcompressfile]
         if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
@@ -2958,7 +3250,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0,
             return False
         elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
             return False
-        compresscheck = CheckCompressionType(infile, formatspecs, True)
+        compresscheck = CheckCompressionType(infile, formatspecs, filestart, True)
         if(not compresscheck):
             fextname = os.path.splitext(infile)[1]
             if(fextname == ".gz"):
@@ -2981,43 +3273,43 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0,
                return False
         if(not compresscheck):
             return False
-        fp = UncompressFile(infile, formatspecs, "rb")
-    return ReadFileDataWithContentToArray(fp, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+        fp = UncompressFile(infile, formatspecs, "rb", filestart)
+    return ReadFileDataWithContentToArray(fp, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)


-def ReadInMultipleFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+def ReadInMultipleFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
     if(isinstance(infile, (list, tuple, ))):
         pass
     else:
         infile = [infile]
     outretval = {}
     for curfname in infile:
-        outretval[curfname] = ReadInFileWithContentToArray(curfname, fmttype, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+        outretval[curfname] = ReadInFileWithContentToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
     return outretval

-def ReadInMultipleFilesWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
-    return ReadInMultipleFileWithContentToArray(infile, fmttype, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+def ReadInMultipleFilesWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+    return ReadInMultipleFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)


-def ReadInFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
     if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
         formatspecs = formatspecs[fmttype]
     elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
         fmttype = "auto"
     if(hasattr(infile, "read") or hasattr(infile, "write")):
         fp = infile
-        fp.seek(
-        compresscheck = CheckCompressionType(fp, formatspecs, False)
+        fp.seek(filestart, 0)
+        compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         else:
-            fp.seek(
-            checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
+            fp.seek(filestart, 0)
+            checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
             if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
                 formatspecs = formatspecs[checkcompressfile]
-            fp.seek(
-            fp = UncompressFileAlt(fp, formatspecs)
-            checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
+            fp.seek(filestart, 0)
+            fp = UncompressFileAlt(fp, formatspecs, filestart)
+            checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
         if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
             return TarFileToArray(infile, seekstart, seekend, listonly, contentasfile, skipchecksum, formatspecs, seektoend, True)
         elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
@@ -3052,58 +3344,58 @@ def ReadInFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0,
                compresscheck = "zlib"
            else:
                return False
-        fp.seek(
+        fp.seek(filestart, 0)
     elif(infile == "-"):
-        fp =
+        fp = MkTempFile()
         if(hasattr(sys.stdin, "buffer")):
             shutil.copyfileobj(sys.stdin.buffer, fp)
         else:
             shutil.copyfileobj(sys.stdin, fp)
-        fp.seek(
-        fp = UncompressFileAlt(fp, formatspecs)
-        fp.seek(
-        compresscheck = CheckCompressionType(fp, formatspecs, False)
+        fp.seek(filestart, 0)
+        fp = UncompressFileAlt(fp, formatspecs, filestart)
+        fp.seek(filestart, 0)
+        compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         else:
-            fp.seek(
-            checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
+            fp.seek(filestart, 0)
+            checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
             if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
                 formatspecs = formatspecs[checkcompressfile]
-            fp.seek(
+            fp.seek(filestart, 0)
         if(not fp):
             return False
-        fp.seek(
+        fp.seek(filestart, 0)
     elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
-        fp =
+        fp = MkTempFile()
         fp.write(infile)
-        fp.seek(
-        fp = UncompressFileAlt(fp, formatspecs)
-        fp.seek(
-        compresscheck = CheckCompressionType(fp, formatspecs, False)
+        fp.seek(filestart, 0)
+        fp = UncompressFileAlt(fp, formatspecs, filestart)
+        fp.seek(filestart, 0)
+        compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         else:
-            fp.seek(
-            checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
+            fp.seek(filestart, 0)
+            checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
             if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
                 formatspecs = formatspecs[checkcompressfile]
-            fp.seek(
+            fp.seek(filestart, 0)
         if(not fp):
             return False
-        fp.seek(
-    elif(re.findall(
+        fp.seek(filestart, 0)
+    elif(re.findall(__download_proto_support__, infile)):
         fp = download_file_from_internet_file(infile)
-        fp.seek(
-        compresscheck = CheckCompressionType(fp, formatspecs, False)
+        fp.seek(filestart, 0)
+        compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
         if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
             formatspecs = formatspecs[compresscheck]
         else:
-            fp.seek(
-            checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
+            fp.seek(filestart, 0)
+            checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
             if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
                 formatspecs = formatspecs[checkcompressfile]
-            fp.seek(
+            fp.seek(filestart, 0)
         if(not compresscheck):
             fextname = os.path.splitext(infile)[1]
             if(fextname == ".gz"):
@@ -3124,14 +3416,14 @@ def ReadInFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0,
                compresscheck = "zlib"
            else:
                return False
-        fp.seek(
-        fp = UncompressFileAlt(fp, formatspecs)
+        fp.seek(filestart, 0)
+        fp = UncompressFileAlt(fp, formatspecs, filestart)
         if(not fp):
             return False
-        fp.seek(
+        fp.seek(filestart, 0)
     else:
         infile = RemoveWindowsPath(infile)
-        checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
+        checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
         if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
             formatspecs = formatspecs[checkcompressfile]
         if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
@@ -3146,7 +3438,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0,
             return False
         elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
             return False
-        compresscheck = CheckCompressionType(infile, formatspecs, True)
+        compresscheck = CheckCompressionType(infile, formatspecs, filestart, True)
         if(not compresscheck):
             fextname = os.path.splitext(infile)[1]
             if(fextname == ".gz"):
@@ -3169,22 +3461,22 @@ def ReadInFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0,
                return False
         if(not compresscheck):
             return False
-        fp = UncompressFile(infile, formatspecs, "rb")
-    return ReadFileDataWithContentToList(fp, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+        fp = UncompressFile(infile, formatspecs, "rb", filestart)
+    return ReadFileDataWithContentToList(fp, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)


-def ReadInMultipleFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+def ReadInMultipleFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
     if(isinstance(infile, (list, tuple, ))):
         pass
     else:
         infile = [infile]
     outretval = {}
     for curfname in infile:
-        curretfile[curfname] = ReadInFileWithContentToList(curfname, fmttype, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+        curretfile[curfname] = ReadInFileWithContentToList(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
     return outretval

-def ReadInMultipleFilesWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
-    return ReadInMultipleFileWithContentToList(infile, fmttype, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+def ReadInMultipleFilesWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+    return ReadInMultipleFileWithContentToList(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)


 def AppendNullByte(indata, delimiter=__file_format_dict__['format_delimiter']):
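Note: from `ReadFileDataWithContent` down through the `ReadInMultipleFiles*` wrappers, every reader now threads a `filestart` offset instead of assuming the archive begins at byte 0. A hedged usage sketch (the container file name and offset are invented; only the keyword names come from the signatures above):

    # Sketch: reading an archive that is assumed to start 512 bytes into a container file.
    import pycatfile

    entries = pycatfile.ReadInFileWithContentToArray(
        "container.bin", fmttype="auto", filestart=512, listonly=True)
    if entries is not False:
        print(sorted(entries.keys()))  # returned dict layout depends on the archive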
@@ -3279,7 +3571,7 @@ def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype="crc3
     return fp


-def
+def MakeEmptyCatFilePointer(fp, fmttype=__file_format_default__, checksumtype="crc32", formatspecs=__file_format_multi_dict__):
     return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)


@@ -3310,11 +3602,11 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
         pass
     if(outfile == "-" or outfile is None):
         verbose = False
-        fp =
+        fp = MkTempFile()
     elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
         fp = outfile
-    elif(re.findall(
-        fp =
+    elif(re.findall(__upload_proto_support__, outfile)):
+        fp = MkTempFile()
     else:
         fbasename = os.path.splitext(outfile)[0]
         fextname = os.path.splitext(outfile)[1]
@@ -3349,7 +3641,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
         outvar = fp.read()
         fp.close()
         return outvar
-    elif(re.findall(
+    elif(re.findall(__upload_proto_support__, outfile)):
         fp = CompressOpenFileAlt(
             fp, compression, compressionlevel, compressionuselist, formatspecs)
         fp.seek(0, 0)
@@ -3362,7 +3654,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
     return True


-def
+def MakeEmptyCatFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="crc32", formatspecs=__file_format_dict__, returnfp=False):
     return MakeEmptyFile(outfile, "auto", compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, returnfp)


@@ -3646,16 +3938,16 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, filevalues=[], ext
         fwinattributes = format(int(0), 'x').lower()
         fcompression = ""
         fcsize = format(int(0), 'x').lower()
-        fcontents =
+        fcontents = MkTempFile()
         chunk_size = 1024
         fcencoding = "UTF-8"
         curcompression = "none"
         if not followlink and ftype in data_types:
             with open(fname, "rb") as fpc:
                 shutil.copyfileobj(fpc, fcontents)
-            typechecktest = CheckCompressionType(fcontents, closefp=False)
+            typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
             fcontents.seek(0, 0)
-            fcencoding = GetFileEncoding(fcontents, False)
+            fcencoding = GetFileEncoding(fcontents, 0, False)
             if(typechecktest is False and not compresswholefile):
                 fcontents.seek(0, 2)
                 ucfsize = fcontents.tell()
@@ -3665,7 +3957,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, filevalues=[], ext
                 ilmin = 0
                 ilcsize = []
                 while(ilmin < ilsize):
-                    cfcontents =
+                    cfcontents = MkTempFile()
                     fcontents.seek(0, 0)
                     shutil.copyfileobj(fcontents, cfcontents)
                     fcontents.seek(0, 0)
@@ -3682,7 +3974,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, filevalues=[], ext
                 ilcmin = ilcsize.index(min(ilcsize))
                 curcompression = compressionuselist[ilcmin]
                 fcontents.seek(0, 0)
-                cfcontents =
+                cfcontents = MkTempFile()
                 shutil.copyfileobj(fcontents, cfcontents)
                 cfcontents.seek(0, 0)
                 cfcontents = CompressOpenFileAlt(
@@ -3700,9 +3992,9 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, filevalues=[], ext
             flstatinfo = os.stat(flinkname)
             with open(flinkname, "rb") as fpc:
                 shutil.copyfileobj(fpc, fcontents)
-            typechecktest = CheckCompressionType(fcontents, closefp=False)
+            typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
             fcontents.seek(0, 0)
-            fcencoding = GetFileEncoding(fcontents, False)
+            fcencoding = GetFileEncoding(fcontents, 0, False)
             if(typechecktest is False and not compresswholefile):
                 fcontents.seek(0, 2)
                 ucfsize = fcontents.tell()
@@ -3712,7 +4004,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, filevalues=[], ext
                 ilmin = 0
                 ilcsize = []
                 while(ilmin < ilsize):
-                    cfcontents =
+                    cfcontents = MkTempFile()
                     fcontents.seek(0, 0)
                     shutil.copyfileobj(fcontents, cfcontents)
                     fcontents.seek(0, 0)
@@ -3729,7 +4021,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, filevalues=[], ext
                 ilcmin = ilcsize.index(min(ilcsize))
                 curcompression = compressionuselist[ilcmin]
                 fcontents.seek(0, 0)
-                cfcontents =
+                cfcontents = MkTempFile()
                 shutil.copyfileobj(fcontents, cfcontents)
                 cfcontents.seek(0, 0)
                 cfcontents = CompressOpenFileAlt(
@@ -3812,7 +4104,7 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, filevalues=[], extr
         fheaderchecksumtype = curfname[26]
         fcontentchecksumtype = curfname[27]
         fcontents = curfname[28]
-        fencoding = GetFileEncoding(fcontents, False)
+        fencoding = GetFileEncoding(fcontents, 0, False)
         tmpoutlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize,
                       fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev, fdev_minor, fdev_major, fseeknextfile]
         fcontents.seek(0, 0)
@@ -3859,11 +4151,11 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
         pass
     if(outfile == "-" or outfile is None):
         verbose = False
-        fp =
+        fp = MkTempFile()
     elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
         fp = outfile
-    elif(re.findall(
-        fp =
+    elif(re.findall(__upload_proto_support__, outfile)):
+        fp = MkTempFile()
     else:
         fbasename = os.path.splitext(outfile)[0]
         fextname = os.path.splitext(outfile)[1]
@@ -3899,7 +4191,7 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
         outvar = fp.read()
         fp.close()
         return outvar
-    elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
+    elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
         fp = CompressOpenFileAlt(
             fp, compression, compressionlevel, compressionuselist, formatspecs)
         fp.seek(0, 0)
@@ -3939,11 +4231,11 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
         pass
     if(outfile == "-" or outfile is None):
         verbose = False
-        fp =
+        fp = MkTempFile()
     elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
         fp = outfile
-    elif(re.findall(
-        fp =
+    elif(re.findall(__upload_proto_support__, outfile)):
+        fp = MkTempFile()
     else:
         fbasename = os.path.splitext(outfile)[0]
         fextname = os.path.splitext(outfile)[1]
@@ -3979,7 +4271,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
         outvar = fp.read()
         fp.close()
         return outvar
-    elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
+    elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
         fp = CompressOpenFileAlt(
             fp, compression, compressionlevel, compressionuselist, formatspecs)
         fp.seek(0, 0)
@@ -4060,10 +4352,11 @@ def GzipCompressData(data, compresslevel=9):
         compressed_data = gzip.compress(data, compresslevel=compresslevel)
     except AttributeError:
         # Fallback to older method for Python 2.x and older 3.x versions
-        out =
+        out = MkTempFile()
         with gzip.GzipFile(fileobj=out, mode="wb", compresslevel=compresslevel) as f:
             f.write(data)
-
+        out.seek(0, 0)
+        compressed_data = out.read()
     return compressed_data


@@ -4073,7 +4366,7 @@ def GzipDecompressData(compressed_data):
         decompressed_data = gzip.decompress(compressed_data)
     except AttributeError:
         # Fallback to older method for Python 2.x and older 3.x versions
-        inp =
+        inp = MkTempFile(compressed_data)
         with gzip.GzipFile(fileobj=inp, mode="rb") as f:
             decompressed_data = f.read()
     return decompressed_data
@@ -4161,7 +4454,7 @@ def IsSingleDict(variable):
     return True


-def GetFileEncoding(infile, closefp=True):
+def GetFileEncoding(infile, filestart=0, closefp=True):
     if(hasattr(infile, "read") or hasattr(infile, "write")):
         fp = infile
     else:
@@ -4170,19 +4463,19 @@ def GetFileEncoding(infile, closefp=True):
         except FileNotFoundError:
             return False
     file_encoding = "UTF-8"
-    fp.seek(
+    fp.seek(filestart, 0)
     prefp = fp.read(2)
     if(prefp == binascii.unhexlify("fffe")):
         file_encoding = "UTF-16LE"
     elif(prefp == binascii.unhexlify("feff")):
         file_encoding = "UTF-16BE"
-    fp.seek(
+    fp.seek(filestart, 0)
     prefp = fp.read(3)
     if(prefp == binascii.unhexlify("efbbbf")):
         file_encoding = "UTF-8"
     elif(prefp == binascii.unhexlify("0efeff")):
         file_encoding = "SCSU"
-    fp.seek(
+    fp.seek(filestart, 0)
     prefp = fp.read(4)
     if(prefp == binascii.unhexlify("fffe0000")):
         file_encoding = "UTF-32LE"
@@ -4198,21 +4491,21 @@ def GetFileEncoding(infile, closefp=True):
         file_encoding = "UTF-7"
     elif(prefp == binascii.unhexlify("2b2f762f")):
         file_encoding = "UTF-7"
-    fp.seek(
+    fp.seek(filestart, 0)
     if(closefp):
         fp.close()
     return file_encoding


-def GetFileEncodingFromString(instring, closefp=True):
+def GetFileEncodingFromString(instring, filestart=0, closefp=True):
     try:
-        instringsfile =
+        instringsfile = MkTempFile(instring)
     except TypeError:
-        instringsfile =
-    return GetFileEncoding(instringsfile, closefp)
+        instringsfile = MkTempFile(instring.encode("UTF-8"))
+    return GetFileEncoding(instringsfile, filestart, closefp)


-def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp=True):
+def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, filestart=0, closefp=True):
     if(hasattr(infile, "read") or hasattr(infile, "write")):
         fp = infile
     else:
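The change running through this release is a new `filestart` offset argument on the probe helpers: `GetFileEncoding` and `CheckCompressionType` now seek to a caller-supplied position (and rewind to it) instead of always rewinding to offset 0, so a container embedded partway into a stream can be inspected in place. A minimal usage sketch of the new signatures; the padding bytes, buffer, and file name below are illustrative assumptions, not taken from the package:

```python
# Hedged sketch, not part of the diff: exercising the 0.22.2 signatures shown above,
# GetFileEncoding(infile, filestart=0, closefp=True) and
# CheckCompressionType(infile, formatspecs, filestart=0, closefp=True).
import io
import pycatfile

with open("example.cat", "rb") as f:              # hypothetical archive file
    buf = io.BytesIO(b"tenbytepad" + f.read())    # archive starts at offset 10

enc = pycatfile.GetFileEncoding(buf, filestart=10, closefp=False)
kind = pycatfile.CheckCompressionType(buf, filestart=10, closefp=False)
print(enc, kind)
```

Passing `closefp=False` mirrors how the packing code above keeps reusing the same buffer after probing it.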
@@ -4221,7 +4514,8 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
         except FileNotFoundError:
             return False
     filetype = False
-
+    curloc = filestart
+    fp.seek(filestart, 0)
     prefp = fp.read(2)
     if(prefp == binascii.unhexlify("1f8b")):
         filetype = "gzip"
@@ -4237,13 +4531,13 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
         filetype = "zlib"
     elif(prefp == binascii.unhexlify("1f9d")):
         filetype = "zcompress"
-    fp.seek(
+    fp.seek(curloc, 0)
     prefp = fp.read(3)
     if(prefp == binascii.unhexlify("425a68")):
         filetype = "bzip2"
     elif(prefp == binascii.unhexlify("5d0000")):
         filetype = "lzma"
-    fp.seek(
+    fp.seek(curloc, 0)
     prefp = fp.read(4)
     if(prefp == binascii.unhexlify("28b52ffd")):
         filetype = "zstd"
@@ -4255,29 +4549,29 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
         filetype = "zipfile"
     elif(prefp == binascii.unhexlify("504b0708")):
         filetype = "zipfile"
-    fp.seek(
+    fp.seek(curloc, 0)
     prefp = fp.read(5)
     if(prefp == binascii.unhexlify("7573746172")):
         filetype = "tarfile"
     if(prefp == binascii.unhexlify("7573746172")):
         filetype = "tarfile"
-    fp.seek(
+    fp.seek(curloc, 0)
     prefp = fp.read(6)
     if(prefp == binascii.unhexlify("fd377a585a00")):
         filetype = "xz"
     elif(prefp == binascii.unhexlify("377abcaf271c")):
         filetype = "7zipfile"
-    fp.seek(
+    fp.seek(curloc, 0)
     prefp = fp.read(7)
     if(prefp == binascii.unhexlify("526172211a0700")):
         filetype = "rarfile"
     elif(prefp == binascii.unhexlify("2a2a4143452a2a")):
         filetype = "ace"
-    fp.seek(
+    fp.seek(curloc, 0)
     prefp = fp.read(7)
     if(prefp == binascii.unhexlify("894c5a4f0d0a1a")):
         filetype = "lzo"
-    fp.seek(
+    fp.seek(curloc, 0)
     prefp = fp.read(8)
     if(prefp == binascii.unhexlify("7573746172003030")):
         filetype = "tarfile"
@@ -4285,7 +4579,7 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
         filetype = "tarfile"
     if(prefp == binascii.unhexlify("526172211a070100")):
         filetype = "rarfile"
-    fp.seek(
+    fp.seek(curloc, 0)
     if(IsNestedDict(formatspecs)):
         for key, value in formatspecs.items():
             prefp = fp.read(formatspecs[key]['format_len'])
@@ -4301,7 +4595,7 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
             if(formstring == inheaderver and formdel == formatspecs[key]['format_delimiter']):
                 filetype = formatspecs[key]['format_magic']
                 continue
-            fp.seek(
+            fp.seek(curloc, 0)
     elif(IsSingleDict(formatspecs)):
         prefp = fp.read(formatspecs['format_len'])
         if(prefp == binascii.unhexlify(formatspecs['format_hex'])):
@@ -4316,15 +4610,15 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
             filetype = formatspecs['format_magic']
         else:
             pass
-    fp.seek(
+    fp.seek(curloc, 0)
     prefp = fp.read(9)
     if(prefp == binascii.unhexlify("894c5a4f000d0a1a0a")):
         filetype = "lzo"
-    fp.seek(
+    fp.seek(curloc, 0)
     prefp = fp.read(10)
     if(prefp == binascii.unhexlify("7061785f676c6f62616c")):
         filetype = "tarfile"
-    fp.seek(
+    fp.seek(curloc, 0)
     if(filetype == "gzip" or filetype == "bzip2" or filetype == "lzma" or filetype == "zstd" or filetype == "lz4" or filetype == "zlib"):
         if(TarFileCheck(fp)):
             filetype = "tarfile"
@@ -4339,14 +4633,15 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
             return "7zipfile"
         else:
             filetype = False
-    fp.seek(
+    fp.seek(curloc, 0)
     if(closefp):
         fp.close()
     return filetype


-def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, closefp=True):
-    compresscheck = CheckCompressionType(infile, formatspecs, False)
+def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, filestart=0, closefp=True):
+    compresscheck = CheckCompressionType(infile, formatspecs, filestart, False)
+    curloc = filestart
     if(not compresscheck):
         fextname = os.path.splitext(infile)[1]
         if(fextname == ".gz"):
@@ -4395,7 +4690,7 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, clos
     elif(py7zr_support and compresscheck == "7zipfile" and py7zr.is_7zfile(infile)):
         return "7zipfile"
     if(hasattr(infile, "read") or hasattr(infile, "write")):
-        fp = UncompressFileAlt(infile, formatspecs)
+        fp = UncompressFileAlt(infile, formatspecs, filestart)
     else:
         try:
             if(compresscheck == "gzip" and compresscheck in compressionsupport):
@@ -4425,10 +4720,11 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, clos
         except FileNotFoundError:
             return False
     filetype = False
+    fp.seek(filestart, 0)
     prefp = fp.read(5)
     if(prefp == binascii.unhexlify("7573746172")):
         filetype = "tarfile"
-    fp.seek(
+    fp.seek(curloc, 0)
     if(IsNestedDict(formatspecs)):
         for key, value in formatspecs.items():
             prefp = fp.read(formatspecs[key]['format_len'])
@@ -4444,7 +4740,7 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, clos
             if(formstring == inheaderver and formdel == formatspecs[key]['format_delimiter']):
                 filetype = formatspecs[key]['format_magic']
                 continue
-            fp.seek(
+            fp.seek(curloc, 0)
     elif(IsSingleDict(formatspecs)):
         prefp = fp.read(formatspecs['format_len'])
         if(prefp == binascii.unhexlify(formatspecs['format_hex'])):
@@ -4459,36 +4755,36 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, clos
             filetype = formatspecs['format_magic']
         else:
             pass
-    fp.seek(
+    fp.seek(curloc, 0)
     prefp = fp.read(10)
     if(prefp == binascii.unhexlify("7061785f676c6f62616c")):
         filetype = "tarfile"
-    fp.seek(
+    fp.seek(curloc, 0)
     if(closefp):
         fp.close()
     return filetype


-def CheckCompressionTypeFromString(instring, formatspecs=__file_format_multi_dict__, closefp=True):
+def CheckCompressionTypeFromString(instring, formatspecs=__file_format_multi_dict__, filestart=0, closefp=True):
     try:
-        instringsfile =
+        instringsfile = MkTempFile(instring)
     except TypeError:
-        instringsfile =
-    return CheckCompressionType(instringsfile, formatspecs, closefp)
+        instringsfile = MkTempFile(instring.encode("UTF-8"))
+    return CheckCompressionType(instringsfile, formatspecs, filestart, closefp)


-def CheckCompressionTypeFromBytes(instring, formatspecs=__file_format_multi_dict__, closefp=True):
+def CheckCompressionTypeFromBytes(instring, formatspecs=__file_format_multi_dict__, filestart=0, closefp=True):
     try:
-        instringsfile =
+        instringsfile = MkTempFile(instring)
     except TypeError:
-        instringsfile =
-    return CheckCompressionType(instringsfile, formatspecs, closefp)
+        instringsfile = MkTempFile(instring.decode("UTF-8"))
+    return CheckCompressionType(instringsfile, formatspecs, filestart, closefp)


-def UncompressFileAlt(fp, formatspecs=__file_format_multi_dict__):
+def UncompressFileAlt(fp, formatspecs=__file_format_multi_dict__, filestart=0):
     if(not hasattr(fp, "read")):
         return False
-    compresscheck = CheckCompressionType(fp, formatspecs, False)
+    compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
     if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
         formatspecs = formatspecs[compresscheck]
     if(compresscheck == "gzip" and compresscheck in compressionsupport):
@@ -4522,8 +4818,8 @@ def UncompressFileAlt(fp, formatspecs=__file_format_multi_dict__):
     return fp


-def UncompressFile(infile, formatspecs=__file_format_multi_dict__, mode="rb"):
-    compresscheck = CheckCompressionType(infile, formatspecs, False)
+def UncompressFile(infile, formatspecs=__file_format_multi_dict__, mode="rb", filestart=0):
+    compresscheck = CheckCompressionType(infile, formatspecs, filestart, False)
     if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
         formatspecs = formatspecs[compresscheck]
     if(sys.version_info[0] == 2 and compresscheck):
@@ -4569,8 +4865,8 @@ def UncompressFile(infile, formatspecs=__file_format_multi_dict__, mode="rb"):
     return filefp


-def UncompressString(infile, formatspecs=__file_format_multi_dict__):
-    compresscheck = CheckCompressionTypeFromString(infile, formatspecs, False)
+def UncompressString(infile, formatspecs=__file_format_multi_dict__, filestart=0):
+    compresscheck = CheckCompressionTypeFromString(infile, formatspecs, filestart, False)
     if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
         formatspecs = formatspecs[compresscheck]
     if(compresscheck == "gzip" and compresscheck in compressionsupport):
@@ -4597,32 +4893,32 @@ def UncompressString(infile, formatspecs=__file_format_multi_dict__):
     return fileuz


-def UncompressStringAlt(instring, formatspecs=__file_format_multi_dict__):
+def UncompressStringAlt(instring, formatspecs=__file_format_multi_dict__, filestart=0):
     filefp = StringIO()
-    outstring = UncompressString(instring, formatspecs)
+    outstring = UncompressString(instring, formatspecs, filestart)
     filefp.write(outstring)
     filefp.seek(0, 0)
     return filefp

-def UncompressStringAltFP(fp, formatspecs=__file_format_multi_dict__):
+def UncompressStringAltFP(fp, formatspecs=__file_format_multi_dict__, filestart=0):
     if(not hasattr(fp, "read")):
         return False
-    prechck = CheckCompressionType(fp, formatspecs, False)
+    prechck = CheckCompressionType(fp, formatspecs, filestart, False)
     if(IsNestedDict(formatspecs) and prechck in formatspecs):
         formatspecs = formatspecs[prechck]
-    fp.seek(
+    fp.seek(filestart, 0)
     if(prechck!="zstd"):
-        return UncompressFileAlt(fp, formatspecs)
+        return UncompressFileAlt(fp, formatspecs, filestart)
     filefp = StringIO()
-    fp.seek(
-    outstring = UncompressString(fp.read(), formatspecs)
+    fp.seek(filestart, 0)
+    outstring = UncompressString(fp.read(), formatspecs, 0)
     filefp.write(outstring)
     filefp.seek(0, 0)
     return filefp


-def UncompressBytes(infile, formatspecs=__file_format_multi_dict__):
-    compresscheck = CheckCompressionTypeFromBytes(infile, formatspecs, False)
+def UncompressBytes(infile, formatspecs=__file_format_multi_dict__, filestart=0):
+    compresscheck = CheckCompressionTypeFromBytes(infile, formatspecs, filestart, False)
     if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
         formatspecs = formatspecs[compresscheck]
     if(compresscheck == "gzip" and compresscheck in compressionsupport):
@@ -4647,26 +4943,26 @@ def UncompressBytes(infile, formatspecs=__file_format_multi_dict__):
     return fileuz


-def UncompressBytesAlt(inbytes, formatspecs=__file_format_multi_dict__):
-    filefp =
-    outstring = UncompressBytes(inbytes, formatspecs)
+def UncompressBytesAlt(inbytes, formatspecs=__file_format_multi_dict__, filestart=0):
+    filefp = MkTempFile()
+    outstring = UncompressBytes(inbytes, formatspecs, filestart)
     filefp.write(outstring)
     filefp.seek(0, 0)
     return filefp


-def UncompressBytesAltFP(fp, formatspecs=__file_format_multi_dict__):
+def UncompressBytesAltFP(fp, formatspecs=__file_format_multi_dict__, filestart=0):
     if(not hasattr(fp, "read")):
         return False
-    prechck = CheckCompressionType(fp, formatspecs, False)
+    prechck = CheckCompressionType(fp, formatspecs, filestart, False)
     if(IsNestedDict(formatspecs) and prechck in formatspecs):
         formatspecs = formatspecs[prechck]
-    fp.seek(
+    fp.seek(filestart, 0)
     if(prechck!="zstd"):
-        return UncompressFileAlt(fp, formatspecs)
-    filefp =
-    fp.seek(
-    outstring = UncompressBytes(fp.read(), formatspecs)
+        return UncompressFileAlt(fp, formatspecs, filestart)
+    filefp = MkTempFile()
+    fp.seek(filestart, 0)
+    outstring = UncompressBytes(fp.read(), formatspecs, 0)
     filefp.write(outstring)
     filefp.seek(0, 0)
     return filefp
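The same `filestart` parameter is threaded through the string and bytes decompression wrappers above, and their scratch buffers are now allocated with `MkTempFile()`. A small, hedged example of calling them, assuming a gzip-enabled build and that the helpers are importable from the module as shown; the payload is fabricated for illustration:

```python
# Hedged sketch, not part of the diff: the 0.22.2 helpers accept an optional filestart.
import gzip
import pycatfile

payload = gzip.compress(b"hello from pycatfile")
out = pycatfile.UncompressBytesAlt(payload)        # filestart defaults to 0
print(out.read())                                  # expected: b'hello from pycatfile'
```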
@@ -4681,7 +4977,7 @@ def CompressOpenFileAlt(fp, compression="auto", compressionlevel=None, compressi
     if(compression not in compressionuselist and compression is None):
         compression = "auto"
     if(compression == "gzip" and compression in compressionsupport):
-        bytesfp =
+        bytesfp = MkTempFile()
         if(compressionlevel is None):
             compressionlevel = 9
         else:
@@ -4689,7 +4985,7 @@ def CompressOpenFileAlt(fp, compression="auto", compressionlevel=None, compressi
         bytesfp.write(GzipCompressData(
             fp.read(), compresslevel=compressionlevel))
     elif(compression == "bzip2" and compression in compressionsupport):
-        bytesfp =
+        bytesfp = MkTempFile()
         if(compressionlevel is None):
             compressionlevel = 9
         else:
@@ -4697,7 +4993,7 @@ def CompressOpenFileAlt(fp, compression="auto", compressionlevel=None, compressi
         bytesfp.write(BzipCompressData(
             fp.read(), compresslevel=compressionlevel))
     elif(compression == "lz4" and compression in compressionsupport):
-        bytesfp =
+        bytesfp = MkTempFile()
         if(compressionlevel is None):
             compressionlevel = 9
         else:
@@ -4705,14 +5001,14 @@ def CompressOpenFileAlt(fp, compression="auto", compressionlevel=None, compressi
         bytesfp.write(lz4.frame.compress(
             fp.read(), compression_level=compressionlevel))
     elif((compression == "lzo" or compression == "lzop") and compression in compressionsupport):
-        bytesfp =
+        bytesfp = MkTempFile()
         if(compressionlevel is None):
             compressionlevel = 9
         else:
             compressionlevel = int(compressionlevel)
         bytesfp.write(lzo.compress(fp.read(), compressionlevel))
     elif(compression == "zstd" and compression in compressionsupport):
-        bytesfp =
+        bytesfp = MkTempFile()
         if(compressionlevel is None):
             compressionlevel = 9
         else:
@@ -4720,7 +5016,7 @@ def CompressOpenFileAlt(fp, compression="auto", compressionlevel=None, compressi
         compressor = zstandard.ZstdCompressor(compressionlevel, threads=get_default_threads())
         bytesfp.write(compressor.compress(fp.read()))
     elif(compression == "lzma" and compression in compressionsupport):
-        bytesfp =
+        bytesfp = MkTempFile()
         if(compressionlevel is None):
             compressionlevel = 9
         else:
@@ -4730,7 +5026,7 @@ def CompressOpenFileAlt(fp, compression="auto", compressionlevel=None, compressi
         except (NotImplementedError, lzma.LZMAError):
             bytesfp.write(lzma.compress(fp.read(), format=lzma.FORMAT_ALONE))
     elif(compression == "xz" and compression in compressionsupport):
-        bytesfp =
+        bytesfp = MkTempFile()
         if(compressionlevel is None):
             compressionlevel = 9
         else:
@@ -4740,7 +5036,7 @@ def CompressOpenFileAlt(fp, compression="auto", compressionlevel=None, compressi
         except (NotImplementedError, lzma.LZMAError):
             bytesfp.write(lzma.compress(fp.read(), format=lzma.FORMAT_XZ))
     elif(compression == "zlib" and compression in compressionsupport):
-        bytesfp =
+        bytesfp = MkTempFile()
         if(compressionlevel is None):
             compressionlevel = 9
         else:
@@ -4910,11 +5206,11 @@ def PackCatFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compress
         pass
     if(outfile == "-" or outfile is None):
         verbose = False
-        fp =
+        fp = MkTempFile()
     elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
         fp = outfile
-    elif(re.findall(
-        fp =
+    elif(re.findall(__upload_proto_support__, outfile)):
+        fp = MkTempFile()
     else:
         fbasename = os.path.splitext(outfile)[0]
         fextname = os.path.splitext(outfile)[1]
@@ -5109,15 +5405,15 @@ def PackCatFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compress
         fwinattributes = format(int(0), 'x').lower()
         fcompression = ""
         fcsize = format(int(0), 'x').lower()
-        fcontents =
+        fcontents = MkTempFile()
         fcencoding = "UTF-8"
         curcompression = "none"
         if not followlink and ftype in data_types:
             with open(fname, "rb") as fpc:
                 shutil.copyfileobj(fpc, fcontents)
-            typechecktest = CheckCompressionType(fcontents, closefp=False)
+            typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
             fcontents.seek(0, 0)
-            fcencoding = GetFileEncoding(fcontents, False)
+            fcencoding = GetFileEncoding(fcontents, 0, False)
             if(typechecktest is False and not compresswholefile):
                 fcontents.seek(0, 2)
                 ucfsize = fcontents.tell()
@@ -5127,7 +5423,7 @@ def PackCatFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compress
                 ilmin = 0
                 ilcsize = []
                 while(ilmin < ilsize):
-                    cfcontents =
+                    cfcontents = MkTempFile()
                     fcontents.seek(0, 0)
                     shutil.copyfileobj(fcontents, cfcontents)
                     fcontents.seek(0, 0)
@@ -5144,7 +5440,7 @@ def PackCatFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compress
                 ilcmin = ilcsize.index(min(ilcsize))
                 curcompression = compressionuselist[ilcmin]
                 fcontents.seek(0, 0)
-                cfcontents =
+                cfcontents = MkTempFile()
                 shutil.copyfileobj(fcontents, cfcontents)
                 cfcontents.seek(0, 0)
                 cfcontents = CompressOpenFileAlt(
@@ -5162,9 +5458,9 @@ def PackCatFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compress
             flstatinfo = os.stat(flinkname)
             with open(flinkname, "rb") as fpc:
                 shutil.copyfileobj(fpc, fcontents)
-            typechecktest = CheckCompressionType(fcontents, closefp=False)
+            typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
             fcontents.seek(0, 0)
-            fcencoding = GetFileEncoding(fcontents, False)
+            fcencoding = GetFileEncoding(fcontents, 0, False)
             if(typechecktest is False and not compresswholefile):
                 fcontents.seek(0, 2)
                 ucfsize = fcontents.tell()
@@ -5174,7 +5470,7 @@ def PackCatFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compress
                 ilmin = 0
                 ilcsize = []
                 while(ilmin < ilsize):
-                    cfcontents =
+                    cfcontents = MkTempFile()
                     fcontents.seek(0, 0)
                     shutil.copyfileobj(fcontents, cfcontents)
                     fcontents.seek(0, 0)
@@ -5191,7 +5487,7 @@ def PackCatFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compress
                 ilcmin = ilcsize.index(min(ilcsize))
                 curcompression = compressionuselist[ilcmin]
                 fcontents.seek(0, 0)
-                cfcontents =
+                cfcontents = MkTempFile()
                 shutil.copyfileobj(fcontents, cfcontents)
                 cfcontents.seek(0, 0)
                 cfcontents = CompressOpenFileAlt(
@@ -5242,7 +5538,7 @@ def PackCatFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compress
         outvar = fp.read()
         fp.close()
         return outvar
-    elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
+    elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
         fp = CompressOpenFileAlt(
             fp, compression, compressionlevel, compressionuselist, formatspecs)
         fp.seek(0, 0)
@@ -5294,11 +5590,11 @@ def PackCatFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
         pass
     if(outfile == "-" or outfile is None):
         verbose = False
-        fp =
+        fp = MkTempFile()
     elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
         fp = outfile
-    elif(re.findall(
-        fp =
+    elif(re.findall(__upload_proto_support__, outfile)):
+        fp = MkTempFile()
     else:
         fbasename = os.path.splitext(outfile)[0]
         fextname = os.path.splitext(outfile)[1]
@@ -5317,7 +5613,7 @@ def PackCatFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
     filetoinode = {}
     inodetoforminode = {}
     if(infile == "-"):
-        infile =
+        infile = MkTempFile()
         if(hasattr(sys.stdin, "buffer")):
             shutil.copyfileobj(sys.stdin.buffer, infile)
         else:
@@ -5326,7 +5622,7 @@ def PackCatFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
         if(not infile):
             return False
         infile.seek(0, 0)
-    elif(re.findall(
+    elif(re.findall(__download_proto_support__, infile)):
         infile = download_file_from_internet_file(infile)
         infile.seek(0, 0)
         if(not infile):
@@ -5350,7 +5646,7 @@ def PackCatFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
             return False
     try:
         if(hasattr(infile, "read") or hasattr(infile, "write")):
-            compresscheck = CheckCompressionType(infile, formatspecs, False)
+            compresscheck = CheckCompressionType(infile, formatspecs, 0, False)
             if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
                 formatspecs = formatspecs[compresscheck]
             if(compresscheck=="zstd"):
@@ -5362,7 +5658,7 @@ def PackCatFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
             else:
                 tarfp = tarfile.open(fileobj=infile, mode="r")
         else:
-            compresscheck = CheckCompressionType(infile, formatspecs, True)
+            compresscheck = CheckCompressionType(infile, formatspecs, 0, True)
             if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
                 formatspecs = formatspecs[compresscheck]
             if(compresscheck=="zstd"):
@@ -5456,16 +5752,16 @@ def PackCatFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
         fwinattributes = format(int(0), 'x').lower()
         fcompression = ""
         fcsize = format(int(0), 'x').lower()
-        fcontents =
+        fcontents = MkTempFile()
         fcencoding = "UTF-8"
         curcompression = "none"
         if ftype in data_types:
             fpc = tarfp.extractfile(member)
-            fpc.close()
             shutil.copyfileobj(fpc, fcontents)
-
+            fpc.close()
+            typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
             fcontents.seek(0, 0)
-            fcencoding = GetFileEncoding(fcontents, False)
+            fcencoding = GetFileEncoding(fcontents, 0, False)
             if(typechecktest is False and not compresswholefile):
                 fcontents.seek(0, 2)
                 ucfsize = fcontents.tell()
@@ -5475,7 +5771,7 @@ def PackCatFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
                 ilmin = 0
                 ilcsize = []
                 while(ilmin < ilsize):
-                    cfcontents =
+                    cfcontents = MkTempFile()
                     fcontents.seek(0, 0)
                     shutil.copyfileobj(fcontents, cfcontents)
                     fcontents.seek(0, 0)
@@ -5492,7 +5788,7 @@ def PackCatFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
                 ilcmin = ilcsize.index(min(ilcsize))
                 curcompression = compressionuselist[ilcmin]
                 fcontents.seek(0, 0)
-                cfcontents =
+                cfcontents = MkTempFile()
                 shutil.copyfileobj(fcontents, cfcontents)
                 cfcontents.seek(0, 0)
                 cfcontents = CompressOpenFileAlt(
@@ -5543,7 +5839,7 @@ def PackCatFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
         outvar = fp.read()
         fp.close()
         return outvar
-    elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
+    elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
         fp = CompressOpenFileAlt(
             fp, compression, compressionlevel, compressionuselist, formatspecs)
         fp.seek(0, 0)
@@ -5591,11 +5887,11 @@ def PackCatFileFromZipFile(infile, outfile, fmttype="auto", compression="auto",
         pass
     if(outfile == "-" or outfile is None):
         verbose = False
-        fp =
+        fp = MkTempFile()
     elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
         fp = outfile
-    elif(re.findall(
-        fp =
+    elif(re.findall(__upload_proto_support__, outfile)):
+        fp = MkTempFile()
     else:
         fbasename = os.path.splitext(outfile)[0]
         fextname = os.path.splitext(outfile)[1]
@@ -5614,7 +5910,7 @@ def PackCatFileFromZipFile(infile, outfile, fmttype="auto", compression="auto",
     filetoinode = {}
     inodetoforminode = {}
     if(infile == "-"):
-        infile =
+        infile = MkTempFile()
         if(hasattr(sys.stdin, "buffer")):
             shutil.copyfileobj(sys.stdin.buffer, infile)
         else:
@@ -5623,7 +5919,7 @@ def PackCatFileFromZipFile(infile, outfile, fmttype="auto", compression="auto",
         if(not infile):
             return False
         infile.seek(0, 0)
-    elif(re.findall(
+    elif(re.findall(__download_proto_support__, infile)):
         infile = download_file_from_internet_file(infile)
         infile.seek(0, 0)
         if(not infile):
@@ -5755,14 +6051,14 @@ def PackCatFileFromZipFile(infile, outfile, fmttype="auto", compression="auto",
                 fgname = ""
         except ImportError:
             fgname = ""
-        fcontents =
+        fcontents = MkTempFile()
         fcencoding = "UTF-8"
         curcompression = "none"
         if ftype == 0:
             fcontents.write(zipfp.read(member.filename))
-            typechecktest = CheckCompressionType(fcontents, closefp=False)
+            typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
             fcontents.seek(0, 0)
-            fcencoding = GetFileEncoding(fcontents, False)
+            fcencoding = GetFileEncoding(fcontents, 0, False)
             if(typechecktest is False and not compresswholefile):
                 fcontents.seek(0, 2)
                 ucfsize = fcontents.tell()
@@ -5772,7 +6068,7 @@ def PackCatFileFromZipFile(infile, outfile, fmttype="auto", compression="auto",
                 ilmin = 0
                 ilcsize = []
                 while(ilmin < ilsize):
-                    cfcontents =
+                    cfcontents = MkTempFile()
                     fcontents.seek(0, 0)
                     shutil.copyfileobj(fcontents, cfcontents)
                     fcontents.seek(0, 0)
@@ -5786,7 +6082,7 @@ def PackCatFileFromZipFile(infile, outfile, fmttype="auto", compression="auto",
                 ilcmin = ilcsize.index(min(ilcsize))
                 curcompression = compressionuselist[ilcmin]
                 fcontents.seek(0, 0)
-                cfcontents =
+                cfcontents = MkTempFile()
                 shutil.copyfileobj(fcontents, cfcontents)
                 cfcontents.seek(0, 0)
                 cfcontents = CompressOpenFileAlt(
@@ -5837,7 +6133,7 @@ def PackCatFileFromZipFile(infile, outfile, fmttype="auto", compression="auto",
         outvar = fp.read()
         fp.close()
         return outvar
-    elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
+    elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
         fp = CompressOpenFileAlt(
             fp, compression, compressionlevel, compressionuselist, formatspecs)
         fp.seek(0, 0)
@@ -5890,11 +6186,11 @@ if(rarfile_support):
             pass
         if(outfile == "-" or outfile is None):
             verbose = False
-            fp =
+            fp = MkTempFile()
         elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
             fp = outfile
-        elif(re.findall(
-            fp =
+        elif(re.findall(__upload_proto_support__, outfile)):
+            fp = MkTempFile()
         else:
             fbasename = os.path.splitext(outfile)[0]
             fextname = os.path.splitext(outfile)[1]
@@ -6072,14 +6368,14 @@ if(rarfile_support):
                     fgname = ""
             except ImportError:
                 fgname = ""
-            fcontents =
+            fcontents = MkTempFile()
             fcencoding = "UTF-8"
             curcompression = "none"
             if ftype == 0:
                 fcontents.write(rarfp.read(member.filename))
-                typechecktest = CheckCompressionType(fcontents, closefp=False)
+                typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
                 fcontents.seek(0, 0)
-                fcencoding = GetFileEncoding(fcontents, False)
+                fcencoding = GetFileEncoding(fcontents, 0, False)
                 if(typechecktest is False and not compresswholefile):
                     fcontents.seek(0, 2)
                     ucfsize = fcontents.tell()
@@ -6089,7 +6385,7 @@ if(rarfile_support):
                     ilmin = 0
                     ilcsize = []
                     while(ilmin < ilsize):
-                        cfcontents =
+                        cfcontents = MkTempFile()
                         fcontents.seek(0, 0)
                         shutil.copyfileobj(fcontents, cfcontents)
                         fcontents.seek(0, 0)
@@ -6106,7 +6402,7 @@ if(rarfile_support):
                     ilcmin = ilcsize.index(min(ilcsize))
                     curcompression = compressionuselist[ilcmin]
                     fcontents.seek(0, 0)
-                    cfcontents =
+                    cfcontents = MkTempFile()
                     shutil.copyfileobj(fcontents, cfcontents)
                     cfcontents.seek(0, 0)
                     cfcontents = CompressOpenFileAlt(
@@ -6157,7 +6453,7 @@ if(rarfile_support):
            outvar = fp.read()
            fp.close()
            return outvar
-        elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
+        elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
            fp = CompressOpenFileAlt(
                fp, compression, compressionlevel, compressionuselist, formatspecs)
            fp.seek(0, 0)
@@ -6210,11 +6506,11 @@ if(py7zr_support):
             pass
         if(outfile == "-" or outfile is None):
             verbose = False
-            fp =
+            fp = MkTempFile()
         elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
             fp = outfile
-        elif(re.findall(
-            fp =
+        elif(re.findall(__upload_proto_support__, outfile)):
+            fp = MkTempFile()
         else:
             fbasename = os.path.splitext(outfile)[0]
             fextname = os.path.splitext(outfile)[1]
@@ -6236,7 +6532,7 @@ if(py7zr_support):
             return False
         szpfp = py7zr.SevenZipFile(infile, mode="r")
         file_content = szpfp.readall()
-        #sztest = szpfp.testzip()
+        #sztest = szpfp.testzip()
         sztestalt = szpfp.test()
         if(sztestalt):
             VerbosePrintOut("Bad file found!")
@@ -6323,16 +6619,16 @@ if(py7zr_support):
                     fgname = ""
             except ImportError:
                 fgname = ""
-            fcontents =
+            fcontents = MkTempFile()
             fcencoding = "UTF-8"
             curcompression = "none"
             if ftype == 0:
                 fcontents.write(file_content[member.filename].read())
                 fsize = format(fcontents.tell(), 'x').lower()
                 fcontents.seek(0, 0)
-                typechecktest = CheckCompressionType(fcontents, closefp=False)
+                typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
                 fcontents.seek(0, 0)
-                fcencoding = GetFileEncoding(fcontents, False)
+                fcencoding = GetFileEncoding(fcontents, 0, False)
                 file_content[member.filename].close()
                 if(typechecktest is False and not compresswholefile):
                     fcontents.seek(0, 2)
@@ -6343,7 +6639,7 @@ if(py7zr_support):
                     ilmin = 0
                     ilcsize = []
                     while(ilmin < ilsize):
-                        cfcontents =
+                        cfcontents = MkTempFile()
                         fcontents.seek(0, 0)
                         shutil.copyfileobj(fcontents, cfcontents)
                         fcontents.seek(0, 0)
@@ -6360,7 +6656,7 @@ if(py7zr_support):
                     ilcmin = ilcsize.index(min(ilcsize))
                     curcompression = compressionuselist[ilcmin]
                     fcontents.seek(0, 0)
-                    cfcontents =
+                    cfcontents = MkTempFile()
                     shutil.copyfileobj(fcontents, cfcontents)
                     cfcontents.seek(0, 0)
                     cfcontents = CompressOpenFileAlt(
@@ -6411,7 +6707,7 @@ if(py7zr_support):
            outvar = fp.read()
            fp.close()
            return outvar
-        elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
+        elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
            fp = CompressOpenFileAlt(
                fp, compression, compressionlevel, compressionuselist, formatspecs)
            fp.seek(0, 0)
@@ -6425,7 +6721,7 @@ if(py7zr_support):


 def PackCatFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
-    checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
+    checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
     if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
         formatspecs = formatspecs[checkcompressfile]
     if(verbose):
@@ -6445,18 +6741,20 @@ def PackCatFileFromInFile(infile, outfile, fmttype="auto", compression="auto", c
         return False


-def
+def CatFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
+    if(verbose):
+        logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
     if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
         formatspecs = formatspecs[fmttype]
     elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
         fmttype = "auto"
-    curloc =
+    curloc = filestart
     if(hasattr(infile, "read") or hasattr(infile, "write")):
         curloc = infile.tell()
         fp = infile
-        fp.seek(
-        fp = UncompressFileAlt(fp, formatspecs)
-        checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
+        fp.seek(filestart, 0)
+        fp = UncompressFileAlt(fp, formatspecs, filestart)
+        checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
         if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
             formatspecs = formatspecs[checkcompressfile]
         if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
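The new `CatFileValidate` entry point takes `filestart` third, ahead of `formatspecs`, and wires that offset into every seek and sub-check, so validation can start at an arbitrary position inside a larger file. A hedged usage sketch; the archive name and the assumption that a truthy return means success are ours, not stated by the package:

```python
# Hedged sketch, not part of the diff: calling the CatFileValidate() signature added above.
import pycatfile

result = pycatfile.CatFileValidate("backup.cat", "auto", 0, verbose=True)  # hypothetical file
print("archive looks valid" if result else "validation failed")          # truthy-return assumption
```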
@@ -6473,45 +6771,45 @@ def CatFileSeekToFileNum(infile, fmttype="auto", seekto=0, listonly=False, conte
|
|
|
6473
6771
|
return False
|
|
6474
6772
|
if(not fp):
|
|
6475
6773
|
return False
|
|
6476
|
-
fp.seek(
|
|
6774
|
+
fp.seek(filestart, 0)
|
|
6477
6775
|
elif(infile == "-"):
|
|
6478
|
-
fp =
|
|
6776
|
+
fp = MkTempFile()
|
|
6479
6777
|
if(hasattr(sys.stdin, "buffer")):
|
|
6480
6778
|
shutil.copyfileobj(sys.stdin.buffer, fp)
|
|
6481
6779
|
else:
|
|
6482
6780
|
shutil.copyfileobj(sys.stdin, fp)
|
|
6483
|
-
fp.seek(
|
|
6484
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
6485
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
|
|
6781
|
+
fp.seek(filestart, 0)
|
|
6782
|
+
fp = UncompressFileAlt(fp, formatspecs, filestart)
|
|
6783
|
+
checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
|
|
6486
6784
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
6487
6785
|
formatspecs = formatspecs[checkcompressfile]
|
|
6488
6786
|
if(not fp):
|
|
6489
6787
|
return False
|
|
6490
|
-
fp.seek(
|
|
6788
|
+
fp.seek(filestart, 0)
|
|
6491
6789
|
elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
|
|
6492
|
-
fp =
|
|
6790
|
+
fp = MkTempFile()
|
|
6493
6791
|
fp.write(infile)
|
|
6494
|
-
fp.seek(
|
|
6495
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
6496
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
6792
|
+
fp.seek(filestart, 0)
|
|
6793
|
+
fp = UncompressFileAlt(fp, formatspecs, filestart)
|
|
6794
|
+
compresscheck = CheckCompressionType(fp, formatspecs, 0, False)
|
|
6497
6795
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
6498
6796
|
formatspecs = formatspecs[compresscheck]
|
|
6499
6797
|
if(not fp):
|
|
6500
6798
|
return False
|
|
6501
|
-
fp.seek(
|
|
6502
|
-
elif(re.findall(
|
|
6799
|
+
fp.seek(filestart, 0)
|
|
6800
|
+
elif(re.findall(__download_proto_support__, infile)):
|
|
6503
6801
|
fp = download_file_from_internet_file(infile)
|
|
6504
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
6505
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
6802
|
+
fp = UncompressFileAlt(fp, formatspecs, filestart)
|
|
6803
|
+
compresscheck = CheckCompressionType(fp, formatspecs, 0, False)
|
|
6506
6804
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
6507
6805
|
formatspecs = formatspecs[compresscheck]
|
|
6508
|
-
fp.seek(
|
|
6806
|
+
fp.seek(filestart, 0)
|
|
6509
6807
|
if(not fp):
|
|
6510
6808
|
return False
|
|
6511
|
-
fp.seek(
|
|
6809
|
+
fp.seek(filestart, 0)
|
|
6512
6810
|
else:
|
|
6513
6811
|
infile = RemoveWindowsPath(infile)
|
|
6514
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
6812
|
+
checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
|
|
6515
6813
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
6516
6814
|
formatspecs = formatspecs[checkcompressfile]
|
|
6517
6815
|
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
@@ -6526,7 +6824,7 @@ def CatFileSeekToFileNum(infile, fmttype="auto", seekto=0, listonly=False, conte
|
|
|
6526
6824
|
return False
|
|
6527
6825
|
elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
|
|
6528
6826
|
return False
|
|
6529
|
-
compresscheck = CheckCompressionType(infile, formatspecs, True)
|
|
6827
|
+
compresscheck = CheckCompressionType(infile, formatspecs, filestart, True)
|
|
6530
6828
|
if(not compresscheck):
|
|
6531
6829
|
fextname = os.path.splitext(infile)[1]
|
|
6532
6830
|
if(fextname == ".gz"):
|
|
@@ -6549,26 +6847,23 @@ def CatFileSeekToFileNum(infile, fmttype="auto", seekto=0, listonly=False, conte
 return False
 if(not compresscheck):
 return False
-fp = UncompressFile(infile, formatspecs, "rb")
+fp = UncompressFile(infile, formatspecs, "rb", filestart)
 try:
-fp.seek(0, 2)
+fp.seek(0, 2)
 except OSError:
-SeekToEndOfFile(fp)
+SeekToEndOfFile(fp)
 except ValueError:
-SeekToEndOfFile(fp)
-CatSize = fp.tell()
-CatSizeEnd = CatSize
+SeekToEndOfFile(fp)
+CatSize = fp.tell()
+CatSizeEnd = CatSize
 fp.seek(curloc, 0)
-if(curloc > 0):
-fp.seek(0, 0)
 if(IsNestedDict(formatspecs)):
-compresschecking = CheckCompressionType(fp, formatspecs, False)
+compresschecking = CheckCompressionType(fp, formatspecs, filestart, False)
 if(compresschecking not in formatspecs):
-fp.seek(0, 0)
 return False
 else:
 formatspecs = formatspecs[compresschecking]
-fp.seek(
+fp.seek(filestart, 0)
 inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
 formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
 formdelszie = len(formatspecs['format_delimiter'])
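The recurring change in this function is that every hard-coded fp.seek(0, 0) becomes fp.seek(filestart, 0), so the archive no longer has to begin at byte 0 of the stream. A minimal sketch of why that matters, using a hypothetical container with a 128-byte preamble in front of the archive data:

import io

PREAMBLE = b"\x00" * 128              # hypothetical non-archive prefix
archive_bytes = b"CatFile1 ..."       # placeholder for real archive bytes

container = io.BytesIO(PREAMBLE + archive_bytes)
filestart = len(PREAMBLE)

# Old behaviour: seek(0, 0) would land on the preamble and header parsing would fail.
# New behaviour: every seek is made relative to filestart, the real start of the archive.
container.seek(filestart, 0)
print(container.read(7))              # b'CatFile'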
@@ -6585,23 +6880,8 @@ def CatFileSeekToFileNum(infile, fmttype="auto", seekto=0, listonly=False, conte
 fp, formatspecs['format_delimiter'])
 fnumextrafieldsize = int(inheader[5], 16)
 fnumextrafields = int(inheader[6], 16)
-fextrafieldslist = []
 extrastart = 7
 extraend = extrastart + fnumextrafields
-while(extrastart < extraend):
-fextrafieldslist.append(inheader[extrastart])
-extrastart = extrastart + 1
-if(fnumextrafields==1):
-try:
-fextrafieldslist = json.loads(base64.b64decode(fextrafieldslist[0]).decode("UTF-8"))
-fnumextrafields = len(fextrafieldslist)
-except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
-try:
-fextrafieldslist = json.loads(fextrafieldslist[0])
-except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
-pass
-if(curloc > 0):
-fp.seek(curloc, 0)
 formversion = re.findall("([\\d]+)", formstring)
 fheadsize = int(inheader[0], 16)
 fnumfields = int(inheader[1], 16)
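For reference, the extra-field handling that this hunk drops from CatFileSeekToFileNum() (the same logic still appears in CatFileToArray() further down) treats a single extra field as a base64-encoded JSON blob, with a plain-JSON fallback. A standalone restatement of that decode logic:

import base64
import binascii
import json

def decode_extra_fields(fextrafieldslist, fnumextrafields):
    # Mirrors the logic shown in the diff: one extra field may hold base64-encoded
    # JSON, plain JSON, or just an opaque string that is left untouched.
    if fnumextrafields == 1:
        try:
            fextrafieldslist = json.loads(base64.b64decode(fextrafieldslist[0]).decode("UTF-8"))
            fnumextrafields = len(fextrafieldslist)
        except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
            try:
                fextrafieldslist = json.loads(fextrafieldslist[0])
            except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
                pass
    return fextrafieldslist, fnumextrafields

encoded = base64.b64encode(json.dumps(["mtime", "uid"]).encode("UTF-8")).decode("UTF-8")
print(decode_extra_fields([encoded], 1))   # (['mtime', 'uid'], 2)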
@@ -6610,629 +6890,7 @@ def CatFileSeekToFileNum(infile, fmttype="auto", seekto=0, listonly=False, conte
 fnumfiles = int(inheader[4], 16)
 fprechecksumtype = inheader[-2]
 fprechecksum = inheader[-1]
6613
|
-
|
|
6614
|
-
newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
|
|
6615
|
-
if(not headercheck and not skipchecksum):
|
|
6616
|
-
VerbosePrintOut(
|
|
6617
|
-
"File Header Checksum Error with file at offset " + str(0))
|
|
6618
|
-
VerbosePrintOut("'" + fprechecksum + "' != " +
|
|
6619
|
-
"'" + newfcs + "'")
|
|
6620
|
-
return False
|
|
6621
|
-
formversions = re.search('(.*?)(\\d+)', formstring).groups()
|
|
6622
|
-
fcompresstype = compresscheck
|
|
6623
|
-
if(fcompresstype==formatspecs['format_magic']):
|
|
6624
|
-
fcompresstype = ""
|
|
6625
|
-
outlist = {'fnumfiles': fnumfiles, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
|
|
6626
|
-
if(seekto >= fnumfiles):
|
|
6627
|
-
seekto = fnumfiles - 1
|
|
6628
|
-
if(seekto < 0):
|
|
6629
|
-
seekto = 0
|
|
6630
|
-
if(seekto >= 0):
|
|
6631
|
-
il = -1
|
|
6632
|
-
while (fp.tell() < CatSizeEnd) if seektoend else (il < seekto):
|
|
6633
|
-
prefhstart = fp.tell()
|
|
6634
|
-
if(formatspecs['new_style']):
|
|
6635
|
-
preheaderdata = ReadFileHeaderDataBySize(
|
|
6636
|
-
fp, formatspecs['format_delimiter'])
|
|
6637
|
-
else:
|
|
6638
|
-
preheaderdata = ReadFileHeaderDataWoSize(
|
|
6639
|
-
fp, formatspecs['format_delimiter'])
|
|
6640
|
-
if(len(preheaderdata) == 0):
|
|
6641
|
-
break
|
|
6642
|
-
prefheadsize = int(preheaderdata[0], 16)
|
|
6643
|
-
prefnumfields = int(preheaderdata[1], 16)
|
|
6644
|
-
preftype = int(preheaderdata[2], 16)
|
|
6645
|
-
prefencoding = preheaderdata[3]
|
|
6646
|
-
prefcencoding = preheaderdata[4]
|
|
6647
|
-
if(re.findall("^[.|/]", preheaderdata[5])):
|
|
6648
|
-
prefname = preheaderdata[5]
|
|
6649
|
-
else:
|
|
6650
|
-
prefname = "./"+preheaderdata[5]
|
|
6651
|
-
prefbasedir = os.path.dirname(prefname)
|
|
6652
|
-
preflinkname = preheaderdata[6]
|
|
6653
|
-
prefsize = int(preheaderdata[7], 16)
|
|
6654
|
-
prefatime = int(preheaderdata[8], 16)
|
|
6655
|
-
prefmtime = int(preheaderdata[9], 16)
|
|
6656
|
-
prefctime = int(preheaderdata[10], 16)
|
|
6657
|
-
prefbtime = int(preheaderdata[11], 16)
|
|
6658
|
-
prefmode = int(preheaderdata[12], 16)
|
|
6659
|
-
prefchmode = stat.S_IMODE(prefmode)
|
|
6660
|
-
preftypemod = stat.S_IFMT(prefmode)
|
|
6661
|
-
prefwinattributes = int(preheaderdata[13], 16)
|
|
6662
|
-
prefcompression = preheaderdata[14]
|
|
6663
|
-
prefcsize = int(preheaderdata[15], 16)
|
|
6664
|
-
prefuid = int(preheaderdata[16], 16)
|
|
6665
|
-
prefuname = preheaderdata[17]
|
|
6666
|
-
prefgid = int(preheaderdata[18], 16)
|
|
6667
|
-
prefgname = preheaderdata[19]
|
|
6668
|
-
fid = int(preheaderdata[20], 16)
|
|
6669
|
-
finode = int(preheaderdata[21], 16)
|
|
6670
|
-
flinkcount = int(preheaderdata[22], 16)
|
|
6671
|
-
prefdev = int(preheaderdata[23], 16)
|
|
6672
|
-
prefdev_minor = int(preheaderdata[24], 16)
|
|
6673
|
-
prefdev_major = int(preheaderdata[25], 16)
|
|
6674
|
-
prefseeknextfile = preheaderdata[26]
|
|
6675
|
-
prefjsontype = preheaderdata[27]
|
|
6676
|
-
prefjsonlen = int(preheaderdata[28], 16)
|
|
6677
|
-
prefjsonsize = int(preheaderdata[29], 16)
|
|
6678
|
-
prefjsonchecksumtype = preheaderdata[30]
|
|
6679
|
-
prefjsonchecksum = preheaderdata[31]
|
|
6680
|
-
prefhend = fp.tell() - 1
|
|
6681
|
-
prefjstart = fp.tell()
|
|
6682
|
-
prefjoutfprejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
|
|
6683
|
-
prefjend = fp.tell()
|
|
6684
|
-
fp.seek(len(formatspecs['format_delimiter']), 1)
|
|
6685
|
-
prejsonfcs = GetFileChecksum(prefjoutfprejsoncontent, prefjsonchecksumtype, True, formatspecs)
|
|
6686
|
-
prefextrasize = int(preheaderdata[32], 16)
|
|
6687
|
-
prefextrafields = int(preheaderdata[33], 16)
|
|
6688
|
-
extrastart = 34
|
|
6689
|
-
extraend = extrastart + prefextrafields
|
|
6690
|
-
prefcs = preheaderdata[-2].lower()
|
|
6691
|
-
prenewfcs = preheaderdata[-1].lower()
|
|
6692
|
-
prenewfcs = GetHeaderChecksum(
|
|
6693
|
-
preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
|
|
6694
|
-
if(prefcs != prenewfcs and not skipchecksum):
|
|
6695
|
-
VerbosePrintOut("File Header Checksum Error with file " +
|
|
6696
|
-
prefname + " at offset " + str(prefhstart))
|
|
6697
|
-
VerbosePrintOut("'" + prefcs + "' != " +
|
|
6698
|
-
"'" + prenewfcs + "'")
|
|
6699
|
-
return False
|
|
6700
|
-
if(prefjsonsize > 0):
|
|
6701
|
-
if(prejsonfcs != prefjsonchecksum and not skipchecksum):
|
|
6702
|
-
VerbosePrintOut("File JSON Data Checksum Error with file " +
|
|
6703
|
-
prefname + " at offset " + str(prefjstart))
|
|
6704
|
-
VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
|
|
6705
|
-
return False
|
|
6706
|
-
prefcontentstart = fp.tell()
|
|
6707
|
-
prefcontents = ""
|
|
6708
|
-
pyhascontents = False
|
|
6709
|
-
if(prefsize > 0):
|
|
6710
|
-
if(prefcompression):
|
|
6711
|
-
prefcontents = fp.read(prefsize)
|
|
6712
|
-
else:
|
|
6713
|
-
prefcontents = fp.read(prefcsize)
|
|
6714
|
-
prenewfccs = GetFileChecksum(
|
|
6715
|
-
prefcontents, preheaderdata[-3].lower(), False, formatspecs)
|
|
6716
|
-
pyhascontents = True
|
|
6717
|
-
if(prefccs != prenewfccs and not skipchecksum):
|
|
6718
|
-
VerbosePrintOut("File Content Checksum Error with file " +
|
|
6719
|
-
prefname + " at offset " + str(prefcontentstart))
|
|
6720
|
-
VerbosePrintOut("'" + prefccs +
|
|
6721
|
-
"' != " + "'" + prenewfccs + "'")
|
|
6722
|
-
return False
|
|
6723
|
-
if(re.findall("^\\+([0-9]+)", prefseeknextfile)):
|
|
6724
|
-
fseeknextasnum = int(prefseeknextfile.replace("+", ""))
|
|
6725
|
-
if(abs(fseeknextasnum) == 0):
|
|
6726
|
-
pass
|
|
6727
|
-
fp.seek(fseeknextasnum, 1)
|
|
6728
|
-
elif(re.findall("^\\-([0-9]+)", prefseeknextfile)):
|
|
6729
|
-
fseeknextasnum = int(prefseeknextfile)
|
|
6730
|
-
if(abs(fseeknextasnum) == 0):
|
|
6731
|
-
pass
|
|
6732
|
-
fp.seek(fseeknextasnum, 1)
|
|
6733
|
-
elif(re.findall("^([0-9]+)", prefseeknextfile)):
|
|
6734
|
-
fseeknextasnum = int(prefseeknextfile)
|
|
6735
|
-
if(abs(fseeknextasnum) == 0):
|
|
6736
|
-
pass
|
|
6737
|
-
fp.seek(fseeknextasnum, 0)
|
|
6738
|
-
else:
|
|
6739
|
-
return False
|
|
6740
|
-
il = il + 1
|
|
6741
|
-
fp.seek(seekstart, 0)
|
|
6742
|
-
fileidnum = il
|
|
6743
|
-
outfheadsize = int(preheaderdata[0], 16)
|
|
6744
|
-
outfnumfields = int(preheaderdata[1], 16)
|
|
6745
|
-
outftype = int(preheaderdata[2], 16)
|
|
6746
|
-
outfencoding = preheaderdata[3]
|
|
6747
|
-
if(re.findall("^[.|/]", preheaderdata[4])):
|
|
6748
|
-
outfname = preheaderdata[4]
|
|
6749
|
-
else:
|
|
6750
|
-
outfname = "./"+preheaderdata[4]
|
|
6751
|
-
outflinkname = preheaderdata[5]
|
|
6752
|
-
outfsize = int(preheaderdata[6], 16)
|
|
6753
|
-
outfbasedir = os.path.dirname(outfname)
|
|
6754
|
-
outlist = {'fid': fileidnum, 'foffset': fp.tell(), 'ftype': outftype, 'fencoding': outfencoding, 'fname': outfname,
|
|
6755
|
-
'fbasedir': outfbasedir, 'flinkname': outflinkname, 'fsize': outfsize}
|
|
6756
|
-
if(returnfp):
|
|
6757
|
-
outlist.update({'fp': fp})
|
|
6758
|
-
else:
|
|
6759
|
-
fp.close()
|
|
6760
|
-
return outlist
|
|
6761
|
-
|
|
6762
|
-
|
|
6763
|
-
def CatFileSeekToFileName(infile, fmttype="auto", seekfile=None, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
|
|
6764
|
-
if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
|
|
6765
|
-
formatspecs = formatspecs[fmttype]
|
|
6766
|
-
elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
|
|
6767
|
-
fmttype = "auto"
|
|
6768
|
-
curloc = 0
|
|
6769
|
-
if(hasattr(infile, "read") or hasattr(infile, "write")):
|
|
6770
|
-
curloc = infile.tell()
|
|
6771
|
-
fp = infile
|
|
6772
|
-
fp.seek(0, 0)
|
|
6773
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
6774
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
|
|
6775
|
-
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
6776
|
-
formatspecs = formatspecs[checkcompressfile]
|
|
6777
|
-
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
6778
|
-
return TarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6779
|
-
elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
|
|
6780
|
-
return ZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6781
|
-
elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
|
|
6782
|
-
return RarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6783
|
-
elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
|
|
6784
|
-
return SevenZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6785
|
-
elif(IsSingleDict(formatspecs) and checkcompressfile != formatspecs['format_magic']):
|
|
6786
|
-
return False
|
|
6787
|
-
elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
|
|
6788
|
-
return False
|
|
6789
|
-
if(not fp):
|
|
6790
|
-
return False
|
|
6791
|
-
fp.seek(0, 0)
|
|
6792
|
-
elif(infile == "-"):
|
|
6793
|
-
fp = BytesIO()
|
|
6794
|
-
if(hasattr(sys.stdin, "buffer")):
|
|
6795
|
-
shutil.copyfileobj(sys.stdin.buffer, fp)
|
|
6796
|
-
else:
|
|
6797
|
-
shutil.copyfileobj(sys.stdin, fp)
|
|
6798
|
-
fp.seek(0, 0)
|
|
6799
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
6800
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
|
|
6801
|
-
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
6802
|
-
formatspecs = formatspecs[checkcompressfile]
|
|
6803
|
-
if(not fp):
|
|
6804
|
-
return False
|
|
6805
|
-
fp.seek(0, 0)
|
|
6806
|
-
elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
|
|
6807
|
-
fp = BytesIO()
|
|
6808
|
-
fp.write(infile)
|
|
6809
|
-
fp.seek(0, 0)
|
|
6810
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
6811
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
6812
|
-
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
6813
|
-
formatspecs = formatspecs[compresscheck]
|
|
6814
|
-
if(not fp):
|
|
6815
|
-
return False
|
|
6816
|
-
fp.seek(0, 0)
|
|
6817
|
-
elif(re.findall("^(http|https|ftp|ftps|sftp):\\/\\/", infile)):
|
|
6818
|
-
fp = download_file_from_internet_file(infile)
|
|
6819
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
6820
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
6821
|
-
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
6822
|
-
formatspecs = formatspecs[compresscheck]
|
|
6823
|
-
fp.seek(0, 0)
|
|
6824
|
-
if(not fp):
|
|
6825
|
-
return False
|
|
6826
|
-
fp.seek(0, 0)
|
|
6827
|
-
else:
|
|
6828
|
-
infile = RemoveWindowsPath(infile)
|
|
6829
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
6830
|
-
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
6831
|
-
formatspecs = formatspecs[checkcompressfile]
|
|
6832
|
-
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
6833
|
-
return TarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6834
|
-
elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
|
|
6835
|
-
return ZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6836
|
-
elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
|
|
6837
|
-
return RarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6838
|
-
elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
|
|
6839
|
-
return SevenZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
6840
|
-
elif(IsSingleDict(formatspecs) and checkcompressfile != formatspecs['format_magic']):
|
|
6841
|
-
return False
|
|
6842
|
-
elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
|
|
6843
|
-
return False
|
|
6844
|
-
compresscheck = CheckCompressionType(infile, formatspecs, True)
|
|
6845
|
-
if(not compresscheck):
|
|
6846
|
-
fextname = os.path.splitext(infile)[1]
|
|
6847
|
-
if(fextname == ".gz"):
|
|
6848
|
-
compresscheck = "gzip"
|
|
6849
|
-
elif(fextname == ".bz2"):
|
|
6850
|
-
compresscheck = "bzip2"
|
|
6851
|
-
elif(fextname == ".zst"):
|
|
6852
|
-
compresscheck = "zstd"
|
|
6853
|
-
elif(fextname == ".lz4" or fextname == ".clz4"):
|
|
6854
|
-
compresscheck = "lz4"
|
|
6855
|
-
elif(fextname == ".lzo" or fextname == ".lzop"):
|
|
6856
|
-
compresscheck = "lzo"
|
|
6857
|
-
elif(fextname == ".lzma"):
|
|
6858
|
-
compresscheck = "lzma"
|
|
6859
|
-
elif(fextname == ".xz"):
|
|
6860
|
-
compresscheck = "xz"
|
|
6861
|
-
elif(fextname == ".zz" or fextname == ".zl" or fextname == ".zlib"):
|
|
6862
|
-
compresscheck = "zlib"
|
|
6863
|
-
else:
|
|
6864
|
-
return False
|
|
6865
|
-
if(not compresscheck):
|
|
6866
|
-
return False
|
|
6867
|
-
fp = UncompressFile(infile, formatspecs, "rb")
|
|
6868
|
-
try:
|
|
6869
|
-
fp.seek(0, 2);
|
|
6870
|
-
except OSError:
|
|
6871
|
-
SeekToEndOfFile(fp);
|
|
6872
|
-
except ValueError:
|
|
6873
|
-
SeekToEndOfFile(fp);
|
|
6874
|
-
CatSize = fp.tell();
|
|
6875
|
-
CatSizeEnd = CatSize;
|
|
6876
|
-
fp.seek(curloc, 0)
|
|
6877
|
-
if(curloc > 0):
|
|
6878
|
-
fp.seek(0, 0)
|
|
6879
|
-
if(IsNestedDict(formatspecs)):
|
|
6880
|
-
compresschecking = CheckCompressionType(fp, formatspecs, False)
|
|
6881
|
-
if(compresschecking not in formatspecs):
|
|
6882
|
-
return False
|
|
6883
|
-
else:
|
|
6884
|
-
formatspecs = formatspecs[compresschecking]
|
|
6885
|
-
fp.seek(0, 0)
|
|
6886
|
-
inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
|
|
6887
|
-
formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
|
|
6888
|
-
formdelszie = len(formatspecs['format_delimiter'])
|
|
6889
|
-
formdel = fp.read(formdelszie).decode("UTF-8")
|
|
6890
|
-
if(formstring != formatspecs['format_magic']+inheaderver):
|
|
6891
|
-
return False
|
|
6892
|
-
if(formdel != formatspecs['format_delimiter']):
|
|
6893
|
-
return False
|
|
6894
|
-
if(formatspecs['new_style']):
|
|
6895
|
-
inheader = ReadFileHeaderDataBySize(
|
|
6896
|
-
fp, formatspecs['format_delimiter'])
|
|
6897
|
-
else:
|
|
6898
|
-
inheader = ReadFileHeaderDataWoSize(
|
|
6899
|
-
fp, formatspecs['format_delimiter'])
|
|
6900
|
-
fnumextrafieldsize = int(inheader[5], 16)
|
|
6901
|
-
fnumextrafields = int(inheader[6], 16)
|
|
6902
|
-
fextrafieldslist = []
|
|
6903
|
-
extrastart = 7
|
|
6904
|
-
extraend = extrastart + fnumextrafields
|
|
6905
|
-
while(extrastart < extraend):
|
|
6906
|
-
fextrafieldslist.append(inheader[extrastart])
|
|
6907
|
-
extrastart = extrastart + 1
|
|
6908
|
-
if(fnumextrafields==1):
|
|
6909
|
-
try:
|
|
6910
|
-
fextrafieldslist = json.loads(base64.b64decode(fextrafieldslist[0]).decode("UTF-8"))
|
|
6911
|
-
fnumextrafields = len(fextrafieldslist)
|
|
6912
|
-
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
|
|
6913
|
-
try:
|
|
6914
|
-
fextrafieldslist = json.loads(fextrafieldslist[0])
|
|
6915
|
-
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
|
|
6916
|
-
pass
|
|
6917
|
-
if(curloc > 0):
|
|
6918
|
-
fp.seek(curloc, 0)
|
|
6919
|
-
formversion = re.findall("([\\d]+)", formstring)
|
|
6920
|
-
fheadsize = int(inheader[0], 16)
|
|
6921
|
-
fnumfields = int(inheader[1], 16)
|
|
6922
|
-
fhencoding = inheader[2]
|
|
6923
|
-
fostype = inheader[3]
|
|
6924
|
-
fnumfiles = int(inheader[4], 16)
|
|
6925
|
-
fprechecksumtype = inheader[-2]
|
|
6926
|
-
fprechecksum = inheader[-1]
|
|
6927
|
-
headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
|
|
6928
|
-
newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
|
|
6929
|
-
if(not headercheck and not skipchecksum):
|
|
6930
|
-
VerbosePrintOut(
|
|
6931
|
-
"File Header Checksum Error with file at offset " + str(0))
|
|
6932
|
-
VerbosePrintOut("'" + fprechecksum + "' != " +
|
|
6933
|
-
"'" + newfcs + "'")
|
|
6934
|
-
return False
|
|
6935
|
-
formversions = re.search('(.*?)(\\d+)', formstring).groups()
|
|
6936
|
-
fcompresstype = compresscheck
|
|
6937
|
-
if(fcompresstype==formatspecs['format_magic']):
|
|
6938
|
-
fcompresstype = ""
|
|
6939
|
-
outlist = {'fnumfiles': fnumfiles, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
|
|
6940
|
-
seekto = fnumfiles - 1
|
|
6941
|
-
filefound = False
|
|
6942
|
-
if(seekto >= 0):
|
|
6943
|
-
il = -1
|
|
6944
|
-
while (fp.tell() < CatSizeEnd) if seektoend else (il < seekto):
|
|
6945
|
-
prefhstart = fp.tell()
|
|
6946
|
-
if(formatspecs['new_style']):
|
|
6947
|
-
preheaderdata = ReadFileHeaderDataBySize(
|
|
6948
|
-
fp, formatspecs['format_delimiter'])
|
|
6949
|
-
else:
|
|
6950
|
-
preheaderdata = ReadFileHeaderDataWoSize(
|
|
6951
|
-
fp, formatspecs['format_delimiter'])
|
|
6952
|
-
if(len(preheaderdata) == 0):
|
|
6953
|
-
break
|
|
6954
|
-
prefheadsize = int(preheaderdata[0], 16)
|
|
6955
|
-
prefnumfields = int(preheaderdata[1], 16)
|
|
6956
|
-
preftype = int(preheaderdata[2], 16)
|
|
6957
|
-
prefencoding = preheaderdata[3]
|
|
6958
|
-
prefencoding = preheaderdata[4]
|
|
6959
|
-
if(re.findall("^[.|/]", preheaderdata[5])):
|
|
6960
|
-
prefname = preheaderdata[5]
|
|
6961
|
-
else:
|
|
6962
|
-
prefname = "./"+preheaderdata[5]
|
|
6963
|
-
prefbasedir = os.path.dirname(prefname)
|
|
6964
|
-
preflinkname = preheaderdata[6]
|
|
6965
|
-
prefsize = int(preheaderdata[7], 16)
|
|
6966
|
-
prefatime = int(preheaderdata[8], 16)
|
|
6967
|
-
prefmtime = int(preheaderdata[9], 16)
|
|
6968
|
-
prefctime = int(preheaderdata[10], 16)
|
|
6969
|
-
prefbtime = int(preheaderdata[11], 16)
|
|
6970
|
-
prefmode = int(preheaderdata[12], 16)
|
|
6971
|
-
prefchmode = stat.S_IMODE(prefmode)
|
|
6972
|
-
preftypemod = stat.S_IFMT(prefmode)
|
|
6973
|
-
prefwinattributes = int(preheaderdata[13], 16)
|
|
6974
|
-
prefcompression = preheaderdata[14]
|
|
6975
|
-
prefcsize = int(preheaderdata[15], 16)
|
|
6976
|
-
prefuid = int(preheaderdata[16], 16)
|
|
6977
|
-
prefuname = preheaderdata[17]
|
|
6978
|
-
prefgid = int(preheaderdata[18], 16)
|
|
6979
|
-
prefgname = preheaderdata[19]
|
|
6980
|
-
fid = int(preheaderdata[20], 16)
|
|
6981
|
-
finode = int(preheaderdata[21], 16)
|
|
6982
|
-
flinkcount = int(preheaderdata[22], 16)
|
|
6983
|
-
prefdev = int(preheaderdata[23], 16)
|
|
6984
|
-
prefdev_minor = int(preheaderdata[24], 16)
|
|
6985
|
-
prefdev_major = int(preheaderdata[25], 16)
|
|
6986
|
-
prefseeknextfile = preheaderdata[26]
|
|
6987
|
-
prefjsontype = preheaderdata[27]
|
|
6988
|
-
prefjsonlen = int(preheaderdata[28], 16)
|
|
6989
|
-
prefjsonsize = int(preheaderdata[29], 16)
|
|
6990
|
-
prefjsonchecksumtype = preheaderdata[30]
|
|
6991
|
-
prefjsonchecksum = preheaderdata[31]
|
|
6992
|
-
prefhend = fp.tell() - 1
|
|
6993
|
-
prefjstart = fp.tell()
|
|
6994
|
-
prefjoutfprejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
|
|
6995
|
-
prefjend = fp.tell()
|
|
6996
|
-
fp.seek(len(formatspecs['format_delimiter']), 1)
|
|
6997
|
-
prejsonfcs = GetFileChecksum(prefjoutfprejsoncontent, prefjsonchecksumtype, True, formatspecs)
|
|
6998
|
-
prefextrasize = int(preheaderdata[32], 16)
|
|
6999
|
-
prefextrafields = int(preheaderdata[33], 16)
|
|
7000
|
-
extrastart = 34
|
|
7001
|
-
extraend = extrastart + prefextrafields
|
|
7002
|
-
prefcs = preheaderdata[-2].lower()
|
|
7003
|
-
prenewfcs = preheaderdata[-1].lower()
|
|
7004
|
-
prenewfcs = GetHeaderChecksum(
|
|
7005
|
-
preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
|
|
7006
|
-
if(prefcs != prenewfcs and not skipchecksum):
|
|
7007
|
-
VerbosePrintOut("File Header Checksum Error with file " +
|
|
7008
|
-
prefname + " at offset " + str(prefhstart))
|
|
7009
|
-
VerbosePrintOut("'" + prefcs + "' != " +
|
|
7010
|
-
"'" + prenewfcs + "'")
|
|
7011
|
-
return False
|
|
7012
|
-
if(prefjsonsize > 0):
|
|
7013
|
-
if(prejsonfcs != prefjsonchecksum and not skipchecksum):
|
|
7014
|
-
VerbosePrintOut("File JSON Data Checksum Error with file " +
|
|
7015
|
-
prefname + " at offset " + str(prefjstart))
|
|
7016
|
-
VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
|
|
7017
|
-
return False
|
|
7018
|
-
prefcontentstart = fp.tell()
|
|
7019
|
-
prefcontents = ""
|
|
7020
|
-
pyhascontents = False
|
|
7021
|
-
if(prefsize > 0):
|
|
7022
|
-
if(prefcompression):
|
|
7023
|
-
prefcontents = fp.read(prefsize)
|
|
7024
|
-
else:
|
|
7025
|
-
prefcontents = fp.read(prefcsize)
|
|
7026
|
-
prenewfccs = GetFileChecksum(
|
|
7027
|
-
prefcontents, preheaderdata[-3].lower(), False, formatspecs)
|
|
7028
|
-
pyhascontents = True
|
|
7029
|
-
if(prefccs != prenewfccs and not skipchecksum):
|
|
7030
|
-
VerbosePrintOut("File Content Checksum Error with file " +
|
|
7031
|
-
prefname + " at offset " + str(prefcontentstart))
|
|
7032
|
-
VerbosePrintOut("'" + prefccs +
|
|
7033
|
-
"' != " + "'" + prenewfccs + "'")
|
|
7034
|
-
return False
|
|
7035
|
-
if(re.findall("^\\+([0-9]+)", prefseeknextfile)):
|
|
7036
|
-
fseeknextasnum = int(prefseeknextfile.replace("+", ""))
|
|
7037
|
-
if(abs(fseeknextasnum) == 0):
|
|
7038
|
-
pass
|
|
7039
|
-
fp.seek(fseeknextasnum, 1)
|
|
7040
|
-
elif(re.findall("^\\-([0-9]+)", prefseeknextfile)):
|
|
7041
|
-
fseeknextasnum = int(prefseeknextfile)
|
|
7042
|
-
if(abs(fseeknextasnum) == 0):
|
|
7043
|
-
pass
|
|
7044
|
-
fp.seek(fseeknextasnum, 1)
|
|
7045
|
-
elif(re.findall("^([0-9]+)", prefseeknextfile)):
|
|
7046
|
-
fseeknextasnum = int(prefseeknextfile)
|
|
7047
|
-
if(abs(fseeknextasnum) == 0):
|
|
7048
|
-
pass
|
|
7049
|
-
fp.seek(fseeknextasnum, 0)
|
|
7050
|
-
else:
|
|
7051
|
-
return False
|
|
7052
|
-
il = il + 1
|
|
7053
|
-
filefound = False
|
|
7054
|
-
if(prefname == seekfile):
|
|
7055
|
-
filefound = True
|
|
7056
|
-
break
|
|
7057
|
-
fp.seek(seekstart, 0)
|
|
7058
|
-
fileidnum = il
|
|
7059
|
-
outfheadsize = int(preheaderdata[0], 16)
|
|
7060
|
-
outfnumfields = int(preheaderdata[1], 16)
|
|
7061
|
-
outftype = int(preheaderdata[2], 16)
|
|
7062
|
-
outfencoding = preheaderdata[3]
|
|
7063
|
-
if(re.findall("^[.|/]", preheaderdata[4])):
|
|
7064
|
-
outfname = preheaderdata[4]
|
|
7065
|
-
else:
|
|
7066
|
-
outfname = "./"+preheaderdata[4]
|
|
7067
|
-
outflinkname = preheaderdata[5]
|
|
7068
|
-
outfsize = int(preheaderdata[6], 16)
|
|
7069
|
-
outfbasedir = os.path.dirname(outfname)
|
|
7070
|
-
if(filefound):
|
|
7071
|
-
outlist = {'fid': fileidnum, 'foffset': fp.tell(), 'ftype': outftype, 'fencoding': outfencoding, 'fname': outfname,
|
|
7072
|
-
'fbasedir': outfbasedir, 'flinkname': outflinkname, 'fsize': outfsize}
|
|
7073
|
-
else:
|
|
7074
|
-
return False
|
|
7075
|
-
if(returnfp):
|
|
7076
|
-
outlist.update({'fp': fp})
|
|
7077
|
-
else:
|
|
7078
|
-
fp.close()
|
|
7079
|
-
return outlist
|
|
7080
|
-
|
|
7081
|
-
|
|
7082
|
-
def CatFileValidate(infile, fmttype="auto", formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
|
|
7083
|
-
if(verbose):
|
|
7084
|
-
logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
|
|
7085
|
-
if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
|
|
7086
|
-
formatspecs = formatspecs[fmttype]
|
|
7087
|
-
elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
|
|
7088
|
-
fmttype = "auto"
|
|
7089
|
-
curloc = 0
|
|
7090
|
-
if(hasattr(infile, "read") or hasattr(infile, "write")):
|
|
7091
|
-
curloc = infile.tell()
|
|
7092
|
-
fp = infile
|
|
7093
|
-
fp.seek(0, 0)
|
|
7094
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
7095
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
|
|
7096
|
-
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
7097
|
-
formatspecs = formatspecs[checkcompressfile]
|
|
7098
|
-
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
7099
|
-
return TarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7100
|
-
elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
|
|
7101
|
-
return ZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7102
|
-
elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
|
|
7103
|
-
return RarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7104
|
-
elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
|
|
7105
|
-
return SevenZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7106
|
-
elif(IsSingleDict(formatspecs) and checkcompressfile != formatspecs['format_magic']):
|
|
7107
|
-
return False
|
|
7108
|
-
elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
|
|
7109
|
-
return False
|
|
7110
|
-
if(not fp):
|
|
7111
|
-
return False
|
|
7112
|
-
fp.seek(0, 0)
|
|
7113
|
-
elif(infile == "-"):
|
|
7114
|
-
fp = BytesIO()
|
|
7115
|
-
if(hasattr(sys.stdin, "buffer")):
|
|
7116
|
-
shutil.copyfileobj(sys.stdin.buffer, fp)
|
|
7117
|
-
else:
|
|
7118
|
-
shutil.copyfileobj(sys.stdin, fp)
|
|
7119
|
-
fp.seek(0, 0)
|
|
7120
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
7121
|
-
checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
|
|
7122
|
-
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
7123
|
-
formatspecs = formatspecs[checkcompressfile]
|
|
7124
|
-
if(not fp):
|
|
7125
|
-
return False
|
|
7126
|
-
fp.seek(0, 0)
|
|
7127
|
-
elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
|
|
7128
|
-
fp = BytesIO()
|
|
7129
|
-
fp.write(infile)
|
|
7130
|
-
fp.seek(0, 0)
|
|
7131
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
7132
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
7133
|
-
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
7134
|
-
formatspecs = formatspecs[compresscheck]
|
|
7135
|
-
if(not fp):
|
|
7136
|
-
return False
|
|
7137
|
-
fp.seek(0, 0)
|
|
7138
|
-
elif(re.findall("^(http|https|ftp|ftps|sftp):\\/\\/", infile)):
|
|
7139
|
-
fp = download_file_from_internet_file(infile)
|
|
7140
|
-
fp = UncompressFileAlt(fp, formatspecs)
|
|
7141
|
-
compresscheck = CheckCompressionType(fp, formatspecs, False)
|
|
7142
|
-
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
7143
|
-
formatspecs = formatspecs[compresscheck]
|
|
7144
|
-
fp.seek(0, 0)
|
|
7145
|
-
if(not fp):
|
|
7146
|
-
return False
|
|
7147
|
-
fp.seek(0, 0)
|
|
7148
|
-
else:
|
|
7149
|
-
infile = RemoveWindowsPath(infile)
|
|
7150
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
7151
|
-
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
7152
|
-
formatspecs = formatspecs[checkcompressfile]
|
|
7153
|
-
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
7154
|
-
return TarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7155
|
-
elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
|
|
7156
|
-
return ZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7157
|
-
elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
|
|
7158
|
-
return RarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7159
|
-
elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
|
|
7160
|
-
return SevenZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7161
|
-
elif(IsSingleDict(formatspecs) and checkcompressfile != formatspecs['format_magic']):
|
|
7162
|
-
return False
|
|
7163
|
-
elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
|
|
7164
|
-
return False
|
|
7165
|
-
compresscheck = CheckCompressionType(infile, formatspecs, True)
|
|
7166
|
-
if(not compresscheck):
|
|
7167
|
-
fextname = os.path.splitext(infile)[1]
|
|
7168
|
-
if(fextname == ".gz"):
|
|
7169
|
-
compresscheck = "gzip"
|
|
7170
|
-
elif(fextname == ".bz2"):
|
|
7171
|
-
compresscheck = "bzip2"
|
|
7172
|
-
elif(fextname == ".zst"):
|
|
7173
|
-
compresscheck = "zstd"
|
|
7174
|
-
elif(fextname == ".lz4" or fextname == ".clz4"):
|
|
7175
|
-
compresscheck = "lz4"
|
|
7176
|
-
elif(fextname == ".lzo" or fextname == ".lzop"):
|
|
7177
|
-
compresscheck = "lzo"
|
|
7178
|
-
elif(fextname == ".lzma"):
|
|
7179
|
-
compresscheck = "lzma"
|
|
7180
|
-
elif(fextname == ".xz"):
|
|
7181
|
-
compresscheck = "xz"
|
|
7182
|
-
elif(fextname == ".zz" or fextname == ".zl" or fextname == ".zlib"):
|
|
7183
|
-
compresscheck = "zlib"
|
|
7184
|
-
else:
|
|
7185
|
-
return False
|
|
7186
|
-
if(not compresscheck):
|
|
7187
|
-
return False
|
|
7188
|
-
fp = UncompressFile(infile, formatspecs, "rb")
|
|
7189
|
-
try:
|
|
7190
|
-
fp.seek(0, 2);
|
|
7191
|
-
except OSError:
|
|
7192
|
-
SeekToEndOfFile(fp);
|
|
7193
|
-
except ValueError:
|
|
7194
|
-
SeekToEndOfFile(fp);
|
|
7195
|
-
CatSize = fp.tell();
|
|
7196
|
-
CatSizeEnd = CatSize;
|
|
7197
|
-
fp.seek(curloc, 0)
|
|
7198
|
-
if(curloc > 0):
|
|
7199
|
-
fp.seek(0, 0)
|
|
7200
|
-
if(IsNestedDict(formatspecs)):
|
|
7201
|
-
compresschecking = CheckCompressionType(fp, formatspecs, False)
|
|
7202
|
-
if(compresschecking not in formatspecs):
|
|
7203
|
-
return False
|
|
7204
|
-
else:
|
|
7205
|
-
formatspecs = formatspecs[compresschecking]
|
|
7206
|
-
fp.seek(0, 0)
|
|
7207
|
-
inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
|
|
7208
|
-
formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
|
|
7209
|
-
formdelszie = len(formatspecs['format_delimiter'])
|
|
7210
|
-
formdel = fp.read(formdelszie).decode("UTF-8")
|
|
7211
|
-
if(formstring != formatspecs['format_magic']+inheaderver):
|
|
7212
|
-
return False
|
|
7213
|
-
if(formdel != formatspecs['format_delimiter']):
|
|
7214
|
-
return False
|
|
7215
|
-
if(formatspecs['new_style']):
|
|
7216
|
-
inheader = ReadFileHeaderDataBySize(
|
|
7217
|
-
fp, formatspecs['format_delimiter'])
|
|
7218
|
-
else:
|
|
7219
|
-
inheader = ReadFileHeaderDataWoSize(
|
|
7220
|
-
fp, formatspecs['format_delimiter'])
|
|
7221
|
-
fnumextrafieldsize = int(inheader[5], 16)
|
|
7222
|
-
fnumextrafields = int(inheader[6], 16)
|
|
7223
|
-
extrastart = 7
|
|
7224
|
-
extraend = extrastart + fnumextrafields
|
|
7225
|
-
if(curloc > 0):
|
|
7226
|
-
fp.seek(curloc, 0)
|
|
7227
|
-
formversion = re.findall("([\\d]+)", formstring)
|
|
7228
|
-
fheadsize = int(inheader[0], 16)
|
|
7229
|
-
fnumfields = int(inheader[1], 16)
|
|
7230
|
-
fhencoding = inheader[2]
|
|
7231
|
-
fostype = inheader[3]
|
|
7232
|
-
fnumfiles = int(inheader[4], 16)
|
|
7233
|
-
fprechecksumtype = inheader[-2]
|
|
7234
|
-
fprechecksum = inheader[-1]
|
|
7235
|
-
il = 0
|
|
+il = 0
 headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
 newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
 valid_archive = True
@@ -7433,18 +7091,18 @@ def CatFileValidateMultiple(infile, fmttype="auto", formatspecs=__file_format_mu
 def CatFileValidateMultipleFiles(infile, fmttype="auto", formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
 return CatFileValidateMultiple(infile, fmttype, formatspecs, verbose, returnfp)
 
-def CatFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+def CatFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
 if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
 formatspecs = formatspecs[fmttype]
 elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
 fmttype = "auto"
-curloc =
+curloc = filestart
 if(hasattr(infile, "read") or hasattr(infile, "write")):
 curloc = infile.tell()
 fp = infile
-fp.seek(
-fp = UncompressFileAlt(fp, formatspecs)
-checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
+fp.seek(filestart, 0)
+fp = UncompressFileAlt(fp, formatspecs, filestart)
+checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
 if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
 formatspecs = formatspecs[checkcompressfile]
 if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
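CatFileToArray() gains the filestart parameter immediately after fmttype, which shifts every later positional argument. A usage sketch with a hypothetical archive path, using keywords to stay robust against the new signature:

from pycatfile import CatFileToArray

# Hypothetical archive; listonly=True returns metadata without member contents.
entries = CatFileToArray("example.cat", fmttype="auto", filestart=0,
                         seekstart=0, seekend=0, listonly=True)
if entries:
    print(entries['fnumfiles'])
    for ent in entries['ffilelist']:
        print(ent['fid'], ent['fname'], ent['fsize'])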
@@ -7461,45 +7119,45 @@ def CatFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
 return False
 if(not fp):
 return False
-fp.seek(
+fp.seek(filestart, 0)
 elif(infile == "-"):
-fp =
+fp = MkTempFile()
 if(hasattr(sys.stdin, "buffer")):
 shutil.copyfileobj(sys.stdin.buffer, fp)
 else:
 shutil.copyfileobj(sys.stdin, fp)
-fp.seek(
-fp = UncompressFileAlt(fp, formatspecs)
-checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
+fp.seek(filestart, 0)
+fp = UncompressFileAlt(fp, formatspecs, filestart)
+checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
 if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
 formatspecs = formatspecs[checkcompressfile]
 if(not fp):
 return False
-fp.seek(
+fp.seek(filestart, 0)
 elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
-fp =
+fp = MkTempFile()
 fp.write(infile)
-fp.seek(
-fp = UncompressFileAlt(fp, formatspecs)
-compresscheck = CheckCompressionType(fp, formatspecs, False)
+fp.seek(filestart, 0)
+fp = UncompressFileAlt(fp, formatspecs, filestart)
+compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
 if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
 formatspecs = formatspecs[compresscheck]
 if(not fp):
 return False
-fp.seek(
-elif(re.findall(
+fp.seek(filestart, 0)
+elif(re.findall(__download_proto_support__, infile)):
 fp = download_file_from_internet_file(infile)
-fp = UncompressFileAlt(fp, formatspecs)
-compresscheck = CheckCompressionType(fp, formatspecs, False)
+fp = UncompressFileAlt(fp, formatspecs, filestart)
+compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
 if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
 formatspecs = formatspecs[compresscheck]
-fp.seek(
+fp.seek(filestart, 0)
 if(not fp):
 return False
-fp.seek(
+fp.seek(filestart, 0)
 else:
 infile = RemoveWindowsPath(infile)
-checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
+checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
 if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
 formatspecs = formatspecs[checkcompressfile]
 if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
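The stdin and in-memory branches above now allocate their scratch buffer with MkTempFile() instead of BytesIO(). MkTempFile() is not defined in this hunk; the following is a plausible sketch only, assuming it returns a file-like object that stays in memory for small payloads, spills to disk for large ones, and can be seeded with initial bytes (CatFileStringToArray() below calls it as MkTempFile(instr)):

import tempfile

def MkTempFile(initdata=None, max_size=16 * 1024 * 1024):
    # Assumed helper: behaves like io.BytesIO for small payloads but is backed
    # by a real temporary file once max_size is exceeded.
    fp = tempfile.SpooledTemporaryFile(max_size=max_size)
    if initdata:
        fp.write(initdata)
        fp.seek(0, 0)
    return fp

buf = MkTempFile(b"CatFile1")
print(buf.read())   # b'CatFile1'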
@@ -7514,7 +7172,7 @@ def CatFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
 return False
 elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
 return False
-compresscheck = CheckCompressionType(infile, formatspecs, True)
+compresscheck = CheckCompressionType(infile, formatspecs, filestart, True)
 if(not compresscheck):
 fextname = os.path.splitext(infile)[1]
 if(fextname == ".gz"):
@@ -7537,25 +7195,23 @@ def CatFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
 return False
 if(not compresscheck):
 return False
-fp = UncompressFile(infile, formatspecs, "rb")
+fp = UncompressFile(infile, formatspecs, "rb", filestart)
 try:
-fp.seek(0, 2)
+fp.seek(0, 2)
 except OSError:
-SeekToEndOfFile(fp)
+SeekToEndOfFile(fp)
 except ValueError:
-SeekToEndOfFile(fp)
-CatSize = fp.tell()
+SeekToEndOfFile(fp)
+CatSize = fp.tell()
 CatSizeEnd = CatSize;
 fp.seek(curloc, 0)
-if(curloc > 0):
-fp.seek(0, 0)
 if(IsNestedDict(formatspecs)):
-compresschecking = CheckCompressionType(fp, formatspecs, False)
+compresschecking = CheckCompressionType(fp, formatspecs, filestart, False)
 if(compresschecking not in formatspecs):
 return False
 else:
 formatspecs = formatspecs[compresschecking]
-fp.seek(
+fp.seek(filestart, 0)
 inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
 formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
 formdelszie = len(formatspecs['format_delimiter'])
@@ -7587,8 +7243,6 @@ def CatFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
 fextrafieldslist = json.loads(fextrafieldslist[0])
 except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
 pass
-if(curloc > 0):
-fp.seek(curloc, 0)
 formversion = re.findall("([\\d]+)", formstring)
 fheadsize = int(inheader[0], 16)
 fnumfields = int(inheader[1], 16)
@@ -7777,7 +7431,7 @@ def CatFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
 outfjsoncontent = {}
 elif(outfjsontype=="list"):
 outfprejsoncontent = fp.read(outfjsonsize).decode("UTF-8")
-flisttmp =
+flisttmp = MkTempFile()
 flisttmp.write(outfprejsoncontent.encode())
 flisttmp.seek(0)
 outfjsoncontent = ReadFileHeaderData(flisttmp, outfjsonlen, formatspecs['format_delimiter'])
@@ -7830,7 +7484,7 @@ def CatFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
 VerbosePrintOut("'" + outfjsonchecksum + "' != " + "'" + injsonfcs + "'")
 return False
 outfcontentstart = fp.tell()
-outfcontents =
+outfcontents = MkTempFile()
 pyhascontents = False
 if(outfsize > 0 and not listonly):
 if(outfcompression == "none" or outfcompression == "" or outfcompression == "auto"):
@@ -7853,9 +7507,9 @@ def CatFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
 outfcontents.seek(0, 0)
 if(uncompress):
 cfcontents = UncompressFileAlt(
-outfcontents, formatspecs)
+outfcontents, formatspecs, 0)
 cfcontents.seek(0, 0)
-outfcontents =
+outfcontents = MkTempFile()
 shutil.copyfileobj(cfcontents, outfcontents)
 cfcontents.close()
 outfcontents.seek(0, 0)
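When uncompress is enabled, the member data is passed through UncompressFileAlt() (now with an explicit start offset of 0) and copied into a fresh buffer, as the hunk above shows. A condensed sketch of that copy pattern, using gzip and io.BytesIO as stand-ins for the member compression and for MkTempFile():

import gzip
import io
import shutil

stored = io.BytesIO(gzip.compress(b"hello world"))      # compressed member contents
stored.seek(0, 0)

# Stand-in for UncompressFileAlt(outfcontents, formatspecs, 0): wrap the stream
# with a decompressor that starts reading at offset 0.
decompressed = gzip.GzipFile(fileobj=stored, mode="rb")

fresh = io.BytesIO()                                     # replaces the old buffer, as in the diff
shutil.copyfileobj(decompressed, fresh)
fresh.seek(0, 0)
print(fresh.read())                                      # b'hello world'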
@@ -7898,49 +7552,49 @@ def CatFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=Fals
 return outlist
 
 
-def MultipleCatFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+def MultipleCatFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
 if(isinstance(infile, (list, tuple, ))):
 pass
 else:
 infile = [infile]
 outretval = {}
 for curfname in infile:
-curretfile[curfname] =
+curretfile[curfname] = CatFileToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
 return outretval
 
-def MultipleCatFilesToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
-return MultipleCatFileToArray(infile, fmttype, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
+def MultipleCatFilesToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+return MultipleCatFileToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
 
 
-def CatFileStringToArray(instr, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
-checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
+def CatFileStringToArray(instr, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
 if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
 formatspecs = formatspecs[checkcompressfile]
-fp =
-
-return
+fp = MkTempFile(instr)
+listarrayfiles = CatFileToArray(fp, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
+return listarrayfiles
 
 
 def TarFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False, returnfp=False):
-checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
+checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
 if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
 formatspecs = formatspecs[checkcompressfile]
-fp =
+fp = MkTempFile()
 fp = PackCatFileFromTarFile(
 infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
-
-return
+listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
+return listarrayfiles
 
 
 def ZipFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False, returnfp=False):
-checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
+checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
 if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
 formatspecs = formatspecs[checkcompressfile]
-fp =
+fp = MkTempFile()
 fp = PackCatFileFromZipFile(
 infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
-
-return
+listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
+return listarrayfiles
 
 
 if(not rarfile_support):
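The wrappers above now thread filestart through to CatFileToArray() and return the named listarrayfiles result instead of a bare return. (The loop in MultipleCatFileToArray() still assigns into curretfile while returning outretval, which looks like a pre-existing naming mismatch rather than something introduced here.) A hedged usage sketch with hypothetical file names:

from pycatfile import CatFileStringToArray, MultipleCatFileToArray

# An archive held entirely in memory, e.g. read from a socket or a database blob.
with open("example.cat", "rb") as fh:      # hypothetical file
    raw = fh.read()
single = CatFileStringToArray(raw, filestart=0, listonly=True)

# Several archives at once; the intended result is a dict keyed by input name.
many = MultipleCatFileToArray(["a.cat", "b.cat"], fmttype="auto",
                              filestart=0, listonly=True)
for name, arr in many.items():
    print(name, arr['fnumfiles'] if arr else "unreadable")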
@@ -7949,14 +7603,14 @@ if(not rarfile_support):
 
 if(rarfile_support):
 def RarFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False, returnfp=False):
-checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
+checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
 if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
 formatspecs = formatspecs[checkcompressfile]
-fp =
+fp = MkTempFile()
 fp = PackCatFileFromRarFile(
 infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
-
-return
+listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
+return listarrayfiles
 
 if(not py7zr_support):
 def SevenZipFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False, returnfp=False):
@@ -7964,18 +7618,18 @@ if(not py7zr_support):
 
 if(py7zr_support):
 def SevenZipFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False, returnfp=False):
-checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
+checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
 if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
 formatspecs = formatspecs[checkcompressfile]
-fp =
+fp = MkTempFile()
 fp = PackCatFileFromSevenZipFile(
 infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
-
-return
+listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
+return listarrayfiles
 
 
-def InFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
-checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
+def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
 if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
 formatspecs = formatspecs[checkcompressfile]
 if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
@@ -7987,78 +7641,78 @@ def InFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=
|
|
|
7987
7641
|
elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
|
|
7988
7642
|
return SevenZipFileToArray(infile, seekstart, seekend, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7989
7643
|
elif(checkcompressfile == formatspecs['format_magic']):
|
|
7990
|
-
return CatFileToArray(infile, "auto", seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7644
|
+
return CatFileToArray(infile, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7991
7645
|
else:
|
|
7992
7646
|
return False
|
|
7993
7647
|
return False
|
|
7994
7648
|
|
|
7995
7649
|
|
|
7996
|
-
def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, seekstart=0, seekend=0, listonly=False, skipchecksum=False, checksumtype=["crc32", "crc32", "crc32"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
|
|
7997
|
-
outarray =
|
|
7650
|
+
def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, skipchecksum=False, checksumtype=["crc32", "crc32", "crc32"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
|
|
7651
|
+
outarray = MkTempFile()
|
|
7998
7652
|
packform = PackCatFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
|
|
7999
7653
|
compressionlevel, followlink, checksumtype, extradata, formatspecs, verbose, True)
|
|
8000
|
-
|
|
8001
|
-
return
|
|
7654
|
+
listarrayfiles = CatFileToArray(outarray, "auto", filestart, seekstart, seekend, listonly, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
7655
|
+
return listarrayfiles
|
|
8002
7656
|
|
|
8003
7657
|
|
|
8004
7658
|
def CatFileArrayToArrayIndex(inarray, returnfp=False):
|
|
8005
7659
|
if(isinstance(inarray, dict)):
|
|
8006
|
-
|
|
7660
|
+
listarrayfiles = inarray
|
|
8007
7661
|
else:
|
|
8008
7662
|
return False
|
|
8009
|
-
if(not
|
|
7663
|
+
if(not listarrayfiles):
|
|
8010
7664
|
return False
|
|
8011
|
-
outarray = {'list':
|
|
7665
|
+
outarray = {'list': listarrayfiles, 'filetoid': {}, 'idtofile': {}, 'filetypes': {'directories': {'filetoid': {}, 'idtofile': {}}, 'files': {'filetoid': {}, 'idtofile': {}}, 'links': {'filetoid': {}, 'idtofile': {}}, 'symlinks': {'filetoid': {
|
|
8012
7666
|
}, 'idtofile': {}}, 'hardlinks': {'filetoid': {}, 'idtofile': {}}, 'character': {'filetoid': {}, 'idtofile': {}}, 'block': {'filetoid': {}, 'idtofile': {}}, 'fifo': {'filetoid': {}, 'idtofile': {}}, 'devices': {'filetoid': {}, 'idtofile': {}}}}
|
|
8013
7667
|
if(returnfp):
|
|
8014
|
-
outarray.update({'fp':
|
|
8015
|
-
lenlist = len(
|
|
7668
|
+
outarray.update({'fp': listarrayfiles['fp']})
|
|
7669
|
+
lenlist = len(listarrayfiles['ffilelist'])
|
|
8016
7670
|
lcfi = 0
|
|
8017
|
-
lcfx = int(
|
|
8018
|
-
if(lenlist >
|
|
7671
|
+
lcfx = int(listarrayfiles['fnumfiles'])
|
|
7672
|
+
if(lenlist > listarrayfiles['fnumfiles'] or lenlist < listarrayfiles['fnumfiles']):
|
|
8019
7673
|
lcfx = int(lenlist)
|
|
8020
7674
|
else:
|
|
8021
|
-
lcfx = int(
|
|
7675
|
+
lcfx = int(listarrayfiles['fnumfiles'])
|
|
8022
7676
|
while(lcfi < lcfx):
|
|
8023
|
-
filetoidarray = {
|
|
8024
|
-
['fname']:
|
|
8025
|
-
idtofilearray = {
|
|
8026
|
-
['fid']:
|
|
7677
|
+
filetoidarray = {listarrayfiles['ffilelist'][lcfi]
|
|
7678
|
+
['fname']: listarrayfiles['ffilelist'][lcfi]['fid']}
|
|
7679
|
+
idtofilearray = {listarrayfiles['ffilelist'][lcfi]
|
|
7680
|
+
['fid']: listarrayfiles['ffilelist'][lcfi]['fname']}
|
|
8027
7681
|
outarray['filetoid'].update(filetoidarray)
|
|
8028
7682
|
outarray['idtofile'].update(idtofilearray)
|
|
8029
|
-
if(
|
|
7683
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 0 or listarrayfiles['ffilelist'][lcfi]['ftype'] == 7):
|
|
8030
7684
|
outarray['filetypes']['files']['filetoid'].update(filetoidarray)
|
|
8031
7685
|
outarray['filetypes']['files']['idtofile'].update(idtofilearray)
|
|
8032
|
-
if(
|
|
7686
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 1):
|
|
8033
7687
|
outarray['filetypes']['hardlinks']['filetoid'].update(
|
|
8034
7688
|
filetoidarray)
|
|
8035
7689
|
outarray['filetypes']['hardlinks']['idtofile'].update(
|
|
8036
7690
|
idtofilearray)
|
|
8037
7691
|
outarray['filetypes']['links']['filetoid'].update(filetoidarray)
|
|
8038
7692
|
outarray['filetypes']['links']['idtofile'].update(idtofilearray)
|
|
8039
|
-
if(
|
|
7693
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 2):
|
|
8040
7694
|
outarray['filetypes']['symlinks']['filetoid'].update(filetoidarray)
|
|
8041
7695
|
outarray['filetypes']['symlinks']['idtofile'].update(idtofilearray)
|
|
8042
7696
|
outarray['filetypes']['links']['filetoid'].update(filetoidarray)
|
|
8043
7697
|
outarray['filetypes']['links']['idtofile'].update(idtofilearray)
|
|
8044
|
-
if(
|
|
7698
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 3):
|
|
8045
7699
|
outarray['filetypes']['character']['filetoid'].update(
|
|
8046
7700
|
filetoidarray)
|
|
8047
7701
|
outarray['filetypes']['character']['idtofile'].update(
|
|
8048
7702
|
idtofilearray)
|
|
8049
7703
|
outarray['filetypes']['devices']['filetoid'].update(filetoidarray)
|
|
8050
7704
|
outarray['filetypes']['devices']['idtofile'].update(idtofilearray)
|
|
8051
|
-
if(
|
|
7705
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 4):
|
|
8052
7706
|
outarray['filetypes']['block']['filetoid'].update(filetoidarray)
|
|
8053
7707
|
outarray['filetypes']['block']['idtofile'].update(idtofilearray)
|
|
8054
7708
|
outarray['filetypes']['devices']['filetoid'].update(filetoidarray)
|
|
8055
7709
|
outarray['filetypes']['devices']['idtofile'].update(idtofilearray)
|
|
8056
|
-
if(
|
|
7710
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 5):
|
|
8057
7711
|
outarray['filetypes']['directories']['filetoid'].update(
|
|
8058
7712
|
filetoidarray)
|
|
8059
7713
|
outarray['filetypes']['directories']['idtofile'].update(
|
|
8060
7714
|
idtofilearray)
|
|
8061
|
-
if(
|
|
7715
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 6):
|
|
8062
7716
|
outarray['filetypes']['symlinks']['filetoid'].update(filetoidarray)
|
|
8063
7717
|
outarray['filetypes']['symlinks']['idtofile'].update(idtofilearray)
|
|
8064
7718
|
outarray['filetypes']['devices']['filetoid'].update(filetoidarray)
|
|
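The hunk above only renames the working variable to listarrayfiles; the index that CatFileArrayToArrayIndex() builds is unchanged: name-to-id and id-to-name maps plus per-type sub-indexes keyed by ftype. A minimal usage sketch, assuming pycatfile is installed, that "example.cat" is a placeholder for an existing archive, and using the archive_to_array_neo() wrapper that appears later in this diff to obtain the input dict:

import pycatfile

# "example.cat" is a placeholder path to an existing archive.
archive = pycatfile.archive_to_array_neo("example.cat")
index = pycatfile.CatFileArrayToArrayIndex(archive) if archive else False
if index:
    print(index['filetoid'])                   # file name -> file id
    print(index['idtofile'])                   # file id -> file name
    print(index['filetypes']['directories'])   # per-type filetoid/idtofile maps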
@@ -8067,13 +7721,13 @@ def CatFileArrayToArrayIndex(inarray, returnfp=False):
|
|
|
8067
7721
|
return outarray
|
|
8068
7722
|
|
|
8069
7723
|
|
|
8070
|
-
def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
|
|
7724
|
+
def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
|
|
8071
7725
|
if(isinstance(infile, dict)):
|
|
8072
|
-
|
|
7726
|
+
listarrayfiles = infile
|
|
8073
7727
|
else:
|
|
8074
7728
|
if(infile != "-" and not isinstance(infile, bytes) and not hasattr(infile, "read") and not hasattr(infile, "write")):
|
|
8075
7729
|
infile = RemoveWindowsPath(infile)
|
|
8076
|
-
|
|
7730
|
+
listarrayfiles = CatFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
8077
7731
|
if(IsNestedDict(formatspecs) and fmttype in formatspecs):
|
|
8078
7732
|
formatspecs = formatspecs[fmttype]
|
|
8079
7733
|
elif(IsNestedDict(formatspecs) and fmttype not in formatspecs):
|
|
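RePackCatFile() gains a filestart argument ahead of seekstart/seekend, and it is forwarded to CatFileToArray() when the input still has to be parsed. A hedged call sketch using keyword arguments; the file names are placeholders and only parameters visible in the new signature above are used:

import pycatfile

# "in.cat" and "out.cat" are placeholder paths.
ok = pycatfile.RePackCatFile(
    "in.cat", "out.cat",
    fmttype="auto",
    compression="auto",
    filestart=0,    # new parameter, forwarded to CatFileToArray() when parsing infile
    seekstart=0,
    seekend=0,
    verbose=True,
)
print(ok)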
@@ -8099,15 +7753,15 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8099
7753
|
os.unlink(outfile)
|
|
8100
7754
|
except OSError:
|
|
8101
7755
|
pass
|
|
8102
|
-
if(not
|
|
7756
|
+
if(not listarrayfiles):
|
|
8103
7757
|
return False
|
|
8104
7758
|
if(outfile == "-" or outfile is None):
|
|
8105
7759
|
verbose = False
|
|
8106
|
-
fp =
|
|
7760
|
+
fp = MkTempFile()
|
|
8107
7761
|
elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
|
|
8108
7762
|
fp = outfile
|
|
8109
|
-
elif(re.findall(
|
|
8110
|
-
fp =
|
|
7763
|
+
elif(re.findall(__upload_proto_support__, outfile)):
|
|
7764
|
+
fp = MkTempFile()
|
|
8111
7765
|
else:
|
|
8112
7766
|
fbasename = os.path.splitext(outfile)[0]
|
|
8113
7767
|
fextname = os.path.splitext(outfile)[1]
|
|
@@ -8119,19 +7773,19 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8119
7773
|
return False
|
|
8120
7774
|
formver = formatspecs['format_ver']
|
|
8121
7775
|
fileheaderver = str(int(formver.replace(".", "")))
|
|
8122
|
-
lenlist = len(
|
|
8123
|
-
fnumfiles = int(
|
|
7776
|
+
lenlist = len(listarrayfiles['ffilelist'])
|
|
7777
|
+
fnumfiles = int(listarrayfiles['fnumfiles'])
|
|
8124
7778
|
if(lenlist > fnumfiles or lenlist < fnumfiles):
|
|
8125
7779
|
fnumfiles = lenlist
|
|
8126
|
-
AppendFileHeader(fp, fnumfiles,
|
|
8127
|
-
lenlist = len(
|
|
8128
|
-
fnumfiles = int(
|
|
7780
|
+
AppendFileHeader(fp, fnumfiles, listarrayfiles['fencoding'], [], checksumtype[0], formatspecs)
|
|
7781
|
+
lenlist = len(listarrayfiles['ffilelist'])
|
|
7782
|
+
fnumfiles = int(listarrayfiles['fnumfiles'])
|
|
8129
7783
|
lcfi = 0
|
|
8130
|
-
lcfx = int(
|
|
8131
|
-
if(lenlist >
|
|
7784
|
+
lcfx = int(listarrayfiles['fnumfiles'])
|
|
7785
|
+
if(lenlist > listarrayfiles['fnumfiles'] or lenlist < listarrayfiles['fnumfiles']):
|
|
8132
7786
|
lcfx = int(lenlist)
|
|
8133
7787
|
else:
|
|
8134
|
-
lcfx = int(
|
|
7788
|
+
lcfx = int(listarrayfiles['fnumfiles'])
|
|
8135
7789
|
curinode = 0
|
|
8136
7790
|
curfid = 0
|
|
8137
7791
|
inodelist = []
|
|
@@ -8139,66 +7793,66 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8139
7793
|
filetoinode = {}
|
|
8140
7794
|
reallcfi = 0
|
|
8141
7795
|
while(lcfi < lcfx):
|
|
8142
|
-
fencoding =
|
|
8143
|
-
fcencoding =
|
|
8144
|
-
if(re.findall("^[.|/]",
|
|
8145
|
-
fname =
|
|
7796
|
+
fencoding = listarrayfiles['ffilelist'][reallcfi]['fencoding']
|
|
7797
|
+
fcencoding = listarrayfiles['ffilelist'][reallcfi]['fencoding']
|
|
7798
|
+
if(re.findall("^[.|/]", listarrayfiles['ffilelist'][reallcfi]['fname'])):
|
|
7799
|
+
fname = listarrayfiles['ffilelist'][reallcfi]['fname']
|
|
8146
7800
|
else:
|
|
8147
|
-
fname = "./"+
|
|
7801
|
+
fname = "./"+listarrayfiles['ffilelist'][reallcfi]['fname']
|
|
8148
7802
|
if(verbose):
|
|
8149
7803
|
VerbosePrintOut(fname)
|
|
8150
7804
|
fheadersize = format(
|
|
8151
|
-
int(
|
|
7805
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fheadersize']), 'x').lower()
|
|
8152
7806
|
fsize = format(
|
|
8153
|
-
int(
|
|
8154
|
-
flinkname =
|
|
7807
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fsize']), 'x').lower()
|
|
7808
|
+
flinkname = listarrayfiles['ffilelist'][reallcfi]['flinkname']
|
|
8155
7809
|
fatime = format(
|
|
8156
|
-
int(
|
|
7810
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fatime']), 'x').lower()
|
|
8157
7811
|
fmtime = format(
|
|
8158
|
-
int(
|
|
7812
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fmtime']), 'x').lower()
|
|
8159
7813
|
fctime = format(
|
|
8160
|
-
int(
|
|
7814
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fctime']), 'x').lower()
|
|
8161
7815
|
fbtime = format(
|
|
8162
|
-
int(
|
|
7816
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fbtime']), 'x').lower()
|
|
8163
7817
|
fmode = format(
|
|
8164
|
-
int(
|
|
7818
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fmode']), 'x').lower()
|
|
8165
7819
|
fchmode = format(
|
|
8166
|
-
int(
|
|
7820
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fchmode']), 'x').lower()
|
|
8167
7821
|
fuid = format(
|
|
8168
|
-
int(
|
|
8169
|
-
funame =
|
|
7822
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fuid']), 'x').lower()
|
|
7823
|
+
funame = listarrayfiles['ffilelist'][reallcfi]['funame']
|
|
8170
7824
|
fgid = format(
|
|
8171
|
-
int(
|
|
8172
|
-
fgname =
|
|
7825
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fgid']), 'x').lower()
|
|
7826
|
+
fgname = listarrayfiles['ffilelist'][reallcfi]['fgname']
|
|
8173
7827
|
finode = format(
|
|
8174
|
-
int(
|
|
7828
|
+
int(listarrayfiles['ffilelist'][reallcfi]['finode']), 'x').lower()
|
|
8175
7829
|
flinkcount = format(
|
|
8176
|
-
int(
|
|
7830
|
+
int(listarrayfiles['ffilelist'][reallcfi]['flinkcount']), 'x').lower()
|
|
8177
7831
|
fwinattributes = format(
|
|
8178
|
-
int(
|
|
8179
|
-
fcompression =
|
|
7832
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fwinattributes']), 'x').lower()
|
|
7833
|
+
fcompression = listarrayfiles['ffilelist'][reallcfi]['fcompression']
|
|
8180
7834
|
fcsize = format(
|
|
8181
|
-
int(
|
|
7835
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fcsize']), 'x').lower()
|
|
8182
7836
|
fdev = format(
|
|
8183
|
-
int(
|
|
7837
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fdev']), 'x').lower()
|
|
8184
7838
|
fdev_minor = format(
|
|
8185
|
-
int(
|
|
7839
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fminor']), 'x').lower()
|
|
8186
7840
|
fdev_major = format(
|
|
8187
|
-
int(
|
|
8188
|
-
fseeknextfile =
|
|
8189
|
-
if(len(
|
|
8190
|
-
|
|
8191
|
-
|
|
7841
|
+
int(listarrayfiles['ffilelist'][reallcfi]['fmajor']), 'x').lower()
|
|
7842
|
+
fseeknextfile = listarrayfiles['ffilelist'][reallcfi]['fseeknextfile']
|
|
7843
|
+
if(len(listarrayfiles['ffilelist'][reallcfi]['fextralist']) > listarrayfiles['ffilelist'][reallcfi]['fextrafields'] and len(listarrayfiles['ffilelist'][reallcfi]['fextralist']) > 0):
|
|
7844
|
+
listarrayfiles['ffilelist'][reallcfi]['fextrafields'] = len(
|
|
7845
|
+
listarrayfiles['ffilelist'][reallcfi]['fextralist'])
|
|
8192
7846
|
if(not followlink and len(extradata) <= 0):
|
|
8193
|
-
extradata =
|
|
7847
|
+
extradata = listarrayfiles['ffilelist'][reallcfi]['fextralist']
|
|
8194
7848
|
if(not followlink and len(jsondata) <= 0):
|
|
8195
|
-
jsondata =
|
|
8196
|
-
fcontents =
|
|
8197
|
-
if(not
|
|
8198
|
-
fcontents =
|
|
8199
|
-
typechecktest = CheckCompressionType(fcontents, closefp=False)
|
|
7849
|
+
jsondata = listarrayfiles['ffilelist'][reallcfi]['fjsondata']
|
|
7850
|
+
fcontents = listarrayfiles['ffilelist'][reallcfi]['fcontents']
|
|
7851
|
+
if(not listarrayfiles['ffilelist'][reallcfi]['fcontentasfile']):
|
|
7852
|
+
fcontents = MkTempFile(fcontents)
|
|
7853
|
+
typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
|
|
8200
7854
|
fcontents.seek(0, 0)
|
|
8201
|
-
fcencoding = GetFileEncoding(fcontents, False)
|
|
7855
|
+
fcencoding = GetFileEncoding(fcontents, 0, False)
|
|
8202
7856
|
fcompression = ""
|
|
8203
7857
|
fcsize = format(int(0), 'x').lower()
|
|
8204
7858
|
curcompression = "none"
|
|
@@ -8211,7 +7865,7 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8211
7865
|
ilmin = 0
|
|
8212
7866
|
ilcsize = []
|
|
8213
7867
|
while(ilmin < ilsize):
|
|
8214
|
-
cfcontents =
|
|
7868
|
+
cfcontents = MkTempFile()
|
|
8215
7869
|
fcontents.seek(0, 0)
|
|
8216
7870
|
shutil.copyfileobj(fcontents, cfcontents)
|
|
8217
7871
|
fcontents.seek(0, 0)
|
|
@@ -8228,7 +7882,7 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8228
7882
|
ilcmin = ilcsize.index(min(ilcsize))
|
|
8229
7883
|
curcompression = compressionuselist[ilcmin]
|
|
8230
7884
|
fcontents.seek(0, 0)
|
|
8231
|
-
cfcontents =
|
|
7885
|
+
cfcontents = MkTempFile()
|
|
8232
7886
|
shutil.copyfileobj(fcontents, cfcontents)
|
|
8233
7887
|
cfcontents.seek(0, 0)
|
|
8234
7888
|
cfcontents = CompressOpenFileAlt(
|
|
@@ -8241,10 +7895,10 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8241
7895
|
fcontents.close()
|
|
8242
7896
|
fcontents = cfcontents
|
|
8243
7897
|
if followlink:
|
|
8244
|
-
if(
|
|
8245
|
-
getflinkpath =
|
|
8246
|
-
flinkid =
|
|
8247
|
-
flinkinfo =
|
|
7898
|
+
if(listarrayfiles['ffilelist'][reallcfi]['ftype'] == 1 or listarrayfiles['ffilelist'][reallcfi]['ftype'] == 2):
|
|
7899
|
+
getflinkpath = listarrayfiles['ffilelist'][reallcfi]['flinkname']
|
|
7900
|
+
flinkid = prelistarrayfiles['filetoid'][getflinkpath]
|
|
7901
|
+
flinkinfo = listarrayfiles['ffilelist'][flinkid]
|
|
8248
7902
|
fheadersize = format(
|
|
8249
7903
|
int(flinkinfo['fheadersize']), 'x').lower()
|
|
8250
7904
|
fsize = format(int(flinkinfo['fsize']), 'x').lower()
|
|
@@ -8277,14 +7931,14 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8277
7931
|
extradata = flinkinfo['fjsondata']
|
|
8278
7932
|
fcontents = flinkinfo['fcontents']
|
|
8279
7933
|
if(not flinkinfo['fcontentasfile']):
|
|
8280
|
-
fcontents =
|
|
7934
|
+
fcontents = MkTempFile(fcontents)
|
|
8281
7935
|
ftypehex = format(flinkinfo['ftype'], 'x').lower()
|
|
8282
7936
|
else:
|
|
8283
7937
|
ftypehex = format(
|
|
8284
|
-
|
|
7938
|
+
listarrayfiles['ffilelist'][reallcfi]['ftype'], 'x').lower()
|
|
8285
7939
|
fcurfid = format(curfid, 'x').lower()
|
|
8286
7940
|
if(not followlink and finode != 0):
|
|
8287
|
-
if(
|
|
7941
|
+
if(listarrayfiles['ffilelist'][reallcfi]['ftype'] != 1):
|
|
8288
7942
|
fcurinode = format(int(curinode), 'x').lower()
|
|
8289
7943
|
inodetofile.update({curinode: fname})
|
|
8290
7944
|
filetoinode.update({fname: curinode})
|
|
@@ -8334,7 +7988,7 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8334
7988
|
outvar = fp.read()
|
|
8335
7989
|
fp.close()
|
|
8336
7990
|
return outvar
|
|
8337
|
-
elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(
|
|
7991
|
+
elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
|
|
8338
7992
|
fp = CompressOpenFileAlt(
|
|
8339
7993
|
fp, compression, compressionlevel, compressionuselist, formatspecs)
|
|
8340
7994
|
fp.seek(0, 0)
|
|
@@ -8347,50 +8001,50 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
|
|
|
8347
8001
|
return True
|
|
8348
8002
|
|
|
8349
8003
|
|
|
8350
|
-
def RePackCatFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
|
|
8351
|
-
fp =
|
|
8352
|
-
|
|
8353
|
-
checksumtype, skipchecksum, extradata, formatspecs, verbose, returnfp)
|
|
8354
|
-
return
|
|
8004
|
+
def RePackCatFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
|
|
8005
|
+
fp = MkTempFile(instr)
|
|
8006
|
+
listarrayfiles = RePackCatFile(fp, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
|
|
8007
|
+
checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
|
|
8008
|
+
return listarrayfiles
|
|
8355
8009
|
|
|
8356
8010
|
|
|
8357
|
-
def PackCatFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False,
|
|
8358
|
-
outarray =
|
|
8011
|
+
def PackCatFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
|
|
8012
|
+
outarray = MkTempFile()
|
|
8359
8013
|
packform = PackCatFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
|
|
8360
8014
|
compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, True)
|
|
8361
|
-
|
|
8362
|
-
|
|
8363
|
-
return
|
|
8015
|
+
listarrayfiles = RePackCatFile(outarray, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
|
|
8016
|
+
checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
|
|
8017
|
+
return listarrayfiles
|
|
8364
8018
|
|
|
8365
8019
|
|
|
8366
|
-
def UnPackCatFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, preservepermissions=True, preservetime=True, seektoend=False, verbose=False, returnfp=False):
|
|
8020
|
+
def UnPackCatFile(infile, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, preservepermissions=True, preservetime=True, seektoend=False, verbose=False, returnfp=False):
|
|
8367
8021
|
if(outdir is not None):
|
|
8368
8022
|
outdir = RemoveWindowsPath(outdir)
|
|
8369
8023
|
if(verbose):
|
|
8370
8024
|
logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
|
|
8371
8025
|
if(isinstance(infile, dict)):
|
|
8372
|
-
|
|
8026
|
+
listarrayfiles = infile
|
|
8373
8027
|
else:
|
|
8374
8028
|
if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
|
|
8375
8029
|
infile = RemoveWindowsPath(infile)
|
|
8376
|
-
|
|
8377
|
-
if(not
|
|
8030
|
+
listarrayfiles = CatFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, skipchecksum, formatspecs, seektoend, returnfp)
|
|
8031
|
+
if(not listarrayfiles):
|
|
8378
8032
|
return False
|
|
8379
|
-
lenlist = len(
|
|
8380
|
-
fnumfiles = int(
|
|
8033
|
+
lenlist = len(listarrayfiles['ffilelist'])
|
|
8034
|
+
fnumfiles = int(listarrayfiles['fnumfiles'])
|
|
8381
8035
|
lcfi = 0
|
|
8382
|
-
lcfx = int(
|
|
8383
|
-
if(lenlist >
|
|
8036
|
+
lcfx = int(listarrayfiles['fnumfiles'])
|
|
8037
|
+
if(lenlist > listarrayfiles['fnumfiles'] or lenlist < listarrayfiles['fnumfiles']):
|
|
8384
8038
|
lcfx = int(lenlist)
|
|
8385
8039
|
else:
|
|
8386
|
-
lcfx = int(
|
|
8040
|
+
lcfx = int(listarrayfiles['fnumfiles'])
|
|
8387
8041
|
while(lcfi < lcfx):
|
|
8388
8042
|
funame = ""
|
|
8389
8043
|
try:
|
|
8390
8044
|
import pwd
|
|
8391
8045
|
try:
|
|
8392
8046
|
userinfo = pwd.getpwuid(
|
|
8393
|
-
|
|
8047
|
+
listarrayfiles['ffilelist'][lcfi]['fuid'])
|
|
8394
8048
|
funame = userinfo.pw_name
|
|
8395
8049
|
except KeyError:
|
|
8396
8050
|
funame = ""
|
|
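UnPackCatFile() follows the same pattern: a new filestart argument sits before seekstart, and the parsed archive dict is now held in listarrayfiles. A minimal extraction sketch, assuming pycatfile is importable; "backup.cat" and the output directory are placeholder names:

import os
import pycatfile

outdir = "extracted"               # placeholder output directory
if not os.path.isdir(outdir):
    os.makedirs(outdir)

ok = pycatfile.UnPackCatFile(
    "backup.cat", outdir,          # "backup.cat" is a placeholder archive path
    followlink=False,
    filestart=0,                   # new parameter, forwarded to CatFileToArray()
    preservepermissions=True,
    preservetime=True,
    verbose=True,
)
print(ok)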
@@ -8401,7 +8055,7 @@ def UnPackCatFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0,
|
|
|
8401
8055
|
import grp
|
|
8402
8056
|
try:
|
|
8403
8057
|
groupinfo = grp.getgrgid(
|
|
8404
|
-
|
|
8058
|
+
listarrayfiles['ffilelist'][lcfi]['fgid'])
|
|
8405
8059
|
fgname = groupinfo.gr_name
|
|
8406
8060
|
except KeyError:
|
|
8407
8061
|
fgname = ""
|
|
@@ -8409,15 +8063,15 @@ def UnPackCatFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0,
|
|
|
8409
8063
|
fgname = ""
|
|
8410
8064
|
if(verbose):
|
|
8411
8065
|
VerbosePrintOut(PrependPath(
|
|
8412
|
-
outdir,
|
|
8413
|
-
if(
|
|
8414
|
-
with open(PrependPath(outdir,
|
|
8415
|
-
if(not
|
|
8416
|
-
|
|
8417
|
-
|
|
8418
|
-
|
|
8066
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
|
|
8067
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 0 or listarrayfiles['ffilelist'][lcfi]['ftype'] == 7):
|
|
8068
|
+
with open(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), "wb") as fpc:
|
|
8069
|
+
if(not listarrayfiles['ffilelist'][lcfi]['fcontentasfile']):
|
|
8070
|
+
listarrayfiles['ffilelist'][lcfi]['fcontents'] = MkTempFile(
|
|
8071
|
+
listarrayfiles['ffilelist'][lcfi]['fcontents'])
|
|
8072
|
+
listarrayfiles['ffilelist'][lcfi]['fcontents'].seek(0, 0)
|
|
8419
8073
|
shutil.copyfileobj(
|
|
8420
|
-
|
|
8074
|
+
listarrayfiles['ffilelist'][lcfi]['fcontents'], fpc)
|
|
8421
8075
|
try:
|
|
8422
8076
|
fpc.flush()
|
|
8423
8077
|
if(hasattr(os, "sync")):
|
|
@@ -8428,20 +8082,20 @@ def UnPackCatFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0,
|
|
|
8428
8082
|
pass
|
|
8429
8083
|
except OSError:
|
|
8430
8084
|
pass
|
|
8431
|
-
if(hasattr(os, "chown") and funame ==
|
|
8432
|
-
os.chown(PrependPath(outdir,
|
|
8433
|
-
|
|
8085
|
+
if(hasattr(os, "chown") and funame == listarrayfiles['ffilelist'][lcfi]['funame'] and fgname == listarrayfiles['ffilelist'][lcfi]['fgname'] and preservepermissions):
|
|
8086
|
+
os.chown(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']),
|
|
8087
|
+
listarrayfiles['ffilelist'][lcfi]['fuid'], listarrayfiles['ffilelist'][lcfi]['fgid'])
|
|
8434
8088
|
if(preservepermissions):
|
|
8435
8089
|
os.chmod(PrependPath(
|
|
8436
|
-
outdir,
|
|
8090
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), listarrayfiles['ffilelist'][lcfi]['fchmode'])
|
|
8437
8091
|
if(preservetime):
|
|
8438
|
-
os.utime(PrependPath(outdir,
|
|
8439
|
-
|
|
8440
|
-
if(
|
|
8092
|
+
os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
|
|
8093
|
+
listarrayfiles['ffilelist'][lcfi]['fatime'], listarrayfiles['ffilelist'][lcfi]['fmtime']))
|
|
8094
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 1):
|
|
8441
8095
|
if(followlink):
|
|
8442
|
-
getflinkpath =
|
|
8443
|
-
flinkid =
|
|
8444
|
-
flinkinfo =
|
|
8096
|
+
getflinkpath = listarrayfiles['ffilelist'][lcfi]['flinkname']
|
|
8097
|
+
flinkid = prelistarrayfiles['filetoid'][getflinkpath]
|
|
8098
|
+
flinkinfo = listarrayfiles['ffilelist'][flinkid]
|
|
8445
8099
|
funame = ""
|
|
8446
8100
|
try:
|
|
8447
8101
|
import pwd
|
|
@@ -8463,9 +8117,9 @@ def UnPackCatFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0,
|
|
|
8463
8117
|
except ImportError:
|
|
8464
8118
|
fgname = ""
|
|
8465
8119
|
if(flinkinfo['ftype'] == 0 or flinkinfo['ftype'] == 7):
|
|
8466
|
-
with open(PrependPath(outdir,
|
|
8120
|
+
with open(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), "wb") as fpc:
|
|
8467
8121
|
if(not flinkinfo['fcontentasfile']):
|
|
8468
|
-
flinkinfo['fcontents'] =
|
|
8122
|
+
flinkinfo['fcontents'] = MkTempFile(
|
|
8469
8123
|
flinkinfo['fcontents'])
|
|
8470
8124
|
flinkinfo['fcontents'].seek(0, 0)
|
|
8471
8125
|
shutil.copyfileobj(flinkinfo['fcontents'], fpc)
|
|
@@ -8481,46 +8135,46 @@ def UnPackCatFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0,
|
|
|
8481
8135
|
pass
|
|
8482
8136
|
if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
|
|
8483
8137
|
os.chown(PrependPath(
|
|
8484
|
-
outdir,
|
|
8138
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fuid'], flinkinfo['fgid'])
|
|
8485
8139
|
if(preservepermissions):
|
|
8486
8140
|
os.chmod(PrependPath(
|
|
8487
|
-
outdir,
|
|
8141
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
|
|
8488
8142
|
if(preservetime):
|
|
8489
|
-
os.utime(PrependPath(outdir,
|
|
8143
|
+
os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
|
|
8490
8144
|
flinkinfo['fatime'], flinkinfo['fmtime']))
|
|
8491
8145
|
if(flinkinfo['ftype'] == 1):
|
|
8492
8146
|
os.link(flinkinfo['flinkname'], PrependPath(
|
|
8493
|
-
outdir,
|
|
8147
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
|
|
8494
8148
|
if(flinkinfo['ftype'] == 2):
|
|
8495
8149
|
os.symlink(flinkinfo['flinkname'], PrependPath(
|
|
8496
|
-
outdir,
|
|
8150
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
|
|
8497
8151
|
if(flinkinfo['ftype'] == 5):
|
|
8498
8152
|
if(preservepermissions):
|
|
8499
8153
|
os.mkdir(PrependPath(
|
|
8500
|
-
outdir,
|
|
8154
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
|
|
8501
8155
|
else:
|
|
8502
8156
|
os.mkdir(PrependPath(
|
|
8503
|
-
outdir,
|
|
8157
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
|
|
8504
8158
|
if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
|
|
8505
8159
|
os.chown(PrependPath(
|
|
8506
|
-
outdir,
|
|
8160
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fuid'], flinkinfo['fgid'])
|
|
8507
8161
|
if(preservepermissions):
|
|
8508
8162
|
os.chmod(PrependPath(
|
|
8509
|
-
outdir,
|
|
8163
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
|
|
8510
8164
|
if(preservetime):
|
|
8511
|
-
os.utime(PrependPath(outdir,
|
|
8165
|
+
os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
|
|
8512
8166
|
flinkinfo['fatime'], flinkinfo['fmtime']))
|
|
8513
8167
|
if(flinkinfo['ftype'] == 6 and hasattr(os, "mkfifo")):
|
|
8514
8168
|
os.mkfifo(PrependPath(
|
|
8515
|
-
outdir,
|
|
8169
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
|
|
8516
8170
|
else:
|
|
8517
|
-
os.link(
|
|
8518
|
-
outdir,
|
|
8519
|
-
if(
|
|
8171
|
+
os.link(listarrayfiles['ffilelist'][lcfi]['flinkname'], PrependPath(
|
|
8172
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
|
|
8173
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 2):
|
|
8520
8174
|
if(followlink):
|
|
8521
|
-
getflinkpath =
|
|
8522
|
-
flinkid =
|
|
8523
|
-
flinkinfo =
|
|
8175
|
+
getflinkpath = listarrayfiles['ffilelist'][lcfi]['flinkname']
|
|
8176
|
+
flinkid = prelistarrayfiles['filetoid'][getflinkpath]
|
|
8177
|
+
flinkinfo = listarrayfiles['ffilelist'][flinkid]
|
|
8524
8178
|
funame = ""
|
|
8525
8179
|
try:
|
|
8526
8180
|
import pwd
|
|
@@ -8542,9 +8196,9 @@ def UnPackCatFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0,
|
|
|
8542
8196
|
except ImportError:
|
|
8543
8197
|
fgname = ""
|
|
8544
8198
|
if(flinkinfo['ftype'] == 0 or flinkinfo['ftype'] == 7):
|
|
8545
|
-
with open(PrependPath(outdir,
|
|
8199
|
+
with open(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), "wb") as fpc:
|
|
8546
8200
|
if(not flinkinfo['fcontentasfile']):
|
|
8547
|
-
flinkinfo['fcontents'] =
|
|
8201
|
+
flinkinfo['fcontents'] = MkTempFile(
|
|
8548
8202
|
flinkinfo['fcontents'])
|
|
8549
8203
|
flinkinfo['fcontents'].seek(0, 0)
|
|
8550
8204
|
shutil.copyfileobj(flinkinfo['fcontents'], fpc)
|
|
@@ -8560,71 +8214,71 @@ def UnPackCatFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0,
|
|
|
8560
8214
|
pass
|
|
8561
8215
|
if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
|
|
8562
8216
|
os.chown(PrependPath(
|
|
8563
|
-
outdir,
|
|
8217
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fuid'], flinkinfo['fgid'])
|
|
8564
8218
|
if(preservepermissions):
|
|
8565
8219
|
os.chmod(PrependPath(
|
|
8566
|
-
outdir,
|
|
8220
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
|
|
8567
8221
|
if(preservetime):
|
|
8568
|
-
os.utime(PrependPath(outdir,
|
|
8222
|
+
os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
|
|
8569
8223
|
flinkinfo['fatime'], flinkinfo['fmtime']))
|
|
8570
8224
|
if(flinkinfo['ftype'] == 1):
|
|
8571
8225
|
os.link(flinkinfo['flinkname'], PrependPath(
|
|
8572
|
-
outdir,
|
|
8226
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
|
|
8573
8227
|
if(flinkinfo['ftype'] == 2):
|
|
8574
8228
|
os.symlink(flinkinfo['flinkname'], PrependPath(
|
|
8575
|
-
outdir,
|
|
8229
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
|
|
8576
8230
|
if(flinkinfo['ftype'] == 5):
|
|
8577
8231
|
if(preservepermissions):
|
|
8578
8232
|
os.mkdir(PrependPath(
|
|
8579
|
-
outdir,
|
|
8233
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
|
|
8580
8234
|
else:
|
|
8581
8235
|
os.mkdir(PrependPath(
|
|
8582
|
-
outdir,
|
|
8236
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
|
|
8583
8237
|
if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
|
|
8584
8238
|
os.chown(PrependPath(
|
|
8585
|
-
outdir,
|
|
8239
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fuid'], flinkinfo['fgid'])
|
|
8586
8240
|
if(preservepermissions):
|
|
8587
8241
|
os.chmod(PrependPath(
|
|
8588
|
-
outdir,
|
|
8242
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
|
|
8589
8243
|
if(preservetime):
|
|
8590
|
-
os.utime(PrependPath(outdir,
|
|
8244
|
+
os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
|
|
8591
8245
|
flinkinfo['fatime'], flinkinfo['fmtime']))
|
|
8592
8246
|
if(flinkinfo['ftype'] == 6 and hasattr(os, "mkfifo")):
|
|
8593
8247
|
os.mkfifo(PrependPath(
|
|
8594
|
-
outdir,
|
|
8248
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
|
|
8595
8249
|
else:
|
|
8596
|
-
os.symlink(
|
|
8597
|
-
outdir,
|
|
8598
|
-
if(
|
|
8250
|
+
os.symlink(listarrayfiles['ffilelist'][lcfi]['flinkname'], PrependPath(
|
|
8251
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
|
|
8252
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 5):
|
|
8599
8253
|
if(preservepermissions):
|
|
8600
8254
|
os.mkdir(PrependPath(
|
|
8601
|
-
outdir,
|
|
8255
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), listarrayfiles['ffilelist'][lcfi]['fchmode'])
|
|
8602
8256
|
else:
|
|
8603
8257
|
os.mkdir(PrependPath(
|
|
8604
|
-
outdir,
|
|
8605
|
-
if(hasattr(os, "chown") and funame ==
|
|
8606
|
-
os.chown(PrependPath(outdir,
|
|
8607
|
-
|
|
8258
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
|
|
8259
|
+
if(hasattr(os, "chown") and funame == listarrayfiles['ffilelist'][lcfi]['funame'] and fgname == listarrayfiles['ffilelist'][lcfi]['fgname'] and preservepermissions):
|
|
8260
|
+
os.chown(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']),
|
|
8261
|
+
listarrayfiles['ffilelist'][lcfi]['fuid'], listarrayfiles['ffilelist'][lcfi]['fgid'])
|
|
8608
8262
|
if(preservepermissions):
|
|
8609
8263
|
os.chmod(PrependPath(
|
|
8610
|
-
outdir,
|
|
8264
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), listarrayfiles['ffilelist'][lcfi]['fchmode'])
|
|
8611
8265
|
if(preservetime):
|
|
8612
|
-
os.utime(PrependPath(outdir,
|
|
8613
|
-
|
|
8614
|
-
if(
|
|
8266
|
+
os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
|
|
8267
|
+
listarrayfiles['ffilelist'][lcfi]['fatime'], listarrayfiles['ffilelist'][lcfi]['fmtime']))
|
|
8268
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 6 and hasattr(os, "mkfifo")):
|
|
8615
8269
|
os.mkfifo(PrependPath(
|
|
8616
|
-
outdir,
|
|
8270
|
+
outdir, listarrayfiles['ffilelist'][lcfi]['fname']), listarrayfiles['ffilelist'][lcfi]['fchmode'])
|
|
8617
8271
|
lcfi = lcfi + 1
|
|
8618
8272
|
if(returnfp):
|
|
8619
|
-
return
|
|
8273
|
+
return listarrayfiles['ffilelist']['fp']
|
|
8620
8274
|
else:
|
|
8621
8275
|
return True
|
|
8622
8276
|
|
|
8623
8277
|
|
|
8624
8278
|
def UnPackCatFileString(instr, outdir=None, followlink=False, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
|
|
8625
|
-
fp =
|
|
8626
|
-
|
|
8627
|
-
return
|
|
8279
|
+
fp = MkTempFile(instr)
|
|
8280
|
+
listarrayfiles = UnPackCatFile(fp, outdir, followlink, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
|
|
8281
|
+
return listarrayfiles
|
|
8628
8282
|
|
|
8629
8283
|
def ftype_to_str(ftype):
|
|
8630
8284
|
mapping = {
|
|
@@ -8641,71 +8295,71 @@ def ftype_to_str(ftype):
|
|
|
8641
8295
|
# Default to "file" if unknown
|
|
8642
8296
|
return mapping.get(ftype, "file")
|
|
8643
8297
|
|
|
8644
|
-
def CatFileListFiles(infile, fmttype="auto", seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
|
|
8298
|
+
def CatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
|
|
8645
8299
|
if(verbose):
|
|
8646
8300
|
logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
|
|
8647
8301
|
if(isinstance(infile, dict)):
|
|
8648
|
-
|
|
8302
|
+
listarrayfiles = infile
|
|
8649
8303
|
else:
|
|
8650
8304
|
if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
|
|
8651
8305
|
infile = RemoveWindowsPath(infile)
|
|
8652
|
-
|
|
8653
|
-
if(not
|
|
8306
|
+
listarrayfiles = CatFileToArray(infile, fmttype, filestart, seekstart, seekend, True, False, False, skipchecksum, formatspecs, seektoend, returnfp)
|
|
8307
|
+
if(not listarrayfiles):
|
|
8654
8308
|
return False
|
|
8655
|
-
lenlist = len(
|
|
8656
|
-
fnumfiles = int(
|
|
8309
|
+
lenlist = len(listarrayfiles['ffilelist'])
|
|
8310
|
+
fnumfiles = int(listarrayfiles['fnumfiles'])
|
|
8657
8311
|
lcfi = 0
|
|
8658
|
-
lcfx = int(
|
|
8659
|
-
if(lenlist >
|
|
8312
|
+
lcfx = int(listarrayfiles['fnumfiles'])
|
|
8313
|
+
if(lenlist > listarrayfiles['fnumfiles'] or lenlist < listarrayfiles['fnumfiles']):
|
|
8660
8314
|
lcfx = int(lenlist)
|
|
8661
8315
|
else:
|
|
8662
|
-
lcfx = int(
|
|
8316
|
+
lcfx = int(listarrayfiles['fnumfiles'])
|
|
8663
8317
|
returnval = {}
|
|
8664
8318
|
while(lcfi < lcfx):
|
|
8665
|
-
returnval.update({lcfi:
|
|
8319
|
+
returnval.update({lcfi: listarrayfiles['ffilelist'][lcfi]['fname']})
|
|
8666
8320
|
if(not verbose):
|
|
8667
|
-
VerbosePrintOut(
|
|
8321
|
+
VerbosePrintOut(listarrayfiles['ffilelist'][lcfi]['fname'])
|
|
8668
8322
|
if(verbose):
|
|
8669
8323
|
permissions = {'access': {'0': ('---'), '1': ('--x'), '2': ('-w-'), '3': ('-wx'), '4': (
|
|
8670
8324
|
'r--'), '5': ('r-x'), '6': ('rw-'), '7': ('rwx')}, 'roles': {0: 'owner', 1: 'group', 2: 'other'}}
|
|
8671
|
-
printfname =
|
|
8672
|
-
if(
|
|
8673
|
-
printfname =
|
|
8674
|
-
" link to " +
|
|
8675
|
-
if(
|
|
8676
|
-
printfname =
|
|
8677
|
-
" -> " +
|
|
8678
|
-
fuprint =
|
|
8325
|
+
printfname = listarrayfiles['ffilelist'][lcfi]['fname']
|
|
8326
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 1):
|
|
8327
|
+
printfname = listarrayfiles['ffilelist'][lcfi]['fname'] + \
|
|
8328
|
+
" link to " + listarrayfiles['ffilelist'][lcfi]['flinkname']
|
|
8329
|
+
if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 2):
|
|
8330
|
+
printfname = listarrayfiles['ffilelist'][lcfi]['fname'] + \
|
|
8331
|
+
" -> " + listarrayfiles['ffilelist'][lcfi]['flinkname']
|
|
8332
|
+
fuprint = listarrayfiles['ffilelist'][lcfi]['funame']
|
|
8679
8333
|
if(len(fuprint) <= 0):
|
|
8680
|
-
fuprint =
|
|
8681
|
-
fgprint =
|
|
8334
|
+
fuprint = listarrayfiles['ffilelist'][lcfi]['fuid']
|
|
8335
|
+
fgprint = listarrayfiles['ffilelist'][lcfi]['fgname']
|
|
8682
8336
|
if(len(fgprint) <= 0):
|
|
8683
|
-
fgprint =
|
|
8337
|
+
fgprint = listarrayfiles['ffilelist'][lcfi]['fgid']
|
|
8684
8338
|
if(newstyle):
|
|
8685
|
-
VerbosePrintOut(ftype_to_str(
|
|
8686
|
-
|
|
8339
|
+
VerbosePrintOut(ftype_to_str(listarrayfiles['ffilelist'][lcfi]['ftype']) + "\t" + listarrayfiles['ffilelist'][lcfi]['fcompression'] + "\t" + str(
|
|
8340
|
+
listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + "\t" + printfname)
|
|
8687
8341
|
else:
|
|
8688
|
-
VerbosePrintOut(PrintPermissionString(
|
|
8689
|
-
|
|
8342
|
+
VerbosePrintOut(PrintPermissionString(listarrayfiles['ffilelist'][lcfi]['fmode'], listarrayfiles['ffilelist'][lcfi]['ftype']) + " " + str(fuprint) + "/" + str(fgprint) + " " + str(
|
|
8343
|
+
listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + " " + datetime.datetime.utcfromtimestamp(listarrayfiles['ffilelist'][lcfi]['fmtime']).strftime('%Y-%m-%d %H:%M') + " " + printfname)
|
|
8690
8344
|
lcfi = lcfi + 1
|
|
8691
8345
|
if(returnfp):
|
|
8692
|
-
return
|
|
8346
|
+
return listarrayfiles['fp']
|
|
8693
8347
|
else:
|
|
8694
8348
|
return True
|
|
8695
8349
|
|
|
8696
8350
|
|
|
8697
8351
|
def CatFileStringListFiles(instr, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
|
|
8698
|
-
fp =
|
|
8699
|
-
|
|
8352
|
+
fp = MkTempFile(instr)
|
|
8353
|
+
listarrayfiles = CatFileListFiles(
|
|
8700
8354
|
instr, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, returnfp)
|
|
8701
|
-
return
|
|
8355
|
+
return listarrayfiles
|
|
8702
8356
|
|
|
8703
8357
|
|
|
8704
8358
|
def TarFileListFiles(infile, verbose=False, returnfp=False):
|
|
8705
8359
|
if(verbose):
|
|
8706
8360
|
logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
|
|
8707
8361
|
if(infile == "-"):
|
|
8708
|
-
infile =
|
|
8362
|
+
infile = MkTempFile()
|
|
8709
8363
|
if(hasattr(sys.stdin, "buffer")):
|
|
8710
8364
|
shutil.copyfileobj(sys.stdin.buffer, infile)
|
|
8711
8365
|
else:
|
|
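CatFileListFiles() also accepts the new filestart argument and, with verbose=True, prints either an ls -l style listing or, with newstyle=True, a tab-separated type/compression/size/name line per member. A usage sketch with a placeholder archive name:

import pycatfile

# Classic long listing: permissions, owner/group, size, mtime and name.
pycatfile.CatFileListFiles("backup.cat", fmttype="auto", filestart=0, verbose=True)

# New-style listing: ftype, compression, size and name, separated by tabs.
pycatfile.CatFileListFiles("backup.cat", fmttype="auto", filestart=0,
                           verbose=True, newstyle=True)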
@@ -8714,7 +8368,7 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
|
|
|
8714
8368
|
if(not infile):
|
|
8715
8369
|
return False
|
|
8716
8370
|
infile.seek(0, 0)
|
|
8717
|
-
elif(re.findall(
|
|
8371
|
+
elif(re.findall(__download_proto_support__, infile)):
|
|
8718
8372
|
infile = download_file_from_internet_file(infile)
|
|
8719
8373
|
infile.seek(0, 0)
|
|
8720
8374
|
if(not infile):
|
|
@@ -8738,7 +8392,7 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
|
|
|
8738
8392
|
return False
|
|
8739
8393
|
try:
|
|
8740
8394
|
if(hasattr(infile, "read") or hasattr(infile, "write")):
|
|
8741
|
-
compresscheck = CheckCompressionType(infile, formatspecs, False)
|
|
8395
|
+
compresscheck = CheckCompressionType(infile, formatspecs, 0, False)
|
|
8742
8396
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
8743
8397
|
formatspecs = formatspecs[compresscheck]
|
|
8744
8398
|
if(compresscheck=="zstd"):
|
|
@@ -8750,7 +8404,7 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
|
|
|
8750
8404
|
else:
|
|
8751
8405
|
tarfp = tarfile.open(fileobj=infile, mode="r")
|
|
8752
8406
|
else:
|
|
8753
|
-
compresscheck = CheckCompressionType(infile, formatspecs, True)
|
|
8407
|
+
compresscheck = CheckCompressionType(infile, formatspecs, 0, True)
|
|
8754
8408
|
if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
|
|
8755
8409
|
formatspecs = formatspecs[compresscheck]
|
|
8756
8410
|
if(compresscheck=="zstd"):
|
|
@@ -8821,7 +8475,7 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
|
|
|
8821
8475
|
member.size).rjust(15) + " " + datetime.datetime.utcfromtimestamp(member.mtime).strftime('%Y-%m-%d %H:%M') + " " + printfname)
|
|
8822
8476
|
lcfi = lcfi + 1
|
|
8823
8477
|
if(returnfp):
|
|
8824
|
-
return
|
|
8478
|
+
return listarrayfiles['fp']
|
|
8825
8479
|
else:
|
|
8826
8480
|
return True
|
|
8827
8481
|
|
|
@@ -8830,7 +8484,7 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
|
|
|
8830
8484
|
if(verbose):
|
|
8831
8485
|
logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
|
|
8832
8486
|
if(infile == "-"):
|
|
8833
|
-
infile =
|
|
8487
|
+
infile = MkTempFile()
|
|
8834
8488
|
if(hasattr(sys.stdin, "buffer")):
|
|
8835
8489
|
shutil.copyfileobj(sys.stdin.buffer, infile)
|
|
8836
8490
|
else:
|
|
@@ -8839,7 +8493,7 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
|
|
|
8839
8493
|
if(not infile):
|
|
8840
8494
|
return False
|
|
8841
8495
|
infile.seek(0, 0)
|
|
8842
|
-
elif(re.findall(
|
|
8496
|
+
elif(re.findall(__download_proto_support__, infile)):
|
|
8843
8497
|
infile = download_file_from_internet_file(infile)
|
|
8844
8498
|
infile.seek(0, 0)
|
|
8845
8499
|
if(not infile):
|
|
@@ -8954,7 +8608,7 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
|
|
|
8954
8608
|
15) + " " + datetime.datetime.utcfromtimestamp(int(time.mktime(member.date_time + (0, 0, -1)))).strftime('%Y-%m-%d %H:%M') + " " + printfname)
|
|
8955
8609
|
lcfi = lcfi + 1
|
|
8956
8610
|
if(returnfp):
|
|
8957
|
-
return
|
|
8611
|
+
return listarrayfiles['fp']
|
|
8958
8612
|
else:
|
|
8959
8613
|
return True
|
|
8960
8614
|
|
|
@@ -9092,7 +8746,7 @@ if(rarfile_support):
|
|
|
9092
8746
|
member.file_size).rjust(15) + " " + member.mtime.strftime('%Y-%m-%d %H:%M') + " " + printfname)
|
|
9093
8747
|
lcfi = lcfi + 1
|
|
9094
8748
|
if(returnfp):
|
|
9095
|
-
return
|
|
8749
|
+
return listarrayfiles['fp']
|
|
9096
8750
|
else:
|
|
9097
8751
|
return True
|
|
9098
8752
|
|
|
@@ -9110,7 +8764,7 @@ if(py7zr_support):
|
|
|
9110
8764
|
returnval = {}
|
|
9111
8765
|
szpfp = py7zr.SevenZipFile(infile, mode="r")
|
|
9112
8766
|
file_content = szpfp.readall()
|
|
9113
|
-
#sztest = szpfp.testzip()
|
|
8767
|
+
#sztest = szpfp.testzip()
|
|
9114
8768
|
sztestalt = szpfp.test()
|
|
9115
8769
|
if(sztestalt):
|
|
9116
8770
|
VerbosePrintOut("Bad file found!")
|
|
@@ -9199,7 +8853,7 @@ if(py7zr_support):
|
|
|
9199
8853
|
fsize).rjust(15) + " " + member.creationtime.strftime('%Y-%m-%d %H:%M') + " " + printfname)
|
|
9200
8854
|
lcfi = lcfi + 1
|
|
9201
8855
|
if(returnfp):
|
|
9202
|
-
return
|
|
8856
|
+
return listarrayfiles['fp']
|
|
9203
8857
|
else:
|
|
9204
8858
|
return True
|
|
9205
8859
|
|
|
@@ -9207,7 +8861,7 @@ if(py7zr_support):
|
|
|
9207
8861
|
def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict__, seektoend=False, newstyle=False, returnfp=False):
|
|
9208
8862
|
if(verbose):
|
|
9209
8863
|
logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
|
|
9210
|
-
checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
|
|
8864
|
+
checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
|
|
9211
8865
|
if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
|
|
9212
8866
|
formatspecs = formatspecs[checkcompressfile]
|
|
9213
8867
|
if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
|
|
@@ -9226,12 +8880,12 @@ def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict_
|
|
|
9226
8880
|
|
|
9227
8881
|
|
|
9228
8882
|
def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, seekstart=0, seekend=0, skipchecksum=False, checksumtype=["crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
|
|
9229
|
-
outarray =
|
|
8883
|
+
outarray = MkTempFile()
|
|
9230
8884
|
packform = PackCatFile(infiles, outarray, dirlistfromtxt, compression, compresswholefile,
|
|
9231
8885
|
compressionlevel, followlink, checksumtype, formatspecs, False, True)
|
|
9232
|
-
|
|
8886
|
+
listarrayfiles = CatFileListFiles(
|
|
9233
8887
|
outarray, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
|
|
9234
|
-
return
|
|
8888
|
+
return listarrayfiles
|
|
9235
8889
|
|
|
9236
8890
|
"""
|
|
9237
8891
|
PyNeoFile compatibility layer
|
|
@@ -9244,7 +8898,7 @@ def make_empty_archive_file_pointer_neo(fp, fmttype=None, checksumtype='crc32',
|
|
|
9244
8898
|
return make_empty_file_pointer_neo(fp, fmttype, checksumtype, formatspecs, encoding)
|
|
9245
8899
|
|
|
9246
8900
|
def make_empty_file_neo(outfile=None, fmttype=None, checksumtype='crc32', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
|
|
9247
|
-
return MakeEmptyFile(outfile, fmttype, "auto", False, None, checksumtype, formatspecs, returnfp)
|
|
8901
|
+
return MakeEmptyFile(outfile, fmttype, "auto", False, None, compressionlistalt, checksumtype, formatspecs, returnfp)
|
|
9248
8902
|
|
|
9249
8903
|
def make_empty_archive_file_neo(outfile=None, fmttype=None, checksumtype='crc32', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
|
|
9250
8904
|
return make_empty_file_neo(outfile, fmttype, checksumtype, formatspecs, encoding, returnfp)
|
|
@@ -9253,7 +8907,7 @@ def pack_neo(infiles, outfile=None, formatspecs=__file_format_multi_dict__, chec
|
|
|
9253
8907
|
return PackCatFile(infiles, outfile, False, "auto", compression, False, compression_level, compressionlistalt, False, checksumtypes, [], {}, formatspecs, False, returnfp)
|
|
9254
8908
|
|
|
9255
8909
|
def archive_to_array_neo(infile, formatspecs=__file_format_multi_dict__, listonly=False, skipchecksum=False, uncompress=True, returnfp=False):
|
|
9256
|
-
return CatFileToArray(infile, "auto", 0, 0, listonly, True, uncompress, skipchecksum, formatspecs, False, returnfp)
|
|
8910
|
+
return CatFileToArray(infile, "auto", 0, 0, 0, listonly, True, uncompress, skipchecksum, formatspecs, False, returnfp)
|
|
9257
8911
|
|
|
9258
8912
|
def unpack_neo(infile, outdir='.', formatspecs=__file_format_multi_dict__, skipchecksum=False, uncompress=True, returnfp=False):
|
|
9259
8913
|
return UnPackCatFile(infile, outdir, False, 0, 0, skipchecksum, formatspecs, True, True, False, False, returnfp)
|
|
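The PyNeoFile compatibility wrappers are updated to pass the extra filestart argument through to the underlying functions while keeping their own signatures stable. A brief sketch of the wrapper API with placeholder names; only wrappers visible in this diff are used:

import os
import pycatfile

outdir = "./out"                   # placeholder output directory
if not os.path.isdir(outdir):
    os.mkdir(outdir)

pycatfile.archivefilelistfiles_neo("example.cat")   # list members via the neo wrapper
pycatfile.unpack_neo("example.cat", outdir=outdir)  # extract via the neo wrapper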
@@ -9268,13 +8922,26 @@ def archivefilelistfiles_neo(infile, formatspecs=__file_format_multi_dict__, adv
|
|
|
9268
8922
|
return CatFileListFiles(infile, "auto", 0, 0, False, formatspecs, False, True, advanced, returnfp)
|
|
9269
8923
|
|
|
9270
8924
|
def convert_foreign_to_neo(infile, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["crc32", "crc32", "crc32", "crc32"], compression="auto", compression_level=None, returnfp=False):
|
|
9271
|
-
intmp = InFileToArray(infile, 0, 0, False, True, False, formatspecs, False, False)
|
|
8925
|
+
intmp = InFileToArray(infile, 0, 0, 0, False, True, False, formatspecs, False, False)
|
|
9272
8926
|
return RePackCatFile(intmp, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)
|
|
9273
8927
|
|
|
8928
|
+
def detect_cwd(ftp, file_dir):
|
|
8929
|
+
"""
|
|
8930
|
+
Test whether cwd into file_dir works. Returns True if it does,
|
|
8931
|
+
False if not (so absolute paths should be used).
|
|
8932
|
+
"""
|
|
8933
|
+
if not file_dir or file_dir in ("/", ""):
|
|
8934
|
+
return False # nothing to cwd into
|
|
8935
|
+
try:
|
|
8936
|
+
ftp.cwd(file_dir)
|
|
8937
|
+
return True
|
|
8938
|
+
except all_errors:
|
|
8939
|
+
return False
|
|
8940
|
+
|
|
9274
8941
|
def download_file_from_ftp_file(url):
|
|
9275
8942
|
urlparts = urlparse(url)
|
|
9276
|
-
file_name = os.path.basename(urlparts.path)
|
|
9277
|
-
file_dir = os.path.dirname(urlparts.path)
|
|
8943
|
+
file_name = os.path.basename(unquote(urlparts.path))
|
|
8944
|
+
file_dir = os.path.dirname(unquote(urlparts.path))
|
|
9278
8945
|
if(urlparts.username is not None):
|
|
9279
8946
|
ftp_username = urlparts.username
|
|
9280
8947
|
else:
|
|
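detect_cwd() is a small new helper: it tries ftp.cwd(file_dir) and reports whether that worked, so the transfer code can fall back to absolute RETR/STOR paths on servers that refuse the directory change. A stand-alone sketch, assuming ftplib and an anonymous FTP server at a placeholder host:

from ftplib import FTP
import pycatfile

ftp = FTP()
ftp.connect("ftp.example.com", 21)   # placeholder host
ftp.login()                          # anonymous login

# detect_cwd() leaves the session inside the directory when it succeeds,
# so the relative RETR below matches how the module itself uses it.
with open("file.cat", "wb") as out:
    if pycatfile.detect_cwd(ftp, "/pub/archives"):
        ftp.retrbinary("RETR file.cat", out.write)
    else:
        ftp.retrbinary("RETR /pub/archives/file.cat", out.write)
ftp.quit()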
@@ -9291,7 +8958,7 @@ def download_file_from_ftp_file(url):
|
|
|
9291
8958
|
ftp = FTP_TLS()
|
|
9292
8959
|
else:
|
|
9293
8960
|
return False
|
|
9294
|
-
if(urlparts.scheme == "sftp"):
|
|
8961
|
+
if(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
|
|
9295
8962
|
if(__use_pysftp__):
|
|
9296
8963
|
return download_file_from_pysftp_file(url)
|
|
9297
8964
|
else:
|
|
@@ -9309,26 +8976,70 @@ def download_file_from_ftp_file(url):
|
|
|
9309
8976
|
except socket.timeout:
|
|
9310
8977
|
log.info("Error With URL "+url)
|
|
9311
8978
|
return False
|
|
9312
|
-
|
|
9313
|
-
|
|
9314
|
-
|
|
9315
|
-
|
|
9316
|
-
|
|
9317
|
-
|
|
8979
|
+
if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
|
|
8980
|
+
try:
|
|
8981
|
+
ftp.auth()
|
|
8982
|
+
except all_errors:
|
|
8983
|
+
pass
|
|
8984
|
+
ftp.login(ftp_username, ftp_password)
|
|
8985
|
+
if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
|
|
8986
|
+
try:
|
|
8987
|
+
ftp.prot_p()
|
|
8988
|
+
except all_errors:
|
|
8989
|
+
ftp.prot_c()
|
|
8990
|
+
# UTF-8 filenames if supported
|
|
8991
|
+
try:
|
|
8992
|
+
ftp.sendcmd("OPTS UTF8 ON")
|
|
8993
|
+
ftp.encoding = "utf-8"
|
|
8994
|
+
except all_errors:
|
|
8995
|
+
pass
|
|
8996
|
+
is_cwd_allowed = detect_cwd(ftp, file_dir)
|
|
8997
|
+
ftpfile = MkTempFile()
|
|
8998
|
+
# Try EPSV first, then fall back
|
|
8999
|
+
try:
|
|
9000
|
+
ftp.force_epsv = True
|
|
9001
|
+
ftp.sendcmd("EPSV") # request extended passive
|
|
9002
|
+
if(is_cwd_allowed):
|
|
9003
|
+
ftp.retrbinary("RETR "+file_name, ftpfile.write)
|
|
9004
|
+
else:
|
|
9005
|
+
ftp.retrbinary("RETR "+unquote(urlparts.path), ftpfile.write)
|
|
9006
|
+
except all_errors:
|
|
9007
|
+
try:
|
|
9008
|
+
ftp.set_pasv(True)
|
|
9009
|
+
if(is_cwd_allowed):
|
|
9010
|
+
ftp.retrbinary("RETR "+file_name, ftpfile.write)
|
|
9011
|
+
else:
|
|
9012
|
+
ftp.retrbinary("RETR "+unquote(urlparts.path), ftpfile.write)
|
|
9013
|
+
except all_errors:
|
|
9014
|
+
ftp.set_pasv(False)
|
|
9015
|
+
if(is_cwd_allowed):
|
|
9016
|
+
ftp.retrbinary("RETR "+file_name, ftpfile.write)
|
|
9017
|
+
else:
|
|
9018
|
+
ftp.retrbinary("RETR "+unquote(urlparts.path), ftpfile.write)
|
|
9318
9019
|
ftp.close()
|
|
9319
9020
|
ftpfile.seek(0, 0)
|
|
9320
9021
|
return ftpfile
|
|
9321
9022
|
|
|
9322
9023
|
|
|
9024
|
+
def download_file_from_ftps_file(url):
|
|
9025
|
+
return download_file_from_ftp_file(url)
|
|
9026
|
+
|
|
9027
|
+
|
|
9323
9028
|
def download_file_from_ftp_string(url):
|
|
9324
9029
|
ftpfile = download_file_from_ftp_file(url)
|
|
9325
|
-
|
|
9030
|
+
ftpout = ftpfile.read()
|
|
9031
|
+
ftpfile.close()
|
|
9032
|
+
return ftpout
|
|
9033
|
+
|
|
9034
|
+
|
|
9035
|
+
def download_file_from_ftps_string(url):
|
|
9036
|
+
return download_file_from_ftp_string(url)
|
|
9326
9037
|
|
|
9327
9038
|
|
|
9328
9039
|
def upload_file_to_ftp_file(ftpfile, url):
|
|
9329
9040
|
urlparts = urlparse(url)
|
|
9330
|
-
file_name = os.path.basename(urlparts.path)
|
|
9331
|
-
file_dir = os.path.dirname(urlparts.path)
|
|
9041
|
+
file_name = os.path.basename(unquote(urlparts.path))
|
|
9042
|
+
file_dir = os.path.dirname(unquote(urlparts.path))
|
|
9332
9043
|
if(urlparts.username is not None):
|
|
9333
9044
|
ftp_username = urlparts.username
|
|
9334
9045
|
else:
|
|
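The rewritten FTP download path authenticates TLS sessions explicitly, asks the server for UTF-8 filenames, and tries extended passive mode before falling back to plain passive and then active transfers; the result comes back as a seekable temporary file object. A hedged usage sketch; the URL and credentials are placeholders:

import shutil
import pycatfile

url = "ftp://user:password@ftp.example.com/pub/archives/backup.cat"  # placeholder

fileobj = pycatfile.download_file_from_ftp_file(url)
if fileobj:
    with open("backup.cat", "wb") as out:
        shutil.copyfileobj(fileobj, out)
    fileobj.close()

# Or fetch the raw bytes directly:
data = pycatfile.download_file_from_ftp_string(url)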
@@ -9345,7 +9056,7 @@ def upload_file_to_ftp_file(ftpfile, url):
|
|
|
9345
9056
|
ftp = FTP_TLS()
|
|
9346
9057
|
else:
|
|
9347
9058
|
return False
|
|
9348
|
-
if(urlparts.scheme == "sftp"):
|
|
9059
|
+
if(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
|
|
9349
9060
|
if(__use_pysftp__):
|
|
9350
9061
|
return upload_file_to_pysftp_file(url)
|
|
9351
9062
|
else:
|
|
@@ -9363,22 +9074,66 @@ def upload_file_to_ftp_file(ftpfile, url):
|
|
|
9363
9074
|
except socket.timeout:
|
|
9364
9075
|
log.info("Error With URL "+url)
|
|
9365
9076
|
return False
|
|
9366
|
-
|
|
9367
|
-
|
|
9368
|
-
|
|
9369
|
-
|
|
9077
|
+
if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
|
|
9078
|
+
try:
|
|
9079
|
+
ftp.auth()
|
|
9080
|
+
except all_errors:
|
|
9081
|
+
pass
|
|
9082
|
+
ftp.login(ftp_username, ftp_password)
|
|
9083
|
+
if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
|
|
9084
|
+
try:
|
|
9085
|
+
ftp.prot_p()
|
|
9086
|
+
except all_errors:
|
|
9087
|
+
ftp.prot_c()
|
|
9088
|
+
# UTF-8 filenames if supported
|
|
9089
|
+
try:
|
|
9090
|
+
ftp.sendcmd("OPTS UTF8 ON")
|
|
9091
|
+
ftp.encoding = "utf-8"
|
|
9092
|
+
except all_errors:
|
|
9093
|
+
pass
|
|
9094
|
+
is_cwd_allowed = detect_cwd(ftp, file_dir)
|
|
9095
|
+
ftpfile.seek(0, 0)
|
|
9096
|
+
# Try EPSV first, then fall back
|
|
9097
|
+
try:
|
|
9098
|
+
ftp.force_epsv = True
|
|
9099
|
+
ftp.sendcmd("EPSV") # request extended passive
|
|
9100
|
+
if(is_cwd_allowed):
|
|
9101
|
+
ftp.storbinary("STOR "+file_name, ftpfile)
|
|
9102
|
+
else:
|
|
9103
|
+
ftp.storbinary("STOR "+unquote(urlparts.path), ftpfile)
|
|
9104
|
+
except all_errors:
|
|
9105
|
+
try:
|
|
9106
|
+
ftp.set_pasv(True)
|
|
9107
|
+
if(is_cwd_allowed):
|
|
9108
|
+
ftp.storbinary("STOR "+file_name, ftpfile)
|
|
9109
|
+
else:
|
|
9110
|
+
ftp.storbinary("STOR "+unquote(urlparts.path), ftpfile)
|
|
9111
|
+
except all_errors:
|
|
9112
|
+
ftp.set_pasv(False)
|
|
9113
|
+
if(is_cwd_allowed):
|
|
9114
|
+
ftp.storbinary("STOR "+file_name, ftpfile)
|
|
9115
|
+
else:
|
|
9116
|
+
ftp.storbinary("STOR "+unquote(urlparts.path), ftpfile)
|
|
9370
9117
|
ftp.close()
|
|
9371
9118
|
ftpfile.seek(0, 0)
|
|
9372
9119
|
return ftpfile
|
|
9373
9120
|
|
|
9374
9121
|
|
|
9122
|
+
def upload_file_to_ftps_file(ftpfile, url):
|
|
9123
|
+
return upload_file_to_ftp_file(ftpfile, url)
|
|
9124
|
+
|
|
9125
|
+
|
|
9375
9126
|
def upload_file_to_ftp_string(ftpstring, url):
|
|
9376
|
-
ftpfileo =
|
|
9127
|
+
ftpfileo = MkTempFile(ftpstring)
|
|
9377
9128
|
ftpfile = upload_file_to_ftp_file(ftpfileo, url)
|
|
9378
9129
|
ftpfileo.close()
|
|
9379
9130
|
return ftpfile
|
|
9380
9131
|
|
|
9381
9132
|
|
|
9133
|
+
def upload_file_to_ftps_string(ftpstring, url):
|
|
9134
|
+
return upload_file_to_ftp_string(ftpstring, url)
|
|
9135
|
+
|
|
9136
|
+
|
|
9382
9137
|
class RawIteratorWrapper:
|
|
9383
9138
|
def __init__(self, iterator):
|
|
9384
9139
|
self.iterator = iterator
|
|
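Uploads mirror the download path, including the EPSV-then-PASV fallback and the cwd detection, and the new upload_file_to_ftps_* wrappers simply delegate to the FTP versions. A short sketch for pushing an archive back out, with a placeholder URL:

import pycatfile

url = "ftps://user:password@ftp.example.com/pub/archives/backup.cat"  # placeholder

# Upload from an open file object...
with open("backup.cat", "rb") as fp:
    pycatfile.upload_file_to_ftp_file(fp, url)

# ...or from a bytes string via the convenience wrapper.
with open("backup.cat", "rb") as fp:
    pycatfile.upload_file_to_ftps_string(fp.read(), url)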
@@ -9416,7 +9171,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
|
|
|
9416
9171
|
urlparts.params, urlparts.query, urlparts.fragment))
|
|
9417
9172
|
|
|
9418
9173
|
# Handle SFTP/FTP
|
|
9419
|
-
if urlparts.scheme == "sftp":
|
|
9174
|
+
if urlparts.scheme == "sftp" or urlparts.scheme == "scp":
|
|
9420
9175
|
if __use_pysftp__:
|
|
9421
9176
|
return download_file_from_pysftp_file(url)
|
|
9422
9177
|
else:
|
|
@@ -9425,7 +9180,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
|
|
|
9425
9180
|
return download_file_from_ftp_file(url)
|
|
9426
9181
|
|
|
9427
9182
|
# Create a temporary file object
|
|
9428
|
-
httpfile =
|
|
9183
|
+
httpfile = MkTempFile()
|
|
9429
9184
|
|
|
9430
9185
|
# 1) Requests branch
|
|
9431
9186
|
if usehttp == 'requests' and haverequests:
|
|
@@ -9489,14 +9244,16 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
|
|
|
9489
9244
|
|
|
9490
9245
|
def download_file_from_http_string(url, headers=geturls_headers_pyfile_python_alt, usehttp=__use_http_lib__):
|
|
9491
9246
|
httpfile = download_file_from_http_file(url, headers, usehttp)
|
|
9492
|
-
|
|
9247
|
+
httpout = httpfile.read()
|
|
9248
|
+
httpfile.close()
|
|
9249
|
+
return httpout
|
|
9493
9250
|
|
|
9494
9251
|
|
|
9495
9252
|
if(haveparamiko):
|
|
9496
9253
|
def download_file_from_sftp_file(url):
|
|
9497
9254
|
urlparts = urlparse(url)
|
|
9498
|
-
file_name = os.path.basename(urlparts.path)
|
|
9499
|
-
file_dir = os.path.dirname(urlparts.path)
|
|
9255
|
+
file_name = os.path.basename(unquote(urlparts.path))
|
|
9256
|
+
file_dir = os.path.dirname(unquote(urlparts.path))
|
|
9500
9257
|
sftp_port = urlparts.port
|
|
9501
9258
|
if(urlparts.port is None):
|
|
9502
9259
|
sftp_port = 22
|
|
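download_file_from_http_string() now reads the temporary file into memory and closes it before returning, so callers get plain bytes back. A sketch with a placeholder URL, relying on whichever HTTP backend (requests, httpx or urllib) the module selected earlier:

import pycatfile

url = "https://www.example.com/downloads/backup.cat"  # placeholder

data = pycatfile.download_file_from_http_string(url)
if data:
    with open("backup.cat", "wb") as out:
        out.write(data)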
@@ -9516,14 +9273,14 @@ if(haveparamiko):
|
|
|
9516
9273
|
return download_file_from_ftp_file(url)
|
|
9517
9274
|
elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
|
|
9518
9275
|
return download_file_from_http_file(url)
|
|
9519
|
-
if(urlparts.scheme != "sftp"):
|
|
9276
|
+
if(urlparts.scheme != "sftp" and urlparts.scheme != "scp"):
|
|
9520
9277
|
return False
|
|
9521
9278
|
ssh = paramiko.SSHClient()
|
|
9522
9279
|
ssh.load_system_host_keys()
|
|
9523
9280
|
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
|
|
9524
9281
|
try:
|
|
9525
9282
|
ssh.connect(urlparts.hostname, port=sftp_port,
|
|
9526
|
-
username=
|
|
9283
|
+
username=sftp_username, password=urlparts.password)
|
|
9527
9284
|
except paramiko.ssh_exception.SSHException:
|
|
9528
9285
|
return False
|
|
9529
9286
|
except socket.gaierror:
|
|
@@ -9533,8 +9290,8 @@ if(haveparamiko):
|
|
|
9533
9290
|
log.info("Error With URL "+url)
|
|
9534
9291
|
return False
|
|
9535
9292
|
sftp = ssh.open_sftp()
|
|
9536
|
-
sftpfile =
|
|
9537
|
-
sftp.getfo(urlparts.path, sftpfile)
|
|
9293
|
+
sftpfile = MkTempFile()
|
|
9294
|
+
sftp.getfo(unquote(urlparts.path), sftpfile)
|
|
9538
9295
|
sftp.close()
|
|
9539
9296
|
ssh.close()
|
|
9540
9297
|
sftpfile.seek(0, 0)
|
|
@@ -9546,7 +9303,9 @@ else:
|
|
|
9546
9303
|
if(haveparamiko):
|
|
9547
9304
|
def download_file_from_sftp_string(url):
|
|
9548
9305
|
sftpfile = download_file_from_sftp_file(url)
|
|
9549
|
-
|
|
9306
|
+
sftpout = sftpfile.read()
|
|
9307
|
+
sftpfile.close()
|
|
9308
|
+
return sftpout
|
|
9550
9309
|
else:
|
|
9551
9310
|
def download_file_from_sftp_string(url):
|
|
9552
9311
|
return False
|
|
@@ -9554,8 +9313,8 @@ else:
|
|
|
9554
9313
|
if(haveparamiko):
|
|
9555
9314
|
def upload_file_to_sftp_file(sftpfile, url):
|
|
9556
9315
|
urlparts = urlparse(url)
|
|
9557
|
-
file_name = os.path.basename(urlparts.path)
|
|
9558
|
-
file_dir = os.path.dirname(urlparts.path)
|
|
9316
|
+
file_name = os.path.basename(unquote(urlparts.path))
|
|
9317
|
+
file_dir = os.path.dirname(unquote(urlparts.path))
|
|
9559
9318
|
sftp_port = urlparts.port
|
|
9560
9319
|
if(urlparts.port is None):
|
|
9561
9320
|
sftp_port = 22
|
|
@@ -9572,17 +9331,17 @@ if(haveparamiko):
         else:
             sftp_password = ""
         if(urlparts.scheme == "ftp"):
-            return upload_file_to_ftp_file(url)
+            return upload_file_to_ftp_file(sftpfile, url)
         elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
             return False
-        if(urlparts.scheme != "sftp"):
+        if(urlparts.scheme != "sftp" and urlparts.scheme != "scp"):
             return False
         ssh = paramiko.SSHClient()
         ssh.load_system_host_keys()
         ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
         try:
             ssh.connect(urlparts.hostname, port=sftp_port,
-                        username=
+                        username=sftp_username, password=sftp_password)
         except paramiko.ssh_exception.SSHException:
             return False
         except socket.gaierror:
@@ -9592,7 +9351,8 @@ if(haveparamiko):
             log.info("Error With URL "+url)
             return False
         sftp = ssh.open_sftp()
-
+        sftpfile.seek(0, 0)
+        sftp.putfo(sftpfile, unquote(urlparts.path))
         sftp.close()
         ssh.close()
         sftpfile.seek(0, 0)
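The upload path mirrors the download path: the buffer is rewound with seek(0, 0) before SFTPClient.putfo() so the whole payload is transferred rather than only the bytes after the current file position. A sketch with placeholder connection details:

import io
import paramiko

payload = io.BytesIO(b"archive bytes")      # stands in for the sftpfile argument
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect("example.com", port=22, username="user", password="pass")  # placeholders
sftp = ssh.open_sftp()
payload.seek(0, 0)                          # rewind first, as the new code does
sftp.putfo(payload, "/remote/archive.cat")  # placeholder remote path
sftp.close()
ssh.close()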
@@ -9603,7 +9363,7 @@ else:

 if(haveparamiko):
     def upload_file_to_sftp_string(sftpstring, url):
-        sftpfileo =
+        sftpfileo = MkTempFile(sftpstring)
         sftpfile = upload_file_to_sftp_files(sftpfileo, url)
         sftpfileo.close()
         return sftpfile
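upload_file_to_sftp_string now wraps the raw bytes in a temporary file object via MkTempFile(sftpstring) before delegating to the file-based uploader. The same bytes-to-file-object adapter pattern can be sketched with BytesIO (whether MkTempFile spools large payloads to disk is an assumption here, and upload_bytes is not part of pycatfile.py):

import io

def upload_bytes(payload, url, uploader):
    # Adapter only: wrap bytes in a file object, hand it to a file-based uploader, then clean up.
    fileobj = io.BytesIO(payload)  # BytesIO stands in for MkTempFile(payload)
    try:
        return uploader(fileobj, url)
    finally:
        fileobj.close()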
@@ -9614,8 +9374,8 @@ else:
 if(havepysftp):
     def download_file_from_pysftp_file(url):
         urlparts = urlparse(url)
-        file_name = os.path.basename(urlparts.path)
-        file_dir = os.path.dirname(urlparts.path)
+        file_name = os.path.basename(unquote(urlparts.path))
+        file_dir = os.path.dirname(unquote(urlparts.path))
         sftp_port = urlparts.port
         if(urlparts.port is None):
             sftp_port = 22
@@ -9635,11 +9395,11 @@ if(havepysftp):
             return download_file_from_ftp_file(url)
         elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
             return download_file_from_http_file(url)
-        if(urlparts.scheme != "sftp"):
+        if(urlparts.scheme != "sftp" and urlparts.scheme != "scp"):
             return False
         try:
-            pysftp.Connection(urlparts.hostname, port=sftp_port,
-                              username=
+            sftp = pysftp.Connection(urlparts.hostname, port=sftp_port,
+                                     username=sftp_username, password=sftp_password)
         except paramiko.ssh_exception.SSHException:
             return False
         except socket.gaierror:
@@ -9648,9 +9408,8 @@ if(havepysftp):
         except socket.timeout:
             log.info("Error With URL "+url)
             return False
-
-
-        sftp.getfo(urlparts.path, sftpfile)
+        sftpfile = MkTempFile()
+        sftp.getfo(unquote(urlparts.path), sftpfile)
         sftp.close()
         ssh.close()
         sftpfile.seek(0, 0)
@@ -9662,7 +9421,9 @@ else:
 if(havepysftp):
     def download_file_from_pysftp_string(url):
         sftpfile = download_file_from_pysftp_file(url)
-
+        sftpout = sftpfile.read()
+        sftpfile.close()
+        return sftpout
 else:
     def download_file_from_pysftp_string(url):
         return False
@@ -9670,8 +9431,8 @@ else:
 if(havepysftp):
     def upload_file_to_pysftp_file(sftpfile, url):
         urlparts = urlparse(url)
-        file_name = os.path.basename(urlparts.path)
-        file_dir = os.path.dirname(urlparts.path)
+        file_name = os.path.basename(unquote(urlparts.path))
+        file_dir = os.path.dirname(unquote(urlparts.path))
         sftp_port = urlparts.port
         if(urlparts.port is None):
             sftp_port = 22
@@ -9688,14 +9449,14 @@ if(havepysftp):
         else:
             sftp_password = ""
         if(urlparts.scheme == "ftp"):
-            return upload_file_to_ftp_file(url)
+            return upload_file_to_ftp_file(sftpfile, url)
         elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
             return False
-        if(urlparts.scheme != "sftp"):
+        if(urlparts.scheme != "sftp" and urlparts.scheme != "scp"):
             return False
         try:
-            pysftp.Connection(urlparts.hostname, port=sftp_port,
-                              username=
+            sftp = pysftp.Connection(urlparts.hostname, port=sftp_port,
+                                     username=sftp_username, password=sftp_password)
         except paramiko.ssh_exception.SSHException:
             return False
         except socket.gaierror:
@@ -9704,8 +9465,8 @@ if(havepysftp):
         except socket.timeout:
             log.info("Error With URL "+url)
             return False
-
-        sftp.putfo(sftpfile, urlparts.path)
+        sftpfile.seek(0, 0)
+        sftp.putfo(sftpfile, unquote(urlparts.path))
         sftp.close()
         ssh.close()
         sftpfile.seek(0, 0)
@@ -9716,7 +9477,7 @@ else:

 if(havepysftp):
     def upload_file_to_pysftp_string(sftpstring, url):
-        sftpfileo =
+        sftpfileo = MkTempFile(sftpstring)
         sftpfile = upload_file_to_pysftp_file(ftpfileo, url)
         sftpfileo.close()
         return sftpfile
@@ -9731,7 +9492,7 @@ def download_file_from_internet_file(url, headers=geturls_headers_pyfile_python_
         return download_file_from_http_file(url, headers, usehttp)
     elif(urlparts.scheme == "ftp" or urlparts.scheme == "ftps"):
         return download_file_from_ftp_file(url)
-    elif(urlparts.scheme == "sftp"):
+    elif(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
         if(__use_pysftp__ and havepysftp):
             return download_file_from_pysftp_file(url)
         else:
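With the hunk above, download_file_from_internet_file routes scp:// URLs through the same SFTP backends as sftp:// URLs. The dispatch reduces to a scheme check like the illustrative helper below (pick_transport is not part of pycatfile.py):

from urllib.parse import urlparse

def pick_transport(url):
    # Illustrative only: mirrors how the URL scheme selects a download backend.
    scheme = urlparse(url).scheme
    if scheme in ("http", "https"):
        return "http"
    if scheme in ("ftp", "ftps"):
        return "ftp"
    if scheme in ("sftp", "scp"):
        return "sftp"  # scp:// now rides the SFTP code path
    return None

print(pick_transport("scp://example.com/backup.cat"))  # -> 'sftp'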
@@ -9741,9 +9502,9 @@ def download_file_from_internet_file(url, headers=geturls_headers_pyfile_python_
         return False


-def download_file_from_internet_uncompress_file(url, headers=geturls_headers_pyfile_python_alt, formatspecs=__file_format_dict__):
+def download_file_from_internet_uncompress_file(url, headers=geturls_headers_pyfile_python_alt, filestart=0, formatspecs=__file_format_dict__):
     fp = download_file_from_internet_file(url)
-    fp = UncompressFileAlt(fp, formatspecs)
+    fp = UncompressFileAlt(fp, formatspecs, filestart)
     fp.seek(0, 0)
     if(not fp):
         return False
@@ -9756,7 +9517,7 @@ def download_file_from_internet_string(url, headers=geturls_headers_pyfile_pytho
         return download_file_from_http_string(url, headers)
     elif(urlparts.scheme == "ftp" or urlparts.scheme == "ftps"):
         return download_file_from_ftp_string(url)
-    elif(urlparts.scheme == "sftp"):
+    elif(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
         if(__use_pysftp__ and havepysftp):
             return download_file_from_pysftp_string(url)
         else:
@@ -9766,13 +9527,15 @@ def download_file_from_internet_string(url, headers=geturls_headers_pyfile_pytho
         return False


-def download_file_from_internet_uncompress_string(url, headers=geturls_headers_pyfile_python_alt, formatspecs=__file_format_dict__):
+def download_file_from_internet_uncompress_string(url, headers=geturls_headers_pyfile_python_alt, filestart=0, formatspecs=__file_format_dict__):
     fp = download_file_from_internet_string(url)
-    fp = UncompressFileAlt(fp, formatspecs)
-    fp.seek(0, 0)
+    fp = UncompressFileAlt(fp, formatspecs, filestart)
     if(not fp):
         return False
-
+    fp.seek(0, 0)
+    fpout = fp.read()
+    fp.close
+    return fpout


 def upload_file_to_internet_file(ifp, url):
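After this hunk, download_file_from_internet_uncompress_string accepts a filestart offset, decompresses from there, and returns the decompressed bytes instead of a file object. A hedged usage sketch; the URL is a placeholder and filestart=0 assumes the archive begins at the start of the stream:

import pycatfile  # assumes the installed package

data = pycatfile.download_file_from_internet_uncompress_string(
    "https://example.com/archive.cat.gz",  # placeholder URL
    filestart=0)
if data is False:
    print("download or decompression failed")
else:
    print("got", len(data), "bytes of uncompressed archive data")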
@@ -9781,7 +9544,7 @@ def upload_file_to_internet_file(ifp, url):
         return False
     elif(urlparts.scheme == "ftp" or urlparts.scheme == "ftps"):
         return upload_file_to_ftp_file(ifp, url)
-    elif(urlparts.scheme == "sftp"):
+    elif(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
         if(__use_pysftp__ and havepysftp):
             return upload_file_to_pysftp_file(ifp, url)
         else:
@@ -9794,11 +9557,10 @@ def upload_file_to_internet_file(ifp, url):
 def upload_file_to_internet_compress_file(ifp, url, compression="auto", compressionlevel=None, compressionuselist=compressionlistalt, formatspecs=__file_format_dict__):
     fp = CompressOpenFileAlt(
         fp, compression, compressionlevel, compressionuselist, formatspecs)
-    if(not
+    if(not catfileout):
         return False
     fp.seek(0, 0)
-    upload_file_to_internet_file(fp, outfile)
-    return True
+    return upload_file_to_internet_file(fp, outfile)


 def upload_file_to_internet_string(ifp, url):
@@ -9807,7 +9569,7 @@ def upload_file_to_internet_string(ifp, url):
         return False
     elif(urlparts.scheme == "ftp" or urlparts.scheme == "ftps"):
         return upload_file_to_ftp_string(ifp, url)
-    elif(urlparts.scheme == "sftp"):
+    elif(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
         if(__use_pysftp__ and havepysftp):
             return upload_file_to_pysftp_string(ifp, url)
         else:
@@ -9818,10 +9580,10 @@ def upload_file_to_internet_string(ifp, url):


 def upload_file_to_internet_compress_string(ifp, url, compression="auto", compressionlevel=None, compressionuselist=compressionlistalt, formatspecs=__file_format_dict__):
+    internetfileo = MkTempFile(ifp)
     fp = CompressOpenFileAlt(
-
-    if(not
+        internetfileo, compression, compressionlevel, compressionuselist, formatspecs)
+    if(not catfileout):
         return False
     fp.seek(0, 0)
-    upload_file_to_internet_file(fp, outfile)
-    return True
+    return upload_file_to_internet_file(fp, outfile)