PyArchiveFile 0.21.4__py3-none-any.whl → 0.22.2__py3-none-any.whl

This diff compares the contents of two publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions exactly as they appear in the public registry.
pyarchivefile.py CHANGED
@@ -14,7 +14,7 @@
14
14
  Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
15
15
  Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski
16
16
 
17
- $FileInfo: pyarchivefile.py - Last Update: 8/26/2025 Ver. 0.21.4 RC 1 - Author: cooldude2k $
17
+ $FileInfo: pyarchivefile.py - Last Update: 8/29/2025 Ver. 0.22.2 RC 1 - Author: cooldude2k $
18
18
  '''
19
19
 
20
20
  from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -38,6 +38,7 @@ import zipfile
38
38
  import binascii
39
39
  import platform
40
40
  from io import StringIO, BytesIO
41
+ import posixpath as pp # POSIX-safe joins/normpaths
41
42
  try:
42
43
  from backports import tempfile
43
44
  except ImportError:
@@ -45,10 +46,10 @@ except ImportError:
45
46
  # FTP Support
46
47
  ftpssl = True
47
48
  try:
48
- from ftplib import FTP, FTP_TLS
49
+ from ftplib import FTP, FTP_TLS, all_errors
49
50
  except ImportError:
50
51
  ftpssl = False
51
- from ftplib import FTP
52
+ from ftplib import FTP, all_errors
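The all_errors import added on both the TLS and non-TLS branches is ftplib's standard tuple of its exception classes, which lets later FTP code catch transfer failures with a single except clause. A minimal sketch of that pattern; the host name and the helper try_ftp_listing are placeholders, not taken from the package:

from ftplib import FTP, all_errors

def try_ftp_listing(host):
    # all_errors is a tuple of ftplib exception types, so it can be caught directly.
    try:
        ftp = FTP(host, timeout=10)
        ftp.login()            # anonymous login
        names = ftp.nlst()
        ftp.quit()
        return names
    except all_errors as exc:
        print("FTP operation failed:", exc)
        return None

# try_ftp_listing("ftp.example.com")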
52
53
 
53
54
  try:
54
55
  import ujson as json
@@ -102,9 +103,13 @@ baseint = tuple(baseint)
102
103
 
103
104
  # URL Parsing
104
105
  try:
105
- from urllib.parse import urlparse, urlunparse
106
+ # Python 3
107
+ from urllib.parse import urlparse, urlunparse, unquote
108
+ from urllib.request import url2pathname
106
109
  except ImportError:
110
+ # Python 2
107
111
  from urlparse import urlparse, urlunparse
112
+ from urllib import unquote, url2pathname
108
113
 
109
114
  # Windows-specific setup
110
115
  if os.name == "nt":
@@ -264,7 +269,8 @@ def get_default_threads():
264
269
  # os.cpu_count() might not be available in some environments
265
270
  return 1
266
271
 
267
-
272
+ __upload_proto_support__ = "^(ftp|ftps|sftp|scp)://"
273
+ __download_proto_support__ = "^(http|https|ftp|ftps|sftp|scp)://"
268
274
  __use_pysftp__ = False
269
275
  if(not havepysftp):
270
276
  __use_pysftp__ = False
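The two module-level patterns added above centralize which URL schemes are treated as upload and download targets; later hunks swap the inline regexes for these names, and scp:// is newly included. A small sketch of how they classify URLs using only the standard library; the classify_url helper and example URLs are illustrative, not part of the package:

import re

__upload_proto_support__ = "^(ftp|ftps|sftp|scp)://"
__download_proto_support__ = "^(http|https|ftp|ftps|sftp|scp)://"

def classify_url(url):
    # re.findall() is what the package itself calls on these patterns.
    return {
        "upload": bool(re.findall(__upload_proto_support__, url)),
        "download": bool(re.findall(__download_proto_support__, url)),
    }

classify_url("scp://host/archive.cat")    # {'upload': True, 'download': True}
classify_url("https://host/archive.cat")  # {'upload': False, 'download': True}
classify_url("/local/archive.cat")        # {'upload': False, 'download': False}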
@@ -373,12 +379,12 @@ __file_format_extension__ = __file_format_multi_dict__[__file_format_default__][
373
379
  __file_format_dict__ = __file_format_multi_dict__[__file_format_default__]
374
380
  __project__ = __program_name__
375
381
  __project_url__ = "https://github.com/GameMaker2k/PyArchiveFile"
376
- __version_info__ = (0, 21, 4, "RC 1", 1)
377
- __version_date_info__ = (2025, 9, 26, "RC 1", 1)
382
+ __version_info__ = (0, 22, 2, "RC 1", 1)
383
+ __version_date_info__ = (2025, 9, 29, "RC 1", 1)
378
384
  __version_date__ = str(__version_date_info__[0]) + "." + str(
379
385
  __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
380
386
  __revision__ = __version_info__[3]
381
- __revision_id__ = "$Id: 51621a2b361db767bc985f415869721fb7be5e6c $"
387
+ __revision_id__ = "$Id: 2d0113e4a024eebc00f4f678644ba85ab9cd699d $"
382
388
  if(__version_info__[4] is not None):
383
389
  __version_date_plusrc__ = __version_date__ + \
384
390
  "-" + str(__version_date_info__[4])
@@ -608,6 +614,182 @@ def _normalize_initial_data(data, isbytes, encoding):
608
614
  return str(data)
609
615
 
610
616
 
617
+ def _split_posix(path_text):
618
+ """Split POSIX paths regardless of OS; return list of components."""
619
+ # Normalize leading './'
620
+ if path_text.startswith(u'./'):
621
+ path_text = path_text[2:]
622
+ # Strip redundant slashes
623
+ path_text = re.sub(u'/+', u'/', path_text)
624
+ # Drop trailing '/' so 'dir/' -> ['dir']
625
+ if path_text.endswith(u'/'):
626
+ path_text = path_text[:-1]
627
+ return path_text.split(u'/') if path_text else []
628
+
629
+ def _is_abs_like(s):
630
+ """Absolute targets (POSIX or Windows-drive style)."""
631
+ return s.startswith(u'/') or s.startswith(u'\\') or re.match(u'^[A-Za-z]:[/\\\\]', s)
632
+
633
+ def _resolves_outside(base_rel, target_rel):
634
+ """
635
+ Given a base directory (relative, POSIX) and a target (relative),
636
+ return True if base/target resolves outside of base.
637
+ We anchor under '/' so normpath is root-anchored and portable.
638
+ """
639
+ base_clean = u'/'.join(_split_posix(base_rel))
640
+ target_clean = u'/'.join(_split_posix(target_rel))
641
+ base_abs = u'/' + base_clean if base_clean else u'/'
642
+ combined = pp.normpath(pp.join(base_abs, target_clean))
643
+ if combined == base_abs or combined.startswith(base_abs + u'/'):
644
+ return False
645
+ return True
646
+
647
+
648
+ def DetectTarBombArchiveFileArray(listarrayfiles,
649
+ top_file_ratio_threshold=0.6,
650
+ min_members_for_ratio=4,
651
+ symlink_policy="escape-only", # 'escape-only' | 'deny' | 'single-folder-only'
652
+ to_text=to_text):
653
+ """
654
+ Detect 'tarbomb-like' archives from ArchiveFileToArray/TarFileToArray dicts.
655
+
656
+ Parameters:
657
+ listarrayfiles: dict with key 'ffilelist' -> list of entries (requires 'fname')
658
+ top_file_ratio_threshold: float, fraction of root files considered tarbomb
659
+ min_members_for_ratio: int, minimum members before ratio heuristic applies
660
+ symlink_policy:
661
+ - 'escape-only': only symlinks that escape parent/are absolute are unsafe
662
+ - 'deny': any symlink is unsafe
663
+ - 'single-folder-only': symlinks allowed only if archive has a single top-level folder
664
+ to_text: normalization function (your provided to_text)
665
+
666
+ Returns dict with:
667
+ - is_tarbomb, reasons, total_members, top_level_entries, top_level_files_count,
668
+ has_absolute_paths, has_parent_traversal,
669
+ symlink_escapes_root (bool), symlink_issues (list[{entry,target,reason}])
670
+ """
671
+ files = listarrayfiles or {}
672
+ members = files.get('ffilelist') or []
673
+
674
+ names = []
675
+ has_abs = False
676
+ has_parent = False
677
+
678
+ # Symlink tracking
679
+ has_any_symlink = False
680
+ symlink_issues = []
681
+ any_symlink_escape = False
682
+
683
+ for m in members:
684
+ m = m or {}
685
+ name = to_text(m.get('fname', u""))
686
+
687
+ if _is_abs_like(name):
688
+ has_abs = True
689
+
690
+ parts = _split_posix(name)
691
+ if u'..' in parts:
692
+ has_parent = True
693
+
694
+ if not parts:
695
+ continue
696
+
697
+ norm_name = u'/'.join(parts)
698
+ names.append(norm_name)
699
+
700
+ # ---- Symlink detection ----
701
+ ftype = m.get('ftype')
702
+ is_symlink = (ftype == 2) or (to_text(ftype).lower() == u'symlink' if ftype is not None else False)
703
+ if is_symlink:
704
+ has_any_symlink = True
705
+ target = to_text(m.get('flinkname', u""))
706
+ # Absolute symlink target is unsafe
707
+ if _is_abs_like(target):
708
+ any_symlink_escape = True
709
+ symlink_issues.append({'entry': norm_name, 'target': target, 'reason': 'absolute symlink target'})
710
+ else:
711
+ parent = u'/'.join(parts[:-1]) # may be ''
712
+ if _resolves_outside(parent, target):
713
+ any_symlink_escape = True
714
+ symlink_issues.append({'entry': norm_name, 'target': target, 'reason': 'symlink escapes parent directory'})
715
+
716
+ total = len(names)
717
+ reasons = []
718
+ if total == 0:
719
+ return {
720
+ "is_tarbomb": False,
721
+ "reasons": ["archive contains no members"],
722
+ "total_members": 0,
723
+ "top_level_entries": [],
724
+ "top_level_files_count": 0,
725
+ "has_absolute_paths": has_abs,
726
+ "has_parent_traversal": has_parent,
727
+ "symlink_escapes_root": any_symlink_escape,
728
+ "symlink_issues": symlink_issues,
729
+ }
730
+
731
+ # Layout counts
732
+ top_counts = {}
733
+ top_level_files_count = 0
734
+ for name in names:
735
+ parts = name.split(u'/')
736
+ first = parts[0]
737
+ top_counts[first] = top_counts.get(first, 0) + 1
738
+ if len(parts) == 1: # directly at archive root
739
+ top_level_files_count += 1
740
+
741
+ top_keys = sorted(top_counts.keys())
742
+ is_tarbomb = False
743
+
744
+ # Path-based dangers
745
+ if has_abs:
746
+ is_tarbomb = True
747
+ reasons.append("contains absolute paths (dangerous)")
748
+ if has_parent:
749
+ is_tarbomb = True
750
+ reasons.append("contains parent-traversal ('..') entries (dangerous)")
751
+ if any_symlink_escape:
752
+ is_tarbomb = True
753
+ reasons.append("contains symlinks that escape their parent directory")
754
+
755
+ # Symlink policy enforcement
756
+ if symlink_policy == "deny" and has_any_symlink:
757
+ is_tarbomb = True
758
+ reasons.append("symlinks present and policy is 'deny'")
759
+ elif symlink_policy == "single-folder-only" and has_any_symlink and len(top_keys) != 1:
760
+ is_tarbomb = True
761
+ reasons.append("symlinks present but archive lacks a single top-level folder")
762
+
763
+ # Tarbomb layout heuristics
764
+ if len(top_keys) == 1:
765
+ reasons.append("single top-level entry '{0}'".format(top_keys[0]))
766
+ else:
767
+ ratio = float(top_level_files_count) / float(total)
768
+ if total >= min_members_for_ratio and ratio > float(top_file_ratio_threshold):
769
+ is_tarbomb = True
770
+ reasons.append("high fraction of members ({0:.0%}) at archive root".format(ratio))
771
+ else:
772
+ max_bucket = max(top_counts.values()) if top_counts else 0
773
+ if max_bucket < total * 0.9:
774
+ is_tarbomb = True
775
+ reasons.append("multiple top-level entries with no dominant folder: {0}".format(
776
+ u", ".join(top_keys[:10])))
777
+ else:
778
+ reasons.append("multiple top-level entries but one dominates")
779
+
780
+ return {
781
+ "is_tarbomb": bool(is_tarbomb),
782
+ "reasons": reasons,
783
+ "total_members": total,
784
+ "top_level_entries": top_keys,
785
+ "top_level_files_count": top_level_files_count,
786
+ "has_absolute_paths": has_abs,
787
+ "has_parent_traversal": has_parent,
788
+ "symlink_escapes_root": any_symlink_escape,
789
+ "symlink_issues": symlink_issues,
790
+ }
791
+
792
+
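The new detector and its _split_posix/_is_abs_like/_resolves_outside helpers operate purely on the entry dictionaries produced by ArchiveFileToArray/TarFileToArray, so they can be exercised without extracting anything. A hedged sketch with a synthetic input; the ftype == 2 symlink marker and the flinkname field follow the checks in the function body above:

sample = {
    "ffilelist": [
        {"fname": "README.txt", "ftype": 0},
        {"fname": "setup.py", "ftype": 0},
        {"fname": "module.py", "ftype": 0},
        {"fname": "tests/test_module.py", "ftype": 0},
        # Symlink whose target climbs out of its parent directory.
        {"fname": "docs/link", "ftype": 2, "flinkname": "../../etc/passwd"},
    ]
}

report = DetectTarBombArchiveFileArray(sample, symlink_policy="escape-only")
report["is_tarbomb"]         # True: escaping symlink plus scattered root-level files
report["symlink_issues"]     # [{'entry': 'docs/link', 'target': '../../etc/passwd', 'reason': 'symlink escapes parent directory'}]
report["top_level_entries"]  # ['README.txt', 'docs', 'module.py', 'setup.py', 'tests']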
611
793
  def MkTempFile(data=None, inmem=__use_inmemfile__, isbytes=True, prefix=__project__,
612
794
  delete=True, encoding="utf-8"):
613
795
  """
@@ -674,6 +856,13 @@ def RemoveWindowsPath(dpath):
674
856
  """
675
857
  if not dpath:
676
858
  return ""
859
+ if re.match("^file://", dpath, re.IGNORECASE):
860
+ # Normalize to file:/// if it's a local path (no host)
861
+ if dpath.lower().startswith("file://") and not dpath.lower().startswith("file:///"):
862
+ # insert the extra slash
863
+ dpath = "file:///" + dpath[7:]
864
+ dparsed = urlparse(dpath)
865
+ dpath = url2pathname(dparsed.path)
677
866
  # Accept bytes and decode safely
678
867
  if isinstance(dpath, (bytes, bytearray)):
679
868
  dpath = dpath.decode("utf-8", "ignore")
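The same file:// branch added here is repeated below in NormalizeRelativePath, ListDir, and ListDirAdvanced; it relies on the urlparse/url2pathname imports added near the top of the file. A standalone sketch of what the conversion does; the file_url_to_local_path helper and example URLs are illustrative only:

try:
    from urllib.parse import urlparse            # Python 3
    from urllib.request import url2pathname
except ImportError:
    from urlparse import urlparse                # Python 2
    from urllib import url2pathname

def file_url_to_local_path(path_or_url):
    if path_or_url.lower().startswith("file://") and not path_or_url.lower().startswith("file:///"):
        # Host-less "file://" URLs get the missing third slash inserted first.
        path_or_url = "file:///" + path_or_url[7:]
    parsed = urlparse(path_or_url)
    return url2pathname(parsed.path)

file_url_to_local_path("file:///home/user/archive.cat")     # '/home/user/archive.cat' on POSIX
file_url_to_local_path("file://C:/Users/user/archive.cat")  # 'C:\\Users\\user\\archive.cat' on Windows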
@@ -689,6 +878,13 @@ def NormalizeRelativePath(inpath):
689
878
  """
690
879
  Ensures the path is relative unless it is absolute. Prepares consistent relative paths.
691
880
  """
881
+ if re.match("^file://", inpath, re.IGNORECASE):
882
+ # Normalize to file:/// if it's a local path (no host)
883
+ if inpath.lower().startswith("file://") and not inpath.lower().startswith("file:///"):
884
+ # insert the extra slash
885
+ inpath = "file:///" + inpath[7:]
886
+ dparsed = urlparse(inpath)
887
+ inpath = url2pathname(dparsed.path)
692
888
  inpath = RemoveWindowsPath(inpath)
693
889
  if os.path.isabs(inpath):
694
890
  outpath = inpath
@@ -745,6 +941,13 @@ def ListDir(dirpath, followlink=False, duplicates=False, include_regex=None, exc
745
941
  include_pattern = re.compile(include_regex) if include_regex else None
746
942
  exclude_pattern = re.compile(exclude_regex) if exclude_regex else None
747
943
  for mydirfile in dirpath:
944
+ if re.match("^file://", mydirfile, re.IGNORECASE):
945
+ # Normalize to file:/// if it's a local path (no host)
946
+ if mydirfile.lower().startswith("file://") and not mydirfile.lower().startswith("file:///"):
947
+ # insert the extra slash
948
+ mydirfile = "file:///" + mydirfile[7:]
949
+ dparsed = urlparse(mydirfile)
950
+ mydirfile = url2pathname(dparsed.path)
748
951
  if not os.path.exists(mydirfile):
749
952
  return False
750
953
  mydirfile = NormalizeRelativePath(mydirfile)
@@ -815,6 +1018,13 @@ def ListDirAdvanced(dirpath, followlink=False, duplicates=False, include_regex=N
815
1018
  include_pattern = re.compile(include_regex) if include_regex else None
816
1019
  exclude_pattern = re.compile(exclude_regex) if exclude_regex else None
817
1020
  for mydirfile in dirpath:
1021
+ if re.match("^file://", mydirfile, re.IGNORECASE):
1022
+ # Normalize to file:/// if it's a local path (no host)
1023
+ if mydirfile.lower().startswith("file://") and not mydirfile.lower().startswith("file:///"):
1024
+ # insert the extra slash
1025
+ mydirfile = "file:///" + mydirfile[7:]
1026
+ dparsed = urlparse(mydirfile)
1027
+ mydirfile = url2pathname(dparsed.path)
818
1028
  if not os.path.exists(mydirfile):
819
1029
  return False
820
1030
  mydirfile = NormalizeRelativePath(mydirfile)
@@ -2024,7 +2234,7 @@ def ReadFileHeaderDataWoSize(fp, delimiter=__file_format_dict__['format_delimite
2024
2234
  if(headersize <= 0 or headernumfields <= 0):
2025
2235
  return []
2026
2236
  headerdata = ReadTillNullByteByNum(fp, delimiter, headernumfields)
2027
- #headerdata = ReadFileHeaderData(fp, headernumfields, delimiter);
2237
+ #headerdata = ReadFileHeaderData(fp, headernumfields, delimiter)
2028
2238
  HeaderOut = preheaderdata + headerdata
2029
2239
  return HeaderOut
2030
2240
 
@@ -2511,22 +2721,20 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
2511
2721
  return outlist
2512
2722
 
2513
2723
 
2514
- def ReadFileDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
2724
+ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__):
2515
2725
  if(not hasattr(fp, "read")):
2516
2726
  return False
2517
2727
  delimiter = formatspecs['format_delimiter']
2518
- curloc = fp.tell()
2728
+ curloc = filestart
2519
2729
  try:
2520
- fp.seek(0, 2);
2730
+ fp.seek(0, 2)
2521
2731
  except OSError:
2522
- SeekToEndOfFile(fp);
2732
+ SeekToEndOfFile(fp)
2523
2733
  except ValueError:
2524
- SeekToEndOfFile(fp);
2525
- CatSize = fp.tell();
2526
- CatSizeEnd = CatSize;
2734
+ SeekToEndOfFile(fp)
2735
+ CatSize = fp.tell()
2736
+ CatSizeEnd = CatSize
2527
2737
  fp.seek(curloc, 0)
2528
- if(curloc > 0):
2529
- fp.seek(0, 0)
2530
2738
  inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
2531
2739
  formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
2532
2740
  formdelszie = len(formatspecs['format_delimiter'])
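The new filestart parameter, threaded through this function and the ToArray/ToList variants below, replaces the old behaviour of calling fp.tell() and rewinding to offset 0, so an archive that begins partway into a larger stream can be parsed in place. A hedged sketch of the call; the container file name and the 512-byte offset are made up for illustration:

# Hypothetical container whose embedded archive starts at byte 512.
with open("container.bin", "rb") as fp:
    flist = ReadFileDataWithContent(fp, filestart=512, listonly=True)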
@@ -2541,8 +2749,6 @@ def ReadFileDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=Fa
2541
2749
  else:
2542
2750
  inheader = ReadFileHeaderDataWoSize(
2543
2751
  fp, formatspecs['format_delimiter'])
2544
- if(curloc > 0):
2545
- fp.seek(curloc, 0)
2546
2752
  fprechecksumtype = inheader[-2]
2547
2753
  fprechecksum = inheader[-1]
2548
2754
  headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
@@ -2566,22 +2772,20 @@ def ReadFileDataWithContent(fp, listonly=False, uncompress=True, skipchecksum=Fa
2566
2772
  return flist
2567
2773
 
2568
2774
 
2569
- def ReadFileDataWithContentToArray(fp, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
2775
+ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
2570
2776
  if(not hasattr(fp, "read")):
2571
2777
  return False
2572
2778
  delimiter = formatspecs['format_delimiter']
2573
- curloc = fp.tell()
2779
+ curloc = filestart
2574
2780
  try:
2575
- fp.seek(0, 2);
2781
+ fp.seek(0, 2)
2576
2782
  except OSError:
2577
- SeekToEndOfFile(fp);
2783
+ SeekToEndOfFile(fp)
2578
2784
  except ValueError:
2579
- SeekToEndOfFile(fp);
2580
- CatSize = fp.tell();
2581
- CatSizeEnd = CatSize;
2785
+ SeekToEndOfFile(fp)
2786
+ CatSize = fp.tell()
2787
+ CatSizeEnd = CatSize
2582
2788
  fp.seek(curloc, 0)
2583
- if(curloc > 0):
2584
- fp.seek(0, 0)
2585
2789
  inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
2586
2790
  formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
2587
2791
  formdelszie = len(formatspecs['format_delimiter'])
@@ -2613,8 +2817,6 @@ def ReadFileDataWithContentToArray(fp, seekstart=0, seekend=0, listonly=False, c
2613
2817
  fextrafieldslist = json.loads(fextrafieldslist[0])
2614
2818
  except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
2615
2819
  pass
2616
- if(curloc > 0):
2617
- fp.seek(curloc, 0)
2618
2820
  formversion = re.findall("([\\d]+)", formstring)
2619
2821
  fheadsize = int(inheader[0], 16)
2620
2822
  fnumfields = int(inheader[1], 16)
@@ -2726,22 +2928,20 @@ def ReadFileDataWithContentToArray(fp, seekstart=0, seekend=0, listonly=False, c
2726
2928
  return outlist
2727
2929
 
2728
2930
 
2729
- def ReadFileDataWithContentToList(fp, seekstart=0, seekend=0, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
2931
+ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=False, uncompress=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False):
2730
2932
  if(not hasattr(fp, "read")):
2731
2933
  return False
2732
2934
  delimiter = formatspecs['format_delimiter']
2733
- curloc = fp.tell()
2935
+ curloc = filestart
2734
2936
  try:
2735
- fp.seek(0, 2);
2937
+ fp.seek(0, 2)
2736
2938
  except OSError:
2737
- SeekToEndOfFile(fp);
2939
+ SeekToEndOfFile(fp)
2738
2940
  except ValueError:
2739
- SeekToEndOfFile(fp);
2740
- CatSize = fp.tell();
2741
- CatSizeEnd = CatSize;
2941
+ SeekToEndOfFile(fp)
2942
+ CatSize = fp.tell()
2943
+ CatSizeEnd = CatSize
2742
2944
  fp.seek(curloc, 0)
2743
- if(curloc > 0):
2744
- fp.seek(0, 0)
2745
2945
  inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
2746
2946
  formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
2747
2947
  formdelszie = len(formatspecs['format_delimiter'])
@@ -2773,8 +2973,6 @@ def ReadFileDataWithContentToList(fp, seekstart=0, seekend=0, listonly=False, co
2773
2973
  fextrafieldslist = json.loads(fextrafieldslist[0])
2774
2974
  except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
2775
2975
  pass
2776
- if(curloc > 0):
2777
- fp.seek(curloc, 0)
2778
2976
  formversion = re.findall("([\\d]+)", formstring)
2779
2977
  fheadsize = int(inheader[0], 16)
2780
2978
  fnumfields = int(inheader[1], 16)
@@ -2892,25 +3090,25 @@ def ReadFileDataWithContentToList(fp, seekstart=0, seekend=0, listonly=False, co
2892
3090
  return outlist
2893
3091
 
2894
3092
 
2895
- def ReadInFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
3093
+ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
2896
3094
  if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
2897
3095
  formatspecs = formatspecs[fmttype]
2898
3096
  elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
2899
3097
  fmttype = "auto"
2900
3098
  if(hasattr(infile, "read") or hasattr(infile, "write")):
2901
3099
  fp = infile
2902
- fp.seek(0, 0)
2903
- compresscheck = CheckCompressionType(fp, formatspecs, False)
3100
+ fp.seek(filestart, 0)
3101
+ compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
2904
3102
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
2905
3103
  formatspecs = formatspecs[compresscheck]
2906
3104
  else:
2907
- fp.seek(0, 0)
2908
- checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
3105
+ fp.seek(filestart, 0)
3106
+ checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
2909
3107
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
2910
3108
  formatspecs = formatspecs[checkcompressfile]
2911
- fp.seek(0, 0)
2912
- fp = UncompressFileAlt(fp, formatspecs)
2913
- checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
3109
+ fp.seek(filestart, 0)
3110
+ fp = UncompressFileAlt(fp, formatspecs, filestart)
3111
+ checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
2914
3112
  if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
2915
3113
  return TarFileToArray(infile, seekstart, seekend, listonly, contentasfile, skipchecksum, formatspecs, seektoend, True)
2916
3114
  elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
@@ -2945,58 +3143,58 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0,
2945
3143
  compresscheck = "zlib"
2946
3144
  else:
2947
3145
  return False
2948
- fp.seek(0, 0)
3146
+ fp.seek(filestart, 0)
2949
3147
  elif(infile == "-"):
2950
3148
  fp = MkTempFile()
2951
3149
  if(hasattr(sys.stdin, "buffer")):
2952
3150
  shutil.copyfileobj(sys.stdin.buffer, fp)
2953
3151
  else:
2954
3152
  shutil.copyfileobj(sys.stdin, fp)
2955
- fp.seek(0, 0)
2956
- fp = UncompressFileAlt(fp, formatspecs)
2957
- fp.seek(0, 0)
2958
- compresscheck = CheckCompressionType(fp, formatspecs, False)
3153
+ fp.seek(filestart, 0)
3154
+ fp = UncompressFileAlt(fp, formatspecs, filestart)
3155
+ fp.seek(filestart, 0)
3156
+ compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
2959
3157
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
2960
3158
  formatspecs = formatspecs[compresscheck]
2961
3159
  else:
2962
- fp.seek(0, 0)
2963
- checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
3160
+ fp.seek(filestart, 0)
3161
+ checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
2964
3162
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
2965
3163
  formatspecs = formatspecs[checkcompressfile]
2966
- fp.seek(0, 0)
3164
+ fp.seek(filestart, 0)
2967
3165
  if(not fp):
2968
3166
  return False
2969
- fp.seek(0, 0)
3167
+ fp.seek(filestart, 0)
2970
3168
  elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
2971
3169
  fp = MkTempFile()
2972
3170
  fp.write(infile)
2973
- fp.seek(0, 0)
2974
- fp = UncompressFileAlt(fp, formatspecs)
2975
- fp.seek(0, 0)
2976
- compresscheck = CheckCompressionType(fp, formatspecs, False)
3171
+ fp.seek(filestart, 0)
3172
+ fp = UncompressFileAlt(fp, formatspecs, filestart)
3173
+ fp.seek(filestart, 0)
3174
+ compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
2977
3175
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
2978
3176
  formatspecs = formatspecs[compresscheck]
2979
3177
  else:
2980
- fp.seek(0, 0)
2981
- checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
3178
+ fp.seek(filestart, 0)
3179
+ checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
2982
3180
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
2983
3181
  formatspecs = formatspecs[checkcompressfile]
2984
- fp.seek(0, 0)
3182
+ fp.seek(filestart, 0)
2985
3183
  if(not fp):
2986
3184
  return False
2987
- fp.seek(0, 0)
2988
- elif(re.findall("^(http|https|ftp|ftps|sftp):\\/\\/", infile)):
3185
+ fp.seek(filestart, 0)
3186
+ elif(re.findall(__download_proto_support__, infile)):
2989
3187
  fp = download_file_from_internet_file(infile)
2990
- fp.seek(0, 0)
2991
- compresscheck = CheckCompressionType(fp, formatspecs, False)
3188
+ fp.seek(filestart, 0)
3189
+ compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
2992
3190
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
2993
3191
  formatspecs = formatspecs[compresscheck]
2994
3192
  else:
2995
- fp.seek(0, 0)
2996
- checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
3193
+ fp.seek(filestart, 0)
3194
+ checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
2997
3195
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
2998
3196
  formatspecs = formatspecs[checkcompressfile]
2999
- fp.seek(0, 0)
3197
+ fp.seek(filestart, 0)
3000
3198
  if(not compresscheck):
3001
3199
  fextname = os.path.splitext(infile)[1]
3002
3200
  if(fextname == ".gz"):
@@ -3017,14 +3215,14 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0,
3017
3215
  compresscheck = "zlib"
3018
3216
  else:
3019
3217
  return False
3020
- fp.seek(0, 0)
3021
- fp = UncompressFileAlt(fp, formatspecs)
3218
+ fp.seek(filestart, 0)
3219
+ fp = UncompressFileAlt(fp, formatspecs, filestart)
3022
3220
  if(not fp):
3023
3221
  return False
3024
- fp.seek(0, 0)
3222
+ fp.seek(filestart, 0)
3025
3223
  else:
3026
3224
  infile = RemoveWindowsPath(infile)
3027
- checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
3225
+ checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
3028
3226
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
3029
3227
  formatspecs = formatspecs[checkcompressfile]
3030
3228
  if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
@@ -3039,7 +3237,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0,
3039
3237
  return False
3040
3238
  elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
3041
3239
  return False
3042
- compresscheck = CheckCompressionType(infile, formatspecs, True)
3240
+ compresscheck = CheckCompressionType(infile, formatspecs, filestart, True)
3043
3241
  if(not compresscheck):
3044
3242
  fextname = os.path.splitext(infile)[1]
3045
3243
  if(fextname == ".gz"):
@@ -3062,43 +3260,43 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0,
3062
3260
  return False
3063
3261
  if(not compresscheck):
3064
3262
  return False
3065
- fp = UncompressFile(infile, formatspecs, "rb")
3066
- return ReadFileDataWithContentToArray(fp, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
3263
+ fp = UncompressFile(infile, formatspecs, "rb", filestart)
3264
+ return ReadFileDataWithContentToArray(fp, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
3067
3265
 
3068
3266
 
3069
- def ReadInMultipleFileWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
3267
+ def ReadInMultipleFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
3070
3268
  if(isinstance(infile, (list, tuple, ))):
3071
3269
  pass
3072
3270
  else:
3073
3271
  infile = [infile]
3074
3272
  outretval = {}
3075
3273
  for curfname in infile:
3076
- outretval[curfname] = ReadInFileWithContentToArray(curfname, fmttype, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
3274
+ outretval[curfname] = ReadInFileWithContentToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
3077
3275
  return outretval
3078
3276
 
3079
- def ReadInMultipleFilesWithContentToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
3080
- return ReadInMultipleFileWithContentToArray(infile, fmttype, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
3277
+ def ReadInMultipleFilesWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
3278
+ return ReadInMultipleFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
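With __download_proto_support__ replacing the inline regex, the ReadIn* helpers now match scp:// sources alongside http/https/ftp/ftps/sftp and forward the new filestart offset. A hedged sketch, assuming download_file_from_internet_file can actually fetch the scheme; the URL is a placeholder and keyword arguments follow the new signatures above:

# Remote archive fetched over an scp:// URL, listing entries only, archive data at offset 0.
entries = ReadInFileWithContentToArray(
    "scp://user@host/srv/backups/archive.cat",
    fmttype="auto",
    filestart=0,
    listonly=True,
)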
3081
3279
 
3082
3280
 
3083
- def ReadInFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
3281
+ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
3084
3282
  if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
3085
3283
  formatspecs = formatspecs[fmttype]
3086
3284
  elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
3087
3285
  fmttype = "auto"
3088
3286
  if(hasattr(infile, "read") or hasattr(infile, "write")):
3089
3287
  fp = infile
3090
- fp.seek(0, 0)
3091
- compresscheck = CheckCompressionType(fp, formatspecs, False)
3288
+ fp.seek(filestart, 0)
3289
+ compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
3092
3290
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
3093
3291
  formatspecs = formatspecs[compresscheck]
3094
3292
  else:
3095
- fp.seek(0, 0)
3096
- checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
3293
+ fp.seek(filestart, 0)
3294
+ checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
3097
3295
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
3098
3296
  formatspecs = formatspecs[checkcompressfile]
3099
- fp.seek(0, 0)
3100
- fp = UncompressFileAlt(fp, formatspecs)
3101
- checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
3297
+ fp.seek(filestart, 0)
3298
+ fp = UncompressFileAlt(fp, formatspecs, filestart)
3299
+ checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
3102
3300
  if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
3103
3301
  return TarFileToArray(infile, seekstart, seekend, listonly, contentasfile, skipchecksum, formatspecs, seektoend, True)
3104
3302
  elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
@@ -3133,58 +3331,58 @@ def ReadInFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0,
3133
3331
  compresscheck = "zlib"
3134
3332
  else:
3135
3333
  return False
3136
- fp.seek(0, 0)
3334
+ fp.seek(filestart, 0)
3137
3335
  elif(infile == "-"):
3138
3336
  fp = MkTempFile()
3139
3337
  if(hasattr(sys.stdin, "buffer")):
3140
3338
  shutil.copyfileobj(sys.stdin.buffer, fp)
3141
3339
  else:
3142
3340
  shutil.copyfileobj(sys.stdin, fp)
3143
- fp.seek(0, 0)
3144
- fp = UncompressFileAlt(fp, formatspecs)
3145
- fp.seek(0, 0)
3146
- compresscheck = CheckCompressionType(fp, formatspecs, False)
3341
+ fp.seek(filestart, 0)
3342
+ fp = UncompressFileAlt(fp, formatspecs, filestart)
3343
+ fp.seek(filestart, 0)
3344
+ compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
3147
3345
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
3148
3346
  formatspecs = formatspecs[compresscheck]
3149
3347
  else:
3150
- fp.seek(0, 0)
3151
- checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
3348
+ fp.seek(filestart, 0)
3349
+ checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
3152
3350
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
3153
3351
  formatspecs = formatspecs[checkcompressfile]
3154
- fp.seek(0, 0)
3352
+ fp.seek(filestart, 0)
3155
3353
  if(not fp):
3156
3354
  return False
3157
- fp.seek(0, 0)
3355
+ fp.seek(filestart, 0)
3158
3356
  elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
3159
3357
  fp = MkTempFile()
3160
3358
  fp.write(infile)
3161
- fp.seek(0, 0)
3162
- fp = UncompressFileAlt(fp, formatspecs)
3163
- fp.seek(0, 0)
3164
- compresscheck = CheckCompressionType(fp, formatspecs, False)
3359
+ fp.seek(filestart, 0)
3360
+ fp = UncompressFileAlt(fp, formatspecs, filestart)
3361
+ fp.seek(filestart, 0)
3362
+ compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
3165
3363
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
3166
3364
  formatspecs = formatspecs[compresscheck]
3167
3365
  else:
3168
- fp.seek(0, 0)
3169
- checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
3366
+ fp.seek(filestart, 0)
3367
+ checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
3170
3368
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
3171
3369
  formatspecs = formatspecs[checkcompressfile]
3172
- fp.seek(0, 0)
3370
+ fp.seek(filestart, 0)
3173
3371
  if(not fp):
3174
3372
  return False
3175
- fp.seek(0, 0)
3176
- elif(re.findall("^(http|https|ftp|ftps|sftp):\\/\\/", infile)):
3373
+ fp.seek(filestart, 0)
3374
+ elif(re.findall(__download_proto_support__, infile)):
3177
3375
  fp = download_file_from_internet_file(infile)
3178
- fp.seek(0, 0)
3179
- compresscheck = CheckCompressionType(fp, formatspecs, False)
3376
+ fp.seek(filestart, 0)
3377
+ compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
3180
3378
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
3181
3379
  formatspecs = formatspecs[compresscheck]
3182
3380
  else:
3183
- fp.seek(0, 0)
3184
- checkcompressfile = CheckCompressionSubType(fp, formatspecs, False)
3381
+ fp.seek(filestart, 0)
3382
+ checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, False)
3185
3383
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
3186
3384
  formatspecs = formatspecs[checkcompressfile]
3187
- fp.seek(0, 0)
3385
+ fp.seek(filestart, 0)
3188
3386
  if(not compresscheck):
3189
3387
  fextname = os.path.splitext(infile)[1]
3190
3388
  if(fextname == ".gz"):
@@ -3205,14 +3403,14 @@ def ReadInFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0,
3205
3403
  compresscheck = "zlib"
3206
3404
  else:
3207
3405
  return False
3208
- fp.seek(0, 0)
3209
- fp = UncompressFileAlt(fp, formatspecs)
3406
+ fp.seek(filestart, 0)
3407
+ fp = UncompressFileAlt(fp, formatspecs, filestart)
3210
3408
  if(not fp):
3211
3409
  return False
3212
- fp.seek(0, 0)
3410
+ fp.seek(filestart, 0)
3213
3411
  else:
3214
3412
  infile = RemoveWindowsPath(infile)
3215
- checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
3413
+ checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
3216
3414
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
3217
3415
  formatspecs = formatspecs[checkcompressfile]
3218
3416
  if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
@@ -3227,7 +3425,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0,
3227
3425
  return False
3228
3426
  elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
3229
3427
  return False
3230
- compresscheck = CheckCompressionType(infile, formatspecs, True)
3428
+ compresscheck = CheckCompressionType(infile, formatspecs, filestart, True)
3231
3429
  if(not compresscheck):
3232
3430
  fextname = os.path.splitext(infile)[1]
3233
3431
  if(fextname == ".gz"):
@@ -3250,22 +3448,22 @@ def ReadInFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0,
3250
3448
  return False
3251
3449
  if(not compresscheck):
3252
3450
  return False
3253
- fp = UncompressFile(infile, formatspecs, "rb")
3254
- return ReadFileDataWithContentToList(fp, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
3451
+ fp = UncompressFile(infile, formatspecs, "rb", filestart)
3452
+ return ReadFileDataWithContentToList(fp, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
3255
3453
 
3256
3454
 
3257
- def ReadInMultipleFileWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
3455
+ def ReadInMultipleFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
3258
3456
  if(isinstance(infile, (list, tuple, ))):
3259
3457
  pass
3260
3458
  else:
3261
3459
  infile = [infile]
3262
3460
  outretval = {}
3263
3461
  for curfname in infile:
3264
- curretfile[curfname] = ReadInFileWithContentToList(curfname, fmttype, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
3462
+ curretfile[curfname] = ReadInFileWithContentToList(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
3265
3463
  return outretval
3266
3464
 
3267
- def ReadInMultipleFilesWithContentToList(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
3268
- return ReadInMultipleFileWithContentToList(infile, fmttype, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
3465
+ def ReadInMultipleFilesWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
3466
+ return ReadInMultipleFileWithContentToList(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
3269
3467
 
3270
3468
 
3271
3469
  def AppendNullByte(indata, delimiter=__file_format_dict__['format_delimiter']):
@@ -3394,7 +3592,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
3394
3592
  fp = MkTempFile()
3395
3593
  elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
3396
3594
  fp = outfile
3397
- elif(re.findall("^(ftp|ftps|sftp):\\/\\/", outfile)):
3595
+ elif(re.findall(__upload_proto_support__, outfile)):
3398
3596
  fp = MkTempFile()
3399
3597
  else:
3400
3598
  fbasename = os.path.splitext(outfile)[0]
@@ -3430,7 +3628,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
3430
3628
  outvar = fp.read()
3431
3629
  fp.close()
3432
3630
  return outvar
3433
- elif(re.findall("^(ftp|ftps|sftp):\\/\\/", outfile)):
3631
+ elif(re.findall(__upload_proto_support__, outfile)):
3434
3632
  fp = CompressOpenFileAlt(
3435
3633
  fp, compression, compressionlevel, compressionuselist, formatspecs)
3436
3634
  fp.seek(0, 0)
@@ -3734,9 +3932,9 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, filevalues=[], ext
3734
3932
  if not followlink and ftype in data_types:
3735
3933
  with open(fname, "rb") as fpc:
3736
3934
  shutil.copyfileobj(fpc, fcontents)
3737
- typechecktest = CheckCompressionType(fcontents, closefp=False)
3935
+ typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
3738
3936
  fcontents.seek(0, 0)
3739
- fcencoding = GetFileEncoding(fcontents, False)
3937
+ fcencoding = GetFileEncoding(fcontents, 0, False)
3740
3938
  if(typechecktest is False and not compresswholefile):
3741
3939
  fcontents.seek(0, 2)
3742
3940
  ucfsize = fcontents.tell()
@@ -3781,9 +3979,9 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, filevalues=[], ext
3781
3979
  flstatinfo = os.stat(flinkname)
3782
3980
  with open(flinkname, "rb") as fpc:
3783
3981
  shutil.copyfileobj(fpc, fcontents)
3784
- typechecktest = CheckCompressionType(fcontents, closefp=False)
3982
+ typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
3785
3983
  fcontents.seek(0, 0)
3786
- fcencoding = GetFileEncoding(fcontents, False)
3984
+ fcencoding = GetFileEncoding(fcontents, 0, False)
3787
3985
  if(typechecktest is False and not compresswholefile):
3788
3986
  fcontents.seek(0, 2)
3789
3987
  ucfsize = fcontents.tell()
@@ -3893,7 +4091,7 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, filevalues=[], extr
3893
4091
  fheaderchecksumtype = curfname[26]
3894
4092
  fcontentchecksumtype = curfname[27]
3895
4093
  fcontents = curfname[28]
3896
- fencoding = GetFileEncoding(fcontents, False)
4094
+ fencoding = GetFileEncoding(fcontents, 0, False)
3897
4095
  tmpoutlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression, fcsize,
3898
4096
  fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev, fdev_minor, fdev_major, fseeknextfile]
3899
4097
  fcontents.seek(0, 0)
@@ -3943,7 +4141,7 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
3943
4141
  fp = MkTempFile()
3944
4142
  elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
3945
4143
  fp = outfile
3946
- elif(re.findall("^(ftp|ftps|sftp):\\/\\/", outfile)):
4144
+ elif(re.findall(__upload_proto_support__, outfile)):
3947
4145
  fp = MkTempFile()
3948
4146
  else:
3949
4147
  fbasename = os.path.splitext(outfile)[0]
@@ -3980,7 +4178,7 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
3980
4178
  outvar = fp.read()
3981
4179
  fp.close()
3982
4180
  return outvar
3983
- elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall("^(ftp|ftps|sftp):\\/\\/", outfile)):
4181
+ elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
3984
4182
  fp = CompressOpenFileAlt(
3985
4183
  fp, compression, compressionlevel, compressionuselist, formatspecs)
3986
4184
  fp.seek(0, 0)
@@ -4023,7 +4221,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
4023
4221
  fp = MkTempFile()
4024
4222
  elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
4025
4223
  fp = outfile
4026
- elif(re.findall("^(ftp|ftps|sftp):\\/\\/", outfile)):
4224
+ elif(re.findall(__upload_proto_support__, outfile)):
4027
4225
  fp = MkTempFile()
4028
4226
  else:
4029
4227
  fbasename = os.path.splitext(outfile)[0]
@@ -4060,7 +4258,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
4060
4258
  outvar = fp.read()
4061
4259
  fp.close()
4062
4260
  return outvar
4063
- elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall("^(ftp|ftps|sftp):\\/\\/", outfile)):
4261
+ elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
4064
4262
  fp = CompressOpenFileAlt(
4065
4263
  fp, compression, compressionlevel, compressionuselist, formatspecs)
4066
4264
  fp.seek(0, 0)
@@ -4144,7 +4342,8 @@ def GzipCompressData(data, compresslevel=9):
4144
4342
  out = MkTempFile()
4145
4343
  with gzip.GzipFile(fileobj=out, mode="wb", compresslevel=compresslevel) as f:
4146
4344
  f.write(data)
4147
- compressed_data = out.getvalue()
4345
+ out.seek(0, 0)
4346
+ compressed_data = out.read()
4148
4347
  return compressed_data
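Replacing out.getvalue() with an explicit seek-and-read matters because MkTempFile may hand back a real temporary file object rather than a BytesIO, and file objects have no getvalue(). A hedged sketch of the same pattern on a plain temporary file; the gzip_to_tempfile helper is illustrative only:

import gzip
import tempfile

def gzip_to_tempfile(data, compresslevel=9):
    out = tempfile.TemporaryFile()            # no getvalue() here, unlike BytesIO
    with gzip.GzipFile(fileobj=out, mode="wb", compresslevel=compresslevel) as f:
        f.write(data)
    out.seek(0, 0)                            # rewind, then read the whole payload back
    return out.read()

assert gzip_to_tempfile(b"hello world")[:2] == b"\x1f\x8b"  # gzip magic bytes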
4149
4348
 
4150
4349
 
@@ -4242,7 +4441,7 @@ def IsSingleDict(variable):
4242
4441
  return True
4243
4442
 
4244
4443
 
4245
- def GetFileEncoding(infile, closefp=True):
4444
+ def GetFileEncoding(infile, filestart=0, closefp=True):
4246
4445
  if(hasattr(infile, "read") or hasattr(infile, "write")):
4247
4446
  fp = infile
4248
4447
  else:
@@ -4251,19 +4450,19 @@ def GetFileEncoding(infile, closefp=True):
4251
4450
  except FileNotFoundError:
4252
4451
  return False
4253
4452
  file_encoding = "UTF-8"
4254
- fp.seek(0, 0)
4453
+ fp.seek(filestart, 0)
4255
4454
  prefp = fp.read(2)
4256
4455
  if(prefp == binascii.unhexlify("fffe")):
4257
4456
  file_encoding = "UTF-16LE"
4258
4457
  elif(prefp == binascii.unhexlify("feff")):
4259
4458
  file_encoding = "UTF-16BE"
4260
- fp.seek(0, 0)
4459
+ fp.seek(filestart, 0)
4261
4460
  prefp = fp.read(3)
4262
4461
  if(prefp == binascii.unhexlify("efbbbf")):
4263
4462
  file_encoding = "UTF-8"
4264
4463
  elif(prefp == binascii.unhexlify("0efeff")):
4265
4464
  file_encoding = "SCSU"
4266
- fp.seek(0, 0)
4465
+ fp.seek(filestart, 0)
4267
4466
  prefp = fp.read(4)
4268
4467
  if(prefp == binascii.unhexlify("fffe0000")):
4269
4468
  file_encoding = "UTF-32LE"
@@ -4279,21 +4478,21 @@ def GetFileEncoding(infile, closefp=True):
4279
4478
  file_encoding = "UTF-7"
4280
4479
  elif(prefp == binascii.unhexlify("2b2f762f")):
4281
4480
  file_encoding = "UTF-7"
4282
- fp.seek(0, 0)
4481
+ fp.seek(filestart, 0)
4283
4482
  if(closefp):
4284
4483
  fp.close()
4285
4484
  return file_encoding
4286
4485
 
4287
4486
 
4288
- def GetFileEncodingFromString(instring, closefp=True):
4487
+ def GetFileEncodingFromString(instring, filestart=0, closefp=True):
4289
4488
  try:
4290
4489
  instringsfile = MkTempFile(instring)
4291
4490
  except TypeError:
4292
4491
  instringsfile = MkTempFile(instring.encode("UTF-8"))
4293
- return GetFileEncoding(instringsfile, closefp)
4492
+ return GetFileEncoding(instringsfile, filestart, closefp)
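GetFileEncoding and GetFileEncodingFromString now sniff the byte-order mark starting at filestart instead of offset 0, which matters once archive data can sit at an offset inside a larger stream. A hedged sketch using BytesIO directly; the eight junk bytes and the payload are synthetic:

from io import BytesIO

payload = b"JUNKJUNK" + b"\xff\xfe" + "hello".encode("UTF-16-LE")  # BOM at offset 8
GetFileEncoding(BytesIO(payload), filestart=8, closefp=False)      # 'UTF-16LE'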
4294
4493
 
4295
4494
 
4296
- def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp=True):
4495
+ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, filestart=0, closefp=True):
4297
4496
  if(hasattr(infile, "read") or hasattr(infile, "write")):
4298
4497
  fp = infile
4299
4498
  else:
@@ -4302,7 +4501,8 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
4302
4501
  except FileNotFoundError:
4303
4502
  return False
4304
4503
  filetype = False
4305
- fp.seek(0, 0)
4504
+ curloc = filestart
4505
+ fp.seek(filestart, 0)
4306
4506
  prefp = fp.read(2)
4307
4507
  if(prefp == binascii.unhexlify("1f8b")):
4308
4508
  filetype = "gzip"
@@ -4318,13 +4518,13 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
4318
4518
  filetype = "zlib"
4319
4519
  elif(prefp == binascii.unhexlify("1f9d")):
4320
4520
  filetype = "zcompress"
4321
- fp.seek(0, 0)
4521
+ fp.seek(curloc, 0)
4322
4522
  prefp = fp.read(3)
4323
4523
  if(prefp == binascii.unhexlify("425a68")):
4324
4524
  filetype = "bzip2"
4325
4525
  elif(prefp == binascii.unhexlify("5d0000")):
4326
4526
  filetype = "lzma"
4327
- fp.seek(0, 0)
4527
+ fp.seek(curloc, 0)
4328
4528
  prefp = fp.read(4)
4329
4529
  if(prefp == binascii.unhexlify("28b52ffd")):
4330
4530
  filetype = "zstd"
@@ -4336,29 +4536,29 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
4336
4536
  filetype = "zipfile"
4337
4537
  elif(prefp == binascii.unhexlify("504b0708")):
4338
4538
  filetype = "zipfile"
4339
- fp.seek(0, 0)
4539
+ fp.seek(curloc, 0)
4340
4540
  prefp = fp.read(5)
4341
4541
  if(prefp == binascii.unhexlify("7573746172")):
4342
4542
  filetype = "tarfile"
4343
4543
  if(prefp == binascii.unhexlify("7573746172")):
4344
4544
  filetype = "tarfile"
4345
- fp.seek(0, 0)
4545
+ fp.seek(curloc, 0)
4346
4546
  prefp = fp.read(6)
4347
4547
  if(prefp == binascii.unhexlify("fd377a585a00")):
4348
4548
  filetype = "xz"
4349
4549
  elif(prefp == binascii.unhexlify("377abcaf271c")):
4350
4550
  filetype = "7zipfile"
4351
- fp.seek(0, 0)
4551
+ fp.seek(curloc, 0)
4352
4552
  prefp = fp.read(7)
4353
4553
  if(prefp == binascii.unhexlify("526172211a0700")):
4354
4554
  filetype = "rarfile"
4355
4555
  elif(prefp == binascii.unhexlify("2a2a4143452a2a")):
4356
4556
  filetype = "ace"
4357
- fp.seek(0, 0)
4557
+ fp.seek(curloc, 0)
4358
4558
  prefp = fp.read(7)
4359
4559
  if(prefp == binascii.unhexlify("894c5a4f0d0a1a")):
4360
4560
  filetype = "lzo"
4361
- fp.seek(0, 0)
4561
+ fp.seek(curloc, 0)
4362
4562
  prefp = fp.read(8)
4363
4563
  if(prefp == binascii.unhexlify("7573746172003030")):
4364
4564
  filetype = "tarfile"
@@ -4366,7 +4566,7 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
4366
4566
  filetype = "tarfile"
4367
4567
  if(prefp == binascii.unhexlify("526172211a070100")):
4368
4568
  filetype = "rarfile"
4369
- fp.seek(0, 0)
4569
+ fp.seek(curloc, 0)
4370
4570
  if(IsNestedDict(formatspecs)):
4371
4571
  for key, value in formatspecs.items():
4372
4572
  prefp = fp.read(formatspecs[key]['format_len'])
@@ -4382,7 +4582,7 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
4382
4582
  if(formstring == inheaderver and formdel == formatspecs[key]['format_delimiter']):
4383
4583
  filetype = formatspecs[key]['format_magic']
4384
4584
  continue
4385
- fp.seek(0, 0)
4585
+ fp.seek(curloc, 0)
4386
4586
  elif(IsSingleDict(formatspecs)):
4387
4587
  prefp = fp.read(formatspecs['format_len'])
4388
4588
  if(prefp == binascii.unhexlify(formatspecs['format_hex'])):
@@ -4397,15 +4597,15 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
4397
4597
  filetype = formatspecs['format_magic']
4398
4598
  else:
4399
4599
  pass
4400
- fp.seek(0, 0)
4600
+ fp.seek(curloc, 0)
4401
4601
  prefp = fp.read(9)
4402
4602
  if(prefp == binascii.unhexlify("894c5a4f000d0a1a0a")):
4403
4603
  filetype = "lzo"
4404
- fp.seek(0, 0)
4604
+ fp.seek(curloc, 0)
4405
4605
  prefp = fp.read(10)
4406
4606
  if(prefp == binascii.unhexlify("7061785f676c6f62616c")):
4407
4607
  filetype = "tarfile"
4408
- fp.seek(0, 0)
4608
+ fp.seek(curloc, 0)
4409
4609
  if(filetype == "gzip" or filetype == "bzip2" or filetype == "lzma" or filetype == "zstd" or filetype == "lz4" or filetype == "zlib"):
4410
4610
  if(TarFileCheck(fp)):
4411
4611
  filetype = "tarfile"
@@ -4420,14 +4620,15 @@ def CheckCompressionType(infile, formatspecs=__file_format_multi_dict__, closefp
4420
4620
  return "7zipfile"
4421
4621
  else:
4422
4622
  filetype = False
4423
- fp.seek(0, 0)
4623
+ fp.seek(curloc, 0)
4424
4624
  if(closefp):
4425
4625
  fp.close()
4426
4626
  return filetype
4427
4627
 
4428
4628
 
4429
- def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, closefp=True):
4430
- compresscheck = CheckCompressionType(infile, formatspecs, False)
4629
+ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, filestart=0, closefp=True):
4630
+ compresscheck = CheckCompressionType(infile, formatspecs, filestart, False)
4631
+ curloc = filestart
4431
4632
  if(not compresscheck):
4432
4633
  fextname = os.path.splitext(infile)[1]
4433
4634
  if(fextname == ".gz"):
@@ -4476,7 +4677,7 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, clos
4476
4677
  elif(py7zr_support and compresscheck == "7zipfile" and py7zr.is_7zfile(infile)):
4477
4678
  return "7zipfile"
4478
4679
  if(hasattr(infile, "read") or hasattr(infile, "write")):
4479
- fp = UncompressFileAlt(infile, formatspecs)
4680
+ fp = UncompressFileAlt(infile, formatspecs, filestart)
4480
4681
  else:
4481
4682
  try:
4482
4683
  if(compresscheck == "gzip" and compresscheck in compressionsupport):
@@ -4506,10 +4707,11 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, clos
4506
4707
  except FileNotFoundError:
4507
4708
  return False
4508
4709
  filetype = False
4710
+ fp.seek(filestart, 0)
4509
4711
  prefp = fp.read(5)
4510
4712
  if(prefp == binascii.unhexlify("7573746172")):
4511
4713
  filetype = "tarfile"
4512
- fp.seek(0, 0)
4714
+ fp.seek(curloc, 0)
4513
4715
  if(IsNestedDict(formatspecs)):
4514
4716
  for key, value in formatspecs.items():
4515
4717
  prefp = fp.read(formatspecs[key]['format_len'])
@@ -4525,7 +4727,7 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, clos
4525
4727
  if(formstring == inheaderver and formdel == formatspecs[key]['format_delimiter']):
4526
4728
  filetype = formatspecs[key]['format_magic']
4527
4729
  continue
4528
- fp.seek(0, 0)
4730
+ fp.seek(curloc, 0)
4529
4731
  elif(IsSingleDict(formatspecs)):
4530
4732
  prefp = fp.read(formatspecs['format_len'])
4531
4733
  if(prefp == binascii.unhexlify(formatspecs['format_hex'])):
@@ -4540,36 +4742,36 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, clos
4540
4742
  filetype = formatspecs['format_magic']
4541
4743
  else:
4542
4744
  pass
4543
- fp.seek(0, 0)
4745
+ fp.seek(curloc, 0)
4544
4746
  prefp = fp.read(10)
4545
4747
  if(prefp == binascii.unhexlify("7061785f676c6f62616c")):
4546
4748
  filetype = "tarfile"
4547
- fp.seek(0, 0)
4749
+ fp.seek(curloc, 0)
4548
4750
  if(closefp):
4549
4751
  fp.close()
4550
4752
  return filetype
4551
4753
 
4552
4754
 
4553
- def CheckCompressionTypeFromString(instring, formatspecs=__file_format_multi_dict__, closefp=True):
4755
+ def CheckCompressionTypeFromString(instring, formatspecs=__file_format_multi_dict__, filestart=0, closefp=True):
4554
4756
  try:
4555
4757
  instringsfile = MkTempFile(instring)
4556
4758
  except TypeError:
4557
4759
  instringsfile = MkTempFile(instring.encode("UTF-8"))
4558
- return CheckCompressionType(instringsfile, formatspecs, closefp)
4760
+ return CheckCompressionType(instringsfile, formatspecs, filestart, closefp)
4559
4761
 
4560
4762
 
4561
- def CheckCompressionTypeFromBytes(instring, formatspecs=__file_format_multi_dict__, closefp=True):
4763
+ def CheckCompressionTypeFromBytes(instring, formatspecs=__file_format_multi_dict__, filestart=0, closefp=True):
4562
4764
  try:
4563
4765
  instringsfile = MkTempFile(instring)
4564
4766
  except TypeError:
4565
4767
  instringsfile = MkTempFile(instring.decode("UTF-8"))
4566
- return CheckCompressionType(instringsfile, formatspecs, closefp)
4768
+ return CheckCompressionType(instringsfile, formatspecs, filestart, closefp)
4567
4769
 
4568
4770
 
4569
- def UncompressFileAlt(fp, formatspecs=__file_format_multi_dict__):
4771
+ def UncompressFileAlt(fp, formatspecs=__file_format_multi_dict__, filestart=0):
4570
4772
  if(not hasattr(fp, "read")):
4571
4773
  return False
4572
- compresscheck = CheckCompressionType(fp, formatspecs, False)
4774
+ compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
4573
4775
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
4574
4776
  formatspecs = formatspecs[compresscheck]
4575
4777
  if(compresscheck == "gzip" and compresscheck in compressionsupport):
@@ -4603,8 +4805,8 @@ def UncompressFileAlt(fp, formatspecs=__file_format_multi_dict__):
4603
4805
  return fp
4604
4806
 
4605
4807
 
4606
- def UncompressFile(infile, formatspecs=__file_format_multi_dict__, mode="rb"):
4607
- compresscheck = CheckCompressionType(infile, formatspecs, False)
4808
+ def UncompressFile(infile, formatspecs=__file_format_multi_dict__, mode="rb", filestart=0):
4809
+ compresscheck = CheckCompressionType(infile, formatspecs, filestart, False)
4608
4810
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
4609
4811
  formatspecs = formatspecs[compresscheck]
4610
4812
  if(sys.version_info[0] == 2 and compresscheck):
@@ -4650,8 +4852,8 @@ def UncompressFile(infile, formatspecs=__file_format_multi_dict__, mode="rb"):
4650
4852
  return filefp
4651
4853
 
4652
4854
 
4653
- def UncompressString(infile, formatspecs=__file_format_multi_dict__):
4654
- compresscheck = CheckCompressionTypeFromString(infile, formatspecs, False)
4855
+ def UncompressString(infile, formatspecs=__file_format_multi_dict__, filestart=0):
4856
+ compresscheck = CheckCompressionTypeFromString(infile, formatspecs, filestart, False)
4655
4857
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
4656
4858
  formatspecs = formatspecs[compresscheck]
4657
4859
  if(compresscheck == "gzip" and compresscheck in compressionsupport):
@@ -4678,32 +4880,32 @@ def UncompressString(infile, formatspecs=__file_format_multi_dict__):
4678
4880
  return fileuz
4679
4881
 
4680
4882
 
4681
- def UncompressStringAlt(instring, formatspecs=__file_format_multi_dict__):
4883
+ def UncompressStringAlt(instring, formatspecs=__file_format_multi_dict__, filestart=0):
4682
4884
  filefp = StringIO()
4683
- outstring = UncompressString(instring, formatspecs)
4885
+ outstring = UncompressString(instring, formatspecs, filestart)
4684
4886
  filefp.write(outstring)
4685
4887
  filefp.seek(0, 0)
4686
4888
  return filefp
4687
4889
 
4688
- def UncompressStringAltFP(fp, formatspecs=__file_format_multi_dict__):
4890
+ def UncompressStringAltFP(fp, formatspecs=__file_format_multi_dict__, filestart=0):
4689
4891
  if(not hasattr(fp, "read")):
4690
4892
  return False
4691
- prechck = CheckCompressionType(fp, formatspecs, False)
4893
+ prechck = CheckCompressionType(fp, formatspecs, filestart, False)
4692
4894
  if(IsNestedDict(formatspecs) and prechck in formatspecs):
4693
4895
  formatspecs = formatspecs[prechck]
4694
- fp.seek(0, 0)
4896
+ fp.seek(filestart, 0)
4695
4897
  if(prechck!="zstd"):
4696
- return UncompressFileAlt(fp, formatspecs)
4898
+ return UncompressFileAlt(fp, formatspecs, filestart)
4697
4899
  filefp = StringIO()
4698
- fp.seek(0, 0)
4699
- outstring = UncompressString(fp.read(), formatspecs)
4900
+ fp.seek(filestart, 0)
4901
+ outstring = UncompressString(fp.read(), formatspecs, 0)
4700
4902
  filefp.write(outstring)
4701
4903
  filefp.seek(0, 0)
4702
4904
  return filefp
4703
4905
 
4704
4906
 
4705
- def UncompressBytes(infile, formatspecs=__file_format_multi_dict__):
4706
- compresscheck = CheckCompressionTypeFromBytes(infile, formatspecs, False)
4907
+ def UncompressBytes(infile, formatspecs=__file_format_multi_dict__, filestart=0):
4908
+ compresscheck = CheckCompressionTypeFromBytes(infile, formatspecs, filestart, False)
4707
4909
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
4708
4910
  formatspecs = formatspecs[compresscheck]
4709
4911
  if(compresscheck == "gzip" and compresscheck in compressionsupport):
@@ -4728,26 +4930,26 @@ def UncompressBytes(infile, formatspecs=__file_format_multi_dict__):
4728
4930
  return fileuz
4729
4931
 
4730
4932
 
4731
- def UncompressBytesAlt(inbytes, formatspecs=__file_format_multi_dict__):
4933
+ def UncompressBytesAlt(inbytes, formatspecs=__file_format_multi_dict__, filestart=0):
4732
4934
  filefp = MkTempFile()
4733
- outstring = UncompressBytes(inbytes, formatspecs)
4935
+ outstring = UncompressBytes(inbytes, formatspecs, filestart)
4734
4936
  filefp.write(outstring)
4735
4937
  filefp.seek(0, 0)
4736
4938
  return filefp
4737
4939
 
4738
4940
 
4739
- def UncompressBytesAltFP(fp, formatspecs=__file_format_multi_dict__):
4941
+ def UncompressBytesAltFP(fp, formatspecs=__file_format_multi_dict__, filestart=0):
4740
4942
  if(not hasattr(fp, "read")):
4741
4943
  return False
4742
- prechck = CheckCompressionType(fp, formatspecs, False)
4944
+ prechck = CheckCompressionType(fp, formatspecs, filestart, False)
4743
4945
  if(IsNestedDict(formatspecs) and prechck in formatspecs):
4744
4946
  formatspecs = formatspecs[prechck]
4745
- fp.seek(0, 0)
4947
+ fp.seek(filestart, 0)
4746
4948
  if(prechck!="zstd"):
4747
- return UncompressFileAlt(fp, formatspecs)
4949
+ return UncompressFileAlt(fp, formatspecs, filestart)
4748
4950
  filefp = MkTempFile()
4749
- fp.seek(0, 0)
4750
- outstring = UncompressBytes(fp.read(), formatspecs)
4951
+ fp.seek(filestart, 0)
4952
+ outstring = UncompressBytes(fp.read(), formatspecs, 0)
4751
4953
  filefp.write(outstring)
4752
4954
  filefp.seek(0, 0)
4753
4955
  return filefp
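The same offset threading runs through the file-object decompressors in these hunks: the unconditional fp.seek(0, 0) rewinds become fp.seek(filestart, 0), so a compressed archive embedded inside a larger file can be decompressed in place. A rough sketch under the signatures shown above; the container name and offset are illustrative, and the note about the return value paraphrases the flow in this diff rather than documented behaviour:

import pyarchivefile as paf

OFFSET = 4096                                  # assumed start of the embedded archive
with open("container.bin", "rb") as fp:        # hypothetical file with a leading header blob
    # Per the code above, this returns a file-like object from which the
    # uncompressed data can be read (or the original fp for unrecognized input).
    inner = paf.UncompressBytesAltFP(fp, filestart=OFFSET)
    data = inner.read()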
@@ -4994,7 +5196,7 @@ def PackArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", comp
4994
5196
  fp = MkTempFile()
4995
5197
  elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
4996
5198
  fp = outfile
4997
- elif(re.findall("^(ftp|ftps|sftp):\\/\\/", outfile)):
5199
+ elif(re.findall(__upload_proto_support__, outfile)):
4998
5200
  fp = MkTempFile()
4999
5201
  else:
5000
5202
  fbasename = os.path.splitext(outfile)[0]
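Throughout the PackArchiveFile* functions the hard-coded scheme regexes ("^(ftp|ftps|sftp)://" and "^(http|https|ftp|ftps|sftp)://") are replaced by the shared __upload_proto_support__ and __download_proto_support__ constants, which also admit scp:// targets. A quick check of what the new patterns accept, with made-up URLs:

import re
from pyarchivefile import __upload_proto_support__, __download_proto_support__

print(bool(re.findall(__upload_proto_support__, "scp://backup.example/out.arc")))     # True: scp is newly accepted
print(bool(re.findall(__download_proto_support__, "https://mirror.example/in.arc")))  # True: http(s) is download-only
print(bool(re.findall(__upload_proto_support__, "/tmp/out.arc")))                     # False: handled as a local path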
@@ -5196,9 +5398,9 @@ def PackArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", comp
5196
5398
  if not followlink and ftype in data_types:
5197
5399
  with open(fname, "rb") as fpc:
5198
5400
  shutil.copyfileobj(fpc, fcontents)
5199
- typechecktest = CheckCompressionType(fcontents, closefp=False)
5401
+ typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
5200
5402
  fcontents.seek(0, 0)
5201
- fcencoding = GetFileEncoding(fcontents, False)
5403
+ fcencoding = GetFileEncoding(fcontents, 0, False)
5202
5404
  if(typechecktest is False and not compresswholefile):
5203
5405
  fcontents.seek(0, 2)
5204
5406
  ucfsize = fcontents.tell()
@@ -5243,9 +5445,9 @@ def PackArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", comp
5243
5445
  flstatinfo = os.stat(flinkname)
5244
5446
  with open(flinkname, "rb") as fpc:
5245
5447
  shutil.copyfileobj(fpc, fcontents)
5246
- typechecktest = CheckCompressionType(fcontents, closefp=False)
5448
+ typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
5247
5449
  fcontents.seek(0, 0)
5248
- fcencoding = GetFileEncoding(fcontents, False)
5450
+ fcencoding = GetFileEncoding(fcontents, 0, False)
5249
5451
  if(typechecktest is False and not compresswholefile):
5250
5452
  fcontents.seek(0, 2)
5251
5453
  ucfsize = fcontents.tell()
@@ -5323,7 +5525,7 @@ def PackArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", comp
5323
5525
  outvar = fp.read()
5324
5526
  fp.close()
5325
5527
  return outvar
5326
- elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall("^(ftp|ftps|sftp):\\/\\/", outfile)):
5528
+ elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
5327
5529
  fp = CompressOpenFileAlt(
5328
5530
  fp, compression, compressionlevel, compressionuselist, formatspecs)
5329
5531
  fp.seek(0, 0)
@@ -5378,7 +5580,7 @@ def PackArchiveFileFromTarFile(infile, outfile, fmttype="auto", compression="aut
5378
5580
  fp = MkTempFile()
5379
5581
  elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
5380
5582
  fp = outfile
5381
- elif(re.findall("^(ftp|ftps|sftp):\\/\\/", outfile)):
5583
+ elif(re.findall(__upload_proto_support__, outfile)):
5382
5584
  fp = MkTempFile()
5383
5585
  else:
5384
5586
  fbasename = os.path.splitext(outfile)[0]
@@ -5407,7 +5609,7 @@ def PackArchiveFileFromTarFile(infile, outfile, fmttype="auto", compression="aut
5407
5609
  if(not infile):
5408
5610
  return False
5409
5611
  infile.seek(0, 0)
5410
- elif(re.findall("^(http|https|ftp|ftps|sftp):\\/\\/", infile)):
5612
+ elif(re.findall(__download_proto_support__, infile)):
5411
5613
  infile = download_file_from_internet_file(infile)
5412
5614
  infile.seek(0, 0)
5413
5615
  if(not infile):
@@ -5431,7 +5633,7 @@ def PackArchiveFileFromTarFile(infile, outfile, fmttype="auto", compression="aut
5431
5633
  return False
5432
5634
  try:
5433
5635
  if(hasattr(infile, "read") or hasattr(infile, "write")):
5434
- compresscheck = CheckCompressionType(infile, formatspecs, False)
5636
+ compresscheck = CheckCompressionType(infile, formatspecs, 0, False)
5435
5637
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
5436
5638
  formatspecs = formatspecs[compresscheck]
5437
5639
  if(compresscheck=="zstd"):
@@ -5443,7 +5645,7 @@ def PackArchiveFileFromTarFile(infile, outfile, fmttype="auto", compression="aut
5443
5645
  else:
5444
5646
  tarfp = tarfile.open(fileobj=infile, mode="r")
5445
5647
  else:
5446
- compresscheck = CheckCompressionType(infile, formatspecs, True)
5648
+ compresscheck = CheckCompressionType(infile, formatspecs, 0, True)
5447
5649
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
5448
5650
  formatspecs = formatspecs[compresscheck]
5449
5651
  if(compresscheck=="zstd"):
@@ -5544,9 +5746,9 @@ def PackArchiveFileFromTarFile(infile, outfile, fmttype="auto", compression="aut
5544
5746
  fpc = tarfp.extractfile(member)
5545
5747
  shutil.copyfileobj(fpc, fcontents)
5546
5748
  fpc.close()
5547
- typechecktest = CheckCompressionType(fcontents, closefp=False)
5749
+ typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
5548
5750
  fcontents.seek(0, 0)
5549
- fcencoding = GetFileEncoding(fcontents, False)
5751
+ fcencoding = GetFileEncoding(fcontents, 0, False)
5550
5752
  if(typechecktest is False and not compresswholefile):
5551
5753
  fcontents.seek(0, 2)
5552
5754
  ucfsize = fcontents.tell()
@@ -5624,7 +5826,7 @@ def PackArchiveFileFromTarFile(infile, outfile, fmttype="auto", compression="aut
5624
5826
  outvar = fp.read()
5625
5827
  fp.close()
5626
5828
  return outvar
5627
- elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall("^(ftp|ftps|sftp):\\/\\/", outfile)):
5829
+ elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
5628
5830
  fp = CompressOpenFileAlt(
5629
5831
  fp, compression, compressionlevel, compressionuselist, formatspecs)
5630
5832
  fp.seek(0, 0)
@@ -5675,7 +5877,7 @@ def PackArchiveFileFromZipFile(infile, outfile, fmttype="auto", compression="aut
5675
5877
  fp = MkTempFile()
5676
5878
  elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
5677
5879
  fp = outfile
5678
- elif(re.findall("^(ftp|ftps|sftp):\\/\\/", outfile)):
5880
+ elif(re.findall(__upload_proto_support__, outfile)):
5679
5881
  fp = MkTempFile()
5680
5882
  else:
5681
5883
  fbasename = os.path.splitext(outfile)[0]
@@ -5704,7 +5906,7 @@ def PackArchiveFileFromZipFile(infile, outfile, fmttype="auto", compression="aut
5704
5906
  if(not infile):
5705
5907
  return False
5706
5908
  infile.seek(0, 0)
5707
- elif(re.findall("^(http|https|ftp|ftps|sftp):\\/\\/", infile)):
5909
+ elif(re.findall(__download_proto_support__, infile)):
5708
5910
  infile = download_file_from_internet_file(infile)
5709
5911
  infile.seek(0, 0)
5710
5912
  if(not infile):
@@ -5841,9 +6043,9 @@ def PackArchiveFileFromZipFile(infile, outfile, fmttype="auto", compression="aut
5841
6043
  curcompression = "none"
5842
6044
  if ftype == 0:
5843
6045
  fcontents.write(zipfp.read(member.filename))
5844
- typechecktest = CheckCompressionType(fcontents, closefp=False)
6046
+ typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
5845
6047
  fcontents.seek(0, 0)
5846
- fcencoding = GetFileEncoding(fcontents, False)
6048
+ fcencoding = GetFileEncoding(fcontents, 0, False)
5847
6049
  if(typechecktest is False and not compresswholefile):
5848
6050
  fcontents.seek(0, 2)
5849
6051
  ucfsize = fcontents.tell()
@@ -5918,7 +6120,7 @@ def PackArchiveFileFromZipFile(infile, outfile, fmttype="auto", compression="aut
5918
6120
  outvar = fp.read()
5919
6121
  fp.close()
5920
6122
  return outvar
5921
- elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall("^(ftp|ftps|sftp):\\/\\/", outfile)):
6123
+ elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
5922
6124
  fp = CompressOpenFileAlt(
5923
6125
  fp, compression, compressionlevel, compressionuselist, formatspecs)
5924
6126
  fp.seek(0, 0)
@@ -5974,7 +6176,7 @@ if(rarfile_support):
5974
6176
  fp = MkTempFile()
5975
6177
  elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
5976
6178
  fp = outfile
5977
- elif(re.findall("^(ftp|ftps|sftp):\\/\\/", outfile)):
6179
+ elif(re.findall(__upload_proto_support__, outfile)):
5978
6180
  fp = MkTempFile()
5979
6181
  else:
5980
6182
  fbasename = os.path.splitext(outfile)[0]
@@ -6158,9 +6360,9 @@ if(rarfile_support):
6158
6360
  curcompression = "none"
6159
6361
  if ftype == 0:
6160
6362
  fcontents.write(rarfp.read(member.filename))
6161
- typechecktest = CheckCompressionType(fcontents, closefp=False)
6363
+ typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
6162
6364
  fcontents.seek(0, 0)
6163
- fcencoding = GetFileEncoding(fcontents, False)
6365
+ fcencoding = GetFileEncoding(fcontents, 0, False)
6164
6366
  if(typechecktest is False and not compresswholefile):
6165
6367
  fcontents.seek(0, 2)
6166
6368
  ucfsize = fcontents.tell()
@@ -6238,7 +6440,7 @@ if(rarfile_support):
6238
6440
  outvar = fp.read()
6239
6441
  fp.close()
6240
6442
  return outvar
6241
- elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall("^(ftp|ftps|sftp):\\/\\/", outfile)):
6443
+ elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
6242
6444
  fp = CompressOpenFileAlt(
6243
6445
  fp, compression, compressionlevel, compressionuselist, formatspecs)
6244
6446
  fp.seek(0, 0)
@@ -6294,7 +6496,7 @@ if(py7zr_support):
6294
6496
  fp = MkTempFile()
6295
6497
  elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
6296
6498
  fp = outfile
6297
- elif(re.findall("^(ftp|ftps|sftp):\\/\\/", outfile)):
6499
+ elif(re.findall(__upload_proto_support__, outfile)):
6298
6500
  fp = MkTempFile()
6299
6501
  else:
6300
6502
  fbasename = os.path.splitext(outfile)[0]
@@ -6317,7 +6519,7 @@ if(py7zr_support):
6317
6519
  return False
6318
6520
  szpfp = py7zr.SevenZipFile(infile, mode="r")
6319
6521
  file_content = szpfp.readall()
6320
- #sztest = szpfp.testzip();
6522
+ #sztest = szpfp.testzip()
6321
6523
  sztestalt = szpfp.test()
6322
6524
  if(sztestalt):
6323
6525
  VerbosePrintOut("Bad file found!")
@@ -6411,9 +6613,9 @@ if(py7zr_support):
6411
6613
  fcontents.write(file_content[member.filename].read())
6412
6614
  fsize = format(fcontents.tell(), 'x').lower()
6413
6615
  fcontents.seek(0, 0)
6414
- typechecktest = CheckCompressionType(fcontents, closefp=False)
6616
+ typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
6415
6617
  fcontents.seek(0, 0)
6416
- fcencoding = GetFileEncoding(fcontents, False)
6618
+ fcencoding = GetFileEncoding(fcontents, 0, False)
6417
6619
  file_content[member.filename].close()
6418
6620
  if(typechecktest is False and not compresswholefile):
6419
6621
  fcontents.seek(0, 2)
@@ -6492,7 +6694,7 @@ if(py7zr_support):
6492
6694
  outvar = fp.read()
6493
6695
  fp.close()
6494
6696
  return outvar
6495
- elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall("^(ftp|ftps|sftp):\\/\\/", outfile)):
6697
+ elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
6496
6698
  fp = CompressOpenFileAlt(
6497
6699
  fp, compression, compressionlevel, compressionuselist, formatspecs)
6498
6700
  fp.seek(0, 0)
@@ -6506,7 +6708,7 @@ if(py7zr_support):
6506
6708
 
6507
6709
 
6508
6710
  def PackArchiveFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
6509
- checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
6711
+ checkcompressfile = CheckCompressionSubType(infile, formatspecs, 0, True)
6510
6712
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
6511
6713
  formatspecs = formatspecs[checkcompressfile]
6512
6714
  if(verbose):
@@ -6526,18 +6728,20 @@ def PackArchiveFileFromInFile(infile, outfile, fmttype="auto", compression="auto
6526
6728
  return False
6527
6729
 
6528
6730
 
6529
- def ArchiveFileSeekToFileNum(infile, fmttype="auto", seekto=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
6731
+ def ArchiveFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
6732
+ if(verbose):
6733
+ logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
6530
6734
  if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
6531
6735
  formatspecs = formatspecs[fmttype]
6532
6736
  elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
6533
6737
  fmttype = "auto"
6534
- curloc = 0
6738
+ curloc = filestart
6535
6739
  if(hasattr(infile, "read") or hasattr(infile, "write")):
6536
6740
  curloc = infile.tell()
6537
6741
  fp = infile
6538
- fp.seek(0, 0)
6539
- fp = UncompressFileAlt(fp, formatspecs)
6540
- checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
6742
+ fp.seek(filestart, 0)
6743
+ fp = UncompressFileAlt(fp, formatspecs, filestart)
6744
+ checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
6541
6745
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
6542
6746
  formatspecs = formatspecs[checkcompressfile]
6543
6747
  if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
@@ -6554,45 +6758,45 @@ def ArchiveFileSeekToFileNum(infile, fmttype="auto", seekto=0, listonly=False, c
6554
6758
  return False
6555
6759
  if(not fp):
6556
6760
  return False
6557
- fp.seek(0, 0)
6761
+ fp.seek(filestart, 0)
6558
6762
  elif(infile == "-"):
6559
6763
  fp = MkTempFile()
6560
6764
  if(hasattr(sys.stdin, "buffer")):
6561
6765
  shutil.copyfileobj(sys.stdin.buffer, fp)
6562
6766
  else:
6563
6767
  shutil.copyfileobj(sys.stdin, fp)
6564
- fp.seek(0, 0)
6565
- fp = UncompressFileAlt(fp, formatspecs)
6566
- checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
6768
+ fp.seek(filestart, 0)
6769
+ fp = UncompressFileAlt(fp, formatspecs, filestart)
6770
+ checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
6567
6771
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
6568
6772
  formatspecs = formatspecs[checkcompressfile]
6569
6773
  if(not fp):
6570
6774
  return False
6571
- fp.seek(0, 0)
6775
+ fp.seek(filestart, 0)
6572
6776
  elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
6573
6777
  fp = MkTempFile()
6574
6778
  fp.write(infile)
6575
- fp.seek(0, 0)
6576
- fp = UncompressFileAlt(fp, formatspecs)
6577
- compresscheck = CheckCompressionType(fp, formatspecs, False)
6779
+ fp.seek(filestart, 0)
6780
+ fp = UncompressFileAlt(fp, formatspecs, filestart)
6781
+ compresscheck = CheckCompressionType(fp, formatspecs, 0, False)
6578
6782
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
6579
6783
  formatspecs = formatspecs[compresscheck]
6580
6784
  if(not fp):
6581
6785
  return False
6582
- fp.seek(0, 0)
6583
- elif(re.findall("^(http|https|ftp|ftps|sftp):\\/\\/", infile)):
6786
+ fp.seek(filestart, 0)
6787
+ elif(re.findall(__download_proto_support__, infile)):
6584
6788
  fp = download_file_from_internet_file(infile)
6585
- fp = UncompressFileAlt(fp, formatspecs)
6586
- compresscheck = CheckCompressionType(fp, formatspecs, False)
6789
+ fp = UncompressFileAlt(fp, formatspecs, filestart)
6790
+ compresscheck = CheckCompressionType(fp, formatspecs, 0, False)
6587
6791
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
6588
6792
  formatspecs = formatspecs[compresscheck]
6589
- fp.seek(0, 0)
6793
+ fp.seek(filestart, 0)
6590
6794
  if(not fp):
6591
6795
  return False
6592
- fp.seek(0, 0)
6796
+ fp.seek(filestart, 0)
6593
6797
  else:
6594
6798
  infile = RemoveWindowsPath(infile)
6595
- checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
6799
+ checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
6596
6800
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
6597
6801
  formatspecs = formatspecs[checkcompressfile]
6598
6802
  if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
@@ -6607,7 +6811,7 @@ def ArchiveFileSeekToFileNum(infile, fmttype="auto", seekto=0, listonly=False, c
6607
6811
  return False
6608
6812
  elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
6609
6813
  return False
6610
- compresscheck = CheckCompressionType(infile, formatspecs, True)
6814
+ compresscheck = CheckCompressionType(infile, formatspecs, filestart, True)
6611
6815
  if(not compresscheck):
6612
6816
  fextname = os.path.splitext(infile)[1]
6613
6817
  if(fextname == ".gz"):
@@ -6630,26 +6834,23 @@ def ArchiveFileSeekToFileNum(infile, fmttype="auto", seekto=0, listonly=False, c
6630
6834
  return False
6631
6835
  if(not compresscheck):
6632
6836
  return False
6633
- fp = UncompressFile(infile, formatspecs, "rb")
6837
+ fp = UncompressFile(infile, formatspecs, "rb", filestart)
6634
6838
  try:
6635
- fp.seek(0, 2);
6839
+ fp.seek(0, 2)
6636
6840
  except OSError:
6637
- SeekToEndOfFile(fp);
6841
+ SeekToEndOfFile(fp)
6638
6842
  except ValueError:
6639
- SeekToEndOfFile(fp);
6640
- CatSize = fp.tell();
6641
- CatSizeEnd = CatSize;
6843
+ SeekToEndOfFile(fp)
6844
+ CatSize = fp.tell()
6845
+ CatSizeEnd = CatSize
6642
6846
  fp.seek(curloc, 0)
6643
- if(curloc > 0):
6644
- fp.seek(0, 0)
6645
6847
  if(IsNestedDict(formatspecs)):
6646
- compresschecking = CheckCompressionType(fp, formatspecs, False)
6848
+ compresschecking = CheckCompressionType(fp, formatspecs, filestart, False)
6647
6849
  if(compresschecking not in formatspecs):
6648
- fp.seek(0, 0)
6649
6850
  return False
6650
6851
  else:
6651
6852
  formatspecs = formatspecs[compresschecking]
6652
- fp.seek(0, 0)
6853
+ fp.seek(filestart, 0)
6653
6854
  inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
6654
6855
  formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
6655
6856
  formdelszie = len(formatspecs['format_delimiter'])
@@ -6666,23 +6867,8 @@ def ArchiveFileSeekToFileNum(infile, fmttype="auto", seekto=0, listonly=False, c
6666
6867
  fp, formatspecs['format_delimiter'])
6667
6868
  fnumextrafieldsize = int(inheader[5], 16)
6668
6869
  fnumextrafields = int(inheader[6], 16)
6669
- fextrafieldslist = []
6670
6870
  extrastart = 7
6671
6871
  extraend = extrastart + fnumextrafields
6672
- while(extrastart < extraend):
6673
- fextrafieldslist.append(inheader[extrastart])
6674
- extrastart = extrastart + 1
6675
- if(fnumextrafields==1):
6676
- try:
6677
- fextrafieldslist = json.loads(base64.b64decode(fextrafieldslist[0]).decode("UTF-8"))
6678
- fnumextrafields = len(fextrafieldslist)
6679
- except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
6680
- try:
6681
- fextrafieldslist = json.loads(fextrafieldslist[0])
6682
- except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
6683
- pass
6684
- if(curloc > 0):
6685
- fp.seek(curloc, 0)
6686
6872
  formversion = re.findall("([\\d]+)", formstring)
6687
6873
  fheadsize = int(inheader[0], 16)
6688
6874
  fnumfields = int(inheader[1], 16)
@@ -6691,649 +6877,27 @@ def ArchiveFileSeekToFileNum(infile, fmttype="auto", seekto=0, listonly=False, c
6691
6877
  fnumfiles = int(inheader[4], 16)
6692
6878
  fprechecksumtype = inheader[-2]
6693
6879
  fprechecksum = inheader[-1]
6880
+ il = 0
6694
6881
  headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
6695
6882
  newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
6696
- if(not headercheck and not skipchecksum):
6697
- VerbosePrintOut(
6698
- "File Header Checksum Error with file at offset " + str(0))
6699
- VerbosePrintOut("'" + fprechecksum + "' != " +
6700
- "'" + newfcs + "'")
6701
- return False
6702
- formversions = re.search('(.*?)(\\d+)', formstring).groups()
6703
- fcompresstype = compresscheck
6704
- if(fcompresstype==formatspecs['format_magic']):
6705
- fcompresstype = ""
6706
- outlist = {'fnumfiles': fnumfiles, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
6707
- if(seekto >= fnumfiles):
6708
- seekto = fnumfiles - 1
6709
- if(seekto < 0):
6710
- seekto = 0
6711
- if(seekto >= 0):
6712
- il = -1
6713
- while (fp.tell() < CatSizeEnd) if seektoend else (il < seekto):
6714
- prefhstart = fp.tell()
6715
- if(formatspecs['new_style']):
6716
- preheaderdata = ReadFileHeaderDataBySize(
6717
- fp, formatspecs['format_delimiter'])
6718
- else:
6719
- preheaderdata = ReadFileHeaderDataWoSize(
6720
- fp, formatspecs['format_delimiter'])
6721
- if(len(preheaderdata) == 0):
6722
- break
6723
- prefheadsize = int(preheaderdata[0], 16)
6724
- prefnumfields = int(preheaderdata[1], 16)
6725
- preftype = int(preheaderdata[2], 16)
6726
- prefencoding = preheaderdata[3]
6727
- prefcencoding = preheaderdata[4]
6728
- if(re.findall("^[.|/]", preheaderdata[5])):
6729
- prefname = preheaderdata[5]
6730
- else:
6731
- prefname = "./"+preheaderdata[5]
6732
- prefbasedir = os.path.dirname(prefname)
6733
- preflinkname = preheaderdata[6]
6734
- prefsize = int(preheaderdata[7], 16)
6735
- prefatime = int(preheaderdata[8], 16)
6736
- prefmtime = int(preheaderdata[9], 16)
6737
- prefctime = int(preheaderdata[10], 16)
6738
- prefbtime = int(preheaderdata[11], 16)
6739
- prefmode = int(preheaderdata[12], 16)
6740
- prefchmode = stat.S_IMODE(prefmode)
6741
- preftypemod = stat.S_IFMT(prefmode)
6742
- prefwinattributes = int(preheaderdata[13], 16)
6743
- prefcompression = preheaderdata[14]
6744
- prefcsize = int(preheaderdata[15], 16)
6745
- prefuid = int(preheaderdata[16], 16)
6746
- prefuname = preheaderdata[17]
6747
- prefgid = int(preheaderdata[18], 16)
6748
- prefgname = preheaderdata[19]
6749
- fid = int(preheaderdata[20], 16)
6750
- finode = int(preheaderdata[21], 16)
6751
- flinkcount = int(preheaderdata[22], 16)
6752
- prefdev = int(preheaderdata[23], 16)
6753
- prefdev_minor = int(preheaderdata[24], 16)
6754
- prefdev_major = int(preheaderdata[25], 16)
6755
- prefseeknextfile = preheaderdata[26]
6756
- prefjsontype = preheaderdata[27]
6757
- prefjsonlen = int(preheaderdata[28], 16)
6758
- prefjsonsize = int(preheaderdata[29], 16)
6759
- prefjsonchecksumtype = preheaderdata[30]
6760
- prefjsonchecksum = preheaderdata[31]
6761
- prefhend = fp.tell() - 1
6762
- prefjstart = fp.tell()
6763
- prefjoutfprejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
6764
- prefjend = fp.tell()
6765
- fp.seek(len(formatspecs['format_delimiter']), 1)
6766
- prejsonfcs = GetFileChecksum(prefjoutfprejsoncontent, prefjsonchecksumtype, True, formatspecs)
6767
- prefextrasize = int(preheaderdata[32], 16)
6768
- prefextrafields = int(preheaderdata[33], 16)
6769
- extrastart = 34
6770
- extraend = extrastart + prefextrafields
6771
- prefcs = preheaderdata[-2].lower()
6772
- prenewfcs = preheaderdata[-1].lower()
6773
- prenewfcs = GetHeaderChecksum(
6774
- preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
6775
- if(prefcs != prenewfcs and not skipchecksum):
6776
- VerbosePrintOut("File Header Checksum Error with file " +
6777
- prefname + " at offset " + str(prefhstart))
6778
- VerbosePrintOut("'" + prefcs + "' != " +
6779
- "'" + prenewfcs + "'")
6780
- return False
6781
- if(prefjsonsize > 0):
6782
- if(prejsonfcs != prefjsonchecksum and not skipchecksum):
6783
- VerbosePrintOut("File JSON Data Checksum Error with file " +
6784
- prefname + " at offset " + str(prefjstart))
6785
- VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
6786
- return False
6787
- prefcontentstart = fp.tell()
6788
- prefcontents = ""
6789
- pyhascontents = False
6790
- if(prefsize > 0):
6791
- if(prefcompression):
6792
- prefcontents = fp.read(prefsize)
6793
- else:
6794
- prefcontents = fp.read(prefcsize)
6795
- prenewfccs = GetFileChecksum(
6796
- prefcontents, preheaderdata[-3].lower(), False, formatspecs)
6797
- pyhascontents = True
6798
- if(prefccs != prenewfccs and not skipchecksum):
6799
- VerbosePrintOut("File Content Checksum Error with file " +
6800
- prefname + " at offset " + str(prefcontentstart))
6801
- VerbosePrintOut("'" + prefccs +
6802
- "' != " + "'" + prenewfccs + "'")
6803
- return False
6804
- if(re.findall("^\\+([0-9]+)", prefseeknextfile)):
6805
- fseeknextasnum = int(prefseeknextfile.replace("+", ""))
6806
- if(abs(fseeknextasnum) == 0):
6807
- pass
6808
- fp.seek(fseeknextasnum, 1)
6809
- elif(re.findall("^\\-([0-9]+)", prefseeknextfile)):
6810
- fseeknextasnum = int(prefseeknextfile)
6811
- if(abs(fseeknextasnum) == 0):
6812
- pass
6813
- fp.seek(fseeknextasnum, 1)
6814
- elif(re.findall("^([0-9]+)", prefseeknextfile)):
6815
- fseeknextasnum = int(prefseeknextfile)
6816
- if(abs(fseeknextasnum) == 0):
6817
- pass
6818
- fp.seek(fseeknextasnum, 0)
6819
- else:
6820
- return False
6821
- il = il + 1
6822
- fp.seek(seekstart, 0)
6823
- fileidnum = il
6824
- outfheadsize = int(preheaderdata[0], 16)
6825
- outfnumfields = int(preheaderdata[1], 16)
6826
- outftype = int(preheaderdata[2], 16)
6827
- outfencoding = preheaderdata[3]
6828
- if(re.findall("^[.|/]", preheaderdata[4])):
6829
- outfname = preheaderdata[4]
6830
- else:
6831
- outfname = "./"+preheaderdata[4]
6832
- outflinkname = preheaderdata[5]
6833
- outfsize = int(preheaderdata[6], 16)
6834
- outfbasedir = os.path.dirname(outfname)
6835
- outlist = {'fid': fileidnum, 'foffset': fp.tell(), 'ftype': outftype, 'fencoding': outfencoding, 'fname': outfname,
6836
- 'fbasedir': outfbasedir, 'flinkname': outflinkname, 'fsize': outfsize}
6837
- if(returnfp):
6838
- outlist.update({'fp': fp})
6839
- else:
6840
- fp.close()
6841
- return outlist
6842
-
6843
-
6844
- def ArchiveFileSeekToFileName(infile, fmttype="auto", seekfile=None, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
6845
- if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
6846
- formatspecs = formatspecs[fmttype]
6847
- elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
6848
- fmttype = "auto"
6849
- curloc = 0
6850
- if(hasattr(infile, "read") or hasattr(infile, "write")):
6851
- curloc = infile.tell()
6852
- fp = infile
6853
- fp.seek(0, 0)
6854
- fp = UncompressFileAlt(fp, formatspecs)
6855
- checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
6856
- if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
6857
- formatspecs = formatspecs[checkcompressfile]
6858
- if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
6859
- return TarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
6860
- elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
6861
- return ZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
6862
- elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
6863
- return RarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
6864
- elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
6865
- return SevenZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
6866
- elif(IsSingleDict(formatspecs) and checkcompressfile != formatspecs['format_magic']):
6867
- return False
6868
- elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
6869
- return False
6870
- if(not fp):
6871
- return False
6872
- fp.seek(0, 0)
6873
- elif(infile == "-"):
6874
- fp = MkTempFile()
6875
- if(hasattr(sys.stdin, "buffer")):
6876
- shutil.copyfileobj(sys.stdin.buffer, fp)
6877
- else:
6878
- shutil.copyfileobj(sys.stdin, fp)
6879
- fp.seek(0, 0)
6880
- fp = UncompressFileAlt(fp, formatspecs)
6881
- checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
6882
- if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
6883
- formatspecs = formatspecs[checkcompressfile]
6884
- if(not fp):
6885
- return False
6886
- fp.seek(0, 0)
6887
- elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
6888
- fp = MkTempFile()
6889
- fp.write(infile)
6890
- fp.seek(0, 0)
6891
- fp = UncompressFileAlt(fp, formatspecs)
6892
- compresscheck = CheckCompressionType(fp, formatspecs, False)
6893
- if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
6894
- formatspecs = formatspecs[compresscheck]
6895
- if(not fp):
6896
- return False
6897
- fp.seek(0, 0)
6898
- elif(re.findall("^(http|https|ftp|ftps|sftp):\\/\\/", infile)):
6899
- fp = download_file_from_internet_file(infile)
6900
- fp = UncompressFileAlt(fp, formatspecs)
6901
- compresscheck = CheckCompressionType(fp, formatspecs, False)
6902
- if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
6903
- formatspecs = formatspecs[compresscheck]
6904
- fp.seek(0, 0)
6905
- if(not fp):
6906
- return False
6907
- fp.seek(0, 0)
6908
- else:
6909
- infile = RemoveWindowsPath(infile)
6910
- checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
6911
- if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
6912
- formatspecs = formatspecs[checkcompressfile]
6913
- if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
6914
- return TarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
6915
- elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
6916
- return ZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
6917
- elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
6918
- return RarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
6919
- elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
6920
- return SevenZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
6921
- elif(IsSingleDict(formatspecs) and checkcompressfile != formatspecs['format_magic']):
6922
- return False
6923
- elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
6924
- return False
6925
- compresscheck = CheckCompressionType(infile, formatspecs, True)
6926
- if(not compresscheck):
6927
- fextname = os.path.splitext(infile)[1]
6928
- if(fextname == ".gz"):
6929
- compresscheck = "gzip"
6930
- elif(fextname == ".bz2"):
6931
- compresscheck = "bzip2"
6932
- elif(fextname == ".zst"):
6933
- compresscheck = "zstd"
6934
- elif(fextname == ".lz4" or fextname == ".clz4"):
6935
- compresscheck = "lz4"
6936
- elif(fextname == ".lzo" or fextname == ".lzop"):
6937
- compresscheck = "lzo"
6938
- elif(fextname == ".lzma"):
6939
- compresscheck = "lzma"
6940
- elif(fextname == ".xz"):
6941
- compresscheck = "xz"
6942
- elif(fextname == ".zz" or fextname == ".zl" or fextname == ".zlib"):
6943
- compresscheck = "zlib"
6944
- else:
6945
- return False
6946
- if(not compresscheck):
6947
- return False
6948
- fp = UncompressFile(infile, formatspecs, "rb")
6949
- try:
6950
- fp.seek(0, 2);
6951
- except OSError:
6952
- SeekToEndOfFile(fp);
6953
- except ValueError:
6954
- SeekToEndOfFile(fp);
6955
- CatSize = fp.tell();
6956
- CatSizeEnd = CatSize;
6957
- fp.seek(curloc, 0)
6958
- if(curloc > 0):
6959
- fp.seek(0, 0)
6960
- if(IsNestedDict(formatspecs)):
6961
- compresschecking = CheckCompressionType(fp, formatspecs, False)
6962
- if(compresschecking not in formatspecs):
6963
- return False
6964
- else:
6965
- formatspecs = formatspecs[compresschecking]
6966
- fp.seek(0, 0)
6967
- inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
6968
- formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
6969
- formdelszie = len(formatspecs['format_delimiter'])
6970
- formdel = fp.read(formdelszie).decode("UTF-8")
6971
- if(formstring != formatspecs['format_magic']+inheaderver):
6972
- return False
6973
- if(formdel != formatspecs['format_delimiter']):
6974
- return False
6975
- if(formatspecs['new_style']):
6976
- inheader = ReadFileHeaderDataBySize(
6977
- fp, formatspecs['format_delimiter'])
6978
- else:
6979
- inheader = ReadFileHeaderDataWoSize(
6980
- fp, formatspecs['format_delimiter'])
6981
- fnumextrafieldsize = int(inheader[5], 16)
6982
- fnumextrafields = int(inheader[6], 16)
6983
- fextrafieldslist = []
6984
- extrastart = 7
6985
- extraend = extrastart + fnumextrafields
6986
- while(extrastart < extraend):
6987
- fextrafieldslist.append(inheader[extrastart])
6988
- extrastart = extrastart + 1
6989
- if(fnumextrafields==1):
6990
- try:
6991
- fextrafieldslist = json.loads(base64.b64decode(fextrafieldslist[0]).decode("UTF-8"))
6992
- fnumextrafields = len(fextrafieldslist)
6993
- except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
6994
- try:
6995
- fextrafieldslist = json.loads(fextrafieldslist[0])
6996
- except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
6997
- pass
6998
- if(curloc > 0):
6999
- fp.seek(curloc, 0)
7000
- formversion = re.findall("([\\d]+)", formstring)
7001
- fheadsize = int(inheader[0], 16)
7002
- fnumfields = int(inheader[1], 16)
7003
- fhencoding = inheader[2]
7004
- fostype = inheader[3]
7005
- fnumfiles = int(inheader[4], 16)
7006
- fprechecksumtype = inheader[-2]
7007
- fprechecksum = inheader[-1]
7008
- headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
7009
- newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
7010
- if(not headercheck and not skipchecksum):
7011
- VerbosePrintOut(
7012
- "File Header Checksum Error with file at offset " + str(0))
7013
- VerbosePrintOut("'" + fprechecksum + "' != " +
7014
- "'" + newfcs + "'")
7015
- return False
7016
- formversions = re.search('(.*?)(\\d+)', formstring).groups()
7017
- fcompresstype = compresscheck
7018
- if(fcompresstype==formatspecs['format_magic']):
7019
- fcompresstype = ""
7020
- outlist = {'fnumfiles': fnumfiles, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
7021
- seekto = fnumfiles - 1
7022
- filefound = False
7023
- if(seekto >= 0):
7024
- il = -1
7025
- while (fp.tell() < CatSizeEnd) if seektoend else (il < seekto):
7026
- prefhstart = fp.tell()
7027
- if(formatspecs['new_style']):
7028
- preheaderdata = ReadFileHeaderDataBySize(
7029
- fp, formatspecs['format_delimiter'])
7030
- else:
7031
- preheaderdata = ReadFileHeaderDataWoSize(
7032
- fp, formatspecs['format_delimiter'])
7033
- if(len(preheaderdata) == 0):
7034
- break
7035
- prefheadsize = int(preheaderdata[0], 16)
7036
- prefnumfields = int(preheaderdata[1], 16)
7037
- preftype = int(preheaderdata[2], 16)
7038
- prefencoding = preheaderdata[3]
7039
- prefencoding = preheaderdata[4]
7040
- if(re.findall("^[.|/]", preheaderdata[5])):
7041
- prefname = preheaderdata[5]
7042
- else:
7043
- prefname = "./"+preheaderdata[5]
7044
- prefbasedir = os.path.dirname(prefname)
7045
- preflinkname = preheaderdata[6]
7046
- prefsize = int(preheaderdata[7], 16)
7047
- prefatime = int(preheaderdata[8], 16)
7048
- prefmtime = int(preheaderdata[9], 16)
7049
- prefctime = int(preheaderdata[10], 16)
7050
- prefbtime = int(preheaderdata[11], 16)
7051
- prefmode = int(preheaderdata[12], 16)
7052
- prefchmode = stat.S_IMODE(prefmode)
7053
- preftypemod = stat.S_IFMT(prefmode)
7054
- prefwinattributes = int(preheaderdata[13], 16)
7055
- prefcompression = preheaderdata[14]
7056
- prefcsize = int(preheaderdata[15], 16)
7057
- prefuid = int(preheaderdata[16], 16)
7058
- prefuname = preheaderdata[17]
7059
- prefgid = int(preheaderdata[18], 16)
7060
- prefgname = preheaderdata[19]
7061
- fid = int(preheaderdata[20], 16)
7062
- finode = int(preheaderdata[21], 16)
7063
- flinkcount = int(preheaderdata[22], 16)
7064
- prefdev = int(preheaderdata[23], 16)
7065
- prefdev_minor = int(preheaderdata[24], 16)
7066
- prefdev_major = int(preheaderdata[25], 16)
7067
- prefseeknextfile = preheaderdata[26]
7068
- prefjsontype = preheaderdata[27]
7069
- prefjsonlen = int(preheaderdata[28], 16)
7070
- prefjsonsize = int(preheaderdata[29], 16)
7071
- prefjsonchecksumtype = preheaderdata[30]
7072
- prefjsonchecksum = preheaderdata[31]
7073
- prefhend = fp.tell() - 1
7074
- prefjstart = fp.tell()
7075
- prefjoutfprejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
7076
- prefjend = fp.tell()
7077
- fp.seek(len(formatspecs['format_delimiter']), 1)
7078
- prejsonfcs = GetFileChecksum(prefjoutfprejsoncontent, prefjsonchecksumtype, True, formatspecs)
7079
- prefextrasize = int(preheaderdata[32], 16)
7080
- prefextrafields = int(preheaderdata[33], 16)
7081
- extrastart = 34
7082
- extraend = extrastart + prefextrafields
7083
- prefcs = preheaderdata[-2].lower()
7084
- prenewfcs = preheaderdata[-1].lower()
7085
- prenewfcs = GetHeaderChecksum(
7086
- preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
7087
- if(prefcs != prenewfcs and not skipchecksum):
7088
- VerbosePrintOut("File Header Checksum Error with file " +
7089
- prefname + " at offset " + str(prefhstart))
7090
- VerbosePrintOut("'" + prefcs + "' != " +
7091
- "'" + prenewfcs + "'")
7092
- return False
7093
- if(prefjsonsize > 0):
7094
- if(prejsonfcs != prefjsonchecksum and not skipchecksum):
7095
- VerbosePrintOut("File JSON Data Checksum Error with file " +
7096
- prefname + " at offset " + str(prefjstart))
7097
- VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
7098
- return False
7099
- prefcontentstart = fp.tell()
7100
- prefcontents = ""
7101
- pyhascontents = False
7102
- if(prefsize > 0):
7103
- if(prefcompression):
7104
- prefcontents = fp.read(prefsize)
7105
- else:
7106
- prefcontents = fp.read(prefcsize)
7107
- prenewfccs = GetFileChecksum(
7108
- prefcontents, preheaderdata[-3].lower(), False, formatspecs)
7109
- pyhascontents = True
7110
- if(prefccs != prenewfccs and not skipchecksum):
7111
- VerbosePrintOut("File Content Checksum Error with file " +
7112
- prefname + " at offset " + str(prefcontentstart))
7113
- VerbosePrintOut("'" + prefccs +
7114
- "' != " + "'" + prenewfccs + "'")
7115
- return False
7116
- if(re.findall("^\\+([0-9]+)", prefseeknextfile)):
7117
- fseeknextasnum = int(prefseeknextfile.replace("+", ""))
7118
- if(abs(fseeknextasnum) == 0):
7119
- pass
7120
- fp.seek(fseeknextasnum, 1)
7121
- elif(re.findall("^\\-([0-9]+)", prefseeknextfile)):
7122
- fseeknextasnum = int(prefseeknextfile)
7123
- if(abs(fseeknextasnum) == 0):
7124
- pass
7125
- fp.seek(fseeknextasnum, 1)
7126
- elif(re.findall("^([0-9]+)", prefseeknextfile)):
7127
- fseeknextasnum = int(prefseeknextfile)
7128
- if(abs(fseeknextasnum) == 0):
7129
- pass
7130
- fp.seek(fseeknextasnum, 0)
7131
- else:
7132
- return False
7133
- il = il + 1
7134
- filefound = False
7135
- if(prefname == seekfile):
7136
- filefound = True
7137
- break
7138
- fp.seek(seekstart, 0)
7139
- fileidnum = il
7140
- outfheadsize = int(preheaderdata[0], 16)
7141
- outfnumfields = int(preheaderdata[1], 16)
7142
- outftype = int(preheaderdata[2], 16)
7143
- outfencoding = preheaderdata[3]
7144
- if(re.findall("^[.|/]", preheaderdata[4])):
7145
- outfname = preheaderdata[4]
7146
- else:
7147
- outfname = "./"+preheaderdata[4]
7148
- outflinkname = preheaderdata[5]
7149
- outfsize = int(preheaderdata[6], 16)
7150
- outfbasedir = os.path.dirname(outfname)
7151
- if(filefound):
7152
- outlist = {'fid': fileidnum, 'foffset': fp.tell(), 'ftype': outftype, 'fencoding': outfencoding, 'fname': outfname,
7153
- 'fbasedir': outfbasedir, 'flinkname': outflinkname, 'fsize': outfsize}
7154
- else:
7155
- return False
7156
- if(returnfp):
7157
- outlist.update({'fp': fp})
7158
- else:
7159
- fp.close()
7160
- return outlist
7161
-
7162
-
7163
- def ArchiveFileValidate(infile, fmttype="auto", formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
7164
- if(verbose):
7165
- logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
7166
- if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
7167
- formatspecs = formatspecs[fmttype]
7168
- elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
7169
- fmttype = "auto"
7170
- curloc = 0
7171
- if(hasattr(infile, "read") or hasattr(infile, "write")):
7172
- curloc = infile.tell()
7173
- fp = infile
7174
- fp.seek(0, 0)
7175
- fp = UncompressFileAlt(fp, formatspecs)
7176
- checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
7177
- if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
7178
- formatspecs = formatspecs[checkcompressfile]
7179
- if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
7180
- return TarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
7181
- elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
7182
- return ZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
7183
- elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
7184
- return RarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
7185
- elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
7186
- return SevenZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
7187
- elif(IsSingleDict(formatspecs) and checkcompressfile != formatspecs['format_magic']):
7188
- return False
7189
- elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
7190
- return False
7191
- if(not fp):
7192
- return False
7193
- fp.seek(0, 0)
7194
- elif(infile == "-"):
7195
- fp = MkTempFile()
7196
- if(hasattr(sys.stdin, "buffer")):
7197
- shutil.copyfileobj(sys.stdin.buffer, fp)
7198
- else:
7199
- shutil.copyfileobj(sys.stdin, fp)
7200
- fp.seek(0, 0)
7201
- fp = UncompressFileAlt(fp, formatspecs)
7202
- checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
7203
- if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
7204
- formatspecs = formatspecs[checkcompressfile]
7205
- if(not fp):
7206
- return False
7207
- fp.seek(0, 0)
7208
- elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
7209
- fp = MkTempFile()
7210
- fp.write(infile)
7211
- fp.seek(0, 0)
7212
- fp = UncompressFileAlt(fp, formatspecs)
7213
- compresscheck = CheckCompressionType(fp, formatspecs, False)
7214
- if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
7215
- formatspecs = formatspecs[compresscheck]
7216
- if(not fp):
7217
- return False
7218
- fp.seek(0, 0)
7219
- elif(re.findall("^(http|https|ftp|ftps|sftp):\\/\\/", infile)):
7220
- fp = download_file_from_internet_file(infile)
7221
- fp = UncompressFileAlt(fp, formatspecs)
7222
- compresscheck = CheckCompressionType(fp, formatspecs, False)
7223
- if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
7224
- formatspecs = formatspecs[compresscheck]
7225
- fp.seek(0, 0)
7226
- if(not fp):
7227
- return False
7228
- fp.seek(0, 0)
7229
- else:
7230
- infile = RemoveWindowsPath(infile)
7231
- checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
7232
- if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
7233
- formatspecs = formatspecs[checkcompressfile]
7234
- if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
7235
- return TarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
7236
- elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
7237
- return ZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
7238
- elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
7239
- return RarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
7240
- elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
7241
- return SevenZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
7242
- elif(IsSingleDict(formatspecs) and checkcompressfile != formatspecs['format_magic']):
7243
- return False
7244
- elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
7245
- return False
7246
- compresscheck = CheckCompressionType(infile, formatspecs, True)
7247
- if(not compresscheck):
7248
- fextname = os.path.splitext(infile)[1]
7249
- if(fextname == ".gz"):
7250
- compresscheck = "gzip"
7251
- elif(fextname == ".bz2"):
7252
- compresscheck = "bzip2"
7253
- elif(fextname == ".zst"):
7254
- compresscheck = "zstd"
7255
- elif(fextname == ".lz4" or fextname == ".clz4"):
7256
- compresscheck = "lz4"
7257
- elif(fextname == ".lzo" or fextname == ".lzop"):
7258
- compresscheck = "lzo"
7259
- elif(fextname == ".lzma"):
7260
- compresscheck = "lzma"
7261
- elif(fextname == ".xz"):
7262
- compresscheck = "xz"
7263
- elif(fextname == ".zz" or fextname == ".zl" or fextname == ".zlib"):
7264
- compresscheck = "zlib"
7265
- else:
7266
- return False
7267
- if(not compresscheck):
7268
- return False
7269
- fp = UncompressFile(infile, formatspecs, "rb")
7270
- try:
7271
- fp.seek(0, 2);
7272
- except OSError:
7273
- SeekToEndOfFile(fp);
7274
- except ValueError:
7275
- SeekToEndOfFile(fp);
7276
- CatSize = fp.tell();
7277
- CatSizeEnd = CatSize;
7278
- fp.seek(curloc, 0)
7279
- if(curloc > 0):
7280
- fp.seek(0, 0)
7281
- if(IsNestedDict(formatspecs)):
7282
- compresschecking = CheckCompressionType(fp, formatspecs, False)
7283
- if(compresschecking not in formatspecs):
7284
- return False
7285
- else:
7286
- formatspecs = formatspecs[compresschecking]
7287
- fp.seek(0, 0)
7288
- inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
7289
- formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
7290
- formdelszie = len(formatspecs['format_delimiter'])
7291
- formdel = fp.read(formdelszie).decode("UTF-8")
7292
- if(formstring != formatspecs['format_magic']+inheaderver):
7293
- return False
7294
- if(formdel != formatspecs['format_delimiter']):
7295
- return False
7296
- if(formatspecs['new_style']):
7297
- inheader = ReadFileHeaderDataBySize(
7298
- fp, formatspecs['format_delimiter'])
7299
- else:
7300
- inheader = ReadFileHeaderDataWoSize(
7301
- fp, formatspecs['format_delimiter'])
7302
- fnumextrafieldsize = int(inheader[5], 16)
7303
- fnumextrafields = int(inheader[6], 16)
7304
- extrastart = 7
7305
- extraend = extrastart + fnumextrafields
7306
- if(curloc > 0):
7307
- fp.seek(curloc, 0)
7308
- formversion = re.findall("([\\d]+)", formstring)
7309
- fheadsize = int(inheader[0], 16)
7310
- fnumfields = int(inheader[1], 16)
7311
- fhencoding = inheader[2]
7312
- fostype = inheader[3]
7313
- fnumfiles = int(inheader[4], 16)
7314
- fprechecksumtype = inheader[-2]
7315
- fprechecksum = inheader[-1]
7316
- il = 0
7317
- headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
7318
- newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
7319
- valid_archive = True
7320
- invalid_archive = False
7321
- if(verbose):
7322
- if(hasattr(infile, "read") or hasattr(infile, "write")):
7323
- try:
7324
- VerbosePrintOut(infile.name)
7325
- except AttributeError:
7326
- pass
7327
- elif(sys.version_info[0] >= 3 and isinstance(infile, bytes)):
7328
- pass
7329
- else:
7330
- VerbosePrintOut(infile)
7331
- VerbosePrintOut("Number of Records " + str(fnumfiles))
7332
- if(headercheck):
7333
- if(verbose):
7334
- VerbosePrintOut("File Header Checksum Passed at offset " + str(0))
7335
- VerbosePrintOut("'" + fprechecksum + "' == " +
7336
- "'" + newfcs + "'")
6883
+ valid_archive = True
6884
+ invalid_archive = False
6885
+ if(verbose):
6886
+ if(hasattr(infile, "read") or hasattr(infile, "write")):
6887
+ try:
6888
+ VerbosePrintOut(infile.name)
6889
+ except AttributeError:
6890
+ pass
6891
+ elif(sys.version_info[0] >= 3 and isinstance(infile, bytes)):
6892
+ pass
6893
+ else:
6894
+ VerbosePrintOut(infile)
6895
+ VerbosePrintOut("Number of Records " + str(fnumfiles))
6896
+ if(headercheck):
6897
+ if(verbose):
6898
+ VerbosePrintOut("File Header Checksum Passed at offset " + str(0))
6899
+ VerbosePrintOut("'" + fprechecksum + "' == " +
6900
+ "'" + newfcs + "'")
7337
6901
  else:
7338
6902
  if(verbose):
7339
6903
  VerbosePrintOut("File Header Checksum Failed at offset " + str(0))
@@ -7514,18 +7078,18 @@ def ArchiveFileValidateMultiple(infile, fmttype="auto", formatspecs=__file_forma
7514
7078
  def ArchiveFileValidateMultipleFiles(infile, fmttype="auto", formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7515
7079
  return ArchiveFileValidateMultiple(infile, fmttype, formatspecs, verbose, returnfp)
7516
7080
 
7517
- def ArchiveFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
7081
+ def ArchiveFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
7518
7082
  if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
7519
7083
  formatspecs = formatspecs[fmttype]
7520
7084
  elif(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype not in formatspecs):
7521
7085
  fmttype = "auto"
7522
- curloc = 0
7086
+ curloc = filestart
7523
7087
  if(hasattr(infile, "read") or hasattr(infile, "write")):
7524
7088
  curloc = infile.tell()
7525
7089
  fp = infile
7526
- fp.seek(0, 0)
7527
- fp = UncompressFileAlt(fp, formatspecs)
7528
- checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
7090
+ fp.seek(filestart, 0)
7091
+ fp = UncompressFileAlt(fp, formatspecs, filestart)
7092
+ checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
7529
7093
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
7530
7094
  formatspecs = formatspecs[checkcompressfile]
7531
7095
  if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
@@ -7542,45 +7106,45 @@ def ArchiveFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=
7542
7106
  return False
7543
7107
  if(not fp):
7544
7108
  return False
7545
- fp.seek(0, 0)
7109
+ fp.seek(filestart, 0)
7546
7110
  elif(infile == "-"):
7547
7111
  fp = MkTempFile()
7548
7112
  if(hasattr(sys.stdin, "buffer")):
7549
7113
  shutil.copyfileobj(sys.stdin.buffer, fp)
7550
7114
  else:
7551
7115
  shutil.copyfileobj(sys.stdin, fp)
7552
- fp.seek(0, 0)
7553
- fp = UncompressFileAlt(fp, formatspecs)
7554
- checkcompressfile = CheckCompressionSubType(fp, formatspecs, True)
7116
+ fp.seek(filestart, 0)
7117
+ fp = UncompressFileAlt(fp, formatspecs, filestart)
7118
+ checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
7555
7119
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
7556
7120
  formatspecs = formatspecs[checkcompressfile]
7557
7121
  if(not fp):
7558
7122
  return False
7559
- fp.seek(0, 0)
7123
+ fp.seek(filestart, 0)
7560
7124
  elif(isinstance(infile, bytes) and sys.version_info[0] >= 3):
7561
7125
  fp = MkTempFile()
7562
7126
  fp.write(infile)
7563
- fp.seek(0, 0)
7564
- fp = UncompressFileAlt(fp, formatspecs)
7565
- compresscheck = CheckCompressionType(fp, formatspecs, False)
7127
+ fp.seek(filestart, 0)
7128
+ fp = UncompressFileAlt(fp, formatspecs, filestart)
7129
+ compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
7566
7130
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
7567
7131
  formatspecs = formatspecs[compresscheck]
7568
7132
  if(not fp):
7569
7133
  return False
7570
- fp.seek(0, 0)
7571
- elif(re.findall("^(http|https|ftp|ftps|sftp):\\/\\/", infile)):
7134
+ fp.seek(filestart, 0)
7135
+ elif(re.findall(__download_proto_support__, infile)):
7572
7136
  fp = download_file_from_internet_file(infile)
7573
- fp = UncompressFileAlt(fp, formatspecs)
7574
- compresscheck = CheckCompressionType(fp, formatspecs, False)
7137
+ fp = UncompressFileAlt(fp, formatspecs, filestart)
7138
+ compresscheck = CheckCompressionType(fp, formatspecs, filestart, False)
7575
7139
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
7576
7140
  formatspecs = formatspecs[compresscheck]
7577
- fp.seek(0, 0)
7141
+ fp.seek(filestart, 0)
7578
7142
  if(not fp):
7579
7143
  return False
7580
- fp.seek(0, 0)
7144
+ fp.seek(filestart, 0)
7581
7145
  else:
7582
7146
  infile = RemoveWindowsPath(infile)
7583
- checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
7147
+ checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
7584
7148
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
7585
7149
  formatspecs = formatspecs[checkcompressfile]
7586
7150
  if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
@@ -7595,7 +7159,7 @@ def ArchiveFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=
7595
7159
  return False
7596
7160
  elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
7597
7161
  return False
7598
- compresscheck = CheckCompressionType(infile, formatspecs, True)
7162
+ compresscheck = CheckCompressionType(infile, formatspecs, filestart, True)
7599
7163
  if(not compresscheck):
7600
7164
  fextname = os.path.splitext(infile)[1]
7601
7165
  if(fextname == ".gz"):
@@ -7618,25 +7182,23 @@ def ArchiveFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=
7618
7182
  return False
7619
7183
  if(not compresscheck):
7620
7184
  return False
7621
- fp = UncompressFile(infile, formatspecs, "rb")
7185
+ fp = UncompressFile(infile, formatspecs, "rb", filestart)
7622
7186
  try:
7623
- fp.seek(0, 2);
7187
+ fp.seek(0, 2)
7624
7188
  except OSError:
7625
- SeekToEndOfFile(fp);
7189
+ SeekToEndOfFile(fp)
7626
7190
  except ValueError:
7627
- SeekToEndOfFile(fp);
7628
- CatSize = fp.tell();
7191
+ SeekToEndOfFile(fp)
7192
+ CatSize = fp.tell()
7629
7193
  CatSizeEnd = CatSize;
7630
7194
  fp.seek(curloc, 0)
7631
- if(curloc > 0):
7632
- fp.seek(0, 0)
7633
7195
  if(IsNestedDict(formatspecs)):
7634
- compresschecking = CheckCompressionType(fp, formatspecs, False)
7196
+ compresschecking = CheckCompressionType(fp, formatspecs, filestart, False)
7635
7197
  if(compresschecking not in formatspecs):
7636
7198
  return False
7637
7199
  else:
7638
7200
  formatspecs = formatspecs[compresschecking]
7639
- fp.seek(0, 0)
7201
+ fp.seek(filestart, 0)
7640
7202
  inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
7641
7203
  formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
7642
7204
  formdelszie = len(formatspecs['format_delimiter'])
@@ -7668,8 +7230,6 @@ def ArchiveFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=
7668
7230
  fextrafieldslist = json.loads(fextrafieldslist[0])
7669
7231
  except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
7670
7232
  pass
7671
- if(curloc > 0):
7672
- fp.seek(curloc, 0)
7673
7233
  formversion = re.findall("([\\d]+)", formstring)
7674
7234
  fheadsize = int(inheader[0], 16)
7675
7235
  fnumfields = int(inheader[1], 16)
@@ -7934,7 +7494,7 @@ def ArchiveFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=
7934
7494
  outfcontents.seek(0, 0)
7935
7495
  if(uncompress):
7936
7496
  cfcontents = UncompressFileAlt(
7937
- outfcontents, formatspecs)
7497
+ outfcontents, formatspecs, 0)
7938
7498
  cfcontents.seek(0, 0)
7939
7499
  outfcontents = MkTempFile()
7940
7500
  shutil.copyfileobj(cfcontents, outfcontents)
@@ -7979,49 +7539,49 @@ def ArchiveFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=
7979
7539
  return outlist
7980
7540
 
7981
7541
 
7982
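Illustrative sketch (editor-added, not part of the diff): the reworked reader above now takes a filestart byte offset alongside seekstart/seekend. A minimal example of the intended call pattern, assuming the function is importable from the pyarchivefile module and that "example.cat" is an existing archive in the default format:

    from pyarchivefile import ArchiveFileToArray

    # Parse the archive from the start of the file (filestart=0 is the default);
    # listonly=False keeps contents, contentasfile=True exposes them as file objects.
    archive = ArchiveFileToArray("example.cat", fmttype="auto", filestart=0,
                                 listonly=False, contentasfile=True)
    if archive:  # the reader returns False on unreadable or unrecognized input
        for entry in archive['ffilelist']:
            print(entry['fname'], entry['fsize'])
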
- def MultipleArchiveFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
7542
+ def MultipleArchiveFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
7983
7543
  if(isinstance(infile, (list, tuple, ))):
7984
7544
  pass
7985
7545
  else:
7986
7546
  infile = [infile]
7987
7547
  outretval = {}
7988
7548
  for curfname in infile:
7989
- curretfile[curfname] = ArchiveFileToArray(curfname, fmttype, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
7549
+ curretfile[curfname] = ArchiveFileToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
7990
7550
  return outretval
7991
7551
 
7992
- def MultipleArchiveFilesToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
7993
- return MultipleArchiveFileToArray(infile, fmttype, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
7552
+ def MultipleArchiveFilesToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
7553
+ return MultipleArchiveFileToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
7994
7554
 
7995
7555
 
7996
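The wrapper above fans ArchiveFileToArray out over a list of inputs and keys the results by path. A minimal sketch of the equivalent loop, assuming the reader is importable from pyarchivefile and that the archive names are placeholders:

    from pyarchivefile import ArchiveFileToArray

    inputs = ["first.cat", "second.cat"]  # placeholder archive paths
    results = {}
    for name in inputs:
        # same per-file call the wrapper makes, with the new filestart offset
        results[name] = ArchiveFileToArray(name, fmttype="auto", filestart=0,
                                           listonly=True)
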
- def ArchiveFileStringToArray(instr, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
7997
- checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
7556
+ def ArchiveFileStringToArray(instr, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
7557
+ checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
7998
7558
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
7999
7559
  formatspecs = formatspecs[checkcompressfile]
8000
7560
  fp = MkTempFile(instr)
8001
- listarchivefiles = ArchiveFileToArray(fp, "auto", seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
8002
- return listarchivefiles
7561
+ listarrayfiles = ArchiveFileToArray(fp, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
7562
+ return listarrayfiles
8003
7563
 
8004
7564
 
8005
7565
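ArchiveFileStringToArray above is meant for archive data already held in memory; it wraps the bytes in a temporary file and hands them to ArchiveFileToArray. A sketch of the intended call pattern (placeholder file name, import path assumed):

    from pyarchivefile import ArchiveFileStringToArray

    with open("example.cat", "rb") as fh:
        data = fh.read()  # a complete archive image held in memory
    entries = ArchiveFileStringToArray(data, filestart=0, listonly=True)
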
  def TarFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False, returnfp=False):
8006
- checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
7566
+ checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
8007
7567
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
8008
7568
  formatspecs = formatspecs[checkcompressfile]
8009
7569
  fp = MkTempFile()
8010
7570
  fp = PackArchiveFileFromTarFile(
8011
7571
  infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
8012
- listarchivefiles = ArchiveFileToArray(fp, "auto", seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
8013
- return listarchivefiles
7572
+ listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
7573
+ return listarrayfiles
8014
7574
 
8015
7575
 
8016
7576
  def ZipFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False, returnfp=False):
8017
- checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
7577
+ checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
8018
7578
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
8019
7579
  formatspecs = formatspecs[checkcompressfile]
8020
7580
  fp = MkTempFile()
8021
7581
  fp = PackArchiveFileFromZipFile(
8022
7582
  infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
8023
- listarchivefiles = ArchiveFileToArray(fp, "auto", seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
8024
- return listarchivefiles
7583
+ listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
7584
+ return listarrayfiles
8025
7585
 
8026
7586
 
8027
7587
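TarFileToArray and ZipFileToArray repack an existing tar or zip archive into the native format in a temporary buffer and then parse it back. A sketch of the intended call pattern (placeholder archive names, import path assumed):

    from pyarchivefile import TarFileToArray, ZipFileToArray

    # Both helpers are intended to return the same dictionary structure
    # that ArchiveFileToArray produces.
    tar_entries = TarFileToArray("backup.tar", listonly=True)
    zip_entries = ZipFileToArray("backup.zip", listonly=True)
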
  if(not rarfile_support):
@@ -8030,14 +7590,14 @@ if(not rarfile_support):
8030
7590
 
8031
7591
  if(rarfile_support):
8032
7592
  def RarFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False, returnfp=False):
8033
- checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
7593
+ checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
8034
7594
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
8035
7595
  formatspecs = formatspecs[checkcompressfile]
8036
7596
  fp = MkTempFile()
8037
7597
  fp = PackArchiveFileFromRarFile(
8038
7598
  infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
8039
- listarchivefiles = ArchiveFileToArray(fp, "auto", seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
8040
- return listarchivefiles
7599
+ listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
7600
+ return listarrayfiles
8041
7601
 
8042
7602
  if(not py7zr_support):
8043
7603
  def SevenZipFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False, returnfp=False):
@@ -8045,18 +7605,18 @@ if(not py7zr_support):
8045
7605
 
8046
7606
  if(py7zr_support):
8047
7607
  def SevenZipFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_dict__, seektoend=False, returnfp=False):
8048
- checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
7608
+ checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
8049
7609
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
8050
7610
  formatspecs = formatspecs[checkcompressfile]
8051
7611
  fp = MkTempFile()
8052
7612
  fp = PackArchiveFileFromSevenZipFile(
8053
7613
  infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
8054
- listarchivefiles = ArchiveFileToArray(fp, "auto", seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
8055
- return listarchivefiles
7614
+ listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
7615
+ return listarrayfiles
8056
7616
 
8057
7617
 
8058
- def InFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
8059
- checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
7618
+ def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
7619
+ checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
8060
7620
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
8061
7621
  formatspecs = formatspecs[checkcompressfile]
8062
7622
  if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
@@ -8068,78 +7628,78 @@ def InFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile=
8068
7628
  elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
8069
7629
  return SevenZipFileToArray(infile, seekstart, seekend, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
8070
7630
  elif(checkcompressfile == formatspecs['format_magic']):
8071
- return ArchiveFileToArray(infile, "auto", seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
7631
+ return ArchiveFileToArray(infile, "auto", filestart, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
8072
7632
  else:
8073
7633
  return False
8074
7634
  return False
8075
7635
 
8076
7636
 
8077
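InFileToArray above sniffs the container type and routes the input to the tar, zip, rar or 7z converters, or to ArchiveFileToArray for the native format, returning False when nothing matches. A sketch, assuming pyarchivefile is importable and the input name is a placeholder:

    from pyarchivefile import InFileToArray

    entries = InFileToArray("mixed-input.bin", filestart=0)
    if entries is False:
        print("unsupported or unreadable input")
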
- def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, seekstart=0, seekend=0, listonly=False, skipchecksum=False, checksumtype=["crc32", "crc32", "crc32"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
7637
+ def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, skipchecksum=False, checksumtype=["crc32", "crc32", "crc32"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
8078
7638
  outarray = MkTempFile()
8079
7639
  packform = PackArchiveFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
8080
7640
  compressionlevel, followlink, checksumtype, extradata, formatspecs, verbose, True)
8081
- listarchivefiles = ArchiveFileToArray(outarray, "auto", seekstart, seekend, listonly, True, skipchecksum, formatspecs, seektoend, returnfp)
8082
- return listarchivefiles
7641
+ listarrayfiles = ArchiveFileToArray(outarray, "auto", filestart, seekstart, seekend, listonly, True, skipchecksum, formatspecs, seektoend, returnfp)
7642
+ return listarrayfiles
8083
7643
 
8084
7644
 
8085
7645
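ListDirToArray above packs a directory listing into an in-memory archive via PackArchiveFile and immediately parses it back. A sketch of the intended call pattern (placeholder directory, import path assumed):

    from pyarchivefile import ListDirToArray

    # Pack ./data into a temporary in-memory archive and return the parsed listing.
    listing = ListDirToArray(["./data"], compression="auto",
                             checksumtype=["crc32", "crc32", "crc32"])
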
  def ArchiveFileArrayToArrayIndex(inarray, returnfp=False):
8086
7646
  if(isinstance(inarray, dict)):
8087
- listarchivefiles = inarray
7647
+ listarrayfiles = inarray
8088
7648
  else:
8089
7649
  return False
8090
- if(not listarchivefiles):
7650
+ if(not listarrayfiles):
8091
7651
  return False
8092
- outarray = {'list': listarchivefiles, 'filetoid': {}, 'idtofile': {}, 'filetypes': {'directories': {'filetoid': {}, 'idtofile': {}}, 'files': {'filetoid': {}, 'idtofile': {}}, 'links': {'filetoid': {}, 'idtofile': {}}, 'symlinks': {'filetoid': {
7652
+ outarray = {'list': listarrayfiles, 'filetoid': {}, 'idtofile': {}, 'filetypes': {'directories': {'filetoid': {}, 'idtofile': {}}, 'files': {'filetoid': {}, 'idtofile': {}}, 'links': {'filetoid': {}, 'idtofile': {}}, 'symlinks': {'filetoid': {
8093
7653
  }, 'idtofile': {}}, 'hardlinks': {'filetoid': {}, 'idtofile': {}}, 'character': {'filetoid': {}, 'idtofile': {}}, 'block': {'filetoid': {}, 'idtofile': {}}, 'fifo': {'filetoid': {}, 'idtofile': {}}, 'devices': {'filetoid': {}, 'idtofile': {}}}}
8094
7654
  if(returnfp):
8095
- outarray.update({'fp': listarchivefiles['fp']})
8096
- lenlist = len(listarchivefiles['ffilelist'])
7655
+ outarray.update({'fp': listarrayfiles['fp']})
7656
+ lenlist = len(listarrayfiles['ffilelist'])
8097
7657
  lcfi = 0
8098
- lcfx = int(listarchivefiles['fnumfiles'])
8099
- if(lenlist > listarchivefiles['fnumfiles'] or lenlist < listarchivefiles['fnumfiles']):
7658
+ lcfx = int(listarrayfiles['fnumfiles'])
7659
+ if(lenlist > listarrayfiles['fnumfiles'] or lenlist < listarrayfiles['fnumfiles']):
8100
7660
  lcfx = int(lenlist)
8101
7661
  else:
8102
- lcfx = int(listarchivefiles['fnumfiles'])
7662
+ lcfx = int(listarrayfiles['fnumfiles'])
8103
7663
  while(lcfi < lcfx):
8104
- filetoidarray = {listarchivefiles['ffilelist'][lcfi]
8105
- ['fname']: listarchivefiles['ffilelist'][lcfi]['fid']}
8106
- idtofilearray = {listarchivefiles['ffilelist'][lcfi]
8107
- ['fid']: listarchivefiles['ffilelist'][lcfi]['fname']}
7664
+ filetoidarray = {listarrayfiles['ffilelist'][lcfi]
7665
+ ['fname']: listarrayfiles['ffilelist'][lcfi]['fid']}
7666
+ idtofilearray = {listarrayfiles['ffilelist'][lcfi]
7667
+ ['fid']: listarrayfiles['ffilelist'][lcfi]['fname']}
8108
7668
  outarray['filetoid'].update(filetoidarray)
8109
7669
  outarray['idtofile'].update(idtofilearray)
8110
- if(listarchivefiles['ffilelist'][lcfi]['ftype'] == 0 or listarchivefiles['ffilelist'][lcfi]['ftype'] == 7):
7670
+ if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 0 or listarrayfiles['ffilelist'][lcfi]['ftype'] == 7):
8111
7671
  outarray['filetypes']['files']['filetoid'].update(filetoidarray)
8112
7672
  outarray['filetypes']['files']['idtofile'].update(idtofilearray)
8113
- if(listarchivefiles['ffilelist'][lcfi]['ftype'] == 1):
7673
+ if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 1):
8114
7674
  outarray['filetypes']['hardlinks']['filetoid'].update(
8115
7675
  filetoidarray)
8116
7676
  outarray['filetypes']['hardlinks']['idtofile'].update(
8117
7677
  idtofilearray)
8118
7678
  outarray['filetypes']['links']['filetoid'].update(filetoidarray)
8119
7679
  outarray['filetypes']['links']['idtofile'].update(idtofilearray)
8120
- if(listarchivefiles['ffilelist'][lcfi]['ftype'] == 2):
7680
+ if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 2):
8121
7681
  outarray['filetypes']['symlinks']['filetoid'].update(filetoidarray)
8122
7682
  outarray['filetypes']['symlinks']['idtofile'].update(idtofilearray)
8123
7683
  outarray['filetypes']['links']['filetoid'].update(filetoidarray)
8124
7684
  outarray['filetypes']['links']['idtofile'].update(idtofilearray)
8125
- if(listarchivefiles['ffilelist'][lcfi]['ftype'] == 3):
7685
+ if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 3):
8126
7686
  outarray['filetypes']['character']['filetoid'].update(
8127
7687
  filetoidarray)
8128
7688
  outarray['filetypes']['character']['idtofile'].update(
8129
7689
  idtofilearray)
8130
7690
  outarray['filetypes']['devices']['filetoid'].update(filetoidarray)
8131
7691
  outarray['filetypes']['devices']['idtofile'].update(idtofilearray)
8132
- if(listarchivefiles['ffilelist'][lcfi]['ftype'] == 4):
7692
+ if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 4):
8133
7693
  outarray['filetypes']['block']['filetoid'].update(filetoidarray)
8134
7694
  outarray['filetypes']['block']['idtofile'].update(idtofilearray)
8135
7695
  outarray['filetypes']['devices']['filetoid'].update(filetoidarray)
8136
7696
  outarray['filetypes']['devices']['idtofile'].update(idtofilearray)
8137
- if(listarchivefiles['ffilelist'][lcfi]['ftype'] == 5):
7697
+ if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 5):
8138
7698
  outarray['filetypes']['directories']['filetoid'].update(
8139
7699
  filetoidarray)
8140
7700
  outarray['filetypes']['directories']['idtofile'].update(
8141
7701
  idtofilearray)
8142
- if(listarchivefiles['ffilelist'][lcfi]['ftype'] == 6):
7702
+ if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 6):
8143
7703
  outarray['filetypes']['symlinks']['filetoid'].update(filetoidarray)
8144
7704
  outarray['filetypes']['symlinks']['idtofile'].update(idtofilearray)
8145
7705
  outarray['filetypes']['devices']['filetoid'].update(filetoidarray)
@@ -8148,13 +7708,13 @@ def ArchiveFileArrayToArrayIndex(inarray, returnfp=False):
8148
7708
  return outarray
8149
7709
 
8150
7710
 
8151
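ArchiveFileArrayToArrayIndex above turns a parsed archive dictionary into lookup tables: an overall filetoid/idtofile pair plus the same maps split by entry type. A sketch, assuming the archive was read with ArchiveFileToArray and the path is a placeholder:

    from pyarchivefile import ArchiveFileToArray, ArchiveFileArrayToArrayIndex

    archive = ArchiveFileToArray("example.cat", fmttype="auto", filestart=0)
    index = ArchiveFileArrayToArrayIndex(archive)
    if index:
        print(index['filetoid'])                              # name -> id
        print(index['filetypes']['directories']['idtofile'])  # id -> name, directories only
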
- def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
7711
+ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
8152
7712
  if(isinstance(infile, dict)):
8153
- listarchivefiles = infile
7713
+ listarrayfiles = infile
8154
7714
  else:
8155
7715
  if(infile != "-" and not isinstance(infile, bytes) and not hasattr(infile, "read") and not hasattr(infile, "write")):
8156
7716
  infile = RemoveWindowsPath(infile)
8157
- listarchivefiles = ArchiveFileToArray(infile, "auto", seekstart, seekend, False, True, skipchecksum, formatspecs, seektoend, returnfp)
7717
+ listarrayfiles = ArchiveFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, skipchecksum, formatspecs, seektoend, returnfp)
8158
7718
  if(IsNestedDict(formatspecs) and fmttype in formatspecs):
8159
7719
  formatspecs = formatspecs[fmttype]
8160
7720
  elif(IsNestedDict(formatspecs) and fmttype not in formatspecs):
@@ -8180,14 +7740,14 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
8180
7740
  os.unlink(outfile)
8181
7741
  except OSError:
8182
7742
  pass
8183
- if(not listarchivefiles):
7743
+ if(not listarrayfiles):
8184
7744
  return False
8185
7745
  if(outfile == "-" or outfile is None):
8186
7746
  verbose = False
8187
7747
  fp = MkTempFile()
8188
7748
  elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
8189
7749
  fp = outfile
8190
- elif(re.findall("^(ftp|ftps|sftp):\\/\\/", outfile)):
7750
+ elif(re.findall(__upload_proto_support__, outfile)):
8191
7751
  fp = MkTempFile()
8192
7752
  else:
8193
7753
  fbasename = os.path.splitext(outfile)[0]
@@ -8200,19 +7760,19 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
8200
7760
  return False
8201
7761
  formver = formatspecs['format_ver']
8202
7762
  fileheaderver = str(int(formver.replace(".", "")))
8203
- lenlist = len(listarchivefiles['ffilelist'])
8204
- fnumfiles = int(listarchivefiles['fnumfiles'])
7763
+ lenlist = len(listarrayfiles['ffilelist'])
7764
+ fnumfiles = int(listarrayfiles['fnumfiles'])
8205
7765
  if(lenlist > fnumfiles or lenlist < fnumfiles):
8206
7766
  fnumfiles = lenlist
8207
- AppendFileHeader(fp, fnumfiles, listarchivefiles['fencoding'], [], checksumtype[0], formatspecs)
8208
- lenlist = len(listarchivefiles['ffilelist'])
8209
- fnumfiles = int(listarchivefiles['fnumfiles'])
7767
+ AppendFileHeader(fp, fnumfiles, listarrayfiles['fencoding'], [], checksumtype[0], formatspecs)
7768
+ lenlist = len(listarrayfiles['ffilelist'])
7769
+ fnumfiles = int(listarrayfiles['fnumfiles'])
8210
7770
  lcfi = 0
8211
- lcfx = int(listarchivefiles['fnumfiles'])
8212
- if(lenlist > listarchivefiles['fnumfiles'] or lenlist < listarchivefiles['fnumfiles']):
7771
+ lcfx = int(listarrayfiles['fnumfiles'])
7772
+ if(lenlist > listarrayfiles['fnumfiles'] or lenlist < listarrayfiles['fnumfiles']):
8213
7773
  lcfx = int(lenlist)
8214
7774
  else:
8215
- lcfx = int(listarchivefiles['fnumfiles'])
7775
+ lcfx = int(listarrayfiles['fnumfiles'])
8216
7776
  curinode = 0
8217
7777
  curfid = 0
8218
7778
  inodelist = []
@@ -8220,66 +7780,66 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
8220
7780
  filetoinode = {}
8221
7781
  reallcfi = 0
8222
7782
  while(lcfi < lcfx):
8223
- fencoding = listarchivefiles['ffilelist'][reallcfi]['fencoding']
8224
- fcencoding = listarchivefiles['ffilelist'][reallcfi]['fencoding']
8225
- if(re.findall("^[.|/]", listarchivefiles['ffilelist'][reallcfi]['fname'])):
8226
- fname = listarchivefiles['ffilelist'][reallcfi]['fname']
7783
+ fencoding = listarrayfiles['ffilelist'][reallcfi]['fencoding']
7784
+ fcencoding = listarrayfiles['ffilelist'][reallcfi]['fencoding']
7785
+ if(re.findall("^[.|/]", listarrayfiles['ffilelist'][reallcfi]['fname'])):
7786
+ fname = listarrayfiles['ffilelist'][reallcfi]['fname']
8227
7787
  else:
8228
- fname = "./"+listarchivefiles['ffilelist'][reallcfi]['fname']
7788
+ fname = "./"+listarrayfiles['ffilelist'][reallcfi]['fname']
8229
7789
  if(verbose):
8230
7790
  VerbosePrintOut(fname)
8231
7791
  fheadersize = format(
8232
- int(listarchivefiles['ffilelist'][reallcfi]['fheadersize']), 'x').lower()
7792
+ int(listarrayfiles['ffilelist'][reallcfi]['fheadersize']), 'x').lower()
8233
7793
  fsize = format(
8234
- int(listarchivefiles['ffilelist'][reallcfi]['fsize']), 'x').lower()
8235
- flinkname = listarchivefiles['ffilelist'][reallcfi]['flinkname']
7794
+ int(listarrayfiles['ffilelist'][reallcfi]['fsize']), 'x').lower()
7795
+ flinkname = listarrayfiles['ffilelist'][reallcfi]['flinkname']
8236
7796
  fatime = format(
8237
- int(listarchivefiles['ffilelist'][reallcfi]['fatime']), 'x').lower()
7797
+ int(listarrayfiles['ffilelist'][reallcfi]['fatime']), 'x').lower()
8238
7798
  fmtime = format(
8239
- int(listarchivefiles['ffilelist'][reallcfi]['fmtime']), 'x').lower()
7799
+ int(listarrayfiles['ffilelist'][reallcfi]['fmtime']), 'x').lower()
8240
7800
  fctime = format(
8241
- int(listarchivefiles['ffilelist'][reallcfi]['fctime']), 'x').lower()
7801
+ int(listarrayfiles['ffilelist'][reallcfi]['fctime']), 'x').lower()
8242
7802
  fbtime = format(
8243
- int(listarchivefiles['ffilelist'][reallcfi]['fbtime']), 'x').lower()
7803
+ int(listarrayfiles['ffilelist'][reallcfi]['fbtime']), 'x').lower()
8244
7804
  fmode = format(
8245
- int(listarchivefiles['ffilelist'][reallcfi]['fmode']), 'x').lower()
7805
+ int(listarrayfiles['ffilelist'][reallcfi]['fmode']), 'x').lower()
8246
7806
  fchmode = format(
8247
- int(listarchivefiles['ffilelist'][reallcfi]['fchmode']), 'x').lower()
7807
+ int(listarrayfiles['ffilelist'][reallcfi]['fchmode']), 'x').lower()
8248
7808
  fuid = format(
8249
- int(listarchivefiles['ffilelist'][reallcfi]['fuid']), 'x').lower()
8250
- funame = listarchivefiles['ffilelist'][reallcfi]['funame']
7809
+ int(listarrayfiles['ffilelist'][reallcfi]['fuid']), 'x').lower()
7810
+ funame = listarrayfiles['ffilelist'][reallcfi]['funame']
8251
7811
  fgid = format(
8252
- int(listarchivefiles['ffilelist'][reallcfi]['fgid']), 'x').lower()
8253
- fgname = listarchivefiles['ffilelist'][reallcfi]['fgname']
7812
+ int(listarrayfiles['ffilelist'][reallcfi]['fgid']), 'x').lower()
7813
+ fgname = listarrayfiles['ffilelist'][reallcfi]['fgname']
8254
7814
  finode = format(
8255
- int(listarchivefiles['ffilelist'][reallcfi]['finode']), 'x').lower()
7815
+ int(listarrayfiles['ffilelist'][reallcfi]['finode']), 'x').lower()
8256
7816
  flinkcount = format(
8257
- int(listarchivefiles['ffilelist'][reallcfi]['flinkcount']), 'x').lower()
7817
+ int(listarrayfiles['ffilelist'][reallcfi]['flinkcount']), 'x').lower()
8258
7818
  fwinattributes = format(
8259
- int(listarchivefiles['ffilelist'][reallcfi]['fwinattributes']), 'x').lower()
8260
- fcompression = listarchivefiles['ffilelist'][reallcfi]['fcompression']
7819
+ int(listarrayfiles['ffilelist'][reallcfi]['fwinattributes']), 'x').lower()
7820
+ fcompression = listarrayfiles['ffilelist'][reallcfi]['fcompression']
8261
7821
  fcsize = format(
8262
- int(listarchivefiles['ffilelist'][reallcfi]['fcsize']), 'x').lower()
7822
+ int(listarrayfiles['ffilelist'][reallcfi]['fcsize']), 'x').lower()
8263
7823
  fdev = format(
8264
- int(listarchivefiles['ffilelist'][reallcfi]['fdev']), 'x').lower()
7824
+ int(listarrayfiles['ffilelist'][reallcfi]['fdev']), 'x').lower()
8265
7825
  fdev_minor = format(
8266
- int(listarchivefiles['ffilelist'][reallcfi]['fminor']), 'x').lower()
7826
+ int(listarrayfiles['ffilelist'][reallcfi]['fminor']), 'x').lower()
8267
7827
  fdev_major = format(
8268
- int(listarchivefiles['ffilelist'][reallcfi]['fmajor']), 'x').lower()
8269
- fseeknextfile = listarchivefiles['ffilelist'][reallcfi]['fseeknextfile']
8270
- if(len(listarchivefiles['ffilelist'][reallcfi]['fextralist']) > listarchivefiles['ffilelist'][reallcfi]['fextrafields'] and len(listarchivefiles['ffilelist'][reallcfi]['fextralist']) > 0):
8271
- listarchivefiles['ffilelist'][reallcfi]['fextrafields'] = len(
8272
- listarchivefiles['ffilelist'][reallcfi]['fextralist'])
7828
+ int(listarrayfiles['ffilelist'][reallcfi]['fmajor']), 'x').lower()
7829
+ fseeknextfile = listarrayfiles['ffilelist'][reallcfi]['fseeknextfile']
7830
+ if(len(listarrayfiles['ffilelist'][reallcfi]['fextralist']) > listarrayfiles['ffilelist'][reallcfi]['fextrafields'] and len(listarrayfiles['ffilelist'][reallcfi]['fextralist']) > 0):
7831
+ listarrayfiles['ffilelist'][reallcfi]['fextrafields'] = len(
7832
+ listarrayfiles['ffilelist'][reallcfi]['fextralist'])
8273
7833
  if(not followlink and len(extradata) <= 0):
8274
- extradata = listarchivefiles['ffilelist'][reallcfi]['fextralist']
7834
+ extradata = listarrayfiles['ffilelist'][reallcfi]['fextralist']
8275
7835
  if(not followlink and len(jsondata) <= 0):
8276
- jsondata = listarchivefiles['ffilelist'][reallcfi]['fjsondata']
8277
- fcontents = listarchivefiles['ffilelist'][reallcfi]['fcontents']
8278
- if(not listarchivefiles['ffilelist'][reallcfi]['fcontentasfile']):
7836
+ jsondata = listarrayfiles['ffilelist'][reallcfi]['fjsondata']
7837
+ fcontents = listarrayfiles['ffilelist'][reallcfi]['fcontents']
7838
+ if(not listarrayfiles['ffilelist'][reallcfi]['fcontentasfile']):
8279
7839
  fcontents = MkTempFile(fcontents)
8280
- typechecktest = CheckCompressionType(fcontents, closefp=False)
7840
+ typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
8281
7841
  fcontents.seek(0, 0)
8282
- fcencoding = GetFileEncoding(fcontents, False)
7842
+ fcencoding = GetFileEncoding(fcontents, 0, False)
8283
7843
  fcompression = ""
8284
7844
  fcsize = format(int(0), 'x').lower()
8285
7845
  curcompression = "none"
@@ -8322,10 +7882,10 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
8322
7882
  fcontents.close()
8323
7883
  fcontents = cfcontents
8324
7884
  if followlink:
8325
- if(listarchivefiles['ffilelist'][reallcfi]['ftype'] == 1 or listarchivefiles['ffilelist'][reallcfi]['ftype'] == 2):
8326
- getflinkpath = listarchivefiles['ffilelist'][reallcfi]['flinkname']
8327
- flinkid = prelistarchivefiles['filetoid'][getflinkpath]
8328
- flinkinfo = listarchivefiles['ffilelist'][flinkid]
7885
+ if(listarrayfiles['ffilelist'][reallcfi]['ftype'] == 1 or listarrayfiles['ffilelist'][reallcfi]['ftype'] == 2):
7886
+ getflinkpath = listarrayfiles['ffilelist'][reallcfi]['flinkname']
7887
+ flinkid = prelistarrayfiles['filetoid'][getflinkpath]
7888
+ flinkinfo = listarrayfiles['ffilelist'][flinkid]
8329
7889
  fheadersize = format(
8330
7890
  int(flinkinfo['fheadersize']), 'x').lower()
8331
7891
  fsize = format(int(flinkinfo['fsize']), 'x').lower()
@@ -8362,10 +7922,10 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
8362
7922
  ftypehex = format(flinkinfo['ftype'], 'x').lower()
8363
7923
  else:
8364
7924
  ftypehex = format(
8365
- listarchivefiles['ffilelist'][reallcfi]['ftype'], 'x').lower()
7925
+ listarrayfiles['ffilelist'][reallcfi]['ftype'], 'x').lower()
8366
7926
  fcurfid = format(curfid, 'x').lower()
8367
7927
  if(not followlink and finode != 0):
8368
- if(listarchivefiles['ffilelist'][reallcfi]['ftype'] != 1):
7928
+ if(listarrayfiles['ffilelist'][reallcfi]['ftype'] != 1):
8369
7929
  fcurinode = format(int(curinode), 'x').lower()
8370
7930
  inodetofile.update({curinode: fname})
8371
7931
  filetoinode.update({fname: curinode})
@@ -8415,7 +7975,7 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
8415
7975
  outvar = fp.read()
8416
7976
  fp.close()
8417
7977
  return outvar
8418
- elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall("^(ftp|ftps|sftp):\\/\\/", outfile)):
7978
+ elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
8419
7979
  fp = CompressOpenFileAlt(
8420
7980
  fp, compression, compressionlevel, compressionuselist, formatspecs)
8421
7981
  fp.seek(0, 0)
@@ -8428,50 +7988,50 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
8428
7988
  return True
8429
7989
 
8430
7990
 
8431
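RePackArchiveFile above re-reads an archive (optionally from a filestart offset) and writes it back out, recompressing as requested; outfile may be a path, "-", an open file object, or an ftp/ftps/sftp upload URL. A sketch with placeholder names; the compression value is an assumption and depends on which codecs are available at runtime:

    from pyarchivefile import RePackArchiveFile

    ok = RePackArchiveFile("example.cat", "example-repacked.cat",
                           compression="gzip", compressionlevel=9, verbose=True)
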
- def RePackArchiveFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
7991
+ def RePackArchiveFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
8432
7992
  fp = MkTempFile(instr)
8433
- listarchivefiles = RePackArchiveFile(fp, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist,
8434
- checksumtype, skipchecksum, extradata, formatspecs, verbose, returnfp)
8435
- return listarchivefiles
7993
+ listarrayfiles = RePackArchiveFile(fp, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
7994
+ checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
7995
+ return listarrayfiles
8436
7996
 
8437
7997
 
8438
- def PackArchiveFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, skipchecksum=False, checksumtype=["crc32", "crc32", "crc32"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
7998
+ def PackArchiveFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
8439
7999
  outarray = MkTempFile()
8440
8000
  packform = PackArchiveFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
8441
8001
  compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, True)
8442
- listarchivefiles = RePackArchiveFile(outarray, outfile, fmttype, compression, compresswholefile,
8443
- compressionlevel, checksumtype, skipchecksum, extradata, formatspecs, verbose, returnfp)
8444
- return listarchivefiles
8002
+ listarrayfiles = RePackArchiveFile(outarray, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
8003
+ checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
8004
+ return listarrayfiles
8445
8005
 
8446
8006
 
8447
- def UnPackArchiveFile(infile, outdir=None, followlink=False, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, preservepermissions=True, preservetime=True, seektoend=False, verbose=False, returnfp=False):
8007
+ def UnPackArchiveFile(infile, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, preservepermissions=True, preservetime=True, seektoend=False, verbose=False, returnfp=False):
8448
8008
  if(outdir is not None):
8449
8009
  outdir = RemoveWindowsPath(outdir)
8450
8010
  if(verbose):
8451
8011
  logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
8452
8012
  if(isinstance(infile, dict)):
8453
- listarchivefiles = infile
8013
+ listarrayfiles = infile
8454
8014
  else:
8455
8015
  if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
8456
8016
  infile = RemoveWindowsPath(infile)
8457
- listarchivefiles = ArchiveFileToArray(infile, "auto", seekstart, seekend, False, True, skipchecksum, formatspecs, seektoend, returnfp)
8458
- if(not listarchivefiles):
8017
+ listarrayfiles = ArchiveFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, skipchecksum, formatspecs, seektoend, returnfp)
8018
+ if(not listarrayfiles):
8459
8019
  return False
8460
- lenlist = len(listarchivefiles['ffilelist'])
8461
- fnumfiles = int(listarchivefiles['fnumfiles'])
8020
+ lenlist = len(listarrayfiles['ffilelist'])
8021
+ fnumfiles = int(listarrayfiles['fnumfiles'])
8462
8022
  lcfi = 0
8463
- lcfx = int(listarchivefiles['fnumfiles'])
8464
- if(lenlist > listarchivefiles['fnumfiles'] or lenlist < listarchivefiles['fnumfiles']):
8023
+ lcfx = int(listarrayfiles['fnumfiles'])
8024
+ if(lenlist > listarrayfiles['fnumfiles'] or lenlist < listarrayfiles['fnumfiles']):
8465
8025
  lcfx = int(lenlist)
8466
8026
  else:
8467
- lcfx = int(listarchivefiles['fnumfiles'])
8027
+ lcfx = int(listarrayfiles['fnumfiles'])
8468
8028
  while(lcfi < lcfx):
8469
8029
  funame = ""
8470
8030
  try:
8471
8031
  import pwd
8472
8032
  try:
8473
8033
  userinfo = pwd.getpwuid(
8474
- listarchivefiles['ffilelist'][lcfi]['fuid'])
8034
+ listarrayfiles['ffilelist'][lcfi]['fuid'])
8475
8035
  funame = userinfo.pw_name
8476
8036
  except KeyError:
8477
8037
  funame = ""
@@ -8482,7 +8042,7 @@ def UnPackArchiveFile(infile, outdir=None, followlink=False, seekstart=0, seeken
8482
8042
  import grp
8483
8043
  try:
8484
8044
  groupinfo = grp.getgrgid(
8485
- listarchivefiles['ffilelist'][lcfi]['fgid'])
8045
+ listarrayfiles['ffilelist'][lcfi]['fgid'])
8486
8046
  fgname = groupinfo.gr_name
8487
8047
  except KeyError:
8488
8048
  fgname = ""
@@ -8490,15 +8050,15 @@ def UnPackArchiveFile(infile, outdir=None, followlink=False, seekstart=0, seeken
8490
8050
  fgname = ""
8491
8051
  if(verbose):
8492
8052
  VerbosePrintOut(PrependPath(
8493
- outdir, listarchivefiles['ffilelist'][lcfi]['fname']))
8494
- if(listarchivefiles['ffilelist'][lcfi]['ftype'] == 0 or listarchivefiles['ffilelist'][lcfi]['ftype'] == 7):
8495
- with open(PrependPath(outdir, listarchivefiles['ffilelist'][lcfi]['fname']), "wb") as fpc:
8496
- if(not listarchivefiles['ffilelist'][lcfi]['fcontentasfile']):
8497
- listarchivefiles['ffilelist'][lcfi]['fcontents'] = MkTempFile(
8498
- listarchivefiles['ffilelist'][lcfi]['fcontents'])
8499
- listarchivefiles['ffilelist'][lcfi]['fcontents'].seek(0, 0)
8053
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
8054
+ if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 0 or listarrayfiles['ffilelist'][lcfi]['ftype'] == 7):
8055
+ with open(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), "wb") as fpc:
8056
+ if(not listarrayfiles['ffilelist'][lcfi]['fcontentasfile']):
8057
+ listarrayfiles['ffilelist'][lcfi]['fcontents'] = MkTempFile(
8058
+ listarrayfiles['ffilelist'][lcfi]['fcontents'])
8059
+ listarrayfiles['ffilelist'][lcfi]['fcontents'].seek(0, 0)
8500
8060
  shutil.copyfileobj(
8501
- listarchivefiles['ffilelist'][lcfi]['fcontents'], fpc)
8061
+ listarrayfiles['ffilelist'][lcfi]['fcontents'], fpc)
8502
8062
  try:
8503
8063
  fpc.flush()
8504
8064
  if(hasattr(os, "sync")):
@@ -8509,20 +8069,20 @@ def UnPackArchiveFile(infile, outdir=None, followlink=False, seekstart=0, seeken
8509
8069
  pass
8510
8070
  except OSError:
8511
8071
  pass
8512
- if(hasattr(os, "chown") and funame == listarchivefiles['ffilelist'][lcfi]['funame'] and fgname == listarchivefiles['ffilelist'][lcfi]['fgname'] and preservepermissions):
8513
- os.chown(PrependPath(outdir, listarchivefiles['ffilelist'][lcfi]['fname']),
8514
- listarchivefiles['ffilelist'][lcfi]['fuid'], listarchivefiles['ffilelist'][lcfi]['fgid'])
8072
+ if(hasattr(os, "chown") and funame == listarrayfiles['ffilelist'][lcfi]['funame'] and fgname == listarrayfiles['ffilelist'][lcfi]['fgname'] and preservepermissions):
8073
+ os.chown(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']),
8074
+ listarrayfiles['ffilelist'][lcfi]['fuid'], listarrayfiles['ffilelist'][lcfi]['fgid'])
8515
8075
  if(preservepermissions):
8516
8076
  os.chmod(PrependPath(
8517
- outdir, listarchivefiles['ffilelist'][lcfi]['fname']), listarchivefiles['ffilelist'][lcfi]['fchmode'])
8077
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']), listarrayfiles['ffilelist'][lcfi]['fchmode'])
8518
8078
  if(preservetime):
8519
- os.utime(PrependPath(outdir, listarchivefiles['ffilelist'][lcfi]['fname']), (
8520
- listarchivefiles['ffilelist'][lcfi]['fatime'], listarchivefiles['ffilelist'][lcfi]['fmtime']))
8521
- if(listarchivefiles['ffilelist'][lcfi]['ftype'] == 1):
8079
+ os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
8080
+ listarrayfiles['ffilelist'][lcfi]['fatime'], listarrayfiles['ffilelist'][lcfi]['fmtime']))
8081
+ if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 1):
8522
8082
  if(followlink):
8523
- getflinkpath = listarchivefiles['ffilelist'][lcfi]['flinkname']
8524
- flinkid = prelistarchivefiles['filetoid'][getflinkpath]
8525
- flinkinfo = listarchivefiles['ffilelist'][flinkid]
8083
+ getflinkpath = listarrayfiles['ffilelist'][lcfi]['flinkname']
8084
+ flinkid = prelistarrayfiles['filetoid'][getflinkpath]
8085
+ flinkinfo = listarrayfiles['ffilelist'][flinkid]
8526
8086
  funame = ""
8527
8087
  try:
8528
8088
  import pwd
@@ -8544,7 +8104,7 @@ def UnPackArchiveFile(infile, outdir=None, followlink=False, seekstart=0, seeken
8544
8104
  except ImportError:
8545
8105
  fgname = ""
8546
8106
  if(flinkinfo['ftype'] == 0 or flinkinfo['ftype'] == 7):
8547
- with open(PrependPath(outdir, listarchivefiles['ffilelist'][lcfi]['fname']), "wb") as fpc:
8107
+ with open(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), "wb") as fpc:
8548
8108
  if(not flinkinfo['fcontentasfile']):
8549
8109
  flinkinfo['fcontents'] = MkTempFile(
8550
8110
  flinkinfo['fcontents'])
@@ -8562,46 +8122,46 @@ def UnPackArchiveFile(infile, outdir=None, followlink=False, seekstart=0, seeken
8562
8122
  pass
8563
8123
  if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
8564
8124
  os.chown(PrependPath(
8565
- outdir, listarchivefiles['ffilelist'][lcfi]['fname']), flinkinfo['fuid'], flinkinfo['fgid'])
8125
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fuid'], flinkinfo['fgid'])
8566
8126
  if(preservepermissions):
8567
8127
  os.chmod(PrependPath(
8568
- outdir, listarchivefiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
8128
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
8569
8129
  if(preservetime):
8570
- os.utime(PrependPath(outdir, listarchivefiles['ffilelist'][lcfi]['fname']), (
8130
+ os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
8571
8131
  flinkinfo['fatime'], flinkinfo['fmtime']))
8572
8132
  if(flinkinfo['ftype'] == 1):
8573
8133
  os.link(flinkinfo['flinkname'], PrependPath(
8574
- outdir, listarchivefiles['ffilelist'][lcfi]['fname']))
8134
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
8575
8135
  if(flinkinfo['ftype'] == 2):
8576
8136
  os.symlink(flinkinfo['flinkname'], PrependPath(
8577
- outdir, listarchivefiles['ffilelist'][lcfi]['fname']))
8137
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
8578
8138
  if(flinkinfo['ftype'] == 5):
8579
8139
  if(preservepermissions):
8580
8140
  os.mkdir(PrependPath(
8581
- outdir, listarchivefiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
8141
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
8582
8142
  else:
8583
8143
  os.mkdir(PrependPath(
8584
- outdir, listarchivefiles['ffilelist'][lcfi]['fname']))
8144
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
8585
8145
  if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
8586
8146
  os.chown(PrependPath(
8587
- outdir, listarchivefiles['ffilelist'][lcfi]['fname']), flinkinfo['fuid'], flinkinfo['fgid'])
8147
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fuid'], flinkinfo['fgid'])
8588
8148
  if(preservepermissions):
8589
8149
  os.chmod(PrependPath(
8590
- outdir, listarchivefiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
8150
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
8591
8151
  if(preservetime):
8592
- os.utime(PrependPath(outdir, listarchivefiles['ffilelist'][lcfi]['fname']), (
8152
+ os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
8593
8153
  flinkinfo['fatime'], flinkinfo['fmtime']))
8594
8154
  if(flinkinfo['ftype'] == 6 and hasattr(os, "mkfifo")):
8595
8155
  os.mkfifo(PrependPath(
8596
- outdir, listarchivefiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
8156
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
8597
8157
  else:
8598
- os.link(listarchivefiles['ffilelist'][lcfi]['flinkname'], PrependPath(
8599
- outdir, listarchivefiles['ffilelist'][lcfi]['fname']))
8600
- if(listarchivefiles['ffilelist'][lcfi]['ftype'] == 2):
8158
+ os.link(listarrayfiles['ffilelist'][lcfi]['flinkname'], PrependPath(
8159
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
8160
+ if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 2):
8601
8161
  if(followlink):
8602
- getflinkpath = listarchivefiles['ffilelist'][lcfi]['flinkname']
8603
- flinkid = prelistarchivefiles['filetoid'][getflinkpath]
8604
- flinkinfo = listarchivefiles['ffilelist'][flinkid]
8162
+ getflinkpath = listarrayfiles['ffilelist'][lcfi]['flinkname']
8163
+ flinkid = prelistarrayfiles['filetoid'][getflinkpath]
8164
+ flinkinfo = listarrayfiles['ffilelist'][flinkid]
8605
8165
  funame = ""
8606
8166
  try:
8607
8167
  import pwd
@@ -8623,7 +8183,7 @@ def UnPackArchiveFile(infile, outdir=None, followlink=False, seekstart=0, seeken
8623
8183
  except ImportError:
8624
8184
  fgname = ""
8625
8185
  if(flinkinfo['ftype'] == 0 or flinkinfo['ftype'] == 7):
8626
- with open(PrependPath(outdir, listarchivefiles['ffilelist'][lcfi]['fname']), "wb") as fpc:
8186
+ with open(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), "wb") as fpc:
8627
8187
  if(not flinkinfo['fcontentasfile']):
8628
8188
  flinkinfo['fcontents'] = MkTempFile(
8629
8189
  flinkinfo['fcontents'])
@@ -8641,71 +8201,71 @@ def UnPackArchiveFile(infile, outdir=None, followlink=False, seekstart=0, seeken
8641
8201
  pass
8642
8202
  if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
8643
8203
  os.chown(PrependPath(
8644
- outdir, listarchivefiles['ffilelist'][lcfi]['fname']), flinkinfo['fuid'], flinkinfo['fgid'])
8204
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fuid'], flinkinfo['fgid'])
8645
8205
  if(preservepermissions):
8646
8206
  os.chmod(PrependPath(
8647
- outdir, listarchivefiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
8207
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
8648
8208
  if(preservetime):
8649
- os.utime(PrependPath(outdir, listarchivefiles['ffilelist'][lcfi]['fname']), (
8209
+ os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
8650
8210
  flinkinfo['fatime'], flinkinfo['fmtime']))
8651
8211
  if(flinkinfo['ftype'] == 1):
8652
8212
  os.link(flinkinfo['flinkname'], PrependPath(
8653
- outdir, listarchivefiles['ffilelist'][lcfi]['fname']))
8213
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
8654
8214
  if(flinkinfo['ftype'] == 2):
8655
8215
  os.symlink(flinkinfo['flinkname'], PrependPath(
8656
- outdir, listarchivefiles['ffilelist'][lcfi]['fname']))
8216
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
8657
8217
  if(flinkinfo['ftype'] == 5):
8658
8218
  if(preservepermissions):
8659
8219
  os.mkdir(PrependPath(
8660
- outdir, listarchivefiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
8220
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
8661
8221
  else:
8662
8222
  os.mkdir(PrependPath(
8663
- outdir, listarchivefiles['ffilelist'][lcfi]['fname']))
8223
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
8664
8224
  if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
8665
8225
  os.chown(PrependPath(
8666
- outdir, listarchivefiles['ffilelist'][lcfi]['fname']), flinkinfo['fuid'], flinkinfo['fgid'])
8226
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fuid'], flinkinfo['fgid'])
8667
8227
  if(preservepermissions):
8668
8228
  os.chmod(PrependPath(
8669
- outdir, listarchivefiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
8229
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
8670
8230
  if(preservetime):
8671
- os.utime(PrependPath(outdir, listarchivefiles['ffilelist'][lcfi]['fname']), (
8231
+ os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
8672
8232
  flinkinfo['fatime'], flinkinfo['fmtime']))
8673
8233
  if(flinkinfo['ftype'] == 6 and hasattr(os, "mkfifo")):
8674
8234
  os.mkfifo(PrependPath(
8675
- outdir, listarchivefiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
8235
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']), flinkinfo['fchmode'])
8676
8236
  else:
8677
- os.symlink(listarchivefiles['ffilelist'][lcfi]['flinkname'], PrependPath(
8678
- outdir, listarchivefiles['ffilelist'][lcfi]['fname']))
8679
- if(listarchivefiles['ffilelist'][lcfi]['ftype'] == 5):
8237
+ os.symlink(listarrayfiles['ffilelist'][lcfi]['flinkname'], PrependPath(
8238
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
8239
+ if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 5):
8680
8240
  if(preservepermissions):
8681
8241
  os.mkdir(PrependPath(
8682
- outdir, listarchivefiles['ffilelist'][lcfi]['fname']), listarchivefiles['ffilelist'][lcfi]['fchmode'])
8242
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']), listarrayfiles['ffilelist'][lcfi]['fchmode'])
8683
8243
  else:
8684
8244
  os.mkdir(PrependPath(
8685
- outdir, listarchivefiles['ffilelist'][lcfi]['fname']))
8686
- if(hasattr(os, "chown") and funame == listarchivefiles['ffilelist'][lcfi]['funame'] and fgname == listarchivefiles['ffilelist'][lcfi]['fgname'] and preservepermissions):
8687
- os.chown(PrependPath(outdir, listarchivefiles['ffilelist'][lcfi]['fname']),
8688
- listarchivefiles['ffilelist'][lcfi]['fuid'], listarchivefiles['ffilelist'][lcfi]['fgid'])
8245
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']))
8246
+ if(hasattr(os, "chown") and funame == listarrayfiles['ffilelist'][lcfi]['funame'] and fgname == listarrayfiles['ffilelist'][lcfi]['fgname'] and preservepermissions):
8247
+ os.chown(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']),
8248
+ listarrayfiles['ffilelist'][lcfi]['fuid'], listarrayfiles['ffilelist'][lcfi]['fgid'])
8689
8249
  if(preservepermissions):
8690
8250
  os.chmod(PrependPath(
8691
- outdir, listarchivefiles['ffilelist'][lcfi]['fname']), listarchivefiles['ffilelist'][lcfi]['fchmode'])
8251
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']), listarrayfiles['ffilelist'][lcfi]['fchmode'])
8692
8252
  if(preservetime):
8693
- os.utime(PrependPath(outdir, listarchivefiles['ffilelist'][lcfi]['fname']), (
8694
- listarchivefiles['ffilelist'][lcfi]['fatime'], listarchivefiles['ffilelist'][lcfi]['fmtime']))
8695
- if(listarchivefiles['ffilelist'][lcfi]['ftype'] == 6 and hasattr(os, "mkfifo")):
8253
+ os.utime(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']), (
8254
+ listarrayfiles['ffilelist'][lcfi]['fatime'], listarrayfiles['ffilelist'][lcfi]['fmtime']))
8255
+ if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 6 and hasattr(os, "mkfifo")):
8696
8256
  os.mkfifo(PrependPath(
8697
- outdir, listarchivefiles['ffilelist'][lcfi]['fname']), listarchivefiles['ffilelist'][lcfi]['fchmode'])
8257
+ outdir, listarrayfiles['ffilelist'][lcfi]['fname']), listarrayfiles['ffilelist'][lcfi]['fchmode'])
8698
8258
  lcfi = lcfi + 1
8699
8259
  if(returnfp):
8700
- return listarchivefiles['ffilelist']['fp']
8260
+ return listarrayfiles['ffilelist']['fp']
8701
8261
  else:
8702
8262
  return True
8703
8263
 
8704
8264
 
8705
8265
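UnPackArchiveFile above extracts an archive into outdir, recreating files, directories, links and fifos and, when enabled, restoring ownership, modes and timestamps. A sketch with placeholder paths (import path assumed):

    from pyarchivefile import UnPackArchiveFile

    UnPackArchiveFile("example.cat", outdir="./extracted",
                      preservepermissions=True, preservetime=True, verbose=True)
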
  def UnPackArchiveFileString(instr, outdir=None, followlink=False, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
8706
8266
  fp = MkTempFile(instr)
8707
- listarchivefiles = UnPackArchiveFile(fp, outdir, followlink, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
8708
- return listarchivefiles
8267
+ listarrayfiles = UnPackArchiveFile(fp, outdir, followlink, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
8268
+ return listarrayfiles
8709
8269
 
8710
8270
  def ftype_to_str(ftype):
8711
8271
  mapping = {
@@ -8722,64 +8282,64 @@ def ftype_to_str(ftype):
8722
8282
  # Default to "file" if unknown
8723
8283
  return mapping.get(ftype, "file")
8724
8284
 
8725
- def ArchiveFileListFiles(infile, fmttype="auto", seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
8285
+ def ArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
8726
8286
  if(verbose):
8727
8287
  logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
8728
8288
  if(isinstance(infile, dict)):
8729
- listarchivefiles = infile
8289
+ listarrayfiles = infile
8730
8290
  else:
8731
8291
  if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
8732
8292
  infile = RemoveWindowsPath(infile)
8733
- listarchivefiles = ArchiveFileToArray(infile, fmttype, seekstart, seekend, True, False, False, skipchecksum, formatspecs, seektoend, returnfp)
8734
- if(not listarchivefiles):
8293
+ listarrayfiles = ArchiveFileToArray(infile, fmttype, filestart, seekstart, seekend, True, False, False, skipchecksum, formatspecs, seektoend, returnfp)
8294
+ if(not listarrayfiles):
8735
8295
  return False
8736
- lenlist = len(listarchivefiles['ffilelist'])
8737
- fnumfiles = int(listarchivefiles['fnumfiles'])
8296
+ lenlist = len(listarrayfiles['ffilelist'])
8297
+ fnumfiles = int(listarrayfiles['fnumfiles'])
8738
8298
  lcfi = 0
8739
- lcfx = int(listarchivefiles['fnumfiles'])
8740
- if(lenlist > listarchivefiles['fnumfiles'] or lenlist < listarchivefiles['fnumfiles']):
8299
+ lcfx = int(listarrayfiles['fnumfiles'])
8300
+ if(lenlist > listarrayfiles['fnumfiles'] or lenlist < listarrayfiles['fnumfiles']):
8741
8301
  lcfx = int(lenlist)
8742
8302
  else:
8743
- lcfx = int(listarchivefiles['fnumfiles'])
8303
+ lcfx = int(listarrayfiles['fnumfiles'])
8744
8304
  returnval = {}
8745
8305
  while(lcfi < lcfx):
8746
- returnval.update({lcfi: listarchivefiles['ffilelist'][lcfi]['fname']})
8306
+ returnval.update({lcfi: listarrayfiles['ffilelist'][lcfi]['fname']})
8747
8307
  if(not verbose):
8748
- VerbosePrintOut(listarchivefiles['ffilelist'][lcfi]['fname'])
8308
+ VerbosePrintOut(listarrayfiles['ffilelist'][lcfi]['fname'])
8749
8309
  if(verbose):
8750
8310
  permissions = {'access': {'0': ('---'), '1': ('--x'), '2': ('-w-'), '3': ('-wx'), '4': (
8751
8311
  'r--'), '5': ('r-x'), '6': ('rw-'), '7': ('rwx')}, 'roles': {0: 'owner', 1: 'group', 2: 'other'}}
8752
- printfname = listarchivefiles['ffilelist'][lcfi]['fname']
8753
- if(listarchivefiles['ffilelist'][lcfi]['ftype'] == 1):
8754
- printfname = listarchivefiles['ffilelist'][lcfi]['fname'] + \
8755
- " link to " + listarchivefiles['ffilelist'][lcfi]['flinkname']
8756
- if(listarchivefiles['ffilelist'][lcfi]['ftype'] == 2):
8757
- printfname = listarchivefiles['ffilelist'][lcfi]['fname'] + \
8758
- " -> " + listarchivefiles['ffilelist'][lcfi]['flinkname']
8759
- fuprint = listarchivefiles['ffilelist'][lcfi]['funame']
8312
+ printfname = listarrayfiles['ffilelist'][lcfi]['fname']
8313
+ if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 1):
8314
+ printfname = listarrayfiles['ffilelist'][lcfi]['fname'] + \
8315
+ " link to " + listarrayfiles['ffilelist'][lcfi]['flinkname']
8316
+ if(listarrayfiles['ffilelist'][lcfi]['ftype'] == 2):
8317
+ printfname = listarrayfiles['ffilelist'][lcfi]['fname'] + \
8318
+ " -> " + listarrayfiles['ffilelist'][lcfi]['flinkname']
8319
+ fuprint = listarrayfiles['ffilelist'][lcfi]['funame']
8760
8320
  if(len(fuprint) <= 0):
8761
- fuprint = listarchivefiles['ffilelist'][lcfi]['fuid']
8762
- fgprint = listarchivefiles['ffilelist'][lcfi]['fgname']
8321
+ fuprint = listarrayfiles['ffilelist'][lcfi]['fuid']
8322
+ fgprint = listarrayfiles['ffilelist'][lcfi]['fgname']
8763
8323
  if(len(fgprint) <= 0):
8764
- fgprint = listarchivefiles['ffilelist'][lcfi]['fgid']
8324
+ fgprint = listarrayfiles['ffilelist'][lcfi]['fgid']
8765
8325
  if(newstyle):
8766
- VerbosePrintOut(ftype_to_str(listarchivefiles['ffilelist'][lcfi]['ftype']) + "\t" + listarchivefiles['ffilelist'][lcfi]['fcompression'] + "\t" + str(
8767
- listarchivefiles['ffilelist'][lcfi]['fsize']).rjust(15) + "\t" + printfname)
8326
+ VerbosePrintOut(ftype_to_str(listarrayfiles['ffilelist'][lcfi]['ftype']) + "\t" + listarrayfiles['ffilelist'][lcfi]['fcompression'] + "\t" + str(
8327
+ listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + "\t" + printfname)
8768
8328
  else:
8769
- VerbosePrintOut(PrintPermissionString(listarchivefiles['ffilelist'][lcfi]['fmode'], listarchivefiles['ffilelist'][lcfi]['ftype']) + " " + str(fuprint) + "/" + str(fgprint) + " " + str(
8770
- listarchivefiles['ffilelist'][lcfi]['fsize']).rjust(15) + " " + datetime.datetime.utcfromtimestamp(listarchivefiles['ffilelist'][lcfi]['fmtime']).strftime('%Y-%m-%d %H:%M') + " " + printfname)
8329
+ VerbosePrintOut(PrintPermissionString(listarrayfiles['ffilelist'][lcfi]['fmode'], listarrayfiles['ffilelist'][lcfi]['ftype']) + " " + str(fuprint) + "/" + str(fgprint) + " " + str(
8330
+ listarrayfiles['ffilelist'][lcfi]['fsize']).rjust(15) + " " + datetime.datetime.utcfromtimestamp(listarrayfiles['ffilelist'][lcfi]['fmtime']).strftime('%Y-%m-%d %H:%M') + " " + printfname)
8771
8331
  lcfi = lcfi + 1
8772
8332
  if(returnfp):
8773
- return listarchivefiles['fp']
8333
+ return listarrayfiles['fp']
8774
8334
  else:
8775
8335
  return True
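A hedged usage sketch of the updated listing call shown above; "example.cat" is an illustrative path, and the new filestart argument is assumed to be the byte offset where the archive begins inside the input (0 keeps the previous start-of-file behaviour):

    # Hypothetical call; prints one line per entry because verbose=True.
    ok = ArchiveFileListFiles("example.cat", fmttype="auto", filestart=0, verbose=True)
    if ok:
        print("listing finished")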
8776
8336
 
8777
8337
 
8778
8338
  def ArchiveFileStringListFiles(instr, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
8779
8339
  fp = MkTempFile(instr)
8780
- listarchivefiles = ArchiveFileListFiles(
8340
+ listarrayfiles = ArchiveFileListFiles(
8781
8341
  instr, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, returnfp)
8782
- return listarchivefiles
8342
+ return listarrayfiles
8783
8343
 
8784
8344
 
8785
8345
  def TarFileListFiles(infile, verbose=False, returnfp=False):
@@ -8795,7 +8355,7 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
8795
8355
  if(not infile):
8796
8356
  return False
8797
8357
  infile.seek(0, 0)
8798
- elif(re.findall("^(http|https|ftp|ftps|sftp):\\/\\/", infile)):
8358
+ elif(re.findall(__download_proto_support__, infile)):
8799
8359
  infile = download_file_from_internet_file(infile)
8800
8360
  infile.seek(0, 0)
8801
8361
  if(not infile):
@@ -8819,7 +8379,7 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
8819
8379
  return False
8820
8380
  try:
8821
8381
  if(hasattr(infile, "read") or hasattr(infile, "write")):
8822
- compresscheck = CheckCompressionType(infile, formatspecs, False)
8382
+ compresscheck = CheckCompressionType(infile, formatspecs, 0, False)
8823
8383
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
8824
8384
  formatspecs = formatspecs[compresscheck]
8825
8385
  if(compresscheck=="zstd"):
@@ -8831,7 +8391,7 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
8831
8391
  else:
8832
8392
  tarfp = tarfile.open(fileobj=infile, mode="r")
8833
8393
  else:
8834
- compresscheck = CheckCompressionType(infile, formatspecs, True)
8394
+ compresscheck = CheckCompressionType(infile, formatspecs, 0, True)
8835
8395
  if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
8836
8396
  formatspecs = formatspecs[compresscheck]
8837
8397
  if(compresscheck=="zstd"):
@@ -8902,7 +8462,7 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
8902
8462
  member.size).rjust(15) + " " + datetime.datetime.utcfromtimestamp(member.mtime).strftime('%Y-%m-%d %H:%M') + " " + printfname)
8903
8463
  lcfi = lcfi + 1
8904
8464
  if(returnfp):
8905
- return listarchivefiles['fp']
8465
+ return listarrayfiles['fp']
8906
8466
  else:
8907
8467
  return True
8908
8468
 
@@ -8920,7 +8480,7 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
8920
8480
  if(not infile):
8921
8481
  return False
8922
8482
  infile.seek(0, 0)
8923
- elif(re.findall("^(http|https|ftp|ftps|sftp):\\/\\/", infile)):
8483
+ elif(re.findall(__download_proto_support__, infile)):
8924
8484
  infile = download_file_from_internet_file(infile)
8925
8485
  infile.seek(0, 0)
8926
8486
  if(not infile):
@@ -9035,7 +8595,7 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
9035
8595
  15) + " " + datetime.datetime.utcfromtimestamp(int(time.mktime(member.date_time + (0, 0, -1)))).strftime('%Y-%m-%d %H:%M') + " " + printfname)
9036
8596
  lcfi = lcfi + 1
9037
8597
  if(returnfp):
9038
- return listarchivefiles['fp']
8598
+ return listarrayfiles['fp']
9039
8599
  else:
9040
8600
  return True
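The Tar and Zip listing paths above now test the input against the shared __download_proto_support__ pattern before treating it as a local path; a small sketch of that check, with an illustrative URL:

    import re
    sample = "ftps://user:pass@host.example/pub/archive.cat"   # placeholder URL
    if re.findall(__download_proto_support__, sample):
        print("remote source: it would be fetched into a temporary file first")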
9041
8601
 
@@ -9173,7 +8733,7 @@ if(rarfile_support):
9173
8733
  member.file_size).rjust(15) + " " + member.mtime.strftime('%Y-%m-%d %H:%M') + " " + printfname)
9174
8734
  lcfi = lcfi + 1
9175
8735
  if(returnfp):
9176
- return listarchivefiles['fp']
8736
+ return listarrayfiles['fp']
9177
8737
  else:
9178
8738
  return True
9179
8739
 
@@ -9191,7 +8751,7 @@ if(py7zr_support):
9191
8751
  returnval = {}
9192
8752
  szpfp = py7zr.SevenZipFile(infile, mode="r")
9193
8753
  file_content = szpfp.readall()
9194
- #sztest = szpfp.testzip();
8754
+ #sztest = szpfp.testzip()
9195
8755
  sztestalt = szpfp.test()
9196
8756
  if(sztestalt):
9197
8757
  VerbosePrintOut("Bad file found!")
@@ -9280,7 +8840,7 @@ if(py7zr_support):
9280
8840
  fsize).rjust(15) + " " + member.creationtime.strftime('%Y-%m-%d %H:%M') + " " + printfname)
9281
8841
  lcfi = lcfi + 1
9282
8842
  if(returnfp):
9283
- return listarchivefiles['fp']
8843
+ return listarrayfiles['fp']
9284
8844
  else:
9285
8845
  return True
9286
8846
 
@@ -9288,7 +8848,7 @@ if(py7zr_support):
9288
8848
  def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict__, seektoend=False, newstyle=False, returnfp=False):
9289
8849
  if(verbose):
9290
8850
  logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
9291
- checkcompressfile = CheckCompressionSubType(infile, formatspecs, True)
8851
+ checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
+ checkcompressfile = CheckCompressionSubType(infile, formatspecs, 0, True)
9292
8852
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
9293
8853
  formatspecs = formatspecs[checkcompressfile]
9294
8854
  if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
@@ -9310,9 +8870,9 @@ def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compress
9310
8870
  outarray = MkTempFile()
9311
8871
  packform = PackArchiveFile(infiles, outarray, dirlistfromtxt, compression, compresswholefile,
9312
8872
  compressionlevel, followlink, checksumtype, formatspecs, False, True)
9313
- listarchivefiles = ArchiveFileListFiles(
8873
+ listarrayfiles = ArchiveFileListFiles(
9314
8874
  outarray, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
9315
- return listarchivefiles
8875
+ return listarrayfiles
9316
8876
 
9317
8877
  """
9318
8878
  PyNeoFile compatibility layer
@@ -9334,7 +8894,7 @@ def pack_neo(infiles, outfile=None, formatspecs=__file_format_multi_dict__, chec
9334
8894
  return PackArchiveFile(infiles, outfile, False, "auto", compression, False, compression_level, compressionlistalt, False, checksumtypes, [], {}, formatspecs, False, returnfp)
9335
8895
 
9336
8896
  def archive_to_array_neo(infile, formatspecs=__file_format_multi_dict__, listonly=False, skipchecksum=False, uncompress=True, returnfp=False):
9337
- return ArchiveFileToArray(infile, "auto", 0, 0, listonly, True, uncompress, skipchecksum, formatspecs, False, returnfp)
8897
+ return ArchiveFileToArray(infile, "auto", 0, 0, 0, listonly, True, uncompress, skipchecksum, formatspecs, False, returnfp)
9338
8898
 
9339
8899
  def unpack_neo(infile, outdir='.', formatspecs=__file_format_multi_dict__, skipchecksum=False, uncompress=True, returnfp=False):
9340
8900
  return UnPackArchiveFile(infile, outdir, False, 0, 0, skipchecksum, formatspecs, True, True, False, False, returnfp)
@@ -9349,13 +8909,26 @@ def archivefilelistfiles_neo(infile, formatspecs=__file_format_multi_dict__, adv
9349
8909
  return ArchiveFileListFiles(infile, "auto", 0, 0, False, formatspecs, False, True, advanced, returnfp)
9350
8910
 
9351
8911
  def convert_foreign_to_neo(infile, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["crc32", "crc32", "crc32", "crc32"], compression="auto", compression_level=None, returnfp=False):
9352
- intmp = InFileToArray(infile, 0, 0, False, True, False, formatspecs, False, False)
8912
+ intmp = InFileToArray(infile, 0, 0, 0, False, True, False, formatspecs, False, False)
9353
8913
  return RePackArchiveFile(intmp, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)
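A hedged note on the extra leading 0 now passed to ArchiveFileToArray and InFileToArray in the compatibility wrappers: it appears to fill the new filestart slot, assumed here to be a byte offset into the source (0 meaning "start of file"). An illustrative call matching the positional pattern used above, with "example.cat" as a placeholder path:

    entries = ArchiveFileToArray("example.cat", "auto", 0, 0, 0, True, False, False,
                                 False, __file_format_multi_dict__, False, False)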
9354
8914
 
8915
+ def detect_cwd(ftp, file_dir):
8916
+ """
8917
+ Test whether cwd into file_dir works. Returns True if it does,
8918
+ False if not (so absolute paths should be used).
8919
+ """
8920
+ if not file_dir or file_dir in ("/", ""):
8921
+ return False # nothing to cwd into
8922
+ try:
8923
+ ftp.cwd(file_dir)
8924
+ return True
8925
+ except all_errors:
8926
+ return False
8927
+
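A short sketch of how detect_cwd is used by the FTP transfer code below, wrapped in a hypothetical helper (fetch_via_cwd is illustrative, not part of the module):

    from io import BytesIO

    def fetch_via_cwd(ftp, file_dir, file_name):
        # Prefer a relative RETR after a successful cwd; otherwise fall back to
        # the absolute path, mirroring the is_cwd_allowed branches below.
        buf = BytesIO()
        if detect_cwd(ftp, file_dir):
            ftp.retrbinary("RETR " + file_name, buf.write)
        else:
            ftp.retrbinary("RETR " + file_dir + "/" + file_name, buf.write)
        buf.seek(0)
        return buf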
9355
8928
  def download_file_from_ftp_file(url):
9356
8929
  urlparts = urlparse(url)
9357
- file_name = os.path.basename(urlparts.path)
9358
- file_dir = os.path.dirname(urlparts.path)
8930
+ file_name = os.path.basename(unquote(urlparts.path))
8931
+ file_dir = os.path.dirname(unquote(urlparts.path))
9359
8932
  if(urlparts.username is not None):
9360
8933
  ftp_username = urlparts.username
9361
8934
  else:
@@ -9372,7 +8945,7 @@ def download_file_from_ftp_file(url):
9372
8945
  ftp = FTP_TLS()
9373
8946
  else:
9374
8947
  return False
9375
- if(urlparts.scheme == "sftp"):
8948
+ if(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
9376
8949
  if(__use_pysftp__):
9377
8950
  return download_file_from_pysftp_file(url)
9378
8951
  else:
@@ -9390,26 +8963,70 @@ def download_file_from_ftp_file(url):
9390
8963
  except socket.timeout:
9391
8964
  log.info("Error With URL "+url)
9392
8965
  return False
9393
- ftp.login(urlparts.username, urlparts.password)
9394
- if(urlparts.scheme == "ftps"):
9395
- ftp.prot_p()
8966
+ if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
8967
+ try:
8968
+ ftp.auth()
8969
+ except all_errors:
8970
+ pass
8971
+ ftp.login(ftp_username, ftp_password)
8972
+ if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
8973
+ try:
8974
+ ftp.prot_p()
8975
+ except all_errors:
8976
+ ftp.prot_c()
8977
+ # UTF-8 filenames if supported
8978
+ try:
8979
+ ftp.sendcmd("OPTS UTF8 ON")
8980
+ ftp.encoding = "utf-8"
8981
+ except all_errors:
8982
+ pass
8983
+ is_cwd_allowed = detect_cwd(ftp, file_dir)
9396
8984
  ftpfile = MkTempFile()
9397
- ftp.retrbinary("RETR "+urlparts.path, ftpfile.write)
9398
- #ftp.storbinary("STOR "+urlparts.path, ftpfile.write);
8985
+ # Try EPSV first, then fall back
8986
+ try:
8987
+ ftp.force_epsv = True
8988
+ ftp.sendcmd("EPSV") # request extended passive
8989
+ if(is_cwd_allowed):
8990
+ ftp.retrbinary("RETR "+file_name, ftpfile.write)
8991
+ else:
8992
+ ftp.retrbinary("RETR "+unquote(urlparts.path), ftpfile.write)
8993
+ except all_errors:
8994
+ try:
8995
+ ftp.set_pasv(True)
8996
+ if(is_cwd_allowed):
8997
+ ftp.retrbinary("RETR "+file_name, ftpfile.write)
8998
+ else:
8999
+ ftp.retrbinary("RETR "+unquote(urlparts.path), ftpfile.write)
9000
+ except all_errors:
9001
+ ftp.set_pasv(False)
9002
+ if(is_cwd_allowed):
9003
+ ftp.retrbinary("RETR "+file_name, ftpfile.write)
9004
+ else:
9005
+ ftp.retrbinary("RETR "+unquote(urlparts.path), ftpfile.write)
9399
9006
  ftp.close()
9400
9007
  ftpfile.seek(0, 0)
9401
9008
  return ftpfile
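The transfer-mode fallback added above follows a common pattern: extended passive first, then plain passive, then active mode. A condensed sketch of the same idea (retr_with_fallback is an illustrative name):

    from ftplib import all_errors

    def retr_with_fallback(ftp, remote_path, sink):
        try:
            ftp.sendcmd("EPSV")                                # extended passive
            ftp.retrbinary("RETR " + remote_path, sink.write)
        except all_errors:
            try:
                ftp.set_pasv(True)                             # plain passive
                ftp.retrbinary("RETR " + remote_path, sink.write)
            except all_errors:
                ftp.set_pasv(False)                            # active mode
                ftp.retrbinary("RETR " + remote_path, sink.write)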
9402
9009
 
9403
9010
 
9011
+ def download_file_from_ftps_file(url):
9012
+ return download_file_from_ftp_file(url)
9013
+
9014
+
9404
9015
  def download_file_from_ftp_string(url):
9405
9016
  ftpfile = download_file_from_ftp_file(url)
9406
- return ftpfile.read()
9017
+ ftpout = ftpfile.read()
9018
+ ftpfile.close()
9019
+ return ftpout
9020
+
9021
+
9022
+ def download_file_from_ftps_string(url):
9023
+ return download_file_from_ftp_string(url)
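The new ftps_* names are thin aliases over the ftp_* implementations, so either spelling can be used; for example, with a placeholder URL:

    data = download_file_from_ftps_string("ftps://user:pass@host.example/pub/archive.cat")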
9407
9024
 
9408
9025
 
9409
9026
  def upload_file_to_ftp_file(ftpfile, url):
9410
9027
  urlparts = urlparse(url)
9411
- file_name = os.path.basename(urlparts.path)
9412
- file_dir = os.path.dirname(urlparts.path)
9028
+ file_name = os.path.basename(unquote(urlparts.path))
9029
+ file_dir = os.path.dirname(unquote(urlparts.path))
9413
9030
  if(urlparts.username is not None):
9414
9031
  ftp_username = urlparts.username
9415
9032
  else:
@@ -9426,7 +9043,7 @@ def upload_file_to_ftp_file(ftpfile, url):
9426
9043
  ftp = FTP_TLS()
9427
9044
  else:
9428
9045
  return False
9429
- if(urlparts.scheme == "sftp"):
9046
+ if(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
9430
9047
  if(__use_pysftp__):
9431
9048
  return upload_file_to_pysftp_file(url)
9432
9049
  else:
@@ -9444,15 +9061,55 @@ def upload_file_to_ftp_file(ftpfile, url):
9444
9061
  except socket.timeout:
9445
9062
  log.info("Error With URL "+url)
9446
9063
  return False
9447
- ftp.login(urlparts.username, urlparts.password)
9448
- if(urlparts.scheme == "ftps"):
9449
- ftp.prot_p()
9450
- ftp.storbinary("STOR "+urlparts.path, ftpfile)
9064
+ if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
9065
+ try:
9066
+ ftp.auth()
9067
+ except all_errors:
9068
+ pass
9069
+ ftp.login(ftp_username, ftp_password)
9070
+ if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
9071
+ try:
9072
+ ftp.prot_p()
9073
+ except all_errors:
9074
+ ftp.prot_c()
9075
+ # UTF-8 filenames if supported
9076
+ try:
9077
+ ftp.sendcmd("OPTS UTF8 ON")
9078
+ ftp.encoding = "utf-8"
9079
+ except all_errors:
9080
+ pass
9081
+ is_cwd_allowed = detect_cwd(ftp, file_dir)
9082
+ ftpfile.seek(0, 0)
9083
+ # Try EPSV first, then fall back
9084
+ try:
9085
+ ftp.force_epsv = True
9086
+ ftp.sendcmd("EPSV") # request extended passive
9087
+ if(is_cwd_allowed):
9088
+ ftp.storbinary("STOR "+file_name, ftpfile)
9089
+ else:
9090
+ ftp.storbinary("STOR "+unquote(urlparts.path), ftpfile)
9091
+ except all_errors:
9092
+ try:
9093
+ ftp.set_pasv(True)
9094
+ if(is_cwd_allowed):
9095
+ ftp.storbinary("STOR "+file_name, ftpfile)
9096
+ else:
9097
+ ftp.storbinary("STOR "+unquote(urlparts.path), ftpfile)
9098
+ except all_errors:
9099
+ ftp.set_pasv(False)
9100
+ if(is_cwd_allowed):
9101
+ ftp.storbinary("STOR "+file_name, ftpfile)
9102
+ else:
9103
+ ftp.storbinary("STOR "+unquote(urlparts.path), ftpfile)
9451
9104
  ftp.close()
9452
9105
  ftpfile.seek(0, 0)
9453
9106
  return ftpfile
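A hedged usage example for the rewritten uploader; the contents and URL are placeholders:

    from io import BytesIO
    payload = BytesIO(b"example archive bytes")
    result = upload_file_to_ftp_file(payload, "ftp://user:pass@ftp.example.com/upload/archive.cat")
    if result is False:
        print("upload failed")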
9454
9107
 
9455
9108
 
9109
+ def upload_file_to_ftps_file(ftpfile, url):
9110
+ return upload_file_to_ftp_file(ftpfile, url)
9111
+
9112
+
9456
9113
  def upload_file_to_ftp_string(ftpstring, url):
9457
9114
  ftpfileo = MkTempFile(ftpstring)
9458
9115
  ftpfile = upload_file_to_ftp_file(ftpfileo, url)
@@ -9460,6 +9117,10 @@ def upload_file_to_ftp_string(ftpstring, url):
9460
9117
  return ftpfile
9461
9118
 
9462
9119
 
9120
+ def upload_file_to_ftps_string(ftpstring, url):
9121
+ return upload_file_to_ftp_string(ftpstring, url)
9122
+
9123
+
9463
9124
  class RawIteratorWrapper:
9464
9125
  def __init__(self, iterator):
9465
9126
  self.iterator = iterator
@@ -9497,7 +9158,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
9497
9158
  urlparts.params, urlparts.query, urlparts.fragment))
9498
9159
 
9499
9160
  # Handle SFTP/FTP
9500
- if urlparts.scheme == "sftp":
9161
+ if urlparts.scheme == "sftp" or urlparts.scheme == "scp":
9501
9162
  if __use_pysftp__:
9502
9163
  return download_file_from_pysftp_file(url)
9503
9164
  else:
@@ -9570,14 +9231,16 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
9570
9231
 
9571
9232
  def download_file_from_http_string(url, headers=geturls_headers_pyfile_python_alt, usehttp=__use_http_lib__):
9572
9233
  httpfile = download_file_from_http_file(url, headers, usehttp)
9573
- return httpfile.read()
9234
+ httpout = httpfile.read()
9235
+ httpfile.close()
9236
+ return httpout
9574
9237
 
9575
9238
 
9576
9239
  if(haveparamiko):
9577
9240
  def download_file_from_sftp_file(url):
9578
9241
  urlparts = urlparse(url)
9579
- file_name = os.path.basename(urlparts.path)
9580
- file_dir = os.path.dirname(urlparts.path)
9242
+ file_name = os.path.basename(unquote(urlparts.path))
9243
+ file_dir = os.path.dirname(unquote(urlparts.path))
9581
9244
  sftp_port = urlparts.port
9582
9245
  if(urlparts.port is None):
9583
9246
  sftp_port = 22
@@ -9597,14 +9260,14 @@ if(haveparamiko):
9597
9260
  return download_file_from_ftp_file(url)
9598
9261
  elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
9599
9262
  return download_file_from_http_file(url)
9600
- if(urlparts.scheme != "sftp"):
9263
+ if(urlparts.scheme != "sftp" and urlparts.scheme != "scp"):
9601
9264
  return False
9602
9265
  ssh = paramiko.SSHClient()
9603
9266
  ssh.load_system_host_keys()
9604
9267
  ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
9605
9268
  try:
9606
9269
  ssh.connect(urlparts.hostname, port=sftp_port,
9607
- username=urlparts.username, password=urlparts.password)
9270
+ username=sftp_username, password=urlparts.password)
9608
9271
  except paramiko.ssh_exception.SSHException:
9609
9272
  return False
9610
9273
  except socket.gaierror:
@@ -9615,7 +9278,7 @@ if(haveparamiko):
9615
9278
  return False
9616
9279
  sftp = ssh.open_sftp()
9617
9280
  sftpfile = MkTempFile()
9618
- sftp.getfo(urlparts.path, sftpfile)
9281
+ sftp.getfo(unquote(urlparts.path), sftpfile)
9619
9282
  sftp.close()
9620
9283
  ssh.close()
9621
9284
  sftpfile.seek(0, 0)
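The percent-decoding added here matters when the URL path contains encoded characters; a small sketch of the decode step (urlparse and unquote are already imported at module level, the URL is illustrative):

    parts = urlparse("sftp://user@host.example/data/my%20file.cat")
    remote_path = unquote(parts.path)   # "/data/my file.cat" is what sftp.getfo() receives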
@@ -9627,7 +9290,9 @@ else:
9627
9290
  if(haveparamiko):
9628
9291
  def download_file_from_sftp_string(url):
9629
9292
  sftpfile = download_file_from_sftp_file(url)
9630
- return sftpfile.read()
9293
+ sftpout = sftpfile.read()
9294
+ sftpfile.close()
9295
+ return sftpout
9631
9296
  else:
9632
9297
  def download_file_from_sftp_string(url):
9633
9298
  return False
@@ -9635,8 +9300,8 @@ else:
9635
9300
  if(haveparamiko):
9636
9301
  def upload_file_to_sftp_file(sftpfile, url):
9637
9302
  urlparts = urlparse(url)
9638
- file_name = os.path.basename(urlparts.path)
9639
- file_dir = os.path.dirname(urlparts.path)
9303
+ file_name = os.path.basename(unquote(urlparts.path))
9304
+ file_dir = os.path.dirname(unquote(urlparts.path))
9640
9305
  sftp_port = urlparts.port
9641
9306
  if(urlparts.port is None):
9642
9307
  sftp_port = 22
@@ -9653,17 +9318,17 @@ if(haveparamiko):
9653
9318
  else:
9654
9319
  sftp_password = ""
9655
9320
  if(urlparts.scheme == "ftp"):
9656
- return upload_file_to_ftp_file(url)
9321
+ return upload_file_to_ftp_file(sftpfile, url)
9657
9322
  elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
9658
9323
  return False
9659
- if(urlparts.scheme != "sftp"):
9324
+ if(urlparts.scheme != "sftp" and urlparts.scheme != "scp"):
9660
9325
  return False
9661
9326
  ssh = paramiko.SSHClient()
9662
9327
  ssh.load_system_host_keys()
9663
9328
  ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
9664
9329
  try:
9665
9330
  ssh.connect(urlparts.hostname, port=sftp_port,
9666
- username=urlparts.username, password=urlparts.password)
9331
+ username=sftp_username, password=sftp_password)
9667
9332
  except paramiko.ssh_exception.SSHException:
9668
9333
  return False
9669
9334
  except socket.gaierror:
@@ -9673,7 +9338,8 @@ if(haveparamiko):
9673
9338
  log.info("Error With URL "+url)
9674
9339
  return False
9675
9340
  sftp = ssh.open_sftp()
9676
- sftp.putfo(sftpfile, urlparts.path)
9341
+ sftpfile.seek(0, 0)
9342
+ sftp.putfo(sftpfile, unquote(urlparts.path))
9677
9343
  sftp.close()
9678
9344
  ssh.close()
9679
9345
  sftpfile.seek(0, 0)
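The added seek(0, 0) before putfo guards against uploading from the current read position; a minimal illustration with an in-memory file:

    from io import BytesIO
    buf = BytesIO(b"payload")
    buf.read()        # position now sits at the end of the data
    buf.seek(0, 0)    # without the rewind, putfo() would upload an empty stream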
@@ -9695,8 +9361,8 @@ else:
9695
9361
  if(havepysftp):
9696
9362
  def download_file_from_pysftp_file(url):
9697
9363
  urlparts = urlparse(url)
9698
- file_name = os.path.basename(urlparts.path)
9699
- file_dir = os.path.dirname(urlparts.path)
9364
+ file_name = os.path.basename(unquote(urlparts.path))
9365
+ file_dir = os.path.dirname(unquote(urlparts.path))
9700
9366
  sftp_port = urlparts.port
9701
9367
  if(urlparts.port is None):
9702
9368
  sftp_port = 22
@@ -9716,11 +9382,11 @@ if(havepysftp):
9716
9382
  return download_file_from_ftp_file(url)
9717
9383
  elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
9718
9384
  return download_file_from_http_file(url)
9719
- if(urlparts.scheme != "sftp"):
9385
+ if(urlparts.scheme != "sftp" and urlparts.scheme != "scp"):
9720
9386
  return False
9721
9387
  try:
9722
- pysftp.Connection(urlparts.hostname, port=sftp_port,
9723
- username=urlparts.username, password=urlparts.password)
9388
+ sftp = pysftp.Connection(urlparts.hostname, port=sftp_port,
9389
+ username=sftp_username, password=sftp_password)
9724
9390
  except paramiko.ssh_exception.SSHException:
9725
9391
  return False
9726
9392
  except socket.gaierror:
@@ -9729,9 +9395,8 @@ if(havepysftp):
9729
9395
  except socket.timeout:
9730
9396
  log.info("Error With URL "+url)
9731
9397
  return False
9732
- sftp = ssh.open_sftp()
9733
9398
  sftpfile = MkTempFile()
9734
- sftp.getfo(urlparts.path, sftpfile)
9399
+ sftp.getfo(unquote(urlparts.path), sftpfile)
9735
9400
  sftp.close()
9736
9401
  ssh.close()
9737
9402
  sftpfile.seek(0, 0)
@@ -9743,7 +9408,9 @@ else:
9743
9408
  if(havepysftp):
9744
9409
  def download_file_from_pysftp_string(url):
9745
9410
  sftpfile = download_file_from_pysftp_file(url)
9746
- return sftpfile.read()
9411
+ sftpout = sftpfile.read()
9412
+ sftpfile.close()
9413
+ return sftpout
9747
9414
  else:
9748
9415
  def download_file_from_pysftp_string(url):
9749
9416
  return False
@@ -9751,8 +9418,8 @@ else:
9751
9418
  if(havepysftp):
9752
9419
  def upload_file_to_pysftp_file(sftpfile, url):
9753
9420
  urlparts = urlparse(url)
9754
- file_name = os.path.basename(urlparts.path)
9755
- file_dir = os.path.dirname(urlparts.path)
9421
+ file_name = os.path.basename(unquote(urlparts.path))
9422
+ file_dir = os.path.dirname(unquote(urlparts.path))
9756
9423
  sftp_port = urlparts.port
9757
9424
  if(urlparts.port is None):
9758
9425
  sftp_port = 22
@@ -9769,14 +9436,14 @@ if(havepysftp):
9769
9436
  else:
9770
9437
  sftp_password = ""
9771
9438
  if(urlparts.scheme == "ftp"):
9772
- return upload_file_to_ftp_file(url)
9439
+ return upload_file_to_ftp_file(sftpfile, url)
9773
9440
  elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
9774
9441
  return False
9775
- if(urlparts.scheme != "sftp"):
9442
+ if(urlparts.scheme != "sftp" and urlparts.scheme != "scp"):
9776
9443
  return False
9777
9444
  try:
9778
- pysftp.Connection(urlparts.hostname, port=sftp_port,
9779
- username=urlparts.username, password=urlparts.password)
9445
+ sftp = pysftp.Connection(urlparts.hostname, port=sftp_port,
9446
+ username=sftp_username, password=sftp_password)
9780
9447
  except paramiko.ssh_exception.SSHException:
9781
9448
  return False
9782
9449
  except socket.gaierror:
@@ -9785,8 +9452,8 @@ if(havepysftp):
9785
9452
  except socket.timeout:
9786
9453
  log.info("Error With URL "+url)
9787
9454
  return False
9788
- sftp = ssh.open_sftp()
9789
- sftp.putfo(sftpfile, urlparts.path)
9455
+ sftpfile.seek(0, 0)
9456
+ sftp.putfo(sftpfile, unquote(urlparts.path))
9790
9457
  sftp.close()
9791
9458
  ssh.close()
9792
9459
  sftpfile.seek(0, 0)
@@ -9812,7 +9479,7 @@ def download_file_from_internet_file(url, headers=geturls_headers_pyfile_python_
9812
9479
  return download_file_from_http_file(url, headers, usehttp)
9813
9480
  elif(urlparts.scheme == "ftp" or urlparts.scheme == "ftps"):
9814
9481
  return download_file_from_ftp_file(url)
9815
- elif(urlparts.scheme == "sftp"):
9482
+ elif(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
9816
9483
  if(__use_pysftp__ and havepysftp):
9817
9484
  return download_file_from_pysftp_file(url)
9818
9485
  else:
@@ -9822,9 +9489,9 @@ def download_file_from_internet_file(url, headers=geturls_headers_pyfile_python_
9822
9489
  return False
9823
9490
 
9824
9491
 
9825
- def download_file_from_internet_uncompress_file(url, headers=geturls_headers_pyfile_python_alt, formatspecs=__file_format_dict__):
9492
+ def download_file_from_internet_uncompress_file(url, headers=geturls_headers_pyfile_python_alt, filestart=0, formatspecs=__file_format_dict__):
9826
9493
  fp = download_file_from_internet_file(url)
9827
- fp = UncompressFileAlt(fp, formatspecs)
9494
+ fp = UncompressFileAlt(fp, formatspecs, filestart)
9828
9495
  fp.seek(0, 0)
9829
9496
  if(not fp):
9830
9497
  return False
@@ -9837,7 +9504,7 @@ def download_file_from_internet_string(url, headers=geturls_headers_pyfile_pytho
9837
9504
  return download_file_from_http_string(url, headers)
9838
9505
  elif(urlparts.scheme == "ftp" or urlparts.scheme == "ftps"):
9839
9506
  return download_file_from_ftp_string(url)
9840
- elif(urlparts.scheme == "sftp"):
9507
+ elif(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
9841
9508
  if(__use_pysftp__ and havepysftp):
9842
9509
  return download_file_from_pysftp_string(url)
9843
9510
  else:
@@ -9847,13 +9514,15 @@ def download_file_from_internet_string(url, headers=geturls_headers_pyfile_pytho
9847
9514
  return False
9848
9515
 
9849
9516
 
9850
- def download_file_from_internet_uncompress_string(url, headers=geturls_headers_pyfile_python_alt, formatspecs=__file_format_dict__):
9517
+ def download_file_from_internet_uncompress_string(url, headers=geturls_headers_pyfile_python_alt, filestart=0, formatspecs=__file_format_dict__):
9851
9518
  fp = download_file_from_internet_string(url)
9852
- fp = UncompressFileAlt(fp, formatspecs)
9853
- fp.seek(0, 0)
9519
+ fp = UncompressFileAlt(fp, formatspecs, filestart)
9854
9520
  if(not fp):
9855
9521
  return False
9856
- return fp
9522
+ fp.seek(0, 0)
9523
+ fpout = fp.read()
9524
+ fp.close()
9525
+ return fpout
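With this change the *_uncompress_string helper hands back the decompressed payload rather than a file object; a hedged usage sketch with a placeholder URL:

    data = download_file_from_internet_uncompress_string("https://host.example/archive.cat.gz")
    if data is not False:
        print(len(data), "bytes after decompression")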
9857
9526
 
9858
9527
 
9859
9528
  def upload_file_to_internet_file(ifp, url):
@@ -9862,7 +9531,7 @@ def upload_file_to_internet_file(ifp, url):
9862
9531
  return False
9863
9532
  elif(urlparts.scheme == "ftp" or urlparts.scheme == "ftps"):
9864
9533
  return upload_file_to_ftp_file(ifp, url)
9865
- elif(urlparts.scheme == "sftp"):
9534
+ elif(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
9866
9535
  if(__use_pysftp__ and havepysftp):
9867
9536
  return upload_file_to_pysftp_file(ifp, url)
9868
9537
  else:
@@ -9878,8 +9547,7 @@ def upload_file_to_internet_compress_file(ifp, url, compression="auto", compress
9878
9547
  if(not archivefileout):
9879
9548
  return False
9880
9549
  fp.seek(0, 0)
9881
- upload_file_to_internet_file(fp, outfile)
9882
- return True
9550
+ return upload_file_to_internet_file(fp, outfile)
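Returning the uploader's result instead of a bare True lets callers detect failures; a short sketch with placeholder data and URL:

    from io import BytesIO
    res = upload_file_to_internet_compress_file(BytesIO(b"data"), "ftp://user:pass@host.example/out.cat")
    if res is False:
        print("upload failed")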
9883
9551
 
9884
9552
 
9885
9553
  def upload_file_to_internet_string(ifp, url):
@@ -9888,7 +9556,7 @@ def upload_file_to_internet_string(ifp, url):
9888
9556
  return False
9889
9557
  elif(urlparts.scheme == "ftp" or urlparts.scheme == "ftps"):
9890
9558
  return upload_file_to_ftp_string(ifp, url)
9891
- elif(urlparts.scheme == "sftp"):
9559
+ elif(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
9892
9560
  if(__use_pysftp__ and havepysftp):
9893
9561
  return upload_file_to_pysftp_string(ifp, url)
9894
9562
  else:
@@ -9905,5 +9573,4 @@ def upload_file_to_internet_compress_string(ifp, url, compression="auto", compre
9905
9573
  if(not archivefileout):
9906
9574
  return False
9907
9575
  fp.seek(0, 0)
9908
- upload_file_to_internet_file(fp, outfile)
9909
- return True
9576
+ return upload_file_to_internet_file(fp, outfile)