PyFoxFile 0.22.4__py3-none-any.whl → 0.23.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pyfoxfile.py CHANGED
@@ -14,7 +14,7 @@
14
14
  Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
15
15
  Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski
16
16
 
17
- $FileInfo: pyfoxfile.py - Last Update: 8/29/2025 Ver. 0.22.4 RC 1 - Author: cooldude2k $
17
+ $FileInfo: pyfoxfile.py - Last Update: 10/1/2025 Ver. 0.23.0 RC 1 - Author: cooldude2k $
18
18
  '''
19
19
 
20
20
  from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -27,8 +27,8 @@ import stat
27
27
  import zlib
28
28
  import base64
29
29
  import shutil
30
- import struct
31
30
  import socket
31
+ import struct
32
32
  import hashlib
33
33
  import inspect
34
34
  import datetime
@@ -79,6 +79,7 @@ try:
79
79
  except NameError:
80
80
  basestring = str
81
81
 
82
+ PY2 = (sys.version_info[0] == 2)
82
83
  try:
83
84
  unicode # Py2
84
85
  except NameError: # Py3
@@ -271,8 +272,8 @@ def get_default_threads():
271
272
 
272
273
 
273
274
  __use_pysftp__ = False
274
- __upload_proto_support__ = "^(ftp|ftps|sftp|scp)://"
275
- __download_proto_support__ = "^(http|https|ftp|ftps|sftp|scp)://"
275
+ __upload_proto_support__ = "^(ftp|ftps|sftp|scp|tcp|udp)://"
276
+ __download_proto_support__ = "^(http|https|ftp|ftps|sftp|scp|tcp|udp)://"
276
277
  if(not havepysftp):
277
278
  __use_pysftp__ = False
278
279
  __use_http_lib__ = "httpx"
@@ -390,13 +391,13 @@ __file_format_extension__ = __file_format_multi_dict__[__file_format_default__][
390
391
  __file_format_dict__ = __file_format_multi_dict__[__file_format_default__]
391
392
  __project__ = __program_name__
392
393
  __project_url__ = "https://github.com/GameMaker2k/PyFoxFile"
393
- __version_info__ = (0, 22, 4, "RC 1", 1)
394
- __version_info__ = (0, 22, 4, "RC 1", 1)
395
- __version_date_info__ = (2025, 9, 29, "RC 1", 1)
394
+ __version_info__ = (0, 23, 0, "RC 1", 1)
395
+ __version_info__ = (0, 23, 0, "RC 1", 1)
396
+ __version_date_info__ = (2025, 10, 1, "RC 1", 1)
396
397
  __version_date__ = str(__version_date_info__[0]) + "." + str(
397
398
  __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
398
399
  __revision__ = __version_info__[3]
399
- __revision_id__ = "$Id: c5f2e77a91df1432f3fb4213ee32b80c79427e3a $"
400
+ __revision_id__ = "$Id: ac611cb1eb63d3a9340aa91f967326a6adaf8b61 $"
400
401
  if(__version_info__[4] is not None):
401
402
  __version_date_plusrc__ = __version_date__ + \
402
403
  "-" + str(__version_date_info__[4])
@@ -408,15 +409,67 @@ if(__version_info__[3] is not None):
408
409
  if(__version_info__[3] is None):
409
410
  __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])
410
411
 
412
+ # ===== Module-level type code table & helpers (reuse anywhere) =====
413
+
414
+ FT = {
415
+ "FILE": 0,
416
+ "HARDLINK": 1,
417
+ "SYMLINK": 2,
418
+ "CHAR": 3,
419
+ "BLOCK": 4,
420
+ "DIR": 5,
421
+ "FIFO": 6,
422
+ "CONTAGIOUS": 7, # treated like regular file
423
+ "SOCK": 8,
424
+ "DOOR": 9,
425
+ "PORT": 10,
426
+ "WHT": 11,
427
+ "SPARSE": 12,
428
+ "JUNCTION": 13,
429
+ }
430
+
431
+ BASE_CATEGORY_BY_CODE = {
432
+ 0: "files",
433
+ 1: "hardlinks",
434
+ 2: "symlinks",
435
+ 3: "characters",
436
+ 4: "blocks",
437
+ 5: "directories",
438
+ 6: "fifos",
439
+ 7: "files", # contagious treated as file
440
+ 8: "sockets",
441
+ 9: "doors",
442
+ 10: "ports",
443
+ 11: "whiteouts",
444
+ 12: "sparsefiles",
445
+ 13: "junctions",
446
+ }
447
+
448
+ # Union categories defined by which base codes should populate them.
449
+ UNION_RULES = [
450
+ ("links", set([FT["HARDLINK"], FT["SYMLINK"]])),
451
+ ("devices", set([FT["CHAR"], FT["BLOCK"]])),
452
+ ]
453
+
454
+ # Deterministic category order (handy for consistent output/printing).
455
+ CATEGORY_ORDER = [
456
+ "files", "hardlinks", "symlinks", "character", "block",
457
+ "directories", "fifo", "sockets", "doors", "ports",
458
+ "whiteouts", "sparsefiles", "junctions", "links", "devices"
459
+ ]
460
+
411
461
  # Robust bitness detection
412
462
  # Works on Py2 & Py3, all platforms
463
+
464
+ # Python interpreter bitness
465
+ PyBitness = "64" if struct.calcsize("P") * 8 == 64 else ("64" if sys.maxsize > 2**32 else "32")
466
+
467
+ # Operating system bitness
413
468
  try:
414
- import struct
415
- PyBitness = "64" if struct.calcsize("P") * 8 == 64 else "32"
469
+ OSBitness = platform.architecture()[0].replace("bit", "")
416
470
  except Exception:
417
- # conservative fallback
418
- m = platform.machine() or ""
419
- PyBitness = "64" if m.endswith("64") else "32"
471
+ m = platform.machine().lower()
472
+ OSBitness = "64" if "64" in m else "32"
420
473
 
421
474
  geturls_ua_pyfile_python = "Mozilla/5.0 (compatible; {proname}/{prover}; +{prourl})".format(
422
475
  proname=__project__, prover=__version__, prourl=__project_url__)
@@ -657,6 +710,415 @@ def _resolves_outside(base_rel, target_rel):
657
710
  return True
658
711
 
659
712
 
713
+ def _to_bytes(data):
714
+ if data is None:
715
+ return b""
716
+ if isinstance(data, bytes):
717
+ return data
718
+ if isinstance(data, unicode):
719
+ return data.encode("utf-8")
720
+ try:
721
+ return bytes(data)
722
+ except Exception:
723
+ return (u"%s" % data).encode("utf-8")
724
+
725
+ def _to_text(b):
726
+ if isinstance(b, bytes):
727
+ return b.decode("utf-8", "replace")
728
+ return b
729
+
730
+ # ---------- TLS helpers (TCP only) ----------
731
+ def _ssl_available():
732
+ try:
733
+ import ssl # noqa
734
+ return True
735
+ except Exception:
736
+ return False
737
+
738
+ def _build_ssl_context(server_side=False, verify=True, ca_file=None, certfile=None, keyfile=None):
739
+ import ssl
740
+ create_ctx = getattr(ssl, "create_default_context", None)
741
+ SSLContext = getattr(ssl, "SSLContext", None)
742
+ Purpose = getattr(ssl, "Purpose", None)
743
+ if create_ctx and Purpose:
744
+ ctx = create_ctx(ssl.Purpose.CLIENT_AUTH if server_side else ssl.Purpose.SERVER_AUTH)
745
+ elif SSLContext:
746
+ ctx = SSLContext(getattr(ssl, "PROTOCOL_TLS", getattr(ssl, "PROTOCOL_SSLv23")))
747
+ else:
748
+ return None
749
+
750
+ if hasattr(ctx, "check_hostname") and not server_side:
751
+ ctx.check_hostname = bool(verify)
752
+
753
+ if verify:
754
+ if hasattr(ctx, "verify_mode"):
755
+ ctx.verify_mode = getattr(ssl, "CERT_REQUIRED", 2)
756
+ if ca_file:
757
+ try: ctx.load_verify_locations(cafile=ca_file)
758
+ except Exception: pass
759
+ else:
760
+ load_default_certs = getattr(ctx, "load_default_certs", None)
761
+ if load_default_certs: load_default_certs()
762
+ else:
763
+ if hasattr(ctx, "verify_mode"):
764
+ ctx.verify_mode = getattr(ssl, "CERT_NONE", 0)
765
+ if hasattr(ctx, "check_hostname"):
766
+ ctx.check_hostname = False
767
+
768
+ if certfile:
769
+ ctx.load_cert_chain(certfile=certfile, keyfile=keyfile or None)
770
+
771
+ try:
772
+ ctx.set_ciphers("HIGH:!aNULL:!MD5:!RC4")
773
+ except Exception:
774
+ pass
775
+ return ctx
776
+
777
+ def _ssl_wrap_socket(sock, server_side=False, server_hostname=None,
778
+ verify=True, ca_file=None, certfile=None, keyfile=None):
779
+ import ssl
780
+ ctx = _build_ssl_context(server_side, verify, ca_file, certfile, keyfile)
781
+ if ctx is not None:
782
+ kwargs = {}
783
+ if not server_side and getattr(ssl, "HAS_SNI", False) and server_hostname:
784
+ kwargs["server_hostname"] = server_hostname
785
+ return ctx.wrap_socket(sock, server_side=server_side, **kwargs)
786
+ # Very old Python fallback
787
+ kwargs = {
788
+ "ssl_version": getattr(ssl, "PROTOCOL_TLS", getattr(ssl, "PROTOCOL_SSLv23")),
789
+ "certfile": certfile or None,
790
+ "keyfile": keyfile or None,
791
+ "cert_reqs": (getattr(ssl, "CERT_REQUIRED", 2) if (verify and ca_file) else getattr(ssl, "CERT_NONE", 0)),
792
+ }
793
+ if verify and ca_file:
794
+ kwargs["ca_certs"] = ca_file
795
+ return ssl.wrap_socket(sock, **kwargs)
796
+
797
+ # ---------- IPv6 / multi-A dialer + keepalive ----------
798
+ def _enable_keepalive(s, idle=60, intvl=15, cnt=4):
799
+ try:
800
+ s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
801
+ if hasattr(socket, 'TCP_KEEPIDLE'):
802
+ s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, idle)
803
+ if hasattr(socket, 'TCP_KEEPINTVL'):
804
+ s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, intvl)
805
+ if hasattr(socket, 'TCP_KEEPCNT'):
806
+ s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, cnt)
807
+ except Exception:
808
+ pass
809
+
810
+ def _connect_stream(host, port, timeout):
811
+ err = None
812
+ for fam, st, proto, _, sa in socket.getaddrinfo(host, int(port), 0, socket.SOCK_STREAM):
813
+ try:
814
+ s = socket.socket(fam, st, proto)
815
+ if timeout is not None:
816
+ s.settimeout(timeout)
817
+ try: s.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
818
+ except Exception: pass
819
+ s.connect(sa)
820
+ _enable_keepalive(s)
821
+ return s
822
+ except Exception as e:
823
+ err = e
824
+ try: s.close()
825
+ except Exception: pass
826
+ if err: raise err
827
+ raise RuntimeError("no usable address")
828
+
829
+ # ---------- Auth: AF1 (HMAC) + legacy fallback ----------
830
+ # AF1: single ASCII line ending with '\n':
831
+ # AF1 ts=<unix> user=<b64url> nonce=<b64url_12B> scope=<b64url> alg=sha256 mac=<hex>\n
832
+ def _b64url_encode(b):
833
+ s = base64.urlsafe_b64encode(b)
834
+ return _to_text(s.rstrip(b'='))
835
+
836
+ def _b64url_decode(s):
837
+ s = _to_bytes(s)
838
+ pad = b'=' * ((4 - (len(s) % 4)) % 4)
839
+ return base64.urlsafe_b64decode(s + pad)
840
+
841
+ def _auth_msg(ts_int, user_utf8, nonce_bytes, scope_utf8, length_str, sha_hex):
842
+ # canonical message for MAC: v1|ts|user|nonce_b64|scope|len|sha
843
+ return _to_bytes("v1|%d|%s|%s|%s|%s|%s" % (
844
+ ts_int,
845
+ _to_text(user_utf8),
846
+ _b64url_encode(nonce_bytes),
847
+ _to_text(scope_utf8),
848
+ length_str if length_str is not None else "",
849
+ sha_hex if sha_hex is not None else "",
850
+ ))
851
+
852
+ def build_auth_blob_v1(user, secret, scope=u"", now=None, length=None, sha_hex=None):
853
+ """
854
+ user: text; secret: text/bytes (HMAC key)
855
+ scope: optional text (e.g., path)
856
+ length: int or None (payload bytes)
857
+ sha_hex: ascii hex SHA-256 of payload (optional)
858
+ """
859
+ ts = int(time.time() if now is None else now)
860
+ user_b = _to_bytes(user or u"")
861
+ scope_b = _to_bytes(scope or u"")
862
+ key_b = _to_bytes(secret or u"")
863
+ nonce = os.urandom(12)
864
+
865
+ length_str = (str(int(length)) if (length is not None and int(length) >= 0) else "")
866
+ sha_hex = (sha_hex or None)
867
+ mac = hmac.new(
868
+ key_b,
869
+ _auth_msg(ts, user_b, nonce, scope_b, length_str, sha_hex),
870
+ hashlib.sha256
871
+ ).hexdigest()
872
+
873
+ line = "AF1 ts=%d user=%s nonce=%s scope=%s len=%s sha=%s alg=sha256 mac=%s\n" % (
874
+ ts,
875
+ _b64url_encode(user_b),
876
+ _b64url_encode(nonce),
877
+ _b64url_encode(scope_b),
878
+ length_str,
879
+ (sha_hex or ""),
880
+ mac,
881
+ )
882
+ return _to_bytes(line)
883
+
884
+ from collections import deque
885
+ class _NonceCache(object):
886
+ def __init__(self, max_items=10000, ttl_seconds=600):
887
+ self.max_items = int(max_items); self.ttl = int(ttl_seconds)
888
+ self.q = deque(); self.s = set()
889
+ def seen(self, nonce_b64, now_ts):
890
+ # evict old / over-capacity
891
+ while self.q and (now_ts - self.q[0][0] > self.ttl or len(self.q) > self.max_items):
892
+ _, n = self.q.popleft(); self.s.discard(n)
893
+ if nonce_b64 in self.s: return True
894
+ self.s.add(nonce_b64); self.q.append((now_ts, nonce_b64))
895
+ return False
896
+
897
+ _NONCES = _NonceCache()
898
+
899
+ def verify_auth_blob_v1(blob_bytes, expected_user=None, secret=None,
900
+ max_skew=600, expect_scope=None):
901
+ """
902
+ Returns (ok_bool, user_text, scope_text, reason_text, length_or_None, sha_hex_or_None)
903
+ """
904
+ try:
905
+ line = _to_text(blob_bytes).strip()
906
+ if not line.startswith("AF1 "):
907
+ return (False, None, None, "bad magic", None, None)
908
+ kv = {}
909
+ for tok in line.split()[1:]:
910
+ if '=' in tok:
911
+ k, v = tok.split('=', 1); kv[k] = v
912
+
913
+ for req in ("ts","user","nonce","mac","alg"):
914
+ if req not in kv:
915
+ return (False, None, None, "missing %s" % req, None, None)
916
+ if kv["alg"].lower() != "sha256":
917
+ return (False, None, None, "alg", None, None)
918
+
919
+ ts = int(kv["ts"])
920
+ userb = _b64url_decode(kv["user"])
921
+ nonce_b64 = kv["nonce"]; nonce = _b64url_decode(nonce_b64)
922
+ scopeb = _b64url_decode(kv.get("scope","")) if kv.get("scope") else b""
923
+ length_str = kv.get("len","")
924
+ sha_hex = kv.get("sha","") or None
925
+ mac = kv["mac"]
926
+
927
+ now = int(time.time())
928
+ if abs(now - ts) > int(max_skew):
929
+ return (False, None, None, "skew", None, None)
930
+
931
+ if _NONCES.seen(nonce_b64, now):
932
+ return (False, None, None, "replay", None, None)
933
+
934
+ if expected_user is not None and _to_bytes(expected_user) != userb:
935
+ return (False, None, None, "user", None, None)
936
+
937
+ calc = hmac.new(
938
+ _to_bytes(secret or u""),
939
+ _auth_msg(ts, userb, nonce, scopeb, length_str, sha_hex),
940
+ hashlib.sha256
941
+ ).hexdigest()
942
+ if not hmac.compare_digest(calc, mac):
943
+ return (False, None, None, "mac", None, None)
944
+
945
+ if expect_scope is not None and _to_bytes(expect_scope) != scopeb:
946
+ return (False, None, None, "scope", None, None)
947
+
948
+ length = int(length_str) if (length_str and length_str.isdigit()) else None
949
+ return (True, _to_text(userb), _to_text(scopeb), "ok", length, sha_hex)
950
+ except Exception as e:
951
+ return (False, None, None, "exc:%s" % e, None, None)
952
+
953
+ # Legacy blob (kept for backward compatibility)
954
+ _MAGIC = b"AUTH\0"; _OK = b"OK"; _NO = b"NO"
955
+
956
+ def _build_auth_blob_legacy(user, pw):
957
+ return _MAGIC + _to_bytes(user) + b"\0" + _to_bytes(pw) + b"\0"
958
+
959
+ def _parse_auth_blob_legacy(data):
960
+ if not data.startswith(_MAGIC):
961
+ return (None, None)
962
+ rest = data[len(_MAGIC):]
963
+ try:
964
+ user, rest = rest.split(b"\0", 1)
965
+ pw, _tail = rest.split(b"\0", 1)
966
+ return (user, pw)
967
+ except Exception:
968
+ return (None, None)
969
+
970
+ # ---------- URL helpers ----------
971
+ def _qflag(qs, key, default=False):
972
+ v = qs.get(key, [None])[0]
973
+ if v is None: return bool(default)
974
+ return _to_text(v).lower() in ("1", "true", "yes", "on")
975
+
976
+ def _qnum(qs, key, default=None, cast=float):
977
+ v = qs.get(key, [None])[0]
978
+ if v is None or v == "": return default
979
+ try: return cast(v)
980
+ except Exception: return default
981
+
982
+ def _qstr(qs, key, default=None):
983
+ v = qs.get(key, [None])[0]
984
+ if v is None: return default
985
+ return v
986
+
987
+ def _parse_net_url(url):
988
+ """
989
+ Parse tcp:// / udp:// URL and extract transport options.
990
+ Returns (parts, opts)
991
+ """
992
+ parts = urlparse(url)
993
+ qs = parse_qs(parts.query or "")
994
+
995
+ proto = parts.scheme.lower()
996
+ if proto not in ("tcp", "udp"):
997
+ raise ValueError("Only tcp:// or udp:// supported here")
998
+
999
+ user = unquote(parts.username) if parts.username else None
1000
+ pw = unquote(parts.password) if parts.password else None
1001
+
1002
+ use_ssl = _qflag(qs, "ssl", False) if proto == "tcp" else False
1003
+ ssl_verify = _qflag(qs, "verify", True)
1004
+ ssl_ca_file = _qstr(qs, "ca", None)
1005
+ ssl_cert = _qstr(qs, "cert", None)
1006
+ ssl_key = _qstr(qs, "key", None)
1007
+
1008
+ timeout = _qnum(qs, "timeout", None, float)
1009
+ total_timeout = _qnum(qs, "total_timeout", None, float)
1010
+ chunk_size = int(_qnum(qs, "chunk", 65536, float))
1011
+
1012
+ force_auth = _qflag(qs, "auth", False)
1013
+ want_sha = _qflag(qs, "sha", True) # <— NEW: default compute sha
1014
+
1015
+ opts = dict(
1016
+ proto=proto,
1017
+ host=parts.hostname or "127.0.0.1",
1018
+ port=int(parts.port or 0),
1019
+
1020
+ user=user, pw=pw, force_auth=force_auth,
1021
+
1022
+ use_ssl=use_ssl, ssl_verify=ssl_verify,
1023
+ ssl_ca_file=ssl_ca_file, ssl_certfile=ssl_cert, ssl_keyfile=ssl_key,
1024
+
1025
+ timeout=timeout, total_timeout=total_timeout, chunk_size=chunk_size,
1026
+
1027
+ server_hostname=parts.hostname or None,
1028
+
1029
+ # new option
1030
+ want_sha=want_sha,
1031
+
1032
+ # convenience (used as scope in AF1)
1033
+ path=(parts.path or u""),
1034
+ )
1035
+ return parts, opts
1036
+
1037
+ def _rewrite_url_without_auth(url):
1038
+ u = urlparse(url)
1039
+ netloc = u.hostname or ''
1040
+ if u.port:
1041
+ netloc += ':' + str(u.port)
1042
+ rebuilt = urlunparse((u.scheme, netloc, u.path, u.params, u.query, u.fragment))
1043
+ usr = unquote(u.username) if u.username else ''
1044
+ pwd = unquote(u.password) if u.password else ''
1045
+ return rebuilt, usr, pwd
1046
+
1047
+ def _guess_filename(url, filename):
1048
+ if filename:
1049
+ return filename
1050
+ path = urlparse(url).path or ''
1051
+ base = os.path.basename(path)
1052
+ return base or 'OutFile.'+__file_format_extension__
1053
+
1054
+ # ---- progress + rate limiting helpers ----
1055
+ try:
1056
+ monotonic = time.monotonic # Py3
1057
+ except Exception:
1058
+ # Py2 fallback: time.time() is good enough for coarse throttling
1059
+ monotonic = time.time
1060
+
1061
+ def _progress_tick(now_bytes, total_bytes, last_ts, last_bytes, rate_limit_bps, min_interval=0.1):
1062
+ """
1063
+ Returns (sleep_seconds, new_last_ts, new_last_bytes).
1064
+ - If rate_limit_bps is set, computes how long to sleep to keep average <= limit.
1065
+ - Also enforces a minimum interval between progress callbacks (handled by caller).
1066
+ """
1067
+ now = monotonic()
1068
+ elapsed = max(1e-9, now - last_ts)
1069
+ # Desired time to have elapsed for the given rate:
1070
+ desired = (now_bytes - last_bytes) / float(rate_limit_bps) if rate_limit_bps else 0.0
1071
+ extra = desired - elapsed
1072
+ return (max(0.0, extra), now, now_bytes)
1073
+
1074
+ def _discover_len_and_reset(fobj):
1075
+ """
1076
+ Try hard to get total length and restore original position.
1077
+ Returns (length_or_None, start_pos_or_None).
1078
+ Works with seekable files and BytesIO; leaves stream position unchanged.
1079
+ """
1080
+ # Generic seek/tell
1081
+ try:
1082
+ pos0 = fobj.tell()
1083
+ fobj.seek(0, os.SEEK_END)
1084
+ end = fobj.tell()
1085
+ fobj.seek(pos0, os.SEEK_SET)
1086
+ if end is not None and pos0 is not None and end >= pos0:
1087
+ return (end - pos0, pos0)
1088
+ except Exception:
1089
+ pass
1090
+
1091
+ # BytesIO fast path
1092
+ try:
1093
+ getvalue = getattr(fobj, "getvalue", None)
1094
+ if callable(getvalue):
1095
+ buf = getvalue()
1096
+ L = len(buf)
1097
+ try:
1098
+ pos0 = fobj.tell()
1099
+ except Exception:
1100
+ pos0 = 0
1101
+ return (max(0, L - pos0), pos0)
1102
+ except Exception:
1103
+ pass
1104
+
1105
+ # Memoryview/getbuffer
1106
+ try:
1107
+ getbuffer = getattr(fobj, "getbuffer", None)
1108
+ if callable(getbuffer):
1109
+ mv = getbuffer()
1110
+ L = len(mv)
1111
+ try:
1112
+ pos0 = fobj.tell()
1113
+ except Exception:
1114
+ pos0 = 0
1115
+ return (max(0, L - pos0), pos0)
1116
+ except Exception:
1117
+ pass
1118
+
1119
+ return (None, None)
1120
+
1121
+
660
1122
  def DetectTarBombFoxFileArray(listarrayfiles,
661
1123
  top_file_ratio_threshold=0.6,
662
1124
  min_members_for_ratio=4,
@@ -2781,6 +3243,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
2781
3243
  break
2782
3244
  flist.append(HeaderOut)
2783
3245
  countnum = countnum + 1
3246
+ outlist.update({'fp': fp})
2784
3247
  return flist
2785
3248
 
2786
3249
 
@@ -3290,6 +3753,41 @@ def ReadInMultipleFilesWithContentToArray(infile, fmttype="auto", filestart=0, s
3290
3753
  return ReadInMultipleFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
3291
3754
 
3292
3755
 
3756
+ def ReadInStackedFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
3757
+ outretval = []
3758
+ outstartfile = filestart
3759
+ outfsize = float('inf')
3760
+ while True:
3761
+ if outstartfile >= outfsize: # stop when function signals False
3762
+ break
3763
+ outarray = FoxFileToArray(infile, fmttype, outstartfile, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, True)
3764
+ outfsize = outarray['fsize']
3765
+ if outarray is False: # stop when function signals False
3766
+ break
3767
+ infile = outarray['fp']
3768
+ outstartfile = infile.tell()
3769
+ outretval.append(outarray)
3770
+ return outretval
3771
+
3772
+
3773
+ def ReadInStackedFilesWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
3774
+ return ReadInStackedFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
3775
+
3776
+
3777
+ def ReadInMultipleStackedFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
3778
+ if(isinstance(infile, (list, tuple, ))):
3779
+ pass
3780
+ else:
3781
+ infile = [infile]
3782
+ outretval = {}
3783
+ for curfname in infile:
3784
+ outretval[curfname] = ReadInStackedFileWithContentToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
3785
+ return outretval
3786
+
3787
+ def ReadInMultipleStackedFilesWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
3788
+ return ReadInMultipleStackedFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
3789
+
3790
+
3293
3791
  def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
3294
3792
  if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
3295
3793
  formatspecs = formatspecs[fmttype]
@@ -3471,7 +3969,7 @@ def ReadInMultipleFileWithContentToList(infile, fmttype="auto", filestart=0, see
3471
3969
  infile = [infile]
3472
3970
  outretval = {}
3473
3971
  for curfname in infile:
3474
- curretfile[curfname] = ReadInFileWithContentToList(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
3972
+ outretval[curfname] = ReadInFileWithContentToList(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
3475
3973
  return outretval
3476
3974
 
3477
3975
  def ReadInMultipleFilesWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
@@ -4040,12 +4538,6 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, filevalues=[], ext
4040
4538
  fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
4041
4539
  AppendFileHeaderWithContent(
4042
4540
  fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
4043
- if(numfiles > 0):
4044
- try:
4045
- fp.write(AppendNullBytes(
4046
- ["0", "0"], formatspecs['format_delimiter']))
4047
- except OSError:
4048
- return False
4049
4541
  fp.seek(0, 0)
4050
4542
  return fp
4051
4543
 
@@ -4109,12 +4601,6 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, filevalues=[], extr
4109
4601
  fcontents.seek(0, 0)
4110
4602
  AppendFileHeaderWithContent(
4111
4603
  fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
4112
- if(numfiles > 0):
4113
- try:
4114
- fp.write(AppendNullBytes(
4115
- ["0", "0"], formatspecs['format_delimiter']))
4116
- except OSError:
4117
- return False
4118
4604
  return fp
4119
4605
 
4120
4606
 
@@ -5507,12 +5993,6 @@ def PackFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compress
5507
5993
  AppendFileHeaderWithContent(
5508
5994
  fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
5509
5995
  fcontents.close()
5510
- if(numfiles > 0):
5511
- try:
5512
- fp.write(AppendNullBytes(
5513
- ["0", "0"], formatspecs['format_delimiter']))
5514
- except OSError:
5515
- return False
5516
5996
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
5517
5997
  fp = CompressOpenFileAlt(
5518
5998
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -5808,12 +6288,6 @@ def PackFoxFileFromTarFile(infile, outfile, fmttype="auto", compression="auto",
5808
6288
  AppendFileHeaderWithContent(
5809
6289
  fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
5810
6290
  fcontents.close()
5811
- if(numfiles > 0):
5812
- try:
5813
- fp.write(AppendNullBytes(
5814
- ["0", "0"], formatspecs['format_delimiter']))
5815
- except OSError:
5816
- return False
5817
6291
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
5818
6292
  fp = CompressOpenFileAlt(
5819
6293
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -6102,12 +6576,6 @@ def PackFoxFileFromZipFile(infile, outfile, fmttype="auto", compression="auto",
6102
6576
  AppendFileHeaderWithContent(
6103
6577
  fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
6104
6578
  fcontents.close()
6105
- if(numfiles > 0):
6106
- try:
6107
- fp.write(AppendNullBytes(
6108
- ["0", "0"], formatspecs['format_delimiter']))
6109
- except OSError:
6110
- return False
6111
6579
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
6112
6580
  fp = CompressOpenFileAlt(
6113
6581
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -6422,12 +6890,6 @@ if(rarfile_support):
6422
6890
  AppendFileHeaderWithContent(
6423
6891
  fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
6424
6892
  fcontents.close()
6425
- if(numfiles > 0):
6426
- try:
6427
- fp.write(AppendNullBytes(
6428
- ["0", "0"], formatspecs['format_delimiter']))
6429
- except OSError:
6430
- return False
6431
6893
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
6432
6894
  fp = CompressOpenFileAlt(
6433
6895
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -6676,12 +7138,6 @@ if(py7zr_support):
6676
7138
  AppendFileHeaderWithContent(
6677
7139
  fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
6678
7140
  fcontents.close()
6679
- if(numfiles > 0):
6680
- try:
6681
- fp.write(AppendNullBytes(
6682
- ["0", "0"], formatspecs['format_delimiter']))
6683
- except OSError:
6684
- return False
6685
7141
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
6686
7142
  fp = CompressOpenFileAlt(
6687
7143
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7071,24 +7527,73 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_form
7071
7527
  return False
7072
7528
 
7073
7529
 
7074
- def FoxFileValidateFile(infile, fmttype="auto", formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7075
- return FoxFileValidate(infile, fmttype, formatspecs, verbose, returnfp)
7530
+ def FoxFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
7531
+ return FoxFileValidate(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
7532
+
7533
+
7534
+ def FoxFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
7535
+ if(isinstance(infile, (list, tuple, ))):
7536
+ pass
7537
+ else:
7538
+ infile = [infile]
7539
+ outretval = True
7540
+ for curfname in infile:
7541
+ curretfile = FoxFileValidate(curfname, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
7542
+ if(not curretfile):
7543
+ outretval = False
7544
+ return outretval
7545
+
7546
+ def FoxFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
7547
+ return FoxFileValidateMultiple(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
7548
+
7549
+
7550
+ def StackedFoxFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
7551
+ outretval = []
7552
+ outstartfile = filestart
7553
+ outfsize = float('inf')
7554
+ while True:
7555
+ if outstartfile >= outfsize: # stop when function signals False
7556
+ break
7557
+ is_valid_file = FoxFileValidate(infile, fmttype, filestart, formatspecs, seektoend, verbose, True)
7558
+ if is_valid_file is False: # stop when function signals False
7559
+ outretval.append(is_valid_file)
7560
+ else:
7561
+ outretval.append(True)
7562
+ infile = is_valid_file
7563
+ outstartfile = infile.tell()
7564
+ try:
7565
+ infile.seek(0, 2)
7566
+ except OSError:
7567
+ SeekToEndOfFile(infile)
7568
+ except ValueError:
7569
+ SeekToEndOfFile(infile)
7570
+ outfsize = infile.tell()
7571
+ infile.seek(outstartfile, 0)
7572
+ if(returnfp):
7573
+ return infile
7574
+ else:
7575
+ infile.close()
7576
+ return outretval
7577
+
7076
7578
 
7579
+ def StackedFoxFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
7580
+ return StackedFoxFileValidate(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
7077
7581
 
7078
- def FoxFileValidateMultiple(infile, fmttype="auto", formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7582
+
7583
+ def StackedFoxFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
7079
7584
  if(isinstance(infile, (list, tuple, ))):
7080
7585
  pass
7081
7586
  else:
7082
7587
  infile = [infile]
7083
7588
  outretval = True
7084
7589
  for curfname in infile:
7085
- curretfile = FoxFileValidate(curfname, fmttype, formatspecs, verbose, returnfp)
7590
+ curretfile = StackedFoxFileValidate(curfname, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
7086
7591
  if(not curretfile):
7087
7592
  outretval = False
7088
7593
  return outretval
7089
7594
 
7090
- def FoxFileValidateMultipleFiles(infile, fmttype="auto", formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7091
- return FoxFileValidateMultiple(infile, fmttype, formatspecs, verbose, returnfp)
7595
+ def StackedFoxFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
7596
+ return StackedFoxFileValidateMultiple(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
7092
7597
 
7093
7598
  def FoxFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
7094
7599
  if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
@@ -7101,20 +7606,20 @@ def FoxFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0,
7101
7606
  fp = infile
7102
7607
  fp.seek(filestart, 0)
7103
7608
  fp = UncompressFileAlt(fp, formatspecs, filestart)
7104
- checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
7105
- if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
7106
- formatspecs = formatspecs[checkcompressfile]
7107
- if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
7609
+ compresscheck = CheckCompressionSubType(fp, formatspecs, filestart, True)
7610
+ if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
7611
+ formatspecs = formatspecs[compresscheck]
7612
+ if(compresscheck == "tarfile" and TarFileCheck(infile)):
7108
7613
  return TarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
7109
- elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
7614
+ elif(compresscheck == "zipfile" and zipfile.is_zipfile(infile)):
7110
7615
  return ZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
7111
- elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
7616
+ elif(rarfile_support and compresscheck == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
7112
7617
  return RarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
7113
- elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
7618
+ elif(py7zr_support and compresscheck == "7zipfile" and py7zr.is_7zfile(infile)):
7114
7619
  return SevenZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
7115
- elif(IsSingleDict(formatspecs) and checkcompressfile != formatspecs['format_magic']):
7620
+ elif(IsSingleDict(formatspecs) and compresscheck != formatspecs['format_magic']):
7116
7621
  return False
7117
- elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
7622
+ elif(IsNestedDict(formatspecs) and compresscheck not in formatspecs):
7118
7623
  return False
7119
7624
  if(not fp):
7120
7625
  return False
@@ -7127,9 +7632,9 @@ def FoxFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0,
7127
7632
  shutil.copyfileobj(sys.stdin, fp)
7128
7633
  fp.seek(filestart, 0)
7129
7634
  fp = UncompressFileAlt(fp, formatspecs, filestart)
7130
- checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
7131
- if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
7132
- formatspecs = formatspecs[checkcompressfile]
7635
+ compresscheck = CheckCompressionSubType(fp, formatspecs, filestart, True)
7636
+ if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
7637
+ formatspecs = formatspecs[compresscheck]
7133
7638
  if(not fp):
7134
7639
  return False
7135
7640
  fp.seek(filestart, 0)
@@ -7156,20 +7661,20 @@ def FoxFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0,
7156
7661
  fp.seek(filestart, 0)
7157
7662
  else:
7158
7663
  infile = RemoveWindowsPath(infile)
7159
- checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
7160
- if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
7161
- formatspecs = formatspecs[checkcompressfile]
7162
- if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
7664
+ compresscheck = CheckCompressionSubType(infile, formatspecs, filestart, True)
7665
+ if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
7666
+ formatspecs = formatspecs[compresscheck]
7667
+ if(compresscheck == "tarfile" and TarFileCheck(infile)):
7163
7668
  return TarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
7164
- elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
7669
+ elif(compresscheck == "zipfile" and zipfile.is_zipfile(infile)):
7165
7670
  return ZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
7166
- elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
7671
+ elif(rarfile_support and compresscheck == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
7167
7672
  return RarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
7168
- elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
7673
+ elif(py7zr_support and compresscheck == "7zipfile" and py7zr.is_7zfile(infile)):
7169
7674
  return SevenZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
7170
- elif(IsSingleDict(formatspecs) and checkcompressfile != formatspecs['format_magic']):
7675
+ elif(IsSingleDict(formatspecs) and compresscheck != formatspecs['format_magic']):
7171
7676
  return False
7172
- elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
7677
+ elif(IsNestedDict(formatspecs) and compresscheck not in formatspecs):
7173
7678
  return False
7174
7679
  compresscheck = CheckCompressionType(infile, formatspecs, filestart, True)
7175
7680
  if(not compresscheck):
@@ -7548,6 +8053,7 @@ def FoxFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0,
7548
8053
  outlist.update({'fp': fp})
7549
8054
  else:
7550
8055
  fp.close()
8056
+ outlist.update({'fp': None})
7551
8057
  return outlist
7552
8058
 
7553
8059
 
@@ -7558,13 +8064,48 @@ def MultipleFoxFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, see
7558
8064
  infile = [infile]
7559
8065
  outretval = {}
7560
8066
  for curfname in infile:
7561
- curretfile[curfname] = FoxFileToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
8067
+ outretval[curfname] = FoxFileToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
7562
8068
  return outretval
7563
8069
 
7564
8070
def MultipleFoxFilesToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
    """Plural-named alias; delegates to MultipleFoxFileToArray unchanged."""
    mapping = MultipleFoxFileToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
    return mapping
7566
8072
 
7567
8073
 
8074
def StackedFoxFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
    """
    Read every archive stacked back-to-back inside a single file.

    Repeatedly calls FoxFileToArray starting at 'filestart', then resumes
    at the stream position left by the previous read, until the end of the
    data (or a failed parse) is reached.

    Returns a list of per-archive result dicts; each dict's 'fp' entry is
    reset to None unless returnfp is True.
    """
    outretval = []
    outstartfile = filestart
    outfsize = float('inf')
    while outstartfile < outfsize:
        outarray = FoxFileToArray(infile, fmttype, outstartfile, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, True)
        # Bug fix: check for failure BEFORE subscripting the result; the
        # original did outarray['fsize'] first, raising TypeError when
        # FoxFileToArray returned False.
        if outarray is False:
            break
        outfsize = outarray['fsize']
        infile = outarray['fp']
        outstartfile = infile.tell()
        if not returnfp:
            outarray.update({"fp": None})
        outretval.append(outarray)
    # Bug fix: only close when we actually hold an open file object; if the
    # very first read failed, 'infile' may still be the caller's path string.
    if not returnfp and hasattr(infile, "close"):
        infile.close()
    return outretval
8093
+
8094
+
8095
def MultipleStackedFoxFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
    """Run StackedFoxFileToArray over one or many inputs; returns {input: result}."""
    infiles = infile if isinstance(infile, (list, tuple)) else [infile]
    return {
        curfname: StackedFoxFileToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
        for curfname in infiles
    }
8104
+
8105
def MultipleStackedFoxFilesToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
    """Plural-named alias; delegates to MultipleStackedFoxFileToArray unchanged."""
    result = MultipleStackedFoxFileToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
    return result
8107
+
8108
+
7568
8109
  def FoxFileStringToArray(instr, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
7569
8110
  checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
7570
8111
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
@@ -7650,74 +8191,126 @@ def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default_
7650
8191
  outarray = MkTempFile()
7651
8192
  packform = PackFoxFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
7652
8193
  compressionlevel, followlink, checksumtype, extradata, formatspecs, verbose, True)
7653
- listarrayfiles = FoxFileToArray(outarray, "auto", filestart, seekstart, seekend, listonly, True, skipchecksum, formatspecs, seektoend, returnfp)
8194
+ listarrayfiles = FoxFileToArray(outarray, "auto", filestart, seekstart, seekend, listonly, True, True, skipchecksum, formatspecs, seektoend, returnfp)
7654
8195
  return listarrayfiles
7655
8196
 
7656
8197
 
8198
# ===== Function (keeps inarray schema; returns entries + indexes) =====

def FoxFileArrayToArrayIndex(inarray, returnfp=False):
    """
    Build a bidirectional index over an archive listing while preserving the
    input 'inarray' as-is. Python 2/3 compatible, no external deps.

    Input (unchanged contract):
      inarray: dict with at least:
        - 'ffilelist': list of dicts: {'fname': <str>, 'fid': <any>, 'ftype': <int>}
        - 'fnumfiles': int (expected count)
        - optional 'fp': any (passed through if returnfp=True)

    Output structure:
      {
        'list': inarray,              # alias to original input (not copied)
        'fp': inarray.get('fp') or None,
        'entries': { fid: {'name': fname, 'type': ftype} },
        'indexes': {
          'by_name': { fname: fid },
          'by_type': {
            <category>: {
              'by_name': { fname: fid },
              'by_id': { fid: fname },
              'count': <int>
            }, ...
          }
        },
        'counts': {
          'total': <int>,
          'by_type': { <category>: <int>, ... }
        },
        'unknown_types': { <ftype_int>: [fname, ...] }
      }
    """
    if not isinstance(inarray, dict):
        return False
    if not inarray:
        return False

    # Buckets for categories
    def _bucket():
        return {"by_name": {}, "by_id": {}, "count": 0}

    # NOTE(review): CATEGORY_ORDER, BASE_CATEGORY_BY_CODE and UNION_RULES are
    # module-level tables defined elsewhere in this file — confirm they cover
    # every ftype code this format emits.
    by_type = {}
    for cat in CATEGORY_ORDER:
        by_type[cat] = _bucket()

    out = {
        "list": inarray,
        "fp": inarray.get("fp") if returnfp else None,
        "entries": {},
        "indexes": {
            "by_name": {},
            "by_type": by_type,
        },
        "counts": {"total": 0, "by_type": {}},
        "unknown_types": {},
    }

    ffilelist = inarray.get("ffilelist") or []
    try:
        fnumfiles = int(inarray.get("fnumfiles", len(ffilelist)))
    except Exception:
        fnumfiles = len(ffilelist)

    # Process only what's present
    # NOTE(review): unlike the legacy implementation (which processed the
    # whole ffilelist when the count disagreed with fnumfiles), entries
    # beyond min(len, fnumfiles) are silently ignored here.
    total = min(len(ffilelist), fnumfiles)

    def _add(cat, name, fid):
        # Register one entry in a category bucket, both directions.
        b = by_type[cat]
        b["by_name"][name] = fid
        b["by_id"][fid] = name
        # Count is number of unique names in this category
        b["count"] = len(b["by_name"])

    i = 0
    while i < total:
        e = ffilelist[i]
        name = e.get("fname")
        fid = e.get("fid")
        t = e.get("ftype")

        # Skip malformed entries that miss any of the three required keys.
        if name is None or fid is None or t is None:
            i += 1
            continue

        # Store canonical entry once, keyed by fid
        out["entries"][fid] = {"name": name, "type": t}

        # Global reverse index for fast name -> id
        out["indexes"]["by_name"][name] = fid

        # Base category
        base_cat = BASE_CATEGORY_BY_CODE.get(t)
        if base_cat is not None:
            _add(base_cat, name, fid)
        else:
            # Track unknown codes for visibility/forward-compat
            lst = out["unknown_types"].setdefault(t, [])
            if name not in lst:
                lst.append(name)

        # Union categories
        for union_name, code_set in UNION_RULES:
            if t in code_set:
                _add(union_name, name, fid)

        i += 1

    # Counts
    out["counts"]["total"] = total
    for cat in CATEGORY_ORDER:
        out["counts"]["by_type"][cat] = by_type[cat]["count"]

    return out
7721
8314
 
7722
8315
 
7723
8316
  def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
@@ -7726,7 +8319,7 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
7726
8319
  else:
7727
8320
  if(infile != "-" and not isinstance(infile, bytes) and not hasattr(infile, "read") and not hasattr(infile, "write")):
7728
8321
  infile = RemoveWindowsPath(infile)
7729
- listarrayfiles = FoxFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, skipchecksum, formatspecs, seektoend, returnfp)
8322
+ listarrayfiles = FoxFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, True, skipchecksum, formatspecs, seektoend, returnfp)
7730
8323
  if(IsNestedDict(formatspecs) and fmttype in formatspecs):
7731
8324
  formatspecs = formatspecs[fmttype]
7732
8325
  elif(IsNestedDict(formatspecs) and fmttype not in formatspecs):
@@ -7839,11 +8432,11 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
7839
8432
  fdev_major = format(
7840
8433
  int(listarrayfiles['ffilelist'][reallcfi]['fmajor']), 'x').lower()
7841
8434
  fseeknextfile = listarrayfiles['ffilelist'][reallcfi]['fseeknextfile']
7842
- if(len(listarrayfiles['ffilelist'][reallcfi]['fextralist']) > listarrayfiles['ffilelist'][reallcfi]['fextrafields'] and len(listarrayfiles['ffilelist'][reallcfi]['fextralist']) > 0):
8435
+ if(len(listarrayfiles['ffilelist'][reallcfi]['fextradata']) > listarrayfiles['ffilelist'][reallcfi]['fextrafields'] and len(listarrayfiles['ffilelist'][reallcfi]['fextradata']) > 0):
7843
8436
  listarrayfiles['ffilelist'][reallcfi]['fextrafields'] = len(
7844
- listarrayfiles['ffilelist'][reallcfi]['fextralist'])
8437
+ listarrayfiles['ffilelist'][reallcfi]['fextradata'])
7845
8438
  if(not followlink and len(extradata) <= 0):
7846
- extradata = listarrayfiles['ffilelist'][reallcfi]['fextralist']
8439
+ extradata = listarrayfiles['ffilelist'][reallcfi]['fextradata']
7847
8440
  if(not followlink and len(jsondata) <= 0):
7848
8441
  jsondata = listarrayfiles['ffilelist'][reallcfi]['fjsondata']
7849
8442
  fcontents = listarrayfiles['ffilelist'][reallcfi]['fcontents']
@@ -7922,10 +8515,10 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
7922
8515
  fdev_minor = format(int(flinkinfo['fminor']), 'x').lower()
7923
8516
  fdev_major = format(int(flinkinfo['fmajor']), 'x').lower()
7924
8517
  fseeknextfile = flinkinfo['fseeknextfile']
7925
- if(len(flinkinfo['fextralist']) > flinkinfo['fextrafields'] and len(flinkinfo['fextralist']) > 0):
7926
- flinkinfo['fextrafields'] = len(flinkinfo['fextralist'])
8518
+ if(len(flinkinfo['fextradata']) > flinkinfo['fextrafields'] and len(flinkinfo['fextradata']) > 0):
8519
+ flinkinfo['fextrafields'] = len(flinkinfo['fextradata'])
7927
8520
  if(len(extradata) < 0):
7928
- extradata = flinkinfo['fextralist']
8521
+ extradata = flinkinfo['fextradata']
7929
8522
  if(len(jsondata) < 0):
7930
8523
  extradata = flinkinfo['fjsondata']
7931
8524
  fcontents = flinkinfo['fcontents']
@@ -7957,12 +8550,6 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
7957
8550
  fcontents.close()
7958
8551
  lcfi = lcfi + 1
7959
8552
  reallcfi = reallcfi + 1
7960
- if(lcfx > 0):
7961
- try:
7962
- fp.write(AppendNullBytes(
7963
- ["0", "0"], formatspecs['format_delimiter']))
7964
- except OSError:
7965
- return False
7966
8553
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
7967
8554
  fp = CompressOpenFileAlt(
7968
8555
  fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -8026,7 +8613,7 @@ def UnPackFoxFile(infile, outdir=None, followlink=False, filestart=0, seekstart=
8026
8613
  else:
8027
8614
  if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
8028
8615
  infile = RemoveWindowsPath(infile)
8029
- listarrayfiles = FoxFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, skipchecksum, formatspecs, seektoend, returnfp)
8616
+ listarrayfiles = FoxFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, True, skipchecksum, formatspecs, seektoend, returnfp)
8030
8617
  if(not listarrayfiles):
8031
8618
  return False
8032
8619
  lenlist = len(listarrayfiles['ffilelist'])
@@ -8274,9 +8861,9 @@ def UnPackFoxFile(infile, outdir=None, followlink=False, filestart=0, seekstart=
8274
8861
  return True
8275
8862
 
8276
8863
 
8277
def UnPackFoxFileString(instr, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
    """Unpack an archive supplied as an in-memory string/bytes value."""
    # Wrap the raw data in a temporary file object and delegate.
    return UnPackFoxFile(MkTempFile(instr), outdir, followlink, filestart, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
8281
8868
 
8282
8869
  def ftype_to_str(ftype):
@@ -8347,10 +8934,60 @@ def FoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0
8347
8934
  return True
8348
8935
 
8349
8936
 
8350
- def FoxFileStringListFiles(instr, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
8937
def MultipleFoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False, verbose=False, newstyle=False):
    """
    List the contents of one or more archives; returns {input: result}.

    Bug fixes versus the original body:
      * it passed the whole 'infile' list (not the current 'curfname') to
        FoxFileListFiles, and
      * it referenced the undefined names 'verbose'/'newstyle' (NameError);
        both are now real trailing keyword parameters, appended after the
        original parameters so existing positional callers keep working.
    """
    if not isinstance(infile, (list, tuple)):
        infile = [infile]
    outretval = {}
    for curfname in infile:
        outretval[curfname] = FoxFileListFiles(curfname, fmttype, filestart, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, returnfp)
    return outretval
8946
+
8947
+
8948
def StackedArchiveListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
    """
    List the contents of every archive stacked back-to-back in 'infile'.

    Walks the data from 'filestart', listing one archive per iteration and
    resuming at the position where the previous archive ended.

    Returns the open file object when returnfp is True, otherwise a list
    with one True entry per archive listed.
    """
    outretval = []
    outstartfile = filestart
    outfsize = float('inf')
    while outstartfile < outfsize:
        # Bug fix: call this module's own lister; the original referenced
        # 'ArchiveFileListFiles', a name left over from the template this
        # file was generated from (NameError here).
        list_file_retu = FoxFileListFiles(infile, fmttype, outstartfile, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, True)
        # Bug fix: stop on failure; the original appended False and then
        # called .tell() on it (AttributeError).
        if list_file_retu is False:
            break
        outretval.append(True)
        infile = list_file_retu
        outstartfile = infile.tell()
        # Determine the total size so the loop knows when to stop.
        try:
            infile.seek(0, 2)
        except (OSError, ValueError):
            SeekToEndOfFile(infile)
        outfsize = infile.tell()
        infile.seek(outstartfile, 0)
    if returnfp:
        return infile
    # Only close when we actually hold an open file object; on immediate
    # failure 'infile' may still be the caller's path string.
    if hasattr(infile, "close"):
        infile.close()
    return outretval
8975
+
8976
+
8977
def MultipleStackedFoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False, verbose=False, newstyle=False):
    """
    Run StackedArchiveListFiles over one or more inputs; returns {input: result}.

    Bug fixes versus the original body: it forwarded listonly/contentasfile/
    uncompress into StackedArchiveListFiles, whose signature has no such
    parameters (TypeError: too many positional arguments), and it hard-coded
    returnfp=True.  'verbose'/'newstyle' are new trailing keyword parameters
    (backward-compatible) forwarded to the lister.
    """
    if not isinstance(infile, (list, tuple)):
        infile = [infile]
    outretval = {}
    for curfname in infile:
        outretval[curfname] = StackedArchiveListFiles(curfname, fmttype, filestart, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, returnfp)
    return outretval
8986
+
8987
+
8988
def FoxFileStringListFiles(instr, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
    """
    List the contents of an archive supplied as an in-memory string/bytes value.

    Bug fix: the original wrapped 'instr' in a temp file object but then
    passed the raw string (instead of the temp file) to FoxFileListFiles,
    leaving the temp file unused.  Mirrors UnPackFoxFileString.
    """
    fp = MkTempFile(instr)
    listarrayfiles = FoxFileListFiles(fp, "auto", filestart, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, returnfp)
    return listarrayfiles
8355
8992
 
8356
8993
 
@@ -8942,11 +9579,11 @@ def download_file_from_ftp_file(url):
8942
9579
  file_name = os.path.basename(unquote(urlparts.path))
8943
9580
  file_dir = os.path.dirname(unquote(urlparts.path))
8944
9581
  if(urlparts.username is not None):
8945
- ftp_username = urlparts.username
9582
+ ftp_username = unquote(urlparts.username)
8946
9583
  else:
8947
9584
  ftp_username = "anonymous"
8948
9585
  if(urlparts.password is not None):
8949
- ftp_password = urlparts.password
9586
+ ftp_password = unquote(urlparts.password)
8950
9587
  elif(urlparts.password is None and urlparts.username == "anonymous"):
8951
9588
  ftp_password = "anonymous"
8952
9589
  else:
@@ -8957,13 +9594,6 @@ def download_file_from_ftp_file(url):
8957
9594
  ftp = FTP_TLS()
8958
9595
  else:
8959
9596
  return False
8960
- if(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
8961
- if(__use_pysftp__):
8962
- return download_file_from_pysftp_file(url)
8963
- else:
8964
- return download_file_from_sftp_file(url)
8965
- elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
8966
- return download_file_from_http_file(url)
8967
9597
  ftp_port = urlparts.port
8968
9598
  if(urlparts.port is None):
8969
9599
  ftp_port = 21
@@ -9040,11 +9670,11 @@ def upload_file_to_ftp_file(ftpfile, url):
9040
9670
  file_name = os.path.basename(unquote(urlparts.path))
9041
9671
  file_dir = os.path.dirname(unquote(urlparts.path))
9042
9672
  if(urlparts.username is not None):
9043
- ftp_username = urlparts.username
9673
+ ftp_username = unquote(urlparts.username)
9044
9674
  else:
9045
9675
  ftp_username = "anonymous"
9046
9676
  if(urlparts.password is not None):
9047
- ftp_password = urlparts.password
9677
+ ftp_password = unquote(urlparts.password)
9048
9678
  elif(urlparts.password is None and urlparts.username == "anonymous"):
9049
9679
  ftp_password = "anonymous"
9050
9680
  else:
@@ -9055,13 +9685,6 @@ def upload_file_to_ftp_file(ftpfile, url):
9055
9685
  ftp = FTP_TLS()
9056
9686
  else:
9057
9687
  return False
9058
- if(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
9059
- if(__use_pysftp__):
9060
- return upload_file_to_pysftp_file(url)
9061
- else:
9062
- return upload_file_to_sftp_file(url)
9063
- elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
9064
- return False
9065
9688
  ftp_port = urlparts.port
9066
9689
  if(urlparts.port is None):
9067
9690
  ftp_port = 21
@@ -9159,8 +9782,8 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
9159
9782
  if headers is None:
9160
9783
  headers = {}
9161
9784
  urlparts = urlparse(url)
9162
- username = urlparts.username
9163
- password = urlparts.password
9785
+ username = unquote(urlparts.username)
9786
+ password = unquote(urlparts.password)
9164
9787
 
9165
9788
  # Rebuild URL without username and password
9166
9789
  netloc = urlparts.hostname or ''
@@ -9169,15 +9792,6 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
9169
9792
  rebuilt_url = urlunparse((urlparts.scheme, netloc, urlparts.path,
9170
9793
  urlparts.params, urlparts.query, urlparts.fragment))
9171
9794
 
9172
- # Handle SFTP/FTP
9173
- if urlparts.scheme == "sftp" or urlparts.scheme == "scp":
9174
- if __use_pysftp__:
9175
- return download_file_from_pysftp_file(url)
9176
- else:
9177
- return download_file_from_sftp_file(url)
9178
- elif urlparts.scheme == "ftp" or urlparts.scheme == "ftps":
9179
- return download_file_from_ftp_file(url)
9180
-
9181
9795
  # Create a temporary file object
9182
9796
  httpfile = MkTempFile()
9183
9797
 
@@ -9241,6 +9855,184 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
9241
9855
  return httpfile
9242
9856
 
9243
9857
 
9858
+ def upload_file_to_http_file(
9859
+ fileobj,
9860
+ url,
9861
+ method="POST", # "POST" or "PUT"
9862
+ headers=None,
9863
+ form=None, # dict of extra form fields → triggers multipart/form-data
9864
+ field_name="file", # form field name for the file content
9865
+ filename=None, # defaults to basename of URL path
9866
+ content_type="application/octet-stream",
9867
+ usehttp=__use_http_lib__, # 'requests' | 'httpx' | 'mechanize' | anything → urllib fallback
9868
+ ):
9869
+ """
9870
+ Py2+Py3 compatible HTTP/HTTPS upload.
9871
+
9872
+ - If `form` is provided (dict), uses multipart/form-data:
9873
+ * text fields from `form`
9874
+ * file part named by `field_name` with given `filename` and `content_type`
9875
+ - If `form` is None, uploads raw body as POST/PUT with Content-Type.
9876
+ - Returns True on HTTP 2xx, else False.
9877
+ """
9878
+ if headers is None:
9879
+ headers = {}
9880
+ method = (method or "POST").upper()
9881
+
9882
+ rebuilt_url, username, password = _rewrite_url_without_auth(url)
9883
+ filename = _guess_filename(url, filename)
9884
+
9885
+ # rewind if possible
9886
+ try:
9887
+ fileobj.seek(0)
9888
+ except Exception:
9889
+ pass
9890
+
9891
+ # ========== 1) requests (Py2+Py3) ==========
9892
+ if usehttp == 'requests' and haverequests:
9893
+ import requests
9894
+
9895
+ auth = (username, password) if (username or password) else None
9896
+
9897
+ if form is not None:
9898
+ # multipart/form-data
9899
+ files = {field_name: (filename, fileobj, content_type)}
9900
+ data = form or {}
9901
+ resp = requests.request(method, rebuilt_url, headers=headers, auth=auth,
9902
+ files=files, data=data, timeout=(5, 120))
9903
+ else:
9904
+ # raw body
9905
+ hdrs = {'Content-Type': content_type}
9906
+ hdrs.update(headers)
9907
+ # best-effort content-length (helps some servers)
9908
+ if hasattr(fileobj, 'seek') and hasattr(fileobj, 'tell'):
9909
+ try:
9910
+ cur = fileobj.tell()
9911
+ fileobj.seek(0, io.SEEK_END if hasattr(io, 'SEEK_END') else 2)
9912
+ size = fileobj.tell() - cur
9913
+ fileobj.seek(cur)
9914
+ hdrs.setdefault('Content-Length', str(size))
9915
+ except Exception:
9916
+ pass
9917
+ resp = requests.request(method, rebuilt_url, headers=hdrs, auth=auth,
9918
+ data=fileobj, timeout=(5, 300))
9919
+
9920
+ return (200 <= resp.status_code < 300)
9921
+
9922
+ # ========== 2) httpx (Py3 only) ==========
9923
+ if usehttp == 'httpx' and havehttpx and not PY2:
9924
+ import httpx
9925
+ auth = (username, password) if (username or password) else None
9926
+
9927
+ with httpx.Client(follow_redirects=True, timeout=60) as client:
9928
+ if form is not None:
9929
+ files = {field_name: (filename, fileobj, content_type)}
9930
+ data = form or {}
9931
+ resp = client.request(method, rebuilt_url, headers=headers, auth=auth,
9932
+ files=files, data=data)
9933
+ else:
9934
+ hdrs = {'Content-Type': content_type}
9935
+ hdrs.update(headers)
9936
+ resp = client.request(method, rebuilt_url, headers=hdrs, auth=auth,
9937
+ content=fileobj)
9938
+ return (200 <= resp.status_code < 300)
9939
+
9940
+ # ========== 3) mechanize (forms) → prefer requests if available ==========
9941
+ if usehttp == 'mechanize' and havemechanize:
9942
+ # mechanize is great for HTML forms, but file upload requires form discovery.
9943
+ # For a generic upload helper, prefer requests. If not available, fall through.
9944
+ try:
9945
+ import requests # noqa
9946
+ # delegate to requests path to ensure robust multipart handling
9947
+ return upload_file_to_http_file(
9948
+ fileobj, url, method=method, headers=headers,
9949
+ form=(form or {}), field_name=field_name,
9950
+ filename=filename, content_type=content_type,
9951
+ usehttp='requests'
9952
+ )
9953
+ except Exception:
9954
+ pass # fall through to urllib
9955
+
9956
+ # ========== 4) urllib fallback (Py2+Py3) ==========
9957
+ # multipart builder (no f-strings)
9958
+ boundary = ('----pyuploader-%s' % uuid.uuid4().hex)
9959
+
9960
+ if form is not None:
9961
+ # Build multipart body to a temp file-like (your MkTempFile())
9962
+ buf = MkTempFile()
9963
+
9964
+ def _w(s):
9965
+ buf.write(_to_bytes(s))
9966
+
9967
+ # text fields
9968
+ if form:
9969
+ for k, v in form.items():
9970
+ _w('--' + boundary + '\r\n')
9971
+ _w('Content-Disposition: form-data; name="%s"\r\n\r\n' % k)
9972
+ _w('' if v is None else (v if isinstance(v, (str, bytes)) else str(v)))
9973
+ _w('\r\n')
9974
+
9975
+ # file field
9976
+ _w('--' + boundary + '\r\n')
9977
+ _w('Content-Disposition: form-data; name="%s"; filename="%s"\r\n' % (field_name, filename))
9978
+ _w('Content-Type: %s\r\n\r\n' % content_type)
9979
+
9980
+ try:
9981
+ fileobj.seek(0)
9982
+ except Exception:
9983
+ pass
9984
+ shutil.copyfileobj(fileobj, buf)
9985
+
9986
+ _w('\r\n')
9987
+ _w('--' + boundary + '--\r\n')
9988
+
9989
+ buf.seek(0)
9990
+ data = buf.read()
9991
+ hdrs = {'Content-Type': 'multipart/form-data; boundary=%s' % boundary}
9992
+ hdrs.update(headers)
9993
+ req = Request(rebuilt_url, data=data)
9994
+ # method override for Py3; Py2 Request ignores 'method' kw
9995
+ if not PY2:
9996
+ req.method = method # type: ignore[attr-defined]
9997
+ else:
9998
+ # raw body
9999
+ try:
10000
+ fileobj.seek(0)
10001
+ except Exception:
10002
+ pass
10003
+ data = fileobj.read()
10004
+ hdrs = {'Content-Type': content_type}
10005
+ hdrs.update(headers)
10006
+ req = Request(rebuilt_url, data=data)
10007
+ if not PY2:
10008
+ req.method = method # type: ignore[attr-defined]
10009
+
10010
+ for k, v in hdrs.items():
10011
+ req.add_header(k, v)
10012
+
10013
+ # Basic auth if present
10014
+ if username or password:
10015
+ pwd_mgr = HTTPPasswordMgrWithDefaultRealm()
10016
+ pwd_mgr.add_password(None, rebuilt_url, username, password)
10017
+ opener = build_opener(HTTPBasicAuthHandler(pwd_mgr))
10018
+ else:
10019
+ opener = build_opener()
10020
+
10021
+ # Py2 OpenerDirector.open takes timeout since 2.6; to be safe, avoid passing if it explodes
10022
+ try:
10023
+ resp = opener.open(req, timeout=60)
10024
+ except TypeError:
10025
+ resp = opener.open(req)
10026
+
10027
+ # Status code compat
10028
+ code = getattr(resp, 'status', None) or getattr(resp, 'code', None) or 0
10029
+ try:
10030
+ resp.close()
10031
+ except Exception:
10032
+ pass
10033
+ return (200 <= int(code) < 300)
10034
+
10035
+
9244
10036
  def download_file_from_http_string(url, headers=geturls_headers_pyfile_python_alt, usehttp=__use_http_lib__):
9245
10037
  httpfile = download_file_from_http_file(url, headers, usehttp)
9246
10038
  httpout = httpfile.read()
@@ -9259,19 +10051,15 @@ if(haveparamiko):
9259
10051
  else:
9260
10052
  sftp_port = urlparts.port
9261
10053
  if(urlparts.username is not None):
9262
- sftp_username = urlparts.username
10054
+ sftp_username = unquote(urlparts.username)
9263
10055
  else:
9264
10056
  sftp_username = "anonymous"
9265
10057
  if(urlparts.password is not None):
9266
- sftp_password = urlparts.password
10058
+ sftp_password = unquote(urlparts.password)
9267
10059
  elif(urlparts.password is None and urlparts.username == "anonymous"):
9268
10060
  sftp_password = "anonymous"
9269
10061
  else:
9270
10062
  sftp_password = ""
9271
- if(urlparts.scheme == "ftp"):
9272
- return download_file_from_ftp_file(url)
9273
- elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
9274
- return download_file_from_http_file(url)
9275
10063
  if(urlparts.scheme != "sftp" and urlparts.scheme != "scp"):
9276
10064
  return False
9277
10065
  ssh = paramiko.SSHClient()
@@ -9279,7 +10067,7 @@ if(haveparamiko):
9279
10067
  ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
9280
10068
  try:
9281
10069
  ssh.connect(urlparts.hostname, port=sftp_port,
9282
- username=sftp_username, password=urlparts.password)
10070
+ username=sftp_username, password=sftp_password)
9283
10071
  except paramiko.ssh_exception.SSHException:
9284
10072
  return False
9285
10073
  except socket.gaierror:
@@ -9320,19 +10108,15 @@ if(haveparamiko):
9320
10108
  else:
9321
10109
  sftp_port = urlparts.port
9322
10110
  if(urlparts.username is not None):
9323
- sftp_username = urlparts.username
10111
+ sftp_username = unquote(urlparts.username)
9324
10112
  else:
9325
10113
  sftp_username = "anonymous"
9326
10114
  if(urlparts.password is not None):
9327
- sftp_password = urlparts.password
10115
+ sftp_password = unquote(urlparts.password)
9328
10116
  elif(urlparts.password is None and urlparts.username == "anonymous"):
9329
10117
  sftp_password = "anonymous"
9330
10118
  else:
9331
10119
  sftp_password = ""
9332
- if(urlparts.scheme == "ftp"):
9333
- return upload_file_to_ftp_file(sftpfile, url)
9334
- elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
9335
- return False
9336
10120
  if(urlparts.scheme != "sftp" and urlparts.scheme != "scp"):
9337
10121
  return False
9338
10122
  ssh = paramiko.SSHClient()
@@ -9381,19 +10165,15 @@ if(havepysftp):
9381
10165
  else:
9382
10166
  sftp_port = urlparts.port
9383
10167
  if(urlparts.username is not None):
9384
- sftp_username = urlparts.username
10168
+ sftp_username = unquote(urlparts.username)
9385
10169
  else:
9386
10170
  sftp_username = "anonymous"
9387
10171
  if(urlparts.password is not None):
9388
- sftp_password = urlparts.password
10172
+ sftp_password = unquote(urlparts.password)
9389
10173
  elif(urlparts.password is None and urlparts.username == "anonymous"):
9390
10174
  sftp_password = "anonymous"
9391
10175
  else:
9392
10176
  sftp_password = ""
9393
- if(urlparts.scheme == "ftp"):
9394
- return download_file_from_ftp_file(url)
9395
- elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
9396
- return download_file_from_http_file(url)
9397
10177
  if(urlparts.scheme != "sftp" and urlparts.scheme != "scp"):
9398
10178
  return False
9399
10179
  try:
@@ -9438,19 +10218,15 @@ if(havepysftp):
9438
10218
  else:
9439
10219
  sftp_port = urlparts.port
9440
10220
  if(urlparts.username is not None):
9441
- sftp_username = urlparts.username
10221
+ sftp_username = unquote(urlparts.username)
9442
10222
  else:
9443
10223
  sftp_username = "anonymous"
9444
10224
  if(urlparts.password is not None):
9445
- sftp_password = urlparts.password
10225
+ sftp_password = unquote(urlparts.password)
9446
10226
  elif(urlparts.password is None and urlparts.username == "anonymous"):
9447
10227
  sftp_password = "anonymous"
9448
10228
  else:
9449
10229
  sftp_password = ""
9450
- if(urlparts.scheme == "ftp"):
9451
- return upload_file_to_ftp_file(sftpfile, url)
9452
- elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
9453
- return False
9454
10230
  if(urlparts.scheme != "sftp" and urlparts.scheme != "scp"):
9455
10231
  return False
9456
10232
  try:
@@ -9496,6 +10272,13 @@ def download_file_from_internet_file(url, headers=geturls_headers_pyfile_python_
9496
10272
  return download_file_from_pysftp_file(url)
9497
10273
  else:
9498
10274
  return download_file_from_sftp_file(url)
10275
+ elif(urlparts.scheme == "tcp" or urlparts.scheme == "udp"):
10276
+ outfile = MkTempFile()
10277
+ returnval = recv_via_url(outfile, url, recv_to_fileobj)
10278
+ if(not returnval):
10279
+ return False
10280
+ outfile.seek(0, 0)
10281
+ return outfile
9499
10282
  else:
9500
10283
  return False
9501
10284
  return False
@@ -9548,6 +10331,12 @@ def upload_file_to_internet_file(ifp, url):
9548
10331
  return upload_file_to_pysftp_file(ifp, url)
9549
10332
  else:
9550
10333
  return upload_file_to_sftp_file(ifp, url)
10334
+ elif(urlparts.scheme == "tcp" or urlparts.scheme == "udp"):
10335
+ ifp.seek(0, 0)
10336
+ returnval = send_via_url(ifp, url, send_from_fileobj)
10337
+ if(not returnval):
10338
+ return False
10339
+ return returnval
9551
10340
  else:
9552
10341
  return False
9553
10342
  return False
@@ -9556,7 +10345,7 @@ def upload_file_to_internet_file(ifp, url):
9556
10345
  def upload_file_to_internet_compress_file(ifp, url, compression="auto", compressionlevel=None, compressionuselist=compressionlistalt, formatspecs=__file_format_dict__):
9557
10346
  fp = CompressOpenFileAlt(
9558
10347
  fp, compression, compressionlevel, compressionuselist, formatspecs)
9559
- if(not foxfileout):
10348
+ if(not archivefileout):
9560
10349
  return False
9561
10350
  fp.seek(0, 0)
9562
10351
  return upload_file_to_internet_file(fp, outfile)
@@ -9582,7 +10371,602 @@ def upload_file_to_internet_compress_string(ifp, url, compression="auto", compre
9582
10371
  internetfileo = MkTempFile(ifp)
9583
10372
  fp = CompressOpenFileAlt(
9584
10373
  internetfileo, compression, compressionlevel, compressionuselist, formatspecs)
9585
- if(not foxfileout):
10374
+ if(not archivefileout):
9586
10375
  return False
9587
10376
  fp.seek(0, 0)
9588
10377
  return upload_file_to_internet_file(fp, outfile)
10378
+
10379
+
10380
+ # ---------- Core: send / recv ----------
10381
+ def send_from_fileobj(fileobj, host, port, proto="tcp", timeout=None,
10382
+ chunk_size=65536,
10383
+ use_ssl=False, ssl_verify=True, ssl_ca_file=None,
10384
+ ssl_certfile=None, ssl_keyfile=None, server_hostname=None,
10385
+ auth_user=None, auth_pass=None, auth_scope=u"",
10386
+ on_progress=None, rate_limit_bps=None, want_sha=True):
10387
+ """
10388
+ Send fileobj contents to (host, port) via TCP or UDP.
10389
+
10390
+ UDP behavior:
10391
+ - Computes total length and sha256 when possible.
10392
+ - Sends: AF1 (if auth) + 'LEN <n> [<sha>]\\n' + payload
10393
+ - If length unknown: stream payload, then 'HASH <sha>\\n' (if enabled), then 'DONE\\n'.
10394
+ - Uses small datagrams (<=1200B) to avoid fragmentation.
10395
+ """
10396
+ proto = (proto or "tcp").lower()
10397
+ total = 0
10398
+ port = int(port)
10399
+ if proto not in ("tcp", "udp"):
10400
+ raise ValueError("proto must be 'tcp' or 'udp'")
10401
+
10402
+ # ---------------- UDP ----------------
10403
+ if proto == "udp":
10404
+ sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
10405
+ try:
10406
+ if timeout is not None:
10407
+ sock.settimeout(timeout)
10408
+
10409
+ # connect UDP for convenience
10410
+ try:
10411
+ sock.connect((host, port))
10412
+ connected = True
10413
+ except Exception:
10414
+ connected = False
10415
+
10416
+ # length + optional sha
10417
+ total_bytes, start_pos = _discover_len_and_reset(fileobj)
10418
+
10419
+ sha_hex = None
10420
+ if want_sha and total_bytes is not None:
10421
+ import hashlib
10422
+ h = hashlib.sha256()
10423
+ try:
10424
+ cur = fileobj.tell()
10425
+ except Exception:
10426
+ cur = None
10427
+ if start_pos is not None:
10428
+ try: fileobj.seek(start_pos, os.SEEK_SET)
10429
+ except Exception: pass
10430
+ _HSZ = 1024 * 1024
10431
+ while True:
10432
+ blk = fileobj.read(_HSZ)
10433
+ if not blk: break
10434
+ h.update(_to_bytes(blk))
10435
+ sha_hex = h.hexdigest()
10436
+ if start_pos is not None:
10437
+ try: fileobj.seek(start_pos, os.SEEK_SET)
10438
+ except Exception: pass
10439
+ elif cur is not None:
10440
+ try: fileobj.seek(cur, os.SEEK_SET)
10441
+ except Exception: pass
10442
+
10443
+ # optional AF1 (also carries len/sha, but we'll still send LEN for robustness)
10444
+ if auth_user is not None or auth_pass is not None:
10445
+ try:
10446
+ blob = build_auth_blob_v1(
10447
+ auth_user or u"", auth_pass or u"",
10448
+ scope=auth_scope, length=total_bytes, sha_hex=(sha_hex if want_sha else None)
10449
+ )
10450
+ except Exception:
10451
+ blob = _build_auth_blob_legacy(auth_user or b"", auth_pass or b"")
10452
+ if connected:
10453
+ sock.send(blob)
10454
+ # You may ignore the ack in UDP; keep try/except minimal
10455
+ try:
10456
+ resp = sock.recv(16)
10457
+ if resp != _OK:
10458
+ raise RuntimeError("UDP auth failed")
10459
+ except Exception:
10460
+ pass
10461
+ else:
10462
+ sock.sendto(blob, (host, port))
10463
+ try:
10464
+ resp, _ = sock.recvfrom(16)
10465
+ if resp != _OK:
10466
+ raise RuntimeError("UDP auth failed")
10467
+ except Exception:
10468
+ pass
10469
+
10470
+ # ALWAYS send LEN when length is known
10471
+ if total_bytes is not None:
10472
+ preface = b"LEN " + str(int(total_bytes)).encode("ascii")
10473
+ if want_sha and sha_hex:
10474
+ preface += b" " + sha_hex.encode("ascii")
10475
+ preface += b"\n"
10476
+ if connected: sock.send(preface)
10477
+ else: sock.sendto(preface, (host, port))
10478
+
10479
+ # payload stream
10480
+ UDP_PAYLOAD_MAX = 1200
10481
+ effective_chunk = min(int(chunk_size or 65536), UDP_PAYLOAD_MAX)
10482
+
10483
+ sent_so_far = 0
10484
+ last_cb_ts = monotonic()
10485
+ last_rate_ts = last_cb_ts
10486
+ last_rate_bytes = 0
10487
+
10488
+ rolling_h = None
10489
+ if want_sha and total_bytes is None:
10490
+ try:
10491
+ import hashlib
10492
+ rolling_h = hashlib.sha256()
10493
+ except Exception:
10494
+ rolling_h = None
10495
+
10496
+ while True:
10497
+ chunk = fileobj.read(effective_chunk)
10498
+ if not chunk:
10499
+ break
10500
+ b = _to_bytes(chunk)
10501
+ if rolling_h is not None:
10502
+ rolling_h.update(b)
10503
+ n = (sock.send(b) if connected else sock.sendto(b, (host, port)))
10504
+ total += n
10505
+ sent_so_far += n
10506
+
10507
+ if rate_limit_bps:
10508
+ sleep_s, last_rate_ts, last_rate_bytes = _progress_tick(
10509
+ sent_so_far, total_bytes, last_rate_ts, last_rate_bytes, rate_limit_bps
10510
+ )
10511
+ if sleep_s > 0.0:
10512
+ time.sleep(min(sleep_s, 0.25))
10513
+
10514
+ if on_progress and (monotonic() - last_cb_ts) >= 0.1:
10515
+ try: on_progress(sent_so_far, total_bytes)
10516
+ except Exception: pass
10517
+ last_cb_ts = monotonic()
10518
+
10519
+ # unknown-length trailers
10520
+ if total_bytes is None:
10521
+ if rolling_h is not None:
10522
+ try:
10523
+ th = rolling_h.hexdigest().encode("ascii")
10524
+ (sock.send(b"HASH " + th + b"\n") if connected
10525
+ else sock.sendto(b"HASH " + th + b"\n", (host, port)))
10526
+ except Exception:
10527
+ pass
10528
+ try:
10529
+ (sock.send(b"DONE\n") if connected else sock.sendto(b"DONE\n", (host, port)))
10530
+ except Exception:
10531
+ pass
10532
+
10533
+ finally:
10534
+ try: sock.close()
10535
+ except Exception: pass
10536
+ return total
10537
+
10538
+ # ---------------- TCP ----------------
10539
+ sock = _connect_stream(host, port, timeout)
10540
+ try:
10541
+ if use_ssl:
10542
+ if not _ssl_available():
10543
+ raise RuntimeError("SSL requested but 'ssl' module unavailable.")
10544
+ sock = _ssl_wrap_socket(sock, server_side=False,
10545
+ server_hostname=(server_hostname or host),
10546
+ verify=ssl_verify, ca_file=ssl_ca_file,
10547
+ certfile=ssl_certfile, keyfile=ssl_keyfile)
10548
+
10549
+ total_bytes, start_pos = _discover_len_and_reset(fileobj)
10550
+ sha_hex = None
10551
+ if want_sha and total_bytes is not None:
10552
+ try:
10553
+ import hashlib
10554
+ h = hashlib.sha256()
10555
+ cur = fileobj.tell()
10556
+ if start_pos is not None:
10557
+ fileobj.seek(start_pos, os.SEEK_SET)
10558
+ _HSZ = 1024 * 1024
10559
+ while True:
10560
+ blk = fileobj.read(_HSZ)
10561
+ if not blk: break
10562
+ h.update(_to_bytes(blk))
10563
+ sha_hex = h.hexdigest()
10564
+ fileobj.seek(cur, os.SEEK_SET)
10565
+ except Exception:
10566
+ sha_hex = None
10567
+
10568
+ if auth_user is not None or auth_pass is not None:
10569
+ try:
10570
+ blob = build_auth_blob_v1(
10571
+ auth_user or u"", auth_pass or u"",
10572
+ scope=auth_scope, length=total_bytes, sha_hex=(sha_hex if want_sha else None)
10573
+ )
10574
+ except Exception:
10575
+ blob = _build_auth_blob_legacy(auth_user or b"", auth_pass or b"")
10576
+ sock.sendall(blob)
10577
+ try:
10578
+ resp = sock.recv(16)
10579
+ if resp != _OK:
10580
+ raise RuntimeError("TCP auth failed")
10581
+ except Exception:
10582
+ pass
10583
+
10584
+ sent_so_far = 0
10585
+ last_cb_ts = monotonic()
10586
+ last_rate_ts = last_cb_ts
10587
+ last_rate_bytes = 0
10588
+
10589
+ use_sendfile = hasattr(sock, "sendfile") and hasattr(fileobj, "read")
10590
+ if use_sendfile:
10591
+ try:
10592
+ sent = sock.sendfile(fileobj)
10593
+ if isinstance(sent, int):
10594
+ total += sent
10595
+ sent_so_far += sent
10596
+ if on_progress:
10597
+ try: on_progress(sent_so_far, total_bytes)
10598
+ except Exception: pass
10599
+ else:
10600
+ raise RuntimeError("sendfile returned unexpected type")
10601
+ except Exception:
10602
+ while True:
10603
+ chunk = fileobj.read(chunk_size)
10604
+ if not chunk: break
10605
+ view = memoryview(_to_bytes(chunk))
10606
+ while view:
10607
+ n = sock.send(view); total += n; sent_so_far += n; view = view[n:]
10608
+ if rate_limit_bps:
10609
+ sleep_s, last_rate_ts, last_rate_bytes = _progress_tick(
10610
+ sent_so_far, total_bytes, last_rate_ts, last_rate_bytes, rate_limit_bps
10611
+ )
10612
+ if sleep_s > 0.0:
10613
+ time.sleep(min(sleep_s, 0.25))
10614
+ if on_progress and (monotonic() - last_cb_ts) >= 0.1:
10615
+ try: on_progress(sent_so_far, total_bytes)
10616
+ except Exception: pass
10617
+ last_cb_ts = monotonic()
10618
+ else:
10619
+ while True:
10620
+ chunk = fileobj.read(chunk_size)
10621
+ if not chunk: break
10622
+ view = memoryview(_to_bytes(chunk))
10623
+ while view:
10624
+ n = sock.send(view); total += n; sent_so_far += n; view = view[n:]
10625
+ if rate_limit_bps:
10626
+ sleep_s, last_rate_ts, last_rate_bytes = _progress_tick(
10627
+ sent_so_far, total_bytes, last_rate_ts, last_rate_bytes, rate_limit_bps
10628
+ )
10629
+ if sleep_s > 0.0:
10630
+ time.sleep(min(sleep_s, 0.25))
10631
+ if on_progress and (monotonic() - last_cb_ts) >= 0.1:
10632
+ try: on_progress(sent_so_far, total_bytes)
10633
+ except Exception: pass
10634
+ last_cb_ts = monotonic()
10635
+ finally:
10636
+ try: sock.shutdown(socket.SHUT_WR)
10637
+ except Exception: pass
10638
+ try: sock.close()
10639
+ except Exception: pass
10640
+ return total
10641
+
10642
+ def recv_to_fileobj(fileobj, host="", port=0, proto="tcp", timeout=None,
10643
+ max_bytes=None, chunk_size=65536, backlog=1,
10644
+ use_ssl=False, ssl_verify=True, ssl_ca_file=None,
10645
+ ssl_certfile=None, ssl_keyfile=None,
10646
+ require_auth=False, expected_user=None, expected_pass=None,
10647
+ total_timeout=None, expect_scope=None,
10648
+ on_progress=None, rate_limit_bps=None):
10649
+ """
10650
+ Receive bytes into fileobj over TCP/UDP.
10651
+
10652
+ UDP specifics:
10653
+ * Accepts 'LEN <n> [<sha>]\\n' and 'HASH <sha>\\n' control frames (unauth) or AF1 with len/sha.
10654
+ * If length unknown, accepts final 'DONE\\n' to end cleanly.
10655
+ """
10656
+ proto = (proto or "tcp").lower()
10657
+ port = int(port)
10658
+ total = 0
10659
+
10660
+ start_ts = time.time()
10661
+ def _time_left():
10662
+ if total_timeout is None:
10663
+ return None
10664
+ left = total_timeout - (time.time() - start_ts)
10665
+ return 0.0 if left <= 0 else left
10666
+ def _set_effective_timeout(socklike, base_timeout):
10667
+ left = _time_left()
10668
+ if left == 0.0:
10669
+ return False
10670
+ eff = base_timeout
10671
+ if left is not None:
10672
+ eff = left if eff is None else min(eff, left)
10673
+ if eff is not None:
10674
+ try:
10675
+ socklike.settimeout(eff)
10676
+ except Exception:
10677
+ pass
10678
+ return True
10679
+
10680
+ if proto not in ("tcp", "udp"):
10681
+ raise ValueError("proto must be 'tcp' or 'udp'")
10682
+
10683
+ # ---------------- UDP server ----------------
10684
+ if proto == "udp":
10685
+ sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
10686
+ authed_addr = None
10687
+ expected_len = None
10688
+ expected_sha = None
10689
+
10690
+ try:
10691
+ sock.bind(("", port))
10692
+ if timeout is None:
10693
+ try: sock.settimeout(10.0)
10694
+ except Exception: pass
10695
+
10696
+ recvd_so_far = 0
10697
+ last_cb_ts = monotonic()
10698
+ last_rate_ts = last_cb_ts
10699
+ last_rate_bytes = 0
10700
+
10701
+ while True:
10702
+ if _time_left() == 0.0:
10703
+ if expected_len is not None and total < expected_len:
10704
+ raise RuntimeError("UDP receive aborted by total_timeout before full payload received")
10705
+ break
10706
+ if (max_bytes is not None) and (total >= max_bytes):
10707
+ break
10708
+
10709
+ if not _set_effective_timeout(sock, timeout):
10710
+ if expected_len is not None and total < expected_len:
10711
+ raise RuntimeError("UDP receive timed out before full payload received")
10712
+ if expected_len is None and total > 0:
10713
+ raise RuntimeError("UDP receive timed out with unknown length; partial data")
10714
+ if expected_len is None and total == 0:
10715
+ raise RuntimeError("UDP receive: no packets received before timeout (is the sender running?)")
10716
+ break
10717
+
10718
+ try:
10719
+ data, addr = sock.recvfrom(chunk_size)
10720
+ except socket.timeout:
10721
+ if expected_len is not None and total < expected_len:
10722
+ raise RuntimeError("UDP receive idle-timeout before full payload received")
10723
+ if expected_len is None and total > 0:
10724
+ raise RuntimeError("UDP receive idle-timeout with unknown length; partial data")
10725
+ if expected_len is None and total == 0:
10726
+ raise RuntimeError("UDP receive: no packets received before timeout (is the sender running?)")
10727
+ break
10728
+
10729
+ if not data:
10730
+ continue
10731
+
10732
+ # (0) Control frames FIRST: LEN / HASH / DONE
10733
+ if data.startswith(b"LEN ") and expected_len is None:
10734
+ try:
10735
+ parts = data.strip().split()
10736
+ n = int(parts[1])
10737
+ expected_len = (None if n < 0 else n)
10738
+ if len(parts) >= 3:
10739
+ expected_sha = parts[2].decode("ascii")
10740
+ except Exception:
10741
+ expected_len = None
10742
+ expected_sha = None
10743
+ continue
10744
+
10745
+ if data.startswith(b"HASH "):
10746
+ try:
10747
+ expected_sha = data.strip().split()[1].decode("ascii")
10748
+ except Exception:
10749
+ expected_sha = None
10750
+ continue
10751
+
10752
+ if data == b"DONE\n":
10753
+ break
10754
+
10755
+ # (1) Auth (AF1 preferred; legacy fallback)
10756
+ if authed_addr is None and require_auth:
10757
+ ok = False
10758
+ v_ok, v_user, v_scope, _r, v_len, v_sha = verify_auth_blob_v1(
10759
+ data, expected_user=expected_user, secret=expected_pass,
10760
+ max_skew=600, expect_scope=expect_scope
10761
+ )
10762
+ if v_ok:
10763
+ ok = True
10764
+ if expected_len is None:
10765
+ expected_len = v_len
10766
+ if expected_sha is None:
10767
+ expected_sha = v_sha
10768
+ else:
10769
+ user, pw = _parse_auth_blob_legacy(data)
10770
+ ok = (user is not None and
10771
+ (expected_user is None or user == _to_bytes(expected_user)) and
10772
+ (expected_pass is None or pw == _to_bytes(expected_pass)))
10773
+ try:
10774
+ sock.sendto((_OK if ok else _NO), addr)
10775
+ except Exception:
10776
+ pass
10777
+ if ok:
10778
+ authed_addr = addr
10779
+ continue
10780
+
10781
+ if require_auth and addr != authed_addr:
10782
+ continue
10783
+
10784
+ # (2) Payload
10785
+ fileobj.write(data)
10786
+ try: fileobj.flush()
10787
+ except Exception: pass
10788
+ total += len(data)
10789
+ recvd_so_far += len(data)
10790
+
10791
+ if rate_limit_bps:
10792
+ sleep_s, last_rate_ts, last_rate_bytes = _progress_tick(
10793
+ recvd_so_far, expected_len, last_rate_ts, last_rate_bytes, rate_limit_bps
10794
+ )
10795
+ if sleep_s > 0.0:
10796
+ time.sleep(min(sleep_s, 0.25))
10797
+
10798
+ if on_progress and (monotonic() - last_cb_ts) >= 0.1:
10799
+ try: on_progress(recvd_so_far, expected_len)
10800
+ except Exception: pass
10801
+ last_cb_ts = monotonic()
10802
+
10803
+ if expected_len is not None and total >= expected_len:
10804
+ break
10805
+
10806
+ # Post-conditions
10807
+ if expected_len is not None and total != expected_len:
10808
+ raise RuntimeError("UDP receive incomplete: got %d of %s bytes" % (total, expected_len))
10809
+
10810
+ if expected_sha:
10811
+ import hashlib
10812
+ try:
10813
+ cur = fileobj.tell(); fileobj.seek(0)
10814
+ except Exception:
10815
+ cur = None
10816
+ h = hashlib.sha256(); _HSZ = 1024 * 1024
10817
+ while True:
10818
+ blk = fileobj.read(_HSZ)
10819
+ if not blk: break
10820
+ h.update(_to_bytes(blk))
10821
+ got = h.hexdigest()
10822
+ if cur is not None:
10823
+ try: fileobj.seek(cur)
10824
+ except Exception: pass
10825
+ if got != expected_sha:
10826
+ raise RuntimeError("UDP checksum mismatch: got %s expected %s" % (got, expected_sha))
10827
+
10828
+ finally:
10829
+ try: sock.close()
10830
+ except Exception: pass
10831
+ return total
10832
+
10833
+ # ---------------- TCP server ----------------
10834
+ srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
10835
+ try:
10836
+ try: srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
10837
+ except Exception: pass
10838
+ srv.bind((host or "", port))
10839
+ srv.listen(int(backlog) if backlog else 1)
10840
+
10841
+ if not _set_effective_timeout(srv, timeout):
10842
+ return 0
10843
+ try:
10844
+ conn, _peer = srv.accept()
10845
+ except socket.timeout:
10846
+ return 0
10847
+
10848
+ if use_ssl:
10849
+ if not _ssl_available():
10850
+ try: conn.close()
10851
+ except Exception: pass
10852
+ raise RuntimeError("SSL requested but 'ssl' module unavailable.")
10853
+ if not ssl_certfile:
10854
+ try: conn.close()
10855
+ except Exception: pass
10856
+ raise ValueError("TLS server requires ssl_certfile (and usually ssl_keyfile).")
10857
+ conn = _ssl_wrap_socket(conn, server_side=True, server_hostname=None,
10858
+ verify=ssl_verify, ca_file=ssl_ca_file,
10859
+ certfile=ssl_certfile, keyfile=ssl_keyfile)
10860
+
10861
+ recvd_so_far = 0
10862
+ last_cb_ts = monotonic()
10863
+ last_rate_ts = last_cb_ts
10864
+ last_rate_bytes = 0
10865
+
10866
+ try:
10867
+ if require_auth:
10868
+ if not _set_effective_timeout(conn, timeout):
10869
+ return 0
10870
+ try:
10871
+ preface = conn.recv(2048)
10872
+ except socket.timeout:
10873
+ try: conn.sendall(_NO)
10874
+ except Exception: pass
10875
+ return 0
10876
+
10877
+ ok = False
10878
+ v_ok, v_user, v_scope, _r, v_len, v_sha = verify_auth_blob_v1(
10879
+ preface or b"", expected_user=expected_user, secret=expected_pass,
10880
+ max_skew=600, expect_scope=expect_scope
10881
+ )
10882
+ if v_ok:
10883
+ ok = True
10884
+ else:
10885
+ user, pw = _parse_auth_blob_legacy(preface or b"")
10886
+ ok = (user is not None and
10887
+ (expected_user is None or user == _to_bytes(expected_user)) and
10888
+ (expected_pass is None or pw == _to_bytes(expected_pass)))
10889
+
10890
+ try: conn.sendall(_OK if ok else _NO)
10891
+ except Exception: pass
10892
+ if not ok:
10893
+ return 0
10894
+
10895
+ while True:
10896
+ if _time_left() == 0.0: break
10897
+ if (max_bytes is not None) and (total >= max_bytes): break
10898
+
10899
+ if not _set_effective_timeout(conn, timeout):
10900
+ break
10901
+ try:
10902
+ data = conn.recv(chunk_size)
10903
+ except socket.timeout:
10904
+ break
10905
+ if not data:
10906
+ break
10907
+
10908
+ fileobj.write(data)
10909
+ try: fileobj.flush()
10910
+ except Exception: pass
10911
+ total += len(data)
10912
+ recvd_so_far += len(data)
10913
+
10914
+ if rate_limit_bps:
10915
+ sleep_s, last_rate_ts, last_rate_bytes = _progress_tick(
10916
+ recvd_so_far, max_bytes, last_rate_ts, last_rate_bytes, rate_limit_bps
10917
+ )
10918
+ if sleep_s > 0.0:
10919
+ time.sleep(min(sleep_s, 0.25))
10920
+
10921
+ if on_progress and (monotonic() - last_cb_ts) >= 0.1:
10922
+ try: on_progress(recvd_so_far, max_bytes)
10923
+ except Exception: pass
10924
+ last_cb_ts = monotonic()
10925
+ finally:
10926
+ try: conn.shutdown(socket.SHUT_RD)
10927
+ except Exception: pass
10928
+ try: conn.close()
10929
+ except Exception: pass
10930
+ finally:
10931
+ try: srv.close()
10932
+ except Exception: pass
10933
+
10934
+ return total
10935
+
10936
+ # ---------- URL drivers ----------
10937
+ def send_via_url(fileobj, url, send_from_fileobj_func=send_from_fileobj):
10938
+ """
10939
+ Use URL options to drive the sender. Returns bytes sent.
10940
+ """
10941
+ parts, o = _parse_net_url(url)
10942
+ use_auth = (o["user"] is not None and o["pw"] is not None) or o["force_auth"]
10943
+ return send_from_fileobj_func(
10944
+ fileobj,
10945
+ o["host"], o["port"], proto=o["proto"],
10946
+ timeout=o["timeout"], chunk_size=o["chunk_size"],
10947
+ use_ssl=o["use_ssl"], ssl_verify=o["ssl_verify"],
10948
+ ssl_ca_file=o["ssl_ca_file"], ssl_certfile=o["ssl_certfile"], ssl_keyfile=o["ssl_keyfile"],
10949
+ server_hostname=o["server_hostname"],
10950
+ auth_user=(o["user"] if use_auth else None),
10951
+ auth_pass=(o["pw"] if use_auth else None),
10952
+ auth_scope=o.get("path", u""),
10953
+ want_sha=o["want_sha"], # <— pass through
10954
+ )
10955
+
10956
+ def recv_via_url(fileobj, url, recv_to_fileobj_func=recv_to_fileobj):
10957
+ """
10958
+ Use URL options to drive the receiver. Returns bytes received.
10959
+ """
10960
+ parts, o = _parse_net_url(url)
10961
+ require_auth = (o["user"] is not None and o["pw"] is not None) or o["force_auth"]
10962
+ return recv_to_fileobj_func(
10963
+ fileobj,
10964
+ o["host"], o["port"], proto=o["proto"],
10965
+ timeout=o["timeout"], total_timeout=o["total_timeout"],
10966
+ chunk_size=o["chunk_size"],
10967
+ use_ssl=o["use_ssl"], ssl_verify=o["ssl_verify"],
10968
+ ssl_ca_file=o["ssl_ca_file"], ssl_certfile=o["ssl_certfile"], ssl_keyfile=o["ssl_keyfile"],
10969
+ require_auth=require_auth,
10970
+ expected_user=o["user"], expected_pass=o["pw"],
10971
+ expect_scope=o.get("path", u""),
10972
+ )