PyArchiveFile 0.22.4__py3-none-any.whl → 0.23.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pyarchivefile-0.22.4.data → pyarchivefile-0.23.0.data}/scripts/archivefile.py +1 -1
- {pyarchivefile-0.22.4.dist-info → pyarchivefile-0.23.0.dist-info}/METADATA +1 -1
- pyarchivefile-0.23.0.dist-info/RECORD +10 -0
- pyarchivefile.py +1635 -221
- pyarchivefile-0.22.4.dist-info/RECORD +0 -10
- {pyarchivefile-0.22.4.data → pyarchivefile-0.23.0.data}/scripts/archiveneofile.py +0 -0
- {pyarchivefile-0.22.4.data → pyarchivefile-0.23.0.data}/scripts/neoarchivefile.py +0 -0
- {pyarchivefile-0.22.4.dist-info → pyarchivefile-0.23.0.dist-info}/WHEEL +0 -0
- {pyarchivefile-0.22.4.dist-info → pyarchivefile-0.23.0.dist-info}/licenses/LICENSE +0 -0
- {pyarchivefile-0.22.4.dist-info → pyarchivefile-0.23.0.dist-info}/top_level.txt +0 -0
- {pyarchivefile-0.22.4.dist-info → pyarchivefile-0.23.0.dist-info}/zip-safe +0 -0
pyarchivefile.py
CHANGED
@@ -14,7 +14,7 @@
 Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
 Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski
 
-$FileInfo: pyarchivefile.py - Last Update:
+$FileInfo: pyarchivefile.py - Last Update: 10/1/2025 Ver. 0.23.0 RC 1 - Author: cooldude2k $
 '''
 
 from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -27,8 +27,8 @@ import stat
 import zlib
 import base64
 import shutil
-import struct
 import socket
+import struct
 import hashlib
 import inspect
 import datetime
@@ -79,6 +79,7 @@ try:
 except NameError:
     basestring = str
 
+PY2 = (sys.version_info[0] == 2)
 try:
     unicode  # Py2
 except NameError:  # Py3
@@ -104,11 +105,11 @@ baseint = tuple(baseint)
 # URL Parsing
 try:
     # Python 3
-    from urllib.parse import urlparse, urlunparse, unquote
+    from urllib.parse import urlparse, urlunparse, parse_qs, unquote
     from urllib.request import url2pathname
 except ImportError:
     # Python 2
-    from urlparse import urlparse, urlunparse
+    from urlparse import urlparse, urlunparse, parse_qs
     from urllib import unquote, url2pathname
 
 # Windows-specific setup
@@ -269,8 +270,8 @@ def get_default_threads():
         # os.cpu_count() might not be available in some environments
         return 1
 
-__upload_proto_support__ = "^(ftp|ftps|sftp|scp)://"
-__download_proto_support__ = "^(http|https|ftp|ftps|sftp|scp)://"
+__upload_proto_support__ = "^(ftp|ftps|sftp|scp|tcp|udp)://"
+__download_proto_support__ = "^(http|https|ftp|ftps|sftp|scp|tcp|udp)://"
 __use_pysftp__ = False
 if(not havepysftp):
     __use_pysftp__ = False
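
The two patterns above are anchored scheme gates; a minimal sketch of how such a regex is typically applied (the can_upload helper is hypothetical, not part of the module):

import re

__upload_proto_support__ = "^(ftp|ftps|sftp|scp|tcp|udp)://"

def can_upload(url):
    # re.match anchors at the start of the string, so only the scheme
    # prefix is tested; the rest of the URL is irrelevant here.
    return re.match(__upload_proto_support__, url) is not None

assert can_upload("tcp://example.com:9000/out.cat")
assert not can_upload("http://example.com/out.cat")
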
@@ -379,12 +380,12 @@ __file_format_extension__ = __file_format_multi_dict__[__file_format_default__][
 __file_format_dict__ = __file_format_multi_dict__[__file_format_default__]
 __project__ = __program_name__
 __project_url__ = "https://github.com/GameMaker2k/PyArchiveFile"
-__version_info__ = (0,
-__version_date_info__ = (2025,
+__version_info__ = (0, 23, 0, "RC 1", 1)
+__version_date_info__ = (2025, 10, 1, "RC 1", 1)
 __version_date__ = str(__version_date_info__[0]) + "." + str(
     __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
 __revision__ = __version_info__[3]
-__revision_id__ = "$Id:
+__revision_id__ = "$Id: 0648f57aa9e81ed48e68f5f42de5c4565b870555 $"
 if(__version_info__[4] is not None):
     __version_date_plusrc__ = __version_date__ + \
         "-" + str(__version_date_info__[4])
@@ -396,15 +397,67 @@ if(__version_info__[3] is not None):
 if(__version_info__[3] is None):
     __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])
 
+# ===== Module-level type code table & helpers (reuse anywhere) =====
+
+FT = {
+    "FILE": 0,
+    "HARDLINK": 1,
+    "SYMLINK": 2,
+    "CHAR": 3,
+    "BLOCK": 4,
+    "DIR": 5,
+    "FIFO": 6,
+    "CONTAGIOUS": 7,  # treated like regular file
+    "SOCK": 8,
+    "DOOR": 9,
+    "PORT": 10,
+    "WHT": 11,
+    "SPARSE": 12,
+    "JUNCTION": 13,
+}
+
+BASE_CATEGORY_BY_CODE = {
+    0: "files",
+    1: "hardlinks",
+    2: "symlinks",
+    3: "characters",
+    4: "blocks",
+    5: "directories",
+    6: "fifos",
+    7: "files",  # contagious treated as file
+    8: "sockets",
+    9: "doors",
+    10: "ports",
+    11: "whiteouts",
+    12: "sparsefiles",
+    13: "junctions",
+}
+
+# Union categories defined by which base codes should populate them.
+UNION_RULES = [
+    ("links", set([FT["HARDLINK"], FT["SYMLINK"]])),
+    ("devices", set([FT["CHAR"], FT["BLOCK"]])),
+]
+
+# Deterministic category order (handy for consistent output/printing).
+CATEGORY_ORDER = [
+    "files", "hardlinks", "symlinks", "character", "block",
+    "directories", "fifo", "sockets", "doors", "ports",
+    "whiteouts", "sparsefiles", "junctions", "links", "devices"
+]
+
 # Robust bitness detection
 # Works on Py2 & Py3, all platforms
+
+# Python interpreter bitness
+PyBitness = "64" if struct.calcsize("P") * 8 == 64 else ("64" if sys.maxsize > 2**32 else "32")
+
+# Operating system bitness
 try:
-
-    PyBitness = "64" if struct.calcsize("P") * 8 == 64 else "32"
+    OSBitness = platform.architecture()[0].replace("bit", "")
 except Exception:
-
-
-    PyBitness = "64" if m.endswith("64") else "32"
+    m = platform.machine().lower()
+    OSBitness = "64" if "64" in m else "32"
 
 geturls_ua_pyfile_python = "Mozilla/5.0 (compatible; {proname}/{prover}; +{prourl})".format(
     proname=__project__, prover=__version__, prourl=__project_url__)
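
The FT / BASE_CATEGORY_BY_CODE / UNION_RULES tables added above drive the index builder introduced later in this diff. A small sketch (tables trimmed to a subset; the categories_for_code helper is hypothetical, not part of the module) of how one type code maps to its base category plus any union categories:

FT = {"FILE": 0, "HARDLINK": 1, "SYMLINK": 2, "CHAR": 3, "BLOCK": 4}
BASE_CATEGORY_BY_CODE = {0: "files", 1: "hardlinks", 2: "symlinks",
                         3: "characters", 4: "blocks"}
UNION_RULES = [
    ("links", set([FT["HARDLINK"], FT["SYMLINK"]])),
    ("devices", set([FT["CHAR"], FT["BLOCK"]])),
]

def categories_for_code(t):
    # Base category first, then every union whose code set contains t.
    cats = []
    base = BASE_CATEGORY_BY_CODE.get(t)
    if base is not None:
        cats.append(base)
    for union_name, code_set in UNION_RULES:
        if t in code_set:
            cats.append(union_name)
    return cats

print(categories_for_code(FT["SYMLINK"]))  # ['symlinks', 'links']
print(categories_for_code(FT["BLOCK"]))    # ['blocks', 'devices']
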
@@ -644,6 +697,414 @@ def _resolves_outside(base_rel, target_rel):
         return False
     return True
 
+def _to_bytes(data):
+    if data is None:
+        return b""
+    if isinstance(data, bytes):
+        return data
+    if isinstance(data, unicode):
+        return data.encode("utf-8")
+    try:
+        return bytes(data)
+    except Exception:
+        return (u"%s" % data).encode("utf-8")
+
+def _to_text(b):
+    if isinstance(b, bytes):
+        return b.decode("utf-8", "replace")
+    return b
+
+# ---------- TLS helpers (TCP only) ----------
+def _ssl_available():
+    try:
+        import ssl  # noqa
+        return True
+    except Exception:
+        return False
+
+def _build_ssl_context(server_side=False, verify=True, ca_file=None, certfile=None, keyfile=None):
+    import ssl
+    create_ctx = getattr(ssl, "create_default_context", None)
+    SSLContext = getattr(ssl, "SSLContext", None)
+    Purpose = getattr(ssl, "Purpose", None)
+    if create_ctx and Purpose:
+        ctx = create_ctx(ssl.Purpose.CLIENT_AUTH if server_side else ssl.Purpose.SERVER_AUTH)
+    elif SSLContext:
+        ctx = SSLContext(getattr(ssl, "PROTOCOL_TLS", getattr(ssl, "PROTOCOL_SSLv23")))
+    else:
+        return None
+
+    if hasattr(ctx, "check_hostname") and not server_side:
+        ctx.check_hostname = bool(verify)
+
+    if verify:
+        if hasattr(ctx, "verify_mode"):
+            ctx.verify_mode = getattr(ssl, "CERT_REQUIRED", 2)
+        if ca_file:
+            try: ctx.load_verify_locations(cafile=ca_file)
+            except Exception: pass
+        else:
+            load_default_certs = getattr(ctx, "load_default_certs", None)
+            if load_default_certs: load_default_certs()
+    else:
+        if hasattr(ctx, "verify_mode"):
+            ctx.verify_mode = getattr(ssl, "CERT_NONE", 0)
+        if hasattr(ctx, "check_hostname"):
+            ctx.check_hostname = False
+
+    if certfile:
+        ctx.load_cert_chain(certfile=certfile, keyfile=keyfile or None)
+
+    try:
+        ctx.set_ciphers("HIGH:!aNULL:!MD5:!RC4")
+    except Exception:
+        pass
+    return ctx
+
+def _ssl_wrap_socket(sock, server_side=False, server_hostname=None,
+                     verify=True, ca_file=None, certfile=None, keyfile=None):
+    import ssl
+    ctx = _build_ssl_context(server_side, verify, ca_file, certfile, keyfile)
+    if ctx is not None:
+        kwargs = {}
+        if not server_side and getattr(ssl, "HAS_SNI", False) and server_hostname:
+            kwargs["server_hostname"] = server_hostname
+        return ctx.wrap_socket(sock, server_side=server_side, **kwargs)
+    # Very old Python fallback
+    kwargs = {
+        "ssl_version": getattr(ssl, "PROTOCOL_TLS", getattr(ssl, "PROTOCOL_SSLv23")),
+        "certfile": certfile or None,
+        "keyfile": keyfile or None,
+        "cert_reqs": (getattr(ssl, "CERT_REQUIRED", 2) if (verify and ca_file) else getattr(ssl, "CERT_NONE", 0)),
+    }
+    if verify and ca_file:
+        kwargs["ca_certs"] = ca_file
+    return ssl.wrap_socket(sock, **kwargs)
+
+# ---------- IPv6 / multi-A dialer + keepalive ----------
+def _enable_keepalive(s, idle=60, intvl=15, cnt=4):
+    try:
+        s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+        if hasattr(socket, 'TCP_KEEPIDLE'):
+            s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, idle)
+        if hasattr(socket, 'TCP_KEEPINTVL'):
+            s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, intvl)
+        if hasattr(socket, 'TCP_KEEPCNT'):
+            s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, cnt)
+    except Exception:
+        pass
+
+def _connect_stream(host, port, timeout):
+    err = None
+    for fam, st, proto, _, sa in socket.getaddrinfo(host, int(port), 0, socket.SOCK_STREAM):
+        try:
+            s = socket.socket(fam, st, proto)
+            if timeout is not None:
+                s.settimeout(timeout)
+            try: s.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+            except Exception: pass
+            s.connect(sa)
+            _enable_keepalive(s)
+            return s
+        except Exception as e:
+            err = e
+            try: s.close()
+            except Exception: pass
+    if err: raise err
+    raise RuntimeError("no usable address")
+
+# ---------- Auth: AF1 (HMAC) + legacy fallback ----------
+# AF1: single ASCII line ending with '\n':
+#   AF1 ts=<unix> user=<b64url> nonce=<b64url_12B> scope=<b64url> alg=sha256 mac=<hex>\n
+def _b64url_encode(b):
+    s = base64.urlsafe_b64encode(b)
+    return _to_text(s.rstrip(b'='))
+
+def _b64url_decode(s):
+    s = _to_bytes(s)
+    pad = b'=' * ((4 - (len(s) % 4)) % 4)
+    return base64.urlsafe_b64decode(s + pad)
+
+def _auth_msg(ts_int, user_utf8, nonce_bytes, scope_utf8, length_str, sha_hex):
+    # canonical message for MAC: v1|ts|user|nonce_b64|scope|len|sha
+    return _to_bytes("v1|%d|%s|%s|%s|%s|%s" % (
+        ts_int,
+        _to_text(user_utf8),
+        _b64url_encode(nonce_bytes),
+        _to_text(scope_utf8),
+        length_str if length_str is not None else "",
+        sha_hex if sha_hex is not None else "",
+    ))
+
+def build_auth_blob_v1(user, secret, scope=u"", now=None, length=None, sha_hex=None):
+    """
+    user: text; secret: text/bytes (HMAC key)
+    scope: optional text (e.g., path)
+    length: int or None (payload bytes)
+    sha_hex: ascii hex SHA-256 of payload (optional)
+    """
+    ts = int(time.time() if now is None else now)
+    user_b = _to_bytes(user or u"")
+    scope_b = _to_bytes(scope or u"")
+    key_b = _to_bytes(secret or u"")
+    nonce = os.urandom(12)
+
+    length_str = (str(int(length)) if (length is not None and int(length) >= 0) else "")
+    sha_hex = (sha_hex or None)
+    mac = hmac.new(
+        key_b,
+        _auth_msg(ts, user_b, nonce, scope_b, length_str, sha_hex),
+        hashlib.sha256
+    ).hexdigest()
+
+    line = "AF1 ts=%d user=%s nonce=%s scope=%s len=%s sha=%s alg=sha256 mac=%s\n" % (
+        ts,
+        _b64url_encode(user_b),
+        _b64url_encode(nonce),
+        _b64url_encode(scope_b),
+        length_str,
+        (sha_hex or ""),
+        mac,
+    )
+    return _to_bytes(line)
+
+from collections import deque
+class _NonceCache(object):
+    def __init__(self, max_items=10000, ttl_seconds=600):
+        self.max_items = int(max_items); self.ttl = int(ttl_seconds)
+        self.q = deque(); self.s = set()
+    def seen(self, nonce_b64, now_ts):
+        # evict old / over-capacity
+        while self.q and (now_ts - self.q[0][0] > self.ttl or len(self.q) > self.max_items):
+            _, n = self.q.popleft(); self.s.discard(n)
+        if nonce_b64 in self.s: return True
+        self.s.add(nonce_b64); self.q.append((now_ts, nonce_b64))
+        return False
+
+_NONCES = _NonceCache()
+
+def verify_auth_blob_v1(blob_bytes, expected_user=None, secret=None,
+                        max_skew=600, expect_scope=None):
+    """
+    Returns (ok_bool, user_text, scope_text, reason_text, length_or_None, sha_hex_or_None)
+    """
+    try:
+        line = _to_text(blob_bytes).strip()
+        if not line.startswith("AF1 "):
+            return (False, None, None, "bad magic", None, None)
+        kv = {}
+        for tok in line.split()[1:]:
+            if '=' in tok:
+                k, v = tok.split('=', 1); kv[k] = v
+
+        for req in ("ts","user","nonce","mac","alg"):
+            if req not in kv:
+                return (False, None, None, "missing %s" % req, None, None)
+        if kv["alg"].lower() != "sha256":
+            return (False, None, None, "alg", None, None)
+
+        ts = int(kv["ts"])
+        userb = _b64url_decode(kv["user"])
+        nonce_b64 = kv["nonce"]; nonce = _b64url_decode(nonce_b64)
+        scopeb = _b64url_decode(kv.get("scope","")) if kv.get("scope") else b""
+        length_str = kv.get("len","")
+        sha_hex = kv.get("sha","") or None
+        mac = kv["mac"]
+
+        now = int(time.time())
+        if abs(now - ts) > int(max_skew):
+            return (False, None, None, "skew", None, None)
+
+        if _NONCES.seen(nonce_b64, now):
+            return (False, None, None, "replay", None, None)
+
+        if expected_user is not None and _to_bytes(expected_user) != userb:
+            return (False, None, None, "user", None, None)
+
+        calc = hmac.new(
+            _to_bytes(secret or u""),
+            _auth_msg(ts, userb, nonce, scopeb, length_str, sha_hex),
+            hashlib.sha256
+        ).hexdigest()
+        if not hmac.compare_digest(calc, mac):
+            return (False, None, None, "mac", None, None)
+
+        if expect_scope is not None and _to_bytes(expect_scope) != scopeb:
+            return (False, None, None, "scope", None, None)
+
+        length = int(length_str) if (length_str and length_str.isdigit()) else None
+        return (True, _to_text(userb), _to_text(scopeb), "ok", length, sha_hex)
+    except Exception as e:
+        return (False, None, None, "exc:%s" % e, None, None)
+
+# Legacy blob (kept for backward compatibility)
+_MAGIC = b"AUTH\0"; _OK = b"OK"; _NO = b"NO"
+
+def _build_auth_blob_legacy(user, pw):
+    return _MAGIC + _to_bytes(user) + b"\0" + _to_bytes(pw) + b"\0"
+
+def _parse_auth_blob_legacy(data):
+    if not data.startswith(_MAGIC):
+        return (None, None)
+    rest = data[len(_MAGIC):]
+    try:
+        user, rest = rest.split(b"\0", 1)
+        pw, _tail = rest.split(b"\0", 1)
+        return (user, pw)
+    except Exception:
+        return (None, None)
+
+# ---------- URL helpers ----------
+def _qflag(qs, key, default=False):
+    v = qs.get(key, [None])[0]
+    if v is None: return bool(default)
+    return _to_text(v).lower() in ("1", "true", "yes", "on")
+
+def _qnum(qs, key, default=None, cast=float):
+    v = qs.get(key, [None])[0]
+    if v is None or v == "": return default
+    try: return cast(v)
+    except Exception: return default
+
+def _qstr(qs, key, default=None):
+    v = qs.get(key, [None])[0]
+    if v is None: return default
+    return v
+
+def _parse_net_url(url):
+    """
+    Parse tcp:// / udp:// URL and extract transport options.
+    Returns (parts, opts)
+    """
+    parts = urlparse(url)
+    qs = parse_qs(parts.query or "")
+
+    proto = parts.scheme.lower()
+    if proto not in ("tcp", "udp"):
+        raise ValueError("Only tcp:// or udp:// supported here")
+
+    user = unquote(parts.username) if parts.username else None
+    pw = unquote(parts.password) if parts.password else None
+
+    use_ssl = _qflag(qs, "ssl", False) if proto == "tcp" else False
+    ssl_verify = _qflag(qs, "verify", True)
+    ssl_ca_file = _qstr(qs, "ca", None)
+    ssl_cert = _qstr(qs, "cert", None)
+    ssl_key = _qstr(qs, "key", None)
+
+    timeout = _qnum(qs, "timeout", None, float)
+    total_timeout = _qnum(qs, "total_timeout", None, float)
+    chunk_size = int(_qnum(qs, "chunk", 65536, float))
+
+    force_auth = _qflag(qs, "auth", False)
+    want_sha = _qflag(qs, "sha", True)  # <— NEW: default compute sha
+
+    opts = dict(
+        proto=proto,
+        host=parts.hostname or "127.0.0.1",
+        port=int(parts.port or 0),
+
+        user=user, pw=pw, force_auth=force_auth,
+
+        use_ssl=use_ssl, ssl_verify=ssl_verify,
+        ssl_ca_file=ssl_ca_file, ssl_certfile=ssl_cert, ssl_keyfile=ssl_key,
+
+        timeout=timeout, total_timeout=total_timeout, chunk_size=chunk_size,
+
+        server_hostname=parts.hostname or None,
+
+        # new option
+        want_sha=want_sha,
+
+        # convenience (used as scope in AF1)
+        path=(parts.path or u""),
+    )
+    return parts, opts
+
+def _rewrite_url_without_auth(url):
+    u = urlparse(url)
+    netloc = u.hostname or ''
+    if u.port:
+        netloc += ':' + str(u.port)
+    rebuilt = urlunparse((u.scheme, netloc, u.path, u.params, u.query, u.fragment))
+    usr = unquote(u.username) if u.username else ''
+    pwd = unquote(u.password) if u.password else ''
+    return rebuilt, usr, pwd
+
+def _guess_filename(url, filename):
+    if filename:
+        return filename
+    path = urlparse(url).path or ''
+    base = os.path.basename(path)
+    return base or 'OutFile.'+__file_format_extension__
+
+# ---- progress + rate limiting helpers ----
+try:
+    monotonic = time.monotonic  # Py3
+except Exception:
+    # Py2 fallback: time.time() is good enough for coarse throttling
+    monotonic = time.time
+
+def _progress_tick(now_bytes, total_bytes, last_ts, last_bytes, rate_limit_bps, min_interval=0.1):
+    """
+    Returns (sleep_seconds, new_last_ts, new_last_bytes).
+    - If rate_limit_bps is set, computes how long to sleep to keep average <= limit.
+    - Also enforces a minimum interval between progress callbacks (handled by caller).
+    """
+    now = monotonic()
+    elapsed = max(1e-9, now - last_ts)
+    # Desired time to have elapsed for the given rate:
+    desired = (now_bytes - last_bytes) / float(rate_limit_bps) if rate_limit_bps else 0.0
+    extra = desired - elapsed
+    return (max(0.0, extra), now, now_bytes)
+
+def _discover_len_and_reset(fobj):
+    """
+    Try hard to get total length and restore original position.
+    Returns (length_or_None, start_pos_or_None).
+    Works with seekable files and BytesIO; leaves stream position unchanged.
+    """
+    # Generic seek/tell
+    try:
+        pos0 = fobj.tell()
+        fobj.seek(0, os.SEEK_END)
+        end = fobj.tell()
+        fobj.seek(pos0, os.SEEK_SET)
+        if end is not None and pos0 is not None and end >= pos0:
+            return (end - pos0, pos0)
+    except Exception:
+        pass
+
+    # BytesIO fast path
+    try:
+        getvalue = getattr(fobj, "getvalue", None)
+        if callable(getvalue):
+            buf = getvalue()
+            L = len(buf)
+            try:
+                pos0 = fobj.tell()
+            except Exception:
+                pos0 = 0
+            return (max(0, L - pos0), pos0)
+    except Exception:
+        pass
+
+    # Memoryview/getbuffer
+    try:
+        getbuffer = getattr(fobj, "getbuffer", None)
+        if callable(getbuffer):
+            mv = getbuffer()
+            L = len(mv)
+            try:
+                pos0 = fobj.tell()
+            except Exception:
+                pos0 = 0
+            return (max(0, L - pos0), pos0)
+    except Exception:
+        pass
+
+    return (None, None)
+
 
 def DetectTarBombArchiveFileArray(listarrayfiles,
                                   top_file_ratio_threshold=0.6,
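
The AF1 helpers above are self-contained enough to exercise end to end. A round-trip sketch, assuming the functions are importable from pyarchivefile (the import path is an assumption; the call signatures are as diffed):

from pyarchivefile import build_auth_blob_v1, verify_auth_blob_v1

secret = u"shared-secret"
blob = build_auth_blob_v1(u"alice", secret, scope=u"/upload/out.cat",
                          length=1024, sha_hex="ab" * 32)
ok, user, scope, reason, length, sha_hex = verify_auth_blob_v1(
    blob, expected_user=u"alice", secret=secret,
    expect_scope=u"/upload/out.cat")
assert ok and user == u"alice" and length == 1024

# Replaying the identical blob fails: the module-level _NonceCache
# remembers each nonce for its TTL (600 s by default).
assert verify_auth_blob_v1(blob, secret=secret)[0] is False  # reason "replay"

Transport options for the new tcp:// and udp:// schemes ride on the URL query string; a second sketch of what _parse_net_url extracts (private helper, names as diffed):

from pyarchivefile import _parse_net_url

parts, opts = _parse_net_url(
    "tcp://alice:pw@host.example:9000/out.cat?ssl=1&verify=0&chunk=32768")
assert opts["use_ssl"] and not opts["ssl_verify"]
assert opts["chunk_size"] == 32768 and opts["path"] == "/out.cat"
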
@@ -2925,6 +3386,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
             outlist['ffilelist'].append(HeaderOut)
         countnum = countnum + 1
         realidnum = realidnum + 1
+    outlist.update({'fp': fp})
     return outlist
 
 
@@ -3278,6 +3740,42 @@ def ReadInMultipleFilesWithContentToArray(infile, fmttype="auto", filestart=0, s
     return ReadInMultipleFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
 
 
+def ReadInStackedFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+    outretval = []
+    outstartfile = filestart
+    outfsize = float('inf')
+    while True:
+        if outstartfile >= outfsize:  # stop when function signals False
+            break
+        outarray = ArchiveFileToArray(infile, fmttype, outstartfile, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, True)
+        outfsize = outarray['fsize']
+        if outarray is False:  # stop when function signals False
+            break
+        infile = outarray['fp']
+        outstartfile = infile.tell()
+        outretval.append(outarray)
+    return outretval
+
+
+def ReadInMultipleStackedFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+    if(isinstance(infile, (list, tuple, ))):
+        pass
+    else:
+        infile = [infile]
+    outretval = {}
+    for curfname in infile:
+        outretval[curfname] = ReadInStackedFileWithContentToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+    return outretval
+
+
+def ReadInStackedFilesWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+    return ReadInStackedFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+
+
+def ReadInMultipleStackedFilesWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
+    return ReadInMultipleStackedFileWithContentToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
+
+
 def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
     if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
         formatspecs = formatspecs[fmttype]
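
The stacked readers above treat one file as several archives laid end to end: each pass hands filestart the offset where the previous archive stopped (note the loop reads outarray['fsize'] before its "is False" check, as diffed). A usage sketch, assuming the function is importable from pyarchivefile and that each returned array carries the 'fnumfiles' count used elsewhere in this diff:

from pyarchivefile import ReadInStackedFileWithContentToArray

# stacked.cat is assumed to hold two or more archives back to back.
arrays = ReadInStackedFileWithContentToArray("stacked.cat", listonly=True)
for pos, arr in enumerate(arrays):
    print("archive", pos, "holds", arr['fnumfiles'], "member(s)")
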
@@ -3459,7 +3957,7 @@ def ReadInMultipleFileWithContentToList(infile, fmttype="auto", filestart=0, see
         infile = [infile]
     outretval = {}
     for curfname in infile:
-
+        outretval[curfname] = ReadInFileWithContentToList(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend)
     return outretval
 
 def ReadInMultipleFilesWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False):
@@ -4028,12 +4526,6 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, filevalues=[], ext
                       fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
         AppendFileHeaderWithContent(
             fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
-    if(numfiles > 0):
-        try:
-            fp.write(AppendNullBytes(
-                ["0", "0"], formatspecs['format_delimiter']))
-        except OSError:
-            return False
     fp.seek(0, 0)
     return fp
 
@@ -4097,12 +4589,6 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, filevalues=[], extr
         fcontents.seek(0, 0)
         AppendFileHeaderWithContent(
             fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
-    if(numfiles > 0):
-        try:
-            fp.write(AppendNullBytes(
-                ["0", "0"], formatspecs['format_delimiter']))
-        except OSError:
-            return False
     return fp
 
 
@@ -5495,12 +5981,6 @@ def PackArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", comp
         AppendFileHeaderWithContent(
             fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
         fcontents.close()
-    if(numfiles > 0):
-        try:
-            fp.write(AppendNullBytes(
-                ["0", "0"], formatspecs['format_delimiter']))
-        except OSError:
-            return False
     if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
         fp = CompressOpenFileAlt(
             fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -5796,12 +6276,6 @@ def PackArchiveFileFromTarFile(infile, outfile, fmttype="auto", compression="aut
         AppendFileHeaderWithContent(
             fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
         fcontents.close()
-    if(numfiles > 0):
-        try:
-            fp.write(AppendNullBytes(
-                ["0", "0"], formatspecs['format_delimiter']))
-        except OSError:
-            return False
     if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
         fp = CompressOpenFileAlt(
             fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -6090,12 +6564,6 @@ def PackArchiveFileFromZipFile(infile, outfile, fmttype="auto", compression="aut
         AppendFileHeaderWithContent(
             fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
         fcontents.close()
-    if(numfiles > 0):
-        try:
-            fp.write(AppendNullBytes(
-                ["0", "0"], formatspecs['format_delimiter']))
-        except OSError:
-            return False
     if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
         fp = CompressOpenFileAlt(
             fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -6410,12 +6878,6 @@ if(rarfile_support):
             AppendFileHeaderWithContent(
                 fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
             fcontents.close()
-        if(numfiles > 0):
-            try:
-                fp.write(AppendNullBytes(
-                    ["0", "0"], formatspecs['format_delimiter']))
-            except OSError:
-                return False
         if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
             fp = CompressOpenFileAlt(
                 fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -6664,12 +7126,6 @@ if(py7zr_support):
             AppendFileHeaderWithContent(
                 fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
             fcontents.close()
-        if(numfiles > 0):
-            try:
-                fp.write(AppendNullBytes(
-                    ["0", "0"], formatspecs['format_delimiter']))
-            except OSError:
-                return False
         if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
             fp = CompressOpenFileAlt(
                 fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -7059,24 +7515,75 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_
         return False
 
 
-def ArchiveFileValidateFile(infile, fmttype="auto", formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
-    return ArchiveFileValidate(infile, fmttype, formatspecs, verbose, returnfp)
+def ArchiveFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
+    return ArchiveFileValidate(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
+
+
+def ArchiveFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
+    if(isinstance(infile, (list, tuple, ))):
+        pass
+    else:
+        infile = [infile]
+    outretval = True
+    for curfname in infile:
+        curretfile = ArchiveFileValidate(curfname, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
+        if(not curretfile):
+            outretval = False
+    return outretval
+
+def ArchiveFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
+    return ArchiveFileValidateMultiple(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
+
 
+def StackedArchiveFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
+    outretval = []
+    outstartfile = filestart
+    outfsize = float('inf')
+    while True:
+        if outstartfile >= outfsize:  # stop when function signals False
+            break
+        is_valid_file = ArchiveFileValidate(infile, fmttype, filestart, formatspecs, seektoend, verbose, True)
+        if is_valid_file is False:  # stop when function signals False
+            outretval.append(is_valid_file)
+        else:
+            outretval.append(True)
+            infile = is_valid_file
+        outstartfile = infile.tell()
+        try:
+            infile.seek(0, 2)
+        except OSError:
+            SeekToEndOfFile(infile)
+        except ValueError:
+            SeekToEndOfFile(infile)
+        outfsize = infile.tell()
+        infile.seek(outstartfile, 0)
+    if(returnfp):
+        return infile
+    else:
+        infile.close()
+        return outretval
+
+
+def StackedArchiveFileValidateFile(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
+    return StackedArchiveFileValidate(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
 
-
+
+def StackedArchiveFileValidateMultiple(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
     if(isinstance(infile, (list, tuple, ))):
         pass
     else:
         infile = [infile]
     outretval = True
     for curfname in infile:
-        curretfile =
+        curretfile = StackedArchiveFileValidate(curfname, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
         if(not curretfile):
            outretval = False
     return outretval
 
-def
-    return
+def StackedArchiveFileValidateMultipleFiles(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
+    return StackedArchiveFileValidateMultiple(infile, fmttype, filestart, formatspecs, seektoend, verbose, returnfp)
+
 
 def ArchiveFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
     if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
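
The new multi-input validators fold a list of archives into one boolean; a short usage sketch (function name as diffed, file names hypothetical):

from pyarchivefile import ArchiveFileValidateMultiple

all_ok = ArchiveFileValidateMultiple(["first.cat", "second.cat"], verbose=True)
print("all archives valid" if all_ok else "at least one archive failed")
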
@@ -7089,20 +7596,20 @@ def ArchiveFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend
         fp = infile
         fp.seek(filestart, 0)
         fp = UncompressFileAlt(fp, formatspecs, filestart)
-
-        if(IsNestedDict(formatspecs) and
-            formatspecs = formatspecs[
-        if(
+        compresscheck = CheckCompressionSubType(fp, formatspecs, filestart, True)
+        if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
+            formatspecs = formatspecs[compresscheck]
+        if(compresscheck == "tarfile" and TarFileCheck(infile)):
             return TarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
-        elif(
+        elif(compresscheck == "zipfile" and zipfile.is_zipfile(infile)):
             return ZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
-        elif(rarfile_support and
+        elif(rarfile_support and compresscheck == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
             return RarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
-        elif(py7zr_support and
+        elif(py7zr_support and compresscheck == "7zipfile" and py7zr.is_7zfile(infile)):
             return SevenZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
-        elif(IsSingleDict(formatspecs) and
+        elif(IsSingleDict(formatspecs) and compresscheck != formatspecs['format_magic']):
             return False
-        elif(IsNestedDict(formatspecs) and
+        elif(IsNestedDict(formatspecs) and compresscheck not in formatspecs):
             return False
         if(not fp):
             return False
@@ -7115,9 +7622,9 @@ def ArchiveFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend
         shutil.copyfileobj(sys.stdin, fp)
         fp.seek(filestart, 0)
         fp = UncompressFileAlt(fp, formatspecs, filestart)
-
-        if(IsNestedDict(formatspecs) and
-            formatspecs = formatspecs[
+        compresscheck = CheckCompressionSubType(fp, formatspecs, filestart, True)
+        if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
+            formatspecs = formatspecs[compresscheck]
         if(not fp):
             return False
         fp.seek(filestart, 0)
@@ -7144,20 +7651,20 @@ def ArchiveFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend
         fp.seek(filestart, 0)
     else:
         infile = RemoveWindowsPath(infile)
-
-        if(IsNestedDict(formatspecs) and
-            formatspecs = formatspecs[
-        if(
+        compresscheck = CheckCompressionSubType(infile, formatspecs, filestart, True)
+        if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
+            formatspecs = formatspecs[compresscheck]
+        if(compresscheck == "tarfile" and TarFileCheck(infile)):
             return TarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
-        elif(
+        elif(compresscheck == "zipfile" and zipfile.is_zipfile(infile)):
             return ZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
-        elif(rarfile_support and
+        elif(rarfile_support and compresscheck == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
             return RarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
-        elif(py7zr_support and
+        elif(py7zr_support and compresscheck == "7zipfile" and py7zr.is_7zfile(infile)):
             return SevenZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
-        elif(IsSingleDict(formatspecs) and
+        elif(IsSingleDict(formatspecs) and compresscheck != formatspecs['format_magic']):
             return False
-        elif(IsNestedDict(formatspecs) and
+        elif(IsNestedDict(formatspecs) and compresscheck not in formatspecs):
             return False
     compresscheck = CheckCompressionType(infile, formatspecs, filestart, True)
     if(not compresscheck):
@@ -7536,6 +8043,7 @@ def ArchiveFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend
         outlist.update({'fp': fp})
     else:
         fp.close()
+        outlist.update({'fp': None})
     return outlist
 
 
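
With this one-line change the result dict always carries an 'fp' key: a live file object when returnfp=True, and an explicit None otherwise, instead of the key being absent. A sketch of the uniform caller-side test this enables (archive name hypothetical):

from pyarchivefile import ArchiveFileToArray

arr = ArchiveFileToArray("example.cat", returnfp=False)
if arr and arr.get('fp') is not None:
    arr['fp'].close()  # only runs when a file object was handed back
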
@@ -7546,13 +8054,48 @@ def MultipleArchiveFileToArray(infile, fmttype="auto", filestart=0, seekstart=0,
         infile = [infile]
     outretval = {}
     for curfname in infile:
-
+        outretval[curfname] = ArchiveFileToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
     return outretval
 
 def MultipleArchiveFilesToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
     return MultipleArchiveFileToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
 
 
+def StackedArchiveFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+    outretval = []
+    outstartfile = filestart
+    outfsize = float('inf')
+    while True:
+        if outstartfile >= outfsize:  # stop when function signals False
+            break
+        outarray = ArchiveFileToArray(infile, fmttype, outstartfile, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, True)
+        outfsize = outarray['fsize']
+        if outarray is False:  # stop when function signals False
+            break
+        infile = outarray['fp']
+        outstartfile = infile.tell()
+        if(not returnfp):
+            outarray.update({"fp": None})
+        outretval.append(outarray)
+    if(not returnfp):
+        infile.close()
+    return outretval
+
+
+def MultipleStackedArchiveFileToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+    if(isinstance(infile, (list, tuple, ))):
+        pass
+    else:
+        infile = [infile]
+    outretval = {}
+    for curfname in infile:
+        outretval[curfname] = StackedArchiveFileToArray(curfname, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
+    return outretval
+
+def MultipleStackedArchiveFilesToArray(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+    return MultipleStackedArchiveFileToArray(infile, fmttype, filestart, seekstart, seekend, listonly, contentasfile, uncompress, skipchecksum, formatspecs, seektoend, returnfp)
+
+
 def ArchiveFileStringToArray(instr, filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
     checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
     if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
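
MultipleStackedArchiveFileToArray combines both axes: many input files, each possibly holding several stacked archives. A usage sketch (function name as diffed, inputs hypothetical):

from pyarchivefile import MultipleStackedArchiveFileToArray

results = MultipleStackedArchiveFileToArray(["a.cat", "b.cat"], listonly=True)
for fname, stacks in results.items():
    print(fname, "holds", len(stacks), "stacked archive(s)")
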
@@ -7638,74 +8181,126 @@ def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default_
     outarray = MkTempFile()
     packform = PackArchiveFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
                                compressionlevel, followlink, checksumtype, extradata, formatspecs, verbose, True)
-    listarrayfiles = ArchiveFileToArray(outarray, "auto", filestart, seekstart, seekend, listonly, True, skipchecksum, formatspecs, seektoend, returnfp)
+    listarrayfiles = ArchiveFileToArray(outarray, "auto", filestart, seekstart, seekend, listonly, True, True, skipchecksum, formatspecs, seektoend, returnfp)
     return listarrayfiles
 
 
+# ===== Function (keeps inarray schema; returns entries + indexes) =====
+
 def ArchiveFileArrayToArrayIndex(inarray, returnfp=False):
-
-
-
+    """
+    Build a bidirectional index over an archive listing while preserving the
+    input 'inarray' as-is. Python 2/3 compatible, no external deps.
+
+    Input (unchanged contract):
+      inarray: dict with at least:
+        - 'ffilelist': list of dicts: {'fname': <str>, 'fid': <any>, 'ftype': <int>}
+        - 'fnumfiles': int (expected count)
+        - optional 'fp': any (passed through if returnfp=True)
+
+    Output structure:
+      {
+        'list': inarray,              # alias to original input (not copied)
+        'fp': inarray.get('fp') or None,
+        'entries': { fid: {'name': fname, 'type': ftype} },
+        'indexes': {
+          'by_name': { fname: fid },
+          'by_type': {
+            <category>: {
+              'by_name': { fname: fid },
+              'by_id': { fid: fname },
+              'count': <int>
+            }, ...
+          }
+        },
+        'counts': {
+          'total': <int>,
+          'by_type': { <category>: <int>, ... }
+        },
+        'unknown_types': { <ftype_int>: [fname, ...] }
+      }
+    """
+    if not isinstance(inarray, dict):
        return False
-    if
+    if not inarray:
        return False
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+    # Buckets for categories
+    def _bucket():
+        return {"by_name": {}, "by_id": {}, "count": 0}
+
+    by_type = {}
+    for cat in CATEGORY_ORDER:
+        by_type[cat] = _bucket()
+
+    out = {
+        "list": inarray,
+        "fp": inarray.get("fp") if returnfp else None,
+        "entries": {},
+        "indexes": {
+            "by_name": {},
+            "by_type": by_type,
+        },
+        "counts": {"total": 0, "by_type": {}},
+        "unknown_types": {},
+    }
+
+    ffilelist = inarray.get("ffilelist") or []
+    try:
+        fnumfiles = int(inarray.get("fnumfiles", len(ffilelist)))
+    except Exception:
+        fnumfiles = len(ffilelist)
+
+    # Process only what's present
+    total = min(len(ffilelist), fnumfiles)
+
+    def _add(cat, name, fid):
+        b = by_type[cat]
+        b["by_name"][name] = fid
+        b["by_id"][fid] = name
+        # Count is number of unique names in this category
+        b["count"] = len(b["by_name"])
+
+    i = 0
+    while i < total:
+        e = ffilelist[i]
+        name = e.get("fname")
+        fid = e.get("fid")
+        t = e.get("ftype")
+
+        if name is None or fid is None or t is None:
+            i += 1
+            continue
+
+        # Store canonical entry once, keyed by fid
+        out["entries"][fid] = {"name": name, "type": t}
+
+        # Global reverse index for fast name -> id
+        out["indexes"]["by_name"][name] = fid
+
+        # Base category
+        base_cat = BASE_CATEGORY_BY_CODE.get(t)
+        if base_cat is not None:
+            _add(base_cat, name, fid)
+        else:
+            # Track unknown codes for visibility/forward-compat
+            lst = out["unknown_types"].setdefault(t, [])
+            if name not in lst:
+                lst.append(name)
+
+        # Union categories
+        for union_name, code_set in UNION_RULES:
+            if t in code_set:
+                _add(union_name, name, fid)
+
+        i += 1
+
+    # Counts
+    out["counts"]["total"] = total
+    for cat in CATEGORY_ORDER:
+        out["counts"]["by_type"][cat] = by_type[cat]["count"]
+
+    return out
 
 
 def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
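
A self-contained sketch of the index builder on a hand-built listing (field names per the docstring above; the import path is an assumption). One caveat visible in the diff: BASE_CATEGORY_BY_CODE uses the plural names "characters"/"blocks"/"fifos" while CATEGORY_ORDER spells them "character"/"block"/"fifo", so entries with those type codes would not find a pre-created bucket as diffed; the codes used below avoid that.

from pyarchivefile import ArchiveFileArrayToArrayIndex

inarray = {
    "fnumfiles": 3,
    "ffilelist": [
        {"fname": "docs/readme.txt", "fid": 1, "ftype": 0},  # file
        {"fname": "docs/readme.lnk", "fid": 2, "ftype": 2},  # symlink
        {"fname": "docs",            "fid": 3, "ftype": 5},  # directory
    ],
}
idx = ArchiveFileArrayToArrayIndex(inarray)
assert idx["indexes"]["by_name"]["docs/readme.txt"] == 1
assert idx["counts"]["by_type"]["symlinks"] == 1
assert 2 in idx["indexes"]["by_type"]["links"]["by_id"]  # union bucket
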
@@ -7714,13 +8309,12 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
     else:
         if(infile != "-" and not isinstance(infile, bytes) and not hasattr(infile, "read") and not hasattr(infile, "write")):
             infile = RemoveWindowsPath(infile)
-    listarrayfiles = ArchiveFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, skipchecksum, formatspecs, seektoend, returnfp)
+    listarrayfiles = ArchiveFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, True, skipchecksum, formatspecs, seektoend, returnfp)
     if(IsNestedDict(formatspecs) and fmttype in formatspecs):
         formatspecs = formatspecs[fmttype]
     elif(IsNestedDict(formatspecs) and fmttype not in formatspecs):
         fmttype = __file_format_default__
         formatspecs = formatspecs[fmttype]
-
     if(IsNestedDict(formatspecs) and fmttype in formatspecs):
         formatspecs = formatspecs[fmttype]
     elif(IsNestedDict(formatspecs) and fmttype not in formatspecs):
@@ -7827,11 +8421,11 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
             fdev_major = format(
                 int(listarrayfiles['ffilelist'][reallcfi]['fmajor']), 'x').lower()
             fseeknextfile = listarrayfiles['ffilelist'][reallcfi]['fseeknextfile']
-            if(len(listarrayfiles['ffilelist'][reallcfi]['
+            if(len(listarrayfiles['ffilelist'][reallcfi]['fextradata']) > listarrayfiles['ffilelist'][reallcfi]['fextrafields'] and len(listarrayfiles['ffilelist'][reallcfi]['fextradata']) > 0):
                 listarrayfiles['ffilelist'][reallcfi]['fextrafields'] = len(
-                    listarrayfiles['ffilelist'][reallcfi]['
+                    listarrayfiles['ffilelist'][reallcfi]['fextradata'])
             if(not followlink and len(extradata) <= 0):
-                extradata = listarrayfiles['ffilelist'][reallcfi]['
+                extradata = listarrayfiles['ffilelist'][reallcfi]['fextradata']
             if(not followlink and len(jsondata) <= 0):
                 jsondata = listarrayfiles['ffilelist'][reallcfi]['fjsondata']
             fcontents = listarrayfiles['ffilelist'][reallcfi]['fcontents']
@@ -7910,10 +8504,10 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
                 fdev_minor = format(int(flinkinfo['fminor']), 'x').lower()
                 fdev_major = format(int(flinkinfo['fmajor']), 'x').lower()
                 fseeknextfile = flinkinfo['fseeknextfile']
-                if(len(flinkinfo['
-                    flinkinfo['fextrafields'] = len(flinkinfo['
+                if(len(flinkinfo['fextradata']) > flinkinfo['fextrafields'] and len(flinkinfo['fextradata']) > 0):
+                    flinkinfo['fextrafields'] = len(flinkinfo['fextradata'])
                 if(len(extradata) < 0):
-                    extradata = flinkinfo['
+                    extradata = flinkinfo['fextradata']
                 if(len(jsondata) < 0):
                     extradata = flinkinfo['fjsondata']
                 fcontents = flinkinfo['fcontents']
@@ -7945,12 +8539,6 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
             fcontents.close()
         lcfi = lcfi + 1
         reallcfi = reallcfi + 1
-    if(lcfx > 0):
-        try:
-            fp.write(AppendNullBytes(
-                ["0", "0"], formatspecs['format_delimiter']))
-        except OSError:
-            return False
     if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
         fp = CompressOpenFileAlt(
             fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -8014,7 +8602,7 @@ def UnPackArchiveFile(infile, outdir=None, followlink=False, filestart=0, seekst
     else:
         if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
             infile = RemoveWindowsPath(infile)
-    listarrayfiles = ArchiveFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, skipchecksum, formatspecs, seektoend, returnfp)
+    listarrayfiles = ArchiveFileToArray(infile, "auto", filestart, seekstart, seekend, False, True, True, skipchecksum, formatspecs, seektoend, returnfp)
     if(not listarrayfiles):
         return False
     lenlist = len(listarrayfiles['ffilelist'])
@@ -8262,9 +8850,9 @@ def UnPackArchiveFile(infile, outdir=None, followlink=False, filestart=0, seekst
     return True


-def UnPackArchiveFileString(instr, outdir=None, followlink=False, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
+def UnPackArchiveFileString(instr, outdir=None, followlink=False, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
     fp = MkTempFile(instr)
-    listarrayfiles = UnPackArchiveFile(fp, outdir, followlink, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
+    listarrayfiles = UnPackArchiveFile(fp, outdir, followlink, filestart, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, returnfp)
     return listarrayfiles


 def ftype_to_str(ftype):
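`UnPackArchiveFileString` now threads a `filestart` offset through to `UnPackArchiveFile`, so positional calls written against 0.22.4 shift by one argument. A minimal sketch of the new call shape, using keyword arguments to sidestep the shift (the archive file is hypothetical):

```python
from pyarchivefile import UnPackArchiveFileString

with open("example.cat", "rb") as f:  # hypothetical archive on disk
    instr = f.read()
# filestart (new in 0.23.0) is the byte offset at which the archive begins.
UnPackArchiveFileString(instr, outdir="./out", filestart=0, skipchecksum=False)
```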
@@ -8335,10 +8923,89 @@ def ArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seeke
     return True


-def
+def MultipleArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+    if(isinstance(infile, (list, tuple, ))):
+        pass
+    else:
+        infile = [infile]
+    outretval = {}
+    for curfname in infile:
+        outretval[curfname] = ArchiveFileListFiles(curfname, fmttype, filestart, seekstart, seekend, skipchecksum, formatspecs, seektoend, False, False, returnfp)
+    return outretval
+
+
+def StackedArchiveFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
+    outretval = []
+    outstartfile = filestart
+    outfsize = float('inf')
+    while True:
+        if outstartfile >= outfsize:  # stop once the end of the file is reached
+            break
+        is_valid_file = ArchiveFileValidate(infile, fmttype, outstartfile, formatspecs, seektoend, verbose, True)
+        if is_valid_file is False:  # stop when the function signals False
+            outretval.append(is_valid_file)
+        else:
+            outretval.append(True)
+            infile = is_valid_file
+        outstartfile = infile.tell()
+        try:
+            infile.seek(0, 2)
+        except OSError:
+            SeekToEndOfFile(infile)
+        except ValueError:
+            SeekToEndOfFile(infile)
+        outfsize = infile.tell()
+        infile.seek(outstartfile, 0)
+    if(returnfp):
+        return infile
+    else:
+        infile.close()
+        return outretval
+
+
+def StackedArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
+    outretval = []
+    outstartfile = filestart
+    outfsize = float('inf')
+    while True:
+        if outstartfile >= outfsize:  # stop once the end of the file is reached
+            break
+        list_file_retu = ArchiveFileListFiles(infile, fmttype, outstartfile, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, True)
+        if list_file_retu is False:  # stop when the function signals False
+            outretval.append(list_file_retu)
+        else:
+            outretval.append(True)
+            infile = list_file_retu
+        outstartfile = infile.tell()
+        try:
+            infile.seek(0, 2)
+        except OSError:
+            SeekToEndOfFile(infile)
+        except ValueError:
+            SeekToEndOfFile(infile)
+        outfsize = infile.tell()
+        infile.seek(outstartfile, 0)
+    if(returnfp):
+        return infile
+    else:
+        infile.close()
+        return outretval
+
+
+def MultipleStackedArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, listonly=False, contentasfile=True, uncompress=True, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, returnfp=False):
+    if(isinstance(infile, (list, tuple, ))):
+        pass
+    else:
+        infile = [infile]
+    outretval = {}
+    for curfname in infile:
+        outretval[curfname] = StackedArchiveFileListFiles(curfname, fmttype, filestart, seekstart, seekend, skipchecksum, formatspecs, seektoend, False, False, returnfp)
+    return outretval
+
+
+def ArchiveFileStringListFiles(instr, filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
     fp = MkTempFile(instr)
-    listarrayfiles = ArchiveFileListFiles(
-        instr, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, returnfp)
+    listarrayfiles = ArchiveFileListFiles(instr, "auto", filestart, seekstart, seekend, skipchecksum, formatspecs, seektoend, verbose, newstyle, returnfp)
     return listarrayfiles


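These helpers wrap `ArchiveFileListFiles` for two cases the single-archive API did not cover: several archive files at once (the `Multiple…` variants return a dict keyed by input name) and several archives stacked back-to-back inside one file (the `Stacked…` variants keep re-reading from the returned file pointer until EOF). A hedged usage sketch; the archive paths are hypothetical:

```python
from pyarchivefile import MultipleArchiveFileListFiles, StackedArchiveFileListFiles

# One result per input file, keyed by the name passed in.
results = MultipleArchiveFileListFiles(["first.cat", "second.cat"])

# One result per archive found concatenated inside a single file.
stacked = StackedArchiveFileListFiles("stacked.cat", verbose=True)
```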
@@ -8930,11 +9597,11 @@ def download_file_from_ftp_file(url):
     file_name = os.path.basename(unquote(urlparts.path))
     file_dir = os.path.dirname(unquote(urlparts.path))
     if(urlparts.username is not None):
-        ftp_username = urlparts.username
+        ftp_username = unquote(urlparts.username)
     else:
         ftp_username = "anonymous"
     if(urlparts.password is not None):
-        ftp_password = urlparts.password
+        ftp_password = unquote(urlparts.password)
     elif(urlparts.password is None and urlparts.username == "anonymous"):
         ftp_password = "anonymous"
     else:
|
|
|
8945
9612
|
ftp = FTP_TLS()
|
|
8946
9613
|
else:
|
|
8947
9614
|
return False
|
|
8948
|
-
if(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
|
|
8949
|
-
if(__use_pysftp__):
|
|
8950
|
-
return download_file_from_pysftp_file(url)
|
|
8951
|
-
else:
|
|
8952
|
-
return download_file_from_sftp_file(url)
|
|
8953
|
-
elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
|
|
8954
|
-
return download_file_from_http_file(url)
|
|
8955
9615
|
ftp_port = urlparts.port
|
|
8956
9616
|
if(urlparts.port is None):
|
|
8957
9617
|
ftp_port = 21
|
|
@@ -9028,11 +9688,11 @@ def upload_file_to_ftp_file(ftpfile, url):
     file_name = os.path.basename(unquote(urlparts.path))
     file_dir = os.path.dirname(unquote(urlparts.path))
     if(urlparts.username is not None):
-        ftp_username = urlparts.username
+        ftp_username = unquote(urlparts.username)
     else:
         ftp_username = "anonymous"
     if(urlparts.password is not None):
-        ftp_password = urlparts.password
+        ftp_password = unquote(urlparts.password)
     elif(urlparts.password is None and urlparts.username == "anonymous"):
         ftp_password = "anonymous"
     else:
@@ -9043,13 +9703,6 @@ def upload_file_to_ftp_file(ftpfile, url):
         ftp = FTP_TLS()
     else:
         return False
-    if(urlparts.scheme == "sftp" or urlparts.scheme == "scp"):
-        if(__use_pysftp__):
-            return upload_file_to_pysftp_file(url)
-        else:
-            return upload_file_to_sftp_file(url)
-    elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
-        return False
     ftp_port = urlparts.port
     if(urlparts.port is None):
         ftp_port = 21
@@ -9147,8 +9800,8 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
     if headers is None:
         headers = {}
     urlparts = urlparse(url)
-    username = urlparts.username
-    password = urlparts.password
+    username = unquote(urlparts.username)
+    password = unquote(urlparts.password)

     # Rebuild URL without username and password
     netloc = urlparts.hostname or ''
@@ -9157,15 +9810,6 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
     rebuilt_url = urlunparse((urlparts.scheme, netloc, urlparts.path,
                               urlparts.params, urlparts.query, urlparts.fragment))

-    # Handle SFTP/FTP
-    if urlparts.scheme == "sftp" or urlparts.scheme == "scp":
-        if __use_pysftp__:
-            return download_file_from_pysftp_file(url)
-        else:
-            return download_file_from_sftp_file(url)
-    elif urlparts.scheme == "ftp" or urlparts.scheme == "ftps":
-        return download_file_from_ftp_file(url)
-
     # Create a temporary file object
     httpfile = MkTempFile()

@@ -9229,6 +9873,184 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
     return httpfile


+def upload_file_to_http_file(
+        fileobj,
+        url,
+        method="POST",             # "POST" or "PUT"
+        headers=None,
+        form=None,                 # dict of extra form fields → triggers multipart/form-data
+        field_name="file",         # form field name for the file content
+        filename=None,             # defaults to basename of URL path
+        content_type="application/octet-stream",
+        usehttp=__use_http_lib__,  # 'requests' | 'httpx' | 'mechanize' | anything → urllib fallback
+):
+    """
+    Py2+Py3 compatible HTTP/HTTPS upload.
+
+    - If `form` is provided (dict), uses multipart/form-data:
+        * text fields from `form`
+        * file part named by `field_name` with given `filename` and `content_type`
+    - If `form` is None, uploads raw body as POST/PUT with Content-Type.
+    - Returns True on HTTP 2xx, else False.
+    """
+    if headers is None:
+        headers = {}
+    method = (method or "POST").upper()
+
+    rebuilt_url, username, password = _rewrite_url_without_auth(url)
+    filename = _guess_filename(url, filename)
+
+    # rewind if possible
+    try:
+        fileobj.seek(0)
+    except Exception:
+        pass
+
+    # ========== 1) requests (Py2+Py3) ==========
+    if usehttp == 'requests' and haverequests:
+        import requests
+
+        auth = (username, password) if (username or password) else None
+
+        if form is not None:
+            # multipart/form-data
+            files = {field_name: (filename, fileobj, content_type)}
+            data = form or {}
+            resp = requests.request(method, rebuilt_url, headers=headers, auth=auth,
+                                    files=files, data=data, timeout=(5, 120))
+        else:
+            # raw body
+            hdrs = {'Content-Type': content_type}
+            hdrs.update(headers)
+            # best-effort content-length (helps some servers)
+            if hasattr(fileobj, 'seek') and hasattr(fileobj, 'tell'):
+                try:
+                    cur = fileobj.tell()
+                    fileobj.seek(0, io.SEEK_END if hasattr(io, 'SEEK_END') else 2)
+                    size = fileobj.tell() - cur
+                    fileobj.seek(cur)
+                    hdrs.setdefault('Content-Length', str(size))
+                except Exception:
+                    pass
+            resp = requests.request(method, rebuilt_url, headers=hdrs, auth=auth,
+                                    data=fileobj, timeout=(5, 300))
+
+        return (200 <= resp.status_code < 300)
+
+    # ========== 2) httpx (Py3 only) ==========
+    if usehttp == 'httpx' and havehttpx and not PY2:
+        import httpx
+        auth = (username, password) if (username or password) else None
+
+        with httpx.Client(follow_redirects=True, timeout=60) as client:
+            if form is not None:
+                files = {field_name: (filename, fileobj, content_type)}
+                data = form or {}
+                resp = client.request(method, rebuilt_url, headers=headers, auth=auth,
+                                      files=files, data=data)
+            else:
+                hdrs = {'Content-Type': content_type}
+                hdrs.update(headers)
+                resp = client.request(method, rebuilt_url, headers=hdrs, auth=auth,
+                                      content=fileobj)
+        return (200 <= resp.status_code < 300)
+
+    # ========== 3) mechanize (forms) → prefer requests if available ==========
+    if usehttp == 'mechanize' and havemechanize:
+        # mechanize is great for HTML forms, but file upload requires form discovery.
+        # For a generic upload helper, prefer requests. If not available, fall through.
+        try:
+            import requests  # noqa
+            # delegate to requests path to ensure robust multipart handling
+            return upload_file_to_http_file(
+                fileobj, url, method=method, headers=headers,
+                form=(form or {}), field_name=field_name,
+                filename=filename, content_type=content_type,
+                usehttp='requests'
+            )
+        except Exception:
+            pass  # fall through to urllib
+
+    # ========== 4) urllib fallback (Py2+Py3) ==========
+    # multipart builder (no f-strings)
+    boundary = ('----pyuploader-%s' % uuid.uuid4().hex)
+
+    if form is not None:
+        # Build multipart body to a temp file-like (your MkTempFile())
+        buf = MkTempFile()
+
+        def _w(s):
+            buf.write(_to_bytes(s))
+
+        # text fields
+        if form:
+            for k, v in form.items():
+                _w('--' + boundary + '\r\n')
+                _w('Content-Disposition: form-data; name="%s"\r\n\r\n' % k)
+                _w('' if v is None else (v if isinstance(v, (str, bytes)) else str(v)))
+                _w('\r\n')
+
+        # file field
+        _w('--' + boundary + '\r\n')
+        _w('Content-Disposition: form-data; name="%s"; filename="%s"\r\n' % (field_name, filename))
+        _w('Content-Type: %s\r\n\r\n' % content_type)
+
+        try:
+            fileobj.seek(0)
+        except Exception:
+            pass
+        shutil.copyfileobj(fileobj, buf)
+
+        _w('\r\n')
+        _w('--' + boundary + '--\r\n')
+
+        buf.seek(0)
+        data = buf.read()
+        hdrs = {'Content-Type': 'multipart/form-data; boundary=%s' % boundary}
+        hdrs.update(headers)
+        req = Request(rebuilt_url, data=data)
+        # method override for Py3; Py2 Request ignores 'method' kw
+        if not PY2:
+            req.method = method  # type: ignore[attr-defined]
+    else:
+        # raw body
+        try:
+            fileobj.seek(0)
+        except Exception:
+            pass
+        data = fileobj.read()
+        hdrs = {'Content-Type': content_type}
+        hdrs.update(headers)
+        req = Request(rebuilt_url, data=data)
+        if not PY2:
+            req.method = method  # type: ignore[attr-defined]
+
+    for k, v in hdrs.items():
+        req.add_header(k, v)
+
+    # Basic auth if present
+    if username or password:
+        pwd_mgr = HTTPPasswordMgrWithDefaultRealm()
+        pwd_mgr.add_password(None, rebuilt_url, username, password)
+        opener = build_opener(HTTPBasicAuthHandler(pwd_mgr))
+    else:
+        opener = build_opener()
+
+    # Py2 OpenerDirector.open takes timeout since 2.6; to be safe, avoid passing if it explodes
+    try:
+        resp = opener.open(req, timeout=60)
+    except TypeError:
+        resp = opener.open(req)
+
+    # Status code compat
+    code = getattr(resp, 'status', None) or getattr(resp, 'code', None) or 0
+    try:
+        resp.close()
+    except Exception:
+        pass
+    return (200 <= int(code) < 300)
+
+
 def download_file_from_http_string(url, headers=geturls_headers_pyfile_python_alt, usehttp=__use_http_lib__):
     httpfile = download_file_from_http_file(url, headers, usehttp)
     httpout = httpfile.read()
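A hedged usage sketch for the uploader added above; the endpoint URLs and field names are hypothetical, and which backend actually runs depends on `usehttp` and what is installed:

```python
from pyarchivefile import upload_file_to_http_file

# Multipart/form-data: the file goes in field "file" plus one extra text field.
with open("backup.cat", "rb") as f:
    ok = upload_file_to_http_file(
        f, "https://upload.example.com/api/files",
        form={"note": "nightly backup"}, field_name="file",
        filename="backup.cat", content_type="application/octet-stream")

# Raw-body PUT: with no form dict the bytes become the request body.
with open("backup.cat", "rb") as f:
    ok = upload_file_to_http_file(f, "https://upload.example.com/backup.cat",
                                  method="PUT")
```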
@@ -9247,19 +10069,15 @@ if(haveparamiko):
         else:
             sftp_port = urlparts.port
         if(urlparts.username is not None):
-            sftp_username = urlparts.username
+            sftp_username = unquote(urlparts.username)
         else:
             sftp_username = "anonymous"
         if(urlparts.password is not None):
-            sftp_password = urlparts.password
+            sftp_password = unquote(urlparts.password)
         elif(urlparts.password is None and urlparts.username == "anonymous"):
             sftp_password = "anonymous"
         else:
             sftp_password = ""
-        if(urlparts.scheme == "ftp"):
-            return download_file_from_ftp_file(url)
-        elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
-            return download_file_from_http_file(url)
         if(urlparts.scheme != "sftp" and urlparts.scheme != "scp"):
             return False
         ssh = paramiko.SSHClient()
@@ -9267,7 +10085,7 @@ if(haveparamiko):
         ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
         try:
             ssh.connect(urlparts.hostname, port=sftp_port,
-                        username=sftp_username, password=
+                        username=sftp_username, password=sftp_password)
         except paramiko.ssh_exception.SSHException:
             return False
         except socket.gaierror:
@@ -9308,19 +10126,15 @@ if(haveparamiko):
         else:
             sftp_port = urlparts.port
         if(urlparts.username is not None):
-            sftp_username = urlparts.username
+            sftp_username = unquote(urlparts.username)
         else:
             sftp_username = "anonymous"
         if(urlparts.password is not None):
-            sftp_password = urlparts.password
+            sftp_password = unquote(urlparts.password)
         elif(urlparts.password is None and urlparts.username == "anonymous"):
             sftp_password = "anonymous"
         else:
             sftp_password = ""
-        if(urlparts.scheme == "ftp"):
-            return upload_file_to_ftp_file(sftpfile, url)
-        elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
-            return False
         if(urlparts.scheme != "sftp" and urlparts.scheme != "scp"):
             return False
         ssh = paramiko.SSHClient()
@@ -9369,19 +10183,15 @@ if(havepysftp):
         else:
             sftp_port = urlparts.port
         if(urlparts.username is not None):
-            sftp_username = urlparts.username
+            sftp_username = unquote(urlparts.username)
         else:
             sftp_username = "anonymous"
         if(urlparts.password is not None):
-            sftp_password = urlparts.password
+            sftp_password = unquote(urlparts.password)
         elif(urlparts.password is None and urlparts.username == "anonymous"):
             sftp_password = "anonymous"
         else:
             sftp_password = ""
-        if(urlparts.scheme == "ftp"):
-            return download_file_from_ftp_file(url)
-        elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
-            return download_file_from_http_file(url)
         if(urlparts.scheme != "sftp" and urlparts.scheme != "scp"):
             return False
         try:
@@ -9426,19 +10236,15 @@ if(havepysftp):
         else:
             sftp_port = urlparts.port
         if(urlparts.username is not None):
-            sftp_username = urlparts.username
+            sftp_username = unquote(urlparts.username)
         else:
             sftp_username = "anonymous"
         if(urlparts.password is not None):
-            sftp_password = urlparts.password
+            sftp_password = unquote(urlparts.password)
         elif(urlparts.password is None and urlparts.username == "anonymous"):
             sftp_password = "anonymous"
         else:
             sftp_password = ""
-        if(urlparts.scheme == "ftp"):
-            return upload_file_to_ftp_file(sftpfile, url)
-        elif(urlparts.scheme == "http" or urlparts.scheme == "https"):
-            return False
         if(urlparts.scheme != "sftp" and urlparts.scheme != "scp"):
             return False
         try:
@@ -9484,6 +10290,13 @@ def download_file_from_internet_file(url, headers=geturls_headers_pyfile_python_
             return download_file_from_pysftp_file(url)
         else:
            return download_file_from_sftp_file(url)
+    elif(urlparts.scheme == "tcp" or urlparts.scheme == "udp"):
+        outfile = MkTempFile()
+        returnval = recv_via_url(outfile, url, recv_to_fileobj)
+        if(not returnval):
+            return False
+        outfile.seek(0, 0)
+        return outfile
     else:
         return False
     return False
@@ -9536,6 +10349,12 @@ def upload_file_to_internet_file(ifp, url):
             return upload_file_to_pysftp_file(ifp, url)
         else:
             return upload_file_to_sftp_file(ifp, url)
+    elif(urlparts.scheme == "tcp" or urlparts.scheme == "udp"):
+        ifp.seek(0, 0)
+        returnval = send_via_url(ifp, url, send_from_fileobj)
+        if(not returnval):
+            return False
+        return returnval
     else:
         return False
     return False
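Together with the matching hunk in `download_file_from_internet_file`, this gives the generic internet helpers a raw `tcp://`/`udp://` transport: the download side binds the port named in the URL and waits for a peer, while the upload side connects and streams the file. A hedged round-trip sketch; host, port, and file name are hypothetical, and the receiver must be started before the sender:

```python
from pyarchivefile import (download_file_from_internet_file,
                           upload_file_to_internet_file)

# Receiver (run first): listens on port 9000 and returns a temp file object.
fp = download_file_from_internet_file("tcp://0.0.0.0:9000")

# Sender (run from another process or host):
with open("backup.cat", "rb") as ifp:
    upload_file_to_internet_file(ifp, "tcp://receiver.example.com:9000")
```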
@@ -9574,3 +10393,598 @@ def upload_file_to_internet_compress_string(ifp, url, compression="auto", compre
         return False
     fp.seek(0, 0)
     return upload_file_to_internet_file(fp, outfile)
+
+
+# ---------- Core: send / recv ----------
+def send_from_fileobj(fileobj, host, port, proto="tcp", timeout=None,
+                      chunk_size=65536,
+                      use_ssl=False, ssl_verify=True, ssl_ca_file=None,
+                      ssl_certfile=None, ssl_keyfile=None, server_hostname=None,
+                      auth_user=None, auth_pass=None, auth_scope=u"",
+                      on_progress=None, rate_limit_bps=None, want_sha=True):
+    """
+    Send fileobj contents to (host, port) via TCP or UDP.
+
+    UDP behavior:
+      - Computes total length and sha256 when possible.
+      - Sends: AF1 (if auth) + 'LEN <n> [<sha>]\\n' + payload
+      - If length unknown: stream payload, then 'HASH <sha>\\n' (if enabled), then 'DONE\\n'.
+      - Uses small datagrams (<=1200B) to avoid fragmentation.
+    """
+    proto = (proto or "tcp").lower()
+    total = 0
+    port = int(port)
+    if proto not in ("tcp", "udp"):
+        raise ValueError("proto must be 'tcp' or 'udp'")
+
+    # ---------------- UDP ----------------
+    if proto == "udp":
+        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+        try:
+            if timeout is not None:
+                sock.settimeout(timeout)
+
+            # connect UDP for convenience
+            try:
+                sock.connect((host, port))
+                connected = True
+            except Exception:
+                connected = False
+
+            # length + optional sha
+            total_bytes, start_pos = _discover_len_and_reset(fileobj)
+
+            sha_hex = None
+            if want_sha and total_bytes is not None:
+                import hashlib
+                h = hashlib.sha256()
+                try:
+                    cur = fileobj.tell()
+                except Exception:
+                    cur = None
+                if start_pos is not None:
+                    try: fileobj.seek(start_pos, os.SEEK_SET)
+                    except Exception: pass
+                _HSZ = 1024 * 1024
+                while True:
+                    blk = fileobj.read(_HSZ)
+                    if not blk: break
+                    h.update(_to_bytes(blk))
+                sha_hex = h.hexdigest()
+                if start_pos is not None:
+                    try: fileobj.seek(start_pos, os.SEEK_SET)
+                    except Exception: pass
+                elif cur is not None:
+                    try: fileobj.seek(cur, os.SEEK_SET)
+                    except Exception: pass
+
+            # optional AF1 (also carries len/sha, but we'll still send LEN for robustness)
+            if auth_user is not None or auth_pass is not None:
+                try:
+                    blob = build_auth_blob_v1(
+                        auth_user or u"", auth_pass or u"",
+                        scope=auth_scope, length=total_bytes, sha_hex=(sha_hex if want_sha else None)
+                    )
+                except Exception:
+                    blob = _build_auth_blob_legacy(auth_user or b"", auth_pass or b"")
+                if connected:
+                    sock.send(blob)
+                    # You may ignore the ack in UDP; keep try/except minimal
+                    try:
+                        resp = sock.recv(16)
+                        if resp != _OK:
+                            raise RuntimeError("UDP auth failed")
+                    except Exception:
+                        pass
+                else:
+                    sock.sendto(blob, (host, port))
+                    try:
+                        resp, _ = sock.recvfrom(16)
+                        if resp != _OK:
+                            raise RuntimeError("UDP auth failed")
+                    except Exception:
+                        pass
+
+            # ALWAYS send LEN when length is known
+            if total_bytes is not None:
+                preface = b"LEN " + str(int(total_bytes)).encode("ascii")
+                if want_sha and sha_hex:
+                    preface += b" " + sha_hex.encode("ascii")
+                preface += b"\n"
+                if connected: sock.send(preface)
+                else: sock.sendto(preface, (host, port))
+
+            # payload stream
+            UDP_PAYLOAD_MAX = 1200
+            effective_chunk = min(int(chunk_size or 65536), UDP_PAYLOAD_MAX)
+
+            sent_so_far = 0
+            last_cb_ts = monotonic()
+            last_rate_ts = last_cb_ts
+            last_rate_bytes = 0
+
+            rolling_h = None
+            if want_sha and total_bytes is None:
+                try:
+                    import hashlib
+                    rolling_h = hashlib.sha256()
+                except Exception:
+                    rolling_h = None
+
+            while True:
+                chunk = fileobj.read(effective_chunk)
+                if not chunk:
+                    break
+                b = _to_bytes(chunk)
+                if rolling_h is not None:
+                    rolling_h.update(b)
+                n = (sock.send(b) if connected else sock.sendto(b, (host, port)))
+                total += n
+                sent_so_far += n
+
+                if rate_limit_bps:
+                    sleep_s, last_rate_ts, last_rate_bytes = _progress_tick(
+                        sent_so_far, total_bytes, last_rate_ts, last_rate_bytes, rate_limit_bps
+                    )
+                    if sleep_s > 0.0:
+                        time.sleep(min(sleep_s, 0.25))
+
+                if on_progress and (monotonic() - last_cb_ts) >= 0.1:
+                    try: on_progress(sent_so_far, total_bytes)
+                    except Exception: pass
+                    last_cb_ts = monotonic()
+
+            # unknown-length trailers
+            if total_bytes is None:
+                if rolling_h is not None:
+                    try:
+                        th = rolling_h.hexdigest().encode("ascii")
+                        (sock.send(b"HASH " + th + b"\n") if connected
+                         else sock.sendto(b"HASH " + th + b"\n", (host, port)))
+                    except Exception:
+                        pass
+                try:
+                    (sock.send(b"DONE\n") if connected else sock.sendto(b"DONE\n", (host, port)))
+                except Exception:
+                    pass
+
+        finally:
+            try: sock.close()
+            except Exception: pass
+        return total
+
+    # ---------------- TCP ----------------
+    sock = _connect_stream(host, port, timeout)
+    try:
+        if use_ssl:
+            if not _ssl_available():
+                raise RuntimeError("SSL requested but 'ssl' module unavailable.")
+            sock = _ssl_wrap_socket(sock, server_side=False,
+                                    server_hostname=(server_hostname or host),
+                                    verify=ssl_verify, ca_file=ssl_ca_file,
+                                    certfile=ssl_certfile, keyfile=ssl_keyfile)
+
+        total_bytes, start_pos = _discover_len_and_reset(fileobj)
+        sha_hex = None
+        if want_sha and total_bytes is not None:
+            try:
+                import hashlib
+                h = hashlib.sha256()
+                cur = fileobj.tell()
+                if start_pos is not None:
+                    fileobj.seek(start_pos, os.SEEK_SET)
+                _HSZ = 1024 * 1024
+                while True:
+                    blk = fileobj.read(_HSZ)
+                    if not blk: break
+                    h.update(_to_bytes(blk))
+                sha_hex = h.hexdigest()
+                fileobj.seek(cur, os.SEEK_SET)
+            except Exception:
+                sha_hex = None
+
+        if auth_user is not None or auth_pass is not None:
+            try:
+                blob = build_auth_blob_v1(
+                    auth_user or u"", auth_pass or u"",
+                    scope=auth_scope, length=total_bytes, sha_hex=(sha_hex if want_sha else None)
+                )
+            except Exception:
+                blob = _build_auth_blob_legacy(auth_user or b"", auth_pass or b"")
+            sock.sendall(blob)
+            try:
+                resp = sock.recv(16)
+                if resp != _OK:
+                    raise RuntimeError("TCP auth failed")
+            except Exception:
+                pass
+
+        sent_so_far = 0
+        last_cb_ts = monotonic()
+        last_rate_ts = last_cb_ts
+        last_rate_bytes = 0
+
+        use_sendfile = hasattr(sock, "sendfile") and hasattr(fileobj, "read")
+        if use_sendfile:
+            try:
+                sent = sock.sendfile(fileobj)
+                if isinstance(sent, int):
+                    total += sent
+                    sent_so_far += sent
+                    if on_progress:
+                        try: on_progress(sent_so_far, total_bytes)
+                        except Exception: pass
+                else:
+                    raise RuntimeError("sendfile returned unexpected type")
+            except Exception:
+                while True:
+                    chunk = fileobj.read(chunk_size)
+                    if not chunk: break
+                    view = memoryview(_to_bytes(chunk))
+                    while view:
+                        n = sock.send(view); total += n; sent_so_far += n; view = view[n:]
+                    if rate_limit_bps:
+                        sleep_s, last_rate_ts, last_rate_bytes = _progress_tick(
+                            sent_so_far, total_bytes, last_rate_ts, last_rate_bytes, rate_limit_bps
+                        )
+                        if sleep_s > 0.0:
+                            time.sleep(min(sleep_s, 0.25))
+                    if on_progress and (monotonic() - last_cb_ts) >= 0.1:
+                        try: on_progress(sent_so_far, total_bytes)
+                        except Exception: pass
+                        last_cb_ts = monotonic()
+        else:
+            while True:
+                chunk = fileobj.read(chunk_size)
+                if not chunk: break
+                view = memoryview(_to_bytes(chunk))
+                while view:
+                    n = sock.send(view); total += n; sent_so_far += n; view = view[n:]
+                if rate_limit_bps:
+                    sleep_s, last_rate_ts, last_rate_bytes = _progress_tick(
+                        sent_so_far, total_bytes, last_rate_ts, last_rate_bytes, rate_limit_bps
+                    )
+                    if sleep_s > 0.0:
+                        time.sleep(min(sleep_s, 0.25))
+                if on_progress and (monotonic() - last_cb_ts) >= 0.1:
+                    try: on_progress(sent_so_far, total_bytes)
+                    except Exception: pass
+                    last_cb_ts = monotonic()
+    finally:
+        try: sock.shutdown(socket.SHUT_WR)
+        except Exception: pass
+        try: sock.close()
+        except Exception: pass
+    return total
+
+def recv_to_fileobj(fileobj, host="", port=0, proto="tcp", timeout=None,
+                    max_bytes=None, chunk_size=65536, backlog=1,
+                    use_ssl=False, ssl_verify=True, ssl_ca_file=None,
+                    ssl_certfile=None, ssl_keyfile=None,
+                    require_auth=False, expected_user=None, expected_pass=None,
+                    total_timeout=None, expect_scope=None,
+                    on_progress=None, rate_limit_bps=None):
+    """
+    Receive bytes into fileobj over TCP/UDP.
+
+    UDP specifics:
+      * Accepts 'LEN <n> [<sha>]\\n' and 'HASH <sha>\\n' control frames (unauth) or AF1 with len/sha.
+      * If length unknown, accepts final 'DONE\\n' to end cleanly.
+    """
+    proto = (proto or "tcp").lower()
+    port = int(port)
+    total = 0
+
+    start_ts = time.time()
+    def _time_left():
+        if total_timeout is None:
+            return None
+        left = total_timeout - (time.time() - start_ts)
+        return 0.0 if left <= 0 else left
+    def _set_effective_timeout(socklike, base_timeout):
+        left = _time_left()
+        if left == 0.0:
+            return False
+        eff = base_timeout
+        if left is not None:
+            eff = left if eff is None else min(eff, left)
+        if eff is not None:
+            try:
+                socklike.settimeout(eff)
+            except Exception:
+                pass
+        return True
+
+    if proto not in ("tcp", "udp"):
+        raise ValueError("proto must be 'tcp' or 'udp'")
+
+    # ---------------- UDP server ----------------
+    if proto == "udp":
+        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+        authed_addr = None
+        expected_len = None
+        expected_sha = None
+
+        try:
+            sock.bind(("", port))
+            if timeout is None:
+                try: sock.settimeout(10.0)
+                except Exception: pass
+
+            recvd_so_far = 0
+            last_cb_ts = monotonic()
+            last_rate_ts = last_cb_ts
+            last_rate_bytes = 0
+
+            while True:
+                if _time_left() == 0.0:
+                    if expected_len is not None and total < expected_len:
+                        raise RuntimeError("UDP receive aborted by total_timeout before full payload received")
+                    break
+                if (max_bytes is not None) and (total >= max_bytes):
+                    break
+
+                if not _set_effective_timeout(sock, timeout):
+                    if expected_len is not None and total < expected_len:
+                        raise RuntimeError("UDP receive timed out before full payload received")
+                    if expected_len is None and total > 0:
+                        raise RuntimeError("UDP receive timed out with unknown length; partial data")
+                    if expected_len is None and total == 0:
+                        raise RuntimeError("UDP receive: no packets received before timeout (is the sender running?)")
+                    break
+
+                try:
+                    data, addr = sock.recvfrom(chunk_size)
+                except socket.timeout:
+                    if expected_len is not None and total < expected_len:
+                        raise RuntimeError("UDP receive idle-timeout before full payload received")
+                    if expected_len is None and total > 0:
+                        raise RuntimeError("UDP receive idle-timeout with unknown length; partial data")
+                    if expected_len is None and total == 0:
+                        raise RuntimeError("UDP receive: no packets received before timeout (is the sender running?)")
+                    break
+
+                if not data:
+                    continue
+
+                # (0) Control frames FIRST: LEN / HASH / DONE
+                if data.startswith(b"LEN ") and expected_len is None:
+                    try:
+                        parts = data.strip().split()
+                        n = int(parts[1])
+                        expected_len = (None if n < 0 else n)
+                        if len(parts) >= 3:
+                            expected_sha = parts[2].decode("ascii")
+                    except Exception:
+                        expected_len = None
+                        expected_sha = None
+                    continue
+
+                if data.startswith(b"HASH "):
+                    try:
+                        expected_sha = data.strip().split()[1].decode("ascii")
+                    except Exception:
+                        expected_sha = None
+                    continue
+
+                if data == b"DONE\n":
+                    break
+
+                # (1) Auth (AF1 preferred; legacy fallback)
+                if authed_addr is None and require_auth:
+                    ok = False
+                    v_ok, v_user, v_scope, _r, v_len, v_sha = verify_auth_blob_v1(
+                        data, expected_user=expected_user, secret=expected_pass,
+                        max_skew=600, expect_scope=expect_scope
+                    )
+                    if v_ok:
+                        ok = True
+                        if expected_len is None:
+                            expected_len = v_len
+                        if expected_sha is None:
+                            expected_sha = v_sha
+                    else:
+                        user, pw = _parse_auth_blob_legacy(data)
+                        ok = (user is not None and
+                              (expected_user is None or user == _to_bytes(expected_user)) and
+                              (expected_pass is None or pw == _to_bytes(expected_pass)))
+                    try:
+                        sock.sendto((_OK if ok else _NO), addr)
+                    except Exception:
+                        pass
+                    if ok:
+                        authed_addr = addr
+                    continue
+
+                if require_auth and addr != authed_addr:
+                    continue
+
+                # (2) Payload
+                fileobj.write(data)
+                try: fileobj.flush()
+                except Exception: pass
+                total += len(data)
+                recvd_so_far += len(data)
+
+                if rate_limit_bps:
+                    sleep_s, last_rate_ts, last_rate_bytes = _progress_tick(
+                        recvd_so_far, expected_len, last_rate_ts, last_rate_bytes, rate_limit_bps
+                    )
+                    if sleep_s > 0.0:
+                        time.sleep(min(sleep_s, 0.25))
+
+                if on_progress and (monotonic() - last_cb_ts) >= 0.1:
+                    try: on_progress(recvd_so_far, expected_len)
+                    except Exception: pass
+                    last_cb_ts = monotonic()
+
+                if expected_len is not None and total >= expected_len:
+                    break
+
+            # Post-conditions
+            if expected_len is not None and total != expected_len:
+                raise RuntimeError("UDP receive incomplete: got %d of %s bytes" % (total, expected_len))
+
+            if expected_sha:
+                import hashlib
+                try:
+                    cur = fileobj.tell(); fileobj.seek(0)
+                except Exception:
+                    cur = None
+                h = hashlib.sha256(); _HSZ = 1024 * 1024
+                while True:
+                    blk = fileobj.read(_HSZ)
+                    if not blk: break
+                    h.update(_to_bytes(blk))
+                got = h.hexdigest()
+                if cur is not None:
+                    try: fileobj.seek(cur)
+                    except Exception: pass
+                if got != expected_sha:
+                    raise RuntimeError("UDP checksum mismatch: got %s expected %s" % (got, expected_sha))
+
+        finally:
+            try: sock.close()
+            except Exception: pass
+        return total
+
+    # ---------------- TCP server ----------------
+    srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+    try:
+        try: srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+        except Exception: pass
+        srv.bind((host or "", port))
+        srv.listen(int(backlog) if backlog else 1)
+
+        if not _set_effective_timeout(srv, timeout):
+            return 0
+        try:
+            conn, _peer = srv.accept()
+        except socket.timeout:
+            return 0
+
+        if use_ssl:
+            if not _ssl_available():
+                try: conn.close()
+                except Exception: pass
+                raise RuntimeError("SSL requested but 'ssl' module unavailable.")
+            if not ssl_certfile:
+                try: conn.close()
+                except Exception: pass
+                raise ValueError("TLS server requires ssl_certfile (and usually ssl_keyfile).")
+            conn = _ssl_wrap_socket(conn, server_side=True, server_hostname=None,
+                                    verify=ssl_verify, ca_file=ssl_ca_file,
+                                    certfile=ssl_certfile, keyfile=ssl_keyfile)
+
+        recvd_so_far = 0
+        last_cb_ts = monotonic()
+        last_rate_ts = last_cb_ts
+        last_rate_bytes = 0
+
+        try:
+            if require_auth:
+                if not _set_effective_timeout(conn, timeout):
+                    return 0
+                try:
+                    preface = conn.recv(2048)
+                except socket.timeout:
+                    try: conn.sendall(_NO)
+                    except Exception: pass
+                    return 0
+
+                ok = False
+                v_ok, v_user, v_scope, _r, v_len, v_sha = verify_auth_blob_v1(
+                    preface or b"", expected_user=expected_user, secret=expected_pass,
+                    max_skew=600, expect_scope=expect_scope
+                )
+                if v_ok:
+                    ok = True
+                else:
+                    user, pw = _parse_auth_blob_legacy(preface or b"")
+                    ok = (user is not None and
+                          (expected_user is None or user == _to_bytes(expected_user)) and
+                          (expected_pass is None or pw == _to_bytes(expected_pass)))
+
+                try: conn.sendall(_OK if ok else _NO)
+                except Exception: pass
+                if not ok:
+                    return 0
+
+            while True:
+                if _time_left() == 0.0: break
+                if (max_bytes is not None) and (total >= max_bytes): break
+
+                if not _set_effective_timeout(conn, timeout):
+                    break
+                try:
+                    data = conn.recv(chunk_size)
+                except socket.timeout:
+                    break
+                if not data:
+                    break
+
+                fileobj.write(data)
+                try: fileobj.flush()
+                except Exception: pass
+                total += len(data)
+                recvd_so_far += len(data)
+
+                if rate_limit_bps:
+                    sleep_s, last_rate_ts, last_rate_bytes = _progress_tick(
+                        recvd_so_far, max_bytes, last_rate_ts, last_rate_bytes, rate_limit_bps
+                    )
+                    if sleep_s > 0.0:
+                        time.sleep(min(sleep_s, 0.25))
+
+                if on_progress and (monotonic() - last_cb_ts) >= 0.1:
+                    try: on_progress(recvd_so_far, max_bytes)
+                    except Exception: pass
+                    last_cb_ts = monotonic()
+        finally:
+            try: conn.shutdown(socket.SHUT_RD)
+            except Exception: pass
+            try: conn.close()
+            except Exception: pass
+    finally:
+        try: srv.close()
+        except Exception: pass
+
+    return total
+
+# ---------- URL drivers ----------
+def send_via_url(fileobj, url, send_from_fileobj_func=send_from_fileobj):
+    """
+    Use URL options to drive the sender. Returns bytes sent.
+    """
+    parts, o = _parse_net_url(url)
+    use_auth = (o["user"] is not None and o["pw"] is not None) or o["force_auth"]
+    return send_from_fileobj_func(
+        fileobj,
+        o["host"], o["port"], proto=o["proto"],
+        timeout=o["timeout"], chunk_size=o["chunk_size"],
+        use_ssl=o["use_ssl"], ssl_verify=o["ssl_verify"],
+        ssl_ca_file=o["ssl_ca_file"], ssl_certfile=o["ssl_certfile"], ssl_keyfile=o["ssl_keyfile"],
+        server_hostname=o["server_hostname"],
+        auth_user=(o["user"] if use_auth else None),
+        auth_pass=(o["pw"] if use_auth else None),
+        auth_scope=o.get("path", u""),
+        want_sha=o["want_sha"],  # pass through
+    )
+
+def recv_via_url(fileobj, url, recv_to_fileobj_func=recv_to_fileobj):
+    """
+    Use URL options to drive the receiver. Returns bytes received.
+    """
+    parts, o = _parse_net_url(url)
+    require_auth = (o["user"] is not None and o["pw"] is not None) or o["force_auth"]
+    return recv_to_fileobj_func(
+        fileobj,
+        o["host"], o["port"], proto=o["proto"],
+        timeout=o["timeout"], total_timeout=o["total_timeout"],
+        chunk_size=o["chunk_size"],
+        use_ssl=o["use_ssl"], ssl_verify=o["ssl_verify"],
+        ssl_ca_file=o["ssl_ca_file"], ssl_certfile=o["ssl_certfile"], ssl_keyfile=o["ssl_keyfile"],
+        require_auth=require_auth,
+        expected_user=o["user"], expected_pass=o["pw"],
+        expect_scope=o.get("path", u""),
+    )
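A hedged sketch of the low-level pair from this hunk, run in one process purely for illustration; the names come from the hunk above, the port is arbitrary, and a real deployment would run sender and receiver on separate hosts. The receiver must be listening before the sender connects, hence the thread and the short sleep:

```python
import io
import threading
import time

from pyarchivefile import recv_to_fileobj, send_from_fileobj

received = io.BytesIO()
server = threading.Thread(
    target=recv_to_fileobj,
    kwargs=dict(fileobj=received, host="127.0.0.1", port=9000,
                proto="tcp", timeout=5))
server.start()
time.sleep(0.2)  # crude: give the listener time to bind and accept

payload = io.BytesIO(b"archive bytes would go here")
send_from_fileobj(payload, "127.0.0.1", 9000, proto="tcp", timeout=5)
server.join()
assert received.getvalue() == b"archive bytes would go here"
```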