copyparty 1.15.1__py3-none-any.whl → 1.15.2__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
- copyparty/__init__.py +1 -0
- copyparty/__main__.py +16 -12
- copyparty/__version__.py +2 -2
- copyparty/authsrv.py +11 -6
- copyparty/broker_mp.py +2 -2
- copyparty/broker_mpw.py +2 -2
- copyparty/broker_thr.py +4 -9
- copyparty/broker_util.py +13 -1
- copyparty/cfg.py +1 -0
- copyparty/fsutil.py +0 -3
- copyparty/httpcli.py +105 -60
- copyparty/httpconn.py +0 -1
- copyparty/httpsrv.py +3 -5
- copyparty/svchub.py +2 -6
- copyparty/th_srv.py +15 -7
- copyparty/u2idx.py +0 -3
- copyparty/up2k.py +206 -98
- copyparty/util.py +116 -63
- copyparty/web/browser.js.gz +0 -0
- copyparty/web/splash.css.gz +0 -0
- copyparty/web/splash.html +12 -0
- copyparty/web/splash.js.gz +0 -0
- {copyparty-1.15.1.dist-info → copyparty-1.15.2.dist-info}/METADATA +15 -3
- {copyparty-1.15.1.dist-info → copyparty-1.15.2.dist-info}/RECORD +28 -28
- {copyparty-1.15.1.dist-info → copyparty-1.15.2.dist-info}/WHEEL +1 -1
- {copyparty-1.15.1.dist-info → copyparty-1.15.2.dist-info}/LICENSE +0 -0
- {copyparty-1.15.1.dist-info → copyparty-1.15.2.dist-info}/entry_points.txt +0 -0
- {copyparty-1.15.1.dist-info → copyparty-1.15.2.dist-info}/top_level.txt +0 -0
copyparty/httpcli.py
CHANGED
@@ -2,7 +2,6 @@
 from __future__ import print_function, unicode_literals
 
 import argparse # typechk
-import base64
 import calendar
 import copy
 import errno
@@ -58,6 +57,7 @@ from .util import (
 absreal,
 alltrace,
 atomic_move,
+b64dec,
 exclude_dotfiles,
 formatdate,
 fsenc,
@@ -87,6 +87,7 @@ from .util import (
 relchk,
 ren_open,
 runhook,
+s2hms,
 s3enc,
 sanitize_fn,
 sanitize_vpath,
@@ -123,7 +124,6 @@ class HttpCli(object):
 """
 
 def __init__(self, conn ) :
-assert conn.sr
 
 self.t0 = time.time()
 self.conn = conn
@@ -498,7 +498,7 @@
 ):
 try:
 zb = zso.split(" ")[1].encode("ascii")
-zs =
+zs = b64dec(zb).decode("utf-8")
 # try "pwd", "x:pwd", "pwd:x"
 for bauth in [zs] + zs.split(":", 1)[::-1]:
 if bauth in self.asrv.sesa:
@@ -1391,7 +1391,6 @@
 xroot = mkenod("D:orz")
 xroot.insert(0, parse_xml(txt))
 xprop = xroot.find(r"./{DAV:}propertyupdate/{DAV:}set/{DAV:}prop")
-assert xprop
 for ze in xprop:
 ze.clear()
 
@@ -1399,12 +1398,10 @@
 xroot = parse_xml(txt)
 
 el = xroot.find(r"./{DAV:}response")
-assert el
 e2 = mktnod("D:href", quotep(self.args.SRS + self.vpath))
 el.insert(0, e2)
 
 el = xroot.find(r"./{DAV:}response/{DAV:}propstat")
-assert el
 el.insert(0, xprop)
 
 ret = '<?xml version="1.0" encoding="{}"?>\n'.format(uenc)
@@ -1788,7 +1785,6 @@
 fn = os.devnull
 
 params.update(open_ka)
-assert fn
 
 if not self.args.nw:
 if rnd:
@@ -1860,10 +1856,12 @@
 # small toctou, but better than clobbering a hardlink
 wunlink(self.log, path, vfs.flags)
 
-
-
+f, fn = ren_open(fn, *open_a, **params)
+try:
 path = os.path.join(fdir, fn)
 post_sz, sha_hex, sha_b64 = hashcopy(reader, f, self.args.s_wr_slp)
+finally:
+f.close()
 
 if lim:
 lim.nup(self.ip)
@@ -1902,8 +1900,8 @@
 fn2 = fn.rsplit(".", 1)[0] + "." + ext
 
 params["suffix"] = suffix[:-4]
-
-
+f, fn2 = ren_open(fn2, *open_a, **params)
+f.close()
 
 path2 = os.path.join(fdir, fn2)
 atomic_move(self.log, path, path2, vfs.flags)
@@ -2097,7 +2095,6 @@
 raise Pebkac(422, 'invalid action "{}"'.format(act))
 
 def handle_zip_post(self) :
-assert self.parser
 try:
 k = next(x for x in self.uparam if x in ("zip", "tar"))
 except:
@@ -2297,11 +2294,16 @@
 vfs, _ = self.asrv.vfs.get(self.vpath, self.uname, False, True)
 ptop = (vfs.dbv or vfs).realpath
 
-
+broker = self.conn.hsrv.broker
+x = broker.ask("up2k.handle_chunks", ptop, wark, chashes)
 response = x.get()
 chashes, chunksize, cstarts, path, lastmod, sprs = response
 maxsize = chunksize * len(chashes)
 cstart0 = cstarts[0]
+locked = chashes # remaining chunks to be received in this request
+written = [] # chunks written to disk, but not yet released by up2k
+num_left = -1 # num chunks left according to most recent up2k release
+treport = time.time() # ratelimit up2k reporting to reduce overhead
 
 try:
 if self.args.nw:
@@ -2347,11 +2349,8 @@
 remains -= chunksize
 
 if len(cstart) > 1 and path != os.devnull:
-
-
-cstart[0], " & ".join(unicode(x) for x in cstart[1:])
-)
-)
+t = " & ".join(unicode(x) for x in cstart[1:])
+self.log("clone %s to %s" % (cstart[0], t))
 ofs = 0
 while ofs < chunksize:
 bufsz = max(4 * 1024 * 1024, self.args.iobuf)
@@ -2366,6 +2365,25 @@
 
 self.log("clone {} done".format(cstart[0]))
 
+# be quick to keep the tcp winsize scale;
+# if we can't confirm rn then that's fine
+written.append(chash)
+now = time.time()
+if now - treport < 1:
+continue
+treport = now
+x = broker.ask("up2k.fast_confirm_chunks", ptop, wark, written)
+num_left, t = x.get()
+if num_left < -1:
+self.loud_reply(t, status=500)
+locked = written = []
+return False
+elif num_left >= 0:
+t = "got %d more chunks, %d left"
+self.log(t % (len(written), num_left), 6)
+locked = locked[len(written) :]
+written = []
+
 if not fpool:
 f.close()
 else:
@@ -2376,25 +2394,25 @@
 f.close()
 raise
 finally:
-
-
+if locked:
+# now block until all chunks released+confirmed
+x = broker.ask("up2k.confirm_chunks", ptop, wark, locked)
+num_left, t = x.get()
+if num_left < 0:
+self.loud_reply(t, status=500)
+return False
+t = "got %d more chunks, %d left"
+self.log(t % (len(locked), num_left), 6)
 
-
-
-try:
-num_left, fin_path = ztis
-except:
-self.loud_reply(ztis, status=500)
-return False
+if num_left < 0:
+raise Pebkac(500, "unconfirmed; see serverlog")
 
 if not num_left and fpool:
 with self.u2mutex:
 self.u2fh.close(path)
 
 if not num_left and not self.args.nw:
-self.
-"up2k.finish_upload", ptop, wark, self.u2fh.aps
-).get()
+broker.ask("up2k.finish_upload", ptop, wark, self.u2fh.aps).get()
 
 cinf = self.headers.get("x-up2k-stat", "")
 
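
The hunks above change how an up2k chunk PUT reports progress: each finished chunk is appended to "written", flushed to up2k via "up2k.fast_confirm_chunks" at most once per second so the TCP window keeps scaling, and whatever is still held in "locked" is confirmed in one blocking "up2k.confirm_chunks" call when the request ends. A minimal sketch of that pattern, with recv_one and confirm as hypothetical stand-ins for the chunk writer and the broker round-trip:

    import time

    def receive_chunks(chashes, recv_one, confirm, min_interval=1.0):
        # receive every chunk, reporting completed ones at most once per
        # min_interval seconds; anything still unreported at the end is
        # confirmed in a single blocking call
        locked = list(chashes)  # chunks this request still owns
        written = []            # written to disk but not yet reported
        treport = time.time()   # time of the last report
        try:
            for chash in chashes:
                recv_one(chash)  # write this chunk to disk
                written.append(chash)
                now = time.time()
                if now - treport < min_interval:
                    continue  # keep the socket busy; report later
                treport = now
                confirm(written)  # quick, non-final report
                locked = locked[len(written):]
                written = []
        finally:
            if locked:
                confirm(locked)  # block until everything is released
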
@@ -2404,7 +2422,6 @@
 return True
 
 def handle_chpw(self) :
-assert self.parser
 pwd = self.parser.require("pw", 64)
 self.parser.drop()
 
@@ -2421,7 +2438,6 @@
 return True
 
 def handle_login(self) :
-assert self.parser
 pwd = self.parser.require("cppwd", 64)
 try:
 uhash = self.parser.require("uhash", 256)
@@ -2449,7 +2465,6 @@
 return True
 
 def handle_logout(self) :
-assert self.parser
 self.parser.drop()
 
 self.log("logout " + self.uname)
@@ -2478,7 +2493,7 @@
 logpwd = ""
 elif self.args.log_badpwd == 2:
 zb = hashlib.sha512(pwd.encode("utf-8", "replace")).digest()
-logpwd = "%" +
+logpwd = "%" + ub64enc(zb[:12]).decode("ascii")
 
 if pwd != "x":
 self.log("invalid password: {}".format(logpwd), 3)
@@ -2503,7 +2518,6 @@
 return dur > 0, msg
 
 def handle_mkdir(self) :
-assert self.parser
 new_dir = self.parser.require("name", 512)
 self.parser.drop()
 
@@ -2549,7 +2563,6 @@
 return True
 
 def handle_new_md(self) :
-assert self.parser
 new_file = self.parser.require("name", 512)
 self.parser.drop()
 
@@ -2715,8 +2728,8 @@
 bos.makedirs(fdir)
 
 # reserve destination filename
-
-
+f, fname = ren_open(fname, "wb", fdir=fdir, suffix=suffix)
+f.close()
 
 tnam = fname + ".PARTIAL"
 if self.args.dotpart:
@@ -2739,8 +2752,8 @@
 v2 = lim.dfv - lim.dfl
 max_sz = min(v1, v2) if v1 and v2 else v1 or v2
 
-
-
+f, tnam = ren_open(tnam, "wb", self.args.iobuf, **open_args)
+try:
 tabspath = os.path.join(fdir, tnam)
 self.log("writing to {}".format(tabspath))
 sz, sha_hex, sha_b64 = hashcopy(
@@ -2748,6 +2761,8 @@
 )
 if sz == 0:
 raise Pebkac(400, "empty files in post")
+finally:
+f.close()
 
 if lim:
 lim.nup(self.ip)
@@ -2957,7 +2972,6 @@
 return True
 
 def handle_text_upload(self) :
-assert self.parser
 try:
 cli_lastmod3 = int(self.parser.require("lastmod", 16))
 except:
@@ -3042,7 +3056,6 @@
 pass
 wrename(self.log, fp, os.path.join(mdir, ".hist", mfile2), vfs.flags)
 
-assert self.parser.gen
 p_field, _, p_data = next(self.parser.gen)
 if p_field != "body":
 raise Pebkac(400, "expected body, got {}".format(p_field))
@@ -3143,7 +3156,6 @@
 # some browser append "; length=573"
 cli_lastmod = cli_lastmod.split(";")[0].strip()
 cli_dt = parsedate(cli_lastmod)
-assert cli_dt
 cli_ts = calendar.timegm(cli_dt)
 return file_lastmod, int(file_ts) > int(cli_ts)
 except Exception as ex:
@@ -3911,6 +3923,9 @@
 vp = re.sub(r"[<>&$?`\"']", "_", self.uparam["hc"] or "").lstrip("/")
 pw = pw.replace(" ", "%20")
 vp = vp.replace(" ", "%20")
+if pw in self.asrv.sesa:
+pw = "pwd"
+
 html = self.j2s(
 "svcs",
 args=self.args,
@@ -3935,11 +3950,30 @@
 for y in [self.rvol, self.wvol, self.avol]
 ]
 
-
-
+ups = []
+now = time.time()
+get_vst = self.avol and not self.args.no_rescan
+get_ups = self.rvol and not self.args.no_up_list and self.uname or ""
+if get_vst or get_ups:
+x = self.conn.hsrv.broker.ask("up2k.get_state", get_vst, get_ups)
 vs = json.loads(x.get())
 vstate = {("/" + k).rstrip("/") + "/": v for k, v in vs["volstate"].items()}
-
+try:
+for rem, sz, t0, poke, vp in vs["ups"]:
+fdone = max(0.001, 1 - rem)
+td = max(0.1, now - t0)
+rd, fn = vsplit(vp.replace(os.sep, "/"))
+if not rd:
+rd = "/"
+erd = quotep(rd)
+rds = rd.replace("/", " / ")
+spd = humansize(sz * fdone / td, True) + "/s"
+eta = s2hms((td / fdone) - td, True)
+idle = s2hms(now - poke, True)
+ups.append((int(100 * fdone), spd, eta, idle, erd, rds, fn))
+except Exception as ex:
+self.log("failed to list upload progress: %r" % (ex,), 1)
+if not get_vst:
 vstate = {}
 vs = {
 "scanning": None,
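
In the hunk above, the control-panel page now lists incoming uploads ("ups"): for each one, up2k reports the remaining fraction, total size, start time and last-poke time, and the handler derives percent done, transfer speed, ETA and idle time from just those four numbers. The same arithmetic standalone, leaving out copyparty's humansize/s2hms formatting helpers:

    import time

    def upload_progress(rem, sz, t0, now=None):
        # rem: fraction still missing; sz: total bytes; t0: upload start time
        now = now or time.time()
        fdone = max(0.001, 1 - rem)  # completed fraction, floored to avoid div-by-zero
        td = max(0.1, now - t0)      # elapsed seconds
        speed_bps = sz * fdone / td  # average bytes per second so far
        eta_s = (td / fdone) - td    # seconds left if that rate holds
        return int(100 * fdone), speed_bps, eta_s

    # e.g. a 100 MiB upload with 30% left, started a minute ago:
    print(upload_progress(0.3, 100 * 1024 * 1024, time.time() - 60))
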
@@ -3964,6 +3998,12 @@
 for k in ["scanning", "hashq", "tagq", "mtpq", "dbwt"]:
 txt += " {}({})".format(k, vs[k])
 
+if ups:
+txt += "\n\nincoming files:"
+for zt in ups:
+txt += "\n%s" % (", ".join((str(x) for x in zt)),)
+txt += "\n"
+
 if rvol:
 txt += "\nyou can browse:"
 for v in rvol:
@@ -3987,6 +4027,7 @@
 avol=avol,
 in_shr=self.args.shr and self.vpath.startswith(self.args.shr[1:]),
 vstate=vstate,
+ups=ups,
 scanning=vs["scanning"],
 hashq=vs["hashq"],
 tagq=vs["tagq"],
@@ -5091,7 +5132,6 @@
 dirs.append(item)
 else:
 files.append(item)
-item["rd"] = rem
 
 if is_dk and not vf.get("dks"):
 dirs = []
@@ -5114,16 +5154,9 @@
 add_up_at = ".up_at" in mte
 is_admin = self.can_admin
 tagset = set()
-
+rd = vrem
+for fe in files if icur else []:
 fn = fe["name"]
-rd = fe["rd"]
-del fe["rd"]
-if not icur:
-continue
-
-if vn != dbv:
-_, rd = vn.get_dbv(rd)
-
 erd_efn = (rd, fn)
 q = "select mt.k, mt.v from up inner join mt on mt.w = substr(up.w,1,16) where up.rd = ? and up.fn = ? and +mt.k != 'x'"
 try:
@@ -5165,13 +5198,25 @@
 fe["tags"] = tags
 
 if icur:
+for fe in dirs:
+fe["tags"] = ODict()
+
 lmte = list(mte)
 if self.can_admin:
 lmte.extend(("up_ip", ".up_at"))
 
+if "nodirsz" not in vf:
+tagset.add(".files")
+vdir = "%s/" % (rd,) if rd else ""
+q = "select sz, nf from ds where rd=? limit 1"
+for fe in dirs:
+try:
+hit = icur.execute(q, (vdir + fe["name"],)).fetchone()
+(fe["sz"], fe["tags"][".files"]) = hit
+except:
+pass # 404 or mojibake
+
 taglist = [k for k in lmte if k in tagset]
-for fe in dirs:
-fe["tags"] = ODict()
 else:
 taglist = list(tagset)
 
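
The last hunk above also fills in each listed subdirectory's total size and file count unless the volume sets the nodirsz flag: one "select sz, nf from ds where rd=? limit 1" query per directory, keyed on the volume-relative path, with misses silently ignored. A self-contained sketch of that lookup; the three-column ds layout is inferred from the query and is only an assumption here:

    import sqlite3

    db = sqlite3.connect(":memory:")
    db.execute("create table ds (rd text, sz int, nf int)")  # assumed layout
    db.execute("insert into ds values ('pics/2024', 123456789, 42)")

    def dir_stats(cur, vdir, name):
        # vdir is the parent path: "" for the volume root, otherwise "sub/dir/"
        q = "select sz, nf from ds where rd=? limit 1"
        hit = cur.execute(q, (vdir + name,)).fetchone()
        return hit or (0, 0)  # no row: directory not indexed (or mojibake path)

    print(dir_stats(db.cursor(), "pics/", "2024"))  # (123456789, 42)
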
@@ -5315,7 +5360,7 @@
 fmt = vn.flags.get("og_th", "j")
 th_base = ujoin(url_base, quotep(thumb))
 query = "th=%s&cache" % (fmt,)
-query = ub64enc(query.encode("utf-8")).decode("
+query = ub64enc(query.encode("utf-8")).decode("ascii")
 # discord looks at file extension, not content-type...
 query += "/th.jpg" if "j" in fmt else "/th.webp"
 j2a["og_thumb"] = "%s/.uqe/%s" % (th_base, query)
@@ -5324,7 +5369,7 @@
 j2a["og_file"] = file
 if og_fn:
 og_fn_q = quotep(og_fn)
-query = ub64enc(b"raw").decode("
+query = ub64enc(b"raw").decode("ascii")
 query += "/%s" % (og_fn_q,)
 j2a["og_url"] = ujoin(url_base, og_fn_q)
 j2a["og_raw"] = j2a["og_url"] + "/.uqe/" + query
copyparty/httpconn.py
CHANGED
@@ -187,7 +187,6 @@ class HttpConn(object):
 
 if self.args.ssl_dbg and hasattr(self.s, "shared_ciphers"):
 ciphers = self.s.shared_ciphers()
-assert ciphers
 overlap = [str(y[::-1]) for y in ciphers]
 self.log("TLS cipher overlap:" + "\n".join(overlap))
 for k, v in [
copyparty/httpsrv.py
CHANGED
@@ -1,7 +1,6 @@
 # coding: utf-8
 from __future__ import print_function, unicode_literals
 
-import base64
 import math
 import os
 import re
@@ -75,6 +74,7 @@ from .util import (
 spack,
 start_log_thrs,
 start_stackmon,
+ub64enc,
 )
 
 if TYPE_CHECKING:
@@ -234,7 +234,6 @@
 if self.args.log_htp:
 self.log(self.name, "workers -= {} = {}".format(n, self.tp_nthr), 6)
 
-assert self.tp_q
 for _ in range(n):
 self.tp_q.put(None)
 
@@ -428,7 +427,6 @@
 )
 
 def thr_poolw(self) :
-assert self.tp_q
 while True:
 task = self.tp_q.get()
 if not task:
@@ -540,8 +538,8 @@
 except:
 pass
 
-
-self.cb_v = v.decode("ascii")
+# spack gives 4 lsb, take 3 lsb, get 4 ch
+self.cb_v = ub64enc(spack(b">L", int(v))[1:]).decode("ascii")
 self.cb_ts = time.time()
 return self.cb_v
 
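
In the last httpsrv.py hunk above, the cache-buster string is now produced by packing the value as a 4-byte big-endian integer, dropping the most significant byte, and urlsafe-base64-encoding the remaining 3 bytes, which always yields exactly 4 characters with no padding (the "4 lsb, take 3 lsb, get 4 ch" comment). A sketch of the same encoding, using the standard library directly as an approximation of copyparty's spack/ub64enc wrappers:

    import base64
    import struct

    def cache_buster(v):
        packed = struct.pack(">L", int(v))  # 4 bytes, big-endian
        return base64.urlsafe_b64encode(packed[1:]).decode("ascii")  # 3 bytes -> 4 chars

    zs = cache_buster(1726000000)
    print(len(zs), zs)  # always 4 characters
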
copyparty/svchub.py
CHANGED
@@ -2,7 +2,6 @@
 from __future__ import print_function, unicode_literals
 
 import argparse
-import base64
 import errno
 import gzip
 import logging
@@ -61,6 +60,7 @@ from .util import (
 pybin,
 start_log_thrs,
 start_stackmon,
+ub64enc,
 )
 
 if TYPE_CHECKING:
@@ -413,8 +413,6 @@
 r"insert into kv values ('sver', 1)",
 ]
 
-assert db # type: ignore
-assert cur # type: ignore
 if create:
 for cmd in sch:
 cur.execute(cmd)
@@ -482,8 +480,6 @@
 r"create index sh_t1 on sh(t1)",
 ]
 
-assert db # type: ignore
-assert cur # type: ignore
 if create:
 dver = 2
 modified = True
@@ -1291,5 +1287,5 @@
 zs = "{}\n{}".format(VERSIONS, alltrace())
 zb = zs.encode("utf-8", "replace")
 zb = gzip.compress(zb)
-zs =
+zs = ub64enc(zb).decode("ascii")
 self.log("stacks", zs)
copyparty/th_srv.py
CHANGED
@@ -1,7 +1,6 @@
 # coding: utf-8
 from __future__ import print_function, unicode_literals
 
-import base64
 import hashlib
 import logging
 import os
@@ -27,6 +26,7 @@ from .util import (
 min_ex,
 runcmd,
 statdir,
+ub64enc,
 vsplit,
 wrename,
 wunlink,
@@ -106,6 +106,9 @@
 HAVE_VIPS = False
 
 
+th_dir_cache = {}
+
+
 def thumb_path(histpath , rem , mtime , fmt , ffa ) :
 # base16 = 16 = 256
 # b64-lc = 38 = 1444
@@ -119,14 +122,20 @@ def thumb_path(histpath , rem , mtime , fmt , ffa ) :
 if ext in ffa and fmt[:2] in ("wf", "jf"):
 fmt = fmt.replace("f", "")
 
-
-
-
-
+dcache = th_dir_cache
+rd_key = rd + "\n" + fmt
+rd = dcache.get(rd_key)
+if not rd:
+h = hashlib.sha512(afsenc(rd_key)).digest()
+b64 = ub64enc(h).decode("ascii")[:24]
+rd = ("%s/%s/" % (b64[:2], b64[2:4])).lower() + b64
+if len(dcache) > 9001:
+dcache.clear()
+dcache[rd_key] = rd
 
 # could keep original filenames but this is safer re pathlen
 h = hashlib.sha512(afsenc(fn)).digest()
-fn =
+fn = ub64enc(h).decode("ascii")[:24]
 
 if fmt in ("opus", "caf", "mp3"):
 cat = "ac"
@@ -476,7 +485,6 @@
 if c == crops[-1]:
 raise
 
-assert img # type: ignore
 img.write_to_file(tpath, Q=40)
 
 def conv_ffmpeg(self, abspath , tpath , fmt , vn ) :
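
The thumb_path hunks above memoize the per-directory hashing in a module-level dict: the thumbnail directory name is derived from a sha512 of "<relative dir>\n<format>", split into a two-level prefix plus a 24-character base64 tail, and the whole cache is simply cleared once it grows past 9001 entries. An approximate standalone version, substituting utf-8 and urlsafe base64 for copyparty's afsenc/ub64enc helpers:

    import base64
    import hashlib

    _dir_cache = {}

    def thumb_dir(rd, fmt, max_entries=9001):
        key = rd + "\n" + fmt
        cached = _dir_cache.get(key)
        if cached:
            return cached
        h = hashlib.sha512(key.encode("utf-8", "replace")).digest()
        b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
        out = ("%s/%s/" % (b64[:2], b64[2:4])).lower() + b64
        if len(_dir_cache) > max_entries:
            _dir_cache.clear()  # crude but bounded; cheaper than a real LRU here
        _dir_cache[key] = out
        return out

    print(thumb_dir("pics/2024", "w"))
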
copyparty/u2idx.py
CHANGED
@@ -101,7 +101,6 @@ class U2idx(object):
 if not HAVE_SQLITE3 or not self.args.shr:
 return None
 
-assert sqlite3 # type: ignore
 
 db = sqlite3.connect(self.args.shr_db, timeout=2, check_same_thread=False)
 cur = db.cursor()
@@ -117,7 +116,6 @@
 if not HAVE_SQLITE3 or "e2d" not in vn.flags:
 return None
 
-assert sqlite3 # type: ignore
 
 ptop = vn.realpath
 histpath = self.asrv.vfs.histtab.get(ptop)
@@ -464,5 +462,4 @@
 return
 
 if identifier == self.active_id:
-assert self.active_cur
 self.active_cur.connection.interrupt()