copyparty 1.13.4__py3-none-any.whl → 1.13.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- copyparty/__main__.py +10 -3
- copyparty/__version__.py +2 -2
- copyparty/broker_util.py +3 -3
- copyparty/httpcli.py +52 -42
- copyparty/th_cli.py +3 -2
- copyparty/th_srv.py +16 -8
- copyparty/up2k.py +44 -28
- copyparty/util.py +1 -1
- copyparty/web/a/u2c.py +123 -38
- copyparty/web/baguettebox.js.gz +0 -0
- copyparty/web/browser.js.gz +0 -0
- copyparty/web/svcs.html +13 -12
- copyparty/web/ui.css.gz +0 -0
- copyparty/web/up2k.js.gz +0 -0
- copyparty/web/util.js.gz +0 -0
- copyparty/web/w.hash.js.gz +0 -0
- {copyparty-1.13.4.dist-info → copyparty-1.13.6.dist-info}/METADATA +9 -4
- {copyparty-1.13.4.dist-info → copyparty-1.13.6.dist-info}/RECORD +22 -22
- {copyparty-1.13.4.dist-info → copyparty-1.13.6.dist-info}/WHEEL +1 -1
- {copyparty-1.13.4.dist-info → copyparty-1.13.6.dist-info}/LICENSE +0 -0
- {copyparty-1.13.4.dist-info → copyparty-1.13.6.dist-info}/entry_points.txt +0 -0
- {copyparty-1.13.4.dist-info → copyparty-1.13.6.dist-info}/top_level.txt +0 -0
copyparty/__main__.py
CHANGED
@@ -485,11 +485,17 @@ def disable_quickedit() :
 
 
 def sfx_tpoke(top ):
-    files = [
+    files = [top] + [
+        os.path.join(dp, p) for dp, dd, df in os.walk(top) for p in dd + df
+    ]
     while True:
         t = int(time.time())
-        for f in
-
+        for f in list(files):
+            try:
+                os.utime(f, (t, t))
+            except Exception as ex:
+                lprint("<TPOKE> [%s] %r" % (f, ex))
+                files.remove(f)
 
         time.sleep(78123)
 
@@ -936,6 +942,7 @@ def add_upload(ap):
     ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="windows-only: minimum size of incoming uploads through up2k before they are made into sparse files")
     ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; [\033[32m-1\033[0m] = forbidden/always-off, [\033[32m0\033[0m] = default-off and warn if enabled, [\033[32m1\033[0m] = default-off, [\033[32m2\033[0m] = on, [\033[32m3\033[0m] = on and disable datecheck")
     ap2.add_argument("--u2j", metavar="JOBS", type=int, default=2, help="web-client: number of file chunks to upload in parallel; 1 or 2 is good for low-latency (same-country) connections, 4-8 for android clients, 16 for cross-atlantic (max=64)")
+    ap2.add_argument("--u2sz", metavar="N,N,N", type=u, default="1,64,96", help="web-client: default upload chunksize (MiB); sets \033[33mmin,default,max\033[0m in the settings gui. Each HTTP POST will aim for this size. Cloudflare max is 96. Big values are good for cross-atlantic but may increase HDD fragmentation on some FS. Disable this optimization with [\033[32m1,1,1\033[0m]")
     ap2.add_argument("--u2sort", metavar="TXT", type=u, default="s", help="upload order; [\033[32ms\033[0m]=smallest-first, [\033[32mn\033[0m]=alphabetical, [\033[32mfs\033[0m]=force-s, [\033[32mfn\033[0m]=force-n -- alphabetical is a bit slower on fiber/LAN but makes it easier to eyeball if everything went fine")
     ap2.add_argument("--write-uplog", action="store_true", help="write POST reports to textfiles in working-directory")
 
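The new --u2sz flag takes a min,default,max triple (in MiB) that bounds the web-client's per-POST upload size. A minimal illustrative sketch of how such a triple can be parsed and a requested size clamped into range (this is only the arithmetic implied by the help text, not copyparty's actual parser):

def parse_u2sz(spec):
    # "1,64,96" -> (1, 64, 96) MiB: minimum, default, maximum
    lo, df, hi = (int(x) for x in spec.split(","))
    return lo, df, hi

def clamp_u2sz(spec, want_mib):
    # clamp a user-requested POST size to the allowed range; 0 means "use the default"
    lo, df, hi = parse_u2sz(spec)
    return max(lo, min(hi, want_mib or df))

assert clamp_u2sz("1,64,96", 0) == 64    # no preference -> default
assert clamp_u2sz("1,64,96", 200) == 96  # capped at the Cloudflare-safe maximum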
copyparty/__version__.py
CHANGED
copyparty/broker_util.py
CHANGED
@@ -23,7 +23,7 @@ class ExceptionalQueue(Queue, object):
                 if rv[1] == "pebkac":
                     raise Pebkac(*rv[2:])
                 else:
-                    raise
+                    raise rv[2]
 
         return rv
 
@@ -60,8 +60,8 @@ def try_exec(want_retval , func , *args ) :
 
         return ["exception", "pebkac", ex.code, str(ex)]
 
-    except:
+    except Exception as ex:
        if not want_retval:
            raise
 
-        return ["exception", "stack",
+        return ["exception", "stack", ex]
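The broker_util change ships the caught exception object itself through the queue ("exception", "stack", ex) and re-raises it on the consumer side, so the original error type survives the broker round-trip. A minimal sketch of that pattern using plain stdlib types (illustrative only, not copyparty's actual classes):

from queue import Queue

q = Queue()

def worker():
    try:
        1 // 0
    except Exception as ex:
        q.put(["exception", "stack", ex])  # ship the exception object, not just its text

worker()
rv = q.get()
try:
    if rv[0] == "exception" and rv[1] == "stack":
        raise rv[2]  # the consumer re-raises the original ZeroDivisionError
except ZeroDivisionError as ex:
    print("got the original exception back:", repr(ex))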
copyparty/httpcli.py
CHANGED
@@ -642,11 +642,8 @@ class HttpCli(object):
            if not self._check_nonfatal(pex, post):
                self.keepalive = False
 
-
-
-            else:
-                em = repr(ex)
-                msg = min_ex()
+            em = str(ex)
+            msg = em if pex is ex else min_ex()
 
            if pex.code != 404 or self.do_log:
                self.log(
@@ -2198,33 +2195,39 @@ class HttpCli(object):
 
    def handle_post_binary(self) :
        try:
-            remains = int(self.headers["content-length"])
+            postsize = remains = int(self.headers["content-length"])
        except:
            raise Pebkac(400, "you must supply a content-length for binary POST")
 
        try:
-
+            chashes = self.headers["x-up2k-hash"].split(",")
            wark = self.headers["x-up2k-wark"]
        except KeyError:
            raise Pebkac(400, "need hash and wark headers for binary POST")
 
+        chashes = [x.strip() for x in chashes]
+
        vfs, _ = self.asrv.vfs.get(self.vpath, self.uname, False, True)
        ptop = (vfs.dbv or vfs).realpath
 
-        x = self.conn.hsrv.broker.ask("up2k.
+        x = self.conn.hsrv.broker.ask("up2k.handle_chunks", ptop, wark, chashes)
        response = x.get()
-        chunksize,
+        chunksize, cstarts, path, lastmod, sprs = response
+        maxsize = chunksize * len(chashes)
+        cstart0 = cstarts[0]
 
        try:
            if self.args.nw:
                path = os.devnull
 
-            if remains >
-
-
-            self.log("writing {} #{} @{} len {}".format(path, chash, cstart, remains))
+            if remains > maxsize:
+                t = "your client is sending %d bytes which is too much (server expected %d bytes at most)"
+                raise Pebkac(400, t % (remains, maxsize))
 
-
+            t = "writing %s %s+%d #%d+%d %s"
+            chunkno = cstart0[0] // chunksize
+            zs = " ".join([chashes[0][:15]] + [x[:9] for x in chashes[1:]])
+            self.log(t % (path, cstart0, remains, chunkno, len(chashes), zs))
 
            f = None
            fpool = not self.args.no_fpool and sprs
@@ -2238,37 +2241,43 @@ class HttpCli(object):
            f = f or open(fsenc(path), "rb+", self.args.iobuf)
 
            try:
-
-
+                for chash, cstart in zip(chashes, cstarts):
+                    f.seek(cstart[0])
+                    reader = read_socket(
+                        self.sr, self.args.s_rd_sz, min(remains, chunksize)
+                    )
+                    post_sz, _, sha_b64 = hashcopy(reader, f, self.args.s_wr_slp)
 
-
-
-
-
-
+                    if sha_b64 != chash:
+                        try:
+                            self.bakflip(f, cstart[0], post_sz, sha_b64, vfs.flags)
+                        except:
+                            self.log("bakflip failed: " + min_ex())
 
-
-
+                        t = "your chunk got corrupted somehow (received {} bytes); expected vs received hash:\n{}\n{}"
+                        raise Pebkac(400, t.format(post_sz, chash, sha_b64))
 
-
-
-
-
+                    remains -= chunksize
+
+                    if len(cstart) > 1 and path != os.devnull:
+                        self.log(
+                            "clone {} to {}".format(
+                                cstart[0], " & ".join(unicode(x) for x in cstart[1:])
+                            )
                        )
-
-
-
-
-
-
-
-
-
-                            f.write(buf)
+                        ofs = 0
+                        while ofs < chunksize:
+                            bufsz = max(4 * 1024 * 1024, self.args.iobuf)
+                            bufsz = min(chunksize - ofs, bufsz)
+                            f.seek(cstart[0] + ofs)
+                            buf = f.read(bufsz)
+                            for wofs in cstart[1:]:
+                                f.seek(wofs + ofs)
+                                f.write(buf)
 
-
+                            ofs += len(buf)
 
-
+                        self.log("clone {} done".format(cstart[0]))
 
            if not fpool:
                f.close()
@@ -2280,10 +2289,10 @@ class HttpCli(object):
                f.close()
                raise
        finally:
-            x = self.conn.hsrv.broker.ask("up2k.
+            x = self.conn.hsrv.broker.ask("up2k.release_chunks", ptop, wark, chashes)
            x.get() # block client until released
 
-        x = self.conn.hsrv.broker.ask("up2k.
+        x = self.conn.hsrv.broker.ask("up2k.confirm_chunks", ptop, wark, chashes)
        ztis = x.get()
        try:
            num_left, fin_path = ztis
@@ -2302,7 +2311,7 @@ class HttpCli(object):
 
        cinf = self.headers.get("x-up2k-stat", "")
 
-        spd = self._spd(
+        spd = self._spd(postsize)
        self.log("{:70} thank {}".format(spd, cinf))
        self.reply(b"thank")
        return True
@@ -4499,6 +4508,7 @@ class HttpCli(object):
            "themes": self.args.themes,
            "turbolvl": self.args.turbo,
            "u2j": self.args.u2j,
+            "u2sz": self.args.u2sz,
            "idxh": int(self.args.ih),
            "u2sort": self.args.u2sort,
        }
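With this change a single binary POST can carry several consecutive chunks: the client comma-joins their hashes in X-Up2k-Hash and the server rejects bodies larger than chunksize * len(chashes). A hedged sketch of what such a request looks like from a client's point of view, based only on the headers visible in this diff (the url, wark and chunk hashes below are placeholders):

import requests

url = "http://127.0.0.1:3923/some/folder/"       # placeholder target
wark = "file-id-from-the-up2k-handshake"         # placeholder
chashes = ["hashOfChunk7", "hashOfChunk8"]       # sibling (consecutive) chunks only
payload = b"..."                                 # the bytes of those chunks, concatenated in order

headers = {
    "X-Up2k-Hash": ",".join(chashes),            # several chunk hashes per POST
    "X-Up2k-Wark": wark,
    "Content-Type": "application/octet-stream",
}
r = requests.post(url, headers=headers, data=payload)
print(r.status_code, r.text)                     # 400 if the body exceeds chunksize * len(chashes)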
copyparty/th_cli.py
CHANGED
@@ -56,7 +56,8 @@ class ThumbCli(object):
 
        want_opus = fmt in ("opus", "caf", "mp3")
        is_au = ext in self.fmt_ffa
-
+        is_vau = want_opus and ext in self.fmt_ffv
+        if is_au or is_vau:
            if want_opus:
                if self.args.no_acode:
                    return None
@@ -104,7 +105,7 @@ class ThumbCli(object):
 
            fmt = sfmt
 
-        elif fmt[:1] == "p" and not is_au:
+        elif fmt[:1] == "p" and not is_au and not is_vid:
            t = "cannot thumbnail [%s]: png only allowed for waveforms"
            self.log(t % (rem), 6)
            return None
copyparty/th_srv.py
CHANGED
@@ -301,23 +301,31 @@ class ThumbSrv(object):
            ap_unpk = abspath
 
        if not bos.path.exists(tpath):
+            want_mp3 = tpath.endswith(".mp3")
+            want_opus = tpath.endswith(".opus") or tpath.endswith(".caf")
+            want_png = tpath.endswith(".png")
+            want_au = want_mp3 or want_opus
            for lib in self.args.th_dec:
+                can_au = lib == "ff" and (
+                    ext in self.fmt_ffa or ext in self.fmt_ffv
+                )
+
                if lib == "pil" and ext in self.fmt_pil:
                    funs.append(self.conv_pil)
                elif lib == "vips" and ext in self.fmt_vips:
                    funs.append(self.conv_vips)
-                elif
-
-                elif lib == "ff" and ext in self.fmt_ffa:
-                    if tpath.endswith(".opus") or tpath.endswith(".caf"):
+                elif can_au and (want_png or want_au):
+                    if want_opus:
                        funs.append(self.conv_opus)
-                    elif
+                    elif want_mp3:
                        funs.append(self.conv_mp3)
-                    elif
+                    elif want_png:
                        funs.append(self.conv_waves)
                        png_ok = True
-
-
+                elif lib == "ff" and (ext in self.fmt_ffi or ext in self.fmt_ffv):
+                    funs.append(self.conv_ffmpeg)
+                elif lib == "ff" and ext in self.fmt_ffa and not want_au:
+                    funs.append(self.conv_spec)
 
        tdir, tfn = os.path.split(tpath)
        ttpath = os.path.join(tdir, "w", tfn)
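In short, th_srv now picks the converter from the requested output extension (.mp3 / .opus / .caf / .png) and lets ffmpeg-decodable video count as an audio source. A condensed, illustrative sketch of that routing (the helper function itself is hypothetical; only the converter names come from the diff):

def pick_converter(tpath, ext, fmt_ffa, fmt_ffi, fmt_ffv):
    want_mp3 = tpath.endswith(".mp3")
    want_opus = tpath.endswith(".opus") or tpath.endswith(".caf")
    want_png = tpath.endswith(".png")
    can_au = ext in fmt_ffa or ext in fmt_ffv  # audio or video input that ffmpeg can decode
    if can_au and (want_png or want_mp3 or want_opus):
        if want_opus:
            return "conv_opus"
        if want_mp3:
            return "conv_mp3"
        return "conv_waves"
    if ext in fmt_ffi or ext in fmt_ffv:
        return "conv_ffmpeg"
    if ext in fmt_ffa:
        return "conv_spec"
    return None

print(pick_converter("cache/abc.opus", "mkv", {"mp3"}, {"jpg"}, {"mkv"}))  # conv_opus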
copyparty/up2k.py
CHANGED
@@ -542,7 +542,7 @@ class Up2k(object):
                nrm += 1
 
            if nrm:
-                self.log("
+                self.log("%d files graduated in /%s" % (nrm, vp))
 
            if timeout < 10:
                continue
@@ -1293,7 +1293,7 @@ class Up2k(object):
                            not cv
                            or liname not in th_cvds
                            or cv.lower() not in th_cvds
-                            or th_cvd.index(
+                            or th_cvd.index(liname) < th_cvd.index(cv.lower())
                        )
                    ):
                        cv = iname
@@ -3010,8 +3010,8 @@ class Up2k(object):
        times = (int(time.time()), int(lmod))
        bos.utime(dst, times, False)
 
-    def
-        self, ptop , wark ,
+    def handle_chunks(
+        self, ptop , wark , chashes
    ) :
        with self.mutex, self.reg_mutex:
            self.db_act = self.vol_act[ptop] = time.time()
@@ -3021,26 +3021,37 @@ class Up2k(object):
                self.log("unknown wark [{}], known: {}".format(wark, known))
                raise Pebkac(400, "unknown wark" + SSEELOG)
 
-
-
-
-
-
+            for chash in chashes:
+                if chash not in job["need"]:
+                    msg = "chash = {} , need:\n".format(chash)
+                    msg += "\n".join(job["need"])
+                    self.log(msg)
+                    raise Pebkac(400, "already got that (%s) but thanks??" % (chash,))
 
-
-
-
+                if chash in job["busy"]:
+                    nh = len(job["hash"])
+                    idx = job["hash"].index(chash)
+                    t = "that chunk is already being written to:\n  {}\n  {} {}/{}\n  {}"
+                    raise Pebkac(400, t.format(wark, chash, idx, nh, job["name"]))
 
-
-            nh = len(job["hash"])
-            idx = job["hash"].index(chash)
-            t = "that chunk is already being written to:\n  {}\n  {} {}/{}\n  {}"
-            raise Pebkac(400, t.format(wark, chash, idx, nh, job["name"]))
+            chunksize = up2k_chunksize(job["size"])
 
-
+            coffsets = []
+            for chash in chashes:
+                nchunk = [n for n, v in enumerate(job["hash"]) if v == chash]
+                if not nchunk:
+                    raise Pebkac(400, "unknown chunk %s" % (chash))
 
-
-
+                ofs = [chunksize * x for x in nchunk]
+                coffsets.append(ofs)
+
+            for ofs1, ofs2 in zip(coffsets, coffsets[1:]):
+                gap = (ofs2[0] - ofs1[0]) - chunksize
+                if gap:
+                    t = "only sibling chunks can be stitched; gap of %d bytes between offsets %d and %d in %s"
+                    raise Pebkac(400, t % (gap, ofs1[0], ofs2[0], job["name"]))
+
+            path = djoin(job["ptop"], job["prel"], job["tnam"])
 
            if not job["sprs"]:
                cur_sz = bos.path.getsize(path)
@@ -3053,17 +3064,20 @@ class Up2k(object):
 
            job["poke"] = time.time()
 
-        return chunksize,
+        return chunksize, coffsets, path, job["lmod"], job["sprs"]
 
-    def
+    def release_chunks(self, ptop , wark , chashes ) :
        with self.reg_mutex:
            job = self.registry[ptop].get(wark)
            if job:
-
+                for chash in chashes:
+                    job["busy"].pop(chash, None)
 
        return True
 
-    def
+    def confirm_chunks(
+        self, ptop , wark , chashes
+    ) :
        with self.mutex, self.reg_mutex:
            self.db_act = self.vol_act[ptop] = time.time()
            try:
@@ -3072,14 +3086,16 @@ class Up2k(object):
                src = djoin(pdir, job["tnam"])
                dst = djoin(pdir, job["name"])
            except Exception as ex:
-                return "confirm_chunk, wark
+                return "confirm_chunk, wark(%r)" % (ex,) # type: ignore
 
-
+            for chash in chashes:
+                job["busy"].pop(chash, None)
 
            try:
-
+                for chash in chashes:
+                    job["need"].remove(chash)
            except Exception as ex:
-                return "confirm_chunk, chash
+                return "confirm_chunk, chash(%s) %r" % (chash, ex) # type: ignore
 
            ret = len(job["need"])
            if ret > 0:
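A standalone sketch of the offset math used by handle_chunks above: each chunk hash maps to one or more byte offsets (duplicates get extra clone targets), and a batch of hashes may only be stitched when their first offsets are consecutive. The chunksize and hash list below are made up for illustration; the real values come from up2k_chunksize() and the upload job:

chunksize = 16 * 1024 * 1024                     # assumed; actual size comes from up2k_chunksize()
file_hashes = ["aa", "bb", "cc", "bb"]           # hypothetical per-chunk hashes of one file

def offsets_for(chash):
    return [n * chunksize for n, v in enumerate(file_hashes) if v == chash]

coffsets = [offsets_for(x) for x in ["aa", "bb"]]  # client wants to stitch chunks "aa" and "bb"
for ofs1, ofs2 in zip(coffsets, coffsets[1:]):
    gap = (ofs2[0] - ofs1[0]) - chunksize
    if gap:
        raise ValueError("only sibling chunks can be stitched; gap=%d" % (gap,))
print(coffsets)  # [[0], [16777216, 50331648]] -> "bb" is also cloned to its duplicate offset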
copyparty/util.py
CHANGED
@@ -1356,7 +1356,7 @@ def vol_san(vols , txt ) :
 def min_ex(max_lines = 8, reverse = False) :
     et, ev, tb = sys.exc_info()
     stb = traceback.extract_tb(tb) if tb else traceback.extract_stack()[:-1]
-    fmt = "%s
+    fmt = "%s:%d <%s>: %s"
     ex = [fmt % (fp.split(os.sep)[-1], ln, fun, txt) for fp, ln, fun, txt in stb]
     if et or ev or tb:
         ex.append("[%s] %s" % (et.__name__ if et else "(anonymous)", ev))
copyparty/web/a/u2c.py
CHANGED
@@ -1,8 +1,8 @@
 #!/usr/bin/env python3
 from __future__ import print_function, unicode_literals
 
-S_VERSION = "1.
-S_BUILD_DT = "2024-
+S_VERSION = "1.21"
+S_BUILD_DT = "2024-07-26"
 
 """
 u2c.py: upload to copyparty
@@ -20,6 +20,7 @@ import sys
 import stat
 import math
 import time
+import json
 import atexit
 import signal
 import socket
@@ -79,7 +80,7 @@ req_ses = requests.Session()
 
 
 class Daemon(threading.Thread):
-    def __init__(self, target, name
+    def __init__(self, target, name=None, a=None):
        threading.Thread.__init__(self, name=name)
        self.a = a or ()
        self.fun = target
@@ -110,18 +111,22 @@ class File(object):
        # set by get_hashlist
        self.cids = [] # type: list[tuple[str, int, int]] # [ hash, ofs, sz ]
        self.kchunks = {} # type: dict[str, tuple[int, int]] # hash: [ ofs, sz ]
+        self.t_hash = 0.0 # type: float
 
        # set by handshake
        self.recheck = False # duplicate; redo handshake after all files done
        self.ucids = [] # type: list[str] # chunks which need to be uploaded
        self.wark = "" # type: str
        self.url = "" # type: str
-        self.nhs = 0
+        self.nhs = 0 # type: int
 
        # set by upload
+        self.t0_up = 0.0 # type: float
+        self.t1_up = 0.0 # type: float
+        self.nojoin = 0 # type: int
        self.up_b = 0 # type: int
        self.up_c = 0 # type: int
-        self.cd = 0
+        self.cd = 0 # type: int
 
        # t = "size({}) lmod({}) top({}) rel({}) abs({}) name({})\n"
        # eprint(t.format(self.size, self.lmod, self.top, self.rel, self.abs, self.name))
@@ -130,10 +135,20 @@ class File(object):
 class FileSlice(object):
    """file-like object providing a fixed window into a file"""
 
-    def __init__(self, file,
+    def __init__(self, file, cids):
        # type: (File, str) -> None
 
-        self.
+        self.file = file
+        self.cids = cids
+
+        self.car, tlen = file.kchunks[cids[0]]
+        for cid in cids[1:]:
+            ofs, clen = file.kchunks[cid]
+            if ofs != self.car + tlen:
+                raise Exception(9)
+            tlen += clen
+
+        self.len = tlen
        self.cdr = self.car + self.len
        self.ofs = 0 # type: int
        self.f = open(file.abs, "rb", 512 * 1024)
@@ -357,7 +372,7 @@ def undns(url):
    usp = urlsplit(url)
    hn = usp.hostname
    gai = None
-    eprint("resolving host [
+    eprint("resolving host [%s] ..." % (hn,))
    try:
        gai = socket.getaddrinfo(hn, None)
        hn = gai[0][4][0]
@@ -375,7 +390,7 @@ def undns(url):
 
    usp = usp._replace(netloc=hn)
    url = urlunsplit(usp)
-    eprint("
+    eprint(" %s\n" % (url,))
    return url
 
 
@@ -518,6 +533,8 @@ def get_hashlist(file, pcb, mth):
    file_ofs = 0
    ret = []
    with open(file.abs, "rb", 512 * 1024) as f:
+        t0 = time.time()
+
        if mth and file.size >= 1024 * 512:
            ret = mth.hash(f, file.size, chunk_sz, pcb, file)
            file_rem = 0
@@ -544,10 +561,12 @@ def get_hashlist(file, pcb, mth):
        if pcb:
            pcb(file, file_ofs)
 
+    file.t_hash = time.time() - t0
    file.cids = ret
    file.kchunks = {}
    for k, v1, v2 in ret:
-
+        if k not in file.kchunks:
+            file.kchunks[k] = [v1, v2]
 
 
 def handshake(ar, file, search):
@@ -589,7 +608,8 @@ def handshake(ar, file, search):
    sc = 600
    txt = ""
    try:
-
+        zs = json.dumps(req, separators=(",\n", ": "))
+        r = req_ses.post(url, headers=headers, data=zs)
        sc = r.status_code
        txt = r.text
        if sc < 400:
@@ -636,13 +656,13 @@ def handshake(ar, file, search):
    return r["hash"], r["sprs"]
 
 
-def upload(
-    # type: (
-    """upload
+def upload(fsl, pw, stats):
+    # type: (FileSlice, str, str) -> None
+    """upload a range of file data, defined by one or more `cid` (chunk-hash)"""
 
    headers = {
-        "X-Up2k-Hash":
-        "X-Up2k-Wark": file.wark,
+        "X-Up2k-Hash": ",".join(fsl.cids),
+        "X-Up2k-Wark": fsl.file.wark,
        "Content-Type": "application/octet-stream",
    }
 
@@ -652,15 +672,24 @@ def upload(file, cid, pw, stats):
    if pw:
        headers["Cookie"] = "=".join(["cppwd", pw])
 
-    f = FileSlice(file, cid)
    try:
-        r = req_ses.post(file.url, headers=headers, data=
+        r = req_ses.post(fsl.file.url, headers=headers, data=fsl)
+
+        if r.status_code == 400:
+            txt = r.text
+            if (
+                "already being written" in txt
+                or "already got that" in txt
+                or "only sibling chunks" in txt
+            ):
+                fsl.file.nojoin = 1
+
        if not r:
            raise Exception(repr(r))
 
        _ = r.content
    finally:
-
+        fsl.f.close()
 
 
 class Ctl(object):
@@ -724,6 +753,9 @@ class Ctl(object):
        if ar.safe:
            self._safe()
        else:
+            self.at_hash = 0.0
+            self.at_up = 0.0
+            self.at_upr = 0.0
            self.hash_f = 0
            self.hash_c = 0
            self.hash_b = 0
@@ -743,7 +775,7 @@ class Ctl(object):
 
        self.mutex = threading.Lock()
        self.q_handshake = Queue() # type: Queue[File]
-        self.q_upload = Queue() # type: Queue[
+        self.q_upload = Queue() # type: Queue[FileSlice]
 
        self.st_hash = [None, "(idle, starting...)"] # type: tuple[File, int]
        self.st_up = [None, "(idle, starting...)"] # type: tuple[File, int]
@@ -788,7 +820,8 @@ class Ctl(object):
            for nc, cid in enumerate(hs):
                print(" {0} up {1}".format(ncs - nc, cid))
                stats = "{0}/0/0/{1}".format(nf, self.nfiles - nf)
-
+                fslice = FileSlice(file, [cid])
+                upload(fslice, self.ar.a, stats)
 
            print(" ok!")
            if file.recheck:
@@ -797,7 +830,7 @@ class Ctl(object):
        if not self.recheck:
            return
 
-        eprint("finalizing
+        eprint("finalizing %d duplicate files\n" % (len(self.recheck),))
        for file in self.recheck:
            handshake(self.ar, file, search)
 
@@ -871,10 +904,17 @@ class Ctl(object):
            t = "{0} eta @ {1}/s, {2}, {3}# left".format(self.eta, spd, sleft, nleft)
            eprint(txt + "\033]0;{0}\033\\\r{0}{1}".format(t, tail))
 
+        if self.hash_b and self.at_hash:
+            spd = humansize(self.hash_b / self.at_hash)
+            eprint("\nhasher: %.2f sec, %s/s\n" % (self.at_hash, spd))
+        if self.up_b and self.at_up:
+            spd = humansize(self.up_b / self.at_up)
+            eprint("upload: %.2f sec, %s/s\n" % (self.at_up, spd))
+
        if not self.recheck:
            return
 
-        eprint("finalizing
+        eprint("finalizing %d duplicate files\n" % (len(self.recheck),))
        for file in self.recheck:
            handshake(self.ar, file, False)
 
@@ -1060,21 +1100,62 @@ class Ctl(object):
                self.handshaker_busy -= 1
 
            if not hs:
-
-
-
-
+                self.at_hash += file.t_hash
+
+                if self.ar.spd:
+                    if VT100:
+                        c1 = "\033[36m"
+                        c2 = "\033[0m"
+                    else:
+                        c1 = c2 = ""
+
+                    spd_h = humansize(file.size / file.t_hash, True)
+                    if file.up_b:
+                        t_up = file.t1_up - file.t0_up
+                        spd_u = humansize(file.size / t_up, True)
+
+                        t = "uploaded %s %s(h:%.2fs,%s/s,up:%.2fs,%s/s)%s"
+                        print(t % (upath, c1, file.t_hash, spd_h, t_up, spd_u, c2))
+                    else:
+                        t = "   found %s %s(%.2fs,%s/s)%s"
+                        print(t % (upath, c1, file.t_hash, spd_h, c2))
+                else:
+                    kw = "uploaded" if file.up_b else "   found"
+                    print("{0} {1}".format(kw, upath))
+
+            chunksz = up2k_chunksize(file.size)
+            njoin = (self.ar.sz * 1024 * 1024) // chunksz
+            cs = hs[:]
+            while cs:
+                fsl = FileSlice(file, cs[:1])
+                try:
+                    if file.nojoin:
+                        raise Exception()
+                    for n in range(2, min(len(cs), njoin + 1)):
+                        fsl = FileSlice(file, cs[:n])
+                except:
+                    pass
+                cs = cs[len(fsl.cids) :]
+                self.q_upload.put(fsl)
 
    def uploader(self):
        while True:
-
-            if not
+            fsl = self.q_upload.get()
+            if not fsl:
                self.st_up = [None, "(finished)"]
                break
 
+            file = fsl.file
+            cids = fsl.cids
+
            with self.mutex:
+                if not self.uploader_busy:
+                    self.at_upr = time.time()
                self.uploader_busy += 1
-
+                if not file.t0_up:
+                    file.t0_up = time.time()
+                    if not self.t0_up:
+                        self.t0_up = file.t0_up
 
            stats = "%d/%d/%d/%d %d/%d %s" % (
                self.up_f,
@@ -1086,28 +1167,30 @@ class Ctl(object):
                self.eta,
            )
 
-            file, cid = task
            try:
-                upload(
+                upload(fsl, self.ar.a, stats)
            except Exception as ex:
-                t = "upload failed, retrying:
-                eprint(t
+                t = "upload failed, retrying: %s #%s+%d (%s)\n"
+                eprint(t % (file.name, cids[0][:8], len(cids) - 1, ex))
                file.cd = time.time() + self.ar.cd
                # handshake will fix it
 
            with self.mutex:
-                sz =
-                file.ucids = [x for x in file.ucids if x
+                sz = fsl.len
+                file.ucids = [x for x in file.ucids if x not in cids]
                if not file.ucids:
+                    file.t1_up = time.time()
                    self.q_handshake.put(file)
 
-                self.st_up = [file,
+                self.st_up = [file, cids[0]]
                file.up_b += sz
                self.up_b += sz
                self.up_br += sz
                file.up_c += 1
                self.up_c += 1
                self.uploader_busy -= 1
+                if not self.uploader_busy:
+                    self.at_up += time.time() - self.at_upr
 
    def up_done(self, file):
        if self.ar.dl:
@@ -1150,6 +1233,7 @@ source file/folder selection uses rsync syntax, meaning that:
    ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
    ap.add_argument("--touch", action="store_true", help="if last-modified timestamps differ, push local to server (need write+delete perms)")
    ap.add_argument("--ow", action="store_true", help="overwrite existing files instead of autorenaming")
+    ap.add_argument("--spd", action="store_true", help="print speeds for each file")
    ap.add_argument("--version", action="store_true", help="show version and exit")
 
    ap = app.add_argument_group("compatibility")
@@ -1164,6 +1248,7 @@ source file/folder selection uses rsync syntax, meaning that:
    ap = app.add_argument_group("performance tweaks")
    ap.add_argument("-j", type=int, metavar="CONNS", default=2, help="parallel connections")
    ap.add_argument("-J", type=int, metavar="CORES", default=hcores, help="num cpu-cores to use for hashing; set 0 or 1 for single-core hashing")
+    ap.add_argument("--sz", type=int, metavar="MiB", default=64, help="try to make each POST this big")
    ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
    ap.add_argument("-ns", action="store_true", help="no status panel (for slow consoles and macos)")
    ap.add_argument("--cd", type=float, metavar="SEC", default=5, help="delay before reattempting a failed handshake/upload")
@@ -1208,7 +1293,7 @@ source file/folder selection uses rsync syntax, meaning that:
    ar.url = ar.url.rstrip("/") + "/"
    if "://" not in ar.url:
        ar.url = "http://" + ar.url
-
+
 
    if "https://" in ar.url.lower():
        try:
            import ssl, zipfile
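On the client side, u2c now decides how many consecutive chunk hashes to join into one FileSlice from the --sz budget: njoin = (sz MiB) // chunksize, falling back to one chunk per POST if the server refuses stitching. A simplified sketch of that grouping (the real code additionally requires the chunks to be adjacent in the file, which FileSlice enforces; the 8 MiB chunksize here is an assumption for illustration):

def group_chunks(pending, sz_mib=64, chunksz=8 * 1024 * 1024, nojoin=False):
    # split a list of pending chunk hashes into batches of up to njoin entries
    njoin = (sz_mib * 1024 * 1024) // chunksz
    out = []
    while pending:
        n = 1 if nojoin else min(len(pending), njoin)
        out.append(pending[:n])
        pending = pending[n:]
    return out

print(group_chunks([str(x) for x in range(10)]))
# [['0', '1', '2', '3', '4', '5', '6', '7'], ['8', '9']]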
copyparty/web/baguettebox.js.gz
CHANGED
Binary file

copyparty/web/browser.js.gz
CHANGED
Binary file

copyparty/web/svcs.html
CHANGED
@@ -56,7 +56,7 @@
 <li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
 <li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
 </ul>
-
+
 <p>if you want to use the native WebDAV client in windows instead (slow and buggy), first run <a href="{{ r }}/.cpr/a/webdav-cfg.bat">webdav-cfg.bat</a> to remove the 47 MiB filesize limit (also fixes latency and password login), then connect:</p>
 <pre>
 net use <b>w:</b> http{{ s }}://{{ ep }}/{{ rvp }}{% if accs %} k /user:<b>{{ pw }}</b>{% endif %}
@@ -64,16 +64,7 @@
 </div>
 
 <div class="os lin">
-<
-yum install davfs2
-{% if accs %}printf '%s\n' <b>{{ pw }}</b> k | {% endif %}mount -t davfs -ouid=1000 http{{ s }}://{{ ep }}/{{ rvp }} <b>mp</b>
-</pre>
-<p>make it automount on boot:</p>
-<pre>
-printf '%s\n' "http{{ s }}://{{ ep }}/{{ rvp }} <b>{{ pw }}</b> k" >> /etc/davfs2/secrets
-printf '%s\n' "http{{ s }}://{{ ep }}/{{ rvp }} <b>mp</b> davfs rw,user,uid=1000,noauto 0 0" >> /etc/fstab
-</pre>
-<p>or you can use rclone instead, which is much slower but doesn't require root (plus it keeps lastmodified on upload):</p>
+<p>rclone (v1.63 or later) is recommended:</p>
 <pre>
 rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=owncloud pacer_min_sleep=0.01ms{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
 rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ rvp }} <b>mp</b>
@@ -85,6 +76,16 @@
 <li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
 <li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
 </ul>
+<p>alternatively use davfs2 (requires root, is slower, forgets lastmodified-timestamp on upload):</p>
+<pre>
+yum install davfs2
+{% if accs %}printf '%s\n' <b>{{ pw }}</b> k | {% endif %}mount -t davfs -ouid=1000 http{{ s }}://{{ ep }}/{{ rvp }} <b>mp</b>
+</pre>
+<p>make davfs2 automount on boot:</p>
+<pre>
+printf '%s\n' "http{{ s }}://{{ ep }}/{{ rvp }} <b>{{ pw }}</b> k" >> /etc/davfs2/secrets
+printf '%s\n' "http{{ s }}://{{ ep }}/{{ rvp }} <b>mp</b> davfs rw,user,uid=1000,noauto 0 0" >> /etc/fstab
+</pre>
 <p>or the emergency alternative (gnome/gui-only):</p>
 <!-- gnome-bug: ignores vp -->
 <pre>
@@ -104,7 +105,7 @@
 <pre>
 http{{ s }}://k:<b>{{ pw }}</b>@{{ ep }}/{{ rvp }}
 </pre>
-
+
 {% if s %}
 <p><em>replace <code>https</code> with <code>http</code> if it doesn't work</em></p>
 {% endif %}
copyparty/web/ui.css.gz
CHANGED
Binary file

copyparty/web/up2k.js.gz
CHANGED
Binary file

copyparty/web/util.js.gz
CHANGED
Binary file

copyparty/web/w.hash.js.gz
CHANGED
Binary file

{copyparty-1.13.4.dist-info → copyparty-1.13.6.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: copyparty
-Version: 1.13.4
+Version: 1.13.6
 Summary: Portable file server with accelerated resumable uploads, deduplication, WebDAV, FTP, zeroconf, media indexer, video thumbnails, audio transcoding, and write-only folders
 Author-email: ed <copyparty@ocv.me>
 License: MIT
@@ -263,7 +263,7 @@ also see [comparison to similar software](./docs/versus.md)
 * upload
   * ☑ basic: plain multipart, ie6 support
   * ☑ [up2k](#uploading): js, resumable, multithreaded
-    *
+    * **no filesize limit!** ...unless you use Cloudflare, then it's 383.9 GiB
   * ☑ stash: simple PUT filedropper
   * ☑ filename randomizer
   * ☑ write-only folders
@@ -279,6 +279,7 @@ also see [comparison to similar software](./docs/versus.md)
   * ☑ [navpane](#navpane) (directory tree sidebar)
   * ☑ file manager (cut/paste, delete, [batch-rename](#batch-rename))
   * ☑ audio player (with [OS media controls](https://user-images.githubusercontent.com/241032/215347492-b4250797-6c90-4e09-9a4c-721edf2fb15c.png) and opus/mp3 transcoding)
+  * ☑ play video files as audio (converted on server)
   * ☑ image gallery with webm player
   * ☑ textfile browser with syntax hilighting
   * ☑ [thumbnails](#thumbnails)
@@ -700,6 +701,7 @@ up2k has several advantages:
 * uploads resume if you reboot your browser or pc, just upload the same files again
 * server detects any corruption; the client reuploads affected chunks
 * the client doesn't upload anything that already exists on the server
+* no filesize limit unless imposed by a proxy, for example Cloudflare, which blocks uploads over 383.9 GiB
 * much higher speeds than ftp/scp/tarpipe on some internet connections (mainly american ones) thanks to parallel connections
 * the last-modified timestamp of the file is preserved
 
@@ -854,6 +856,7 @@ some hilights:
 * OS integration; control playback from your phone's lockscreen ([windows](https://user-images.githubusercontent.com/241032/233213022-298a98ba-721a-4cf1-a3d4-f62634bc53d5.png) // [iOS](https://user-images.githubusercontent.com/241032/142711926-0700be6c-3e31-47b3-9928-53722221f722.png) // [android](https://user-images.githubusercontent.com/241032/233212311-a7368590-08c7-4f9f-a1af-48ccf3f36fad.png))
 * shows the audio waveform in the seekbar
 * not perfectly gapless but can get really close (see settings + eq below); good enough to enjoy gapless albums as intended
+* videos can be played as audio, without wasting bandwidth on the video
 
 click the `play` link next to an audio file, or copy the link target to [share it](https://a.ocv.me/pub/demo/music/Ubiktune%20-%20SOUNDSHOCK%202%20-%20FM%20FUNK%20TERRROR!!/#af-1fbfba61&t=18) (optionally with a timestamp to start playing from, like that example does)
 
@@ -1041,7 +1044,7 @@ some recommended FTP / FTPS clients; `wark` = example password:
 
 ## webdav server
 
-with read-write support, supports winXP and later, macos, nautilus/gvfs
+with read-write support, supports winXP and later, macos, nautilus/gvfs ... a greay way to [access copyparty straight from the file explorer in your OS](#mount-as-drive)
 
 click the [connect](http://127.0.0.1:3923/?hc) button in the control-panel to see connection instructions for windows, linux, macos
 
@@ -1365,6 +1368,8 @@ you can set hooks before and/or after an event happens, and currently you can ho
 
 there's a bunch of flags and stuff, see `--help-hooks`
 
+if you want to write your own hooks, see [devnotes](./docs/devnotes.md#event-hooks)
+
 
 ### upload events
 
@@ -1852,7 +1857,7 @@ alternatively, some alternatives roughly sorted by speed (unreproducible benchma
 * [rclone-http](./docs/rclone.md) (26s), read-only
 * [partyfuse.py](./bin/#partyfusepy) (35s), read-only
 * [rclone-ftp](./docs/rclone.md) (47s), read/WRITE
-* davfs2 (103s), read/WRITE
+* davfs2 (103s), read/WRITE
 * [win10-webdav](#webdav-server) (138s), read/WRITE
 * [win10-smb2](#smb-server) (387s), read/WRITE
 
{copyparty-1.13.4.dist-info → copyparty-1.13.6.dist-info}/RECORD
CHANGED
@@ -1,17 +1,17 @@
 copyparty/__init__.py,sha256=fUINM1abqDGzCCH_JcXdOnLdKOV-SrTI2Xo2QgQW2P4,1703
-copyparty/__main__.py,sha256=
-copyparty/__version__.py,sha256=
+copyparty/__main__.py,sha256=3fS9elMSksgji7chNwRitbHfgtLWRmV15JD_BvaKmTk,103492
+copyparty/__version__.py,sha256=4MXEq4Neanau2UCgcagryjApwUEbLu_Yj4mLLDmobr0,255
 copyparty/authsrv.py,sha256=zCo1-CmE2UhsnSRFqALTu1GBO6FVlo1l7Ex7PCv52Xg,87191
 copyparty/broker_mp.py,sha256=YFe1S6Zziht8Qc__dCLj_ff8z0DDny9lqk_Mi5ajsJk,3868
 copyparty/broker_mpw.py,sha256=4ZI7bJYOwUibeAJVv9_FPGNmHrr9eOtkj_Kz0JEppTU,3197
 copyparty/broker_thr.py,sha256=eKr--HJGig5zqvNGwH9UoBG9Nvi9mT2axrRmJwknd0s,1759
-copyparty/broker_util.py,sha256=
+copyparty/broker_util.py,sha256=w0E-GhoOgq8ow7mEWi3GOyqraux6VG9yk1tif1yo0jc,1474
 copyparty/cert.py,sha256=BVMXKRzr1du0WgGifh_HrM-NEuezlgPDejaY3UaQUQ0,7728
 copyparty/cfg.py,sha256=i8-bjWgbguQooxiA172RcptqR_SEOwDHJ4cqldrZ8oQ,9792
 copyparty/dxml.py,sha256=lZpg-kn-kQsXRtNY1n6fRaS-b7uXzMCyv8ovKnhZcZc,1548
 copyparty/fsutil.py,sha256=NEdhYYgQxDQ7MmgTbtjMKorikCjDls2AXVX16EH2JfQ,4613
 copyparty/ftpd.py,sha256=g9FDgoIV5DncmkovIo9C2jowtS6SmGX4Wgw44rWqM5g,17192
-copyparty/httpcli.py,sha256=
+copyparty/httpcli.py,sha256=7TospSu2141aWo0BnKQN3k5Ue5Y75KDA6WFmoYovYFI,169183
 copyparty/httpconn.py,sha256=6MOQgBtOGrlVRr6ZiHBKYzkzcls-YWwaWEtqE6DweM0,6873
 copyparty/httpsrv.py,sha256=U9CYy_5eK-VO1QZPeiybHUm9MD7-FZXAo22IZsEUErA,16369
 copyparty/ico.py,sha256=AYHdK6NlYBfBgafVYXia3jHQ9XHZdUL1D8WftLMAzIU,3545
@@ -28,11 +28,11 @@ copyparty/svchub.py,sha256=JbWbypa7OyYkJgsUTo0SRhgvb4WT4B1zDNi110Fuvl0,32743
 copyparty/szip.py,sha256=5xbfbnTKt97c59pZD_nAzWEYpMel7xFDuGUjj60VjOs,8619
 copyparty/tcpsrv.py,sha256=ssomz8MUjX62Yf6oDovH-Fzxi-9rZF6JFvsdDgGEho0,17680
 copyparty/tftpd.py,sha256=GkO1jN08ufPDcO2dri1VfvcDKcMW7x1RtL2Rj0SrN3o,12983
-copyparty/th_cli.py,sha256=
-copyparty/th_srv.py,sha256=
+copyparty/th_cli.py,sha256=o6FMkerYvAXS455z3DUossVztu_nzFlYSQhs6qN6Jt8,4636
+copyparty/th_srv.py,sha256=M5bYynDed1FR-UyuyOnsCCQP2IUxV_O3kONtH0n5Z8E,28729
 copyparty/u2idx.py,sha256=uEUcEbye1jzGlQfEJkLtD060XA6Rv_6lXLgeg6oAU5M,13033
-copyparty/up2k.py,sha256=
-copyparty/util.py,sha256=
+copyparty/up2k.py,sha256=o5EVfOGer67KmrbvyM2wTNVwx04CIdifS7gx767jW9w,146115
+copyparty/util.py,sha256=eLumq1eaFZKxMV3xwWsCCIGMp2IS2k5ijaO83oNr0zc,84958
 copyparty/bos/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 copyparty/bos/bos.py,sha256=Wb7eWsXJgR5AFlBR9ZOyKrLTwy-Kct9RrGiOu4Jo37Y,1622
 copyparty/bos/path.py,sha256=yEjCq2ki9CvxA5sCT8pS0keEXwugs0ZeUyUhdBziOCI,777
@@ -54,10 +54,10 @@ copyparty/stolen/ifaddr/__init__.py,sha256=_BUN7eM5oD2Jgib6B22tEFSb20fD9urNPPaAl
 copyparty/stolen/ifaddr/_posix.py,sha256=-67NdfGrCktfQPakT2fLbjl2U00QMvyBGkSvrUuTOrU,2626
 copyparty/stolen/ifaddr/_shared.py,sha256=cJACl8cOxQ-HSYphZTzKMAjAx_TAFyJwUPjfD102Xqw,6111
 copyparty/stolen/ifaddr/_win32.py,sha256=EE-QyoBgeB7lYQ6z62VjXNaRozaYfCkaJBHGNA8QtZM,4026
-copyparty/web/baguettebox.js.gz,sha256=
+copyparty/web/baguettebox.js.gz,sha256=4dS8-r4si84ca71l98672ahnRI86Aq95MU-bc5knykk,7962
 copyparty/web/browser.css.gz,sha256=8AZ53KZ-fWInji7m-eGNrwdZbWEeLmyjwg1V23ON6Mc,11539
 copyparty/web/browser.html,sha256=-tLasq2GKe9mUceqXG4PczQ7odBMrX0qlWuyaA9SjPI,4882
-copyparty/web/browser.js.gz,sha256
+copyparty/web/browser.js.gz,sha256=-FlIylCcVX7aQvOInLDdSNZAqsVzs6RKczzvNBSFQ50,69593
 copyparty/web/browser2.html,sha256=ciQlgr9GWuIapdsRBFNRvRFvN5T_5n920LqDMbsj5-g,1605
 copyparty/web/cf.html,sha256=lJThtNFNAQT1ClCHHlivAkDGE0LutedwopXD62Z8Nys,589
 copyparty/web/dbg-audio.js.gz,sha256=Ma-KZtK8LnmiwNvNKFKXMPYl_Nn_3U7GsJ6-DRWC2HE,688
@@ -74,15 +74,15 @@ copyparty/web/msg.html,sha256=HcBeXXpcF2JKwcj8KD3dGCvONMnTZ6lXYmm4SYgBMlA,905
 copyparty/web/splash.css.gz,sha256=zgDs-SY3VrInsXeARRPcGHziVOUs-1hUtSObzybwD1g,1006
 copyparty/web/splash.html,sha256=z5OrfZqA5RBxeY86BJiQ5NZNHIIDHDvPlTuht-Q0v64,3917
 copyparty/web/splash.js.gz,sha256=P4BLL_SBqfqWniq_gzUD-opVAkblAPgKDwmfxyfDB7o,1469
-copyparty/web/svcs.html,sha256=
+copyparty/web/svcs.html,sha256=9NUxNAohl-RY-uDP73nFqTG69voN_QfqNNGYwIolDEs,11728
 copyparty/web/svcs.js.gz,sha256=k81ZvZ3I-f4fMHKrNGGOgOlvXnCBz0mVjD-8mieoWCA,520
-copyparty/web/ui.css.gz,sha256=
-copyparty/web/up2k.js.gz,sha256
-copyparty/web/util.js.gz,sha256=
-copyparty/web/w.hash.js.gz,sha256=
+copyparty/web/ui.css.gz,sha256=GnR_PxnZGcNs2IJnb5hFffnhlW3cUHkPad3tNIm-7DQ,2637
+copyparty/web/up2k.js.gz,sha256=-tb11N3zmwTIhNSJCuaCbiFdt_A7Yhg_NSXfz87rzFs,22711
+copyparty/web/util.js.gz,sha256=uZDsKkwTcEiFv-GTfy-zmArqirjhxGBLKjAhtLvj72s,14524
+copyparty/web/w.hash.js.gz,sha256=7wP9EZQNXQxwZnCCFUVsi_-6TM9PLZJeZ9krutXRRj8,1060
 copyparty/web/a/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 copyparty/web/a/partyfuse.py,sha256=MuRkaSuYsdfWfBFMOkbPwDXqSvNTw3sd7QhhlKCDZ8I,32311
-copyparty/web/a/u2c.py,sha256=
+copyparty/web/a/u2c.py,sha256=W0B4QfyM7QqPNEPdAGQhtFSNNLt2r6xr9oZcG05F0Io,42213
 copyparty/web/a/webdav-cfg.bat,sha256=Y4NoGZlksAIg4cBMb7KdJrpKC6Nx97onaTl6yMjaimk,1449
 copyparty/web/dd/2.png,sha256=gJ14XFPzaw95L6z92fSq9eMPikSQyu-03P1lgiGe0_I,258
 copyparty/web/dd/3.png,sha256=4lho8Koz5tV7jJ4ODo6GMTScZfkqsT05yp48EDFIlyg,252
@@ -102,9 +102,9 @@ copyparty/web/deps/prismd.css.gz,sha256=ObUlksQVr-OuYlTz-I4B23TeBg2QDVVGRnWBz8cV
 copyparty/web/deps/scp.woff2,sha256=w99BDU5i8MukkMEL-iW0YO9H4vFFZSPWxbkH70ytaAg,8612
 copyparty/web/deps/sha512.ac.js.gz,sha256=lFZaCLumgWxrvEuDr4bqdKHsqjX82AbVAb7_F45Yk88,7033
 copyparty/web/deps/sha512.hw.js.gz,sha256=vqoXeracj-99Z5MfY3jK2N4WiSzYQdfjy0RnUlQDhSU,8110
-copyparty-1.13.
-copyparty-1.13.
-copyparty-1.13.
-copyparty-1.13.
-copyparty-1.13.
-copyparty-1.13.
+copyparty-1.13.6.dist-info/LICENSE,sha256=gOr4h33pCsBEg9uIy9AYmb7qlocL4V9t2uPJS5wllr0,1072
+copyparty-1.13.6.dist-info/METADATA,sha256=RaamHNHak-QoDV2Jd7kBxvyTZk-qol0lUAWLgm0XDig,124426
+copyparty-1.13.6.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
+copyparty-1.13.6.dist-info/entry_points.txt,sha256=4zw6a3rqASywQomiYLObjjlxybaI65LYYOTJwgKz7b0,128
+copyparty-1.13.6.dist-info/top_level.txt,sha256=LnYUPsDyk-8kFgM6YJLG4h820DQekn81cObKSu9g-sI,10
+copyparty-1.13.6.dist-info/RECORD,,
{copyparty-1.13.4.dist-info → copyparty-1.13.6.dist-info}/LICENSE
File without changes
{copyparty-1.13.4.dist-info → copyparty-1.13.6.dist-info}/entry_points.txt
File without changes
{copyparty-1.13.4.dist-info → copyparty-1.13.6.dist-info}/top_level.txt
File without changes