copyparty 1.16.18__py3-none-any.whl → 1.16.20__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- copyparty/__init__.py +1 -1
- copyparty/__main__.py +21 -2
- copyparty/__version__.py +2 -2
- copyparty/authsrv.py +62 -9
- copyparty/cfg.py +3 -0
- copyparty/httpcli.py +44 -24
- copyparty/ico.py +13 -2
- copyparty/pwhash.py +1 -1
- copyparty/svchub.py +164 -61
- copyparty/tcpsrv.py +1 -1
- copyparty/th_cli.py +23 -4
- copyparty/th_srv.py +62 -7
- copyparty/u2idx.py +2 -2
- copyparty/up2k.py +7 -4
- copyparty/util.py +81 -3
- copyparty/web/browser.css.gz +0 -0
- copyparty/web/browser.js.gz +0 -0
- copyparty/web/deps/marked.js.gz +0 -0
- copyparty/web/splash.html +1 -1
- copyparty/web/ui.css.gz +0 -0
- copyparty/web/up2k.js.gz +0 -0
- copyparty/web/util.js.gz +0 -0
- {copyparty-1.16.18.dist-info → copyparty-1.16.20.dist-info}/METADATA +15 -3
- {copyparty-1.16.18.dist-info → copyparty-1.16.20.dist-info}/RECORD +28 -28
- {copyparty-1.16.18.dist-info → copyparty-1.16.20.dist-info}/WHEEL +1 -1
- {copyparty-1.16.18.dist-info → copyparty-1.16.20.dist-info}/entry_points.txt +0 -0
- {copyparty-1.16.18.dist-info → copyparty-1.16.20.dist-info}/licenses/LICENSE +0 -0
- {copyparty-1.16.18.dist-info → copyparty-1.16.20.dist-info}/top_level.txt +0 -0
copyparty/__init__.py
CHANGED
copyparty/__main__.py
CHANGED
@@ -222,7 +222,23 @@ def init_E(EE ) :
     if E.mod.endswith("__init__"):
         E.mod = os.path.dirname(E.mod)

-    if sys.platform == "win32":
+    try:
+        p = os.environ.get("XDG_CONFIG_HOME")
+        if not p:
+            raise Exception()
+        if p.startswith("~"):
+            p = os.path.expanduser(p)
+        p = os.path.abspath(os.path.realpath(p))
+        p = os.path.join(p, "copyparty")
+        if not os.path.isdir(p):
+            os.mkdir(p)
+        os.listdir(p)
+    except:
+        p = ""
+
+    if p:
+        E.cfg = p
+    elif sys.platform == "win32":
         bdir = os.environ.get("APPDATA") or os.environ.get("TEMP") or "."
         E.cfg = os.path.normpath(bdir + "/copyparty")
     elif sys.platform == "darwin":
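Note: the hunk above makes copyparty prefer an XDG config directory at startup and fall back to the previous platform defaults when that directory is unusable. A minimal standalone sketch of the resolution order (the function name and the final non-Windows fallback are illustrative, not copyparty's API):

    import os
    import sys


    def resolve_cfg_dir():
        # prefer $XDG_CONFIG_HOME/copyparty if it exists (or can be created) and is readable
        try:
            p = os.environ.get("XDG_CONFIG_HOME")
            if not p:
                raise Exception()
            if p.startswith("~"):
                p = os.path.expanduser(p)
            p = os.path.join(os.path.abspath(os.path.realpath(p)), "copyparty")
            if not os.path.isdir(p):
                os.mkdir(p)
            os.listdir(p)  # probe readability; any failure falls through to the defaults
            return p
        except Exception:
            pass

        # otherwise keep the old behavior (windows branch shown in the hunk above;
        # the last fallback here is illustrative only)
        if sys.platform == "win32":
            bdir = os.environ.get("APPDATA") or os.environ.get("TEMP") or "."
            return os.path.normpath(bdir + "/copyparty")
        return os.path.expanduser("~/.config/copyparty")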
@@ -1003,7 +1019,7 @@ def add_upload(ap):
     ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; [\033[32m-1\033[0m] = forbidden/always-off, [\033[32m0\033[0m] = default-off and warn if enabled, [\033[32m1\033[0m] = default-off, [\033[32m2\033[0m] = on, [\033[32m3\033[0m] = on and disable datecheck")
     ap2.add_argument("--u2j", metavar="JOBS", type=int, default=2, help="web-client: number of file chunks to upload in parallel; 1 or 2 is good for low-latency (same-country) connections, 4-8 for android clients, 16 for cross-atlantic (max=64)")
     ap2.add_argument("--u2sz", metavar="N,N,N", type=u, default="1,64,96", help="web-client: default upload chunksize (MiB); sets \033[33mmin,default,max\033[0m in the settings gui. Each HTTP POST will aim for \033[33mdefault\033[0m, and never exceed \033[33mmax\033[0m. Cloudflare max is 96. Big values are good for cross-atlantic but may increase HDD fragmentation on some FS. Disable this optimization with [\033[32m1,1,1\033[0m]")
-    ap2.add_argument("--u2ow", metavar="NUM", type=int, default=0, help="web-client: default setting for when to overwrite existing files; [\033[32m0\033[0m]=never, [\033[32m1\033[0m]=if-client-newer, [\033[32m2\033[0m]=always (volflag=u2ow)")
+    ap2.add_argument("--u2ow", metavar="NUM", type=int, default=0, help="web-client: default setting for when to replace/overwrite existing files; [\033[32m0\033[0m]=never, [\033[32m1\033[0m]=if-client-newer, [\033[32m2\033[0m]=always (volflag=u2ow)")
     ap2.add_argument("--u2sort", metavar="TXT", type=u, default="s", help="upload order; [\033[32ms\033[0m]=smallest-first, [\033[32mn\033[0m]=alphabetical, [\033[32mfs\033[0m]=force-s, [\033[32mfn\033[0m]=force-n -- alphabetical is a bit slower on fiber/LAN but makes it easier to eyeball if everything went fine")
     ap2.add_argument("--write-uplog", action="store_true", help="write POST reports to textfiles in working-directory")

@@ -1353,6 +1369,7 @@ def add_thumbnail(ap):
     ap2.add_argument("--th-r-ffi", metavar="T,T", type=u, default="apng,avif,avifs,bmp,cbz,dds,dib,fit,fits,fts,gif,hdr,heic,heics,heif,heifs,icns,ico,jp2,jpeg,jpg,jpx,jxl,pbm,pcx,pfm,pgm,png,pnm,ppm,psd,qoi,sgi,tga,tif,tiff,webp,xbm,xpm", help="image formats to decode using ffmpeg")
     ap2.add_argument("--th-r-ffv", metavar="T,T", type=u, default="3gp,asf,av1,avc,avi,flv,h264,h265,hevc,m4v,mjpeg,mjpg,mkv,mov,mp4,mpeg,mpeg2,mpegts,mpg,mpg2,mts,nut,ogm,ogv,rm,ts,vob,webm,wmv", help="video formats to decode using ffmpeg")
     ap2.add_argument("--th-r-ffa", metavar="T,T", type=u, default="aac,ac3,aif,aiff,alac,alaw,amr,apac,ape,au,bonk,dfpwm,dts,flac,gsm,ilbc,it,itgz,itxz,itz,m4a,mdgz,mdxz,mdz,mo3,mod,mp2,mp3,mpc,mptm,mt2,mulaw,ogg,okt,opus,ra,s3m,s3gz,s3xz,s3z,tak,tta,ulaw,wav,wma,wv,xm,xmgz,xmxz,xmz,xpk", help="audio formats to decode using ffmpeg")
+    ap2.add_argument("--th-spec-cnv", metavar="T,T", type=u, default="it,itgz,itxz,itz,mdgz,mdxz,mdz,mo3,mod,s3m,s3gz,s3xz,s3z,xm,xmgz,xmxz,xmz,xpk", help="audio formats which provoke https://trac.ffmpeg.org/ticket/10797 (huge ram usage for s3xmodit spectrograms)")
     ap2.add_argument("--au-unpk", metavar="E=F.C", type=u, default="mdz=mod.zip, mdgz=mod.gz, mdxz=mod.xz, s3z=s3m.zip, s3gz=s3m.gz, s3xz=s3m.xz, xmz=xm.zip, xmgz=xm.gz, xmxz=xm.xz, itz=it.zip, itgz=it.gz, itxz=it.xz, cbz=jpg.cbz", help="audio/image formats to decompress before passing to ffmpeg")


@@ -1385,6 +1402,7 @@ def add_db_general(ap, hcores):
     ap2.add_argument("-e2vu", action="store_true", help="on hash mismatch: update the database with the new hash")
     ap2.add_argument("-e2vp", action="store_true", help="on hash mismatch: panic and quit copyparty")
     ap2.add_argument("--hist", metavar="PATH", type=u, default="", help="where to store volume data (db, thumbs); default is a folder named \".hist\" inside each volume (volflag=hist)")
+    ap2.add_argument("--dbpath", metavar="PATH", type=u, default="", help="override where the volume databases are to be placed; default is the same as \033[33m--hist\033[0m (volflag=dbpath)")
     ap2.add_argument("--no-hash", metavar="PTN", type=u, default="", help="regex: disable hashing of matching absolute-filesystem-paths during e2ds folder scans (volflag=nohash)")
     ap2.add_argument("--no-idx", metavar="PTN", type=u, default=noidx, help="regex: disable indexing of matching absolute-filesystem-paths during e2ds folder scans (volflag=noidx)")
     ap2.add_argument("--no-dirsz", action="store_true", help="do not show total recursive size of folders in listings, show inode size instead; slightly faster (volflag=nodirsz)")
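Note: the new --dbpath mirrors --hist but relocates only the volume databases (up2k.db), leaving thumbnails wherever --hist or the per-volume histpath points; useful when the index should live on faster storage than the media. A hypothetical invocation (all paths are placeholders):

    copyparty -v /mnt/media:media:r --hist /mnt/media/.cache --dbpath /ssd/copyparty-db

The same override is available per volume through the dbpath volflag listed in cfg.py further down.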
@@ -1423,6 +1441,7 @@ def add_db_metadata(ap):

 def add_txt(ap):
     ap2 = ap.add_argument_group('textfile options')
+    ap2.add_argument("--md-hist", metavar="TXT", type=u, default="s", help="where to store old version of markdown files; [\033[32ms\033[0m]=subfolder, [\033[32mv\033[0m]=volume-histpath, [\033[32mn\033[0m]=nope/disabled (volflag=md_hist)")
     ap2.add_argument("-mcr", metavar="SEC", type=int, default=60, help="the textfile editor will check for serverside changes every \033[33mSEC\033[0m seconds")
     ap2.add_argument("-emp", action="store_true", help="enable markdown plugins -- neat but dangerous, big XSS risk")
     ap2.add_argument("--exp", action="store_true", help="enable textfile expansion -- replace {{self.ip}} and such; see \033[33m--help-exp\033[0m (volflag=exp)")
copyparty/__version__.py
CHANGED
copyparty/authsrv.py
CHANGED
@@ -353,6 +353,7 @@ class VFS(object):
         self.badcfg1 = False
         self.nodes = {}  # child nodes
         self.histtab = {}  # all realpath->histpath
+        self.dbpaths = {}  # all realpath->dbpath
         self.dbv = None  # closest full/non-jump parent
         self.lim = None  # upload limits; only set for dbv
         self.shr_src = None  # source vfs+rem of a share
@@ -374,12 +375,13 @@ class VFS(object):
             rp = realpath + ("" if realpath.endswith(os.sep) else os.sep)
             vp = vpath + ("/" if vpath else "")
             self.histpath = os.path.join(realpath, ".hist")  # db / thumbcache
+            self.dbpath = self.histpath
             self.all_vols = {vpath: self}  # flattened recursive
             self.all_nodes = {vpath: self}  # also jumpvols/shares
             self.all_aps = [(rp, self)]
             self.all_vps = [(vp, self)]
         else:
-            self.histpath = ""
+            self.histpath = self.dbpath = ""
             self.all_vols = {}
             self.all_nodes = {}
             self.all_aps = []
@@ -454,17 +456,23 @@ class VFS(object):

     def _copy_flags(self, name ) :
         flags = {k: v for k, v in self.flags.items()}
+
         hist = flags.get("hist")
         if hist and hist != "-":
             zs = "{}/{}".format(hist.rstrip("/"), name)
             flags["hist"] = os.path.expandvars(os.path.expanduser(zs))

+        dbp = flags.get("dbpath")
+        if dbp and dbp != "-":
+            zs = "{}/{}".format(dbp.rstrip("/"), name)
+            flags["dbpath"] = os.path.expandvars(os.path.expanduser(zs))
+
         return flags

     def bubble_flags(self) :
         if self.dbv:
             for k, v in self.dbv.flags.items():
-                if k not in ("hist",):
+                if k not in ("hist", "dbpath"):
                     self.flags[k] = v

         for n in self.nodes.values():
@@ -1752,7 +1760,7 @@ class AuthSrv(object):
                 pass
             elif vflag:
                 vflag = os.path.expandvars(os.path.expanduser(vflag))
-                vol.histpath = uncyg(vflag) if WINDOWS else vflag
+                vol.histpath = vol.dbpath = uncyg(vflag) if WINDOWS else vflag
             elif self.args.hist:
                 for nch in range(len(hid)):
                     hpath = os.path.join(self.args.hist, hid[: nch + 1])
@@ -1773,12 +1781,45 @@ class AuthSrv(object):
                         with open(powner, "wb") as f:
                             f.write(me)

-                    vol.histpath = hpath
+                    vol.histpath = vol.dbpath = hpath
                     break

             vol.histpath = absreal(vol.histpath)
+
+        for vol in vfs.all_vols.values():
+            hid = self.hid_cache[vol.realpath]
+            vflag = vol.flags.get("dbpath")
+            if vflag == "-":
+                pass
+            elif vflag:
+                vflag = os.path.expandvars(os.path.expanduser(vflag))
+                vol.dbpath = uncyg(vflag) if WINDOWS else vflag
+            elif self.args.dbpath:
+                for nch in range(len(hid)):
+                    hpath = os.path.join(self.args.dbpath, hid[: nch + 1])
+                    bos.makedirs(hpath)
+
+                    powner = os.path.join(hpath, "owner.txt")
+                    try:
+                        with open(powner, "rb") as f:
+                            owner = f.read().rstrip()
+                    except:
+                        owner = None
+
+                    me = afsenc(vol.realpath).rstrip()
+                    if owner not in [None, me]:
+                        continue
+
+                    if owner is None:
+                        with open(powner, "wb") as f:
+                            f.write(me)
+
+                    vol.dbpath = hpath
+                    break
+
+            vol.dbpath = absreal(vol.dbpath)
             if vol.dbv:
-                if bos.path.exists(os.path.join(vol.histpath, "up2k.db")):
+                if bos.path.exists(os.path.join(vol.dbpath, "up2k.db")):
                     promote.append(vol)
                     vol.dbv = None
                 else:
@@ -1793,9 +1834,7 @@ class AuthSrv(object):
                "\n the following jump-volumes were generated to assist the vfs.\n As they contain a database (probably from v0.11.11 or older),\n they are promoted to full volumes:"
            ]
            for vol in promote:
-                ta.append(
-                    " /{} ({}) ({})".format(vol.vpath, vol.realpath, vol.histpath)
-                )
+                ta.append(" /%s (%s) (%s)" % (vol.vpath, vol.realpath, vol.dbpath))

            self.log("\n\n".join(ta) + "\n", c=3)

@@ -1806,13 +1845,27 @@ class AuthSrv(object):
             is_shr = shr and zv.vpath.split("/")[0] == shr
             if histp and not is_shr and histp in rhisttab:
                 zv2 = rhisttab[histp]
-                t = "invalid config; multiple volumes share the same histpath (database location):\n histpath: %s\n volume 1: /%s [%s]\n volume 2: %s [%s]"
+                t = "invalid config; multiple volumes share the same histpath (database+thumbnails location):\n histpath: %s\n volume 1: /%s [%s]\n volume 2: %s [%s]"
                 t = t % (histp, zv2.vpath, zv2.realpath, zv.vpath, zv.realpath)
                 self.log(t, 1)
                 raise Exception(t)
             rhisttab[histp] = zv
             vfs.histtab[zv.realpath] = histp

+        rdbpaths = {}
+        vfs.dbpaths = {}
+        for zv in vfs.all_vols.values():
+            dbp = zv.dbpath
+            is_shr = shr and zv.vpath.split("/")[0] == shr
+            if dbp and not is_shr and dbp in rdbpaths:
+                zv2 = rdbpaths[dbp]
+                t = "invalid config; multiple volumes share the same dbpath (database location):\n dbpath: %s\n volume 1: /%s [%s]\n volume 2: %s [%s]"
+                t = t % (dbp, zv2.vpath, zv2.realpath, zv.vpath, zv.realpath)
+                self.log(t, 1)
+                raise Exception(t)
+            rdbpaths[dbp] = zv
+            vfs.dbpaths[zv.realpath] = dbp
+
         for vol in vfs.all_vols.values():
             use = False
             for k in ["zipmaxn", "zipmaxs"]:
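Note: both the --hist and the new --dbpath resolution in the hunks above use the same collision-avoidance scheme: each volume claims a subdirectory named by a prefix of its hash-id, lengthening the prefix while the candidate belongs to a different volume, with ownership recorded in owner.txt. A simplified standalone sketch of that allocation loop (claim_subdir and its error handling are illustrative, not copyparty internals):

    import os


    def claim_subdir(base, hid, realpath):
        # find or claim a subdirectory of `base` for the volume rooted at `realpath`
        me = realpath.encode("utf-8")
        for nch in range(len(hid)):
            hpath = os.path.join(base, hid[: nch + 1])
            os.makedirs(hpath, exist_ok=True)

            powner = os.path.join(hpath, "owner.txt")
            try:
                with open(powner, "rb") as f:
                    owner = f.read().rstrip()
            except OSError:
                owner = None

            if owner not in (None, me):
                continue  # claimed by another volume; try a longer prefix

            if owner is None:
                with open(powner, "wb") as f:
                    f.write(me)  # first come, first served

            return hpath

        raise RuntimeError("could not allocate a db subdir for " + realpath)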
copyparty/cfg.py
CHANGED
@@ -83,6 +83,7 @@ def vf_vmap() :
         "md_sbf",
         "lg_sba",
         "md_sba",
+        "md_hist",
         "nrand",
         "u2ow",
         "og_desc",
@@ -204,6 +205,7 @@ flagcats = {
         "d2v": "disables file verification, overrides -e2v*",
         "d2d": "disables all database stuff, overrides -e2*",
         "hist=/tmp/cdb": "puts thumbnails and indexes at that location",
+        "dbpath=/tmp/cdb": "puts indexes at that location",
         "scan=60": "scan for new files every 60sec, same as --re-maxage",
         "nohash=\\.iso$": "skips hashing file contents if path matches *.iso",
         "noidx=\\.iso$": "fully ignores the contents at paths matching *.iso",
@@ -291,6 +293,7 @@ flagcats = {
         "og_ua": "if defined: only send OG html if useragent matches this regex",
     },
     "textfiles": {
+        "md_hist": "where to put markdown backups; s=subfolder, v=volHist, n=nope",
         "exp": "enable textfile expansion; see --help-exp",
         "exp_md": "placeholders to expand in markdown files; see --help",
         "exp_lg": "placeholders to expand in prologue/epilogue; see --help",
copyparty/httpcli.py
CHANGED
@@ -57,6 +57,7 @@ from .util import (
     UnrecvEOF,
     WrongPostKey,
     absreal,
+    afsenc,
     alltrace,
     atomic_move,
     b64dec,
@@ -1200,11 +1201,6 @@ class HttpCli(object):
         else:
             return self.tx_res(res_path)

-        if res_path != undot(res_path):
-            t = "malicious user; attempted path traversal; req(%r) vp(%r) => %r"
-            self.log(t % (self.req, "/" + self.vpath, res_path), 1)
-            self.cbonk(self.conn.hsrv.gmal, self.req, "trav", "path traversal")
-
         self.tx_404()
         return False

@@ -2983,9 +2979,6 @@ class HttpCli(object):
         vfs, rem = self.asrv.vfs.get(vpath, self.uname, False, True)
         rem = sanitize_vpath(rem, "/")
         fn = vfs.canonical(rem)
-        if not fn.startswith(vfs.realpath):
-            self.log("invalid mkdir %r %r" % (self.gctx, vpath), 1)
-            raise Pebkac(422)

         if not nullwrite:
             fdir = os.path.dirname(fn)
@@ -3484,6 +3477,7 @@ class HttpCli(object):

         fp = os.path.join(fp, fn)
         rem = "{}/{}".format(rp, fn).strip("/")
+        dbv, vrem = vfs.get_dbv(rem)

         if not rem.endswith(".md") and not self.can_delete:
             raise Pebkac(400, "only markdown pls")
@@ -3538,13 +3532,27 @@ class HttpCli(object):
         mdir, mfile = os.path.split(fp)
         fname, fext = mfile.rsplit(".", 1) if "." in mfile else (mfile, "md")
         mfile2 = "{}.{:.3f}.{}".format(fname, srv_lastmod, fext)
-
+
+        dp = ""
+        hist_cfg = dbv.flags["md_hist"]
+        if hist_cfg == "v":
+            vrd = vsplit(vrem)[0]
+            zb = hashlib.sha512(afsenc(vrd)).digest()
+            zs = ub64enc(zb).decode("ascii")[:24].lower()
+            dp = "%s/md/%s/%s/%s" % (dbv.histpath, zs[:2], zs[2:4], zs)
+            self.log("moving old version to %s/%s" % (dp, mfile2))
+            if bos.makedirs(dp):
+                with open(os.path.join(dp, "dir.txt"), "wb") as f:
+                    f.write(afsenc(vrd))
+        elif hist_cfg == "s":
             dp = os.path.join(mdir, ".hist")
-
-
-
-
-
+            try:
+                bos.mkdir(dp)
+                hidedir(dp)
+            except:
+                pass
+        if dp:
+            wrename(self.log, fp, os.path.join(dp, mfile2), vfs.flags)

         p_field, _, p_data = next(self.parser.gen)
         if p_field != "body":
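Note: with md_hist=v, old versions of a markdown file are kept under the volume's histpath rather than next to the file: the parent directory's vpath is hashed with sha512, base64-encoded, truncated to 24 lowercase characters, and fanned out two levels deep, with a dir.txt recording the original directory. A rough standalone sketch of that derivation (copyparty's afsenc/ub64enc helpers are approximated with stdlib calls; the example path is made up):

    import base64
    import hashlib


    def md_hist_dir(histpath, vrd):
        # where old versions of markdown files from directory `vrd` are stored (md_hist=v)
        zb = hashlib.sha512(vrd.encode("utf-8", "surrogateescape")).digest()
        zs = base64.urlsafe_b64encode(zb).decode("ascii")[:24].lower()
        return "%s/md/%s/%s/%s" % (histpath, zs[:2], zs[2:4], zs)


    print(md_hist_dir("/srv/vol/.hist", "docs/notes"))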
@@ -3616,13 +3624,12 @@ class HttpCli(object):
             wunlink(self.log, fp, vfs.flags)
             raise Pebkac(403, t)

-        vfs, rem = vfs.get_dbv(rem)
         self.conn.hsrv.broker.say(
             "up2k.hash_file",
-            vfs.realpath,
-            vfs.vpath,
-            vfs.flags,
-            vsplit(rem)[0],
+            dbv.realpath,
+            dbv.vpath,
+            dbv.flags,
+            vsplit(vrem)[0],
             fn,
             self.ip,
             new_lastmod,
@@ -4219,6 +4226,7 @@ class HttpCli(object):
             self.log(t % (data_end / M, lower / M, upper / M), 6)
             with self.u2mutex:
                 if data_end > self.u2fh.aps.get(ap_data, data_end):
+                    fhs = None
                     try:
                         fhs = self.u2fh.cache[ap_data].all_fhs
                         for fh in fhs:
@@ -4226,7 +4234,11 @@ class HttpCli(object):
                         self.u2fh.aps[ap_data] = data_end
                         self.log("pipe: flushed %d up2k-FDs" % (len(fhs),))
                     except Exception as ex:
-                        self.log("pipe: u2fh flush failed: " + repr(ex))
+                        if fhs is None:
+                            err = "file is not being written to right now"
+                        else:
+                            err = repr(ex)
+                        self.log("pipe: u2fh flush failed: " + err)

             if lower >= data_end:
                 if data_end:
@@ -4849,7 +4861,7 @@ class HttpCli(object):
             self.reply(pt.encode("utf-8"), status=rc)
             return True

-        if "th" in self.ouparam:
+        if "th" in self.ouparam and str(self.ouparam["th"])[:1] in "jw":
             return self.tx_svg("e" + pt[:3])

         # most webdav clients will not send credentials until they
@@ -5776,7 +5788,13 @@ class HttpCli(object):

         thp = None
         if self.thumbcli and not nothumb:
-            thp = self.thumbcli.get(dbv, vrem, int(st.st_mtime), th_fmt)
+            try:
+                thp = self.thumbcli.get(dbv, vrem, int(st.st_mtime), th_fmt)
+            except Pebkac as ex:
+                if ex.code == 500 and th_fmt[:1] in "jw":
+                    self.log("failed to convert [%s]:\n%s" % (abspath, ex), 3)
+                    return self.tx_svg("--error--\ncheck\nserver\nlog")
+                raise

         if thp:
             return self.tx_file(thp)
@@ -5998,9 +6016,11 @@ class HttpCli(object):
         # check for old versions of files,
         # [num-backups, most-recent, hist-path]
         hist = {}
-        histdir = os.path.join(fsroot, ".hist")
-        ptn = RE_MDV
         try:
+            if vf["md_hist"] != "s":
+                raise Exception()
+            histdir = os.path.join(fsroot, ".hist")
+            ptn = RE_MDV
             for hfn in bos.listdir(histdir):
                 m = ptn.match(hfn)
                 if not m:
copyparty/ico.py
CHANGED
@@ -94,10 +94,21 @@ class Ico(object):
 <?xml version="1.0" encoding="UTF-8"?>
 <svg version="1.1" viewBox="0 0 100 {}" xmlns="http://www.w3.org/2000/svg"><g>
 <rect width="100%" height="100%" fill="#{}" />
-<text x="50%" y="50%" dominant-baseline="middle" text-anchor="middle" xml:space="preserve"
+<text x="50%" y="{}" dominant-baseline="middle" text-anchor="middle" xml:space="preserve"
 fill="#{}" font-family="monospace" font-size="14px" style="letter-spacing:.5px">{}</text>
 </g></svg>
 """
-        svg = svg.format(h, c[:6], c[6:], html_escape(ext, True))
+
+        txt = html_escape(ext, True)
+        if "\n" in txt:
+            lines = txt.split("\n")
+            n = len(lines)
+            y = "20%" if n == 2 else "10%" if n == 3 else "0"
+            zs = '<tspan x="50%%" dy="1.2em">%s</tspan>'
+            txt = "".join([zs % (x,) for x in lines])
+        else:
+            y = "50%"
+
+        svg = svg.format(h, c[:6], y, c[6:], txt)

         return "image/svg+xml", svg.encode("utf-8")
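Note: the ico.py change above teaches the icon generator to render multi-line labels (such as the new "--error--" thumbnails) as stacked tspan rows, nudging the baseline upward as the line count grows. A rough standalone sketch of the same layout logic (colors, height and the helper name are hardcoded here purely for illustration):

    from html import escape


    def label_svg(ext, h=100, bg="333", fg="fff"):
        # tiny icon with `ext` as its label, stacking lines the way ico.py now does
        txt = escape(ext, True)
        if "\n" in txt:
            lines = txt.split("\n")
            n = len(lines)
            y = "20%" if n == 2 else "10%" if n == 3 else "0"
            txt = "".join('<tspan x="50%%" dy="1.2em">%s</tspan>' % (x,) for x in lines)
        else:
            y = "50%"

        svg = (
            '<?xml version="1.0" encoding="UTF-8"?>\n'
            '<svg version="1.1" viewBox="0 0 100 %d" xmlns="http://www.w3.org/2000/svg"><g>\n'
            '<rect width="100%%" height="100%%" fill="#%s" />\n'
            '<text x="50%%" y="%s" dominant-baseline="middle" text-anchor="middle" xml:space="preserve"\n'
            ' fill="#%s" font-family="monospace" font-size="14px" style="letter-spacing:.5px">%s</text>\n'
            "</g></svg>\n"
        )
        return svg % (h, bg, y, fg, txt)


    print(label_svg("--error--\ncheck\nserver\nlog"))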