copyparty-1.16.17-py3-none-any.whl → copyparty-1.16.19-py3-none-any.whl

This diff shows the content of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only and reflects only the changes between those published versions.
copyparty/__init__.py CHANGED
@@ -11,7 +11,7 @@ _=(0,0) # _____________________________________________________________________
 # fmt: on
 
 try:
-    from typing import TYPE_CHECKING
+    TYPE_CHECKING = False
 except:
     TYPE_CHECKING = False
 
copyparty/__main__.py CHANGED
@@ -40,6 +40,7 @@ from .cfg import flagcats, onedash
 from .svchub import SvcHub
 from .util import (
     APPLESAN_TXT,
+    BAD_BOTS,
     DEF_EXP,
     DEF_MTE,
     DEF_MTH,
@@ -221,7 +222,23 @@ def init_E(EE ) :
     if E.mod.endswith("__init__"):
         E.mod = os.path.dirname(E.mod)
 
-    if sys.platform == "win32":
+    try:
+        p = os.environ.get("XDG_CONFIG_HOME")
+        if not p:
+            raise Exception()
+        if p.startswith("~"):
+            p = os.path.expanduser(p)
+        p = os.path.abspath(os.path.realpath(p))
+        p = os.path.join(p, "copyparty")
+        if not os.path.isdir(p):
+            os.mkdir(p)
+        os.listdir(p)
+    except:
+        p = ""
+
+    if p:
+        E.cfg = p
+    elif sys.platform == "win32":
         bdir = os.environ.get("APPDATA") or os.environ.get("TEMP") or "."
         E.cfg = os.path.normpath(bdir + "/copyparty")
     elif sys.platform == "darwin":
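Note: the block added above changes where copyparty keeps its config dir; a usable $XDG_CONFIG_HOME/copyparty now takes priority over the platform defaults. A minimal standalone sketch of that resolution order (the non-Windows fallback shown here is an assumption, not part of this diff):

```python
import os
import sys

def resolve_cfg_dir():
    # prefer $XDG_CONFIG_HOME/copyparty when it is set and usable
    p = os.environ.get("XDG_CONFIG_HOME")
    if p:
        if p.startswith("~"):
            p = os.path.expanduser(p)
        p = os.path.join(os.path.abspath(os.path.realpath(p)), "copyparty")
        try:
            if not os.path.isdir(p):
                os.mkdir(p)
            os.listdir(p)  # confirm the folder is actually accessible
            return p
        except OSError:
            pass  # unusable; fall through to the platform defaults

    if sys.platform == "win32":
        bdir = os.environ.get("APPDATA") or os.environ.get("TEMP") or "."
        return os.path.normpath(bdir + "/copyparty")
    # darwin/linux fallbacks omitted; assumed default for this sketch
    return os.path.expanduser("~/.config/copyparty")
```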
@@ -1002,7 +1019,7 @@ def add_upload(ap):
     ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; [\033[32m-1\033[0m] = forbidden/always-off, [\033[32m0\033[0m] = default-off and warn if enabled, [\033[32m1\033[0m] = default-off, [\033[32m2\033[0m] = on, [\033[32m3\033[0m] = on and disable datecheck")
     ap2.add_argument("--u2j", metavar="JOBS", type=int, default=2, help="web-client: number of file chunks to upload in parallel; 1 or 2 is good for low-latency (same-country) connections, 4-8 for android clients, 16 for cross-atlantic (max=64)")
     ap2.add_argument("--u2sz", metavar="N,N,N", type=u, default="1,64,96", help="web-client: default upload chunksize (MiB); sets \033[33mmin,default,max\033[0m in the settings gui. Each HTTP POST will aim for \033[33mdefault\033[0m, and never exceed \033[33mmax\033[0m. Cloudflare max is 96. Big values are good for cross-atlantic but may increase HDD fragmentation on some FS. Disable this optimization with [\033[32m1,1,1\033[0m]")
-    ap2.add_argument("--u2ow", metavar="NUM", type=int, default=0, help="web-client: default setting for when to overwrite existing files; [\033[32m0\033[0m]=never, [\033[32m1\033[0m]=if-client-newer, [\033[32m2\033[0m]=always (volflag=u2ow)")
+    ap2.add_argument("--u2ow", metavar="NUM", type=int, default=0, help="web-client: default setting for when to replace/overwrite existing files; [\033[32m0\033[0m]=never, [\033[32m1\033[0m]=if-client-newer, [\033[32m2\033[0m]=always (volflag=u2ow)")
     ap2.add_argument("--u2sort", metavar="TXT", type=u, default="s", help="upload order; [\033[32ms\033[0m]=smallest-first, [\033[32mn\033[0m]=alphabetical, [\033[32mfs\033[0m]=force-s, [\033[32mfn\033[0m]=force-n -- alphabetical is a bit slower on fiber/LAN but makes it easier to eyeball if everything went fine")
     ap2.add_argument("--write-uplog", action="store_true", help="write POST reports to textfiles in working-directory")
 
@@ -1021,6 +1038,8 @@ def add_network(ap):
         ap2.add_argument("--reuseaddr", action="store_true", help="set reuseaddr on listening sockets on windows; allows rapid restart of copyparty at the expense of being able to accidentally start multiple instances")
     else:
         ap2.add_argument("--freebind", action="store_true", help="allow listening on IPs which do not yet exist, for example if the network interfaces haven't finished going up. Only makes sense for IPs other than '0.0.0.0', '127.0.0.1', '::', and '::1'. May require running as root (unless net.ipv6.ip_nonlocal_bind)")
+    ap2.add_argument("--wr-h-eps", metavar="PATH", type=u, default="", help="write list of listening-on ip:port to textfile at \033[33mPATH\033[0m when http-servers have started")
+    ap2.add_argument("--wr-h-aon", metavar="PATH", type=u, default="", help="write list of accessible-on ip:port to textfile at \033[33mPATH\033[0m when http-servers have started")
     ap2.add_argument("--s-thead", metavar="SEC", type=int, default=120, help="socket timeout (read request header)")
     ap2.add_argument("--s-tbody", metavar="SEC", type=float, default=128.0, help="socket timeout (read/write request/response bodies). Use 60 on fast servers (default is extremely safe). Disable with 0 if reverse-proxied for a 2%% speed boost")
     ap2.add_argument("--s-rd-sz", metavar="B", type=int, default=256*1024, help="socket read size in bytes (indirectly affects filesystem writes; recommendation: keep equal-to or lower-than \033[33m--iobuf\033[0m)")
@@ -1214,6 +1233,7 @@ def add_yolo(ap):
     ap2 = ap.add_argument_group('yolo options')
     ap2.add_argument("--allow-csrf", action="store_true", help="disable csrf protections; let other domains/sites impersonate you through cross-site requests")
     ap2.add_argument("--getmod", action="store_true", help="permit ?move=[...] and ?delete as GET")
+    ap2.add_argument("--wo-up-readme", action="store_true", help="allow users with write-only access to upload logues and readmes without adding the _wo_ filename prefix (volflag=wo_up_readme)")
 
 
 def add_optouts(ap):
@@ -1233,6 +1253,7 @@ def add_optouts(ap):
     ap2.add_argument("--zipmaxt", metavar="TXT", type=u, default="", help="custom errormessage when download size exceeds max (volflag=zipmaxt)")
     ap2.add_argument("--zipmaxu", action="store_true", help="authenticated users bypass the zip size limit (volflag=zipmaxu)")
     ap2.add_argument("--zip-who", metavar="LVL", type=int, default=3, help="who can download as zip/tar? [\033[32m0\033[0m]=nobody, [\033[32m1\033[0m]=admins, [\033[32m2\033[0m]=authenticated-with-read-access, [\033[32m3\033[0m]=everyone-with-read-access (volflag=zip_who)\n\033[1;31mWARNING:\033[0m if a nested volume has a more restrictive value than a parent volume, then this will be \033[33mignored\033[0m if the download is initiated from the parent, more lenient volume")
+    ap2.add_argument("--ua-nozip", metavar="PTN", type=u, default=BAD_BOTS, help="regex of user-agents to reject from download-as-zip/tar; disable with [\033[32mno\033[0m] or blank")
     ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar; same as \033[33m--zip-who=0\033[0m")
     ap2.add_argument("--no-tarcmp", action="store_true", help="disable download as compressed tar (?tar=gz, ?tar=bz2, ?tar=xz, ?tar=gz:9, ...)")
     ap2.add_argument("--no-lifetime", action="store_true", help="do not allow clients (or server config) to schedule an upload to be deleted after a given time")
@@ -1380,6 +1401,7 @@ def add_db_general(ap, hcores):
     ap2.add_argument("-e2vu", action="store_true", help="on hash mismatch: update the database with the new hash")
     ap2.add_argument("-e2vp", action="store_true", help="on hash mismatch: panic and quit copyparty")
     ap2.add_argument("--hist", metavar="PATH", type=u, default="", help="where to store volume data (db, thumbs); default is a folder named \".hist\" inside each volume (volflag=hist)")
+    ap2.add_argument("--dbpath", metavar="PATH", type=u, default="", help="override where the volume databases are to be placed; default is the same as \033[33m--hist\033[0m (volflag=dbpath)")
     ap2.add_argument("--no-hash", metavar="PTN", type=u, default="", help="regex: disable hashing of matching absolute-filesystem-paths during e2ds folder scans (volflag=nohash)")
     ap2.add_argument("--no-idx", metavar="PTN", type=u, default=noidx, help="regex: disable indexing of matching absolute-filesystem-paths during e2ds folder scans (volflag=noidx)")
     ap2.add_argument("--no-dirsz", action="store_true", help="do not show total recursive size of folders in listings, show inode size instead; slightly faster (volflag=nodirsz)")
@@ -1418,11 +1440,13 @@ def add_db_metadata(ap):
 
 def add_txt(ap):
     ap2 = ap.add_argument_group('textfile options')
+    ap2.add_argument("--md-hist", metavar="TXT", type=u, default="s", help="where to store old version of markdown files; [\033[32ms\033[0m]=subfolder, [\033[32mv\033[0m]=volume-histpath, [\033[32mn\033[0m]=nope/disabled (volflag=md_hist)")
     ap2.add_argument("-mcr", metavar="SEC", type=int, default=60, help="the textfile editor will check for serverside changes every \033[33mSEC\033[0m seconds")
     ap2.add_argument("-emp", action="store_true", help="enable markdown plugins -- neat but dangerous, big XSS risk")
     ap2.add_argument("--exp", action="store_true", help="enable textfile expansion -- replace {{self.ip}} and such; see \033[33m--help-exp\033[0m (volflag=exp)")
     ap2.add_argument("--exp-md", metavar="V,V,V", type=u, default=DEF_EXP, help="comma/space-separated list of placeholders to expand in markdown files; add/remove stuff on the default list with +hdr_foo or /vf.scan (volflag=exp_md)")
     ap2.add_argument("--exp-lg", metavar="V,V,V", type=u, default=DEF_EXP, help="comma/space-separated list of placeholders to expand in prologue/epilogue files (volflag=exp_lg)")
+    ap2.add_argument("--ua-nodoc", metavar="PTN", type=u, default=BAD_BOTS, help="regex of user-agents to reject from viewing documents through ?doc=[...]; disable with [\033[32mno\033[0m] or blank")
 
 
 def add_og(ap):
copyparty/__version__.py CHANGED
@@ -1,8 +1,8 @@
 # coding: utf-8
 
-VERSION = (1, 16, 17)
+VERSION = (1, 16, 19)
 CODENAME = "COPYparty"
-BUILD_DT = (2025, 3, 16)
+BUILD_DT = (2025, 4, 8)
 
 S_VERSION = ".".join(map(str, VERSION))
 S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
copyparty/authsrv.py CHANGED
@@ -353,6 +353,7 @@ class VFS(object):
         self.badcfg1 = False
         self.nodes = {}  # child nodes
         self.histtab = {}  # all realpath->histpath
+        self.dbpaths = {}  # all realpath->dbpath
         self.dbv = None  # closest full/non-jump parent
         self.lim = None  # upload limits; only set for dbv
         self.shr_src = None  # source vfs+rem of a share
@@ -374,12 +375,13 @@ class VFS(object):
             rp = realpath + ("" if realpath.endswith(os.sep) else os.sep)
             vp = vpath + ("/" if vpath else "")
             self.histpath = os.path.join(realpath, ".hist")  # db / thumbcache
+            self.dbpath = self.histpath
             self.all_vols = {vpath: self}  # flattened recursive
             self.all_nodes = {vpath: self}  # also jumpvols/shares
             self.all_aps = [(rp, self)]
             self.all_vps = [(vp, self)]
         else:
-            self.histpath = ""
+            self.histpath = self.dbpath = ""
             self.all_vols = {}
             self.all_nodes = {}
             self.all_aps = []
@@ -454,17 +456,23 @@ class VFS(object):
 
     def _copy_flags(self, name ) :
         flags = {k: v for k, v in self.flags.items()}
+
         hist = flags.get("hist")
         if hist and hist != "-":
             zs = "{}/{}".format(hist.rstrip("/"), name)
             flags["hist"] = os.path.expandvars(os.path.expanduser(zs))
 
+        dbp = flags.get("dbpath")
+        if dbp and dbp != "-":
+            zs = "{}/{}".format(dbp.rstrip("/"), name)
+            flags["dbpath"] = os.path.expandvars(os.path.expanduser(zs))
+
         return flags
 
     def bubble_flags(self) :
         if self.dbv:
             for k, v in self.dbv.flags.items():
-                if k not in ["hist"]:
+                if k not in ("hist", "dbpath"):
                     self.flags[k] = v
 
         for n in self.nodes.values():
@@ -1752,7 +1760,7 @@ class AuthSrv(object):
                 pass
             elif vflag:
                 vflag = os.path.expandvars(os.path.expanduser(vflag))
-                vol.histpath = uncyg(vflag) if WINDOWS else vflag
+                vol.histpath = vol.dbpath = uncyg(vflag) if WINDOWS else vflag
             elif self.args.hist:
                 for nch in range(len(hid)):
                     hpath = os.path.join(self.args.hist, hid[: nch + 1])
@@ -1773,12 +1781,45 @@ class AuthSrv(object):
                         with open(powner, "wb") as f:
                             f.write(me)
 
-                    vol.histpath = hpath
+                    vol.histpath = vol.dbpath = hpath
                     break
 
             vol.histpath = absreal(vol.histpath)
+
+        for vol in vfs.all_vols.values():
+            hid = self.hid_cache[vol.realpath]
+            vflag = vol.flags.get("dbpath")
+            if vflag == "-":
+                pass
+            elif vflag:
+                vflag = os.path.expandvars(os.path.expanduser(vflag))
+                vol.dbpath = uncyg(vflag) if WINDOWS else vflag
+            elif self.args.dbpath:
+                for nch in range(len(hid)):
+                    hpath = os.path.join(self.args.dbpath, hid[: nch + 1])
+                    bos.makedirs(hpath)
+
+                    powner = os.path.join(hpath, "owner.txt")
+                    try:
+                        with open(powner, "rb") as f:
+                            owner = f.read().rstrip()
+                    except:
+                        owner = None
+
+                    me = afsenc(vol.realpath).rstrip()
+                    if owner not in [None, me]:
+                        continue
+
+                    if owner is None:
+                        with open(powner, "wb") as f:
+                            f.write(me)
+
+                    vol.dbpath = hpath
+                    break
+
+            vol.dbpath = absreal(vol.dbpath)
             if vol.dbv:
-                if bos.path.exists(os.path.join(vol.histpath, "up2k.db")):
+                if bos.path.exists(os.path.join(vol.dbpath, "up2k.db")):
                     promote.append(vol)
                     vol.dbv = None
                 else:
@@ -1793,9 +1834,7 @@ class AuthSrv(object):
                 "\n the following jump-volumes were generated to assist the vfs.\n As they contain a database (probably from v0.11.11 or older),\n they are promoted to full volumes:"
             ]
             for vol in promote:
-                ta.append(
-                    " /{} ({}) ({})".format(vol.vpath, vol.realpath, vol.histpath)
-                )
+                ta.append(" /%s (%s) (%s)" % (vol.vpath, vol.realpath, vol.dbpath))
 
             self.log("\n\n".join(ta) + "\n", c=3)
 
@@ -1806,13 +1845,27 @@ class AuthSrv(object):
             is_shr = shr and zv.vpath.split("/")[0] == shr
             if histp and not is_shr and histp in rhisttab:
                 zv2 = rhisttab[histp]
-                t = "invalid config; multiple volumes share the same histpath (database location):\n histpath: %s\n volume 1: /%s [%s]\n volume 2: %s [%s]"
+                t = "invalid config; multiple volumes share the same histpath (database+thumbnails location):\n histpath: %s\n volume 1: /%s [%s]\n volume 2: %s [%s]"
                 t = t % (histp, zv2.vpath, zv2.realpath, zv.vpath, zv.realpath)
                 self.log(t, 1)
                 raise Exception(t)
             rhisttab[histp] = zv
             vfs.histtab[zv.realpath] = histp
 
+        rdbpaths = {}
+        vfs.dbpaths = {}
+        for zv in vfs.all_vols.values():
+            dbp = zv.dbpath
+            is_shr = shr and zv.vpath.split("/")[0] == shr
+            if dbp and not is_shr and dbp in rdbpaths:
+                zv2 = rdbpaths[dbp]
+                t = "invalid config; multiple volumes share the same dbpath (database location):\n dbpath: %s\n volume 1: /%s [%s]\n volume 2: %s [%s]"
+                t = t % (dbp, zv2.vpath, zv2.realpath, zv.vpath, zv.realpath)
+                self.log(t, 1)
+                raise Exception(t)
+            rdbpaths[dbp] = zv
+            vfs.dbpaths[zv.realpath] = dbp
+
         for vol in vfs.all_vols.values():
             use = False
             for k in ["zipmaxn", "zipmaxs"]:
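Note: the new dbpath flag/volflag splits the database location away from the thumbnail cache; dbpath defaults to histpath, and auto-created child volumes derive their own path by appending the child name, exactly as hist already did in _copy_flags(). A small sketch of that derivation (standalone, mirroring the hunk above):

```python
import os

def derive_child_flag(parent_flag, child_name):
    # mirrors _copy_flags(): child volumes get "<parent>/<name>",
    # with ~ and $VARS expanded; "-" and empty values pass through
    if not parent_flag or parent_flag == "-":
        return parent_flag
    zs = "{}/{}".format(parent_flag.rstrip("/"), child_name)
    return os.path.expandvars(os.path.expanduser(zs))

# example: dbpath=~/dbs on the parent becomes ~/dbs/music (expanded)
# for an auto-created child volume named "music"
print(derive_child_flag("~/dbs", "music"))
```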
copyparty/cfg.py CHANGED
@@ -52,6 +52,7 @@ def vf_bmap() :
         "og_s_title",
         "rand",
         "rss",
+        "wo_up_readme",
         "xdev",
         "xlink",
         "xvol",
@@ -82,6 +83,7 @@ def vf_vmap() :
         "md_sbf",
         "lg_sba",
         "md_sba",
+        "md_hist",
         "nrand",
         "u2ow",
         "og_desc",
@@ -173,6 +175,7 @@ flagcats = {
         "vmaxb=1g": "total volume size max 1 GiB (suffixes: b, k, m, g, t)",
         "vmaxn=4k": "max 4096 files in volume (suffixes: b, k, m, g, t)",
         "medialinks": "return medialinks for non-up2k uploads (not hotlinks)",
+        "wo_up_readme": "write-only users can upload logues without getting renamed",
         "rand": "force randomized filenames, 9 chars long by default",
         "nrand=N": "randomized filenames are N chars long",
         "u2ow=N": "overwrite existing files? 0=no 1=if-older 2=always",
@@ -202,6 +205,7 @@ flagcats = {
         "d2v": "disables file verification, overrides -e2v*",
         "d2d": "disables all database stuff, overrides -e2*",
         "hist=/tmp/cdb": "puts thumbnails and indexes at that location",
+        "dbpath=/tmp/cdb": "puts indexes at that location",
         "scan=60": "scan for new files every 60sec, same as --re-maxage",
         "nohash=\\.iso$": "skips hashing file contents if path matches *.iso",
         "noidx=\\.iso$": "fully ignores the contents at paths matching *.iso",
@@ -289,6 +293,7 @@ flagcats = {
         "og_ua": "if defined: only send OG html if useragent matches this regex",
     },
     "textfiles": {
+        "md_hist": "where to put markdown backups; s=subfolder, v=volHist, n=nope",
         "exp": "enable textfile expansion; see --help-exp",
         "exp_md": "placeholders to expand in markdown files; see --help",
         "exp_lg": "placeholders to expand in prologue/epilogue; see --help",
copyparty/ftpd.py CHANGED
@@ -19,6 +19,7 @@ from .__init__ import PY2, TYPE_CHECKING
 from .authsrv import VFS
 from .bos import bos
 from .util import (
+    FN_EMB,
     VF_CAREFUL,
     Daemon,
     ODict,
@@ -166,6 +167,16 @@ class FtpFs(AbstractedFS):
         fn = sanitize_fn(fn or "", "")
         vpath = vjoin(rd, fn)
         vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d)
+        if (
+            w
+            and fn.lower() in FN_EMB
+            and self.h.uname not in vfs.axs.uread
+            and "wo_up_readme" not in vfs.flags
+        ):
+            fn = "_wo_" + fn
+            vpath = vjoin(rd, fn)
+            vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d)
+
         if not vfs.realpath:
             t = "No filesystem mounted at [{}]"
             raise FSE(t.format(vpath))
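Note: write-only uploads of readme/logue files now get renamed with a _wo_ prefix unless the volume sets wo_up_readme; the same rule is applied by the HTTP handler further down. A rough sketch of the rule, with a hypothetical stand-in for copyparty's FN_EMB set:

```python
# hypothetical stand-in for FN_EMB (the filenames copyparty embeds in
# folder listings, e.g. readmes and prologue/epilogue files)
FN_EMB = {"readme.md", ".prologue.html", ".epilogue.html"}

def upload_name(fn, can_read, can_write, vol_flags):
    # write-only users cannot see how an embedded doc would render,
    # so their uploads are prefixed unless the volume opts out
    if (
        can_write
        and not can_read
        and fn.lower() in FN_EMB
        and "wo_up_readme" not in vol_flags
    ):
        return "_wo_" + fn
    return fn

print(upload_name("README.md", False, True, {}))  # -> _wo_README.md
```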
copyparty/httpcli.py CHANGED
@@ -4,7 +4,6 @@ from __future__ import print_function, unicode_literals
 import argparse  # typechk
 import copy
 import errno
-import gzip
 import hashlib
 import itertools
 import json
@@ -46,6 +45,7 @@ from .util import (
     APPLESAN_RE,
     BITNESS,
     DAV_ALLPROPS,
+    FN_EMB,
     HAVE_SQLITE3,
     HTTPCODE,
     META_NOBOTS,
@@ -57,6 +57,7 @@ from .util import (
     UnrecvEOF,
     WrongPostKey,
     absreal,
+    afsenc,
     alltrace,
     atomic_move,
     b64dec,
@@ -69,6 +70,7 @@ from .util import (
     get_df,
     get_spd,
     guess_mime,
+    gzip,
     gzip_file_orig_sz,
     gzip_orig_sz,
     has_resource,
@@ -2539,6 +2541,16 @@ class HttpCli(object):
         vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
         dbv, vrem = vfs.get_dbv(rem)
 
+        name = sanitize_fn(name, "")
+        if (
+            not self.can_read
+            and self.can_write
+            and name.lower() in FN_EMB
+            and "wo_up_readme" not in dbv.flags
+        ):
+            name = "_wo_" + name
+
+        body["name"] = name
         body["vtop"] = dbv.vpath
         body["ptop"] = dbv.realpath
         body["prel"] = vrem
@@ -2972,9 +2984,6 @@ class HttpCli(object):
         vfs, rem = self.asrv.vfs.get(vpath, self.uname, False, True)
         rem = sanitize_vpath(rem, "/")
         fn = vfs.canonical(rem)
-        if not fn.startswith(vfs.realpath):
-            self.log("invalid mkdir %r %r" % (self.gctx, vpath), 1)
-            raise Pebkac(422)
 
         if not nullwrite:
             fdir = os.path.dirname(fn)
@@ -3473,6 +3482,7 @@ class HttpCli(object):
 
         fp = os.path.join(fp, fn)
         rem = "{}/{}".format(rp, fn).strip("/")
+        dbv, vrem = vfs.get_dbv(rem)
 
         if not rem.endswith(".md") and not self.can_delete:
             raise Pebkac(400, "only markdown pls")
@@ -3527,13 +3537,27 @@ class HttpCli(object):
             mdir, mfile = os.path.split(fp)
             fname, fext = mfile.rsplit(".", 1) if "." in mfile else (mfile, "md")
             mfile2 = "{}.{:.3f}.{}".format(fname, srv_lastmod, fext)
-            try:
+
+            dp = ""
+            hist_cfg = dbv.flags["md_hist"]
+            if hist_cfg == "v":
+                vrd = vsplit(vrem)[0]
+                zb = hashlib.sha512(afsenc(vrd)).digest()
+                zs = ub64enc(zb).decode("ascii")[:24].lower()
+                dp = "%s/md/%s/%s/%s" % (dbv.histpath, zs[:2], zs[2:4], zs)
+                self.log("moving old version to %s/%s" % (dp, mfile2))
+                if bos.makedirs(dp):
+                    with open(os.path.join(dp, "dir.txt"), "wb") as f:
+                        f.write(afsenc(vrd))
+            elif hist_cfg == "s":
                 dp = os.path.join(mdir, ".hist")
-                bos.mkdir(dp)
-                hidedir(dp)
-            except:
-                pass
-            wrename(self.log, fp, os.path.join(mdir, ".hist", mfile2), vfs.flags)
+                try:
+                    bos.mkdir(dp)
+                    hidedir(dp)
+                except:
+                    pass
+            if dp:
+                wrename(self.log, fp, os.path.join(dp, mfile2), vfs.flags)
 
         p_field, _, p_data = next(self.parser.gen)
         if p_field != "body":
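Note: with the new md_hist=v mode, old markdown revisions are kept under the volume's histpath instead of a .hist subfolder next to the file; the bucket folder is derived from a hash of the file's directory. A rough sketch of that path derivation, using base64.urlsafe_b64encode and utf-8 encoding as stand-ins for copyparty's ub64enc/afsenc helpers:

```python
import base64
import hashlib

def md_hist_dir(histpath, vrd):
    # hash the volume-relative directory and keep the first 24 chars
    # (lowercased urlsafe-b64) as a stable bucket name, split 2/2/full
    zb = hashlib.sha512(vrd.encode("utf-8", "replace")).digest()
    zs = base64.urlsafe_b64encode(zb).decode("ascii")[:24].lower()
    return "%s/md/%s/%s/%s" % (histpath, zs[:2], zs[2:4], zs)

print(md_hist_dir("/srv/vol/.hist", "notes/2025"))
```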
@@ -3605,13 +3629,12 @@ class HttpCli(object):
                 wunlink(self.log, fp, vfs.flags)
                 raise Pebkac(403, t)
 
-        vfs, rem = vfs.get_dbv(rem)
         self.conn.hsrv.broker.say(
             "up2k.hash_file",
-            vfs.realpath,
-            vfs.vpath,
-            vfs.flags,
-            vsplit(rem)[0],
+            dbv.realpath,
+            dbv.vpath,
+            dbv.flags,
+            vsplit(vrem)[0],
             fn,
             self.ip,
             new_lastmod,
@@ -3778,6 +3801,9 @@ class HttpCli(object):
             return "download-as-zip/tar is admin-only on this server"
         elif lvl <= 2 and self.uname in ("", "*"):
             return "you must be authenticated to download-as-zip/tar on this server"
+        elif self.args.ua_nozip and self.args.ua_nozip.search(self.ua):
+            t = "this URL contains no valuable information for bots/crawlers"
+            raise Pebkac(403, t)
         return ""
 
     def tx_res(self, req_path ) :
@@ -4835,7 +4861,7 @@ class HttpCli(object):
             self.reply(pt.encode("utf-8"), status=rc)
             return True
 
-        if "th" in self.ouparam:
+        if "th" in self.ouparam and str(self.ouparam["th"])[:1] in "jw":
             return self.tx_svg("e" + pt[:3])
 
         # most webdav clients will not send credentials until they
@@ -5762,7 +5788,13 @@ class HttpCli(object):
 
         thp = None
         if self.thumbcli and not nothumb:
-            thp = self.thumbcli.get(dbv, vrem, int(st.st_mtime), th_fmt)
+            try:
+                thp = self.thumbcli.get(dbv, vrem, int(st.st_mtime), th_fmt)
+            except Pebkac as ex:
+                if ex.code == 500 and th_fmt[:1] in "jw":
+                    self.log("failed to convert [%s]:\n%s" % (abspath, ex), 3)
+                    return self.tx_svg("--error--\ncheck\nserver\nlog")
+                raise
 
         if thp:
             return self.tx_file(thp)
@@ -5984,9 +6016,11 @@ class HttpCli(object):
         # check for old versions of files,
         # [num-backups, most-recent, hist-path]
         hist = {}
-        histdir = os.path.join(fsroot, ".hist")
-        ptn = RE_MDV
         try:
+            if vf["md_hist"] != "s":
+                raise Exception()
+            histdir = os.path.join(fsroot, ".hist")
+            ptn = RE_MDV
             for hfn in bos.listdir(histdir):
                 m = ptn.match(hfn)
                 if not m:
@@ -6257,6 +6291,10 @@ class HttpCli(object):
 
         doc = self.uparam.get("doc") if self.can_read else None
         if doc:
+            zp = self.args.ua_nodoc
+            if zp and zp.search(self.ua):
+                t = "this URL contains no valuable information for bots/crawlers"
+                raise Pebkac(403, t)
             j2a["docname"] = doc
             doctxt = None
             dfn = lnames.get(doc.lower())
copyparty/ico.py CHANGED
@@ -94,10 +94,21 @@ class Ico(object):
 <?xml version="1.0" encoding="UTF-8"?>
 <svg version="1.1" viewBox="0 0 100 {}" xmlns="http://www.w3.org/2000/svg"><g>
 <rect width="100%" height="100%" fill="#{}" />
-<text x="50%" y="50%" dominant-baseline="middle" text-anchor="middle" xml:space="preserve"
+<text x="50%" y="{}" dominant-baseline="middle" text-anchor="middle" xml:space="preserve"
  fill="#{}" font-family="monospace" font-size="14px" style="letter-spacing:.5px">{}</text>
 </g></svg>
 """
-        svg = svg.format(h, c[:6], c[6:], html_escape(ext, True))
+
+        txt = html_escape(ext, True)
+        if "\n" in txt:
+            lines = txt.split("\n")
+            n = len(lines)
+            y = "20%" if n == 2 else "10%" if n == 3 else "0"
+            zs = '<tspan x="50%%" dy="1.2em">%s</tspan>'
+            txt = "".join([zs % (x,) for x in lines])
+        else:
+            y = "50%"
+
+        svg = svg.format(h, c[:6], y, c[6:], txt)
 
         return "image/svg+xml", svg.encode("utf-8")
copyparty/mtag.py CHANGED
@@ -18,6 +18,7 @@ from .util import (
     REKOBO_LKEY,
     VF_CAREFUL,
     fsenc,
+    gzip,
     min_ex,
     pybin,
     retchk,
@@ -132,8 +133,6 @@ def au_unpk(
         fd, ret = tempfile.mkstemp("." + au)
 
         if pk == "gz":
-            import gzip
-
             fi = gzip.GzipFile(abspath, mode="rb")
 
         elif pk == "xz":
copyparty/pwhash.py CHANGED
@@ -15,7 +15,7 @@ try:
         raise Exception()
 
     HAVE_ARGON2 = True
-    from argon2 import __version__ as argon2ver
+    from argon2 import exceptions as argon2ex
 except:
     HAVE_ARGON2 = False
 