copyparty 1.16.7__py3-none-any.whl → 1.16.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
copyparty/__main__.py CHANGED
@@ -54,6 +54,8 @@ from .util import (
     RAM_TOTAL,
     SQLITE_VER,
     UNPLICATIONS,
+    URL_BUG,
+    URL_PRJ,
     Daemon,
     align_tab,
     ansi_re,
@@ -326,17 +328,16 @@ def ensure_webdeps() :
     if has_resource(E, "web/deps/mini-fa.woff"):
         return
 
-    warn(
-        """could not find webdeps;
+    t = """could not find webdeps;
 if you are running the sfx, or exe, or pypi package, or docker image,
 then this is a bug! Please let me know so I can fix it, thanks :-)
-https://github.com/9001/copyparty/issues/new?labels=bug&template=bug_report.md
+%s
 
 however, if you are a dev, or running copyparty from source, and you want
 full client functionality, you will need to build or obtain the webdeps:
-https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#building
+%s/blob/hovudstraum/docs/devnotes.md#building
 """
-    )
+    warn(t % (URL_BUG, URL_PRJ))
 
 
 def configure_ssl_ver(al ) :
@@ -731,6 +732,10 @@ def get_sects():
             the \033[33m,,\033[35m stops copyparty from reading the rest as flags and
             the \033[33m--\033[35m stops notify-send from reading the message as args
             and the alert will be "hey" followed by the messagetext
+
+            \033[36m--xau zmq:pub:tcp://*:5556\033[35m announces uploads on zeromq;
+            \033[36m--xau t3,zmq:push:tcp://*:5557\033[35m also works, and you can
+            \033[36m--xau t3,j,zmq:req:tcp://localhost:5555\033[35m too for example
             \033[0m
             each hook is executed once for each event, except for \033[36mxiu\033[0m
             which builds up a backlog of uploads, running the hook just once
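
A side note, not part of the package diff: the new --xau zmq:... help text above is the only documentation of the zeromq hooks in this changeset. Assuming copyparty is started with --xau zmq:pub:tcp://*:5556 and pyzmq is installed, a minimal subscriber that would receive those upload announcements could look like the sketch below; the payload format depends on the hook flags (plain text by default, JSON when the j flag is used).

# illustrative listener, not part of copyparty; requires `pip install pyzmq`
import zmq

ctx = zmq.Context.instance()
sub = ctx.socket(zmq.SUB)
sub.connect("tcp://127.0.0.1:5556")       # the PUB socket from --xau zmq:pub:tcp://*:5556
sub.setsockopt_string(zmq.SUBSCRIBE, "")  # no topic filter; receive every event

while True:
    print("upload event:", sub.recv_string())
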
@@ -1468,12 +1473,14 @@ def add_ui(ap, retry):
     ap2.add_argument("--txt-max", metavar="KiB", type=int, default=64, help="max size of embedded textfiles on ?doc= (anything bigger will be lazy-loaded by JS)")
     ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty @ --name", help="title / service-name to show in html documents")
     ap2.add_argument("--bname", metavar="TXT", type=u, default="--name", help="server name (displayed in filebrowser document title)")
-    ap2.add_argument("--pb-url", metavar="URL", type=u, default="https://github.com/9001/copyparty", help="powered-by link; disable with \033[33m-np\033[0m")
+    ap2.add_argument("--pb-url", metavar="URL", type=u, default=URL_PRJ, help="powered-by link; disable with \033[33m-np\033[0m")
     ap2.add_argument("--ver", action="store_true", help="show version on the control panel (incompatible with \033[33m-nb\033[0m)")
     ap2.add_argument("--k304", metavar="NUM", type=int, default=0, help="configure the option to enable/disable k304 on the controlpanel (workaround for buggy reverse-proxies); [\033[32m0\033[0m] = hidden and default-off, [\033[32m1\033[0m] = visible and default-off, [\033[32m2\033[0m] = visible and default-on")
     ap2.add_argument("--no304", metavar="NUM", type=int, default=0, help="configure the option to enable/disable no304 on the controlpanel (workaround for buggy caching in browsers); [\033[32m0\033[0m] = hidden and default-off, [\033[32m1\033[0m] = visible and default-off, [\033[32m2\033[0m] = visible and default-on")
-    ap2.add_argument("--md-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for README.md docs (volflag=md_sbf); see https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#attr-sandbox")
-    ap2.add_argument("--lg-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for prologue/epilogue docs (volflag=lg_sbf)")
+    ap2.add_argument("--md-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to allow in the iframe 'sandbox' attribute for README.md docs (volflag=md_sbf); see https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#sandbox")
+    ap2.add_argument("--lg-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to allow in the iframe 'sandbox' attribute for prologue/epilogue docs (volflag=lg_sbf)")
+    ap2.add_argument("--md-sba", metavar="TXT", type=u, default="", help="the value of the iframe 'allow' attribute for README.md docs, for example [\033[32mfullscreen\033[0m] (volflag=md_sba)")
+    ap2.add_argument("--lg-sba", metavar="TXT", type=u, default="", help="the value of the iframe 'allow' attribute for prologue/epilogue docs (volflag=lg_sba); see https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Permissions-Policy#iframes")
     ap2.add_argument("--no-sb-md", action="store_true", help="don't sandbox README/PREADME.md documents (volflags: no_sb_md | sb_md)")
     ap2.add_argument("--no-sb-lg", action="store_true", help="don't sandbox prologue/epilogue docs (volflags: no_sb_lg | sb_lg); enables non-js support")
 
copyparty/__version__.py CHANGED
@@ -1,8 +1,8 @@
 # coding: utf-8
 
-VERSION = (1, 16, 7)
+VERSION = (1, 16, 9)
 CODENAME = "COPYparty"
-BUILD_DT = (2024, 12, 23)
+BUILD_DT = (2025, 1, 22)
 
 S_VERSION = ".".join(map(str, VERSION))
 S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
copyparty/authsrv.py CHANGED
@@ -1825,7 +1825,11 @@ class AuthSrv(object):
             if fka and not fk:
                 fk = fka
             if fk:
-                vol.flags["fk"] = int(fk) if fk is not True else 8
+                fk = 8 if fk is True else int(fk)
+                if fk > 72:
+                    t = "max filekey-length is 72; volume /%s specified %d (anything higher than 16 is pointless btw)"
+                    raise Exception(t % (vol.vpath, fk))
+                vol.flags["fk"] = fk
                 have_fk = True
 
             dk = vol.flags.get("dk")
@@ -2332,6 +2336,7 @@ class AuthSrv(object):
             "frand": bool(vf.get("rand")),
             "lifetime": vf.get("lifetime") or 0,
             "unlist": vf.get("unlist") or "",
+            "sb_lg": "" if "no_sb_lg" in vf else (vf.get("lg_sbf") or "y"),
         }
         js_htm = {
             "s_name": self.args.bname,
@@ -2344,6 +2349,8 @@ class AuthSrv(object):
             "have_unpost": int(self.args.unpost),
             "have_emp": self.args.emp,
             "sb_md": "" if "no_sb_md" in vf else (vf.get("md_sbf") or "y"),
+            "sba_md": vf.get("md_sba") or "",
+            "sba_lg": vf.get("lg_sba") or "",
             "txt_ext": self.args.textfiles.replace(",", " "),
             "def_hcols": list(vf.get("mth") or []),
             "unlist0": vf.get("unlist") or "",
copyparty/cfg.py CHANGED
@@ -74,6 +74,8 @@ def vf_vmap() :
         "html_head",
         "lg_sbf",
         "md_sbf",
+        "lg_sba",
+        "md_sba",
         "nrand",
         "og_desc",
         "og_site",
@@ -144,6 +146,7 @@ flagcats = {
         "noclone": "take dupe data from clients, even if available on HDD",
         "nodupe": "rejects existing files (instead of linking/cloning them)",
         "sparse": "force use of sparse files, mainly for s3-backed storage",
+        "nosparse": "deny use of sparse files, mainly for slow storage",
         "daw": "enable full WebDAV write support (dangerous);\nPUT-operations will now \033[1;31mOVERWRITE\033[0;35m existing files",
         "nosub": "forces all uploads into the top folder of the vfs",
         "magic": "enables filetype detection for nameless uploads",
@@ -240,6 +243,8 @@ flagcats = {
         "sb_lg": "enable js sandbox for prologue/epilogue (default)",
         "md_sbf": "list of markdown-sandbox safeguards to disable",
         "lg_sbf": "list of *logue-sandbox safeguards to disable",
+        "md_sba": "value of iframe allow-prop for markdown-sandbox",
+        "lg_sba": "value of iframe allow-prop for *logue-sandbox",
         "nohtml": "return html and markdown as text/html",
     },
     "others": {
copyparty/dxml.py CHANGED
@@ -1,9 +1,16 @@
+# coding: utf-8
+from __future__ import print_function, unicode_literals
+
 import importlib
 import sys
 import xml.etree.ElementTree as ET
 
 from .__init__ import PY2
 
+class BadXML(Exception):
+    pass
+
+
 def get_ET() :
     pn = "xml.etree.ElementTree"
     cn = "_elementtree"
@@ -30,7 +37,7 @@ def get_ET() :
 XMLParser = get_ET()
 
 
-class DXMLParser(XMLParser):  # type: ignore
+class _DXMLParser(XMLParser):  # type: ignore
     def __init__(self) :
         tb = ET.TreeBuilder()
         super(DXMLParser, self).__init__(target=tb)
@@ -45,8 +52,12 @@ class DXMLParser(XMLParser): # type: ignore
         raise BadXML("{}, {}".format(a, ka))
 
 
-class BadXML(Exception):
-    pass
+class _NG(XMLParser):  # type: ignore
+    def __int__(self) :
+        raise BadXML("dxml selftest failed")
+
+
+DXMLParser = _DXMLParser
 
 
 def parse_xml(txt ) :
@@ -55,6 +66,40 @@ def parse_xml(txt ) :
     return parser.close()  # type: ignore
 
 
+def selftest() :
+    qbe = r"""<!DOCTYPE d [
+    <!ENTITY a "nice_bakuretsu">
+    ]>
+    <root>&a;&a;&a;</root>"""
+
+    emb = r"""<!DOCTYPE d [
+    <!ENTITY a SYSTEM "file:///etc/hostname">
+    ]>
+    <root>&a;</root>"""
+
+    # future-proofing; there's never been any known vulns
+    # regarding DTDs and ET.XMLParser, but might as well
+    # block them since webdav-clients don't use them
+    dtd = r"""<!DOCTYPE d SYSTEM "a.dtd">
+    <root>a</root>"""
+
+    for txt in (qbe, emb, dtd):
+        try:
+            parse_xml(txt)
+            t = "WARNING: dxml selftest failed:\n%s\n"
+            print(t % (txt,), file=sys.stderr)
+            return False
+        except BadXML:
+            pass
+
+    return True
+
+
+DXML_OK = selftest()
+if not DXML_OK:
+    DXMLParser = _NG
+
+
 def mktnod(name , text ) :
     el = ET.Element(name)
     el.text = text
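
A usage note, not part of the package diff: the public surface of the module is unchanged, so callers keep using parse_xml and catching BadXML; the difference is that the parser behind them refuses DOCTYPE/entity declarations, which is exactly what the new selftest verifies. An illustrative snippet, assuming the wheel is installed:

# illustrative only; parse_xml and BadXML are the names defined in the hunks above
from copyparty.dxml import BadXML, parse_xml

root = parse_xml("<propfind><prop/></propfind>")  # plain XML still parses
print(root.tag)                                   # -> "propfind"

try:
    parse_xml('<!DOCTYPE d [<!ENTITY x "boom">]><d>&x;</d>')  # DTD payload
except BadXML:
    print("rejected, as the selftest expects")
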
copyparty/httpcli.py CHANGED
@@ -128,6 +128,8 @@ NO_CACHE = {"Cache-Control": "no-cache"}
 
 ALL_COOKIES = "k304 no304 js idxh dots cppwd cppws".split()
 
+BADXFF = " due to dangerous misconfiguration (the http-header specified by --xff-hdr was received from an untrusted reverse-proxy)"
+
 H_CONN_KEEPALIVE = "Connection: Keep-Alive"
 H_CONN_CLOSE = "Connection: Close"
 
@@ -157,6 +159,8 @@ class HttpCli(object):
 
     def __init__(self, conn ) :
 
+        empty_stringlist = []
+
         self.t0 = time.time()
         self.conn = conn
         self.u2mutex = conn.u2mutex  # mypy404
@@ -202,9 +206,7 @@ class HttpCli(object):
         self.trailing_slash = True
         self.uname = " "
         self.pw = " "
-        self.rvol = [" "]
-        self.wvol = [" "]
-        self.avol = [" "]
+        self.rvol = self.wvol = self.avol = empty_stringlist
         self.do_log = True
         self.can_read = False
         self.can_write = False
@@ -385,6 +387,7 @@ class HttpCli(object):
                 ) + "0.0/16"
                 zs2 = ' or "--xff-src=lan"' if self.conn.xff_lan.map(pip) else ""
                 self.log(t % (self.args.xff_hdr, pip, cli_ip, zso, zs, zs2), 3)
+                self.bad_xff = True
             else:
                 self.ip = cli_ip
                 self.is_vproxied = bool(self.args.R)
@@ -505,7 +508,7 @@ class HttpCli(object):
             return False
 
         if "k" in uparam:
-            m = RE_K.search(uparam["k"])
+            m = re_k.search(uparam["k"])
             if m:
                 zs = uparam["k"]
                 t = "malicious user; illegal filekey; req(%r) k(%r) => %r"
@@ -1889,7 +1892,7 @@ class HttpCli(object):
             return self.handle_stash(False)
 
         if "save" in opt:
-            post_sz, _, _, _, path, _ = self.dump_to_file(False)
+            post_sz, _, _, _, _, path, _ = self.dump_to_file(False)
             self.log("urlform: %d bytes, %r" % (post_sz, path))
         elif "print" in opt:
             reader, _ = self.get_body_reader()
@@ -1970,11 +1973,11 @@ class HttpCli(object):
         else:
             return read_socket(self.sr, bufsz, remains), remains
 
-    def dump_to_file(self, is_put ) :
-        # post_sz, sha_hex, sha_b64, remains, path, url
+    def dump_to_file(self, is_put ) :
+        # post_sz, halg, sha_hex, sha_b64, remains, path, url
         reader, remains = self.get_body_reader()
         vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
-        rnd, _, lifetime, xbu, xau = self.upload_flags(vfs)
+        rnd, lifetime, xbu, xau = self.upload_flags(vfs)
         lim = vfs.get_dbv(rem)[0].lim
         fdir = vfs.canonical(rem)
         if lim:
@@ -2122,12 +2125,14 @@ class HttpCli(object):
             # small toctou, but better than clobbering a hardlink
             wunlink(self.log, path, vfs.flags)
 
+        halg = "sha512"
         hasher = None
         copier = hashcopy
         if "ck" in self.ouparam or "ck" in self.headers:
-            zs = self.ouparam.get("ck") or self.headers.get("ck") or ""
+            halg = zs = self.ouparam.get("ck") or self.headers.get("ck") or ""
            if not zs or zs == "no":
                 copier = justcopy
+                halg = ""
             elif zs == "md5":
                 hasher = hashlib.md5(**USED4SEC)
             elif zs == "sha1":
@@ -2161,7 +2166,7 @@ class HttpCli(object):
                 raise
 
         if self.args.nw:
-            return post_sz, sha_hex, sha_b64, remains, path, ""
+            return post_sz, halg, sha_hex, sha_b64, remains, path, ""
 
         at = mt = time.time() - lifetime
         cli_mt = self.headers.get("x-oc-mtime")
@@ -2272,19 +2277,30 @@ class HttpCli(object):
             self.args.RS + vpath + vsuf,
         )
 
-        return post_sz, sha_hex, sha_b64, remains, path, url
+        return post_sz, halg, sha_hex, sha_b64, remains, path, url
 
     def handle_stash(self, is_put ) :
-        post_sz, sha_hex, sha_b64, remains, path, url = self.dump_to_file(is_put)
+        post_sz, halg, sha_hex, sha_b64, remains, path, url = self.dump_to_file(is_put)
         spd = self._spd(post_sz)
         t = "%s wrote %d/%d bytes to %r # %s"
         self.log(t % (spd, post_sz, remains, path, sha_b64[:28]))  # 21
 
-        ac = self.uparam.get(
-            "want", self.headers.get("accept", "").lower().split(";")[-1]
-        )
+        mime = "text/plain; charset=utf-8"
+        ac = self.uparam.get("want") or self.headers.get("accept") or ""
+        if ac:
+            ac = ac.split(";", 1)[0].lower()
+        if ac == "application/json":
+            ac = "json"
         if ac == "url":
             t = url
+        elif ac == "json" or "j" in self.uparam:
+            jmsg = {"fileurl": url, "filesz": post_sz}
+            if halg:
+                jmsg[halg] = sha_hex[:56]
+                jmsg["sha_b64"] = sha_b64
+
+            mime = "application/json"
+            t = json.dumps(jmsg, indent=2, sort_keys=True)
         else:
             t = "{}\n{}\n{}\n{}\n".format(post_sz, sha_b64, sha_hex[:56], url)
 
@@ -2294,7 +2310,7 @@ class HttpCli(object):
             h["X-OC-MTime"] = "accepted"
             t = ""  # some webdav clients expect/prefer this
 
-        self.reply(t.encode("utf-8"), 201, headers=h)
+        self.reply(t.encode("utf-8", "replace"), 201, mime=mime, headers=h)
         return True
 
     def bakflip(
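
A usage note, not part of the package diff: with the handle_stash changes above, a plain PUT upload can now request a JSON reply via ?want=json, ?j, or an Accept: application/json header, instead of the plain-text four-liner. A hypothetical client sketch, assuming a local copyparty on its default port 3923 with anonymous write access at /:

# illustrative; the response fields (fileurl, filesz, sha512, sha_b64) mirror the jmsg dict above
import json
from urllib.request import Request, urlopen

req = Request(
    "http://127.0.0.1:3923/hello.txt?want=json",
    data=b"hello world\n",
    method="PUT",
)
with urlopen(req) as r:
    reply = json.loads(r.read().decode("utf-8"))

print(reply["fileurl"], reply["filesz"])  # sha512 is the default checksum name (halg)
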
@@ -2967,7 +2983,7 @@ class HttpCli(object):
         self.redirect(vpath, "?edit")
         return True
 
-    def upload_flags(self, vfs ) :
+    def upload_flags(self, vfs ) :
         if self.args.nw:
             rnd = 0
         else:
@@ -2975,10 +2991,6 @@ class HttpCli(object):
             if vfs.flags.get("rand"):  # force-enable
                 rnd = max(rnd, vfs.flags["nrand"])
 
-        ac = self.uparam.get(
-            "want", self.headers.get("accept", "").lower().split(";")[-1]
-        )
-        want_url = ac == "url"
         zs = self.uparam.get("life", self.headers.get("life", ""))
         if zs:
             vlife = vfs.flags.get("lifetime") or 0
@@ -2988,7 +3000,6 @@ class HttpCli(object):
 
         return (
             rnd,
-            want_url,
             lifetime,
             vfs.flags.get("xbu") or [],
             vfs.flags.get("xau") or [],
@@ -3041,7 +3052,14 @@ class HttpCli(object):
         if not nullwrite:
             bos.makedirs(fdir_base)
 
-        rnd, want_url, lifetime, xbu, xau = self.upload_flags(vfs)
+        rnd, lifetime, xbu, xau = self.upload_flags(vfs)
+        zs = self.uparam.get("want") or self.headers.get("accept") or ""
+        if zs:
+            zs = zs.split(";", 1)[0].lower()
+        if zs == "application/json":
+            zs = "json"
+        want_url = zs == "url"
+        want_json = zs == "json" or "j" in self.uparam
 
         files = []
         # sz, sha_hex, sha_b64, p_file, fname, abspath
@@ -3363,7 +3381,9 @@ class HttpCli(object):
                 msg += "\n" + errmsg
 
             self.reply(msg.encode("utf-8", "replace"), status=sc)
-        elif "j" in self.uparam:
+        elif want_json:
+            if len(jmsg["files"]) == 1:
+                jmsg["fileurl"] = jmsg["files"][0]["url"]
             jtxt = json.dumps(jmsg, indent=2, sort_keys=True).encode("utf-8", "replace")
             self.reply(jtxt, mime="application/json", status=sc)
         else:
@@ -4317,7 +4337,7 @@ class HttpCli(object):
             self.log,
             self.asrv,
             fgen,
-            utf8="utf" in uarg,
+            utf8="utf" in uarg or not uarg,
             pre_crc="crc" in uarg,
             cmp=uarg if cancmp or uarg == "pax" else "",
         )
@@ -4531,12 +4551,12 @@ class HttpCli(object):
             else self.conn.hsrv.nm.map(self.ip) or host
         )
         # safer than html_escape/quotep since this avoids both XSS and shell-stuff
-        pw = re.sub(r"[<>&$?`\"']", "_", self.pw or "pw")
+        pw = re.sub(r"[<>&$?`\"']", "_", self.pw or "hunter2")
         vp = re.sub(r"[<>&$?`\"']", "_", self.uparam["hc"] or "").lstrip("/")
         pw = pw.replace(" ", "%20")
         vp = vp.replace(" ", "%20")
         if pw in self.asrv.sesa:
-            pw = "pwd"
+            pw = "hunter2"
 
         html = self.j2s(
             "svcs",
@@ -4965,8 +4985,16 @@ class HttpCli(object):
             and (self.uname in vol.axs.uread or self.uname in vol.axs.upget)
         }
 
+        bad_xff = hasattr(self, "bad_xff")
+        if bad_xff:
+            allvols = []
+            t = "will not return list of recent uploads" + BADXFF
+            self.log(t, 1)
+            if self.avol:
+                raise Pebkac(500, t)
+
         x = self.conn.hsrv.broker.ask(
-            "up2k.get_unfinished_by_user", self.uname, self.ip
+            "up2k.get_unfinished_by_user", self.uname, "" if bad_xff else self.ip
         )
         uret = x.get()
 
@@ -5360,12 +5388,16 @@ class HttpCli(object):
         if self.args.no_del:
             raise Pebkac(403, "the delete feature is disabled in server config")
 
+        unpost = "unpost" in self.uparam
+        if unpost and hasattr(self, "bad_xff"):
+            self.log("unpost was denied" + BADXFF, 1)
+            raise Pebkac(403, "the delete feature is disabled in server config")
+
         if not req:
             req = [self.vpath]
         elif self.is_vproxied:
             req = [x[len(self.args.SR) :] for x in req]
 
-        unpost = "unpost" in self.uparam
         nlim = int(self.uparam.get("lim") or 0)
         lim = [nlim, nlim] if nlim else []
 
@@ -5765,7 +5797,7 @@ class HttpCli(object):
             "taglist": [],
             "have_tags_idx": int(e2t),
             "have_b_u": (self.can_write and self.uparam.get("b") == "u"),
-            "sb_lg": "" if "no_sb_lg" in vf else (vf.get("lg_sbf") or "y"),
+            "sb_lg": vn.js_ls["sb_lg"],
             "url_suf": url_suf,
             "title": html_escape("%s %s" % (self.args.bname, self.vpath), crlf=True),
             "srv_info": srv_infot,
copyparty/svchub.py CHANGED
@@ -44,6 +44,8 @@ from .util import (
     FFMPEG_URL,
     HAVE_PSUTIL,
     HAVE_SQLITE3,
+    HAVE_ZMQ,
+    URL_BUG,
     UTC,
     VERSIONS,
     Daemon,
@@ -54,6 +56,7 @@ from .util import (
     alltrace,
     ansi_re,
     build_netmap,
+    expat_ver,
     load_ipu,
     min_ex,
     mp,
@@ -629,6 +632,7 @@ class SvcHub(object):
             (HAVE_FFPROBE, "ffprobe", t_ff + ", read audio/media tags"),
             (HAVE_MUTAGEN, "mutagen", "read audio tags (ffprobe is better but slower)"),
             (HAVE_ARGON2, "argon2", "secure password hashing (advanced users only)"),
+            (HAVE_ZMQ, "pyzmq", "send zeromq messages from event-hooks"),
             (HAVE_HEIF, "pillow-heif", "read .heif images with pillow (rarely useful)"),
             (HAVE_AVIF, "pillow-avif", "read .avif images with pillow (rarely useful)"),
         ]
@@ -685,6 +689,15 @@ class SvcHub(object):
         if self.args.bauth_last:
             self.log("root", "WARNING: ignoring --bauth-last due to --no-bauth", 3)
 
+        if not self.args.no_dav:
+            from .dxml import DXML_OK
+
+            if not DXML_OK:
+                if not self.args.no_dav:
+                    self.args.no_dav = True
+                t = "WARNING:\nDisabling WebDAV support because dxml selftest failed. Please report this bug;\n%s\n...and include the following information in the bug-report:\n%s | expat %s\n"
+                self.log("root", t % (URL_BUG, VERSIONS, expat_ver()), 1)
+
     def _process_config(self) :
         al = self.args
 
copyparty/up2k.py CHANGED
@@ -790,7 +790,7 @@ class Up2k(object):
                 continue
 
             self.log("xiu: %d# %r" % (len(wrfs), cmd))
-            runihook(self.log, cmd, vol, ups)
+            runihook(self.log, self.args.hook_v, cmd, vol, ups)
 
     def _vis_job_progress(self, job ) :
         perc = 100 - (len(job["need"]) * 100.0 / (len(job["hash"]) or 1))
@@ -851,7 +851,7 @@ class Up2k(object):
         self.iacct = self.asrv.iacct
         self.grps = self.asrv.grps
 
-        have_e2d = self.args.idp_h_usr
+        have_e2d = self.args.idp_h_usr or self.args.chpw or self.args.shr
         vols = list(all_vols.values())
         t0 = time.time()
 
@@ -1112,6 +1112,7 @@ class Up2k(object):
         reg = {}
         drp = None
         emptylist = []
+        dotpart = "." if self.args.dotpart else ""
         snap = os.path.join(histpath, "up2k.snap")
         if bos.path.exists(snap):
             with gzip.GzipFile(snap, "rb") as f:
@@ -1124,6 +1125,8 @@ class Up2k(object):
             except:
                 pass
 
+            reg = reg2  # diff-golf
+
             if reg2 and "dwrk" not in reg2[next(iter(reg2))]:
                 for job in reg2.values():
                     job["dwrk"] = job["wark"]
@@ -1131,7 +1134,8 @@ class Up2k(object):
             rm = []
             for k, job in reg2.items():
                 job["ptop"] = ptop
-                if "done" in job:
+                is_done = "done" in job
+                if is_done:
                     job["need"] = job["hash"] = emptylist
                 else:
                     if "need" not in job:
@@ -1139,10 +1143,13 @@ class Up2k(object):
                     if "hash" not in job:
                         job["hash"] = []
 
-                fp = djoin(ptop, job["prel"], job["name"])
+                if is_done:
+                    fp = djoin(ptop, job["prel"], job["name"])
+                else:
+                    fp = djoin(ptop, job["prel"], dotpart + job["name"] + ".PARTIAL")
+
                 if bos.path.exists(fp):
-                    reg[k] = job
-                    if "done" in job:
+                    if is_done:
                         continue
                     job["poke"] = time.time()
                     job["busy"] = {}
@@ -1150,11 +1157,18 @@ class Up2k(object):
                     self.log("ign deleted file in snap: %r" % (fp,))
                     if not n4g:
                         rm.append(k)
-                    continue
 
             for x in rm:
                 del reg2[x]
 
+            # optimize pre-1.15.4 entries
+            if next((x for x in reg.values() if "done" in x and "poke" in x), None):
+                zsl = "host tnam busy sprs poke t0c".split()
+                for job in reg.values():
+                    if "done" in job:
+                        for k in zsl:
+                            job.pop(k, None)
+
             if drp is None:
                 drp = [k for k, v in reg.items() if not v["need"]]
             else:
@@ -2989,7 +3003,7 @@ class Up2k(object):
         if wark in reg:
             del reg[wark]
             job["hash"] = job["need"] = []
-            job["done"] = True
+            job["done"] = 1
             job["busy"] = {}
 
         if lost:
@@ -4867,7 +4881,8 @@ class Up2k(object):
         except:
             pass
 
-        xbu = self.flags[job["ptop"]].get("xbu")
+        vf = self.flags[job["ptop"]]
+        xbu = vf.get("xbu")
         ap_chk = djoin(pdir, job["name"])
         vp_chk = djoin(job["vtop"], job["prel"], job["name"])
         if xbu:
@@ -4897,7 +4912,7 @@ class Up2k(object):
                 if x:
                     zvfs = vfs
                     pdir, _, job["name"], (vfs, rem) = x
-                    job["vcfg"] = vfs.flags
+                    job["vcfg"] = vf = vfs.flags
                     job["ptop"] = vfs.realpath
                     job["vtop"] = vfs.vpath
                     job["prel"] = rem
@@ -4947,8 +4962,13 @@ class Up2k(object):
             fs = self.fstab.get(pdir)
             if fs == "ok":
                 pass
-            elif "sparse" in self.flags[job["ptop"]]:
-                t = "volflag 'sparse' is forcing use of sparse files for uploads to [%s]"
+            elif "nosparse" in vf:
+                t = "volflag 'nosparse' is preventing creation of sparse files for uploads to [%s]"
+                self.log(t % (job["ptop"],))
+                relabel = True
+                sprs = False
+            elif "sparse" in vf:
+                t = "volflag 'sparse' is forcing creation of sparse files for uploads to [%s]"
                 self.log(t % (job["ptop"],))
                 relabel = True
             else: