copyparty 1.12.2__py3-none-any.whl → 1.13.1__py3-none-any.whl

This diff covers the changes between two publicly released versions of the package, as published to their respective public registries, and is provided for informational purposes only.
copyparty/httpcli.py CHANGED
@@ -36,6 +36,7 @@ from .bos import bos
 from .star import StreamTar
 from .sutil import StreamArc, gfilter
 from .szip import StreamZip
+from .up2k import up2k_chunksize
 from .util import unquote  # type: ignore
 from .util import (
     APPLESAN_RE,
@@ -83,6 +84,9 @@ from .util import (
     sanitize_vpath,
     sendfile_kern,
     sendfile_py,
+    ub64dec,
+    ub64enc,
+    ujoin,
     undot,
     unescape_cookie,
     unquotep,
@@ -123,6 +127,7 @@ class HttpCli(object):
         self.ico = conn.ico  # mypy404
         self.thumbcli = conn.thumbcli  # mypy404
         self.u2fh = conn.u2fh  # mypy404
+        self.pipes = conn.pipes  # mypy404
         self.log_func = conn.log_func  # mypy404
         self.log_src = conn.log_src  # mypy404
         self.gen_fk = self._gen_fk if self.args.log_fk else gen_filekey
@@ -211,6 +216,13 @@ class HttpCli(object):
         ka["favico"] = self.args.favico
         ka["s_name"] = self.args.bname
         ka["s_doctitle"] = self.args.doctitle
+        ka["tcolor"] = self.vn.flags["tcolor"]
+
+        zso = self.vn.flags.get("html_head")
+        if zso:
+            ka["this"] = self
+            self._build_html_head(zso, ka)
+
         ka["html_head"] = self.html_head
         return tpl.render(**ka)  # type: ignore

@@ -358,6 +370,21 @@ class HttpCli(object):
         if "&" in self.req and "?" not in self.req:
             self.hint = "did you mean '?' instead of '&'"

+        if self.args.uqe and "/.uqe/" in self.req:
+            try:
+                vpath, query = self.req.split("?")[0].split("/.uqe/")
+                query = query.split("/")[0]  # discard trailing junk
+                # (usually a "filename" to trick discord into behaving)
+                query = ub64dec(query.encode("utf-8")).decode("utf-8", "replace")
+                if query.startswith("/"):
+                    self.req = "%s/?%s" % (vpath, query[1:])
+                else:
+                    self.req = "%s?%s" % (vpath, query)
+            except Exception as ex:
+                t = "bad uqe in request [%s]: %r" % (self.req, ex)
+                self.loud_reply(t, status=400)
+                return False
+
         # split req into vpath + uparam
         uparam = {}
         if "?" not in self.req:
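
The `/.uqe/` hunk above lets a query string ride inside the path: the segment after `/.uqe/` is URL-safe base64 of the real query, and anything after the next `/` is a decoy filename so link scrapers (Discord in particular) treat the URL as a plain file. A rough sketch of building and decoding such a URL, assuming copyparty's `ub64enc`/`ub64dec` helpers behave like ordinary URL-safe base64 (padding details may differ):

    import base64

    def uqe_url(vpath, query, fake_name="a.jpg"):
        # hide "th=j&cache" in the path; the trailing fake_name only exists
        # to give scrapers a file extension to latch onto
        b64 = base64.urlsafe_b64encode(query.encode("utf-8")).decode("utf-8")
        return "%s/.uqe/%s/%s" % (vpath.rstrip("/"), b64, fake_name)

    def uqe_decode(req):
        # mirror of the server-side hunk: split off the encoded query,
        # drop the decoy filename, and restore a normal "?query" request
        vpath, enc = req.split("?")[0].split("/.uqe/")
        enc = enc.split("/")[0]
        query = base64.urlsafe_b64decode(enc).decode("utf-8", "replace")
        return "%s?%s" % (vpath, query)

    url = uqe_url("/pics/cat.webp", "th=j&cache")
    print(url)              # /pics/cat.webp/.uqe/dGg9aiZjYWNoZQ==/a.jpg
    print(uqe_decode(url))  # /pics/cat.webp?th=j&cache
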
@@ -424,7 +451,8 @@ class HttpCli(object):
             cookie_pw = ""

         if len(uparam) > 10 or len(cookies) > 50:
-            raise Pebkac(400, "u wot m8")
+            self.loud_reply("u wot m8", status=400)
+            return False

         self.uparam = uparam
         self.cookies = cookies
@@ -712,6 +740,31 @@ class HttpCli(object):
             or ("; Trident/" in self.ua and not k304)
         )

+    def _build_html_head(self, maybe_html , kv ) :
+        html = str(maybe_html)
+        is_jinja = html[:2] in "%@%"
+        if is_jinja:
+            html = html.replace("%", "", 1)
+
+        if html.startswith("@"):
+            with open(html[1:], "rb") as f:
+                html = f.read().decode("utf-8")
+
+        if html.startswith("%"):
+            html = html[1:]
+            is_jinja = True
+
+        if is_jinja:
+            print("applying jinja")
+            with self.conn.hsrv.mutex:
+                if html not in self.conn.hsrv.j2:
+                    j2env = jinja2.Environment()
+                    tpl = j2env.from_string(html)
+                    self.conn.hsrv.j2[html] = tpl
+                html = self.conn.hsrv.j2[html].render(**kv)
+
+        self.html_head += html + "\n"
+
     def send_headers(
         self,
         length ,
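
The new `_build_html_head` helper extends the `html_head` volflag: the value can be literal HTML, `@/path/to/file` to read the snippet from disk, or `%`-prefixed to render it as a Jinja2 template (with the request handler exposed as `this`); compiled templates are cached in `hsrv.j2` under the server mutex. A minimal sketch of the same resolution order, using plain jinja2 and a hypothetical cache dict:

    import jinja2

    _tpl_cache = {}  # stand-in for hsrv.j2

    def build_head(value, ctx):
        # resolve an html_head-style value:
        #   "%..."   -> render inline jinja
        #   "@file"  -> read the file; if its content starts with "%", render it
        #   "%@file" -> read the file and render it
        is_jinja = value.startswith("%")
        if is_jinja:
            value = value[1:]
        if value.startswith("@"):
            with open(value[1:], "rb") as f:
                value = f.read().decode("utf-8")
            if value.startswith("%"):
                value = value[1:]
                is_jinja = True
        if is_jinja:
            if value not in _tpl_cache:
                _tpl_cache[value] = jinja2.Environment().from_string(value)
            value = _tpl_cache[value].render(**ctx)
        return value + "\n"

    print(build_head('%<meta name="generator" content="{{ name }}">', {"name": "copyparty"}))

So a volume could set, for example, `html_head=%@/srv/banner.html` (hypothetical path) to inject a templated banner into every rendered page.
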
@@ -2245,6 +2298,10 @@ class HttpCli(object):
     def handle_login(self) :
         assert self.parser
         pwd = self.parser.require("cppwd", 64)
+        try:
+            uhash = self.parser.require("uhash", 256)
+        except:
+            uhash = ""
         self.parser.drop()

         self.out_headerlist = [
@@ -2257,6 +2314,11 @@ class HttpCli(object):

         dst += self.ourlq()

+        uhash = uhash.lstrip("#")
+        if uhash not in ("", "-"):
+            dst += "&" if "?" in dst else "?"
+            dst += "_=1#" + html_escape(uhash, True, True)
+
         msg = self.get_pwd_cookie(pwd)
         html = self.j2s("msg", h1=msg, h2='<a href="' + dst + '">ack</a>', redir=dst)
         self.reply(html.encode("utf-8"))
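
`handle_login` now also reads an optional `uhash` field: browsers never send the `#fragment` part of a URL to the server, so the login form passes it along explicitly and the post-login redirect re-attaches it as `?_=1#<hash>` (the dummy `_=1` parameter just guarantees there is a query string for the fragment to follow). A small sketch of that redirect construction, using stdlib `html.escape` in place of copyparty's `html_escape`:

    from html import escape  # stand-in for html_escape(zs, True, True)

    def login_redirect(dst, uhash):
        # re-attach the client-side location hash to the post-login redirect
        uhash = uhash.lstrip("#")
        if uhash in ("", "-"):
            return dst
        dst += "&" if "?" in dst else "?"
        return dst + "_=1#" + escape(uhash, quote=True)

    print(login_redirect("/music/", "#af-track12"))  # /music/?_=1#af-track12
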
@@ -2925,17 +2987,42 @@ class HttpCli(object):

         return txt

-    def tx_file(self, req_path ) :
+    def tx_file(self, req_path , ptop = None) :
         status = 200
         logmsg = "{:4} {} ".format("", self.req)
         logtail = ""

+        if ptop is not None:
+            try:
+                dp, fn = os.path.split(req_path)
+                tnam = fn + ".PARTIAL"
+                if self.args.dotpart:
+                    tnam = "." + tnam
+                ap_data = os.path.join(dp, tnam)
+                st_data = bos.stat(ap_data)
+                if not st_data.st_size:
+                    raise Exception("partial is empty")
+                x = self.conn.hsrv.broker.ask("up2k.find_job_by_ap", ptop, req_path)
+                job = json.loads(x.get())
+                if not job:
+                    raise Exception("not found in registry")
+                self.pipes.set(req_path, job)
+            except Exception as ex:
+                self.log("will not pipe [%s]; %s" % (ap_data, ex), 6)
+                ptop = None
+
         #
         # if request is for foo.js, check if we have foo.js.gz

         file_ts = 0.0
         editions = {}
         for ext in ("", ".gz"):
+            if ptop is not None:
+                sz = job["size"]
+                file_ts = job["lmod"]
+                editions["plain"] = (ap_data, sz)
+                break
+
             try:
                 fs_path = req_path + ext
                 st = bos.stat(fs_path)
@@ -3092,6 +3179,11 @@ class HttpCli(object):
             self.send_headers(length=upper - lower, status=status, mime=mime)
             return True

+        if ptop is not None:
+            return self.tx_pipe(
+                ptop, req_path, ap_data, job, lower, upper, status, mime, logmsg
+            )
+
         ret = True
         with open_func(*open_args) as f:
             self.send_headers(length=upper - lower, status=status, mime=mime)
@@ -3111,6 +3203,143 @@ class HttpCli(object):

         return ret

+    def tx_pipe(
+        self,
+        ptop ,
+        req_path ,
+        ap_data ,
+        job ,
+        lower ,
+        upper ,
+        status ,
+        mime ,
+        logmsg ,
+    ) :
+        M = 1048576
+        self.send_headers(length=upper - lower, status=status, mime=mime)
+        wr_slp = self.args.s_wr_slp
+        wr_sz = self.args.s_wr_sz
+        file_size = job["size"]
+        chunk_size = up2k_chunksize(file_size)
+        num_need = -1
+        data_end = 0
+        remains = upper - lower
+        broken = False
+        spins = 0
+        tier = 0
+        tiers = ["uncapped", "reduced speed", "one byte per sec"]
+
+        while lower < upper and not broken:
+            with self.u2mutex:
+                job = self.pipes.get(req_path)
+                if not job:
+                    x = self.conn.hsrv.broker.ask("up2k.find_job_by_ap", ptop, req_path)
+                    job = json.loads(x.get())
+                    if job:
+                        self.pipes.set(req_path, job)
+
+            if not job:
+                t = "pipe: OK, upload has finished; yeeting remainder"
+                self.log(t, 2)
+                data_end = file_size
+                break
+
+            if num_need != len(job["need"]) and data_end - lower < 8 * M:
+                num_need = len(job["need"])
+                data_end = 0
+                for cid in job["hash"]:
+                    if cid in job["need"]:
+                        break
+                    data_end += chunk_size
+                t = "pipe: can stream %.2f MiB; requested range is %.2f to %.2f"
+                self.log(t % (data_end / M, lower / M, upper / M), 6)
+                with self.u2mutex:
+                    if data_end > self.u2fh.aps.get(ap_data, data_end):
+                        try:
+                            fhs = self.u2fh.cache[ap_data].all_fhs
+                            for fh in fhs:
+                                fh.flush()
+                            self.u2fh.aps[ap_data] = data_end
+                            self.log("pipe: flushed %d up2k-FDs" % (len(fhs),))
+                        except Exception as ex:
+                            self.log("pipe: u2fh flush failed: %r" % (ex,))

+            if lower >= data_end:
+                if data_end:
+                    t = "pipe: uploader is too slow; aborting download at %.2f MiB"
+                    self.log(t % (data_end / M))
+                    raise Pebkac(416, "uploader is too slow")
+
+                raise Pebkac(416, "no data available yet; please retry in a bit")
+
+            slack = data_end - lower
+            if slack >= 8 * M:
+                ntier = 0
+                winsz = M
+                bufsz = wr_sz
+                slp = wr_slp
+            else:
+                winsz = max(40, int(M * (slack / (12 * M))))
+                base_rate = M if not wr_slp else wr_sz / wr_slp
+                if winsz > base_rate:
+                    ntier = 0
+                    bufsz = wr_sz
+                    slp = wr_slp
+                elif winsz > 300:
+                    ntier = 1
+                    bufsz = winsz // 5
+                    slp = 0.2
+                else:
+                    ntier = 2
+                    bufsz = winsz = slp = 1
+
+            if tier != ntier:
+                tier = ntier
+                self.log("moved to tier %d (%s)" % (tier, tiers[tier]))
+
+            try:
+                with open(ap_data, "rb", self.args.iobuf) as f:
+                    f.seek(lower)
+                    page = f.read(min(winsz, data_end - lower, upper - lower))
+                    if not page:
+                        raise Exception("got 0 bytes (EOF?)")
+            except Exception as ex:
+                self.log("pipe: read failed at %.2f MiB: %s" % (lower / M, ex), 3)
+                with self.u2mutex:
+                    self.pipes.c.pop(req_path, None)
+                spins += 1
+                if spins > 3:
+                    raise Pebkac(500, "file became unreadable")
+                time.sleep(2)
+                continue
+
+            spins = 0
+            pofs = 0
+            while pofs < len(page):
+                if slp:
+                    time.sleep(slp)
+
+                try:
+                    buf = page[pofs : pofs + bufsz]
+                    self.s.sendall(buf)
+                    zi = len(buf)
+                    remains -= zi
+                    lower += zi
+                    pofs += zi
+                except:
+                    broken = True
+                    break
+
+        if lower < upper and not broken:
+            with open(req_path, "rb") as f:
+                remains = sendfile_py(self.log, lower, upper, f, self.s, wr_sz, wr_slp)
+
+        spd = self._spd((upper - lower) - remains)
+        if self.do_log:
+            self.log("{}, {}".format(logmsg, spd))
+
+        return not broken
+
     def tx_zip(
         self,
         fmt ,
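
`tx_file` / `tx_pipe` add download-while-uploading: if the requested file is an up2k upload that is still in progress (the destination file is still empty and the volume does not set `nopipe`), the response is streamed from the `.PARTIAL` file instead, but only up to the contiguous prefix of chunks that have already arrived. That prefix is recomputed from the job's chunk lists: each chunk id in `job["hash"]` (file order) that is no longer listed in `job["need"]` contributes `up2k_chunksize(file_size)` bytes, and the first still-missing chunk ends the run; the sender then throttles itself into one of three tiers depending on how much of that prefix is left, raises 416 if it catches up with the uploader, and serves the remainder with `sendfile_py` once the upload finishes. A sketch of the prefix calculation, with `chunk_size` taken as a parameter since the exact scaling inside `up2k_chunksize` is an internal detail:

    def streamable_prefix(job, chunk_size):
        # bytes of the .PARTIAL file that are safe to stream right now:
        # the contiguous run of chunks (in file order) that the uploader
        # has already delivered, i.e. that are no longer in job["need"]
        need = set(job["need"])
        data_end = 0
        for cid in job["hash"]:   # chunk hashes, in file order
            if cid in need:       # first missing chunk ends the run
                break
            data_end += chunk_size
        return min(data_end, job["size"])

    job = {"size": 5000000, "hash": ["a", "b", "c", "d", "e"], "need": ["d", "e"]}
    print(streamable_prefix(job, 1048576))  # 3145728: three chunks ready, the rest must wait
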
@@ -3311,7 +3540,6 @@ class HttpCli(object):
         targs = {
             "r": self.args.SR if self.is_vproxied else "",
             "ts": self.conn.hsrv.cachebuster(),
-            "html_head": self.html_head,
             "edit": "edit" in self.uparam,
             "title": html_escape(self.vpath, crlf=True),
             "lastmod": int(ts_md * 1000),
@@ -3322,6 +3550,13 @@ class HttpCli(object):
             "md": boundary,
             "arg_base": arg_base,
         }
+
+        zfv = self.vn.flags.get("html_head")
+        if zfv:
+            targs["this"] = self
+            self._build_html_head(zfv, targs)
+
+        targs["html_head"] = self.html_head
         zs = template.render(**targs).encode("utf-8", "replace")
         html = zs.split(boundary.encode("utf-8"))
         if len(html) != 2:
@@ -3437,8 +3672,6 @@ class HttpCli(object):
             self.reply(zb, mime="text/plain; charset=utf-8")
             return True

-        self.html_head += self.vn.flags.get("html_head", "")
-
         html = self.j2s(
             "splash",
             this=self,
@@ -3483,7 +3716,7 @@ class HttpCli(object):
         return True

     def set_cfg_reset(self) :
-        for k in ("k304", "js", "idxh", "cppwd", "cppws"):
+        for k in ("k304", "js", "idxh", "dots", "cppwd", "cppws"):
            cookie = gencookie(k, "x", self.args.R, False)
            self.out_headerlist.append(("Set-Cookie", cookie))

@@ -3694,7 +3927,7 @@ class HttpCli(object):
         allvols = [x for x in allvols if "e2d" in x.flags]

         for vol in allvols:
-            cur = idx.get_cur(vol.realpath)
+            cur = idx.get_cur(vol)
             if not cur:
                 continue

@@ -3748,7 +3981,7 @@ class HttpCli(object):
         if not allvols:
             ret = [{"kinshi": 1}]

-        jtxt = '{"u":%s,"c":%s}' % (uret, json.dumps(ret, indent=0))
+        jtxt = '{"u":%s,"c":%s}' % (uret, json.dumps(ret, separators=(",\n", ": ")))
         zi = len(uret.split('\n"pd":')) - 1
         self.log("%s #%d+%d %.2fsec" % (lm, zi, len(ret), time.time() - t0))
         self.reply(jtxt.encode("utf-8", "replace"), mime="application/json")
@@ -3909,7 +4142,17 @@ class HttpCli(object):
         e2d = "e2d" in vn.flags
         e2t = "e2t" in vn.flags

-        self.html_head += vn.flags.get("html_head", "")
+        add_og = "og" in vn.flags
+        if add_og:
+            if "th" in self.uparam or "raw" in self.uparam:
+                og_ua = add_og = False
+            elif self.args.og_ua:
+                og_ua = add_og = self.args.og_ua.search(self.ua)
+            else:
+                og_ua = False
+                add_og = True
+            og_fn = ""
+
         if "b" in self.uparam:
             self.out_headers["X-Robots-Tag"] = "noindex, nofollow"

@@ -3917,13 +4160,15 @@ class HttpCli(object):
         is_dk = False
         fk_pass = False
         icur = None
-        if is_dir and (e2t or e2d):
+        if (e2t or e2d) and (is_dir or add_og):
             idx = self.conn.get_u2idx()
             if idx and hasattr(idx, "p_end"):
-                icur = idx.get_cur(dbv.realpath)
+                icur = idx.get_cur(dbv)

         th_fmt = self.uparam.get("th")
-        if self.can_read or (self.can_get and vn.flags.get("dk")):
+        if self.can_read or (
+            self.can_get and (vn.flags.get("dk") or "fk" not in vn.flags)
+        ):
             if th_fmt is not None:
                 nothumb = "dthumb" in dbv.flags
                 if is_dir:
@@ -3970,7 +4215,7 @@ class HttpCli(object):
         elif self.can_write and th_fmt is not None:
             return self.tx_svg("upload\nonly")

-        elif self.can_get and self.avn:
+        if not self.can_read and self.can_get and self.avn:
             axs = self.avn.axs
             if self.uname not in axs.uhtml:
                 pass
@@ -4016,6 +4261,17 @@ class HttpCli(object):
                 self.log(t % (correct, got, self.req, abspath), 6)
                 return self.tx_404()

+            if add_og:
+                if og_ua or self.host not in self.headers.get("referer", ""):
+                    self.vpath, og_fn = vsplit(self.vpath)
+                    vpath = self.vpath
+                    vn, rem = self.asrv.vfs.get(self.vpath, self.uname, False, False)
+                    abspath = vn.dcanonical(rem)
+                    dbv, vrem = vn.get_dbv(rem)
+                    is_dir = stat.S_ISDIR(st.st_mode)
+                    is_dk = True
+                    vpnodes.pop()
+
             if (
                 (abspath.endswith(".md") or self.can_delete)
                 and "nohtml" not in vn.flags
@@ -4027,7 +4283,10 @@ class HttpCli(object):
             ):
                 return self.tx_md(vn, abspath)

-            return self.tx_file(abspath)
+            if not add_og or not og_fn:
+                return self.tx_file(
+                    abspath, None if st.st_size or "nopipe" in vn.flags else vn.realpath
+                )

         elif is_dir and not self.can_read:
             if self._use_dirkey(abspath):
@@ -4074,7 +4333,11 @@ class HttpCli(object):
         is_ls = "ls" in self.uparam
         is_js = self.args.force_js or self.cookies.get("js") == "y"

-        if not is_ls and (self.ua.startswith("curl/") or self.ua.startswith("fetch")):
+        if (
+            not is_ls
+            and not add_og
+            and (self.ua.startswith("curl/") or self.ua.startswith("fetch"))
+        ):
             self.uparam["ls"] = "v"
             is_ls = True

@@ -4148,6 +4411,7 @@ class HttpCli(object):
             "dsort": vf["sort"],
             "dcrop": vf["crop"],
             "dth3x": vf["th3x"],
+            "dvol": self.args.au_vol,
             "themes": self.args.themes,
             "turbolvl": self.args.turbo,
             "u2j": self.args.u2j,
@@ -4199,7 +4463,7 @@ class HttpCli(object):

         for k in ["zip", "tar"]:
             v = self.uparam.get(k)
-            if v is not None:
+            if v is not None and (not add_og or not og_fn):
                 return self.tx_zip(k, v, self.vpath, vn, rem, [])

         fsroot, vfs_ls, vfs_virt = vn.ls(
@@ -4213,6 +4477,10 @@ class HttpCli(object):
         ls_names = [x[0] for x in vfs_ls]
         ls_names.extend(list(vfs_virt.keys()))

+        if add_og and og_fn and not self.can_read:
+            ls_names = [og_fn]
+            is_js = True
+
         # check for old versions of files,
         # [num-backups, most-recent, hist-path]
         hist = {}
@@ -4274,12 +4542,14 @@ class HttpCli(object):
                     margin = "DIR"
                 elif add_dk:
                     zs = absreal(fspath)
-                    margin = '<a href="%s?k=%s&zip" rel="nofollow">zip</a>' % (
+                    margin = '<a href="%s?k=%s&zip=crc" rel="nofollow">zip</a>' % (
                         quotep(href),
                         self.gen_fk(2, self.args.dk_salt, zs, 0, 0)[:add_dk],
                     )
                 else:
-                    margin = '<a href="%s?zip" rel="nofollow">zip</a>' % (quotep(href),)
+                    margin = '<a href="%s?zip=crc" rel="nofollow">zip</a>' % (
+                        quotep(href),
+                    )
             elif fn in hist:
                 margin = '<a href="%s.hist/%s">#%s</a>' % (
                     base,
@@ -4423,6 +4693,9 @@ class HttpCli(object):
         else:
             taglist = list(tagset)

+        if not files and not dirs and not readme and not logues[0] and not logues[1]:
+            logues[1] = "this folder is empty"
+
         if is_ls:
             ls_ret["dirs"] = dirs
             ls_ret["files"] = files
@@ -4474,6 +4747,148 @@ class HttpCli(object):
         if "mth" in vn.flags:
             j2a["def_hcols"] = list(vn.flags["mth"])

+        if add_og and "raw" not in self.uparam:
+            j2a["this"] = self
+            cgv["og_fn"] = og_fn
+            if og_fn and vn.flags.get("og_tpl"):
+                tpl = vn.flags["og_tpl"]
+                if "EXT" in tpl:
+                    zs = og_fn.split(".")[-1].lower()
+                    tpl2 = tpl.replace("EXT", zs)
+                    if os.path.exists(tpl2):
+                        tpl = tpl2
+                with self.conn.hsrv.mutex:
+                    if tpl not in self.conn.hsrv.j2:
+                        tdir, tname = os.path.split(tpl)
+                        j2env = jinja2.Environment()
+                        j2env.loader = jinja2.FileSystemLoader(tdir)
+                        self.conn.hsrv.j2[tpl] = j2env.get_template(tname)
+            thumb = ""
+            is_pic = is_vid = is_au = False
+            covernames = self.args.th_coversd
+            for fn in ls_names:
+                if fn.lower() in covernames:
+                    thumb = fn
+                    break
+            if og_fn:
+                ext = og_fn.split(".")[-1].lower()
+                if ext in self.thumbcli.thumbable:
+                    is_pic = (
+                        ext in self.thumbcli.fmt_pil
+                        or ext in self.thumbcli.fmt_vips
+                        or ext in self.thumbcli.fmt_ffi
+                    )
+                    is_vid = ext in self.thumbcli.fmt_ffv
+                    is_au = ext in self.thumbcli.fmt_ffa
+                if not thumb or not is_au:
+                    thumb = og_fn
+                file = next((x for x in files if x["name"] == og_fn), None)
+            else:
+                file = None
+
+            url_base = "%s://%s/%s" % (
+                "https" if self.is_https else "http",
+                self.host,
+                self.args.RS + quotep(vpath),
+            )
+            j2a["og_is_pic"] = is_pic
+            j2a["og_is_vid"] = is_vid
+            j2a["og_is_au"] = is_au
+            if thumb:
+                fmt = vn.flags.get("og_th", "j")
+                th_base = ujoin(url_base, quotep(thumb))
+                query = "th=%s&cache" % (fmt,)
+                query = ub64enc(query.encode("utf-8")).decode("utf-8")
+                # discord looks at file extension, not content-type...
+                query += "/a.jpg" if "j" in fmt else "/a.webp"
+                j2a["og_thumb"] = "%s/.uqe/%s" % (th_base, query)
+
+            j2a["og_fn"] = og_fn
+            j2a["og_file"] = file
+            if og_fn:
+                og_fn_q = quotep(og_fn)
+                query = ub64enc(b"raw").decode("utf-8")
+                if "." in og_fn:
+                    query += "/a.%s" % (og_fn.split(".")[-1])
+
+                j2a["og_url"] = ujoin(url_base, og_fn_q)
+                j2a["og_raw"] = j2a["og_url"] + "/.uqe/" + query
+            else:
+                j2a["og_url"] = j2a["og_raw"] = url_base
+
+            if not vn.flags.get("og_no_head"):
+                ogh = {"twitter:card": "summary"}
+
+                title = str(vn.flags.get("og_title") or "")
+
+                if thumb:
+                    ogh["og:image"] = j2a["og_thumb"]
+
+                zso = vn.flags.get("og_desc") or ""
+                if zso != "-":
+                    ogh["og:description"] = str(zso)
+
+                zs = vn.flags.get("og_site") or self.args.name
+                if zs not in ("", "-"):
+                    ogh["og:site_name"] = zs
+
+                tagmap = {}
+                if is_au:
+                    title = str(vn.flags.get("og_title_a") or "")
+                    ogh["og:type"] = "music.song"
+                    ogh["og:audio"] = j2a["og_raw"]
+                    tagmap = {
+                        "artist": "og:music:musician",
+                        "album": "og:music:album",
+                        ".dur": "og:music:duration",
+                    }
+                elif is_vid:
+                    title = str(vn.flags.get("og_title_v") or "")
+                    ogh["og:type"] = "video.other"
+                    ogh["og:video"] = j2a["og_raw"]
+                    tagmap = {
+                        "title": "og:title",
+                        ".dur": "og:video:duration",
+                    }
+                elif is_pic:
+                    title = str(vn.flags.get("og_title_i") or "")
+                    ogh["twitter:card"] = "summary_large_image"
+                    ogh["twitter:image"] = ogh["og:image"] = j2a["og_raw"]
+
+                try:
+                    for k, v in file["tags"].items():
+                        zs = "{{ %s }}" % (k,)
+                        title = title.replace(zs, str(v))
+                except:
+                    pass
+                title = re.sub(r"\{\{ [^}]+ \}\}", "", title)
+                while title.startswith(" - "):
+                    title = title[3:]
+                while title.endswith(" - "):
+                    title = title[:3]
+
+                if vn.flags.get("og_s_title") or not title:
+                    title = str(vn.flags.get("og_title") or "")
+
+                for tag, hname in tagmap.items():
+                    try:
+                        v = file["tags"][tag]
+                        if not v:
+                            continue
+                        ogh[hname] = int(v) if tag == ".dur" else v
+                    except:
+                        pass
+
+                ogh["og:title"] = title
+
+                oghs = [
+                    '\t<meta property="%s" content="%s">'
+                    % (k, html_escape(str(v), True, True))
+                    for k, v in ogh.items()
+                ]
+                zs = self.html_head + "\n%s\n" % ("\n".join(oghs),)
+                self.html_head = zs.replace("\n\n", "\n")
+
         html = self.j2s(tpl, **j2a)
         self.reply(html.encode("utf-8", "replace"))
         return True
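
The large block at the end of `tx_browser` adds Open Graph support: when the `og` volflag is set and the request comes from a link-previewing agent (matched by `--og-ua`) or from an external referer, the trailing filename is split off the vpath, the folder is rendered as usual, and a set of `og:*` / `twitter:*` meta tags is appended to `html_head`, pointing at `/.uqe/`-style thumbnail and raw URLs so scrapers such as Discord can embed the media inline. The `og_title_a` / `og_title_v` / `og_title_i` volflags act as `{{ tag }}` templates filled from the file's media tags, with unresolved tokens stripped afterwards; a small example of that substitution step (the flag value below is made up):

    import re

    og_title_a = "{{ artist }} - {{ title }}"     # hypothetical volflag value
    tags = {"artist": "Lorem", "title": "Ipsum"}  # example media tags

    title = og_title_a
    for k, v in tags.items():
        title = title.replace("{{ %s }}" % (k,), str(v))
    title = re.sub(r"\{\{ [^}]+ \}\}", "", title)  # drop tokens with no tag value

    print(title)  # Lorem - Ipsum
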
copyparty/httpconn.py CHANGED
@@ -52,6 +52,7 @@ class HttpConn(object):
         self.E = self.args.E
         self.asrv = hsrv.asrv  # mypy404
         self.u2fh = hsrv.u2fh  # mypy404
+        self.pipes = hsrv.pipes  # mypy404
         self.ipa_nm = hsrv.ipa_nm
         self.xff_nm = hsrv.xff_nm
         self.xff_lan = hsrv.xff_lan  # type: ignore
copyparty/httpsrv.py CHANGED
@@ -61,6 +61,7 @@ from .u2idx import U2idx
 from .util import (
     E_SCK,
     FHC,
+    CachedDict,
     Daemon,
     Garda,
     Magician,
@@ -126,6 +127,7 @@ class HttpSrv(object):
         self.t_periodic = None

         self.u2fh = FHC()
+        self.pipes = CachedDict(0.2)
         self.metrics = Metrics(self)
         self.nreq = 0
         self.nsus = 0
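
`HttpSrv` gains `self.pipes = CachedDict(0.2)`, which `HttpConn` and `HttpCli` share; repeated range requests against a file that is still uploading can then reuse the most recent up2k job lookup for a fraction of a second instead of asking the broker every time (the httpcli code above uses its `set`/`get` methods and its raw `.c` dict). Assuming `CachedDict` is essentially a small expiring map, the idea is roughly:

    import time

    class TTLDict:
        # rough sketch of what CachedDict(max_age) provides: entries are
        # dropped once they are older than max_age seconds (0.2s for hsrv.pipes)
        def __init__(self, max_age):
            self.max_age = max_age
            self.c = {}  # key -> (stored_at, value)

        def set(self, k, v):
            self.c[k] = (time.time(), v)

        def get(self, k):
            hit = self.c.get(k)
            if not hit:
                return None
            ts, v = hit
            if time.time() - ts > self.max_age:
                self.c.pop(k, None)
                return None
            return v
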