copyparty 1.19.15__py3-none-any.whl → 1.19.17__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in the public registry.
- copyparty/__init__.py +19 -0
- copyparty/__main__.py +47 -11
- copyparty/__version__.py +2 -2
- copyparty/authsrv.py +45 -9
- copyparty/bos/bos.py +5 -1
- copyparty/cfg.py +20 -0
- copyparty/ftpd.py +5 -3
- copyparty/httpcli.py +114 -27
- copyparty/mdns.py +53 -18
- copyparty/mtag.py +18 -4
- copyparty/qrkode.py +107 -0
- copyparty/res/COPYING.txt +76 -5
- copyparty/smbd.py +1 -1
- copyparty/stolen/qrcodegen.py +1 -64
- copyparty/svchub.py +13 -2
- copyparty/tcpsrv.py +6 -7
- copyparty/tftpd.py +1 -1
- copyparty/th_srv.py +11 -5
- copyparty/up2k.py +40 -16
- copyparty/util.py +63 -16
- copyparty/web/baguettebox.js.gz +0 -0
- copyparty/web/browser.css.gz +0 -0
- copyparty/web/browser.html +3 -1
- copyparty/web/browser.js.gz +0 -0
- copyparty/web/splash.html +3 -0
- copyparty/web/splash.js.gz +0 -0
- copyparty/web/tl/chi.js.gz +0 -0
- copyparty/web/tl/cze.js.gz +0 -0
- copyparty/web/tl/deu.js.gz +0 -0
- copyparty/web/tl/epo.js.gz +0 -0
- copyparty/web/tl/fin.js.gz +0 -0
- copyparty/web/tl/fra.js.gz +0 -0
- copyparty/web/tl/grc.js.gz +0 -0
- copyparty/web/tl/ita.js.gz +0 -0
- copyparty/web/tl/kor.js.gz +0 -0
- copyparty/web/tl/nld.js.gz +0 -0
- copyparty/web/tl/nno.js.gz +0 -0
- copyparty/web/tl/nor.js.gz +0 -0
- copyparty/web/tl/pol.js.gz +0 -0
- copyparty/web/tl/por.js.gz +0 -0
- copyparty/web/tl/rus.js.gz +0 -0
- copyparty/web/tl/spa.js.gz +0 -0
- copyparty/web/tl/swe.js.gz +0 -0
- copyparty/web/tl/tur.js.gz +0 -0
- copyparty/web/tl/ukr.js.gz +0 -0
- copyparty/web/util.js.gz +0 -0
- {copyparty-1.19.15.dist-info → copyparty-1.19.17.dist-info}/METADATA +47 -2
- {copyparty-1.19.15.dist-info → copyparty-1.19.17.dist-info}/RECORD +52 -32
- {copyparty-1.19.15.dist-info → copyparty-1.19.17.dist-info}/WHEEL +0 -0
- {copyparty-1.19.15.dist-info → copyparty-1.19.17.dist-info}/entry_points.txt +0 -0
- {copyparty-1.19.15.dist-info → copyparty-1.19.17.dist-info}/licenses/LICENSE +0 -0
- {copyparty-1.19.15.dist-info → copyparty-1.19.17.dist-info}/top_level.txt +0 -0
copyparty/httpcli.py
CHANGED

@@ -34,8 +34,8 @@ from .__init__ import ANYWIN, RES, TYPE_CHECKING, EnvParams, unicode
 from .__version__ import S_VERSION
 from .authsrv import LEELOO_DALLAS, VFS  # typechk
 from .bos import bos
+from .qrkode import QrCode, qr2svg, qrgen
 from .star import StreamTar
-from .stolen.qrcodegen import QrCode, qr2svg
 from .sutil import StreamArc, gfilter
 from .szip import StreamZip
 from .up2k import up2k_chunksize

@@ -270,7 +270,7 @@ class HttpCli(object):
         tpl = self.conn.hsrv.j2[name]
         ka["r"] = self.args.SR if self.is_vproxied else ""
         ka["ts"] = self.conn.hsrv.cachebuster()
-        ka["lang"] = self.args.lang
+        ka["lang"] = self.cookies.get("cplng") or self.args.lang
         ka["favico"] = self.args.favico
         ka["s_doctitle"] = self.args.doctitle
         ka["tcolor"] = self.vn.flags["tcolor"]

@@ -857,6 +857,16 @@ class HttpCli(object):
         return self.conn.iphash.s(self.ip)
 
     def cbonk(self, g, v, reason, descr):
+        cond = self.args.dont_ban
+        if (
+            cond == "any"
+            or (cond == "auth" and self.uname != "*")
+            or (cond == "aa" and self.avol)
+            or (cond == "av" and self.can_admin)
+            or (cond == "rw" and self.can_read and self.can_write)
+        ):
+            return False
+
         self.conn.hsrv.nsus += 1
         if not g.lim:
             return False

@@ -881,7 +891,7 @@ class HttpCli(object):
             0,
             self.ip,
             time.time(),
-            reason,
+            [reason, reason],
         ):
             self.log("client banned: %s" % (descr,), 1)
             self.conn.hsrv.bans[ip] = bonk

@@ -1420,10 +1430,10 @@ class HttpCli(object):
 
             hits = idx.run_query(self.uname, [self.vn], uq, uv, False, False, nmax)[0]
 
-            pw
-
-            q_pw = "?pw=%s" % (
-            a_pw = "&pw=%s" % (
+            if "pw" in self.ouparam and "nopw" not in self.ouparam:
+                zs = self.ouparam["pw"]
+                q_pw = "?pw=%s" % (quotep(zs),)
+                a_pw = "&pw=%s" % (quotep(zs),)
                 for i in hits:
                     i["rp"] += a_pw if "?" in i["rp"] else q_pw
             else:

@@ -1437,6 +1447,8 @@ class HttpCli(object):
                 self.host,
             )
             feed = baseurl + self.req[1:]
+            if "pw" in self.ouparam and self.ouparam.get("nopw") == "a":
+                feed = re.sub(r"&pw=[^&]*", "", feed)
             if self.is_vproxied:
                 baseurl += self.args.RS
             efeed = html_escape(feed, True, True)

@@ -1514,6 +1526,64 @@ class HttpCli(object):
         self.log("rss: %d hits, %d bytes" % (len(hits), len(bret)))
         return True
 
+    def tx_zls(self, abspath):
+        if self.do_log:
+            self.log("zls %s @%s" % (self.req, self.uname))
+        if self.args.no_zls:
+            raise Pebkac(405, "zip browsing is disabled in server config")
+
+        import zipfile
+
+        try:
+            with zipfile.ZipFile(abspath, "r") as zf:
+                filelist = [{"fn": f.filename} for f in zf.infolist()]
+            ret = json.dumps(filelist).encode("utf-8", "replace")
+            self.reply(ret, mime="application/json")
+            return True
+        except (zipfile.BadZipfile, RuntimeError):
+            raise Pebkac(404, "requested file is not a valid zip file")
+
+    def tx_zget(self, abspath):
+        maxsz = 1024 * 1024 * 64
+
+        inner_path = self.uparam.get("zget")
+        if not inner_path:
+            raise Pebkac(405, "inner path is required")
+        if self.do_log:
+            self.log(
+                "zget %s \033[35m%s\033[0m @%s" % (self.req, inner_path, self.uname)
+            )
+        if self.args.no_zls:
+            raise Pebkac(405, "zip browsing is disabled in server config")
+
+        import zipfile
+
+        try:
+            with zipfile.ZipFile(abspath, "r") as zf:
+                zi = zf.getinfo(inner_path)
+                if zi.file_size >= maxsz:
+                    raise Pebkac(404, "zip bomb defused")
+                with zf.open(zi, "r") as fi:
+                    self.send_headers(length=zi.file_size, mime=guess_mime(inner_path))
+
+                    sendfile_py(
+                        self.log,
+                        0,
+                        zi.file_size,
+                        fi,
+                        self.s,
+                        self.args.s_wr_sz,
+                        self.args.s_wr_slp,
+                        not self.args.no_poll,
+                        {},
+                        "",
+                    )
+        except KeyError:
+            raise Pebkac(404, "no such file in archive")
+        except (zipfile.BadZipfile, RuntimeError):
+            raise Pebkac(404, "requested file is not a valid zip file")
+        return True
+
     def handle_propfind(self):
         if self.do_log:
             self.log("PFIND %s @%s" % (self.req, self.uname))
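The two handlers above are reached through `?zls` (list archive contents) and `?zget=<member>` (stream one member), matching the `uparam` dispatch added near the bottom of this file (`@@ -6454,14 +6531,23`). A minimal client-side sketch, assuming a copyparty 1.19.17 instance on the default port 3923 serving a zip at /pub/demo.zip; the URL shapes are inferred from the uparam lookups in the diff, not from documentation (the remaining httpcli.py hunks continue below):

    # example client, not part of the diff
    import json
    import urllib.request
    from urllib.parse import quote

    base = "http://127.0.0.1:3923/pub/demo.zip"   # assumed host, port, and file

    # tx_zls returns a JSON array of {"fn": <member name>} objects
    members = json.loads(urllib.request.urlopen(base + "?zls").read())
    print([m["fn"] for m in members])

    # tx_zget streams one member with a guessed mime type,
    # refusing anything 64 MiB or larger ("zip bomb defused")
    data = urllib.request.urlopen(base + "?zget=" + quote(members[0]["fn"])).read()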
@@ -2077,7 +2147,7 @@ class HttpCli(object):
             t = "urlform_raw %d @ %r\n %r\n"
             self.log(t % (len(orig), "/" + self.vpath, orig))
             try:
-                zb = unquote(buf.replace(b"+", b" "))
+                zb = unquote(buf.replace(b"+", b" ").replace(b"&", b"\n"))
                 plain = zb.decode("utf-8", "replace")
                 if buf.startswith(b"msg="):
                     plain = plain[4:]

@@ -2098,7 +2168,7 @@ class HttpCli(object):
                     len(buf),
                     self.ip,
                     time.time(),
-                    plain,
+                    [plain, orig],
                 )
 
             t = "urlform_dec %d @ %r\n %r\n"

@@ -2257,7 +2327,7 @@ class HttpCli(object):
                 remains,
                 self.ip,
                 at,
-
+                None,
             )
             t = hr.get("rejectmsg") or ""
             if t or not hr:

@@ -2392,7 +2462,7 @@ class HttpCli(object):
                 post_sz,
                 self.ip,
                 at,
-
+                None,
             )
             t = hr.get("rejectmsg") or ""
             if t or not hr:

@@ -3224,7 +3294,7 @@ class HttpCli(object):
                 0,
                 self.ip,
                 time.time(),
-
+                None,
             )
             t = hr.get("rejectmsg") or ""
             if t or not hr:

@@ -3396,7 +3466,7 @@ class HttpCli(object):
                 0,
                 self.ip,
                 at,
-
+                None,
             )
             t = hr.get("rejectmsg") or ""
             if t or not hr:

@@ -3503,7 +3573,7 @@ class HttpCli(object):
                 sz,
                 self.ip,
                 at,
-
+                None,
             )
             t = hr.get("rejectmsg") or ""
             if t or not hr:

@@ -3814,7 +3884,7 @@ class HttpCli(object):
                 0,
                 self.ip,
                 time.time(),
-
+                None,
             )
             t = hr.get("rejectmsg") or ""
             if t or not hr:

@@ -3862,7 +3932,7 @@ class HttpCli(object):
                 sz,
                 self.ip,
                 new_lastmod,
-
+                None,
             )
             t = hr.get("rejectmsg") or ""
             if t or not hr:

@@ -4929,7 +4999,7 @@ class HttpCli(object):
             url += "#" + uhash
 
         self.log("qrcode(%r)" % (url,))
-        ret = qr2svg(
+        ret = qr2svg(qrgen(url.encode("utf-8")), 2)
         self.reply(ret.encode("utf-8"), mime="image/svg+xml")
         return True
 

@@ -4996,7 +5066,7 @@ class HttpCli(object):
             "edit": "edit" in self.uparam,
             "title": html_escape(self.vpath, crlf=True),
             "lastmod": int(ts_md * 1000),
-            "lang": self.args.lang,
+            "lang": self.cookies.get("cplng") or self.args.lang,
             "favico": self.args.favico,
             "have_emp": int(self.args.emp),
             "md_no_br": int(vn.flags.get("md_no_br") or 0),

@@ -5381,13 +5451,20 @@ class HttpCli(object):
             return self.redirect("", "?h", x.get(), "return to", False)
 
     def tx_stack(self):
-
+        zs = self.args.stack_who
+        if zs == "all" or (
+            (zs == "a" and self.avol)
+            or (zs == "rw" and [x for x in self.wvol if x in self.rvol])
+        ):
+            pass
+        else:
             raise Pebkac(403, "'stack' not allowed for user " + self.uname)
 
-
-
-
-
+        ret = html_escape(alltrace(self.args.stack_v))
+        if self.args.stack_v:
+            ret = "<pre>%s\n%s" % (time.time(), ret)
+        else:
+            ret = "<pre>%s" % (ret,)
         self.reply(ret.encode("utf-8"))
         return True
 

@@ -6454,14 +6531,23 @@ class HttpCli(object):
             ):
                 return self.tx_md(vn, abspath)
 
+            if "zls" in self.uparam:
+                return self.tx_zls(abspath)
+            if "zget" in self.uparam:
+                return self.tx_zget(abspath)
+
             if not add_og or not og_fn:
-
-                abspath, None
-
+                if st.st_size or "nopipe" in vn.flags:
+                    return self.tx_file(abspath, None)
+                else:
+                    return self.tx_file(abspath, vn.get_dbv("")[0].realpath)
 
         elif is_dir and not self.can_read:
             if use_dirkey:
                 is_dk = True
+            elif self.can_get and "doc" in self.uparam:
+                zs = vjoin(self.vpath, self.uparam["doc"]) + "?v"
+                return self.redirect(zs, flavor="redirecting to", use302=True)
         elif not self.can_write:
             return self.tx_404(True)
 

@@ -6469,7 +6555,7 @@ class HttpCli(object):
 
         try:
             if not self.args.nih:
-                srv_info.append(self.args.
+                srv_info.append(self.args.name_html)
         except:
             self.log("#wow #whoa")
 

@@ -6543,6 +6629,7 @@ class HttpCli(object):
             "acct": self.uname,
             "perms": perms,
         }
+        # also see `js_htm` in authsrv.py
         j2a = {
             "cgv1": vn.js_htm,
             "cgv": cgv,
copyparty/mdns.py
CHANGED

@@ -2,6 +2,7 @@
 from __future__ import print_function, unicode_literals
 
 import errno
+import os
 import random
 import select
 import socket

@@ -12,28 +13,62 @@ from ipaddress import IPv4Network, IPv6Network
 from .__init__ import TYPE_CHECKING
 from .__init__ import unicode as U
 from .multicast import MC_Sck, MCast
-from .stolen.dnslib import (
-    AAAA,
-)
-from .stolen.dnslib import CLASS as DC
-from .stolen.dnslib import (
-    NSEC,
-    PTR,
-    QTYPE,
-    RR,
-    SRV,
-    TXT,
-    A,
-    DNSHeader,
-    DNSQuestion,
-    DNSRecord,
-    set_avahi_379,
-)
 from .util import IP6_LL, CachedSet, Daemon, Netdev, list_ips, min_ex
 
+try:
+    if os.getenv("PRTY_SYS_ALL") or os.getenv("PRTY_SYS_DNSLIB"):
+        raise ImportError()
+    from .stolen.dnslib import (
+        AAAA,
+    )
+    from .stolen.dnslib import CLASS as DC
+    from .stolen.dnslib import (
+        NSEC,
+        PTR,
+        QTYPE,
+        RR,
+        SRV,
+        TXT,
+        A,
+        DNSHeader,
+        DNSQuestion,
+        DNSRecord,
+        set_avahi_379,
+    )
+
+    DNS_VND = True
+except ImportError:
+    DNS_VND = False
+    from dnslib import (
+        AAAA,
+    )
+    from dnslib import CLASS as DC
+    from dnslib import (
+        NSEC,
+        PTR,
+        QTYPE,
+        RR,
+        SRV,
+        TXT,
+        A,
+        Bimap,
+        DNSHeader,
+        DNSQuestion,
+        DNSRecord,
+    )
+
+    DC.forward[0x8001] = "F_IN"
+    DC.reverse["F_IN"] = 0x8001
+
 if TYPE_CHECKING:
     from .svchub import SvcHub
 
+if os.getenv("PRTY_MODSPEC"):
+    from inspect import getsourcefile
+
+    print("PRTY_MODSPEC: dnslib:", getsourcefile(A))
+
+
 MDNS4 = "224.0.0.251"
 MDNS6 = "ff02::fb"
 

@@ -71,7 +106,7 @@ class MDNS(MCast):
         self.ngen = ngen
         self.ttl = 300
 
-        if not self.args.zm_nwa_1:
+        if not self.args.zm_nwa_1 and DNS_VND:
             set_avahi_379()
 
         zs = self.args.zm_fqdn or (self.args.name + ".local")
copyparty/mtag.py
CHANGED

@@ -162,12 +162,12 @@ def au_unpk(
             znil = [x for x in znil if "cover" in x[0]] or znil
             znil = [x for x in znil if CBZ_01.search(x[0])] or znil
             t = "cbz: %d files, %d hits" % (nf, len(znil))
+            if not znil:
+                raise Exception("no images inside cbz")
             using = sorted(znil)[0][1].filename
             if znil:
                 t += ", using " + using
             log(t)
-            if not znil:
-                raise Exception("no images inside cbz")
             fi = zf.open(using)
 
         elif pk == "epub":

@@ -193,9 +193,10 @@
 
     except Exception as ex:
         if ret:
-            t = "failed to decompress
+            t = "failed to decompress file %r: %r"
             log(t % (abspath, ex))
             wunlink(log, ret, vn.flags if vn else VF_CAREFUL)
+        return ""
 
     return abspath
 

@@ -415,10 +416,17 @@ def get_cover_from_epub(log, abspath):
         # This might be an EPUB2 file, try the legacy way of specifying covers
         coverimage_path = _get_cover_from_epub2(log, package_root, package_ns)
 
+        if not coverimage_path:
+            raise Exception("no cover inside epub")
+
         # This url is either absolute (in the .epub) or relative to the package document
         adjusted_cover_path = urljoin(rootfile_path, coverimage_path)
 
-
+        try:
+            return z.open(adjusted_cover_path)
+        except KeyError:
+            t = "epub: cover specified in package document, but doesn't exist: %s"
+            log(t % (adjusted_cover_path,))
 
 
 def _get_cover_from_epub2(

@@ -636,6 +644,9 @@ class MTag(object):
             return self._get(abspath)
 
         ap = au_unpk(self.log, self.args.au_unpk, abspath)
+        if not ap:
+            return {}
+
         ret = self._get(ap)
         if ap != abspath:
             wunlink(self.log, ap, VF_CAREFUL)

@@ -741,6 +752,9 @@ class MTag(object):
             ap = abspath
 
         ret = {}
+        if not ap:
+            return ret
+
         for tagname, parser in sorted(parsers.items(), key=lambda x: (x[1].pri, x[0])):
             try:
                 cmd = [parser.bin, ap]
copyparty/qrkode.py
ADDED

@@ -0,0 +1,107 @@
+# coding: utf-8
+from __future__ import print_function, unicode_literals
+
+import os
+
+try:
+    if os.getenv("PRTY_SYS_ALL") or os.getenv("PRTY_SYS_QRCG"):
+        raise ImportError()
+    from .stolen.qrcodegen import QrCode
+
+    qrgen = QrCode.encode_binary
+    VENDORED = True
+except ImportError:
+    VENDORED = False
+    from qrcodegen import QrCode
+
+if os.getenv("PRTY_MODSPEC"):
+    from inspect import getsourcefile
+
+    print("PRTY_MODSPEC: qrcode:", getsourcefile(QrCode))
+
+if not VENDORED:
+
+    def _qrgen(data):
+        ret = None
+        V = QrCode.Ecc
+        for e in [V.HIGH, V.QUARTILE, V.MEDIUM, V.LOW]:
+            qr = QrCode.encode_binary(data, e)
+            qr.size = qr._size
+            qr.modules = qr._modules
+            if not ret or ret.size > qr.size:
+                ret = qr
+        return ret
+
+    qrgen = _qrgen
+
+
+def qr2txt(qr, zoom=1, pad=4):
+    tab = qr.modules
+    sz = qr.size
+    if sz % 2 and zoom == 1:
+        tab.append([False] * sz)
+
+    tab = [[False] * sz] * pad + tab + [[False] * sz] * pad
+    tab = [[False] * pad + x + [False] * pad for x in tab]
+
+    rows = []
+    if zoom == 1:
+        for y in range(0, len(tab), 2):
+            row = ""
+            for x in range(len(tab[y])):
+                v = 2 if tab[y][x] else 0
+                v += 1 if tab[y + 1][x] else 0
+                row += " ▄▀█"[v]
+            rows.append(row)
+    else:
+        for tr in tab:
+            row = ""
+            for zb in tr:
+                row += " █"[int(zb)] * 2
+            rows.append(row)
+
+    return "\n".join(rows)
+
+
+def qr2png(
+    qr,
+    zoom,
+    pad,
+    bg,
+    fg,
+    ap,
+):
+    from PIL import Image
+
+    tab = qr.modules
+    sz = qr.size
+    psz = sz + pad * 2
+    if bg:
+        img = Image.new("RGB", (psz, psz), bg)
+    else:
+        img = Image.new("RGBA", (psz, psz), (0, 0, 0, 0))
+        fg = (fg[0], fg[1], fg[2], 255)
+    for y in range(sz):
+        for x in range(sz):
+            if tab[y][x]:
+                img.putpixel((x + pad, y + pad), fg)
+    if zoom != 1:
+        img = img.resize((sz * zoom, sz * zoom), Image.Resampling.NEAREST)
+    img.save(ap)
+
+
+def qr2svg(qr, border):
+    parts = []
+    for y in range(qr.size):
+        sy = border + y
+        for x in range(qr.size):
+            if qr.modules[y][x]:
+                parts.append("M%d,%dh1v1h-1z" % (border + x, sy))
+    t = """\
+<?xml version="1.0" encoding="UTF-8"?>
+<svg xmlns="http://www.w3.org/2000/svg" version="1.1" viewBox="0 0 {0} {0}" stroke="none">
+    <rect width="100%" height="100%" fill="#F7F7F7"/>
+    <path d="{1}" fill="#111111"/>
+</svg>
+"""
+    return t.format(qr.size + border * 2, " ".join(parts))