copyparty 1.19.17__py3-none-any.whl → 1.19.19__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. copyparty/__init__.py +6 -1
  2. copyparty/__main__.py +1 -0
  3. copyparty/__version__.py +2 -2
  4. copyparty/authsrv.py +47 -25
  5. copyparty/ftpd.py +1 -1
  6. copyparty/httpcli.py +61 -25
  7. copyparty/httpsrv.py +2 -2
  8. copyparty/web/a/partyfuse.py.gz +0 -0
  9. copyparty/web/a/u2c.py.gz +0 -0
  10. copyparty/web/a/webdav-cfg.txt.gz +0 -0
  11. copyparty/web/baguettebox.js.gz +0 -0
  12. copyparty/web/browser.css.gz +0 -0
  13. copyparty/web/browser.js.gz +0 -0
  14. copyparty/web/dbg-audio.js.gz +0 -0
  15. copyparty/web/md.css.gz +0 -0
  16. copyparty/web/md.js.gz +0 -0
  17. copyparty/web/md2.css.gz +0 -0
  18. copyparty/web/md2.js.gz +0 -0
  19. copyparty/web/mde.css.gz +0 -0
  20. copyparty/web/mde.js.gz +0 -0
  21. copyparty/web/msg.css.gz +0 -0
  22. copyparty/web/rups.css.gz +0 -0
  23. copyparty/web/rups.js.gz +0 -0
  24. copyparty/web/shares.css.gz +0 -0
  25. copyparty/web/shares.js.gz +0 -0
  26. copyparty/web/splash.css.gz +0 -0
  27. copyparty/web/splash.js.gz +0 -0
  28. copyparty/web/svcs.html +1 -1
  29. copyparty/web/svcs.js.gz +0 -0
  30. copyparty/web/tl/chi.js.gz +0 -0
  31. copyparty/web/tl/cze.js.gz +0 -0
  32. copyparty/web/tl/deu.js.gz +0 -0
  33. copyparty/web/tl/epo.js.gz +0 -0
  34. copyparty/web/tl/fin.js.gz +0 -0
  35. copyparty/web/tl/fra.js.gz +0 -0
  36. copyparty/web/tl/grc.js.gz +0 -0
  37. copyparty/web/tl/ita.js.gz +0 -0
  38. copyparty/web/tl/kor.js.gz +0 -0
  39. copyparty/web/tl/nld.js.gz +0 -0
  40. copyparty/web/tl/nno.js.gz +0 -0
  41. copyparty/web/tl/nor.js.gz +0 -0
  42. copyparty/web/tl/pol.js.gz +0 -0
  43. copyparty/web/tl/por.js.gz +0 -0
  44. copyparty/web/tl/rus.js.gz +0 -0
  45. copyparty/web/tl/spa.js.gz +0 -0
  46. copyparty/web/tl/swe.js.gz +0 -0
  47. copyparty/web/tl/tur.js.gz +0 -0
  48. copyparty/web/tl/ukr.js.gz +0 -0
  49. copyparty/web/ui.css.gz +0 -0
  50. copyparty/web/up2k.js.gz +0 -0
  51. copyparty/web/util.js.gz +0 -0
  52. copyparty/web/w.hash.js.gz +0 -0
  53. {copyparty-1.19.17.dist-info → copyparty-1.19.19.dist-info}/METADATA +17 -3
  54. {copyparty-1.19.17.dist-info → copyparty-1.19.19.dist-info}/RECORD +58 -58
  55. copyparty/web/a/partyfuse.py +0 -947
  56. copyparty/web/a/u2c.py +0 -1718
  57. copyparty/web/a/webdav-cfg.bat +0 -45
  58. {copyparty-1.19.17.dist-info → copyparty-1.19.19.dist-info}/WHEEL +0 -0
  59. {copyparty-1.19.17.dist-info → copyparty-1.19.19.dist-info}/entry_points.txt +0 -0
  60. {copyparty-1.19.17.dist-info → copyparty-1.19.19.dist-info}/licenses/LICENSE +0 -0
  61. {copyparty-1.19.17.dist-info → copyparty-1.19.19.dist-info}/top_level.txt +0 -0
copyparty/web/a/u2c.py DELETED
@@ -1,1718 +0,0 @@
- #!/usr/bin/env python3
- from __future__ import print_function, unicode_literals
-
- S_VERSION = "2.13"
- S_BUILD_DT = "2025-09-05"
-
- """
- u2c.py: upload to copyparty
- 2021, ed <irc.rizon.net>, MIT-Licensed
- https://github.com/9001/copyparty/blob/hovudstraum/bin/u2c.py
-
- - dependencies: no
- - supports python 2.6, 2.7, and 3.3 through 3.14
- - if something breaks just try again and it'll autoresume
- """
-
- import atexit
- import base64
- import binascii
- import datetime
- import hashlib
- import json
- import math
- import os
- import platform
- import re
- import signal
- import socket
- import stat
- import sys
- import threading
- import time
-
- EXE = bool(getattr(sys, "frozen", False))
-
- try:
-     import argparse
- except:
-     m = "\n ERROR: need 'argparse'; download it here:\n https://github.com/ThomasWaldmann/argparse/raw/master/argparse.py\n"
-     print(m)
-     raise
-
-
- PY2 = sys.version_info < (3,)
- PY27 = sys.version_info > (2, 7) and PY2
- PY37 = sys.version_info > (3, 7)
- if PY2:
-     import httplib as http_client
-     from Queue import Queue
-     from urllib import quote, unquote
-     from urlparse import urlsplit, urlunsplit
-
-     sys.dont_write_bytecode = True
-     bytes = str
-     files_decoder = lambda s: unicode(s, "utf8")
- else:
-     from urllib.parse import quote_from_bytes as quote
-     from urllib.parse import unquote_to_bytes as unquote
-     from urllib.parse import urlsplit, urlunsplit
-
-     import http.client as http_client
-     from queue import Queue
-
-     unicode = str
-     files_decoder = unicode
-
-
- WTF8 = "replace" if PY2 else "surrogateescape"
-
- VT100 = platform.system() != "Windows"
-
-
- try:
-     UTC = datetime.timezone.utc
- except:
-     TD_ZERO = datetime.timedelta(0)
-
-     class _UTC(datetime.tzinfo):
-         def utcoffset(self, dt):
-             return TD_ZERO
-
-         def tzname(self, dt):
-             return "UTC"
-
-         def dst(self, dt):
-             return TD_ZERO
-
-     UTC = _UTC()
-
-
- try:
-     _b64etl = bytes.maketrans(b"+/", b"-_")
-
-     def ub64enc(bs):
-         x = binascii.b2a_base64(bs, newline=False)
-         return x.translate(_b64etl)
-
-     ub64enc(b"a")
- except:
-     ub64enc = base64.urlsafe_b64encode
-
-
- class BadAuth(Exception):
-     pass
-
-
- class Daemon(threading.Thread):
-     def __init__(self, target, name=None, a=None):
-         threading.Thread.__init__(self, name=name)
-         self.a = a or ()
-         self.fun = target
-         self.daemon = True
-         self.start()
-
-     def run(self):
-         try:
-             signal.pthread_sigmask(signal.SIG_BLOCK, [signal.SIGINT, signal.SIGTERM])
-         except:
-             pass
-
-         self.fun(*self.a)
-
-
- class HSQueue(Queue):
-     def _init(self, maxsize):
-         from collections import deque
-
-         self.q = deque()
-
-     def _qsize(self):
-         return len(self.q)
-
-     def _put(self, item):
-         if item and item.nhs:
-             self.q.appendleft(item)
-         else:
-             self.q.append(item)
-
-     def _get(self):
-         return self.q.popleft()
-
-
- class HCli(object):
-     def __init__(self, ar):
-         self.ar = ar
-         url = urlsplit(ar.url)
-         tls = url.scheme.lower() == "https"
-         try:
-             addr, port = url.netloc.split(":")
-         except:
-             addr = url.netloc
-             port = 443 if tls else 80
-
-         self.addr = addr
-         self.port = int(port)
-         self.tls = tls
-         self.verify = ar.te or not ar.td
-         self.conns = []
-         self.hconns = []
-         if tls:
-             import ssl
-
-             if not self.verify:
-                 self.ctx = ssl._create_unverified_context()
-             elif self.verify is True:
-                 self.ctx = None
-             else:
-                 self.ctx = ssl.SSLContext(ssl.PROTOCOL_TLS)
-                 self.ctx.load_verify_locations(self.verify)
-
-         self.base_hdrs = {
-             "Accept": "*/*",
-             "Connection": "keep-alive",
-             "Host": url.netloc,
-             "Origin": self.ar.burl,
-             "User-Agent": "u2c/%s" % (S_VERSION,),
-         }
-
-     def _connect(self, timeout):
-         args = {}
-         if PY37:
-             args["blocksize"] = 1048576
-
-         if not self.tls:
-             C = http_client.HTTPConnection
-         else:
-             C = http_client.HTTPSConnection
-             if self.ctx:
-                 args = {"context": self.ctx}
-
-         return C(self.addr, self.port, timeout=timeout, **args)
-
-     def req(self, meth, vpath, hdrs, body=None, ctype=None):
-         now = time.time()
-
-         hdrs.update(self.base_hdrs)
-         if self.ar.a:
-             hdrs["PW"] = self.ar.a
-         if ctype:
-             hdrs["Content-Type"] = ctype
-         if meth == "POST" and CLEN not in hdrs:
-             hdrs[CLEN] = (
-                 0 if not body else body.len if hasattr(body, "len") else len(body)
-             )
-
-         # large timeout for handshakes (safededup)
-         conns = self.hconns if ctype == MJ else self.conns
-         while conns and self.ar.cxp < now - conns[0][0]:
-             conns.pop(0)[1].close()
-         c = conns.pop()[1] if conns else self._connect(999 if ctype == MJ else 128)
-         try:
-             c.request(meth, vpath, body, hdrs)
-             if PY27:
-                 rsp = c.getresponse(buffering=True)
-             else:
-                 rsp = c.getresponse()
-
-             data = rsp.read()
-             conns.append((time.time(), c))
-             return rsp.status, data.decode("utf-8")
-         except http_client.BadStatusLine:
-             if self.ar.cxp > 4:
-                 t = "\nWARNING: --cxp probably too high; reducing from %d to 4"
-                 print(t % (self.ar.cxp,))
-                 self.ar.cxp = 4
-             c.close()
-             raise
-         except:
-             c.close()
-             raise
-
-
- MJ = "application/json"
- MO = "application/octet-stream"
- CLEN = "Content-Length"
-
- web = None  # type: HCli
-
- links = []  # type: list[str]
- linkmtx = threading.Lock()
- linkfile = None
-
-
- class File(object):
-     """an up2k upload task; represents a single file"""
-
-     def __init__(self, top, rel, size, lmod):
-         self.top = top  # type: bytes
-         self.rel = rel.replace(b"\\", b"/")  # type: bytes
-         self.size = size  # type: int
-         self.lmod = lmod  # type: float
-
-         self.abs = os.path.join(top, rel)  # type: bytes
-         self.name = self.rel.split(b"/")[-1].decode("utf-8", WTF8)  # type: str
-
-         # set by get_hashlist
-         self.cids = []  # type: list[tuple[str, int, int]]  # [ hash, ofs, sz ]
-         self.kchunks = {}  # type: dict[str, tuple[int, int]]  # hash: [ ofs, sz ]
-         self.t_hash = 0.0  # type: float
-
-         # set by handshake
-         self.recheck = False  # duplicate; redo handshake after all files done
-         self.ucids = []  # type: list[str]  # chunks which need to be uploaded
-         self.wark = ""  # type: str
-         self.url = ""  # type: str
-         self.nhs = 0  # type: int
-
-         # set by upload
-         self.t0_up = 0.0  # type: float
-         self.t1_up = 0.0  # type: float
-         self.nojoin = 0  # type: int
-         self.up_b = 0  # type: int
-         self.up_c = 0  # type: int
-         self.cd = 0  # type: int
-
-
- class FileSlice(object):
-     """file-like object providing a fixed window into a file"""
-
-     def __init__(self, file, cids):
-         # type: (File, str) -> None
-
-         self.file = file
-         self.cids = cids
-
-         self.car, tlen = file.kchunks[cids[0]]
-         for cid in cids[1:]:
-             ofs, clen = file.kchunks[cid]
-             if ofs != self.car + tlen:
-                 raise Exception(9)
-             tlen += clen
-
-         self.len = self.tlen = tlen
-         self.cdr = self.car + self.len
-         self.ofs = 0  # type: int
-
-         self.f = None
-         self.seek = self._seek0
-         self.read = self._read0
-
-     def subchunk(self, maxsz, nth):
-         if self.tlen <= maxsz:
-             return -1
-
-         if not nth:
-             self.car0 = self.car
-             self.cdr0 = self.cdr
-
-         self.car = self.car0 + maxsz * nth
-         if self.car >= self.cdr0:
-             return -2
-
-         self.cdr = self.car + min(self.cdr0 - self.car, maxsz)
-         self.len = self.cdr - self.car
-         self.seek(0)
-         return nth
-
-     def unsub(self):
-         self.car = self.car0
-         self.cdr = self.cdr0
-         self.len = self.tlen
-
-     def _open(self):
-         self.seek = self._seek
-         self.read = self._read
-
-         self.f = open(self.file.abs, "rb", 512 * 1024)
-         self.f.seek(self.car)
-
-         # https://stackoverflow.com/questions/4359495/what-is-exactly-a-file-like-object-in-python
-         # IOBase, RawIOBase, BufferedIOBase
-         funs = "close closed __enter__ __exit__ __iter__ isatty __next__ readable seekable writable"
-         try:
-             for fun in funs.split():
-                 setattr(self, fun, getattr(self.f, fun))
-         except:
-             pass  # py27 probably
-
-     def close(self, *a, **ka):
-         return  # until _open
-
-     def tell(self):
-         return self.ofs
-
-     def _seek(self, ofs, wh=0):
-
-         if wh == 1:
-             ofs = self.ofs + ofs
-         elif wh == 2:
-             ofs = self.len + ofs  # provided ofs is negative
-
-         if ofs < 0:
-             ofs = 0
-         elif ofs >= self.len:
-             ofs = self.len - 1
-
-         self.ofs = ofs
-         self.f.seek(self.car + ofs)
-
-     def _read(self, sz):
-
-         sz = min(sz, self.len - self.ofs)
-         ret = self.f.read(sz)
-         self.ofs += len(ret)
-         return ret
-
-     def _seek0(self, ofs, wh=0):
-         self._open()
-         return self.seek(ofs, wh)
-
-     def _read0(self, sz):
-         self._open()
-         return self.read(sz)
-
-
- class MTHash(object):
-     def __init__(self, cores):
-         self.f = None
-         self.sz = 0
-         self.csz = 0
-         self.omutex = threading.Lock()
-         self.imutex = threading.Lock()
-         self.work_q = Queue()
-         self.done_q = Queue()
-         self.thrs = []
-         for _ in range(cores):
-             self.thrs.append(Daemon(self.worker))
-
-     def hash(self, f, fsz, chunksz, pcb=None, pcb_opaque=None):
-         with self.omutex:
-             self.f = f
-             self.sz = fsz
-             self.csz = chunksz
-
-             chunks = {}
-             nchunks = int(math.ceil(fsz / chunksz))
-             for nch in range(nchunks):
-                 self.work_q.put(nch)
-
-             ex = ""
-             for nch in range(nchunks):
-                 qe = self.done_q.get()
-                 try:
-                     nch, dig, ofs, csz = qe
-                     chunks[nch] = [dig, ofs, csz]
-                 except:
-                     ex = ex or qe
-
-                 if pcb:
-                     pcb(pcb_opaque, chunksz * nch)
-
-             if ex:
-                 raise Exception(ex)
-
-             ret = []
-             for n in range(nchunks):
-                 ret.append(chunks[n])
-
-             self.f = None
-             self.csz = 0
-             self.sz = 0
-             return ret
-
-     def worker(self):
-         while True:
-             ofs = self.work_q.get()
-             try:
-                 v = self.hash_at(ofs)
-             except Exception as ex:
-                 v = str(ex)
-
-             self.done_q.put(v)
-
-     def hash_at(self, nch):
-         f = self.f
-         assert f
-         ofs = ofs0 = nch * self.csz
-         hashobj = hashlib.sha512()
-         chunk_sz = chunk_rem = min(self.csz, self.sz - ofs)
-         while chunk_rem > 0:
-             with self.imutex:
-                 f.seek(ofs)
-                 buf = f.read(min(chunk_rem, 1024 * 1024 * 12))
-
-             if not buf:
-                 raise Exception("EOF at " + str(ofs))
-
-             hashobj.update(buf)
-             chunk_rem -= len(buf)
-             ofs += len(buf)
-
-         digest = ub64enc(hashobj.digest()[:33]).decode("utf-8")
-         return nch, digest, ofs0, chunk_sz
-
-
- _print = print
-
-
- def safe_print(*a, **ka):
-     ka["end"] = ""
-     zs = " ".join([unicode(x) for x in a])
-     _print(zs + "\n", **ka)
-
-
- def eprint(*a, **ka):
-     ka["file"] = sys.stderr
-     ka["end"] = ""
-     if not PY2:
-         ka["flush"] = True
-
-     _print(*a, **ka)
-     if PY2 or not VT100:
-         sys.stderr.flush()
-
-
- def flushing_print(*a, **ka):
-     try:
-         safe_print(*a, **ka)
-     except:
-         v = " ".join(str(x) for x in a)
-         v = v.encode("ascii", "replace").decode("ascii")
-         safe_print(v, **ka)
-
-     if "flush" not in ka:
-         sys.stdout.flush()
-
-
- print = safe_print if VT100 else flushing_print
-
-
- def termsize():
-     env = os.environ
-
-     def ioctl_GWINSZ(fd):
-         try:
-             import fcntl
-             import struct
-             import termios
-
-             r = struct.unpack(b"hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, b"AAAA"))
-             return r[::-1]
-         except:
-             return None
-
-     cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
-     if not cr:
-         try:
-             fd = os.open(os.ctermid(), os.O_RDONLY)
-             cr = ioctl_GWINSZ(fd)
-             os.close(fd)
-         except:
-             pass
-
-     try:
-         return cr or (int(env["COLUMNS"]), int(env["LINES"]))
-     except:
-         return 80, 25
-
-
- class CTermsize(object):
-     def __init__(self):
-         self.ev = False
-         self.margin = None
-         self.g = None
-         self.w, self.h = termsize()
-
-         try:
-             signal.signal(signal.SIGWINCH, self.ev_sig)
-         except:
-             return
-
-         Daemon(self.worker)
-
-     def worker(self):
-         while True:
-             time.sleep(0.5)
-             if not self.ev:
-                 continue
-
-             self.ev = False
-             self.w, self.h = termsize()
-
-             if self.margin is not None:
-                 self.scroll_region(self.margin)
-
-     def ev_sig(self, *a, **ka):
-         self.ev = True
-
-     def scroll_region(self, margin):
-         self.margin = margin
-         if margin is None:
-             self.g = None
-             eprint("\033[s\033[r\033[u")
-         else:
-             self.g = 1 + self.h - margin
-             t = "%s\033[%dA" % ("\n" * margin, margin)
-             eprint("%s\033[s\033[1;%dr\033[u" % (t, self.g - 1))
-
-
- ss = CTermsize()
-
-
- def undns(url):
-     usp = urlsplit(url)
-     hn = usp.hostname
-     gai = None
-     eprint("resolving host [%s] ..." % (hn,))
-     try:
-         gai = socket.getaddrinfo(hn, None)
-         hn = gai[0][4][0]
-     except KeyboardInterrupt:
-         raise
-     except:
-         t = "\n\033[31mfailed to resolve upload destination host;\033[0m\ngai=%r\n"
-         eprint(t % (gai,))
-         raise
-
-     if usp.port:
-         hn = "%s:%s" % (hn, usp.port)
-     if usp.username or usp.password:
-         hn = "%s:%s@%s" % (usp.username, usp.password, hn)
-
-     usp = usp._replace(netloc=hn)
-     url = urlunsplit(usp)
-     eprint(" %s\n" % (url,))
-     return url
-
-
- def _scd(err, top):
-     """non-recursive listing of directory contents, along with stat() info"""
-     top_ = os.path.join(top, b"")
-     with os.scandir(top) as dh:
-         for fh in dh:
-             abspath = top_ + fh.name
-             try:
-                 yield [abspath, fh.stat()]
-             except Exception as ex:
-                 err.append((abspath, str(ex)))
-
-
- def _lsd(err, top):
-     """non-recursive listing of directory contents, along with stat() info"""
-     top_ = os.path.join(top, b"")
-     for name in os.listdir(top):
-         abspath = top_ + name
-         try:
-             yield [abspath, os.stat(abspath)]
-         except Exception as ex:
-             err.append((abspath, str(ex)))
-
-
- if hasattr(os, "scandir") and sys.version_info > (3, 6):
-     statdir = _scd
- else:
-     statdir = _lsd
-
-
- def walkdir(err, top, excl, seen):
-     """recursive statdir"""
-     atop = os.path.abspath(os.path.realpath(top))
-     if atop in seen:
-         err.append((top, "recursive-symlink"))
-         return
-
-     seen = seen[:] + [atop]
-     for ap, inf in sorted(statdir(err, top)):
-         if excl.match(ap):
-             continue
-         if stat.S_ISDIR(inf.st_mode):
-             yield ap, inf
-             try:
-                 for x in walkdir(err, ap, excl, seen):
-                     yield x
-             except Exception as ex:
-                 err.append((ap, str(ex)))
-         elif stat.S_ISREG(inf.st_mode):
-             yield ap, inf
-         else:
-             err.append((ap, "irregular filetype 0%o" % (inf.st_mode,)))
-
-
- def walkdirs(err, tops, excl):
-     """recursive statdir for a list of tops, yields [top, relpath, stat]"""
-     sep = "{0}".format(os.sep).encode("ascii")
-     if not VT100:
-         excl = excl.replace("/", r"\\")
-         za = []
-         for td in tops:
-             try:
-                 ap = os.path.abspath(os.path.realpath(td))
-                 if td[-1:] in (b"\\", b"/"):
-                     ap += sep
-             except:
-                 # maybe cpython #88013 (ok)
-                 ap = td
-
-             za.append(ap)
-
-         za = [x if x.startswith(b"\\\\") else b"\\\\?\\" + x for x in za]
-         za = [x.replace(b"/", b"\\") for x in za]
-         tops = za
-
-     ptn = re.compile(excl.encode("utf-8") or b"\n", re.I)
-
-     for top in tops:
-         isdir = os.path.isdir(top)
-         if top[-1:] == sep:
-             stop = top.rstrip(sep)
-             yield stop, b"", os.stat(stop)
-         else:
-             stop, dn = os.path.split(top)
-             if isdir:
-                 yield stop, dn, os.stat(stop)
-
-         if isdir:
-             for ap, inf in walkdir(err, top, ptn, []):
-                 yield stop, ap[len(stop) :].lstrip(sep), inf
-         else:
-             d, n = top.rsplit(sep, 1)
-             yield d or b"/", n, os.stat(top)
-
-
- # mostly from copyparty/util.py
- def quotep(btxt):
-     # type: (bytes) -> bytes
-     quot1 = quote(btxt, safe=b"/")
-     if not PY2:
-         quot1 = quot1.encode("ascii")
-
-     return quot1.replace(b" ", b"%20")  # type: ignore
-
-
- # from copyparty/util.py
- def humansize(sz, terse=False):
-     """picks a sensible unit for the given extent"""
-     for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
-         if sz < 1024:
-             break
-
-         sz /= 1024.0
-
-     ret = " ".join([str(sz)[:4].rstrip("."), unit])
-
-     if not terse:
-         return ret
-
-     return ret.replace("iB", "").replace(" ", "")
-
-
- # from copyparty/up2k.py
- def up2k_chunksize(filesize):
-     """gives The correct chunksize for up2k hashing"""
-     chunksize = 1024 * 1024
-     stepsize = 512 * 1024
-     while True:
-         for mul in [1, 2]:
-             nchunks = math.ceil(filesize * 1.0 / chunksize)
-             if nchunks <= 256 or (chunksize >= 32 * 1024 * 1024 and nchunks <= 4096):
-                 return chunksize
-
-             chunksize += stepsize
-             stepsize *= mul
-
-
- # mostly from copyparty/up2k.py
- def get_hashlist(file, pcb, mth):
-     # type: (File, Any, Any) -> None
-     """generates the up2k hashlist from file contents, inserts it into `file`"""
-
-     chunk_sz = up2k_chunksize(file.size)
-     file_rem = file.size
-     file_ofs = 0
-     ret = []
-     with open(file.abs, "rb", 512 * 1024) as f:
-         t0 = time.time()
-
-         if mth and file.size >= 1024 * 512:
-             ret = mth.hash(f, file.size, chunk_sz, pcb, file)
-             file_rem = 0
-
-         while file_rem > 0:
-             # same as `hash_at` except for `imutex` / bufsz
-             hashobj = hashlib.sha512()
-             chunk_sz = chunk_rem = min(chunk_sz, file_rem)
-             while chunk_rem > 0:
-                 buf = f.read(min(chunk_rem, 64 * 1024))
-                 if not buf:
-                     raise Exception("EOF at " + str(f.tell()))
-
-                 hashobj.update(buf)
-                 chunk_rem -= len(buf)
-
-             digest = ub64enc(hashobj.digest()[:33]).decode("utf-8")
-
-             ret.append([digest, file_ofs, chunk_sz])
-             file_ofs += chunk_sz
-             file_rem -= chunk_sz
-
-             if pcb:
-                 pcb(file, file_ofs)
-
-     file.t_hash = time.time() - t0
-     file.cids = ret
-     file.kchunks = {}
-     for k, v1, v2 in ret:
-         if k not in file.kchunks:
-             file.kchunks[k] = [v1, v2]
-
-
- def printlink(ar, purl, name, fk):
-     if not name:
-         url = purl  # srch
-     else:
-         name = quotep(name.encode("utf-8", WTF8)).decode("utf-8")
-         if fk:
-             url = "%s%s?k=%s" % (purl, name, fk)
-         else:
-             url = "%s%s" % (purl, name)
-
-     url = "%s/%s" % (ar.burl, url.lstrip("/"))
-
-     with linkmtx:
-         if ar.u:
-             links.append(url)
-         if ar.ud:
-             print(url)
-         if linkfile:
-             zs = "%s\n" % (url,)
-             zb = zs.encode("utf-8", "replace")
-             linkfile.write(zb)
-
-
- def handshake(ar, file, search):
-     # type: (argparse.Namespace, File, bool) -> tuple[list[str], bool]
-     """
-     performs a handshake with the server; reply is:
-       if search, a list of search results
-       otherwise, a list of chunks to upload
-     """
-
-     req = {
-         "hash": [x[0] for x in file.cids],
-         "name": file.name,
-         "lmod": file.lmod,
-         "size": file.size,
-     }
-     if search:
-         req["srch"] = 1
-     else:
-         if ar.touch:
-             req["umod"] = True
-         if ar.owo:
-             req["replace"] = "mt"
-         elif ar.ow:
-             req["replace"] = True
-
-     file.recheck = False
-     if file.url:
-         url = file.url
-     else:
-         if b"/" in file.rel:
-             url = quotep(file.rel.rsplit(b"/", 1)[0]).decode("utf-8")
-         else:
-             url = ""
-         url = ar.vtop + url
-
-     while True:
-         sc = 600
-         txt = ""
-         t0 = time.time()
-         try:
-             zs = json.dumps(req, separators=(",\n", ": "))
-             sc, txt = web.req("POST", url, {}, zs.encode("utf-8"), MJ)
-             if sc < 400:
-                 break
-
-             raise Exception("http %d: %s" % (sc, txt))
-
-         except Exception as ex:
-             em = str(ex).split("SSLError(")[-1].split("\nURL: ")[0].strip()
-
-             if (
-                 sc == 422
-                 or "<pre>partial upload exists at a different" in txt
-                 or "<pre>source file busy; please try again" in txt
-             ):
-                 file.recheck = True
-                 return [], False
-             elif sc == 409 or "<pre>upload rejected, file already exists" in txt:
-                 return [], False
-             elif sc == 403:
-                 print("\nERROR: login required, or wrong password:\n%s" % (txt,))
-                 raise BadAuth()
-
-             t = "handshake failed, retrying: %s\n t0=%.3f t1=%.3f td=%.3f\n %s\n\n"
-             now = time.time()
-             eprint(t % (file.name, t0, now, now - t0, em))
-             time.sleep(ar.cd)
-
-     try:
-         r = json.loads(txt)
-     except:
-         raise Exception(txt)
-
-     if search:
-         if ar.uon and r["hits"]:
-             printlink(ar, r["hits"][0]["rp"], "", "")
-         return r["hits"], False
-
-     file.url = quotep(r["purl"].encode("utf-8", WTF8)).decode("utf-8")
-     file.name = r["name"]
-     file.wark = r["wark"]
-
-     if ar.uon and not r["hash"]:
-         printlink(ar, file.url, r["name"], r.get("fk"))
-
-     return r["hash"], r["sprs"]
-
-
- def upload(fsl, stats, maxsz):
-     # type: (FileSlice, str, int) -> None
-     """upload a range of file data, defined by one or more `cid` (chunk-hash)"""
-
-     ctxt = fsl.cids[0]
-     if len(fsl.cids) > 1:
-         n = 192 // len(fsl.cids)
-         n = 9 if n > 9 else 2 if n < 2 else n
-         zsl = [zs[:n] for zs in fsl.cids[1:]]
-         ctxt += ",%d,%s" % (n, "".join(zsl))
-
-     headers = {
-         "X-Up2k-Hash": ctxt,
-         "X-Up2k-Wark": fsl.file.wark,
-     }
-
-     if stats:
-         headers["X-Up2k-Stat"] = stats
-
-     nsub = 0
-     try:
-         while nsub != -1:
-             nsub = fsl.subchunk(maxsz, nsub)
-             if nsub == -2:
-                 return
-             if nsub >= 0:
-                 headers["X-Up2k-Subc"] = str(maxsz * nsub)
-                 headers.pop(CLEN, None)
-                 nsub += 1
-
-             sc, txt = web.req("POST", fsl.file.url, headers, fsl, MO)
-
-             if sc == 400:
-                 if (
-                     "already being written" in txt
-                     or "already got that" in txt
-                     or "only sibling chunks" in txt
-                 ):
-                     fsl.file.nojoin = 1
-
-             if sc >= 400:
-                 raise Exception("http %s: %s" % (sc, txt))
-     finally:
-         if fsl.f:
-             fsl.f.close()
-         if nsub != -1:
-             fsl.unsub()
-
-
- class Ctl(object):
-     """
-     the coordinator which runs everything in parallel
-     (hashing, handshakes, uploads)
-     """
-
-     def _scan(self):
-         ar = self.ar
-         eprint("\nscanning %d locations\n" % (len(ar.files),))
-         nfiles = 0
-         nbytes = 0
-         err = []
-         for _, _, inf in walkdirs(err, ar.files, ar.x):
-             if stat.S_ISDIR(inf.st_mode):
-                 continue
-
-             nfiles += 1
-             nbytes += inf.st_size
-
-         if err:
-             eprint("\n# failed to access %d paths:\n" % (len(err),))
-             for ap, msg in err:
-                 if ar.v:
-                     eprint("%s\n `-%s\n\n" % (ap.decode("utf-8", "replace"), msg))
-                 else:
-                     eprint(ap.decode("utf-8", "replace") + "\n")
-
-             eprint("^ failed to access those %d paths ^\n\n" % (len(err),))
-
-             if not ar.v:
-                 eprint("hint: set -v for detailed error messages\n")
-
-             if not ar.ok:
-                 eprint("hint: aborting because --ok is not set\n")
-                 return
-
-         eprint("found %d files, %s\n\n" % (nfiles, humansize(nbytes)))
-         return nfiles, nbytes
-
-     def __init__(self, ar, stats=None):
-         self.ok = False
-         self.panik = 0
-         self.errs = 0
-         self.ar = ar
-         self.stats = stats or self._scan()
-         if not self.stats:
-             return
-
-         self.nfiles, self.nbytes = self.stats
-         self.filegen = walkdirs([], ar.files, ar.x)
-         self.recheck = []  # type: list[File]
-
-         if ar.safe:
-             self._safe()
-         else:
-             self.at_hash = 0.0
-             self.at_up = 0.0
-             self.at_upr = 0.0
-             self.hash_f = 0
-             self.hash_c = 0
-             self.hash_b = 0
-             self.up_f = 0
-             self.up_c = 0
-             self.up_b = 0  # num bytes handled
-             self.up_br = 0  # num bytes actually transferred
-             self.uploader_busy = 0
-             self.serialized = False
-
-             self.t0 = time.time()
-             self.t0_up = None
-             self.spd = None
-             self.eta = "99:99:99"
-
-             self.mutex = threading.Lock()
-             self.exit_cond = threading.Condition()
-             self.uploader_alive = ar.j
-             self.handshaker_alive = ar.j
-             self.q_handshake = HSQueue()  # type: Queue[File]
-             self.q_upload = Queue()  # type: Queue[FileSlice]
-
-             self.st_hash = [None, "(idle, starting...)"]  # type: tuple[File, int]
-             self.st_up = [None, "(idle, starting...)"]  # type: tuple[File, int]
-
-             self.mth = MTHash(ar.J) if ar.J > 1 else None
-
-             self._fancy()
-
-         self.ok = not self.errs
-
-     def _safe(self):
-         """minimal basic slow boring fallback codepath"""
-         search = self.ar.s
-         nf = 0
-         for top, rel, inf in self.filegen:
-             if stat.S_ISDIR(inf.st_mode) or not rel:
-                 continue
-
-             nf += 1
-             file = File(top, rel, inf.st_size, inf.st_mtime)
-             upath = file.abs.decode("utf-8", "replace")
-
-             print("%d %s\n hash..." % (self.nfiles - nf, upath))
-             get_hashlist(file, None, None)
-
-             while True:
-                 print(" hs...")
-                 try:
-                     hs, _ = handshake(self.ar, file, search)
-                 except BadAuth:
-                     sys.exit(1)
-
-                 if search:
-                     if hs:
-                         for hit in hs:
-                             print(" found: %s/%s" % (self.ar.burl, hit["rp"]))
-                     else:
-                         print(" NOT found")
-                     break
-
-                 file.ucids = hs
-                 if not hs:
-                     break
-
-                 print("%d %s" % (self.nfiles - nf, upath))
-                 ncs = len(hs)
-                 for nc, cid in enumerate(hs):
-                     print(" %d up %s" % (ncs - nc, cid))
-                     stats = "%d/0/0/%d" % (nf, self.nfiles - nf)
-                     fslice = FileSlice(file, [cid])
-                     upload(fslice, stats, self.ar.szm)
-
-             print(" ok!")
-             if file.recheck:
-                 self.recheck.append(file)
-
-         if not self.recheck:
-             return
-
-         eprint("finalizing %d duplicate files\n" % (len(self.recheck),))
-         for file in self.recheck:
-             handshake(self.ar, file, False)
-
-     def _fancy(self):
-         atexit.register(self.cleanup_vt100)
-         if VT100 and not self.ar.ns:
-             ss.scroll_region(3)
-
-         Daemon(self.hasher)
-         for _ in range(self.ar.j):
-             Daemon(self.handshaker)
-             Daemon(self.uploader)
-
-         last_sp = -1
-         while True:
-             with self.exit_cond:
-                 self.exit_cond.wait(0.07)
-             if self.panik:
-                 sys.exit(1)
-             with self.mutex:
-                 if not self.handshaker_alive and not self.uploader_alive:
-                     break
-                 st_hash = self.st_hash[:]
-                 st_up = self.st_up[:]
-
-             if VT100 and not self.ar.ns:
-                 maxlen = ss.w - len(str(self.nfiles)) - 14
-                 txt = "\033[s\033[%dH" % (ss.g,)
-                 for y, k, st, f in [
-                     [0, "hash", st_hash, self.hash_f],
-                     [1, "send", st_up, self.up_f],
-                 ]:
-                     txt += "\033[%dH%s:" % (ss.g + y, k)
-                     file, arg = st
-                     if not file:
-                         txt += " %s\033[K" % (arg,)
-                     else:
-                         if y:
-                             p = 100 * file.up_b / file.size
-                         else:
-                             p = 100 * arg / file.size
-
-                         name = file.abs.decode("utf-8", "replace")[-maxlen:]
-                         if "/" in name:
-                             name = "\033[36m%s\033[0m/%s" % tuple(name.rsplit("/", 1))
-
-                         txt += "%6.1f%% %d %s\033[K" % (p, self.nfiles - f, name)
-
-                 txt += "\033[%dH " % (ss.g + 2,)
-             else:
-                 txt = " "
-
-             if not VT100:  # OSC9;4 (taskbar-progress)
-                 sp = int(self.up_b * 100 / self.nbytes) or 1
-                 if last_sp != sp:
-                     last_sp = sp
-                     txt += "\033]9;4;1;%d\033\\" % (sp,)
-
-             if not self.up_br:
-                 spd = self.hash_b / ((time.time() - self.t0) or 1)
-                 eta = (self.nbytes - self.hash_b) / (spd or 1)
-             else:
-                 spd = self.up_br / ((time.time() - self.t0_up) or 1)
-                 spd = self.spd = (self.spd or spd) * 0.9 + spd * 0.1
-                 eta = (self.nbytes - self.up_b) / (spd or 1)
-
-             spd = humansize(spd)
-             self.eta = str(datetime.timedelta(seconds=int(eta)))
-             if eta > 2591999:
-                 self.eta = self.eta.split(",")[0]  # truncate HH:MM:SS
-             sleft = humansize(self.nbytes - self.up_b)
-             nleft = self.nfiles - self.up_f
-             tail = "\033[K\033[u" if VT100 and not self.ar.ns else "\r"
-
-             t = "%s eta @ %s/s, %s, %d# left" % (self.eta, spd, sleft, nleft)
-             if not self.hash_b:
-                 t = " now hashing..."
-             eprint(txt + "\033]0;{0}\033\\\r{0}{1}".format(t, tail))
-
-         if self.ar.wlist:
-             self.at_hash = time.time() - self.t0
-
-         if self.hash_b and self.at_hash:
-             spd = humansize(self.hash_b / self.at_hash)
-             eprint("\nhasher: %.2f sec, %s/s\n" % (self.at_hash, spd))
-         if self.up_br and self.at_up:
-             spd = humansize(self.up_br / self.at_up)
-             eprint("upload: %.2f sec, %s/s\n" % (self.at_up, spd))
-
-         if not self.recheck:
-             return
-
-         eprint("finalizing %d duplicate files\n" % (len(self.recheck),))
-         for file in self.recheck:
-             handshake(self.ar, file, False)
-
-     def cleanup_vt100(self):
-         if VT100:
-             ss.scroll_region(None)
-         else:
-             eprint("\033]9;4;0\033\\")
-         eprint("\033[J\033]0;\033\\")
-
-     def cb_hasher(self, file, ofs):
-         self.st_hash = [file, ofs]
-
-     def hasher(self):
-         ptn = re.compile(self.ar.x.encode("utf-8"), re.I) if self.ar.x else None
-         sep = "{0}".format(os.sep).encode("ascii")
-         prd = None
-         ls = {}
-         for top, rel, inf in self.filegen:
-             isdir = stat.S_ISDIR(inf.st_mode)
-             if self.ar.z or self.ar.drd:
-                 rd = rel if isdir else os.path.dirname(rel)
-                 srd = rd.decode("utf-8", "replace").replace("\\", "/").rstrip("/")
-                 if srd:
-                     srd += "/"
-                 if prd != rd:
-                     prd = rd
-                     ls = {}
-                     try:
-                         print(" ls ~{0}".format(srd))
-                         zt = (
-                             self.ar.vtop,
-                             quotep(rd.replace(b"\\", b"/")).decode("utf-8"),
-                         )
-                         sc, txt = web.req("GET", "%s%s?ls&lt&dots" % zt, {})
-                         if sc >= 400:
-                             raise Exception("http %s" % (sc,))
-
-                         j = json.loads(txt)
-                         for f in j["dirs"] + j["files"]:
-                             rfn = f["href"].split("?")[0].rstrip("/")
-                             ls[unquote(rfn.encode("utf-8", WTF8))] = f
-                     except Exception as ex:
-                         print(" mkdir ~{0} ({1})".format(srd, ex))
-
-                     if self.ar.drd:
-                         dp = os.path.join(top, rd)
-                         try:
-                             lnodes = set(os.listdir(dp))
-                         except:
-                             lnodes = list(ls)  # fs eio; don't delete
-                         if ptn:
-                             zs = dp.replace(sep, b"/").rstrip(b"/") + b"/"
-                             zls = [zs + x for x in lnodes]
-                             zls = [x for x in zls if not ptn.match(x)]
-                             lnodes = [x.split(b"/")[-1] for x in zls]
-                         bnames = [x for x in ls if x not in lnodes and x != b".hist"]
-                         vpath = self.ar.url.split("://")[-1].split("/", 1)[-1]
-                         names = [x.decode("utf-8", WTF8) for x in bnames]
-                         locs = [vpath + srd + x for x in names]
-                         while locs:
-                             req = locs
-                             while req:
-                                 print("DELETING ~%s#%s" % (srd, len(req)))
-                                 body = json.dumps(req).encode("utf-8")
-                                 sc, txt = web.req(
-                                     "POST", self.ar.url + "?delete", {}, body, MJ
-                                 )
-                                 if sc == 413 and "json 2big" in txt:
-                                     print(" (delete request too big; slicing...)")
-                                     req = req[: len(req) // 2]
-                                     continue
-                                 elif sc >= 400:
-                                     t = "delete request failed: %s %s"
-                                     raise Exception(t % (sc, txt))
-                                 break
-                             locs = locs[len(req) :]
-
-             if isdir:
-                 continue
-
-             if self.ar.z:
-                 rf = ls.get(os.path.basename(rel), None)
-                 if rf and rf["sz"] == inf.st_size and abs(rf["ts"] - inf.st_mtime) <= 2:
-                     self.nfiles -= 1
-                     self.nbytes -= inf.st_size
-                     continue
-
-             file = File(top, rel, inf.st_size, inf.st_mtime)
-             while True:
-                 with self.mutex:
-                     if (
-                         self.hash_f - self.up_f == 1
-                         or (
-                             self.hash_b - self.up_b < 1024 * 1024 * 1024
-                             and self.hash_c - self.up_c < 512
-                         )
-                     ) and (
-                         not self.ar.nh
-                         or (
-                             self.q_upload.empty()
-                             and self.q_handshake.empty()
-                             and not self.uploader_busy
-                         )
-                     ):
-                         break
-
-                 time.sleep(0.05)
-
-             get_hashlist(file, self.cb_hasher, self.mth)
-             with self.mutex:
-                 self.hash_f += 1
-                 self.hash_c += len(file.cids)
-                 self.hash_b += file.size
-                 if self.ar.wlist:
-                     self.up_f = self.hash_f
-                     self.up_c = self.hash_c
-                     self.up_b = self.hash_b
-
-             if self.ar.wlist:
-                 vp = file.rel.decode("utf-8")
-                 if self.ar.chs:
-                     zsl = [
-                         "%s %d %d" % (zsii[0], n, zsii[1])
-                         for n, zsii in enumerate(file.cids)
-                     ]
-                     print("chs: %s\n%s" % (vp, "\n".join(zsl)))
-                 zsl = [self.ar.wsalt, str(file.size)] + [x[0] for x in file.cids]
-                 zb = hashlib.sha512("\n".join(zsl).encode("utf-8")).digest()[:33]
-                 wark = ub64enc(zb).decode("utf-8")
-                 if self.ar.jw:
-                     print("%s %s" % (wark, vp))
-                 else:
-                     zd = datetime.datetime.fromtimestamp(max(0, file.lmod), UTC)
-                     dt = "%04d-%02d-%02d %02d:%02d:%02d" % (
-                         zd.year,
-                         zd.month,
-                         zd.day,
-                         zd.hour,
-                         zd.minute,
-                         zd.second,
-                     )
-                     print("%s %12d %s %s" % (dt, file.size, wark, vp))
-                 continue
-
-             self.q_handshake.put(file)
-
-         self.st_hash = [None, "(finished)"]
-         self._check_if_done()
-
-     def _check_if_done(self):
-         with self.mutex:
-             if self.nfiles - self.up_f:
-                 return
-             for _ in range(self.ar.j):
-                 self.q_handshake.put(None)
-
-     def handshaker(self):
-         search = self.ar.s
-         while True:
-             file = self.q_handshake.get()
-             if not file:
-                 with self.mutex:
-                     self.handshaker_alive -= 1
-                 self.q_upload.put(None)
-                 return
-
-             chunksz = up2k_chunksize(file.size)
-             upath = file.abs.decode("utf-8", "replace")
-             if not VT100:
-                 upath = upath.lstrip("\\?")
-
-             file.nhs += 1
-             if file.nhs > 32:
-                 print("ERROR: giving up on file %s" % (upath))
-                 self.errs += 1
-                 continue
-
-             while time.time() < file.cd:
-                 time.sleep(0.1)
-
-             try:
-                 hs, sprs = handshake(self.ar, file, search)
-             except BadAuth:
-                 self.panik = 1
-                 break
-
-             if search:
-                 if hs:
-                     for hit in hs:
-                         print("found: %s\n %s/%s" % (upath, self.ar.burl, hit["rp"]))
-                 else:
-                     print("NOT found: {0}".format(upath))
-
-                 with self.mutex:
-                     self.up_f += 1
-                     self.up_c += len(file.cids)
-                     self.up_b += file.size
-
-                 self._check_if_done()
-                 continue
-
-             if file.recheck:
-                 self.recheck.append(file)
-
-             with self.mutex:
-                 if hs and not sprs and not self.serialized:
-                     t = "server filesystem does not support sparse files; serializing uploads\n"
-                     eprint(t)
-                     self.serialized = True
-                     for _ in range(self.ar.j - 1):
-                         self.q_upload.put(None)
-                 if not hs:
-                     # all chunks done
-                     self.up_f += 1
-                     self.up_c += len(file.cids) - file.up_c
-                     self.up_b += file.size - file.up_b
-
-                     if not file.recheck:
-                         self.up_done(file)
-
-                 if hs and file.up_c:
-                     # some chunks failed
-                     self.up_c -= len(hs)
-                     file.up_c -= len(hs)
-                     for cid in hs:
-                         sz = file.kchunks[cid][1]
-                         self.up_br -= sz
-                         self.up_b -= sz
-                         file.up_b -= sz
-
-                 if hs and not file.up_b:
-                     # first hs of this file; is this an upload resume?
-                     file.up_b = chunksz * max(0, len(file.kchunks) - len(hs))
-
-                 file.ucids = hs
-
-             if not hs:
-                 self.at_hash += file.t_hash
-
-                 if self.ar.spd:
-                     if VT100:
-                         c1 = "\033[36m"
-                         c2 = "\033[0m"
-                     else:
-                         c1 = c2 = ""
-
-                     spd_h = humansize(file.size / file.t_hash, True)
-                     if file.up_c:
-                         t_up = file.t1_up - file.t0_up
-                         spd_u = humansize(file.size / t_up, True)
-
-                         t = "uploaded %s %s(h:%.2fs,%s/s,up:%.2fs,%s/s)%s"
-                         print(t % (upath, c1, file.t_hash, spd_h, t_up, spd_u, c2))
-                     else:
-                         t = " found %s %s(%.2fs,%s/s)%s"
-                         print(t % (upath, c1, file.t_hash, spd_h, c2))
-                 else:
-                     kw = "uploaded" if file.up_c else " found"
-                     print("{0} {1}".format(kw, upath))
-
-                 self._check_if_done()
-                 continue
-
-             njoin = self.ar.sz // chunksz
-             cs = hs[:]
-             while cs:
-                 fsl = FileSlice(file, cs[:1])
-                 try:
-                     if file.nojoin:
-                         raise Exception()
-                     for n in range(2, min(len(cs), njoin + 1)):
-                         fsl = FileSlice(file, cs[:n])
-                 except:
-                     pass
-                 cs = cs[len(fsl.cids) :]
-                 self.q_upload.put(fsl)
-
-     def uploader(self):
-         while True:
-             fsl = self.q_upload.get()
-             if not fsl:
-                 done = False
-                 with self.mutex:
-                     self.uploader_alive -= 1
-                     if not self.uploader_alive:
-                         done = not self.handshaker_alive
-                         self.st_up = [None, "(finished)"]
-                 if done:
-                     with self.exit_cond:
-                         self.exit_cond.notify_all()
-                 return
-
-             file = fsl.file
-             cids = fsl.cids
-
-             with self.mutex:
-                 if not self.uploader_busy:
-                     self.at_upr = time.time()
-                 self.uploader_busy += 1
-                 if not file.t0_up:
-                     file.t0_up = time.time()
-                     if not self.t0_up:
-                         self.t0_up = file.t0_up
-
-             stats = "%d/%d/%d/%d %d/%d %s" % (
-                 self.up_f,
-                 len(self.recheck),
-                 self.uploader_busy,
-                 self.nfiles - self.up_f,
-                 self.nbytes // (1024 * 1024),
-                 (self.nbytes - self.up_b) // (1024 * 1024),
-                 self.eta,
-             )
-
-             try:
-                 upload(fsl, stats, self.ar.szm)
-             except Exception as ex:
-                 t = "upload failed, retrying: %s #%s+%d (%s)\n"
-                 eprint(t % (file.name, cids[0][:8], len(cids) - 1, ex))
-                 file.cd = time.time() + self.ar.cd
-                 # handshake will fix it
-
-             with self.mutex:
-                 sz = fsl.len
-                 file.ucids = [x for x in file.ucids if x not in cids]
-                 if not file.ucids:
-                     file.t1_up = time.time()
-                     self.q_handshake.put(file)
-
-                 self.st_up = [file, cids[0]]
-                 file.up_b += sz
-                 self.up_b += sz
-                 self.up_br += sz
-                 file.up_c += 1
-                 self.up_c += 1
-                 self.uploader_busy -= 1
-                 if not self.uploader_busy:
-                     self.at_up += time.time() - self.at_upr
-
-     def up_done(self, file):
-         if self.ar.dl:
-             os.unlink(file.abs)
-
-
- class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
-     pass
-
-
- def main():
-     global web, linkfile
-
-     time.strptime("19970815", "%Y%m%d")  # python#7980
-     "".encode("idna")  # python#29288
-     if not VT100:
-         os.system("rem")  # enables colors
-
-     cores = (os.cpu_count() if hasattr(os, "cpu_count") else 0) or 2
-     hcores = min(cores, 3)  # 4% faster than 4+ on py3.9 @ r5-4500U
-
-     ver = "{0}, v{1}".format(S_BUILD_DT, S_VERSION)
-     if "--version" in sys.argv:
-         print(ver)
-         return
-
-     sys.argv = [x for x in sys.argv if x != "--ws"]
-
-     # fmt: off
-     ap = app = argparse.ArgumentParser(formatter_class=APF, description="copyparty up2k uploader / filesearch tool " + ver, epilog="""
- NOTE: source file/folder selection uses rsync syntax, meaning that:
-   "foo" uploads the entire folder to URL/foo/
-   "foo/" uploads the CONTENTS of the folder into URL/
- NOTE: if server has --usernames enabled, then password is "username:password"
- """)
-
-     ap.add_argument("url", type=unicode, help="server url, including destination folder")
-     ap.add_argument("files", type=files_decoder, nargs="+", help="files and/or folders to process")
-     ap.add_argument("-v", action="store_true", help="verbose")
-     ap.add_argument("-a", metavar="PASSWD", help="password or $filepath")
-     ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
-     ap.add_argument("-x", type=unicode, metavar="REGEX", action="append", help="skip file if filesystem-abspath matches REGEX (option can be repeated), example: '.*/\\.hist/.*'")
-     ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
-     ap.add_argument("--touch", action="store_true", help="if last-modified timestamps differ, push local to server (need write+delete perms)")
-     ap.add_argument("--ow", action="store_true", help="overwrite existing files instead of autorenaming")
-     ap.add_argument("--owo", action="store_true", help="overwrite existing files if server-file is older")
-     ap.add_argument("--spd", action="store_true", help="print speeds for each file")
-     ap.add_argument("--version", action="store_true", help="show version and exit")
-
-     ap = app.add_argument_group("print links")
-     ap.add_argument("-u", action="store_true", help="print list of download-links after all uploads finished")
-     ap.add_argument("-ud", action="store_true", help="print download-link after each upload finishes")
-     ap.add_argument("-uf", type=unicode, metavar="PATH", help="print list of download-links to file")
-
-     ap = app.add_argument_group("compatibility")
-     ap.add_argument("--cls", action="store_true", help="clear screen before start")
-     ap.add_argument("--rh", type=int, metavar="TRIES", default=0, help="resolve server hostname before upload (good for buggy networks, but TLS certs will break)")
-
-     ap = app.add_argument_group("folder sync")
-     ap.add_argument("--dl", action="store_true", help="delete local files after uploading")
-     ap.add_argument("--dr", action="store_true", help="delete remote files which don't exist locally (implies --ow)")
-     ap.add_argument("--drd", action="store_true", help="delete remote files during upload instead of afterwards; reduces peak disk space usage, but will reupload instead of detecting renames")
-
-     ap = app.add_argument_group("file-ID calculator; enable with url '-' to list warks (file identifiers) instead of upload/search")
-     ap.add_argument("--wsalt", type=unicode, metavar="S", default="hunter2", help="salt to use when creating warks; must match server config")
-     ap.add_argument("--chs", action="store_true", help="verbose (print the hash/offset of each chunk in each file)")
-     ap.add_argument("--jw", action="store_true", help="just identifier+filepath, not mtime/size too")
-
-     ap = app.add_argument_group("performance tweaks")
-     ap.add_argument("-j", type=int, metavar="CONNS", default=2, help="parallel connections")
-     ap.add_argument("-J", type=int, metavar="CORES", default=hcores, help="num cpu-cores to use for hashing; set 0 or 1 for single-core hashing")
-     ap.add_argument("--sz", type=int, metavar="MiB", default=64, help="try to make each POST this big")
-     ap.add_argument("--szm", type=int, metavar="MiB", default=96, help="max size of each POST (default is cloudflare max)")
-     ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
-     ap.add_argument("-ns", action="store_true", help="no status panel (for slow consoles and macos)")
-     ap.add_argument("--cxp", type=float, metavar="SEC", default=57, help="assume http connections expired after SEConds")
-     ap.add_argument("--cd", type=float, metavar="SEC", default=5, help="delay before reattempting a failed handshake/upload")
-     ap.add_argument("--safe", action="store_true", help="use simple fallback approach")
-     ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)")
-
-     ap = app.add_argument_group("tls")
-     ap.add_argument("-te", metavar="PATH", help="path to ca.pem or cert.pem to expect/verify")
-     ap.add_argument("-td", action="store_true", help="disable certificate check")
-     # fmt: on
-
-     try:
-         ar = app.parse_args()
-     finally:
-         if EXE and not sys.argv[1:]:
-             eprint("*** hit enter to exit ***")
-             try:
-                 input()
-             except:
-                 pass
-
-     # msys2 doesn't uncygpath absolute paths with whitespace
-     if not VT100:
-         zsl = []
-         for fn in ar.files:
-             if re.search("^/[a-z]/", fn):
-                 fn = r"%s:\%s" % (fn[1:2], fn[3:])
-             zsl.append(fn.replace("/", "\\"))
-         ar.files = zsl
-
-     fok = []
-     fng = []
-     for fn in ar.files:
-         if os.path.exists(fn):
-             fok.append(fn)
-         elif VT100:
-             fng.append(fn)
-         else:
-             # windows leaves glob-expansion to the invoked process... okayyy let's get to work
-             from glob import glob
-
-             fns = glob(fn)
-             if fns:
-                 fok.extend(fns)
-             else:
-                 fng.append(fn)
-
-     if fng:
-         t = "some files/folders were not found:\n %s"
-         raise Exception(t % ("\n ".join(fng),))
-
-     ar.files = fok
-
-     if ar.drd:
-         ar.dr = True
-
-     if ar.dr:
-         ar.ow = True
-
-     ar.sz *= 1024 * 1024
-     ar.szm *= 1024 * 1024
-
-     ar.x = "|".join(ar.x or [])
-
-     setattr(ar, "wlist", ar.url == "-")
-     setattr(ar, "uon", ar.u or ar.ud or ar.uf)
-
-     if ar.uf:
-         linkfile = open(ar.uf, "wb")
-
-     for k in "dl dr drd wlist".split():
-         errs = []
-         if ar.safe and getattr(ar, k):
-             errs.append(k)
-
-         if errs:
-             raise Exception("--safe is incompatible with " + str(errs))
-
-     ar.files = [
-         os.path.abspath(os.path.realpath(x.encode("utf-8")))
-         + (x[-1:] if x[-1:] in ("\\", "/") else "").encode("utf-8")
-         for x in ar.files
-     ]
-
-     # urlsplit needs scheme;
-     zs = ar.url.rstrip("/") + "/"
-     if "://" not in zs:
-         zs = "http://" + zs
-     ar.url = zs
-
-     url = urlsplit(zs)
-     ar.burl = "%s://%s" % (url.scheme, url.netloc)
-     ar.vtop = url.path
-
-     if "https://" in ar.url.lower():
-         try:
-             import ssl
-             import zipfile
-         except:
-             t = "ERROR: https is not available for some reason; please use http"
-             print("\n\n %s\n\n" % (t,))
-             raise
-
-     if ar.a and ar.a.startswith("$"):
-         fn = ar.a[1:]
-         print("reading password from file [{0}]".format(fn))
-         with open(fn, "rb") as f:
-             ar.a = f.read().decode("utf-8").strip()
-
-     for n in range(ar.rh):
-         try:
-             ar.url = undns(ar.url)
-             break
-         except KeyboardInterrupt:
-             raise
-         except:
-             if n > ar.rh - 2:
-                 raise
-
-     if ar.cls:
-         eprint("\033[H\033[2J\033[3J", end="")
-
-     web = HCli(ar)
-     ctl = Ctl(ar)
-
-     if ar.dr and not ar.drd and ctl.ok:
-         print("\npass 2/2: delete")
-         ar.drd = True
-         ar.z = True
-         ctl = Ctl(ar, ctl.stats)
-
-     if links:
-         print()
-         print("\n".join(links))
-     if linkfile:
-         linkfile.close()
-
-     if ctl.errs:
-         print("WARNING: %d errors" % (ctl.errs))
-
-     sys.exit(0 if ctl.ok else 1)
-
-
- if __name__ == "__main__":
-     main()