copyparty 1.15.7__tar.gz → 1.15.9__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (125)
  1. {copyparty-1.15.7 → copyparty-1.15.9}/PKG-INFO +30 -3
  2. {copyparty-1.15.7 → copyparty-1.15.9}/README.md +29 -2
  3. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/__main__.py +11 -2
  4. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/__version__.py +2 -2
  5. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/authsrv.py +15 -3
  6. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/cfg.py +1 -0
  7. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/httpcli.py +201 -11
  8. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/httpsrv.py +2 -0
  9. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/metrics.py +1 -1
  10. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/u2idx.py +5 -3
  11. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/up2k.py +33 -26
  12. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/util.py +20 -10
  13. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/a/u2c.py +109 -37
  14. copyparty-1.15.9/copyparty/web/baguettebox.js.gz +0 -0
  15. copyparty-1.15.9/copyparty/web/browser.js.gz +0 -0
  16. copyparty-1.15.9/copyparty/web/ui.css.gz +0 -0
  17. copyparty-1.15.9/copyparty/web/up2k.js.gz +0 -0
  18. copyparty-1.15.9/copyparty/web/util.js.gz +0 -0
  19. copyparty-1.15.9/copyparty/web/w.hash.js.gz +0 -0
  20. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty.egg-info/PKG-INFO +30 -3
  21. copyparty-1.15.7/copyparty/web/baguettebox.js.gz +0 -0
  22. copyparty-1.15.7/copyparty/web/browser.js.gz +0 -0
  23. copyparty-1.15.7/copyparty/web/ui.css.gz +0 -0
  24. copyparty-1.15.7/copyparty/web/up2k.js.gz +0 -0
  25. copyparty-1.15.7/copyparty/web/util.js.gz +0 -0
  26. copyparty-1.15.7/copyparty/web/w.hash.js.gz +0 -0
  27. {copyparty-1.15.7 → copyparty-1.15.9}/LICENSE +0 -0
  28. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/__init__.py +0 -0
  29. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/bos/__init__.py +0 -0
  30. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/bos/bos.py +0 -0
  31. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/bos/path.py +0 -0
  32. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/broker_mp.py +0 -0
  33. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/broker_mpw.py +0 -0
  34. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/broker_thr.py +0 -0
  35. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/broker_util.py +0 -0
  36. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/cert.py +0 -0
  37. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/dxml.py +0 -0
  38. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/fsutil.py +0 -0
  39. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/ftpd.py +0 -0
  40. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/httpconn.py +0 -0
  41. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/ico.py +0 -0
  42. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/mdns.py +0 -0
  43. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/mtag.py +0 -0
  44. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/multicast.py +0 -0
  45. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/pwhash.py +0 -0
  46. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/res/COPYING.txt +0 -0
  47. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/res/__init__.py +0 -0
  48. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/res/insecure.pem +0 -0
  49. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/smbd.py +0 -0
  50. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/ssdp.py +0 -0
  51. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/star.py +0 -0
  52. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/stolen/__init__.py +0 -0
  53. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/stolen/dnslib/__init__.py +0 -0
  54. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/stolen/dnslib/bimap.py +0 -0
  55. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/stolen/dnslib/bit.py +0 -0
  56. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/stolen/dnslib/buffer.py +0 -0
  57. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/stolen/dnslib/dns.py +0 -0
  58. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/stolen/dnslib/label.py +0 -0
  59. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/stolen/dnslib/lex.py +0 -0
  60. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/stolen/dnslib/ranges.py +0 -0
  61. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/stolen/ifaddr/__init__.py +0 -0
  62. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/stolen/ifaddr/_posix.py +0 -0
  63. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/stolen/ifaddr/_shared.py +0 -0
  64. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/stolen/ifaddr/_win32.py +0 -0
  65. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/stolen/qrcodegen.py +0 -0
  66. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/stolen/surrogateescape.py +0 -0
  67. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/sutil.py +0 -0
  68. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/svchub.py +0 -0
  69. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/szip.py +0 -0
  70. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/tcpsrv.py +0 -0
  71. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/tftpd.py +0 -0
  72. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/th_cli.py +0 -0
  73. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/th_srv.py +0 -0
  74. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/a/__init__.py +0 -0
  75. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/a/partyfuse.py +0 -0
  76. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/a/webdav-cfg.bat +0 -0
  77. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/browser.css.gz +0 -0
  78. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/browser.html +0 -0
  79. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/browser2.html +0 -0
  80. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/cf.html +0 -0
  81. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/dbg-audio.js.gz +0 -0
  82. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/dd/2.png +0 -0
  83. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/dd/3.png +0 -0
  84. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/dd/4.png +0 -0
  85. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/dd/5.png +0 -0
  86. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/dd/__init__.py +0 -0
  87. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/deps/__init__.py +0 -0
  88. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/deps/busy.mp3.gz +0 -0
  89. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/deps/easymde.css.gz +0 -0
  90. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/deps/easymde.js.gz +0 -0
  91. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/deps/fuse.py +0 -0
  92. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/deps/marked.js.gz +0 -0
  93. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/deps/mini-fa.css.gz +0 -0
  94. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/deps/mini-fa.woff +0 -0
  95. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/deps/prism.css.gz +0 -0
  96. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/deps/prism.js.gz +0 -0
  97. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/deps/prismd.css.gz +0 -0
  98. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/deps/scp.woff2 +0 -0
  99. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/deps/sha512.ac.js.gz +0 -0
  100. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/deps/sha512.hw.js.gz +0 -0
  101. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/md.css.gz +0 -0
  102. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/md.html +0 -0
  103. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/md.js.gz +0 -0
  104. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/md2.css.gz +0 -0
  105. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/md2.js.gz +0 -0
  106. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/mde.css.gz +0 -0
  107. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/mde.html +0 -0
  108. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/mde.js.gz +0 -0
  109. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/msg.css.gz +0 -0
  110. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/msg.html +0 -0
  111. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/shares.css.gz +0 -0
  112. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/shares.html +0 -0
  113. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/shares.js.gz +0 -0
  114. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/splash.css.gz +0 -0
  115. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/splash.html +0 -0
  116. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/splash.js.gz +0 -0
  117. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/svcs.html +0 -0
  118. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty/web/svcs.js.gz +0 -0
  119. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty.egg-info/SOURCES.txt +0 -0
  120. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty.egg-info/dependency_links.txt +0 -0
  121. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty.egg-info/entry_points.txt +0 -0
  122. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty.egg-info/requires.txt +0 -0
  123. {copyparty-1.15.7 → copyparty-1.15.9}/copyparty.egg-info/top_level.txt +0 -0
  124. {copyparty-1.15.7 → copyparty-1.15.9}/pyproject.toml +0 -0
  125. {copyparty-1.15.7 → copyparty-1.15.9}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: copyparty
- Version: 1.15.7
+ Version: 1.15.9
  Summary: Portable file server with accelerated resumable uploads, deduplication, WebDAV, FTP, zeroconf, media indexer, video thumbnails, audio transcoding, and write-only folders
  Author-email: ed <copyparty@ocv.me>
  License: MIT
@@ -101,6 +101,7 @@ turn almost any device into a file server with resumable uploads/downloads using
  * [file manager](#file-manager) - cut/paste, rename, and delete files/folders (if you have permission)
  * [shares](#shares) - share a file or folder by creating a temporary link
  * [batch rename](#batch-rename) - select some files and press `F2` to bring up the rename UI
+ * [rss feeds](#rss-feeds) - monitor a folder with your RSS reader
  * [media player](#media-player) - plays almost every audio format there is
  * [audio equalizer](#audio-equalizer) - and [dynamic range compressor](https://en.wikipedia.org/wiki/Dynamic_range_compression)
  * [fix unreliable playback on android](#fix-unreliable-playback-on-android) - due to phone / app settings
@@ -273,7 +274,7 @@ also see [comparison to similar software](./docs/versus.md)
  * upload
  * ☑ basic: plain multipart, ie6 support
  * ☑ [up2k](#uploading): js, resumable, multithreaded
- * **no filesize limit!** ...unless you use Cloudflare, then it's 383.9 GiB
+ * **no filesize limit!** even on Cloudflare
  * ☑ stash: simple PUT filedropper
  * ☑ filename randomizer
  * ☑ write-only folders
@@ -708,7 +709,7 @@ up2k has several advantages:
  * uploads resume if you reboot your browser or pc, just upload the same files again
  * server detects any corruption; the client reuploads affected chunks
  * the client doesn't upload anything that already exists on the server
- * no filesize limit unless imposed by a proxy, for example Cloudflare, which blocks uploads over 383.9 GiB
+ * no filesize limit, even when a proxy limits the request size (for example Cloudflare)
  * much higher speeds than ftp/scp/tarpipe on some internet connections (mainly american ones) thanks to parallel connections
  * the last-modified timestamp of the file is preserved

@@ -744,6 +745,8 @@ note that since up2k has to read each file twice, `[🎈] bup` can *theoreticall

  if you are resuming a massive upload and want to skip hashing the files which already finished, you can enable `turbo` in the `[⚙️] config` tab, but please read the tooltip on that button

+ if the server is behind a proxy which imposes a request-size limit, you can configure up2k to sneak below the limit with server-option `--u2sz` (the default is 96 MiB to support Cloudflare)
+

  ### file-search

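For a concrete (made-up) illustration of the option described above: launching the server with `copyparty --u2sz 1,32,48` would let the web-client pick an upload chunksize between 1 and 48 MiB with a default of 32, so every HTTP POST stays comfortably below a proxy that caps request bodies at, say, 50 MiB; the shipped default of `1,64,96` is what keeps uploads under Cloudflare's limit.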
@@ -897,6 +900,30 @@ or a mix of both:
  the metadata keys you can use in the format field are the ones in the file-browser table header (whatever is collected with `-mte` and `-mtp`)


+ ## rss feeds
+
+ monitor a folder with your RSS reader , optionally recursive
+
+ must be enabled per-volume with volflag `rss` or globally with `--rss`
+
+ the feed includes itunes metadata for use with podcast readers such as [AntennaPod](https://antennapod.org/)
+
+ a feed example: https://cd.ocv.me/a/d2/d22/?rss&fext=mp3
+
+ url parameters:
+
+ * `pw=hunter2` for password auth
+ * `recursive` to also include subfolders
+ * `title=foo` changes the feed title (default: folder name)
+ * `fext=mp3,opus` only include mp3 and opus files (default: all)
+ * `nf=30` only show the first 30 results (default: 250)
+ * `sort=m` sort by mtime (file last-modified), newest first (default)
+ * `u` = upload-time; NOTE: non-uploaded files have upload-time `0`
+ * `n` = filename
+ * `a` = filesize
+ * uppercase = reverse-sort; `M` = oldest file first
+
+
  ## media player

  plays almost every audio format there is (if the server has FFmpeg installed for on-demand transcoding)
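As a rough illustration of the url parameters listed above, here is a minimal Python sketch that fetches such a feed; the hostname, password, and filter values are placeholders, not taken from this package:

```python
# fetch a copyparty RSS feed using the url parameters documented above;
# hostname, password and filter values are made-up placeholders
import urllib.parse
import urllib.request

base = "https://example.com/music/?rss"
params = {
    "pw": "hunter2",     # password auth
    "recursive": "",     # presence alone enables subfolders
    "fext": "mp3,opus",  # only include these extensions
    "nf": "50",          # at most 50 items
    "sort": "m",         # newest first by file last-modified
}
url = base + "&" + urllib.parse.urlencode(params)
with urllib.request.urlopen(url) as resp:
    print(resp.read().decode("utf-8"))
```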
@@ -47,6 +47,7 @@ turn almost any device into a file server with resumable uploads/downloads using
  * [file manager](#file-manager) - cut/paste, rename, and delete files/folders (if you have permission)
  * [shares](#shares) - share a file or folder by creating a temporary link
  * [batch rename](#batch-rename) - select some files and press `F2` to bring up the rename UI
+ * [rss feeds](#rss-feeds) - monitor a folder with your RSS reader
  * [media player](#media-player) - plays almost every audio format there is
  * [audio equalizer](#audio-equalizer) - and [dynamic range compressor](https://en.wikipedia.org/wiki/Dynamic_range_compression)
  * [fix unreliable playback on android](#fix-unreliable-playback-on-android) - due to phone / app settings
@@ -219,7 +220,7 @@ also see [comparison to similar software](./docs/versus.md)
  * upload
  * ☑ basic: plain multipart, ie6 support
  * ☑ [up2k](#uploading): js, resumable, multithreaded
- * **no filesize limit!** ...unless you use Cloudflare, then it's 383.9 GiB
+ * **no filesize limit!** even on Cloudflare
  * ☑ stash: simple PUT filedropper
  * ☑ filename randomizer
  * ☑ write-only folders
@@ -654,7 +655,7 @@ up2k has several advantages:
  * uploads resume if you reboot your browser or pc, just upload the same files again
  * server detects any corruption; the client reuploads affected chunks
  * the client doesn't upload anything that already exists on the server
- * no filesize limit unless imposed by a proxy, for example Cloudflare, which blocks uploads over 383.9 GiB
+ * no filesize limit, even when a proxy limits the request size (for example Cloudflare)
  * much higher speeds than ftp/scp/tarpipe on some internet connections (mainly american ones) thanks to parallel connections
  * the last-modified timestamp of the file is preserved

@@ -690,6 +691,8 @@ note that since up2k has to read each file twice, `[🎈] bup` can *theoreticall

  if you are resuming a massive upload and want to skip hashing the files which already finished, you can enable `turbo` in the `[⚙️] config` tab, but please read the tooltip on that button

+ if the server is behind a proxy which imposes a request-size limit, you can configure up2k to sneak below the limit with server-option `--u2sz` (the default is 96 MiB to support Cloudflare)
+

  ### file-search

@@ -843,6 +846,30 @@ or a mix of both:
  the metadata keys you can use in the format field are the ones in the file-browser table header (whatever is collected with `-mte` and `-mtp`)


+ ## rss feeds
+
+ monitor a folder with your RSS reader , optionally recursive
+
+ must be enabled per-volume with volflag `rss` or globally with `--rss`
+
+ the feed includes itunes metadata for use with podcast readers such as [AntennaPod](https://antennapod.org/)
+
+ a feed example: https://cd.ocv.me/a/d2/d22/?rss&fext=mp3
+
+ url parameters:
+
+ * `pw=hunter2` for password auth
+ * `recursive` to also include subfolders
+ * `title=foo` changes the feed title (default: folder name)
+ * `fext=mp3,opus` only include mp3 and opus files (default: all)
+ * `nf=30` only show the first 30 results (default: 250)
+ * `sort=m` sort by mtime (file last-modified), newest first (default)
+ * `u` = upload-time; NOTE: non-uploaded files have upload-time `0`
+ * `n` = filename
+ * `a` = filesize
+ * uppercase = reverse-sort; `M` = oldest file first
+
+
  ## media player

  plays almost every audio format there is (if the server has FFmpeg installed for on-demand transcoding)
@@ -1009,7 +1009,7 @@ def add_upload(ap):
  ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="windows-only: minimum size of incoming uploads through up2k before they are made into sparse files")
  ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; [\033[32m-1\033[0m] = forbidden/always-off, [\033[32m0\033[0m] = default-off and warn if enabled, [\033[32m1\033[0m] = default-off, [\033[32m2\033[0m] = on, [\033[32m3\033[0m] = on and disable datecheck")
  ap2.add_argument("--u2j", metavar="JOBS", type=int, default=2, help="web-client: number of file chunks to upload in parallel; 1 or 2 is good for low-latency (same-country) connections, 4-8 for android clients, 16 for cross-atlantic (max=64)")
- ap2.add_argument("--u2sz", metavar="N,N,N", type=u, default="1,64,96", help="web-client: default upload chunksize (MiB); sets \033[33mmin,default,max\033[0m in the settings gui. Each HTTP POST will aim for this size. Cloudflare max is 96. Big values are good for cross-atlantic but may increase HDD fragmentation on some FS. Disable this optimization with [\033[32m1,1,1\033[0m]")
+ ap2.add_argument("--u2sz", metavar="N,N,N", type=u, default="1,64,96", help="web-client: default upload chunksize (MiB); sets \033[33mmin,default,max\033[0m in the settings gui. Each HTTP POST will aim for \033[33mdefault\033[0m, and never exceed \033[33mmax\033[0m. Cloudflare max is 96. Big values are good for cross-atlantic but may increase HDD fragmentation on some FS. Disable this optimization with [\033[32m1,1,1\033[0m]")
  ap2.add_argument("--u2sort", metavar="TXT", type=u, default="s", help="upload order; [\033[32ms\033[0m]=smallest-first, [\033[32mn\033[0m]=alphabetical, [\033[32mfs\033[0m]=force-s, [\033[32mfn\033[0m]=force-n -- alphabetical is a bit slower on fiber/LAN but makes it easier to eyeball if everything went fine")
  ap2.add_argument("--write-uplog", action="store_true", help="write POST reports to textfiles in working-directory")

@@ -1029,7 +1029,7 @@ def add_network(ap):
  else:
  ap2.add_argument("--freebind", action="store_true", help="allow listening on IPs which do not yet exist, for example if the network interfaces haven't finished going up. Only makes sense for IPs other than '0.0.0.0', '127.0.0.1', '::', and '::1'. May require running as root (unless net.ipv6.ip_nonlocal_bind)")
  ap2.add_argument("--s-thead", metavar="SEC", type=int, default=120, help="socket timeout (read request header)")
- ap2.add_argument("--s-tbody", metavar="SEC", type=float, default=186.0, help="socket timeout (read/write request/response bodies). Use 60 on fast servers (default is extremely safe). Disable with 0 if reverse-proxied for a 2%% speed boost")
+ ap2.add_argument("--s-tbody", metavar="SEC", type=float, default=128.0, help="socket timeout (read/write request/response bodies). Use 60 on fast servers (default is extremely safe). Disable with 0 if reverse-proxied for a 2%% speed boost")
  ap2.add_argument("--s-rd-sz", metavar="B", type=int, default=256*1024, help="socket read size in bytes (indirectly affects filesystem writes; recommendation: keep equal-to or lower-than \033[33m--iobuf\033[0m)")
  ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes")
  ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0.0, help="debug: socket write delay in seconds")
@@ -1349,6 +1349,14 @@ def add_transcoding(ap):
  ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete cached transcode output after \033[33mSEC\033[0m seconds")


+ def add_rss(ap):
+ ap2 = ap.add_argument_group('RSS options')
+ ap2.add_argument("--rss", action="store_true", help="enable RSS output (experimental)")
+ ap2.add_argument("--rss-nf", metavar="HITS", type=int, default=250, help="default number of files to return (url-param 'nf')")
+ ap2.add_argument("--rss-fext", metavar="E,E", type=u, default="", help="default list of file extensions to include (url-param 'fext'); blank=all")
+ ap2.add_argument("--rss-sort", metavar="ORD", type=u, default="m", help="default sort order (url-param 'sort'); [\033[32mm\033[0m]=last-modified [\033[32mu\033[0m]=upload-time [\033[32mn\033[0m]=filename [\033[32ms\033[0m]=filesize; Uppercase=oldest-first. Note that upload-time is 0 for non-uploaded files")
+
+
  def add_db_general(ap, hcores):
  noidx = APPLESAN_TXT if MACOS else ""
  ap2 = ap.add_argument_group('general db options')
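For instance (an illustrative invocation, not taken from this diff), starting the server with `--rss --rss-fext mp3,opus --rss-nf 100` would enable feeds globally and override the default extension filter and item count, while the `rss` volflag mentioned in the README text above limits the feature to a single volume.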
@@ -1518,6 +1526,7 @@ def run_argparse(
  add_db_metadata(ap)
  add_thumbnail(ap)
  add_transcoding(ap)
+ add_rss(ap)
  add_ftp(ap)
  add_webdav(ap)
  add_tftp(ap)
@@ -1,8 +1,8 @@
  # coding: utf-8

- VERSION = (1, 15, 7)
+ VERSION = (1, 15, 9)
  CODENAME = "fill the drives"
- BUILD_DT = (2024, 10, 14)
+ BUILD_DT = (2024, 10, 18)

  S_VERSION = ".".join(map(str, VERSION))
  S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
@@ -59,6 +59,7 @@ if PY2:
  LEELOO_DALLAS = "leeloo_dallas"

  SEE_LOG = "see log for details"
+ SEESLOG = " (see serverlog for details)"
  SSEELOG = " ({})".format(SEE_LOG)
  BAD_CFG = "invalid config; {}".format(SEE_LOG)
  SBADCFG = " ({})".format(BAD_CFG)
@@ -157,8 +158,11 @@ class Lim(object):
  self.chk_rem(rem)
  if sz != -1:
  self.chk_sz(sz)
- self.chk_vsz(broker, ptop, sz, volgetter)
- self.chk_df(abspath, sz) # side effects; keep last-ish
+ else:
+ sz = 0
+
+ self.chk_vsz(broker, ptop, sz, volgetter)
+ self.chk_df(abspath, sz) # side effects; keep last-ish

  ap2, vp2 = self.rot(abspath)
  if abspath == ap2:
@@ -198,7 +202,15 @@

  if self.dft < time.time():
  self.dft = int(time.time()) + 300
- self.dfv = get_df(abspath)[0] or 0
+
+ df, du, err = get_df(abspath, True)
+ if err:
+ t = "failed to read disk space usage for [%s]: %s"
+ self.log(t % (abspath, err), 3)
+ self.dfv = 0xAAAAAAAAA # 42.6 GiB
+ else:
+ self.dfv = df or 0
+

  for j in list(self.reg.values()) if self.reg else []:
  self.dfv -= int(j["size"] / (len(j["hash"]) or 999) * len(j["need"]))
@@ -46,6 +46,7 @@ def vf_bmap() :
  "og_no_head",
  "og_s_title",
  "rand",
+ "rss",
  "xdev",
  "xlink",
  "xvol",
@@ -127,6 +127,8 @@ LOGUES = [[0, ".prologue.html"], [1, ".epilogue.html"]]

  READMES = [[0, ["preadme.md", "PREADME.md"]], [1, ["readme.md", "README.md"]]]

+ RSS_SORT = {"m": "mt", "u": "at", "n": "fn", "s": "sz"}
+

  class HttpCli(object):
  """
@@ -1196,8 +1198,146 @@ class HttpCli(object):
  if "h" in self.uparam:
  return self.tx_mounts()

+ if "rss" in self.uparam:
+ return self.tx_rss()
+
  return self.tx_browser()

+ def tx_rss(self) :
+ if self.do_log:
+ self.log("RSS %s @%s" % (self.req, self.uname))
+
+ if not self.can_read:
+ return self.tx_404()
+
+ vn = self.vn
+ if not vn.flags.get("rss"):
+ raise Pebkac(405, "RSS is disabled in server config")
+
+ rem = self.rem
+ idx = self.conn.get_u2idx()
+ if not idx or not hasattr(idx, "p_end"):
+ if not HAVE_SQLITE3:
+ raise Pebkac(500, "sqlite3 not found on server; rss is disabled")
+ raise Pebkac(500, "server busy, cannot generate rss; please retry in a bit")
+
+ uv = [rem]
+ if "recursive" in self.uparam:
+ uq = "up.rd like ?||'%'"
+ else:
+ uq = "up.rd == ?"
+
+ zs = str(self.uparam.get("fext", self.args.rss_fext))
+ if zs in ("True", "False"):
+ zs = ""
+ if zs:
+ zsl = []
+ for ext in zs.split(","):
+ zsl.append("+up.fn like '%.'||?")
+ uv.append(ext)
+ uq += " and ( %s )" % (" or ".join(zsl),)
+
+ zs1 = self.uparam.get("sort", self.args.rss_sort)
+ zs2 = zs1.lower()
+ zs = RSS_SORT.get(zs2)
+ if not zs:
+ raise Pebkac(400, "invalid sort key; must be m/u/n/s")
+
+ uq += " order by up." + zs
+ if zs1 == zs2:
+ uq += " desc"
+
+ nmax = int(self.uparam.get("nf") or self.args.rss_nf)
+
+ hits = idx.run_query(self.uname, [self.vn], uq, uv, False, False, nmax)[0]
+
+ pw = self.ouparam.get("pw")
+ if pw:
+ q_pw = "?pw=%s" % (pw,)
+ a_pw = "&pw=%s" % (pw,)
+ for i in hits:
+ i["rp"] += a_pw if "?" in i["rp"] else q_pw
+ else:
+ q_pw = a_pw = ""
+
+ title = self.uparam.get("title") or self.vpath.split("/")[-1]
+ etitle = html_escape(title, True, True)
+
+ baseurl = "%s://%s%s" % (
+ "https" if self.is_https else "http",
+ self.host,
+ self.args.SRS,
+ )
+ feed = "%s%s" % (baseurl, self.req[1:])
+ efeed = html_escape(feed, True, True)
+ edirlink = efeed.split("?")[0] + q_pw
+
+ ret = [
+ """\
+ <?xml version="1.0" encoding="UTF-8"?>
+ <rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom" xmlns:itunes="http://www.itunes.com/dtds/podcast-1.0.dtd" xmlns:content="http://purl.org/rss/1.0/modules/content/">
+ \t<channel>
+ \t\t<atom:link href="%s" rel="self" type="application/rss+xml" />
+ \t\t<title>%s</title>
+ \t\t<description></description>
+ \t\t<link>%s</link>
+ \t\t<generator>copyparty-1</generator>
+ """
+ % (efeed, etitle, edirlink)
+ ]
+
+ q = "select fn from cv where rd=? and dn=?"
+ crd, cdn = rem.rsplit("/", 1) if "/" in rem else ("", rem)
+ try:
+ cfn = idx.cur[self.vn.realpath].execute(q, (crd, cdn)).fetchone()[0]
+ bos.stat(os.path.join(vn.canonical(rem), cfn))
+ cv_url = "%s%s?th=jf%s" % (baseurl, vjoin(self.vpath, cfn), a_pw)
+ cv_url = html_escape(cv_url, True, True)
+ zs = """\
+ \t\t<image>
+ \t\t\t<url>%s</url>
+ \t\t\t<title>%s</title>
+ \t\t\t<link>%s</link>
+ \t\t</image>
+ """
+ ret.append(zs % (cv_url, etitle, edirlink))
+ except:
+ pass
+
+ for i in hits:
+ iurl = html_escape("%s%s" % (baseurl, i["rp"]), True, True)
+ title = unquotep(i["rp"].split("?")[0].split("/")[-1])
+ title = html_escape(title, True, True)
+ tag_t = str(i["tags"].get("title") or "")
+ tag_a = str(i["tags"].get("artist") or "")
+ desc = "%s - %s" % (tag_a, tag_t) if tag_t and tag_a else (tag_t or tag_a)
+ desc = html_escape(desc, True, True) if desc else title
+ mime = html_escape(guess_mime(title))
+ lmod = formatdate(i["ts"])
+ zsa = (iurl, iurl, title, desc, lmod, iurl, mime, i["sz"])
+ zs = (
+ """\
+ \t\t<item>
+ \t\t\t<guid>%s</guid>
+ \t\t\t<link>%s</link>
+ \t\t\t<title>%s</title>
+ \t\t\t<description>%s</description>
+ \t\t\t<pubDate>%s</pubDate>
+ \t\t\t<enclosure url="%s" type="%s" length="%d"/>
+ """
+ % zsa
+ )
+ dur = i["tags"].get(".dur")
+ if dur:
+ zs += "\t\t\t<itunes:duration>%d</itunes:duration>\n" % (dur,)
+ ret.append(zs + "\t\t</item>\n")
+
+ ret.append("\t</channel>\n</rss>\n")
+ bret = "".join(ret).encode("utf-8", "replace")
+ self.reply(bret, 200, "text/xml; charset=utf-8")
+ self.log("rss: %d hits, %d bytes" % (len(hits), len(bret)))
+ return True
+
  def handle_propfind(self) :
  if self.do_log:
  self.log("PFIND %s @%s" % (self.req, self.uname))
@@ -1874,7 +2014,7 @@
  f, fn = ren_open(fn, *open_a, **params)
  try:
  path = os.path.join(fdir, fn)
- post_sz, sha_hex, sha_b64 = hashcopy(reader, f, self.args.s_wr_slp)
+ post_sz, sha_hex, sha_b64 = hashcopy(reader, f, None, 0, self.args.s_wr_slp)
  finally:
  f.close()

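Every `hashcopy` call in this diff gains two arguments, an optional externally-owned hasher and a size limit, while still unpacking `(size, sha_hex, sha_b64)`; a rough sketch of a helper with that call shape (the real one is in copyparty/util.py, and the digest encoding below is an assumption):

```python
# stand-in matching the hashcopy() call sites in this diff; the digest
# encoding below is an assumption, not copyparty's exact format
import base64
import hashlib
import time

def hashcopy_sketch(reader, f, hasher=None, max_sz=0, slp=0):
    h = hasher or hashlib.sha512()
    sz = 0
    for buf in reader:          # reader yields byte chunks (eg. read_socket)
        sz += len(buf)
        if max_sz and sz > max_sz:
            raise ValueError("body too large")
        h.update(buf)
        f.write(buf)
        if slp:
            time.sleep(slp)     # optional write throttle (--s-wr-slp)
    digest = h.digest()
    b64 = base64.urlsafe_b64encode(digest).decode("ascii").rstrip("=")
    return sz, digest.hex(), b64
```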
@@ -2337,7 +2477,7 @@
  broker = self.conn.hsrv.broker
  x = broker.ask("up2k.handle_chunks", ptop, wark, chashes)
  response = x.get()
- chashes, chunksize, cstarts, path, lastmod, sprs = response
+ chashes, chunksize, cstarts, path, lastmod, fsize, sprs = response
  maxsize = chunksize * len(chashes)
  cstart0 = cstarts[0]
  locked = chashes # remaining chunks to be received in this request
@@ -2345,6 +2485,50 @@
  num_left = -1 # num chunks left according to most recent up2k release
  treport = time.time() # ratelimit up2k reporting to reduce overhead

+ if "x-up2k-subc" in self.headers:
+ sc_ofs = int(self.headers["x-up2k-subc"])
+ chash = chashes[0]
+
+ u2sc = self.conn.hsrv.u2sc
+ try:
+ sc_pofs, hasher = u2sc[chash]
+ if not sc_ofs:
+ t = "client restarted the chunk; forgetting subchunk offset %d"
+ self.log(t % (sc_pofs,))
+ raise Exception()
+ except:
+ sc_pofs = 0
+ hasher = hashlib.sha512()
+
+ et = "subchunk protocol error; resetting chunk "
+ if sc_pofs != sc_ofs:
+ u2sc.pop(chash, None)
+ t = "%s[%s]: the expected resume-point was %d, not %d"
+ raise Pebkac(400, t % (et, chash, sc_pofs, sc_ofs))
+ if len(cstarts) > 1:
+ u2sc.pop(chash, None)
+ t = "%s[%s]: only a single subchunk can be uploaded in one request; you are sending %d chunks"
+ raise Pebkac(400, t % (et, chash, len(cstarts)))
+ csize = min(chunksize, fsize - cstart0[0])
+ cstart0[0] += sc_ofs # also sets cstarts[0][0]
+ sc_next_ofs = sc_ofs + postsize
+ if sc_next_ofs > csize:
+ u2sc.pop(chash, None)
+ t = "%s[%s]: subchunk offset (%d) plus postsize (%d) exceeds chunksize (%d)"
+ raise Pebkac(400, t % (et, chash, sc_ofs, postsize, csize))
+ else:
+ final_subchunk = sc_next_ofs == csize
+ t = "subchunk %s %d:%d/%d %s"
+ zs = "END" if final_subchunk else ""
+ self.log(t % (chash[:15], sc_ofs, sc_next_ofs, csize, zs), 6)
+ if final_subchunk:
+ u2sc.pop(chash, None)
+ else:
+ u2sc[chash] = (sc_next_ofs, hasher)
+ else:
+ hasher = None
+ final_subchunk = True
+
  try:
  if self.args.nw:
  path = os.devnull
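In plain terms, the new `x-up2k-subc` header lets the client deliver a single up2k chunk across several smaller POSTs: the header carries the byte offset inside the chunk, the server keeps `(next_offset, running_hasher)` per chunk hash in `hsrv.u2sc`, and the chunk is only verified and confirmed once the last subchunk ends exactly on the chunk boundary. A simplified model of that bookkeeping (names invented for illustration, not copyparty's API):

```python
# simplified model of the subchunk bookkeeping shown above; u2sc maps a
# chunk-hash to (expected_next_offset, running_hasher), as in the diff
import hashlib

u2sc = {}

def accept_subchunk(chash, sc_ofs, body, chunksize, bytes_left_in_file):
    csize = min(chunksize, bytes_left_in_file)      # true size of this chunk
    sc_pofs, hasher = u2sc.get(chash, (0, hashlib.sha512()))
    if sc_ofs == 0:                                 # client restarted the chunk
        sc_pofs, hasher = 0, hashlib.sha512()
    if sc_pofs != sc_ofs:
        u2sc.pop(chash, None)
        raise ValueError("expected resume-point %d, not %d" % (sc_pofs, sc_ofs))
    next_ofs = sc_ofs + len(body)
    if next_ofs > csize:
        u2sc.pop(chash, None)
        raise ValueError("subchunk overruns the chunk")
    hasher.update(body)
    final = next_ofs == csize
    if final:
        u2sc.pop(chash, None)   # chunk complete; compare digest to chash now
    else:
        u2sc[chash] = (next_ofs, hasher)
    return final
```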
@@ -2375,9 +2559,11 @@
  reader = read_socket(
  self.sr, self.args.s_rd_sz, min(remains, chunksize)
  )
- post_sz, _, sha_b64 = hashcopy(reader, f, self.args.s_wr_slp)
+ post_sz, _, sha_b64 = hashcopy(
+ reader, f, hasher, 0, self.args.s_wr_slp
+ )

- if sha_b64 != chash:
+ if sha_b64 != chash and final_subchunk:
  try:
  self.bakflip(
  f, path, cstart[0], post_sz, chash, sha_b64, vfs.flags
@@ -2409,7 +2595,8 @@

  # be quick to keep the tcp winsize scale;
  # if we can't confirm rn then that's fine
- written.append(chash)
+ if final_subchunk:
+ written.append(chash)
  now = time.time()
  if now - treport < 1:
  continue
@@ -2434,6 +2621,7 @@
  except:
  # maybe busted handle (eg. disk went full)
  f.close()
+ chashes = [] # exception flag
  raise
  finally:
  if locked:
@@ -2442,9 +2630,11 @@
  num_left, t = x.get()
  if num_left < 0:
  self.loud_reply(t, status=500)
- return False
- t = "got %d more chunks, %d left"
- self.log(t % (len(written), num_left), 6)
+ if chashes: # kills exception bubbling otherwise
+ return False
+ else:
+ t = "got %d more chunks, %d left"
+ self.log(t % (len(written), num_left), 6)

  if num_left < 0:
  raise Pebkac(500, "unconfirmed; see serverlog")
@@ -2797,7 +2987,7 @@
  tabspath = os.path.join(fdir, tnam)
  self.log("writing to {}".format(tabspath))
  sz, sha_hex, sha_b64 = hashcopy(
- p_data, f, self.args.s_wr_slp, max_sz
+ p_data, f, None, max_sz, self.args.s_wr_slp
  )
  if sz == 0:
  raise Pebkac(400, "empty files in post")
@@ -3127,7 +3317,7 @@
  wunlink(self.log, fp, vfs.flags)

  with open(fsenc(fp), "wb", self.args.iobuf) as f:
- sz, sha512, _ = hashcopy(p_data, f, self.args.s_wr_slp)
+ sz, sha512, _ = hashcopy(p_data, f, None, 0, self.args.s_wr_slp)

  if lim:
  lim.nup(self.ip)
@@ -5035,7 +5225,7 @@
  self.log("#wow #whoa")

  if not self.args.nid:
- free, total = get_df(abspath)
+ free, total, _ = get_df(abspath, False)
  if total is not None:
  h1 = humansize(free or 0)
  h2 = humansize(total)
@@ -1,6 +1,7 @@
  # coding: utf-8
  from __future__ import print_function, unicode_literals

+ import hashlib
  import math
  import os
  import re
@@ -141,6 +142,7 @@ class HttpSrv(object):
  self.t_periodic = None

  self.u2fh = FHC()
+ self.u2sc = {}
  self.pipes = CachedDict(0.2)
  self.metrics = Metrics(self)
  self.nreq = 0
@@ -128,7 +128,7 @@ class Metrics(object):
  addbh("cpp_disk_size_bytes", "total HDD size of volume")
  addbh("cpp_disk_free_bytes", "free HDD space in volume")
  for vpath, vol in allvols:
- free, total = get_df(vol.realpath)
+ free, total, _ = get_df(vol.realpath, False)
  if free is None or total is None:
  continue

@@ -91,7 +91,7 @@ class U2idx(object):
  uv = [wark[:16], wark]

  try:
- return self.run_query(uname, vols, uq, uv, False, 99999)[0]
+ return self.run_query(uname, vols, uq, uv, False, True, 99999)[0]
  except:
  raise Pebkac(500, min_ex())

@@ -295,7 +295,7 @@
  q += " lower({}) {} ? ) ".format(field, oper)

  try:
- return self.run_query(uname, vols, q, va, have_mt, lim)
+ return self.run_query(uname, vols, q, va, have_mt, True, lim)
  except Exception as ex:
  raise Pebkac(500, repr(ex))

@@ -306,6 +306,7 @@
  uq ,
  uv ,
  have_mt ,
+ sort ,
  lim ,
  ) :
  if self.args.srch_dbg:
@@ -452,7 +453,8 @@
  done_flag.append(True)
  self.active_id = ""

- ret.sort(key=itemgetter("rp"))
+ if sort:
+ ret.sort(key=itemgetter("rp"))

  return ret, list(taglist.keys()), lim < 0 and not clamped