quasarr 2.7.1.tar.gz → 2.7.2.tar.gz
This diff shows the changes between publicly available package versions as they appear in their respective public registries, and is provided for informational purposes only.
Potentially problematic release: this version of quasarr might be problematic.
- {quasarr-2.7.1 → quasarr-2.7.2}/PKG-INFO +1 -1
- {quasarr-2.7.1 → quasarr-2.7.2}/docker/Dockerfile +1 -1
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/api/arr/__init__.py +4 -5
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/providers/shared_state.py +1 -1
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/providers/version.py +1 -1
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/search/__init__.py +96 -62
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/search/sources/dj.py +55 -52
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/search/sources/mb.py +15 -18
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/search/sources/sf.py +1 -1
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/search/sources/sj.py +55 -52
- {quasarr-2.7.1 → quasarr-2.7.2}/.env.example +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/.github/FUNDING.yml +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/.github/ISSUE_TEMPLATE/bug_report.yml +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/.github/ISSUE_TEMPLATE/config.yml +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/.github/workflows/HostnameRedaction.yml +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/.github/workflows/PullRequests.yml +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/.github/workflows/Release.yml +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/.gitignore +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/.pre-commit-config.yaml +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/CONTRIBUTING.md +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/LICENSE +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/Quasarr.png +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/Quasarr.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/README.md +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/cli_tester.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/docker/dev-services-compose.yml +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/docker/docker-compose.yml +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/pre-commit.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/pyproject.toml +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/__init__.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/api/__init__.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/api/captcha/__init__.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/api/config/__init__.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/api/jdownloader/__init__.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/api/packages/__init__.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/api/sponsors_helper/__init__.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/api/statistics/__init__.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/downloads/__init__.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/downloads/linkcrypters/__init__.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/downloads/linkcrypters/al.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/downloads/linkcrypters/filecrypt.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/downloads/linkcrypters/hide.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/downloads/packages/__init__.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/downloads/sources/__init__.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/downloads/sources/al.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/downloads/sources/by.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/downloads/sources/dd.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/downloads/sources/dj.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/downloads/sources/dl.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/downloads/sources/dt.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/downloads/sources/dw.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/downloads/sources/he.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/downloads/sources/hs.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/downloads/sources/mb.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/downloads/sources/nk.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/downloads/sources/nx.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/downloads/sources/sf.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/downloads/sources/sj.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/downloads/sources/sl.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/downloads/sources/wd.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/downloads/sources/wx.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/providers/__init__.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/providers/auth.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/providers/cloudflare.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/providers/hostname_issues.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/providers/html_images.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/providers/html_templates.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/providers/imdb_metadata.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/providers/jd_cache.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/providers/log.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/providers/myjd_api.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/providers/notifications.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/providers/obfuscated.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/providers/sessions/__init__.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/providers/sessions/al.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/providers/sessions/dd.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/providers/sessions/dl.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/providers/sessions/nx.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/providers/statistics.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/providers/utils.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/providers/web_server.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/search/sources/__init__.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/search/sources/al.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/search/sources/by.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/search/sources/dd.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/search/sources/dl.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/search/sources/dt.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/search/sources/dw.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/search/sources/fx.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/search/sources/he.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/search/sources/hs.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/search/sources/nk.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/search/sources/nx.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/search/sources/sl.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/search/sources/wd.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/search/sources/wx.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/storage/__init__.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/storage/config.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/storage/setup.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/quasarr/storage/sqlite_database.py +0 -0
- {quasarr-2.7.1 → quasarr-2.7.2}/uv.lock +0 -0
{quasarr-2.7.1 → quasarr-2.7.2}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: quasarr
-Version: 2.7.1
+Version: 2.7.2
 Summary: Quasarr connects JDownloader with Radarr, Sonarr and LazyLibrarian. It also decrypts links protected by CAPTCHAs.
 Author-email: rix1337 <rix1337@users.noreply.github.com>
 License-File: LICENSE
{quasarr-2.7.1 → quasarr-2.7.2}/docker/Dockerfile
@@ -33,7 +33,7 @@ ENV INTERNAL_ADDRESS=""
 ENV EXTERNAL_ADDRESS=""
 ENV DISCORD=""
 ENV HOSTNAMES=""
-ENV LOG_MAX_WIDTH="
+ENV LOG_MAX_WIDTH="120"

 # Restart loop: exit 0 = restart, exit non-zero = stop container
 ENTRYPOINT ["sh", "-c", "while true; do quasarr --port=8080 --internal_address=$INTERNAL_ADDRESS --external_address=$EXTERNAL_ADDRESS --discord=$DISCORD --hostnames=$HOSTNAMES; ret=$?; if [ $ret -ne 0 ]; then echo \"Quasarr exited with error $ret, stopping...\"; exit $ret; fi; echo \"Quasarr restarting...\"; sleep 2; done"]
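The ENTRYPOINT above implements a supervision loop: exit code 0 means restart Quasarr, any non-zero code stops the container. A minimal Python sketch of the same pattern, assuming a quasarr binary on PATH (flags abridged from the Dockerfile; the loop itself is illustrative, not part of the package):

import subprocess
import sys
import time

# Restart on clean exit (0), propagate any error exit code - mirrors the ENTRYPOINT.
while True:
    ret = subprocess.call(["quasarr", "--port=8080"])
    if ret != 0:
        print(f"Quasarr exited with error {ret}, stopping...")
        sys.exit(ret)
    print("Quasarr restarting...")
    time.sleep(2)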
{quasarr-2.7.1 → quasarr-2.7.2}/quasarr/api/arr/__init__.py
@@ -15,7 +15,7 @@ from quasarr.downloads import download
 from quasarr.downloads.packages import delete_package, get_packages
 from quasarr.providers import shared_state
 from quasarr.providers.auth import require_api_key
-from quasarr.providers.log import debug, info
+from quasarr.providers.log import debug, error, info, warn
 from quasarr.providers.version import get_version
 from quasarr.search import get_search_results

@@ -420,9 +420,8 @@ def setup_arr_routes(app):
             </channel>
         </rss>"""
     except Exception as e:
-
-
-        info(f"[ERROR] Unknown indexer request: {dict(request.query)}")
+        error(f"Error loading search results: {e} " + traceback.format_exc())
+        warn(f"Unknown indexer request: {dict(request.query)}")
     return """<?xml version="1.0" encoding="UTF-8"?>
     <rss>
         <channel>
@@ -432,5 +431,5 @@ def setup_arr_routes(app):
             </channel>
         </rss>"""

-
+    warn(f"[ERROR] Unknown general request: {dict(request.query)}")
     return {"error": True}
{quasarr-2.7.1 → quasarr-2.7.2}/quasarr/search/__init__.py
@@ -4,9 +4,11 @@

 import time
 from concurrent.futures import ThreadPoolExecutor, as_completed
+from datetime import timezone
+from email.utils import parsedate_to_datetime

 from quasarr.providers.imdb_metadata import get_imdb_metadata
-from quasarr.providers.log import debug, info
+from quasarr.providers.log import debug, info, trace
 from quasarr.search.sources.al import al_feed, al_search
 from quasarr.search.sources.by import by_feed, by_search
 from quasarr.search.sources.dd import dd_feed, dd_search
@@ -72,54 +74,54 @@ def get_search_results(

     # Mappings
     imdb_map = [
-        (al, al_search),
-        (by, by_search),
-        (dd, dd_search),
-        (dl, dl_search),
-        (dt, dt_search),
-        (dj, dj_search),
-        (dw, dw_search),
-        (fx, fx_search),
-        (he, he_search),
-        (hs, hs_search),
-        (mb, mb_search),
-        (nk, nk_search),
-        (nx, nx_search),
-        (sf, sf_search),
-        (sj, sj_search),
-        (sl, sl_search),
-        (wd, wd_search),
-        (wx, wx_search),
+        ("al", al, al_search),
+        ("by", by, by_search),
+        ("dd", dd, dd_search),
+        ("dl", dl, dl_search),
+        ("dt", dt, dt_search),
+        ("dj", dj, dj_search),
+        ("dw", dw, dw_search),
+        ("fx", fx, fx_search),
+        ("he", he, he_search),
+        ("hs", hs, hs_search),
+        ("mb", mb, mb_search),
+        ("nk", nk, nk_search),
+        ("nx", nx, nx_search),
+        ("sf", sf, sf_search),
+        ("sj", sj, sj_search),
+        ("sl", sl, sl_search),
+        ("wd", wd, wd_search),
+        ("wx", wx, wx_search),
     ]

     phrase_map = [
-        (by, by_search),
-        (dl, dl_search),
-        (dt, dt_search),
-        (nx, nx_search),
-        (sl, sl_search),
-        (wd, wd_search),
+        ("by", by, by_search),
+        ("dl", dl, dl_search),
+        ("dt", dt, dt_search),
+        ("nx", nx, nx_search),
+        ("sl", sl, sl_search),
+        ("wd", wd, wd_search),
     ]

     feed_map = [
-        (al, al_feed),
-        (by, by_feed),
-        (dd, dd_feed),
-        (dj, dj_feed),
-        (dl, dl_feed),
-        (dt, dt_feed),
-        (dw, dw_feed),
-        (fx, fx_feed),
-        (he, he_feed),
-        (hs, hs_feed),
-        (mb, mb_feed),
-        (nk, nk_feed),
-        (nx, nx_feed),
-        (sf, sf_feed),
-        (sj, sj_feed),
-        (sl, sl_feed),
-        (wd, wd_feed),
-        (wx, wx_feed),
+        ("al", al, al_feed),
+        ("by", by, by_feed),
+        ("dd", dd, dd_feed),
+        ("dj", dj, dj_feed),
+        ("dl", dl, dl_feed),
+        ("dt", dt, dt_feed),
+        ("dw", dw, dw_feed),
+        ("fx", fx, fx_feed),
+        ("he", he, he_feed),
+        ("hs", hs, hs_feed),
+        ("mb", mb, mb_feed),
+        ("nk", nk, nk_feed),
+        ("nx", nx, nx_feed),
+        ("sf", sf, sf_feed),
+        ("sj", sj, sj_feed),
+        ("sl", sl, sl_feed),
+        ("wd", wd, wd_feed),
+        ("wx", wx, wx_feed),
     ]

     # Add searches
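Each map entry now carries the source's short name alongside its configured hostname and handler, which lets the executor label results per source. A standalone sketch of that dispatch shape; the hostnames and handlers below are dummies, not values from the real Quasarr config:

def sj_search(query):
    return [f"sj result for {query}"]

def dw_search(query):
    return [f"dw result for {query}"]

# (name, configured hostname or None, handler) - the same tuple shape as imdb_map
imdb_map = [
    ("sj", "sj.example", sj_search),
    ("dw", None, dw_search),  # hostname not configured -> skipped
]

for name, url, func in imdb_map:
    if url:  # only dispatch to sources with a configured hostname
        print(name.upper(), func("tt0944947"))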
@@ -128,27 +130,27 @@ def get_search_results(
             (shared_state, start_time, request_from, imdb_id),
             {"mirror": mirror, "season": season, "episode": episode},
         )
-        for
-            if
-                search_executor.add(func, args, kwargs, True)
+        for name, url, func in imdb_map:
+            if url:
+                search_executor.add(func, args, kwargs, True, name.upper())

     elif search_phrase and docs_search:
         args, kwargs = (
             (shared_state, start_time, request_from, search_phrase),
             {"mirror": mirror, "season": season, "episode": episode},
         )
-        for
-            if
-                search_executor.add(func, args, kwargs)
+        for name, url, func in phrase_map:
+            if url:
+                search_executor.add(func, args, kwargs, source_name=name.upper())

     elif search_phrase:
         debug(f"Search phrase '{search_phrase}' is not supported for {request_from}.")

     else:
         args, kwargs = ((shared_state, start_time, request_from), {"mirror": mirror})
-        for
-            if
-                search_executor.add(func, args, kwargs)
+        for name, url, func in feed_map:
+            if url:
+                search_executor.add(func, args, kwargs, source_name=name.upper())

     # Clean description for Console UI
     if imdb_id:
@@ -165,12 +167,30 @@ def get_search_results(

     elapsed_time = time.time() - start_time

+    # Sort results by date (newest first)
+    def get_date(item):
+        try:
+            dt = parsedate_to_datetime(item.get("details", {}).get("date", ""))
+            if dt.tzinfo is None:
+                dt = dt.replace(tzinfo=timezone.utc)
+            return dt
+        except Exception:
+            return parsedate_to_datetime("Thu, 01 Jan 1970 00:00:00 +0000")
+
+    results.sort(key=get_date, reverse=True)
+
     # Calculate pagination for logging and return
     total_count = len(results)

     # Slicing
     sliced_results = results[offset : offset + limit]

+    if sliced_results:
+        trace(f"First {len(sliced_results)} results sorted by date:")
+        for i, res in enumerate(sliced_results):
+            details = res.get("details", {})
+            trace(f"{i + 1}. {details.get('date')} | {details.get('title')}")
+
     # Formatting for log (1-based index for humans)
     log_start = min(offset + 1, total_count) if total_count > 0 else 0
     log_end = min(offset + limit, total_count)
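The sort key parses each result's RFC 2822 date string (the RSS pubDate format) with email.utils.parsedate_to_datetime; naive datetimes are coerced to UTC so aware and naive values are never compared, and anything unparsable falls back to the epoch and sinks to the end. A self-contained sketch of the same ordering, with made-up results:

from datetime import timezone
from email.utils import parsedate_to_datetime

results = [
    {"details": {"title": "older", "date": "Mon, 01 Jan 2024 10:00:00 +0000"}},
    {"details": {"title": "newer", "date": "Tue, 02 Jan 2024 10:00:00 +0000"}},
    {"details": {"title": "broken", "date": "not a date"}},
]

def get_date(item):
    try:
        dt = parsedate_to_datetime(item.get("details", {}).get("date", ""))
        if dt.tzinfo is None:  # treat naive timestamps as UTC
            dt = dt.replace(tzinfo=timezone.utc)
        return dt
    except Exception:  # unparsable dates sort to the very end
        return parsedate_to_datetime("Thu, 01 Jan 1970 00:00:00 +0000")

results.sort(key=get_date, reverse=True)
print([r["details"]["title"] for r in results])  # ['newer', 'older', 'broken']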
@@ -193,12 +213,19 @@ class SearchExecutor:
     def __init__(self):
        self.searches = []

-    def add(self, func, args, kwargs, use_cache=False):
+    def add(self, func, args, kwargs, use_cache=False, source_name=None):
         key_args = list(args)
         key_args[1] = None
         key_args = tuple(key_args)
         key = hash((func.__name__, key_args, frozenset(kwargs.items())))
-        self.searches.append(
+        self.searches.append(
+            (
+                key,
+                lambda: func(*args, **kwargs),
+                use_cache,
+                source_name or func.__name__,
+            )
+        )

     def run_all(self):
         results = []
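The dedup key hashes the function name, the positional args with the second slot (the per-request start time) blanked out, and the kwargs as a frozenset, so two identical searches issued at different times share one cache entry. The idea in isolation; the search function below is a stand-in:

import time

def make_key(func, args, kwargs):
    key_args = list(args)
    key_args[1] = None  # args[1] is the start time; exclude it from the key
    return hash((func.__name__, tuple(key_args), frozenset(kwargs.items())))

def sj_search(state, start_time, query):
    return [query]

# Two calls that differ only in start time produce the same cache key:
k1 = make_key(sj_search, ("state", time.time(), "tt0944947"), {"season": 1})
k2 = make_key(sj_search, ("state", time.time() + 60, "tt0944947"), {"season": 1})
print(k1 == k2)  # True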
@@ -213,7 +240,7 @@ class SearchExecutor:
         current_index = 0
         pending_futures = []

-        for key, func, use_cache in self.searches:
+        for key, func, use_cache, source_name in self.searches:
             cached_result = None
             exp = 0

@@ -233,27 +260,34 @@ class SearchExecutor:
                     all_cached = False
                     future = executor.submit(func)
                     cache_key = key if use_cache else None
-                    future_to_meta[future] = (current_index, cache_key)
+                    future_to_meta[future] = (current_index, cache_key, source_name)
                     pending_futures.append(future)
                     current_index += 1

             if pending_futures:
-
+                results_badges = [""] * len(pending_futures)

                 for future in as_completed(pending_futures):
-                    index, cache_key = future_to_meta[future]
+                    index, cache_key, source_name = future_to_meta[future]
                     try:
                         res = future.result()
-
-
+                        if res and len(res) > 0:
+                            badge = f"<bg green><black>{source_name}</black></bg green>"
+                        else:
+                            debug(f"❌ No results returned by {source_name}")
+                            badge = f"<bg black><white>{source_name}</white></bg black>"
+
+                        results_badges[index] = badge
                         results.extend(res)
                         if cache_key:
                             search_cache.set(cache_key, res)
                     except Exception as e:
-
+                        results_badges[index] = (
+                            f"<bg red><white>{source_name}</white></bg red>"
+                        )
                         info(f"Search error: {e}")

-        bar_str = f" [{''.join(
+        bar_str = f" [{' '.join(results_badges)}]"

         return results, bar_str, all_cached, min_ttl

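run_all records each future's submission index and source name, then drains the futures with as_completed and writes one badge per source: green markup for results, black for an empty list, red for an exception. A sketch of the order-preserving bookkeeping, with plain-text badges standing in for the colour markup:

from concurrent.futures import ThreadPoolExecutor, as_completed

def ok():
    return ["release"]

def empty():
    return []

def boom():
    raise RuntimeError("down")

tasks = [("SJ", ok), ("DW", empty), ("MB", boom)]
badges = [""] * len(tasks)

with ThreadPoolExecutor() as executor:
    # remember each future's submission slot so badges keep a stable order
    future_to_meta = {executor.submit(f): (i, name) for i, (name, f) in enumerate(tasks)}
    for future in as_completed(future_to_meta):
        i, name = future_to_meta[future]
        try:
            res = future.result()
            badges[i] = f"[{name}]" if res else f"({name})"
        except Exception:
            badges[i] = f"!{name}!"

print(" ".join(badges))  # [SJ] (DW) !MB!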
{quasarr-2.7.1 → quasarr-2.7.2}/quasarr/search/sources/dj.py
@@ -37,70 +37,73 @@ def dj_feed(shared_state, start_time, request_from, mirror=None):
     sj_host = shared_state.values["config"]("Hostnames").get(hostname)
     password = sj_host

-    url = f"https://{sj_host}/api/releases/latest/0"
     headers = {"User-Agent": shared_state.values["user_agent"]}

-
-
-        r.raise_for_status()
-        data = json.loads(r.content)
-    except Exception as e:
-        error(f"Feed load error: {e}")
-        mark_hostname_issue(
-            hostname, "feed", str(e) if "e" in dir() else "Error occurred"
-        )
-        return releases
+    for days in range(4):
+        url = f"https://{sj_host}/api/releases/latest/{days}"

-    for release in data:
         try:
-
-
-
+            r = requests.get(url, headers=headers, timeout=30)
+            r.raise_for_status()
+            data = json.loads(r.content)
+        except Exception as e:
+            error(f"Feed load error: {e}")
+            mark_hostname_issue(
+                hostname, "feed", str(e) if "e" in dir() else "Error occurred"
+            )
+            return releases

-
-
-
+        for release in data:
+            try:
+                title = release.get("name").rstrip(".")
+                if not title:
+                    continue

-
-
-
-                continue
+                published = convert_to_rss_date(release.get("createdAt"))
+                if not published:
+                    continue

-
+                media = release.get("_media", {})
+                slug = media.get("slug")
+                if not slug:
+                    continue

-
-        size = 0
-        imdb_id = None
+                series_url = f"https://{sj_host}/serie/{slug}"

-
-
-
-            )
-        ).decode("utf-8")
+                mb = 0
+                size = 0
+                imdb_id = None

-
-
-
+                payload = urlsafe_b64encode(
+                    f"{title}|{series_url}|{mirror}|{mb}|{password}|{imdb_id}|{hostname}".encode(
+                        "utf-8"
+                    )
+                ).decode("utf-8")
+
+                link = f"{shared_state.values['internal_address']}/download/?payload={payload}"
+
+                releases.append(
+                    {
+                        "details": {
+                            "title": title,
+                            "hostname": hostname,
+                            "imdb_id": imdb_id,
+                            "link": link,
+                            "mirror": mirror,
+                            "size": size,
+                            "date": published,
+                            "source": series_url,
+                        },
+                        "type": "protected",
+                    }
+                )

-
-        {
-
-            "title": title,
-            "hostname": hostname,
-            "imdb_id": imdb_id,
-            "link": link,
-            "mirror": mirror,
-            "size": size,
-            "date": published,
-            "source": series_url,
-        },
-        "type": "protected",
-        }
-        )
+            except Exception as e:
+                warn(f"Feed parse error: {e}")
+                continue

-
-
-        continue
+        if releases:
+            break

     debug(f"Time taken: {time.time() - start_time:.2f}s")

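Where the old feed hit only /api/releases/latest/0 (today), the new loop widens the window one day at a time up to three days back and stops at the first day that yields releases, so a quiet day no longer produces an empty feed. The shape of that retry, with a stub in place of the HTTP call:

def fetch_day(days):
    # stand-in for requests.get(f"https://{host}/api/releases/latest/{days}")
    return [] if days < 2 else [f"release from {days} day(s) ago"]

releases = []
for days in range(4):  # widen the window one day at a time
    releases.extend(fetch_day(days))
    if releases:
        break  # first non-empty day wins

print(releases)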
{quasarr-2.7.1 → quasarr-2.7.2}/quasarr/search/sources/mb.py
@@ -12,7 +12,7 @@ import requests
 from bs4 import BeautifulSoup

 from quasarr.providers.hostname_issues import clear_hostname_issue, mark_hostname_issue
-from quasarr.providers.log import debug, warn
+from quasarr.providers.log import debug, error, warn

 hostname = "mb"
 supported_mirrors = ["rapidgator", "ddownload"]
@@ -104,21 +104,9 @@ def _parse_posts(
             if " " in title:
                 continue

-
-
-
-            else:
-                mirror_candidates = []
-                for strong in post.find_all(
-                    "strong", string=re.compile(r"^Download", re.I)
-                ):
-                    link_tag = strong.find_next_sibling("a")
-                    if link_tag and link_tag.get_text(strip=True):
-                        host = link_tag.get_text(strip=True).split(".")[0].lower()
-                        mirror_candidates.append(host)
-            valid = [m for m in mirror_candidates if m in supported_mirrors]
-            if not valid or (mirror_filter and mirror_filter not in valid):
-                continue
+            # can't check for mirrors in soup, so we use the hardcoded list
+            if mirror_filter and mirror_filter not in supported_mirrors:
+                continue

             # extract IMDb ID
             imdb_id = None
@@ -128,9 +116,18 @@ def _parse_posts(
                     imdb_id = m.group(1)
                     break

+            if not imdb_id:
+                m = IMDB_REGEX.search(post.get_text())
+                if m:
+                    imdb_id = m.group(1)
+
             # size extraction
             mb = size_bytes = 0
-            size_match = re.search(
+            size_match = re.search(
+                r"(?:Größe|Size).*?:\s*([\d\.]+)\s*([GMK]B)",
+                post.get_text(),
+                re.IGNORECASE,
+            )
             if size_match:
                 sz = {"size": size_match.group(1), "sizeunit": size_match.group(2)}
                 mb = shared_state.convert_to_mb(sz)
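The size regex now spans several lines for readability but matches the same "Größe: 4.7 GB" / "Size: 700 MB" style post lines, capturing value and unit before converting to MB. A sketch of the extraction; convert_to_mb below is a local stand-in for shared_state.convert_to_mb, whose exact behaviour is assumed:

import re

def convert_to_mb(sz):
    # local stand-in for shared_state.convert_to_mb (assumed semantics)
    value = float(sz["size"])
    factor = {"KB": 1 / 1024, "MB": 1, "GB": 1024}[sz["sizeunit"].upper()]
    return int(value * factor)

text = "Release info ... Größe: 4.7 GB ... Download: rapidgator"
m = re.search(r"(?:Größe|Size).*?:\s*([\d\.]+)\s*([GMK]B)", text, re.IGNORECASE)
if m:
    print(convert_to_mb({"size": m.group(1), "sizeunit": m.group(2)}))  # 4812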
@@ -159,7 +156,7 @@ def _parse_posts(
                 }
             )
         except Exception as e:
-
+            error(f"Error parsing {hostname.upper()} post: {e}")
             continue
     return releases

{quasarr-2.7.1 → quasarr-2.7.2}/quasarr/search/sources/sf.py
@@ -316,7 +316,7 @@ def sf_search(
     # fetch API HTML
     epoch = str(datetime.now().timestamp()).replace(".", "")[:-3]
     api_url = f"https://{sf}/api/v1/{season_id}/season/ALL?lang=ALL&_={epoch}"
-
+    trace(f"Requesting SF API URL: {api_url}")
     try:
         r = requests.get(api_url, headers=headers, timeout=10)
         r.raise_for_status()
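The "_" query parameter is a millisecond-epoch cache buster: the float timestamp's dot is stripped and the final three digits (sub-millisecond precision) are cut. In isolation, with an illustrative host and season id:

from datetime import datetime

# Millisecond-epoch cache buster, as in the SF API URL above:
# "1717171717.123456" -> "1717171717123456" -> "1717171717123"
epoch = str(datetime.now().timestamp()).replace(".", "")[:-3]
api_url = f"https://sf.example/api/v1/12345/season/ALL?lang=ALL&_={epoch}"  # host/id illustrative
print(api_url)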
{quasarr-2.7.1 → quasarr-2.7.2}/quasarr/search/sources/sj.py
@@ -39,70 +39,73 @@ def sj_feed(shared_state, start_time, request_from, mirror=None):
     sj_host = shared_state.values["config"]("Hostnames").get(hostname)
     password = sj_host

-    url = f"https://{sj_host}/api/releases/latest/0"
     headers = {"User-Agent": shared_state.values["user_agent"]}

-
-
-        r.raise_for_status()
-        data = json.loads(r.content)
-    except Exception as e:
-        info(f"{hostname.upper()}: feed load error: {e}")
-        mark_hostname_issue(
-            hostname, "feed", str(e) if "e" in dir() else "Error occurred"
-        )
-        return releases
+    for days in range(4):
+        url = f"https://{sj_host}/api/releases/latest/{days}"

-    for release in data:
         try:
-
-
-
+            r = requests.get(url, headers=headers, timeout=30)
+            r.raise_for_status()
+            data = json.loads(r.content)
+        except Exception as e:
+            info(f"{hostname.upper()}: feed load error: {e}")
+            mark_hostname_issue(
+                hostname, "feed", str(e) if "e" in dir() else "Error occurred"
+            )
+            return releases

-
-
-
+        for release in data:
+            try:
+                title = release.get("name").rstrip(".")
+                if not title:
+                    continue

-
-
-
-                continue
+                published = convert_to_rss_date(release.get("createdAt"))
+                if not published:
+                    continue

-
+                media = release.get("_media", {})
+                slug = media.get("slug")
+                if not slug:
+                    continue

-
-        size = 0
-        imdb_id = None
+                series_url = f"https://{sj_host}/serie/{slug}"

-
-
-
-            )
-        ).decode("utf-8")
+                mb = 0
+                size = 0
+                imdb_id = None

-
-
-
+                payload = urlsafe_b64encode(
+                    f"{title}|{series_url}|{mirror}|{mb}|{password}|{imdb_id}|{hostname}".encode(
+                        "utf-8"
+                    )
+                ).decode("utf-8")
+
+                link = f"{shared_state.values['internal_address']}/download/?payload={payload}"
+
+                releases.append(
+                    {
+                        "details": {
+                            "title": title,
+                            "hostname": hostname,
+                            "imdb_id": imdb_id,
+                            "link": link,
+                            "mirror": mirror,
+                            "size": size,
+                            "date": published,
+                            "source": series_url,
+                        },
+                        "type": "protected",
+                    }
+                )

-
-        {
-
-            "title": title,
-            "hostname": hostname,
-            "imdb_id": imdb_id,
-            "link": link,
-            "mirror": mirror,
-            "size": size,
-            "date": published,
-            "source": series_url,
-        },
-        "type": "protected",
-        }
-        )
+            except Exception as e:
+                debug(f"{hostname.upper()}: feed parse error: {e}")
+                continue

-
-
-        continue
+        if releases:
+            break

     debug(f"Time taken: {time.time() - start_time:.2f}s")

All remaining files are unchanged between the two versions, as listed (+0 -0) above.