quasarr 2.4.8__py3-none-any.whl → 2.4.9__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package from a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in that registry.
Files changed (76)
  1. quasarr/__init__.py +134 -70
  2. quasarr/api/__init__.py +40 -31
  3. quasarr/api/arr/__init__.py +116 -108
  4. quasarr/api/captcha/__init__.py +262 -137
  5. quasarr/api/config/__init__.py +76 -46
  6. quasarr/api/packages/__init__.py +138 -102
  7. quasarr/api/sponsors_helper/__init__.py +29 -16
  8. quasarr/api/statistics/__init__.py +19 -19
  9. quasarr/downloads/__init__.py +165 -72
  10. quasarr/downloads/linkcrypters/al.py +35 -18
  11. quasarr/downloads/linkcrypters/filecrypt.py +107 -52
  12. quasarr/downloads/linkcrypters/hide.py +5 -6
  13. quasarr/downloads/packages/__init__.py +342 -177
  14. quasarr/downloads/sources/al.py +191 -100
  15. quasarr/downloads/sources/by.py +31 -13
  16. quasarr/downloads/sources/dd.py +27 -14
  17. quasarr/downloads/sources/dj.py +1 -3
  18. quasarr/downloads/sources/dl.py +126 -71
  19. quasarr/downloads/sources/dt.py +11 -5
  20. quasarr/downloads/sources/dw.py +28 -14
  21. quasarr/downloads/sources/he.py +32 -24
  22. quasarr/downloads/sources/mb.py +19 -9
  23. quasarr/downloads/sources/nk.py +14 -10
  24. quasarr/downloads/sources/nx.py +8 -18
  25. quasarr/downloads/sources/sf.py +45 -20
  26. quasarr/downloads/sources/sj.py +1 -3
  27. quasarr/downloads/sources/sl.py +9 -5
  28. quasarr/downloads/sources/wd.py +32 -12
  29. quasarr/downloads/sources/wx.py +35 -21
  30. quasarr/providers/auth.py +42 -37
  31. quasarr/providers/cloudflare.py +28 -30
  32. quasarr/providers/hostname_issues.py +2 -1
  33. quasarr/providers/html_images.py +2 -2
  34. quasarr/providers/html_templates.py +22 -14
  35. quasarr/providers/imdb_metadata.py +149 -80
  36. quasarr/providers/jd_cache.py +131 -39
  37. quasarr/providers/log.py +1 -1
  38. quasarr/providers/myjd_api.py +260 -196
  39. quasarr/providers/notifications.py +53 -41
  40. quasarr/providers/obfuscated.py +9 -4
  41. quasarr/providers/sessions/al.py +71 -55
  42. quasarr/providers/sessions/dd.py +21 -14
  43. quasarr/providers/sessions/dl.py +30 -19
  44. quasarr/providers/sessions/nx.py +23 -14
  45. quasarr/providers/shared_state.py +292 -141
  46. quasarr/providers/statistics.py +75 -43
  47. quasarr/providers/utils.py +33 -27
  48. quasarr/providers/version.py +45 -14
  49. quasarr/providers/web_server.py +10 -5
  50. quasarr/search/__init__.py +30 -18
  51. quasarr/search/sources/al.py +124 -73
  52. quasarr/search/sources/by.py +110 -59
  53. quasarr/search/sources/dd.py +57 -35
  54. quasarr/search/sources/dj.py +69 -48
  55. quasarr/search/sources/dl.py +159 -100
  56. quasarr/search/sources/dt.py +110 -74
  57. quasarr/search/sources/dw.py +121 -61
  58. quasarr/search/sources/fx.py +108 -62
  59. quasarr/search/sources/he.py +78 -49
  60. quasarr/search/sources/mb.py +96 -48
  61. quasarr/search/sources/nk.py +80 -50
  62. quasarr/search/sources/nx.py +91 -62
  63. quasarr/search/sources/sf.py +171 -106
  64. quasarr/search/sources/sj.py +69 -48
  65. quasarr/search/sources/sl.py +115 -71
  66. quasarr/search/sources/wd.py +67 -44
  67. quasarr/search/sources/wx.py +188 -123
  68. quasarr/storage/config.py +65 -52
  69. quasarr/storage/setup.py +238 -140
  70. quasarr/storage/sqlite_database.py +10 -4
  71. {quasarr-2.4.8.dist-info → quasarr-2.4.9.dist-info}/METADATA +2 -2
  72. quasarr-2.4.9.dist-info/RECORD +81 -0
  73. quasarr-2.4.8.dist-info/RECORD +0 -81
  74. {quasarr-2.4.8.dist-info → quasarr-2.4.9.dist-info}/WHEEL +0 -0
  75. {quasarr-2.4.8.dist-info → quasarr-2.4.9.dist-info}/entry_points.txt +0 -0
  76. {quasarr-2.4.8.dist-info → quasarr-2.4.9.dist-info}/licenses/LICENSE +0 -0
quasarr/search/sources/dd.py

@@ -7,10 +7,13 @@ import time
 from base64 import urlsafe_b64encode
 from datetime import datetime, timezone
 
-from quasarr.providers.hostname_issues import mark_hostname_issue, clear_hostname_issue
+from quasarr.providers.hostname_issues import clear_hostname_issue, mark_hostname_issue
 from quasarr.providers.imdb_metadata import get_localized_title
-from quasarr.providers.log import info, debug
-from quasarr.providers.sessions.dd import create_and_persist_session, retrieve_and_validate_session
+from quasarr.providers.log import debug, info
+from quasarr.providers.sessions.dd import (
+    create_and_persist_session,
+    retrieve_and_validate_session,
+)
 
 hostname = "dd"
 supported_mirrors = ["ironfiles", "rapidgator", "filefactory"]
@@ -18,7 +21,7 @@ supported_mirrors = ["ironfiles", "rapidgator", "filefactory"]
 
 def convert_to_rss_date(unix_timestamp):
     parsed_date = datetime.fromtimestamp(unix_timestamp, tz=timezone.utc)
-    rss_date = parsed_date.strftime('%a, %d %b %Y %H:%M:%S %z')
+    rss_date = parsed_date.strftime("%a, %d %b %Y %H:%M:%S %z")
 
     return rss_date
 
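
Note: for reference, the helper reformatted above turns a Unix timestamp into the RFC 822 date string used in RSS pubDate fields; a standalone sketch (sample timestamp chosen for illustration):

    from datetime import datetime, timezone

    def convert_to_rss_date(unix_timestamp):
        # UTC-aware parse, then RFC 822 formatting as used for RSS <pubDate>
        parsed_date = datetime.fromtimestamp(unix_timestamp, tz=timezone.utc)
        return parsed_date.strftime("%a, %d %b %Y %H:%M:%S %z")

    print(convert_to_rss_date(1700000000))  # Tue, 14 Nov 2023 22:13:20 +0000
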
@@ -31,13 +34,23 @@ def dd_feed(*args, **kwargs):
     return dd_search(*args, **kwargs)
 
 
-def dd_search(shared_state, start_time, request_from, search_string="", mirror=None, season=None, episode=None):
+def dd_search(
+    shared_state,
+    start_time,
+    request_from,
+    search_string="",
+    mirror=None,
+    season=None,
+    episode=None,
+):
     releases = []
     dd = shared_state.values["config"]("Hostnames").get(hostname.lower())
     password = dd
 
     if not "arr" in request_from.lower():
-        debug(f'Skipping {request_from} search on "{hostname.upper()}" (unsupported media type)!')
+        debug(
+            f'Skipping {request_from} search on "{hostname.upper()}" (unsupported media type)!'
+        )
         return releases
 
     try:
@@ -51,13 +64,15 @@ def dd_search(shared_state, start_time, request_from, search_string="", mirror=N
         return releases
 
     if mirror and mirror not in supported_mirrors:
-        debug(f'Mirror "{mirror}" not supported by "{hostname.upper()}". Supported mirrors: {supported_mirrors}.'
-              ' Skipping search!')
+        debug(
+            f'Mirror "{mirror}" not supported by "{hostname.upper()}". Supported mirrors: {supported_mirrors}.'
+            " Skipping search!"
+        )
         return releases
 
     imdb_id = shared_state.is_imdb_id(search_string)
     if imdb_id:
-        search_string = get_localized_title(shared_state, imdb_id, 'en')
+        search_string = get_localized_title(shared_state, imdb_id, "en")
         if not search_string:
             info(f"Could not extract title from IMDb-ID {imdb_id}")
             return releases
@@ -79,17 +94,17 @@ def dd_search(shared_state, start_time, request_from, search_string="", mirror=N
         "web-1080p-x265",
         "web-2160p-x265-hdr",
         "movie-1080p-x265",
-        "movie-2160p-webdl-x265-hdr"
+        "movie-2160p-webdl-x265-hdr",
     ]
 
     headers = {
-        'User-Agent': shared_state.values["user_agent"],
+        "User-Agent": shared_state.values["user_agent"],
     }
 
     try:
         release_list = []
         for page in range(0, 100, 20):
-            url = f'https://{dd}/index/search/keyword/{search_string}/qualities/{','.join(qualities)}/from/{page}/search'
+            url = f"https://{dd}/index/search/keyword/{search_string}/qualities/{','.join(qualities)}/from/{page}/search"
 
             r = dd_session.get(url, headers=headers, timeout=timeout)
             r.raise_for_status()
@@ -101,17 +116,16 @@ def dd_search(shared_state, start_time, request_from, search_string="", mirror=N
             try:
                 if release.get("fake"):
                     debug(
-                        f"Release {release.get('release')} marked as fake. Invalidating {hostname.upper()} session...")
+                        f"Release {release.get('release')} marked as fake. Invalidating {hostname.upper()} session..."
+                    )
                     create_and_persist_session(shared_state)
                     return []
                 else:
                     title = release.get("release")
 
-                    if not shared_state.is_valid_release(title,
-                                                         request_from,
-                                                         search_string,
-                                                         season,
-                                                         episode):
+                    if not shared_state.is_valid_release(
+                        title, request_from, search_string, season, episode
+                    ):
                         continue
 
                     imdb_id = release.get("imdbid", None)
@@ -121,31 +135,39 @@ def dd_search(shared_state, start_time, request_from, search_string="", mirror=N
                     mb = shared_state.convert_to_mb(size_item) * 1024 * 1024
                     published = convert_to_rss_date(release.get("when"))
                     payload = urlsafe_b64encode(
-                        f"{title}|{source}|{mirror}|{mb}|{password}|{imdb_id}|{hostname}".encode("utf-8")).decode(
-                        "utf-8")
+                        f"{title}|{source}|{mirror}|{mb}|{password}|{imdb_id}|{hostname}".encode(
+                            "utf-8"
+                        )
+                    ).decode("utf-8")
                     link = f"{shared_state.values['internal_address']}/download/?payload={payload}"
 
-                    releases.append({
-                        "details": {
-                            "title": title,
-                            "hostname": hostname.lower(),
-                            "imdb_id": imdb_id,
-                            "link": link,
-                            "mirror": mirror,
-                            "size": mb,
-                            "date": published,
-                            "source": source
-                        },
-                        "type": "protected"
-                    })
+                    releases.append(
+                        {
+                            "details": {
+                                "title": title,
+                                "hostname": hostname.lower(),
+                                "imdb_id": imdb_id,
+                                "link": link,
+                                "mirror": mirror,
+                                "size": mb,
+                                "date": published,
+                                "source": source,
+                            },
+                            "type": "protected",
+                        }
+                    )
             except Exception as e:
                 info(f"Error parsing {hostname.upper()} feed: {e}")
-                mark_hostname_issue(hostname, "search", str(e) if "e" in dir() else "Error occurred")
+                mark_hostname_issue(
+                    hostname, "search", str(e) if "e" in dir() else "Error occurred"
+                )
                 continue
 
     except Exception as e:
         info(f"Error loading {hostname.upper()} {search_type}: {e}")
-        mark_hostname_issue(hostname, search_type, str(e) if "e" in dir() else "Error occurred")
+        mark_hostname_issue(
+            hostname, search_type, str(e) if "e" in dir() else "Error occurred"
+        )
 
     elapsed_time = time.time() - start_time
     debug(f"Time taken: {elapsed_time:.2f}s ({hostname})")
quasarr/search/sources/dj.py

@@ -11,18 +11,18 @@ from datetime import datetime, timedelta
 import requests
 from bs4 import BeautifulSoup
 
-from quasarr.providers.hostname_issues import mark_hostname_issue, clear_hostname_issue
+from quasarr.providers.hostname_issues import clear_hostname_issue, mark_hostname_issue
 from quasarr.providers.imdb_metadata import get_localized_title
-from quasarr.providers.log import info, debug
+from quasarr.providers.log import debug, info
 
 hostname = "dj"
 
 
 def convert_to_rss_date(date_str):
     try:
-        return datetime.fromisoformat(
-            date_str.replace("Z", "+00:00")
-        ).strftime("%a, %d %b %Y %H:%M:%S +0000")
+        return datetime.fromisoformat(date_str.replace("Z", "+00:00")).strftime(
+            "%a, %d %b %Y %H:%M:%S +0000"
+        )
     except Exception:
         return ""
 
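
Note: for reference, this variant of convert_to_rss_date accepts ISO 8601 input rather than a Unix timestamp; a standalone sketch (sample input assumed):

    from datetime import datetime

    date_str = "2023-11-14T22:13:20Z"
    # fromisoformat() only accepts a trailing "Z" on Python 3.11+,
    # hence the replace() with an explicit UTC offset
    rss_date = datetime.fromisoformat(date_str.replace("Z", "+00:00")).strftime(
        "%a, %d %b %Y %H:%M:%S +0000"
    )
    # "Tue, 14 Nov 2023 22:13:20 +0000"
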
@@ -31,7 +31,9 @@ def dj_feed(shared_state, start_time, request_from, mirror=None):
     releases = []
 
     if "sonarr" not in request_from.lower():
-        debug(f'Skipping {request_from} search on "{hostname.upper()}" (unsupported media type)!')
+        debug(
+            f'Skipping {request_from} search on "{hostname.upper()}" (unsupported media type)!'
+        )
         return releases
 
     sj_host = shared_state.values["config"]("Hostnames").get(hostname)
@@ -46,7 +48,9 @@ def dj_feed(shared_state, start_time, request_from, mirror=None):
         data = json.loads(r.content)
     except Exception as e:
         info(f"{hostname.upper()}: feed load error: {e}")
-        mark_hostname_issue(hostname, "feed", str(e) if "e" in dir() else "Error occurred")
+        mark_hostname_issue(
+            hostname, "feed", str(e) if "e" in dir() else "Error occurred"
+        )
         return releases
 
     for release in data:
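
Note: for reference, mark_hostname_issue(hostname, "feed", message) records a per-hostname failure so it can be surfaced elsewhere. A hedged sketch of the mark/clear pairing the imports suggest; the clear_hostname_issue signature is assumed to mirror mark_hostname_issue and is not shown in this hunk:

    from quasarr.providers.hostname_issues import clear_hostname_issue, mark_hostname_issue

    def load_feed(session, url, hostname):
        # Illustrative wrapper: flag the hostname on failure, clear the flag after a success
        try:
            r = session.get(url, timeout=10)
            r.raise_for_status()
        except Exception as e:
            mark_hostname_issue(hostname, "feed", str(e))
            return None
        clear_hostname_issue(hostname, "feed")  # assumed signature
        return r
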
@@ -71,24 +75,30 @@ def dj_feed(shared_state, start_time, request_from, mirror=None):
             imdb_id = None
 
             payload = urlsafe_b64encode(
-                f"{title}|{series_url}|{mirror}|{mb}|{password}|{imdb_id}|{hostname}".encode("utf-8")
+                f"{title}|{series_url}|{mirror}|{mb}|{password}|{imdb_id}|{hostname}".encode(
+                    "utf-8"
+                )
             ).decode("utf-8")
 
-            link = f"{shared_state.values['internal_address']}/download/?payload={payload}"
-
-            releases.append({
-                "details": {
-                    "title": title,
-                    "hostname": hostname,
-                    "imdb_id": imdb_id,
-                    "link": link,
-                    "mirror": mirror,
-                    "size": size,
-                    "date": published,
-                    "source": series_url
-                },
-                "type": "protected"
-            })
+            link = (
+                f"{shared_state.values['internal_address']}/download/?payload={payload}"
+            )
+
+            releases.append(
+                {
+                    "details": {
+                        "title": title,
+                        "hostname": hostname,
+                        "imdb_id": imdb_id,
+                        "link": link,
+                        "mirror": mirror,
+                        "size": size,
+                        "date": published,
+                        "source": series_url,
+                    },
+                    "type": "protected",
+                }
+            )
 
         except Exception as e:
             debug(f"{hostname.upper()}: feed parse error: {e}")
@@ -101,11 +111,21 @@ def dj_feed(shared_state, start_time, request_from, mirror=None):
     return releases
 
 
-def dj_search(shared_state, start_time, request_from, search_string, mirror=None, season=None, episode=None):
+def dj_search(
+    shared_state,
+    start_time,
+    request_from,
+    search_string,
+    mirror=None,
+    season=None,
+    episode=None,
+):
     releases = []
 
     if "sonarr" not in request_from.lower():
-        debug(f'Skipping {request_from} search on "{hostname.upper()}" (unsupported media type)!')
+        debug(
+            f'Skipping {request_from} search on "{hostname.upper()}" (unsupported media type)!'
+        )
         return releases
 
     sj_host = shared_state.values["config"]("Hostnames").get(hostname)
@@ -130,10 +150,12 @@ def dj_search(shared_state, start_time, request_from, search_string, mirror=None
         results = soup.find_all("a", href=re.compile(r"^/serie/"))
     except Exception as e:
         info(f"{hostname.upper()}: search load error: {e}")
-        mark_hostname_issue(hostname, "search", str(e) if "e" in dir() else "Error occurred")
+        mark_hostname_issue(
+            hostname, "search", str(e) if "e" in dir() else "Error occurred"
+        )
         return releases
 
-    one_hour_ago = (datetime.now() - timedelta(hours=1)).strftime('%Y-%m-%d %H:%M:%S')
+    one_hour_ago = (datetime.now() - timedelta(hours=1)).strftime("%Y-%m-%d %H:%M:%S")
     sanitized_search_string = shared_state.sanitize_string(localized_title)
 
     for result in results:
@@ -143,8 +165,7 @@ def dj_search(shared_state, start_time, request_from, search_string, mirror=None
         sanitized_title = shared_state.sanitize_string(result_title)
 
         if not re.search(
-            rf"\b{re.escape(sanitized_search_string)}\b",
-            sanitized_title
+            rf"\b{re.escape(sanitized_search_string)}\b", sanitized_title
         ):
             debug(
                 f"Search string '{localized_title}' doesn't match '{result_title}'"
@@ -177,11 +198,7 @@ def dj_search(shared_state, start_time, request_from, search_string, mirror=None
                 continue
 
             if not shared_state.is_valid_release(
-                title,
-                request_from,
-                search_string,
-                season,
-                episode
+                title, request_from, search_string, season, episode
             ):
                 continue
 
@@ -194,24 +211,28 @@ def dj_search(shared_state, start_time, request_from, search_string, mirror=None
             size = 0
 
             payload = urlsafe_b64encode(
-                f"{title}|{series_url}|{mirror}|{mb}|{password}|{imdb_id}|{hostname}".encode("utf-8")
+                f"{title}|{series_url}|{mirror}|{mb}|{password}|{imdb_id}|{hostname}".encode(
+                    "utf-8"
+                )
             ).decode("utf-8")
 
             link = f"{shared_state.values['internal_address']}/download/?payload={payload}"
 
-            releases.append({
-                "details": {
-                    "title": title,
-                    "hostname": hostname,
-                    "imdb_id": imdb_id,
-                    "link": link,
-                    "mirror": mirror,
-                    "size": size,
-                    "date": published,
-                    "source": series_url
-                },
-                "type": "protected"
-            })
+            releases.append(
+                {
+                    "details": {
+                        "title": title,
+                        "hostname": hostname,
+                        "imdb_id": imdb_id,
+                        "link": link,
+                        "mirror": mirror,
+                        "size": size,
+                        "date": published,
+                        "source": series_url,
+                    },
+                    "type": "protected",
+                }
+            )
 
         except Exception as e:
             debug(f"{hostname.upper()}: search parse error: {e}")