quasarr-1.30.0-py3-none-any.whl → quasarr-1.31.0-py3-none-any.whl

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.

This version of quasarr has been flagged as potentially problematic.

--- a/quasarr/api/captcha/__init__.py
+++ b/quasarr/api/captcha/__init__.py
@@ -5,7 +5,7 @@
 import json
 import re
 from base64 import urlsafe_b64encode, urlsafe_b64decode
-from urllib.parse import quote, unquote, urljoin
+from urllib.parse import quote, unquote
 
 import requests
 from bottle import request, response, redirect, HTTPResponse
@@ -308,7 +308,7 @@ def setup_captcha_routes(app):
 
     check_package_exists(package_id)
 
-    package_selector = render_package_selector(package_id)
+    package_selector = render_package_selector(package_id, title)
    failed_warning = render_failed_attempts_warning(package_id)
 
     return render_centered_html(f"""
@@ -318,7 +318,6 @@ def setup_captcha_routes(app):
    <h1><img src="{images.logo}" type="image/png" alt="Quasarr logo" class="logo"/>Quasarr</h1>
    {package_selector}
    {failed_warning}
-   <p><b>Package:</b> {title}</p>
    {render_userscript_section(url, package_id, title, password, "hide")}
    <p>
    {render_button("Delete Package", "secondary", {"onclick": f"location.href='/captcha/delete/{package_id}'"})}
@@ -349,7 +348,7 @@ def setup_captcha_routes(app):
 
    check_package_exists(package_id)
 
-   package_selector = render_package_selector(package_id)
+   package_selector = render_package_selector(package_id, title)
    failed_warning = render_failed_attempts_warning(package_id)
 
    return render_centered_html(f"""
@@ -359,7 +358,6 @@ def setup_captcha_routes(app):
    <h1><img src="{images.logo}" type="image/png" alt="Quasarr logo" class="logo"/>Quasarr</h1>
    {package_selector}
    {failed_warning}
-   <p><b>Package:</b> {title}</p>
    {render_userscript_section(url, package_id, title, password, "junkies")}
    <p>
    {render_button("Delete Package", "secondary", {"onclick": f"location.href='/captcha/delete/{package_id}'"})}
@@ -391,7 +389,7 @@ def setup_captcha_routes(app):
 
    url = urls[0][0] if isinstance(urls[0], (list, tuple)) else urls[0]
 
-   package_selector = render_package_selector(package_id)
+   package_selector = render_package_selector(package_id, title)
    failed_warning = render_failed_attempts_warning(package_id)
 
    return render_centered_html(f"""
@@ -401,7 +399,6 @@ def setup_captcha_routes(app):
    <h1><img src="{images.logo}" type="image/png" alt="Quasarr logo" class="logo"/>Quasarr</h1>
    {package_selector}
    {failed_warning}
-   <p><b>Package:</b> {title}</p>
    {render_userscript_section(url, package_id, title, password, "keeplinks")}
    <p>
    {render_button("Delete Package", "secondary", {"onclick": f"location.href='/captcha/delete/{package_id}'"})}
@@ -433,7 +430,7 @@ def setup_captcha_routes(app):
 
    url = urls[0][0] if isinstance(urls[0], (list, tuple)) else urls[0]
 
-   package_selector = render_package_selector(package_id)
+   package_selector = render_package_selector(package_id, title)
    failed_warning = render_failed_attempts_warning(package_id)
 
    return render_centered_html(f"""
@@ -443,7 +440,6 @@ def setup_captcha_routes(app):
    <h1><img src="{images.logo}" type="image/png" alt="Quasarr logo" class="logo"/>Quasarr</h1>
    {package_selector}
    {failed_warning}
-   <p><b>Package:</b> {title}</p>
    {render_userscript_section(url, package_id, title, password, "tolink")}
    <p>
    {render_button("Delete Package", "secondary", {"onclick": f"location.href='/captcha/delete/{package_id}'"})}
@@ -619,12 +615,22 @@ def setup_captcha_routes(app):
    </script>
    '''
 
-   def render_package_selector(current_package_id):
-       """Render a dropdown selector for all available packages at the top of captcha UIs"""
+   def render_package_selector(current_package_id, current_title=None):
+       """Render package title, with dropdown selector if multiple packages available"""
        protected = shared_state.get_db("protected").retrieve_all_titles()
 
-       if not protected or len(protected) <= 1:
-           return ""  # Don't show selector if only one or no packages
+       if not protected:
+           return ""
+
+       # Single package - just show the title without dropdown
+       if len(protected) <= 1:
+           if current_title:
+               return f'''
+   <div class="package-selector" style="margin-bottom: 20px; padding: 12px; background: rgba(128, 128, 128, 0.1); border: 1px solid rgba(128, 128, 128, 0.3); border-radius: 8px;">
+       <p style="margin: 0; word-break: break-all;"><b>📦 Package:</b> {current_title}</p>
+   </div>
+   '''
+           return ""
 
        sj = shared_state.values["config"]("Hostnames").get("sj")
        dj = shared_state.values["config"]("Hostnames").get("dj")
@@ -815,7 +821,7 @@ def setup_captcha_routes(app):
        f"pkg_pass={quote(password)}"
    )
 
-   package_selector = render_package_selector(package_id)
+   package_selector = render_package_selector(package_id, title)
    failed_warning = render_failed_attempts_warning(package_id)
 
    return render_centered_html(f"""
@@ -825,7 +831,6 @@ def setup_captcha_routes(app):
    <h1><img src="{images.logo}" type="image/png" alt="Quasarr logo" class="logo"/>Quasarr</h1>
    {package_selector}
    {failed_warning}
-   <p style="max-width: 370px; word-wrap: break-word; overflow-wrap: break-word;"><b>Package:</b> {title}</p>
 
    <div>
    <!-- Info section explaining the process -->
@@ -1171,7 +1176,7 @@ def setup_captcha_routes(app):
    bypass_section = render_filecrypt_bypass_section(url, package_id, title, password)
 
    # Add package selector and failed attempts warning
-   package_selector = render_package_selector(package_id)
+   package_selector = render_package_selector(package_id, title)
 
    # Create fallback URL for the manual FileCrypt page
    fallback_payload = {
@@ -1187,6 +1192,9 @@ def setup_captcha_routes(app):
    failed_warning = render_failed_attempts_warning(package_id, include_delete_button=False,
                                                    fallback_url=filecrypt_fallback_url)  # Delete button is already below
 
+   # Escape title for safe use in JavaScript string
+   escaped_title_js = title.replace('\\', '\\\\').replace('"', '\\"').replace('\n', '\\n').replace('\r', '\\r')
+
    content = render_centered_html(r'''
    <style>
    @media (max-width: 600px) {
@@ -1203,6 +1211,9 @@ def setup_captcha_routes(app):
    }
    </style>
    <script type="text/javascript">
+   // Package title for result display
+   var packageTitleText = "''' + escaped_title_js + r'''";
+
    // Check if we should redirect to fallback due to failed attempts
    (function() {
        const storageKey = 'captcha_attempts_''' + package_id + r'''';
@@ -1231,11 +1242,8 @@ def setup_captcha_routes(app):
    var warnBox = document.getElementById("failed-attempts-warning");
    if (warnBox) warnBox.style.display = "none";
 
-   // Remove width limit on result screen
-   var packageTitle = document.getElementById("package-title");
-   packageTitle.style.maxWidth = "none";
-
-   document.getElementById("captcha-key").innerText = 'Using result "' + token + '" to decrypt links...';
+   // Add package title to result area
+   document.getElementById("captcha-key").innerHTML = '<p style="word-break: break-all;"><b>Package:</b> ' + packageTitleText + '</p><p style="word-break: break-all;">Using result "' + token + '" to decrypt links...</p>';
    var link = document.getElementById("link-hidden").value;
    const fullPath = '/captcha/decrypt-filecrypt';
 
@@ -1288,7 +1296,6 @@ def setup_captcha_routes(app):
    {package_selector}
    </div>
    {failed_warning}
-   <p id="package-title" style="max-width: 370px; word-wrap: break-word; overflow-wrap: break-word;"><b>Package:</b> {title}</p>
    <div id="captcha-key"></div>
    {link_select}<br><br>
    <input type="hidden" id="link-hidden" value="{prioritized_links[0][0]}" />
--- a/quasarr/downloads/__init__.py
+++ b/quasarr/downloads/__init__.py
@@ -25,6 +25,7 @@ from quasarr.downloads.sources.wx import get_wx_download_links
 from quasarr.providers.log import info
 from quasarr.providers.notifications import send_discord_message
 from quasarr.providers.statistics import StatsHelper
+from quasarr.providers.utils import filter_offline_links
 
 # =============================================================================
 # CRYPTER CONFIGURATION
@@ -184,6 +185,12 @@ def process_links(shared_state, source_result, title, password, package_id, imdb
                        reason=f'All links are offline or IP is banned for "{title}" on {label} - "{source_url}"')
        links = valid_links
 
+   # Filter out verifiably offline links
+   links = filter_offline_links(links, shared_state=shared_state, log_func=info)
+   if not links:
+       return fail(title, package_id, shared_state,
+                   reason=f'All verifiable links are offline for "{title}" on {label} - "{source_url}"')
+
    classified = classify_links(links, shared_state)
 
    # PRIORITY 1: Direct hoster links
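`filter_offline_links` (added to `quasarr/providers/utils.py` further down in this diff) always keeps links it cannot verify, so this guard only fails a package when every link carried a checkable status URL and all of them were verified offline. A toy model of that contract, with made-up URLs:

```python
def filter_offline_contract(links, verdicts):
    """Toy model: verdicts maps url -> True (online), False (offline) or
    None/missing (unverifiable); only verified-offline links are dropped."""
    return [link for link in links if verdicts.get(link[0]) is not False]

links = [["https://hide.example/a", "host1"], ["https://fc.example/b", "host2"]]

print(filter_offline_contract(links, {"https://hide.example/a": False}))
# [['https://fc.example/b', 'host2']] -> package survives
print(filter_offline_contract(links, {"https://hide.example/a": False,
                                      "https://fc.example/b": False}))
# [] -> the only case where the new fail() branch fires
```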
--- a/quasarr/downloads/sources/dl.py
+++ b/quasarr/downloads/sources/dl.py
@@ -3,14 +3,12 @@
 # Project by https://github.com/rix1337
 
 import re
-from concurrent.futures import ThreadPoolExecutor, as_completed
-from io import BytesIO
 
-from PIL import Image
 from bs4 import BeautifulSoup, NavigableString
 
 from quasarr.providers.log import info, debug
 from quasarr.providers.sessions.dl import retrieve_and_validate_session, fetch_via_requests_session, invalidate_session
+from quasarr.providers.utils import generate_status_url, check_links_online_status
 
 hostname = "dl"
 
@@ -136,28 +134,6 @@ def extract_mirror_name_from_link(link_element):
    return None
 
 
-def generate_status_url(href, crypter_type):
-    """
-    Generate a status URL for crypters that support it.
-    Returns None if status URL cannot be generated.
-    """
-    if crypter_type == "hide":
-        # hide.cx links: https://hide.cx/folder/{UUID} → https://hide.cx/state/{UUID}
-        match = re.search(r'hide\.cx/(?:folder/)?([a-f0-9-]{36})', href, re.IGNORECASE)
-        if match:
-            uuid = match.group(1)
-            return f"https://hide.cx/state/{uuid}"
-
-    elif crypter_type == "tolink":
-        # tolink links: https://tolink.to/f/{ID} → https://tolink.to/f/{ID}/s/status.png
-        match = re.search(r'tolink\.to/f/([a-zA-Z0-9]+)', href, re.IGNORECASE)
-        if match:
-            link_id = match.group(1)
-            return f"https://tolink.to/f/{link_id}/s/status.png"
-
-    return None
-
-
 def extract_status_url_from_html(link_element, crypter_type):
    """
    Extract status image URL from HTML near the link element.
@@ -259,100 +235,6 @@ def build_filecrypt_status_map(soup):
    return status_map
 
 
-def image_has_green(image_data):
-    """
-    Analyze image data to check if it contains green pixels.
-    Returns True if any significant green is detected (indicating online status).
-    """
-    try:
-        img = Image.open(BytesIO(image_data))
-        img = img.convert('RGB')
-
-        pixels = list(img.getdata())
-
-        for r, g, b in pixels:
-            # Check if pixel is greenish: green channel is dominant
-            # and has a reasonable absolute value
-            if g > 100 and g > r * 1.3 and g > b * 1.3:
-                return True
-
-        return False
-    except Exception as e:
-        debug(f"Error analyzing status image: {e}")
-        # If we can't analyze, assume online to not skip valid links
-        return True
-
-
-def fetch_status_image(status_url):
-    """
-    Fetch a status image and return (status_url, image_data).
-    Returns (status_url, None) on failure.
-    """
-    try:
-        import requests
-        response = requests.get(status_url, timeout=10)
-        if response.status_code == 200:
-            return (status_url, response.content)
-    except Exception as e:
-        debug(f"Error fetching status image {status_url}: {e}")
-    return (status_url, None)
-
-
-def check_links_online_status(links_with_status):
-    """
-    Check online status for links that have status URLs.
-    Returns list of links that are online (or have no status URL to check).
-
-    links_with_status: list of [href, identifier, status_url] where status_url can be None
-    """
-
-    links_to_check = [(i, link) for i, link in enumerate(links_with_status) if link[2]]
-
-    if not links_to_check:
-        # No status URLs to check, return all links as potentially online
-        return [[link[0], link[1]] for link in links_with_status]
-
-    # Batch fetch status images
-    status_results = {}  # status_url -> has_green
-    status_urls = list(set(link[2] for _, link in links_to_check))
-
-    batch_size = 10
-    for i in range(0, len(status_urls), batch_size):
-        batch = status_urls[i:i + batch_size]
-        with ThreadPoolExecutor(max_workers=batch_size) as executor:
-            futures = [executor.submit(fetch_status_image, url) for url in batch]
-            for future in as_completed(futures):
-                try:
-                    status_url, image_data = future.result()
-                    if image_data:
-                        status_results[status_url] = image_has_green(image_data)
-                    else:
-                        # Could not fetch, assume online
-                        status_results[status_url] = True
-                except Exception as e:
-                    debug(f"Error checking status: {e}")
-
-    # Filter to online links
-    online_links = []
-
-    for link in links_with_status:
-        href, identifier, status_url = link
-        if not status_url:
-            # No status URL, include link (keeplinks case)
-            online_links.append([href, identifier])
-        elif status_url in status_results:
-            if status_results[status_url]:
-                online_links.append([href, identifier])
-                debug(f"Link online: {identifier} ({href})")
-            else:
-                debug(f"Link offline: {identifier} ({href})")
-        else:
-            # Status check failed, include link
-            online_links.append([href, identifier])
-
-    return online_links
-
-
 def extract_links_and_password_from_post(post_content, host):
    """
    Extract download links and password from a forum post.
@@ -448,7 +330,11 @@ def get_dl_download_links(shared_state, url, mirror, title, password):
        info(f"Could not find any posts in thread: {url}")
        return {"links": [], "password": ""}
 
-   # Iterate through posts to find one with online links
+   # Track first post with unverifiable links as fallback
+   fallback_links = None
+   fallback_password = ""
+
+   # Iterate through posts to find one with verified online links
    for post_index, post in enumerate(posts):
        post_content = post.select_one('div.bbWrapper')
        if not post_content:
@@ -459,16 +345,32 @@ def get_dl_download_links(shared_state, url, mirror, title, password):
        if not links_with_status:
            continue
 
+       # Check if any links have status URLs we can verify
+       has_verifiable_links = any(link[2] for link in links_with_status)
+
+       if not has_verifiable_links:
+           # No way to check online status - save as fallback and continue looking
+           if fallback_links is None:
+               fallback_links = [[link[0], link[1]] for link in links_with_status]
+               fallback_password = extracted_password
+               debug(f"Post #{post_index + 1} has links but no status URLs, saving as fallback...")
+           continue
+
        # Check which links are online
-       online_links = check_links_online_status(links_with_status)
+       online_links = check_links_online_status(links_with_status, shared_state)
 
        if online_links:
            post_info = "first post" if post_index == 0 else f"post #{post_index + 1}"
-           debug(f"Found {len(online_links)} online link(s) in {post_info} for: {title}")
+           debug(f"Found {len(online_links)} verified online link(s) in {post_info} for: {title}")
            return {"links": online_links, "password": extracted_password}
        else:
            debug(f"All links in post #{post_index + 1} are offline, checking next post...")
 
+   # No verified online links found - return fallback if available
+   if fallback_links:
+       debug(f"No verified online links found, returning unverified fallback links for: {title}")
+       return {"links": fallback_links, "password": fallback_password}
+
    info(f"No online download links found in any post: {url}")
    return {"links": [], "password": ""}
 
--- a/quasarr/downloads/sources/wx.py
+++ b/quasarr/downloads/sources/wx.py
@@ -7,6 +7,7 @@ import re
 import requests
 
 from quasarr.providers.log import info, debug
+from quasarr.providers.utils import check_links_online_status
 
 hostname = "wx"
 
@@ -15,7 +16,10 @@ def get_wx_download_links(shared_state, url, mirror, title, password):
    """
    KEEP THE SIGNATURE EVEN IF SOME PARAMETERS ARE UNUSED!
 
-   WX source handler - Grabs download links from API based on title and mirror.
+   WX source handler - Grabs download links from API based on title.
+   Finds the best mirror (M1, M2, M3...) by checking online status.
+   Returns all online links from the first complete mirror, or the best partial mirror.
+   Prefers hide.cx links over other crypters (filecrypt, etc.) when online counts are equal.
    """
    host = shared_state.values["config"]("Hostnames").get(hostname)
 
@@ -35,7 +39,7 @@ def get_wx_download_links(shared_state, url, mirror, title, password):
        return {"links": []}
 
    # Extract slug from URL
-   slug_match = re.search(r'/detail/([^/]+)', url)
+   slug_match = re.search(r'/detail/([^/?]+)', url)
    if not slug_match:
        info(f"{hostname.upper()}: Could not extract slug from URL: {url}")
        return {"links": []}
@@ -64,62 +68,100 @@ def get_wx_download_links(shared_state, url, mirror, title, password):
 
        releases = data['item']['releases']
 
-       # Find the release matching the title
-       matching_release = None
-       for release in releases:
-           if release.get('fulltitle') == title:
-               matching_release = release
-               break
+       # Find ALL releases matching the title (these are different mirrors: M1, M2, M3...)
+       matching_releases = [r for r in releases if r.get('fulltitle') == title]
 
-       if not matching_release:
+       if not matching_releases:
            info(f"{hostname.upper()}: No release found matching title: {title}")
            return {"links": []}
 
-       # Extract crypted_links based on mirror
-       crypted_links = matching_release.get('crypted_links', {})
-
-       if not crypted_links:
-           info(f"{hostname.upper()}: No crypted_links found for: {title}")
-           return {"links": []}
-
-       links = []
-
-       # If mirror is specified, find matching hoster (handle partial matches like 'ddownload' -> 'ddownload.com')
-       if mirror:
-           matched_hoster = None
-           for hoster in crypted_links.keys():
-               if mirror.lower() in hoster.lower() or hoster.lower() in mirror.lower():
-                   matched_hoster = hoster
-                   break
-
-           if matched_hoster:
-               link = crypted_links[matched_hoster]
-               # Prefer hide over filecrypt
-               if re.search(r'hide\.', link, re.IGNORECASE):
-                   links.append([link, matched_hoster])
-                   debug(f"{hostname.upper()}: Found hide link for mirror {matched_hoster}")
-               elif re.search(r'filecrypt\.', link, re.IGNORECASE):
-                   links.append([link, matched_hoster])
-                   debug(f"{hostname.upper()}: Found filecrypt link for mirror {matched_hoster}")
-           else:
-               info(
-                   f"{hostname.upper()}: Mirror '{mirror}' not found in available hosters: {list(crypted_links.keys())}")
-       else:
-           # If no mirror specified, get all available crypted links (prefer hide over filecrypt)
-           for hoster, link in crypted_links.items():
-               if re.search(r'hide\.', link, re.IGNORECASE):
-                   links.append([link, hoster])
-                   debug(f"{hostname.upper()}: Found hide link for hoster {hoster}")
-               elif re.search(r'filecrypt\.', link, re.IGNORECASE):
-                   links.append([link, hoster])
-                   debug(f"{hostname.upper()}: Found filecrypt link for hoster {hoster}")
-
-       if not links:
-           info(f"{hostname.upper()}: No supported crypted links found for: {title}")
-           return {"links": []}
-
-       debug(f"{hostname.upper()}: Found {len(links)} crypted link(s) for: {title}")
-       return {"links": links}
+       debug(f"{hostname.upper()}: Found {len(matching_releases)} mirror(s) for: {title}")
+
+       # Evaluate each mirror and find the best one
+       # Track: (online_count, is_hide, online_links)
+       best_mirror = None  # (online_count, is_hide, online_links)
+
+       for idx, release in enumerate(matching_releases):
+           crypted_links = release.get('crypted_links', {})
+           check_urls = release.get('options', {}).get('check', {})
+
+           if not crypted_links:
+               continue
+
+           # Separate hide.cx links from other crypters
+           hide_links = []
+           other_links = []
+
+           for hoster, container_url in crypted_links.items():
+               state_url = check_urls.get(hoster)
+               if re.search(r'hide\.', container_url, re.IGNORECASE):
+                   hide_links.append([container_url, hoster, state_url])
+               elif re.search(r'filecrypt\.', container_url, re.IGNORECASE):
+                   other_links.append([container_url, hoster, state_url])
+               # Skip other crypters we don't support
+
+           # Check hide.cx links first (preferred)
+           hide_online = 0
+           online_hide = []
+           if hide_links:
+               online_hide = check_links_online_status(hide_links, shared_state)
+               hide_total = len(hide_links)
+               hide_online = len(online_hide)
+
+               debug(f"{hostname.upper()}: M{idx + 1} hide.cx: {hide_online}/{hide_total} online")
+
+               # If all hide.cx links are online, use this mirror immediately
+               if hide_online == hide_total and hide_online > 0:
+                   debug(
+                       f"{hostname.upper()}: M{idx + 1} is complete (all {hide_online} hide.cx links online), using this mirror")
+                   return {"links": online_hide}
+
+           # Check other crypters (filecrypt, etc.) - no early return, always check all mirrors for hide.cx first
+           other_online = 0
+           online_other = []
+           if other_links:
+               online_other = check_links_online_status(other_links, shared_state)
+               other_total = len(other_links)
+               other_online = len(online_other)
+
+               debug(f"{hostname.upper()}: M{idx + 1} other crypters: {other_online}/{other_total} online")
+
+           # Determine best option for this mirror (prefer hide.cx on ties)
+           mirror_links = None
+           mirror_count = 0
+           mirror_is_hide = False
+
+           if hide_online > 0 and hide_online >= other_online:
+               # hide.cx wins (more links or tie)
+               mirror_links = online_hide
+               mirror_count = hide_online
+               mirror_is_hide = True
+           elif other_online > hide_online:
+               # other crypter has more online links
+               mirror_links = online_other
+               mirror_count = other_online
+               mirror_is_hide = False
+
+           # Update best_mirror if this mirror is better
+           # Priority: 1) more online links, 2) hide.cx preference on ties
+           if mirror_links:
+               if best_mirror is None:
+                   best_mirror = (mirror_count, mirror_is_hide, mirror_links)
+               elif mirror_count > best_mirror[0]:
+                   best_mirror = (mirror_count, mirror_is_hide, mirror_links)
+               elif mirror_count == best_mirror[0] and mirror_is_hide and not best_mirror[1]:
+                   # Same count but this is hide.cx and current best is not
+                   best_mirror = (mirror_count, mirror_is_hide, mirror_links)
+
+       # No complete mirror found, return best partial mirror
+       if best_mirror and best_mirror[2]:
+           crypter_type = "hide.cx" if best_mirror[1] else "other crypter"
+           debug(
+               f"{hostname.upper()}: No complete mirror, using best partial with {best_mirror[0]} online {crypter_type} link(s)")
+           return {"links": best_mirror[2]}
+
+       info(f"{hostname.upper()}: No online links found for: {title}")
+       return {"links": []}
 
    except Exception as e:
        info(f"{hostname.upper()}: Error extracting download links from {url}: {e}")
--- a/quasarr/providers/utils.py
+++ b/quasarr/providers/utils.py
@@ -6,9 +6,12 @@ import os
 import re
 import socket
 import sys
+from concurrent.futures import ThreadPoolExecutor, as_completed
+from io import BytesIO
 from urllib.parse import urlparse
 
 import requests
+from PIL import Image
 
 # Fallback user agent when FlareSolverr is not available
 FALLBACK_USER_AGENT = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/143.0.0.0 Safari/537.36"
@@ -187,3 +190,177 @@ def is_site_usable(shared_state, shorthand):
    password = config.get('password')
 
    return bool(user and password)
+
+
+# =============================================================================
+# LINK STATUS CHECKING
+# =============================================================================
+
+def generate_status_url(href, crypter_type):
+    """
+    Generate a status URL for crypters that support it.
+    Returns None if status URL cannot be generated.
+    """
+    if crypter_type == "hide":
+        # hide.cx links: https://hide.cx/folder/{UUID} or /container/{UUID} → https://hide.cx/state/{UUID}
+        match = re.search(r'hide\.cx/(?:folder/|container/)?([a-f0-9-]{36})', href, re.IGNORECASE)
+        if match:
+            uuid = match.group(1)
+            return f"https://hide.cx/state/{uuid}"
+
+    elif crypter_type == "tolink":
+        # tolink links: https://tolink.to/f/{ID} → https://tolink.to/f/{ID}/s/status.png
+        match = re.search(r'tolink\.to/f/([a-zA-Z0-9]+)', href, re.IGNORECASE)
+        if match:
+            link_id = match.group(1)
+            return f"https://tolink.to/f/{link_id}/s/status.png"
+
+    return None
+
+
+def detect_crypter_type(url):
+    """Detect crypter type from URL for status checking."""
+    url_lower = url.lower()
+    if 'hide.' in url_lower:
+        return "hide"
+    elif 'tolink.' in url_lower:
+        return "tolink"
+    elif 'filecrypt.' in url_lower:
+        return "filecrypt"
+    elif 'keeplinks.' in url_lower:
+        return "keeplinks"
+    return None
+
+
+def image_has_green(image_data):
+    """
+    Analyze image data to check if it contains green pixels.
+    Returns True if any significant green is detected (indicating online status).
+    """
+    try:
+        img = Image.open(BytesIO(image_data))
+        # Convert palette images with transparency to RGBA first to avoid warning
+        if img.mode == 'P' and 'transparency' in img.info:
+            img = img.convert('RGBA')
+        img = img.convert('RGB')
+
+        pixels = list(img.getdata())
+
+        for r, g, b in pixels:
+            # Check if pixel is greenish: green channel is dominant
+            # and has a reasonable absolute value
+            if g > 100 and g > r * 1.3 and g > b * 1.3:
+                return True
+
+        return False
+    except Exception:
+        # If we can't analyze, assume online to not skip valid links
+        return True
+
+
+def fetch_status_image(status_url, shared_state=None):
+    """
+    Fetch a status image and return (status_url, image_data).
+    Returns (status_url, None) on failure.
+    """
+    try:
+        headers = {}
+        if shared_state:
+            user_agent = shared_state.values.get("user_agent")
+            if user_agent:
+                headers["User-Agent"] = user_agent
+        response = requests.get(status_url, headers=headers, timeout=10)
+        if response.status_code == 200:
+            return (status_url, response.content)
+    except Exception:
+        pass
+    return (status_url, None)
+
+
+def check_links_online_status(links_with_status, shared_state=None):
+    """
+    Check online status for links that have status URLs.
+    Returns list of links that are online (or have no status URL to check).
+
+    links_with_status: list of [href, identifier, status_url] where status_url can be None
+    shared_state: optional shared state for user agent
+    """
+    links_to_check = [(i, link) for i, link in enumerate(links_with_status) if link[2]]
+
+    if not links_to_check:
+        # No status URLs to check, return all links as potentially online
+        return [[link[0], link[1]] for link in links_with_status]
+
+    # Batch fetch status images
+    status_results = {}  # status_url -> has_green
+    status_urls = list(set(link[2] for _, link in links_to_check))
+
+    batch_size = 10
+    for i in range(0, len(status_urls), batch_size):
+        batch = status_urls[i:i + batch_size]
+        with ThreadPoolExecutor(max_workers=batch_size) as executor:
+            futures = [executor.submit(fetch_status_image, url, shared_state) for url in batch]
+            for future in as_completed(futures):
+                try:
+                    status_url, image_data = future.result()
+                    if image_data:
+                        status_results[status_url] = image_has_green(image_data)
+                    else:
+                        # Could not fetch, assume online
+                        status_results[status_url] = True
+                except Exception:
+                    pass
+
+    # Filter to online links
+    online_links = []
+
+    for link in links_with_status:
+        href, identifier, status_url = link
+        if not status_url:
+            # No status URL, include link
+            online_links.append([href, identifier])
+        elif status_url in status_results:
+            if status_results[status_url]:
+                online_links.append([href, identifier])
+        else:
+            # Status check failed, include link
+            online_links.append([href, identifier])
+
+    return online_links
+
+
+def filter_offline_links(links, shared_state=None, log_func=None):
+    """
+    Filter out offline links from a list of [url, identifier] pairs.
+    Only checks links where status can be verified (hide.cx, tolink).
+    Returns filtered list of [url, identifier] pairs.
+    """
+    if not links:
+        return links
+
+    # Build list with status URLs
+    links_with_status = []
+    for link in links:
+        url = link[0]
+        identifier = link[1] if len(link) > 1 else "unknown"
+        crypter_type = detect_crypter_type(url)
+        status_url = generate_status_url(url, crypter_type) if crypter_type else None
+        links_with_status.append([url, identifier, status_url])
+
+    # Check if any links can be verified
+    verifiable_count = sum(1 for l in links_with_status if l[2])
+    if verifiable_count == 0:
+        # Nothing to verify, return original links
+        return links
+
+    if log_func:
+        log_func(f"Checking online status for {verifiable_count} verifiable link(s)...")
+
+    # Check status and filter
+    online_links = check_links_online_status(links_with_status, shared_state)
+
+    if log_func and len(online_links) < len(links):
+        offline_count = len(links) - len(online_links)
+        log_func(f"Filtered out {offline_count} offline link(s)")
+
+    return online_links
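Two of the helpers added above are easy to sanity-check in isolation. `generate_status_url` is a pure string rewrite; the container IDs below are made up for illustration:

```python
from quasarr.providers.utils import generate_status_url

print(generate_status_url("https://hide.cx/folder/0f8e2b9a-1c3d-4e5f-8a9b-0c1d2e3f4a5b", "hide"))
# https://hide.cx/state/0f8e2b9a-1c3d-4e5f-8a9b-0c1d2e3f4a5b

print(generate_status_url("https://tolink.to/f/Ab3xY9", "tolink"))
# https://tolink.to/f/Ab3xY9/s/status.png

print(generate_status_url("https://filecrypt.cc/Container/ABCDEF.html", "filecrypt"))
# None - no status endpoint is known for this crypter
```

Likewise, `image_has_green` can be exercised with synthetic one-pixel images to confirm its dominance thresholds:

```python
from io import BytesIO
from PIL import Image
from quasarr.providers.utils import image_has_green

def png_bytes(rgb):
    # Encode a 1x1 PNG with the given color
    buf = BytesIO()
    Image.new("RGB", (1, 1), rgb).save(buf, format="PNG")
    return buf.getvalue()

print(image_has_green(png_bytes((0, 200, 0))))     # True: green dominates
print(image_has_green(png_bytes((200, 60, 60))))   # False: red image
print(image_has_green(png_bytes((90, 110, 100))))  # False: 110 < 90 * 1.3
```

Note the consistent fail-open design: an image that cannot be fetched or parsed is treated as online, and status images are fetched in thread-pool batches of ten, so a slow or flaky status endpoint never causes valid links to be discarded.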
--- a/quasarr/providers/version.py
+++ b/quasarr/providers/version.py
@@ -8,7 +8,7 @@ import requests
 
 
 def get_version():
-    return "1.30.0"
+    return "1.31.0"
 
 
 def get_latest_version():
--- a/quasarr/search/sources/wx.py
+++ b/quasarr/search/sources/wx.py
@@ -131,6 +131,7 @@ def wx_feed(shared_state, start_time, request_from, mirror=None):
 def wx_search(shared_state, start_time, request_from, search_string, mirror=None, season=None, episode=None):
    """
    Search using internal API.
+   Deduplicates results by fulltitle - each unique release appears only once.
    """
    releases = []
    host = shared_state.values["config"]("Hostnames").get(hostname)
@@ -201,6 +202,9 @@ def wx_search(shared_state, start_time, request_from, search_string, mirror=None
 
        debug(f"{hostname.upper()}: Found {len(items)} items in search results")
 
+       # Track seen titles to deduplicate (mirrors have same fulltitle)
+       seen_titles = set()
+
        for item in items:
            try:
                uid = item.get('uid')
@@ -238,29 +242,34 @@ def wx_search(shared_state, start_time, request_from, search_string, mirror=None
                title = title.replace(' ', '.')
 
                if shared_state.is_valid_release(title, request_from, search_string, season, episode):
-                   published = detail_item.get('updated_at') or detail_item.get('created_at')
-                   if not published:
-                       published = datetime.now().strftime("%a, %d %b %Y %H:%M:%S +0000")
-                   password = f"www.{host}"
-
-                   payload = urlsafe_b64encode(
-                       f"{title}|{source}|{mirror}|0|{password}|{item_imdb_id or ''}".encode("utf-8")
-                   ).decode("utf-8")
-                   link = f"{shared_state.values['internal_address']}/download/?payload={payload}"
-
-                   releases.append({
-                       "details": {
-                           "title": title,
-                           "hostname": hostname,
-                           "imdb_id": item_imdb_id,
-                           "link": link,
-                           "mirror": mirror,
-                           "size": 0,
-                           "date": published,
-                           "source": source
-                       },
-                       "type": "protected"
-                   })
+                   # Skip if we've already seen this exact title
+                   if title in seen_titles:
+                       debug(f"{hostname.upper()}: Skipping duplicate main title: {title}")
+                   else:
+                       seen_titles.add(title)
+                       published = detail_item.get('updated_at') or detail_item.get('created_at')
+                       if not published:
+                           published = datetime.now().strftime("%a, %d %b %Y %H:%M:%S +0000")
+                       password = f"www.{host}"
+
+                       payload = urlsafe_b64encode(
+                           f"{title}|{source}|{mirror}|0|{password}|{item_imdb_id or ''}".encode("utf-8")
+                       ).decode("utf-8")
+                       link = f"{shared_state.values['internal_address']}/download/?payload={payload}"
+
+                       releases.append({
+                           "details": {
+                               "title": title,
+                               "hostname": hostname,
+                               "imdb_id": item_imdb_id,
+                               "link": link,
+                               "mirror": mirror,
+                               "size": 0,
+                               "date": published,
+                               "source": source
+                           },
+                           "type": "protected"
+                       })
 
                if 'releases' in detail_item and isinstance(detail_item['releases'], list):
                    debug(f"{hostname.upper()}: Found {len(detail_item['releases'])} releases for {uid}")
@@ -279,6 +288,13 @@ def wx_search(shared_state, start_time, request_from, search_string, mirror=None
                            debug(f"{hostname.upper()}: ✗ Release filtered out: {release_title}")
                            continue
 
+                       # Skip if we've already seen this exact title (deduplication)
+                       if release_title in seen_titles:
+                           debug(f"{hostname.upper()}: Skipping duplicate release: {release_title}")
+                           continue
+
+                       seen_titles.add(release_title)
+
                        release_uid = release.get('uid')
                        if release_uid:
                            release_source = f"https://{host}/detail/{uid}?release={release_uid}"
@@ -323,7 +339,7 @@ def wx_search(shared_state, start_time, request_from, search_string, mirror=None
            debug(f"{hostname.upper()}: {traceback.format_exc()}")
            continue
 
-       debug(f"{hostname.upper()}: Returning {len(releases)} total releases")
+       debug(f"{hostname.upper()}: Returning {len(releases)} total releases (deduplicated)")
 
    except Exception as e:
        info(f"Error in {hostname.upper()} search: {e}")
--- a/quasarr-1.30.0.dist-info/METADATA
+++ b/quasarr-1.31.0.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: quasarr
-Version: 1.30.0
+Version: 1.31.0
 Summary: Quasarr connects JDownloader with Radarr, Sonarr and LazyLibrarian. It also decrypts links protected by CAPTCHAs.
 Home-page: https://github.com/rix1337/Quasarr
 Author: rix1337
--- a/quasarr-1.30.0.dist-info/RECORD
+++ b/quasarr-1.31.0.dist-info/RECORD
@@ -1,11 +1,11 @@
 quasarr/__init__.py,sha256=cEtxN2AuwKvrxpIvAR7UL997VtYQ4iN3Eo3ZnP-WjZQ,14682
 quasarr/api/__init__.py,sha256=UOyyuOjF2WN6Um2wwQNHjFA-Rj0prb11z8SCjbifKJU,6940
 quasarr/api/arr/__init__.py,sha256=BNEugX1hUF5kn8MIgdoyE1HeyabpojjxAa6RlXSem74,17518
-quasarr/api/captcha/__init__.py,sha256=uIuXMHUisAQal8h7c-5kSJBHlROKcHizhCbbSXXkhec,72248
+quasarr/api/captcha/__init__.py,sha256=2d7fTTo-FOXufsG_MxyaDPt8r1KVbXbSXnJAgrt8Qvo,72591
 quasarr/api/config/__init__.py,sha256=m0DrbanI0lK_PaZA6ey3osj5l1_tMjjYjoFKkzrdPu0,13692
 quasarr/api/sponsors_helper/__init__.py,sha256=kAZabPlplPYRG6Uw7ZHTk5uypualwvhs-NoTOjQhhhA,6369
 quasarr/api/statistics/__init__.py,sha256=NrBAjjHkIUE95HhPUGIfNqh2IqBqJ_zm00S90Y-Qnus,7038
-quasarr/downloads/__init__.py,sha256=bpNg6LNqoqpnA-U7uVDhq9jM6VYB2bkekCw1XxZRpWM,11613
+quasarr/downloads/__init__.py,sha256=6COdDlJkNuWuJq7DLVqqb0fuH1b3HtsbvA0mlmtLeIs,11972
 quasarr/downloads/linkcrypters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 quasarr/downloads/linkcrypters/al.py,sha256=mfUG5VclC_-FcGoZL9zHYD7dz7X_YpaNmoKkgiyl9-0,8812
 quasarr/downloads/linkcrypters/filecrypt.py,sha256=He8b7HjoPA-LmRwVwY0l_5JAVlJ3sYOXs5tcWXooqI4,17055
@@ -16,7 +16,7 @@ quasarr/downloads/sources/al.py,sha256=g587VESZRZHZ03uxHKpufEr5qAtzbyGLmoijksU35
 quasarr/downloads/sources/by.py,sha256=kmUTn3izayRCV7W-t0E4kYE8qTbt3L3reCLozfvRGcU,3807
 quasarr/downloads/sources/dd.py,sha256=8X2tOle3qTq0b60Aa3o0uqp2vNELDHYYj99ERI7U_X0,2971
 quasarr/downloads/sources/dj.py,sha256=wY00hVRNhucZBG1hfExKqayhP1ISD8FFQm7wHYxutOk,404
-quasarr/downloads/sources/dl.py,sha256=P11c6P2Pxf8EMZdxBFcnY0y_Z35IAE1cJfTEx1Wkkw0,17995
+quasarr/downloads/sources/dl.py,sha256=HF0kCzjY1elA6oVjIRlP37lLkAXStQTva2dRYLBTOE0,14823
 quasarr/downloads/sources/dt.py,sha256=80yIHAivsqoPKAaFdZ4wPFBVGCbHNUO130pv7EO2LTM,2605
 quasarr/downloads/sources/dw.py,sha256=_28-E58Hs9NVwHyLt2M1oYUxVZ-wpE5dQv8gMNhiAPM,2622
 quasarr/downloads/sources/he.py,sha256=AA6OrIkD3KS_w1ClvXyW1_9hujM6A8P_5VcMHRM6ngg,3680
@@ -27,7 +27,7 @@ quasarr/downloads/sources/sf.py,sha256=ecPHNsNiRNXTfQX9MBLzJKqrEc1IpkrKkBXpihTPh
 quasarr/downloads/sources/sj.py,sha256=Bkv0c14AXct50n_viaTNK3bYG-Bpvx8x2D0UN_6gm78,404
 quasarr/downloads/sources/sl.py,sha256=jWprFt1Hew1T67fB1O_pc9YWgc3NVh30KXSwSyS50Pc,3186
 quasarr/downloads/sources/wd.py,sha256=kr1I1uJa7ZkEPH2LA6alXTJEn0LBPgLCwIh3wLXwCv8,4447
-quasarr/downloads/sources/wx.py,sha256=EygMfkgBMZYj3tSk4gvj5DcojkRswGhY_y8FMPNnVeU,4834
+quasarr/downloads/sources/wx.py,sha256=NzNNeqVL6sKkFKyreW-oerrreb5QP2tUGHTWHM5pMCU,7013
 quasarr/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 quasarr/providers/cloudflare.py,sha256=9iet8runc2VHVcA0_2z1qkrL6D5JKqz1ndktqCgsJFs,7873
 quasarr/providers/html_images.py,sha256=2n82gTJg7E7q2ytPFN4FWouYTIlmPYu_iHFtG7uktIA,28482
@@ -40,8 +40,8 @@ quasarr/providers/notifications.py,sha256=bohT-6yudmFnmZMc3BwCGX0n1HdzSVgQG_LDZm
 quasarr/providers/obfuscated.py,sha256=8FcmY9cjvjOoth-6LrPZ1CZZbpKplcaMMBE06FkdYgA,1339187
 quasarr/providers/shared_state.py,sha256=-TIiH2lkCfovq7bzUZicpUjXEjS87ZHCcevsFgySOqw,29944
 quasarr/providers/statistics.py,sha256=cEQixYnDMDqtm5wWe40E_2ucyo4mD0n3SrfelhQi1L8,6452
-quasarr/providers/utils.py,sha256=Q5qjo0tP5DkrgIxXcM8jpm1-uSEswjaXlQ4VnzXwPAg,5741
-quasarr/providers/version.py,sha256=vyDncNqSjOAYR617Y77Gb5eDIPupq1rey_qsuPdficg,4004
+quasarr/providers/utils.py,sha256=mcUPbcXMsLmrYv0CTZO5a9aOt2-JLyL3SZxu6N8OyjU,12075
+quasarr/providers/version.py,sha256=bjolkJyZ2IG4sc5GvrvZfwTeYTHWNvMAMp2vgnpPVZ0,4004
 quasarr/providers/web_server.py,sha256=AYd0KRxdDWMBr87BP8wlSMuL4zZo0I_rY-vHBai6Pfg,1688
 quasarr/providers/sessions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 quasarr/providers/sessions/al.py,sha256=WXue9LaT4y0BzsbKtHbN6bb_72c4AZZWR9NP-vg9-cg,12462
@@ -66,14 +66,14 @@ quasarr/search/sources/sf.py,sha256=3z_fvcafOh7U4D_vgq9yC8ktKeazI9fiAi96hCeXb5Q,
 quasarr/search/sources/sj.py,sha256=JRzoCDohClmGH7aXOz82KVUt6pZsZoBDBXvwvQrAijM,7074
 quasarr/search/sources/sl.py,sha256=5e5S7JvdbNOc2EthyOkfC4aTpG8O7fn4WS2O3_EXjnM,9463
 quasarr/search/sources/wd.py,sha256=O02j3irSlVw2qES82g_qHuavAk-njjSRH1dHSCnOUas,7540
-quasarr/search/sources/wx.py,sha256=_h1M6GhkJzixwHscrt0lMOnPSEDP1Xl24OypEe8Jy7c,12906
+quasarr/search/sources/wx.py,sha256=zlRvg7Ls-DFRo4sUBMRAXZRMfE2mnaXCkzP7pu53pIY,13842
 quasarr/storage/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 quasarr/storage/config.py,sha256=SSTgIce2FVYoVTK_6OCU3msknhxuLA3EC4Kcrrf_dxQ,6378
 quasarr/storage/setup.py,sha256=0Gm6sHLmlcKvOGeli9eVuRVEP0Slz8-K5jZG0cNXaew,42041
 quasarr/storage/sqlite_database.py,sha256=yMqFQfKf0k7YS-6Z3_7pj4z1GwWSXJ8uvF4IydXsuTE,3554
-quasarr-1.30.0.dist-info/licenses/LICENSE,sha256=QQFCAfDgt7lSA8oSWDHIZ9aTjFbZaBJdjnGOHkuhK7k,1060
-quasarr-1.30.0.dist-info/METADATA,sha256=9H6ehKg_MvM3DxsN4RhqK7RUmPf9pQ1TcWAnoQ5k54k,11009
-quasarr-1.30.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-quasarr-1.30.0.dist-info/entry_points.txt,sha256=gXi8mUKsIqKVvn-bOc8E5f04sK_KoMCC-ty6b2Hf-jc,40
-quasarr-1.30.0.dist-info/top_level.txt,sha256=dipJdaRda5ruTZkoGfZU60bY4l9dtPlmOWwxK_oGSF0,8
-quasarr-1.30.0.dist-info/RECORD,,
+quasarr-1.31.0.dist-info/licenses/LICENSE,sha256=QQFCAfDgt7lSA8oSWDHIZ9aTjFbZaBJdjnGOHkuhK7k,1060
+quasarr-1.31.0.dist-info/METADATA,sha256=4jot6IFM_BIQwf8Ftx74l9W26M-2EaK8LXxC9XUn3qE,11009
+quasarr-1.31.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+quasarr-1.31.0.dist-info/entry_points.txt,sha256=gXi8mUKsIqKVvn-bOc8E5f04sK_KoMCC-ty6b2Hf-jc,40
+quasarr-1.31.0.dist-info/top_level.txt,sha256=dipJdaRda5ruTZkoGfZU60bY4l9dtPlmOWwxK_oGSF0,8
+quasarr-1.31.0.dist-info/RECORD,,