quasarr 1.30.0__tar.gz → 1.31.0__tar.gz

This diff reflects the changes between publicly available package versions as they were released to their respective public registries, and is provided for informational purposes only.

Potentially problematic release: this version of quasarr might be problematic.

Files changed (87)
  1. {quasarr-1.30.0 → quasarr-1.31.0}/PKG-INFO +1 -1
  2. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/api/captcha/__init__.py +29 -22
  3. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/downloads/__init__.py +7 -0
  4. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/downloads/sources/dl.py +24 -122
  5. quasarr-1.31.0/quasarr/downloads/sources/wx.py +168 -0
  6. quasarr-1.31.0/quasarr/providers/utils.py +366 -0
  7. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/providers/version.py +1 -1
  8. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/search/sources/wx.py +40 -24
  9. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr.egg-info/PKG-INFO +1 -1
  10. quasarr-1.30.0/quasarr/downloads/sources/wx.py +0 -126
  11. quasarr-1.30.0/quasarr/providers/utils.py +0 -189
  12. {quasarr-1.30.0 → quasarr-1.31.0}/LICENSE +0 -0
  13. {quasarr-1.30.0 → quasarr-1.31.0}/README.md +0 -0
  14. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/__init__.py +0 -0
  15. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/api/__init__.py +0 -0
  16. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/api/arr/__init__.py +0 -0
  17. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/api/config/__init__.py +0 -0
  18. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/api/sponsors_helper/__init__.py +0 -0
  19. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/api/statistics/__init__.py +0 -0
  20. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/downloads/linkcrypters/__init__.py +0 -0
  21. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/downloads/linkcrypters/al.py +0 -0
  22. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/downloads/linkcrypters/filecrypt.py +0 -0
  23. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/downloads/linkcrypters/hide.py +0 -0
  24. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/downloads/packages/__init__.py +0 -0
  25. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/downloads/sources/__init__.py +0 -0
  26. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/downloads/sources/al.py +0 -0
  27. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/downloads/sources/by.py +0 -0
  28. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/downloads/sources/dd.py +0 -0
  29. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/downloads/sources/dj.py +0 -0
  30. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/downloads/sources/dt.py +0 -0
  31. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/downloads/sources/dw.py +0 -0
  32. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/downloads/sources/he.py +0 -0
  33. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/downloads/sources/mb.py +0 -0
  34. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/downloads/sources/nk.py +0 -0
  35. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/downloads/sources/nx.py +0 -0
  36. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/downloads/sources/sf.py +0 -0
  37. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/downloads/sources/sj.py +0 -0
  38. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/downloads/sources/sl.py +0 -0
  39. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/downloads/sources/wd.py +0 -0
  40. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/providers/__init__.py +0 -0
  41. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/providers/cloudflare.py +0 -0
  42. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/providers/html_images.py +0 -0
  43. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/providers/html_templates.py +0 -0
  44. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/providers/imdb_metadata.py +0 -0
  45. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/providers/jd_cache.py +0 -0
  46. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/providers/log.py +0 -0
  47. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/providers/myjd_api.py +0 -0
  48. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/providers/notifications.py +0 -0
  49. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/providers/obfuscated.py +0 -0
  50. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/providers/sessions/__init__.py +0 -0
  51. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/providers/sessions/al.py +0 -0
  52. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/providers/sessions/dd.py +0 -0
  53. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/providers/sessions/dl.py +0 -0
  54. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/providers/sessions/nx.py +0 -0
  55. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/providers/shared_state.py +0 -0
  56. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/providers/statistics.py +0 -0
  57. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/providers/web_server.py +0 -0
  58. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/search/__init__.py +0 -0
  59. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/search/sources/__init__.py +0 -0
  60. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/search/sources/al.py +0 -0
  61. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/search/sources/by.py +0 -0
  62. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/search/sources/dd.py +0 -0
  63. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/search/sources/dj.py +0 -0
  64. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/search/sources/dl.py +0 -0
  65. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/search/sources/dt.py +0 -0
  66. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/search/sources/dw.py +0 -0
  67. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/search/sources/fx.py +0 -0
  68. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/search/sources/he.py +0 -0
  69. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/search/sources/mb.py +0 -0
  70. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/search/sources/nk.py +0 -0
  71. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/search/sources/nx.py +0 -0
  72. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/search/sources/sf.py +0 -0
  73. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/search/sources/sj.py +0 -0
  74. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/search/sources/sl.py +0 -0
  75. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/search/sources/wd.py +0 -0
  76. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/storage/__init__.py +0 -0
  77. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/storage/config.py +0 -0
  78. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/storage/setup.py +0 -0
  79. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr/storage/sqlite_database.py +0 -0
  80. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr.egg-info/SOURCES.txt +0 -0
  81. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr.egg-info/dependency_links.txt +0 -0
  82. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr.egg-info/entry_points.txt +0 -0
  83. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr.egg-info/not-zip-safe +0 -0
  84. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr.egg-info/requires.txt +0 -0
  85. {quasarr-1.30.0 → quasarr-1.31.0}/quasarr.egg-info/top_level.txt +0 -0
  86. {quasarr-1.30.0 → quasarr-1.31.0}/setup.cfg +0 -0
  87. {quasarr-1.30.0 → quasarr-1.31.0}/setup.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: quasarr
- Version: 1.30.0
+ Version: 1.31.0
  Summary: Quasarr connects JDownloader with Radarr, Sonarr and LazyLibrarian. It also decrypts links protected by CAPTCHAs.
  Home-page: https://github.com/rix1337/Quasarr
  Author: rix1337
@@ -5,7 +5,7 @@
  import json
  import re
  from base64 import urlsafe_b64encode, urlsafe_b64decode
- from urllib.parse import quote, unquote, urljoin
+ from urllib.parse import quote, unquote

  import requests
  from bottle import request, response, redirect, HTTPResponse
@@ -308,7 +308,7 @@ def setup_captcha_routes(app):

  check_package_exists(package_id)

- package_selector = render_package_selector(package_id)
+ package_selector = render_package_selector(package_id, title)
  failed_warning = render_failed_attempts_warning(package_id)

  return render_centered_html(f"""
@@ -318,7 +318,6 @@ def setup_captcha_routes(app):
  <h1><img src="{images.logo}" type="image/png" alt="Quasarr logo" class="logo"/>Quasarr</h1>
  {package_selector}
  {failed_warning}
- <p><b>Package:</b> {title}</p>
  {render_userscript_section(url, package_id, title, password, "hide")}
  <p>
  {render_button("Delete Package", "secondary", {"onclick": f"location.href='/captcha/delete/{package_id}'"})}
@@ -349,7 +348,7 @@ def setup_captcha_routes(app):

  check_package_exists(package_id)

- package_selector = render_package_selector(package_id)
+ package_selector = render_package_selector(package_id, title)
  failed_warning = render_failed_attempts_warning(package_id)

  return render_centered_html(f"""
@@ -359,7 +358,6 @@ def setup_captcha_routes(app):
  <h1><img src="{images.logo}" type="image/png" alt="Quasarr logo" class="logo"/>Quasarr</h1>
  {package_selector}
  {failed_warning}
- <p><b>Package:</b> {title}</p>
  {render_userscript_section(url, package_id, title, password, "junkies")}
  <p>
  {render_button("Delete Package", "secondary", {"onclick": f"location.href='/captcha/delete/{package_id}'"})}
@@ -391,7 +389,7 @@ def setup_captcha_routes(app):

  url = urls[0][0] if isinstance(urls[0], (list, tuple)) else urls[0]

- package_selector = render_package_selector(package_id)
+ package_selector = render_package_selector(package_id, title)
  failed_warning = render_failed_attempts_warning(package_id)

  return render_centered_html(f"""
@@ -401,7 +399,6 @@ def setup_captcha_routes(app):
  <h1><img src="{images.logo}" type="image/png" alt="Quasarr logo" class="logo"/>Quasarr</h1>
  {package_selector}
  {failed_warning}
- <p><b>Package:</b> {title}</p>
  {render_userscript_section(url, package_id, title, password, "keeplinks")}
  <p>
  {render_button("Delete Package", "secondary", {"onclick": f"location.href='/captcha/delete/{package_id}'"})}
@@ -433,7 +430,7 @@ def setup_captcha_routes(app):

  url = urls[0][0] if isinstance(urls[0], (list, tuple)) else urls[0]

- package_selector = render_package_selector(package_id)
+ package_selector = render_package_selector(package_id, title)
  failed_warning = render_failed_attempts_warning(package_id)

  return render_centered_html(f"""
@@ -443,7 +440,6 @@ def setup_captcha_routes(app):
  <h1><img src="{images.logo}" type="image/png" alt="Quasarr logo" class="logo"/>Quasarr</h1>
  {package_selector}
  {failed_warning}
- <p><b>Package:</b> {title}</p>
  {render_userscript_section(url, package_id, title, password, "tolink")}
  <p>
  {render_button("Delete Package", "secondary", {"onclick": f"location.href='/captcha/delete/{package_id}'"})}
@@ -619,12 +615,22 @@ def setup_captcha_routes(app):
  </script>
  '''

- def render_package_selector(current_package_id):
- """Render a dropdown selector for all available packages at the top of captcha UIs"""
+ def render_package_selector(current_package_id, current_title=None):
+ """Render package title, with dropdown selector if multiple packages available"""
  protected = shared_state.get_db("protected").retrieve_all_titles()

- if not protected or len(protected) <= 1:
- return "" # Don't show selector if only one or no packages
+ if not protected:
+ return ""
+
+ # Single package - just show the title without dropdown
+ if len(protected) <= 1:
+ if current_title:
+ return f'''
+ <div class="package-selector" style="margin-bottom: 20px; padding: 12px; background: rgba(128, 128, 128, 0.1); border: 1px solid rgba(128, 128, 128, 0.3); border-radius: 8px;">
+ <p style="margin: 0; word-break: break-all;"><b>📦 Package:</b> {current_title}</p>
+ </div>
+ '''
+ return ""

  sj = shared_state.values["config"]("Hostnames").get("sj")
  dj = shared_state.values["config"]("Hostnames").get("dj")
@@ -815,7 +821,7 @@ def setup_captcha_routes(app):
  f"pkg_pass={quote(password)}"
  )

- package_selector = render_package_selector(package_id)
+ package_selector = render_package_selector(package_id, title)
  failed_warning = render_failed_attempts_warning(package_id)

  return render_centered_html(f"""
@@ -825,7 +831,6 @@ def setup_captcha_routes(app):
  <h1><img src="{images.logo}" type="image/png" alt="Quasarr logo" class="logo"/>Quasarr</h1>
  {package_selector}
  {failed_warning}
- <p style="max-width: 370px; word-wrap: break-word; overflow-wrap: break-word;"><b>Package:</b> {title}</p>

  <div>
  <!-- Info section explaining the process -->
@@ -1171,7 +1176,7 @@ def setup_captcha_routes(app):
  bypass_section = render_filecrypt_bypass_section(url, package_id, title, password)

  # Add package selector and failed attempts warning
- package_selector = render_package_selector(package_id)
+ package_selector = render_package_selector(package_id, title)

  # Create fallback URL for the manual FileCrypt page
  fallback_payload = {
@@ -1187,6 +1192,9 @@ def setup_captcha_routes(app):
  failed_warning = render_failed_attempts_warning(package_id, include_delete_button=False,
  fallback_url=filecrypt_fallback_url) # Delete button is already below

+ # Escape title for safe use in JavaScript string
+ escaped_title_js = title.replace('\\', '\\\\').replace('"', '\\"').replace('\n', '\\n').replace('\r', '\\r')
+

  content = render_centered_html(r'''
  <style>
@@ -1203,6 +1211,9 @@ def setup_captcha_routes(app):
  }
  </style>
  <script type="text/javascript">
+ // Package title for result display
+ var packageTitleText = "''' + escaped_title_js + r'''";
+
  // Check if we should redirect to fallback due to failed attempts
  (function() {
  const storageKey = 'captcha_attempts_''' + package_id + r'''';
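
As an aside, the escaping added above is a plain character-by-character substitution so the package title can be embedded safely in a JavaScript string literal. A minimal standalone illustration (the sample title is made up, not from the package):

    # Illustration only - mirrors the escaping introduced in this release.
    title = 'Some "Series" S01\\E01\nwith a newline'
    escaped_title_js = title.replace('\\', '\\\\').replace('"', '\\"').replace('\n', '\\n').replace('\r', '\\r')
    print(f'var packageTitleText = "{escaped_title_js}";')
    # -> var packageTitleText = "Some \"Series\" S01\\E01\nwith a newline";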
@@ -1231,11 +1242,8 @@ def setup_captcha_routes(app):
  var warnBox = document.getElementById("failed-attempts-warning");
  if (warnBox) warnBox.style.display = "none";

- // Remove width limit on result screen
- var packageTitle = document.getElementById("package-title");
- packageTitle.style.maxWidth = "none";
-
- document.getElementById("captcha-key").innerText = 'Using result "' + token + '" to decrypt links...';
+ // Add package title to result area
+ document.getElementById("captcha-key").innerHTML = '<p style="word-break: break-all;"><b>Package:</b> ' + packageTitleText + '</p><p style="word-break: break-all;">Using result "' + token + '" to decrypt links...</p>';
  var link = document.getElementById("link-hidden").value;
  const fullPath = '/captcha/decrypt-filecrypt';

@@ -1288,7 +1296,6 @@ def setup_captcha_routes(app):
  {package_selector}
  </div>
  {failed_warning}
- <p id="package-title" style="max-width: 370px; word-wrap: break-word; overflow-wrap: break-word;"><b>Package:</b> {title}</p>
  <div id="captcha-key"></div>
  {link_select}<br><br>
  <input type="hidden" id="link-hidden" value="{prioritized_links[0][0]}" />
@@ -25,6 +25,7 @@ from quasarr.downloads.sources.wx import get_wx_download_links
  from quasarr.providers.log import info
  from quasarr.providers.notifications import send_discord_message
  from quasarr.providers.statistics import StatsHelper
+ from quasarr.providers.utils import filter_offline_links

  # =============================================================================
  # CRYPTER CONFIGURATION
@@ -184,6 +185,12 @@ def process_links(shared_state, source_result, title, password, package_id, imdb
  reason=f'All links are offline or IP is banned for "{title}" on {label} - "{source_url}"')
  links = valid_links

+ # Filter out verifiably offline links
+ links = filter_offline_links(links, shared_state=shared_state, log_func=info)
+ if not links:
+ return fail(title, package_id, shared_state,
+ reason=f'All verifiable links are offline for "{title}" on {label} - "{source_url}"')
+

  classified = classify_links(links, shared_state)
  # PRIORITY 1: Direct hoster links
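
The new filter step above calls filter_offline_links from the new quasarr/providers/utils.py module (added in this release but not shown in this diff). A hedged sketch of how the call site uses it, assuming the helper returns only the links it could not verify as offline; everything beyond the signature visible above is an assumption:

    # Sketch only - link shape and helper behaviour are assumptions based on this diff.
    from quasarr.providers.log import info
    from quasarr.providers.utils import filter_offline_links

    def keep_verifiably_online(links, shared_state):
        # links appear to be [url, name] pairs, e.g. ["https://hide.cx/folder/<uuid>", "rapidgator"]
        remaining = filter_offline_links(links, shared_state=shared_state, log_func=info)
        if not remaining:
            info("All verifiable links are offline - the package would be failed here")
        return remaining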
@@ -3,14 +3,12 @@
  # Project by https://github.com/rix1337

  import re
- from concurrent.futures import ThreadPoolExecutor, as_completed
- from io import BytesIO

- from PIL import Image
  from bs4 import BeautifulSoup, NavigableString

  from quasarr.providers.log import info, debug
  from quasarr.providers.sessions.dl import retrieve_and_validate_session, fetch_via_requests_session, invalidate_session
+ from quasarr.providers.utils import generate_status_url, check_links_online_status

  hostname = "dl"

@@ -136,28 +134,6 @@ def extract_mirror_name_from_link(link_element):
  return None


- def generate_status_url(href, crypter_type):
- """
- Generate a status URL for crypters that support it.
- Returns None if status URL cannot be generated.
- """
- if crypter_type == "hide":
- # hide.cx links: https://hide.cx/folder/{UUID} → https://hide.cx/state/{UUID}
- match = re.search(r'hide\.cx/(?:folder/)?([a-f0-9-]{36})', href, re.IGNORECASE)
- if match:
- uuid = match.group(1)
- return f"https://hide.cx/state/{uuid}"
-
- elif crypter_type == "tolink":
- # tolink links: https://tolink.to/f/{ID} → https://tolink.to/f/{ID}/s/status.png
- match = re.search(r'tolink\.to/f/([a-zA-Z0-9]+)', href, re.IGNORECASE)
- if match:
- link_id = match.group(1)
- return f"https://tolink.to/f/{link_id}/s/status.png"
-
- return None
-
-
  def extract_status_url_from_html(link_element, crypter_type):
  """
  Extract status image URL from HTML near the link element.
@@ -259,100 +235,6 @@ def build_filecrypt_status_map(soup):
  return status_map


- def image_has_green(image_data):
- """
- Analyze image data to check if it contains green pixels.
- Returns True if any significant green is detected (indicating online status).
- """
- try:
- img = Image.open(BytesIO(image_data))
- img = img.convert('RGB')
-
- pixels = list(img.getdata())
-
- for r, g, b in pixels:
- # Check if pixel is greenish: green channel is dominant
- # and has a reasonable absolute value
- if g > 100 and g > r * 1.3 and g > b * 1.3:
- return True
-
- return False
- except Exception as e:
- debug(f"Error analyzing status image: {e}")
- # If we can't analyze, assume online to not skip valid links
- return True
-
-
- def fetch_status_image(status_url):
- """
- Fetch a status image and return (status_url, image_data).
- Returns (status_url, None) on failure.
- """
- try:
- import requests
- response = requests.get(status_url, timeout=10)
- if response.status_code == 200:
- return (status_url, response.content)
- except Exception as e:
- debug(f"Error fetching status image {status_url}: {e}")
- return (status_url, None)
-
-
- def check_links_online_status(links_with_status):
- """
- Check online status for links that have status URLs.
- Returns list of links that are online (or have no status URL to check).
-
- links_with_status: list of [href, identifier, status_url] where status_url can be None
- """
-
- links_to_check = [(i, link) for i, link in enumerate(links_with_status) if link[2]]
-
- if not links_to_check:
- # No status URLs to check, return all links as potentially online
- return [[link[0], link[1]] for link in links_with_status]
-
- # Batch fetch status images
- status_results = {} # status_url -> has_green
- status_urls = list(set(link[2] for _, link in links_to_check))
-
- batch_size = 10
- for i in range(0, len(status_urls), batch_size):
- batch = status_urls[i:i + batch_size]
- with ThreadPoolExecutor(max_workers=batch_size) as executor:
- futures = [executor.submit(fetch_status_image, url) for url in batch]
- for future in as_completed(futures):
- try:
- status_url, image_data = future.result()
- if image_data:
- status_results[status_url] = image_has_green(image_data)
- else:
- # Could not fetch, assume online
- status_results[status_url] = True
- except Exception as e:
- debug(f"Error checking status: {e}")
-
- # Filter to online links
- online_links = []
-
- for link in links_with_status:
- href, identifier, status_url = link
- if not status_url:
- # No status URL, include link (keeplinks case)
- online_links.append([href, identifier])
- elif status_url in status_results:
- if status_results[status_url]:
- online_links.append([href, identifier])
- debug(f"Link online: {identifier} ({href})")
- else:
- debug(f"Link offline: {identifier} ({href})")
- else:
- # Status check failed, include link
- online_links.append([href, identifier])
-
- return online_links
-
-
  def extract_links_and_password_from_post(post_content, host):
  """
  Extract download links and password from a forum post.
@@ -448,7 +330,11 @@ def get_dl_download_links(shared_state, url, mirror, title, password):
  info(f"Could not find any posts in thread: {url}")
  return {"links": [], "password": ""}

- # Iterate through posts to find one with online links
+ # Track first post with unverifiable links as fallback
+ fallback_links = None
+ fallback_password = ""
+
+ # Iterate through posts to find one with verified online links
  for post_index, post in enumerate(posts):
  post_content = post.select_one('div.bbWrapper')
  if not post_content:
@@ -459,16 +345,32 @@ def get_dl_download_links(shared_state, url, mirror, title, password):
  if not links_with_status:
  continue

+ # Check if any links have status URLs we can verify
+ has_verifiable_links = any(link[2] for link in links_with_status)
+
+ if not has_verifiable_links:
+ # No way to check online status - save as fallback and continue looking
+ if fallback_links is None:
+ fallback_links = [[link[0], link[1]] for link in links_with_status]
+ fallback_password = extracted_password
+ debug(f"Post #{post_index + 1} has links but no status URLs, saving as fallback...")
+ continue
+
  # Check which links are online
- online_links = check_links_online_status(links_with_status)
+ online_links = check_links_online_status(links_with_status, shared_state)

  if online_links:
  post_info = "first post" if post_index == 0 else f"post #{post_index + 1}"
- debug(f"Found {len(online_links)} online link(s) in {post_info} for: {title}")
+ debug(f"Found {len(online_links)} verified online link(s) in {post_info} for: {title}")
  return {"links": online_links, "password": extracted_password}
  else:
  debug(f"All links in post #{post_index + 1} are offline, checking next post...")

+ # No verified online links found - return fallback if available
+ if fallback_links:
+ debug(f"No verified online links found, returning unverified fallback links for: {title}")
+ return {"links": fallback_links, "password": fallback_password}
+
  info(f"No online download links found in any post: {url}")
  return {"links": [], "password": ""}
@@ -0,0 +1,168 @@
+ # -*- coding: utf-8 -*-
+ # Quasarr
+ # Project by https://github.com/rix1337
+
+ import re
+
+ import requests
+
+ from quasarr.providers.log import info, debug
+ from quasarr.providers.utils import check_links_online_status
+
+ hostname = "wx"
+
+
+ def get_wx_download_links(shared_state, url, mirror, title, password):
+ """
+ KEEP THE SIGNATURE EVEN IF SOME PARAMETERS ARE UNUSED!
+
+ WX source handler - Grabs download links from API based on title.
+ Finds the best mirror (M1, M2, M3...) by checking online status.
+ Returns all online links from the first complete mirror, or the best partial mirror.
+ Prefers hide.cx links over other crypters (filecrypt, etc.) when online counts are equal.
+ """
+ host = shared_state.values["config"]("Hostnames").get(hostname)
+
+ headers = {
+ 'User-Agent': shared_state.values["user_agent"],
+ 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'
+ }
+
+ try:
+ session = requests.Session()
+
+ # First, load the page to establish session cookies
+ response = session.get(url, headers=headers, timeout=30)
+
+ if response.status_code != 200:
+ info(f"{hostname.upper()}: Failed to load page: {url} (Status: {response.status_code})")
+ return {"links": []}
+
+ # Extract slug from URL
+ slug_match = re.search(r'/detail/([^/?]+)', url)
+ if not slug_match:
+ info(f"{hostname.upper()}: Could not extract slug from URL: {url}")
+ return {"links": []}
+
+ api_url = f'https://api.{host}/start/d/{slug_match.group(1)}'
+
+ # Update headers for API request
+ api_headers = {
+ 'User-Agent': shared_state.values["user_agent"],
+ 'Accept': 'application/json'
+ }
+
+ debug(f"{hostname.upper()}: Fetching API data from: {api_url}")
+ api_response = session.get(api_url, headers=api_headers, timeout=30)
+
+ if api_response.status_code != 200:
+ info(f"{hostname.upper()}: Failed to load API: {api_url} (Status: {api_response.status_code})")
+ return {"links": []}
+
+ data = api_response.json()
+
+ # Navigate to releases in the API response
+ if 'item' not in data or 'releases' not in data['item']:
+ info(f"{hostname.upper()}: No releases found in API response")
+ return {"links": []}
+
+ releases = data['item']['releases']
+
+ # Find ALL releases matching the title (these are different mirrors: M1, M2, M3...)
+ matching_releases = [r for r in releases if r.get('fulltitle') == title]
+
+ if not matching_releases:
+ info(f"{hostname.upper()}: No release found matching title: {title}")
+ return {"links": []}
+
+ debug(f"{hostname.upper()}: Found {len(matching_releases)} mirror(s) for: {title}")
+
+ # Evaluate each mirror and find the best one
+ # Track: (online_count, is_hide, online_links)
+ best_mirror = None # (online_count, is_hide, online_links)
+
+ for idx, release in enumerate(matching_releases):
+ crypted_links = release.get('crypted_links', {})
+ check_urls = release.get('options', {}).get('check', {})
+
+ if not crypted_links:
+ continue
+
+ # Separate hide.cx links from other crypters
+ hide_links = []
+ other_links = []
+
+ for hoster, container_url in crypted_links.items():
+ state_url = check_urls.get(hoster)
+ if re.search(r'hide\.', container_url, re.IGNORECASE):
+ hide_links.append([container_url, hoster, state_url])
+ elif re.search(r'filecrypt\.', container_url, re.IGNORECASE):
+ other_links.append([container_url, hoster, state_url])
+ # Skip other crypters we don't support
+
+ # Check hide.cx links first (preferred)
+ hide_online = 0
+ online_hide = []
+ if hide_links:
+ online_hide = check_links_online_status(hide_links, shared_state)
+ hide_total = len(hide_links)
+ hide_online = len(online_hide)
+
+ debug(f"{hostname.upper()}: M{idx + 1} hide.cx: {hide_online}/{hide_total} online")
+
+ # If all hide.cx links are online, use this mirror immediately
+ if hide_online == hide_total and hide_online > 0:
+ debug(
+ f"{hostname.upper()}: M{idx + 1} is complete (all {hide_online} hide.cx links online), using this mirror")
+ return {"links": online_hide}
+
+ # Check other crypters (filecrypt, etc.) - no early return, always check all mirrors for hide.cx first
+ other_online = 0
+ online_other = []
+ if other_links:
+ online_other = check_links_online_status(other_links, shared_state)
+ other_total = len(other_links)
+ other_online = len(online_other)
+
+ debug(f"{hostname.upper()}: M{idx + 1} other crypters: {other_online}/{other_total} online")
+
+ # Determine best option for this mirror (prefer hide.cx on ties)
+ mirror_links = None
+ mirror_count = 0
+ mirror_is_hide = False
+
+ if hide_online > 0 and hide_online >= other_online:
+ # hide.cx wins (more links or tie)
+ mirror_links = online_hide
+ mirror_count = hide_online
+ mirror_is_hide = True
+ elif other_online > hide_online:
+ # other crypter has more online links
+ mirror_links = online_other
+ mirror_count = other_online
+ mirror_is_hide = False
+
+ # Update best_mirror if this mirror is better
+ # Priority: 1) more online links, 2) hide.cx preference on ties
+ if mirror_links:
+ if best_mirror is None:
+ best_mirror = (mirror_count, mirror_is_hide, mirror_links)
+ elif mirror_count > best_mirror[0]:
+ best_mirror = (mirror_count, mirror_is_hide, mirror_links)
+ elif mirror_count == best_mirror[0] and mirror_is_hide and not best_mirror[1]:
+ # Same count but this is hide.cx and current best is not
+ best_mirror = (mirror_count, mirror_is_hide, mirror_links)
+
+ # No complete mirror found, return best partial mirror
+ if best_mirror and best_mirror[2]:
+ crypter_type = "hide.cx" if best_mirror[1] else "other crypter"
+ debug(
+ f"{hostname.upper()}: No complete mirror, using best partial with {best_mirror[0]} online {crypter_type} link(s)")
+ return {"links": best_mirror[2]}
+
+ info(f"{hostname.upper()}: No online links found for: {title}")
+ return {"links": []}
+
+ except Exception as e:
+ info(f"{hostname.upper()}: Error extracting download links from {url}: {e}")
+ return {"links": []}
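
For reference, the mirror ranking in the new wx.py handler above reduces to: return immediately on the first mirror whose hide.cx links are all online; otherwise keep the mirror with the most online links, preferring hide.cx over other crypters on ties. A tiny standalone illustration of that tie-break with invented counts:

    # Standalone illustration of the tie-break above (counts are invented).
    mirrors = [
        {"name": "M1", "online": 2, "is_hide": False},
        {"name": "M2", "online": 2, "is_hide": True},
        {"name": "M3", "online": 1, "is_hide": True},
    ]
    best = None
    for m in mirrors:
        if best is None or m["online"] > best["online"] or (
                m["online"] == best["online"] and m["is_hide"] and not best["is_hide"]):
            best = m
    print(best["name"])  # M2 - same online count as M1, but hide.cx wins the tie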