quasarr-2.6.1-py3-none-any.whl → quasarr-2.7.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of quasarr might be problematic; see the registry's advisory page for details.

Files changed (54)
  1. quasarr/__init__.py +71 -61
  2. quasarr/api/__init__.py +1 -2
  3. quasarr/api/arr/__init__.py +159 -56
  4. quasarr/api/captcha/__init__.py +203 -154
  5. quasarr/downloads/__init__.py +12 -8
  6. quasarr/downloads/linkcrypters/al.py +3 -3
  7. quasarr/downloads/linkcrypters/filecrypt.py +1 -2
  8. quasarr/downloads/packages/__init__.py +62 -88
  9. quasarr/downloads/sources/al.py +3 -3
  10. quasarr/downloads/sources/by.py +3 -3
  11. quasarr/downloads/sources/he.py +8 -9
  12. quasarr/downloads/sources/nk.py +3 -3
  13. quasarr/downloads/sources/sl.py +6 -1
  14. quasarr/downloads/sources/wd.py +93 -37
  15. quasarr/downloads/sources/wx.py +11 -17
  16. quasarr/providers/auth.py +9 -13
  17. quasarr/providers/cloudflare.py +4 -3
  18. quasarr/providers/imdb_metadata.py +0 -2
  19. quasarr/providers/jd_cache.py +64 -90
  20. quasarr/providers/log.py +226 -8
  21. quasarr/providers/myjd_api.py +116 -94
  22. quasarr/providers/sessions/al.py +20 -22
  23. quasarr/providers/sessions/dd.py +1 -1
  24. quasarr/providers/sessions/dl.py +8 -10
  25. quasarr/providers/sessions/nx.py +1 -1
  26. quasarr/providers/shared_state.py +26 -15
  27. quasarr/providers/utils.py +15 -6
  28. quasarr/providers/version.py +1 -1
  29. quasarr/search/__init__.py +91 -78
  30. quasarr/search/sources/al.py +19 -23
  31. quasarr/search/sources/by.py +6 -6
  32. quasarr/search/sources/dd.py +8 -10
  33. quasarr/search/sources/dj.py +15 -18
  34. quasarr/search/sources/dl.py +25 -37
  35. quasarr/search/sources/dt.py +13 -15
  36. quasarr/search/sources/dw.py +24 -16
  37. quasarr/search/sources/fx.py +25 -11
  38. quasarr/search/sources/he.py +16 -14
  39. quasarr/search/sources/hs.py +7 -7
  40. quasarr/search/sources/mb.py +7 -7
  41. quasarr/search/sources/nk.py +24 -25
  42. quasarr/search/sources/nx.py +22 -15
  43. quasarr/search/sources/sf.py +18 -9
  44. quasarr/search/sources/sj.py +7 -7
  45. quasarr/search/sources/sl.py +26 -14
  46. quasarr/search/sources/wd.py +61 -31
  47. quasarr/search/sources/wx.py +33 -47
  48. quasarr/storage/config.py +1 -3
  49. {quasarr-2.6.1.dist-info → quasarr-2.7.0.dist-info}/METADATA +4 -1
  50. quasarr-2.7.0.dist-info/RECORD +84 -0
  51. quasarr-2.6.1.dist-info/RECORD +0 -84
  52. {quasarr-2.6.1.dist-info → quasarr-2.7.0.dist-info}/WHEEL +0 -0
  53. {quasarr-2.6.1.dist-info → quasarr-2.7.0.dist-info}/entry_points.txt +0 -0
  54. {quasarr-2.6.1.dist-info → quasarr-2.7.0.dist-info}/licenses/LICENSE +0 -0
@@ -190,18 +190,7 @@ class Linkgrabber:
190
190
 
191
191
  def add_links(
192
192
  self,
193
- params=[
194
- {
195
- "autostart": True,
196
- "links": None,
197
- "packageName": None,
198
- "extractPassword": None,
199
- "priority": "DEFAULT",
200
- "downloadPassword": None,
201
- "destinationFolder": None,
202
- "overwritePackagizerRules": False,
203
- }
204
- ],
193
+ params=None,
205
194
  ):
206
195
  """
207
196
  Add links to the linkcollector
@@ -216,10 +205,23 @@ class Linkgrabber:
216
205
  "destinationFolder" : null
217
206
  }
218
207
  """
208
+ if params is None:
209
+ params = [
210
+ {
211
+ "autostart": True,
212
+ "links": None,
213
+ "packageName": None,
214
+ "extractPassword": None,
215
+ "priority": "DEFAULT",
216
+ "downloadPassword": None,
217
+ "destinationFolder": None,
218
+ "overwritePackagizerRules": False,
219
+ }
220
+ ]
219
221
  resp = self.device.action(self.url + "/addLinks", params)
220
222
  return resp
221
223
 
222
- def cleanup(self, action, mode, selection_type, link_ids=[], package_ids=[]):
224
+ def cleanup(self, action, mode, selection_type, link_ids=None, package_ids=None):
223
225
  """
224
226
  Clean packages and/or links of the linkgrabber list.
225
227
  Requires at least a package_ids or link_ids list, or both.
@@ -235,6 +237,10 @@ class Linkgrabber:
235
237
  :param selection_type: Type of selection to use. Types: SELECTED, UNSELECTED, ALL, NONE
236
238
  :type: str:
237
239
  """
240
+ if link_ids is None:
241
+ link_ids = []
242
+ if package_ids is None:
243
+ package_ids = []
238
244
  params = [link_ids, package_ids]
239
245
  params += [action, mode, selection_type]
240
246
  resp = self.device.action(self.url + "/cleanup", params)
@@ -259,24 +265,7 @@ class Linkgrabber:
259
265
 
260
266
  def query_links(
261
267
  self,
262
- params=[
263
- {
264
- "bytesTotal": True,
265
- "comment": True,
266
- "status": True,
267
- "enabled": True,
268
- "maxResults": -1,
269
- "startAt": 0,
270
- "hosts": True,
271
- "url": True,
272
- "availability": True,
273
- "variantIcon": True,
274
- "variantName": True,
275
- "variantID": True,
276
- "variants": True,
277
- "priority": True,
278
- }
279
- ],
268
+ params=None,
280
269
  ):
281
270
  """
282
271
 
@@ -288,34 +277,55 @@ class Linkgrabber:
288
277
  :type: Dictionary
289
278
  :rtype: List of dictionaries of this style, with more or less detail based on your options.
290
279
  """
280
+ if params is None:
281
+ params = [
282
+ {
283
+ "bytesTotal": True,
284
+ "comment": True,
285
+ "status": True,
286
+ "enabled": True,
287
+ "maxResults": -1,
288
+ "startAt": 0,
289
+ "hosts": True,
290
+ "url": True,
291
+ "availability": True,
292
+ "variantIcon": True,
293
+ "variantName": True,
294
+ "variantID": True,
295
+ "variants": True,
296
+ "priority": True,
297
+ }
298
+ ]
291
299
  resp = self.device.action(self.url + "/queryLinks", params)
292
300
  return resp
293
301
 
294
302
  def query_packages(
295
303
  self,
296
- params=[
297
- {
298
- "bytesLoaded": True,
299
- "bytesTotal": True,
300
- "comment": True,
301
- "enabled": True,
302
- "eta": True,
303
- "priority": False,
304
- "finished": True,
305
- "running": True,
306
- "speed": True,
307
- "status": True,
308
- "childCount": True,
309
- "hosts": True,
310
- "saveTo": True,
311
- "maxResults": -1,
312
- "startAt": 0,
313
- }
314
- ],
304
+ params=None,
315
305
  ):
316
306
  """
317
307
  Get the links in the linkgrabber list
318
308
  """
309
+ if params is None:
310
+ params = [
311
+ {
312
+ "bytesLoaded": True,
313
+ "bytesTotal": True,
314
+ "comment": True,
315
+ "enabled": True,
316
+ "eta": True,
317
+ "priority": False,
318
+ "finished": True,
319
+ "running": True,
320
+ "speed": True,
321
+ "status": True,
322
+ "childCount": True,
323
+ "hosts": True,
324
+ "saveTo": True,
325
+ "maxResults": -1,
326
+ "startAt": 0,
327
+ }
328
+ ]
319
329
  resp = self.device.action("/linkgrabberv2/queryPackages", params)
320
330
  return resp
321
331
 
@@ -329,7 +339,7 @@ class Downloads:
329
339
  self.device = device
330
340
  self.url = "/downloadsV2"
331
341
 
332
- def cleanup(self, action, mode, selection_type, link_ids=[], package_ids=[]):
342
+ def cleanup(self, action, mode, selection_type, link_ids=None, package_ids=None):
333
343
  """
334
344
  Clean packages and/or links of the linkgrabber list.
335
345
  Requires at least a package_ids or link_ids list, or both.
@@ -345,6 +355,10 @@ class Downloads:
345
355
  :param selection_type: Type of selection to use. Types: SELECTED, UNSELECTED, ALL, NONE
346
356
  :type: str:
347
357
  """
358
+ if link_ids is None:
359
+ link_ids = []
360
+ if package_ids is None:
361
+ package_ids = []
348
362
  params = [link_ids, package_ids]
349
363
  params += [action, mode, selection_type]
350
364
  resp = self.device.action(self.url + "/cleanup", params)
@@ -352,59 +366,63 @@ class Downloads:
352
366
 
353
367
  def query_links(
354
368
  self,
355
- params=[
356
- {
357
- "bytesTotal": True,
358
- "comment": True,
359
- "status": True,
360
- "enabled": True,
361
- "maxResults": -1,
362
- "startAt": 0,
363
- "packageUUIDs": [],
364
- "host": True,
365
- "url": True,
366
- "bytesloaded": True,
367
- "speed": True,
368
- "eta": True,
369
- "finished": True,
370
- "priority": True,
371
- "running": True,
372
- "skipped": True,
373
- "extractionStatus": True,
374
- }
375
- ],
369
+ params=None,
376
370
  ):
377
371
  """
378
372
  Get the links in the download list
379
373
  """
374
+ if params is None:
375
+ params = [
376
+ {
377
+ "bytesTotal": True,
378
+ "comment": True,
379
+ "status": True,
380
+ "enabled": True,
381
+ "maxResults": -1,
382
+ "startAt": 0,
383
+ "packageUUIDs": [],
384
+ "host": True,
385
+ "url": True,
386
+ "bytesloaded": True,
387
+ "speed": True,
388
+ "eta": True,
389
+ "finished": True,
390
+ "priority": True,
391
+ "running": True,
392
+ "skipped": True,
393
+ "extractionStatus": True,
394
+ }
395
+ ]
380
396
  resp = self.device.action(self.url + "/queryLinks", params)
381
397
  return resp
382
398
 
383
399
  def query_packages(
384
400
  self,
385
- params=[
386
- {
387
- "bytesLoaded": True,
388
- "bytesTotal": True,
389
- "comment": True,
390
- "enabled": True,
391
- "eta": True,
392
- "priority": False,
393
- "finished": True,
394
- "running": True,
395
- "speed": True,
396
- "status": True,
397
- "childCount": True,
398
- "hosts": True,
399
- "saveTo": True,
400
- "maxResults": -1,
401
- "startAt": 0,
402
- }
403
- ],
401
+ params=None,
404
402
  ):
405
403
  """
406
404
  Get the packages in the downloads list
407
405
  """
406
+ if params is None:
407
+ params = [
408
+ {
409
+ "bytesLoaded": True,
410
+ "bytesTotal": True,
411
+ "comment": True,
412
+ "enabled": True,
413
+ "eta": True,
414
+ "priority": False,
415
+ "finished": True,
416
+ "running": True,
417
+ "speed": True,
418
+ "status": True,
419
+ "childCount": True,
420
+ "hosts": True,
421
+ "saveTo": True,
422
+ "maxResults": -1,
423
+ "startAt": 0,
424
+ }
425
+ ]
408
426
  resp = self.device.action(self.url + "/queryPackages", params)
409
427
  return resp
410
428
 
@@ -418,7 +436,7 @@ class Extraction:
418
436
  self.device = device
419
437
  self.url = "/extraction"
420
438
 
421
- def get_archive_info(self, link_ids=[], package_ids=[]):
439
+ def get_archive_info(self, link_ids=None, package_ids=None):
422
440
  """
423
441
  Get ArchiveStatus for links and/or packages.
424
442
 
@@ -427,6 +445,10 @@ class Extraction:
427
445
  :param link_ids: link UUID's.
428
446
  :type: list of strings
429
447
  """
448
+ if link_ids is None:
449
+ link_ids = []
450
+ if package_ids is None:
451
+ package_ids = []
430
452
  params = [link_ids, package_ids]
431
453
  resp = self.device.action(self.url + "/getArchiveInfo", params)
432
454
  return resp
@@ -697,7 +719,7 @@ class Myjdapi:
697
719
  try:
698
720
  decrypted_data = unpad(decryptor.decrypt(self.__base64_decode(data)))
699
721
  except:
700
- raise MYJDException("Failed to decode response: {}", data)
722
+ raise MYJDException("Failed to decode response: {}", data) from None
701
723
 
702
724
  return decrypted_data
703
725
 
@@ -977,7 +999,7 @@ class Myjdapi:
977
999
  except:
978
1000
  raise MYJDException(
979
1001
  "Failed to decode response: {}", encrypted_response.text
980
- )
1002
+ ) from None
981
1003
  msg = (
982
1004
  "\n\tSOURCE: "
983
1005
  + error_msg["src"]
@@ -38,8 +38,7 @@ def create_and_persist_session(shared_state):
38
38
  # AL requires FlareSolverr - check availability first
39
39
  if not is_flaresolverr_available(shared_state):
40
40
  info(
41
- f'"{hostname.upper()}" requires FlareSolverr which is not configured. '
42
- f"Please configure FlareSolverr in the web UI to use this site."
41
+ "FlareSolverr is not configured, configure FlareSolverr in the web UI to use this site."
43
42
  )
44
43
  mark_hostname_issue(
45
44
  hostname, "session", "FlareSolverr required but not configured"
@@ -58,7 +57,7 @@ def create_and_persist_session(shared_state):
58
57
 
59
58
  # Prime cookies via FlareSolverr
60
59
  try:
61
- info(f'Priming "{hostname}" session via FlareSolverr...')
60
+ debug("Priming session via FlareSolverr...")
62
61
  fs_headers = {"Content-Type": "application/json"}
63
62
  fs_payload = {
64
63
  "cmd": "request.get",
@@ -72,19 +71,19 @@ def create_and_persist_session(shared_state):
72
71
  )
73
72
  fs_resp.raise_for_status()
74
73
  except Timeout:
75
- info(f"{hostname}: FlareSolverr request timed out")
74
+ info("FlareSolverr request timed out")
76
75
  mark_hostname_issue(hostname, "session", "FlareSolverr request timed out")
77
76
  return None
78
77
  except RequestException as e:
79
78
  # This covers HTTP errors and connection issues *other than* timeout
80
- info(f"{hostname}: FlareSolverr server error: {e}")
79
+ info(f"FlareSolverr server error: {e}")
81
80
  mark_hostname_issue(hostname, "session", str(e))
82
81
  return None
83
82
 
84
83
  fs_json = fs_resp.json()
85
84
  # Check if FlareSolverr actually solved the challenge
86
85
  if fs_json.get("status") != "ok" or "solution" not in fs_json:
87
- info(f"{hostname}: FlareSolverr did not return a valid solution")
86
+ info("FlareSolverr did not return a valid solution")
88
87
  mark_hostname_issue(
89
88
  hostname, "session", "FlareSolverr did not return a valid solution"
90
89
  )
@@ -106,7 +105,7 @@ def create_and_persist_session(shared_state):
106
105
  sess.cookies.set(name, value, domain=domain, path=path)
107
106
 
108
107
  except Exception as e:
109
- debug(f'Could not prime "{hostname}" session via FlareSolverr: {e}')
108
+ debug(f"Could not prime session via FlareSolverr: {e}")
110
109
  mark_hostname_issue(hostname, "session", str(e))
111
110
  return None
112
111
 
@@ -124,12 +123,12 @@ def create_and_persist_session(shared_state):
124
123
  )
125
124
 
126
125
  if r.status_code != 200 or "invalid" in r.text.lower():
127
- info(f'Login failed: "{hostname}" - {r.status_code} - {r.text}')
126
+ info(f"Login failed: {r.status_code} - {r.text}")
128
127
  mark_hostname_issue(hostname, "session", "Login failed")
129
128
  return None
130
- info(f'Login successful: "{hostname}"')
129
+ info("Login successful")
131
130
  else:
132
- info(f'Missing credentials for: "{hostname}" - skipping login')
131
+ info("Missing credentials - skipping login")
133
132
  mark_hostname_issue(hostname, "session", "Missing credentials")
134
133
  return None
135
134
 
@@ -144,7 +143,7 @@ def retrieve_and_validate_session(shared_state):
144
143
 
145
144
  # AL requires FlareSolverr - check availability
146
145
  if not is_flaresolverr_available(shared_state):
147
- info(f'"{hostname.upper()}" requires FlareSolverr which is not configured')
146
+ info("Requires FlareSolverr which is not configured")
148
147
  mark_hostname_issue(hostname, "session", "FlareSolverr required")
149
148
  return None
150
149
 
@@ -162,16 +161,15 @@ def retrieve_and_validate_session(shared_state):
162
161
  # Check if session is older than 24 hours
163
162
  age = time.time() - created_at
164
163
  if age > SESSION_MAX_AGE_SECONDS:
165
- debug(f"{hostname}: session expired (age: {age / 3600:.1f} hours)")
164
+ debug(f"Session expired (age: {age / 3600:.1f} hours)")
166
165
  invalidate_session(shared_state)
167
166
  return create_and_persist_session(shared_state)
168
167
  else:
169
- debug(f"{hostname}: session valid (age: {age / 3600:.1f} hours)")
170
-
168
+ debug(f"Session valid (age: {age / 3600:.1f} hours)")
171
169
  except (json.JSONDecodeError, TypeError):
172
170
  # Legacy format: plain base64 token without timestamp
173
171
  # Treat as expired and recreate
174
- debug(f"{hostname}: legacy session format detected, recreating")
172
+ debug("Legacy session format detected, recreating")
175
173
  invalidate_session(shared_state)
176
174
  return create_and_persist_session(shared_state)
177
175
 
@@ -181,7 +179,7 @@ def retrieve_and_validate_session(shared_state):
181
179
  if not isinstance(sess, requests.Session):
182
180
  raise ValueError("Not a Session")
183
181
  except Exception as e:
184
- debug(f"{hostname}: session load failed: {e}")
182
+ debug(f"Session load failed: {e}")
185
183
  return create_and_persist_session(shared_state)
186
184
 
187
185
  return sess
@@ -190,7 +188,7 @@ def retrieve_and_validate_session(shared_state):
190
188
  def invalidate_session(shared_state):
191
189
  db = shared_state.values["database"]("sessions")
192
190
  db.delete(hostname)
193
- debug(f'Session for "{hostname}" marked as invalid!')
191
+ debug("Session marked as invalid!")
194
192
 
195
193
 
196
194
  def _persist_session_to_db(shared_state, sess):
@@ -259,8 +257,8 @@ def fetch_via_flaresolverr(
259
257
  # Check if FlareSolverr is available
260
258
  if not is_flaresolverr_available(shared_state):
261
259
  info(
262
- f'"{hostname.upper()}" requires FlareSolverr which is not configured. '
263
- f"Please configure FlareSolverr in the web UI."
260
+ "Requires FlareSolverr which is not configured. "
261
+ "Please configure FlareSolverr in the web UI."
264
262
  )
265
263
  return {
266
264
  "status_code": None,
@@ -275,7 +273,7 @@ def fetch_via_flaresolverr(
275
273
 
276
274
  sess = retrieve_and_validate_session(shared_state)
277
275
  if not sess:
278
- debug(f"Skipping {hostname}: site not usable (login skipped or no credentials)")
276
+ debug("Site not usable (login skipped or no credentials).")
279
277
  return {
280
278
  "status_code": None,
281
279
  "headers": {},
@@ -317,7 +315,7 @@ def fetch_via_flaresolverr(
317
315
  "error": f"FlareSolverr request failed: {e}",
318
316
  }
319
317
  except Exception as e:
320
- raise RuntimeError(f"Could not reach FlareSolverr: {e}")
318
+ raise RuntimeError(f"Could not reach FlareSolverr: {e}") from e
321
319
 
322
320
  if resp.status_code >= 400:
323
321
  mark_hostname_issue(hostname, "session", f"Request failed: {resp.status_code}")
@@ -381,7 +379,7 @@ def fetch_via_requests_session(
381
379
  sess = retrieve_and_validate_session(shared_state)
382
380
  if not sess:
383
381
  raise SkippedSiteError(
384
- f"{hostname}: site not usable (login skipped or no credentials)"
382
+ f"Site '{hostname}' not usable (login skipped or no credentials)"
385
383
  )
386
384
 
387
385
  if year:
@@ -78,7 +78,7 @@ def create_and_persist_session(shared_state):
78
78
 
79
79
  def retrieve_and_validate_session(shared_state):
80
80
  if not is_site_usable(shared_state, hostname):
81
- debug(f"Skipping {hostname}: site not usable (login skipped or no credentials)")
81
+ debug("Site not usable (login skipped or no credentials)")
82
82
  return None
83
83
 
84
84
  session_string = shared_state.values["database"]("sessions").retrieve("dd")
@@ -40,7 +40,7 @@ def create_and_persist_session(shared_state):
40
40
  password = credentials_cfg.get("password")
41
41
 
42
42
  if not user or not password:
43
- info(f'Missing credentials for: "{hostname}" - user and password are required')
43
+ info("Missing credentials - user and password are required")
44
44
  mark_hostname_issue(hostname, "session", "Missing credentials")
45
45
  return None
46
46
 
@@ -62,7 +62,7 @@ def create_and_persist_session(shared_state):
62
62
  csrf_input = soup.find("input", {"name": "_xfToken"})
63
63
 
64
64
  if not csrf_input or not csrf_input.get("value"):
65
- info(f'Could not find CSRF token on login page for: "{hostname}"')
65
+ info("Could not find CSRF token on login page")
66
66
  mark_hostname_issue(hostname, "session", "Could not find CSRF token")
67
67
  return None
68
68
 
@@ -87,15 +87,13 @@ def create_and_persist_session(shared_state):
87
87
  verify_r.raise_for_status()
88
88
 
89
89
  if 'data-logged-in="true"' not in verify_r.text:
90
- info(
91
- f'Login verification failed for: "{hostname}" - invalid credentials or login failed'
92
- )
90
+ info("Login verification failed - invalid credentials or login failed")
93
91
  mark_hostname_issue(hostname, "session", "Login verification failed")
94
92
  return None
95
93
 
96
- info(f'Session successfully created for: "{hostname}" using user/password')
94
+ info("Session successfully created using user/password")
97
95
  except Exception as e:
98
- info(f'Failed to create session for: "{hostname}" - {e}')
96
+ info(f"Failed to create session - {e}")
99
97
  mark_hostname_issue(hostname, "session", str(e))
100
98
  return None
101
99
 
@@ -132,7 +130,7 @@ def retrieve_and_validate_session(shared_state):
132
130
  if not isinstance(sess, requests.Session):
133
131
  raise ValueError("Not a Session")
134
132
  except Exception as e:
135
- debug(f"{hostname}: session load failed: {e}")
133
+ debug(f"Session load failed: {e}")
136
134
  return create_and_persist_session(shared_state)
137
135
 
138
136
  return sess
@@ -147,7 +145,7 @@ def invalidate_session(shared_state):
147
145
  """
148
146
  db = shared_state.values["database"]("sessions")
149
147
  db.delete(hostname)
150
- debug(f'Session for "{hostname}" marked as invalid!')
148
+ debug("Session marked as invalid!")
151
149
 
152
150
 
153
151
  def _persist_session_to_db(shared_state, sess):
@@ -188,7 +186,7 @@ def fetch_via_requests_session(
188
186
  sess = retrieve_and_validate_session(shared_state)
189
187
  if not sess:
190
188
  raise SkippedSiteError(
191
- f"{hostname}: site not usable (login skipped or no credentials)"
189
+ f"Site '{hostname}' not usable (login skipped or no credentials)"
192
190
  )
193
191
 
194
192
  # Execute request
@@ -78,7 +78,7 @@ def create_and_persist_session(shared_state):
78
78
 
79
79
  def retrieve_and_validate_session(shared_state):
80
80
  if not is_site_usable(shared_state, hostname):
81
- debug(f"Skipping {hostname}: site not usable (login skipped or no credentials)")
81
+ debug("Site not usable (login skipped or no credentials)")
82
82
  return None
83
83
 
84
84
  session_string = shared_state.values["database"]("sessions").retrieve("nx")
@@ -12,7 +12,7 @@ from datetime import date, datetime, timedelta
12
12
  from urllib import parse
13
13
 
14
14
  import quasarr
15
- from quasarr.providers.log import debug, info
15
+ from quasarr.providers.log import debug, error, info, trace, warn
16
16
  from quasarr.providers.myjd_api import (
17
17
  Jddevice,
18
18
  Myjdapi,
@@ -140,7 +140,7 @@ def connect_to_jd(jd, user, password, device_name):
140
140
  connection_info = device.check_direct_connection()
141
141
  if connection_info["status"]:
142
142
  info(
143
- f'Direct connection to JDownloader established: "{connection_info["ip"]}"'
143
+ f"Direct connection to JDownloader established: <g>{connection_info['ip']}</g>"
144
144
  )
145
145
  else:
146
146
  info("Could not establish direct connection to JDownloader.")
@@ -232,25 +232,25 @@ def get_device():
232
232
  # First 10 failures: 3 seconds
233
233
  sleep_time = 3
234
234
  if attempts == 10:
235
- info(
235
+ warn(
236
236
  f"WARNING: {attempts} consecutive JDownloader connection errors. Switching to 1-minute intervals."
237
237
  )
238
238
  elif attempts <= 15:
239
239
  # Next 5 failures (11-15): 1 minute
240
240
  sleep_time = 60
241
241
  if attempts % 10 == 0:
242
- info(
242
+ warn(
243
243
  f"WARNING: {attempts} consecutive JDownloader connection errors. Please check your credentials!"
244
244
  )
245
245
  if attempts == 15:
246
- info(
246
+ warn(
247
247
  f"WARNING: Still failing after {attempts} attempts. Switching to 5-minute intervals."
248
248
  )
249
249
  else:
250
250
  # After 15 failures: 5 minutes
251
251
  sleep_time = 300
252
252
  if attempts % 10 == 0:
253
- info(
253
+ warn(
254
254
  f"WARNING: {attempts} consecutive JDownloader connection errors. Please check your credentials!"
255
255
  )
256
256
 
@@ -271,7 +271,7 @@ def get_devices(user, password):
271
271
  devices = jd.list_devices()
272
272
  return devices
273
273
  except (TokenExpiredException, RequestTimeoutException, MYJDException) as e:
274
- info("Error connecting to JDownloader: " + str(e))
274
+ error("Error connecting to JDownloader: " + str(e))
275
275
  return []
276
276
 
277
277
 
@@ -611,7 +611,7 @@ def sanitize_string(s):
611
611
 
612
612
  # Remove German and English articles
613
613
  articles = r"\b(?:der|die|das|ein|eine|einer|eines|einem|einen|the|a|an|and)\b"
614
- s = re.sub(articles, "", s, re.IGNORECASE)
614
+ s = re.sub(articles, "", s, count=0, flags=re.IGNORECASE)
615
615
 
616
616
  # Replace obsolete titles
617
617
  s = s.replace("navy cis", "ncis")
@@ -631,7 +631,7 @@ def search_string_in_sanitized_title(search_string, title):
631
631
  )
632
632
  # Use word boundaries to ensure full word/phrase match
633
633
  if re.search(rf"\b{search_regex}\b", sanitized_title):
634
- debug(f"Matched search string: {search_regex} with title: {sanitized_title}")
634
+ trace(f"Matched search string: {search_regex} with title: {sanitized_title}")
635
635
  return True
636
636
  else:
637
637
  debug(
@@ -718,11 +718,13 @@ def is_valid_release(
718
718
  is_tv_search = "sonarr" in rf
719
719
  is_docs_search = "lazylibrarian" in rf
720
720
 
721
- # if search string is NOT an imdb id check search_string_in_sanitized_title - if not match, its not valid
721
+ # if search string is NOT an imdb id check search_string_in_sanitized_title - if not match, it is not valid
722
722
  if not is_docs_search and not is_imdb_id(search_string):
723
723
  if not search_string_in_sanitized_title(search_string, title):
724
724
  debug(
725
- f"Skipping {title!r} as it doesn't match sanitized search string: {search_string!r}"
725
+ "Skipping {title!r} as it doesn't match sanitized search string: {search_string!r}",
726
+ title=title,
727
+ search_string=search_string,
726
728
  )
727
729
  return False
728
730
 
@@ -730,7 +732,9 @@ def is_valid_release(
730
732
  if is_movie_search:
731
733
  if not MOVIE_REGEX.match(title):
732
734
  debug(
733
- f"Skipping {title!r} as title doesn't match movie regex: {MOVIE_REGEX.pattern}"
735
+ "Skipping {title!r} as title doesn't match movie regex: {pattern!r}",
736
+ title=title,
737
+ pattern=MOVIE_REGEX.pattern,
734
738
  )
735
739
  return False
736
740
  return True
@@ -740,14 +744,19 @@ def is_valid_release(
740
744
  # must have some S/E tag present
741
745
  if not SEASON_EP_REGEX.search(title):
742
746
  debug(
743
- f"Skipping {title!r} as title doesn't match TV show regex: {SEASON_EP_REGEX.pattern}"
747
+ "Skipping {title!r} as title doesn't match TV show regex: {pattern!r}",
748
+ title=title,
749
+ pattern=SEASON_EP_REGEX.pattern,
744
750
  )
745
751
  return False
746
752
  # if caller specified a season or episode, double‑check the match
747
753
  if season is not None or episode is not None:
748
754
  if not match_in_title(title, season, episode):
749
755
  debug(
750
- f"Skipping {title!r} as it doesn't match season {season} and episode {episode}"
756
+ "Skipping {title!r} as it doesn't match season {season} and episode {episode}",
757
+ title=title,
758
+ season=season,
759
+ episode=episode,
751
760
  )
752
761
  return False
753
762
  return True
@@ -757,7 +766,9 @@ def is_valid_release(
757
766
  # must NOT have any S/E tag present
758
767
  if SEASON_EP_REGEX.search(title):
759
768
  debug(
760
- f"Skipping {title!r} as title matches TV show regex: {SEASON_EP_REGEX.pattern}"
769
+ "Skipping {title!r} as title matches TV show regex: {pattern!r}",
770
+ title=title,
771
+ pattern=SEASON_EP_REGEX.pattern,
761
772
  )
762
773
  return False
763
774
  return True