webscout 8.3.1__py3-none-any.whl → 8.3.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of webscout might be problematic. Click here for more details.

Files changed (114)
  1. webscout/AIutel.py +180 -78
  2. webscout/Bing_search.py +417 -0
  3. webscout/Extra/gguf.py +706 -177
  4. webscout/Provider/AISEARCH/__init__.py +1 -0
  5. webscout/Provider/AISEARCH/genspark_search.py +7 -7
  6. webscout/Provider/AISEARCH/stellar_search.py +132 -0
  7. webscout/Provider/ExaChat.py +84 -58
  8. webscout/Provider/GeminiProxy.py +140 -0
  9. webscout/Provider/HeckAI.py +85 -80
  10. webscout/Provider/Jadve.py +56 -50
  11. webscout/Provider/MCPCore.py +78 -75
  12. webscout/Provider/MiniMax.py +207 -0
  13. webscout/Provider/Nemotron.py +41 -13
  14. webscout/Provider/Netwrck.py +34 -51
  15. webscout/Provider/OPENAI/BLACKBOXAI.py +0 -4
  16. webscout/Provider/OPENAI/GeminiProxy.py +328 -0
  17. webscout/Provider/OPENAI/MiniMax.py +298 -0
  18. webscout/Provider/OPENAI/README.md +32 -29
  19. webscout/Provider/OPENAI/README_AUTOPROXY.md +238 -0
  20. webscout/Provider/OPENAI/TogetherAI.py +4 -17
  21. webscout/Provider/OPENAI/__init__.py +17 -1
  22. webscout/Provider/OPENAI/autoproxy.py +1067 -39
  23. webscout/Provider/OPENAI/base.py +17 -76
  24. webscout/Provider/OPENAI/deepinfra.py +42 -108
  25. webscout/Provider/OPENAI/e2b.py +0 -1
  26. webscout/Provider/OPENAI/flowith.py +179 -166
  27. webscout/Provider/OPENAI/friendli.py +233 -0
  28. webscout/Provider/OPENAI/mcpcore.py +109 -70
  29. webscout/Provider/OPENAI/monochat.py +329 -0
  30. webscout/Provider/OPENAI/pydantic_imports.py +1 -172
  31. webscout/Provider/OPENAI/scirachat.py +59 -51
  32. webscout/Provider/OPENAI/toolbaz.py +3 -9
  33. webscout/Provider/OPENAI/typegpt.py +1 -1
  34. webscout/Provider/OPENAI/utils.py +19 -42
  35. webscout/Provider/OPENAI/x0gpt.py +14 -2
  36. webscout/Provider/OPENAI/xenai.py +514 -0
  37. webscout/Provider/OPENAI/yep.py +8 -2
  38. webscout/Provider/OpenGPT.py +54 -32
  39. webscout/Provider/PI.py +58 -84
  40. webscout/Provider/StandardInput.py +32 -13
  41. webscout/Provider/TTI/README.md +9 -9
  42. webscout/Provider/TTI/__init__.py +3 -1
  43. webscout/Provider/TTI/aiarta.py +92 -78
  44. webscout/Provider/TTI/bing.py +231 -0
  45. webscout/Provider/TTI/infip.py +212 -0
  46. webscout/Provider/TTI/monochat.py +220 -0
  47. webscout/Provider/TTS/speechma.py +45 -39
  48. webscout/Provider/TeachAnything.py +11 -3
  49. webscout/Provider/TextPollinationsAI.py +78 -70
  50. webscout/Provider/TogetherAI.py +350 -0
  51. webscout/Provider/Venice.py +37 -46
  52. webscout/Provider/VercelAI.py +27 -24
  53. webscout/Provider/WiseCat.py +35 -35
  54. webscout/Provider/WrDoChat.py +22 -26
  55. webscout/Provider/WritingMate.py +26 -22
  56. webscout/Provider/XenAI.py +324 -0
  57. webscout/Provider/__init__.py +10 -5
  58. webscout/Provider/deepseek_assistant.py +378 -0
  59. webscout/Provider/granite.py +48 -57
  60. webscout/Provider/koala.py +51 -39
  61. webscout/Provider/learnfastai.py +49 -64
  62. webscout/Provider/llmchat.py +79 -93
  63. webscout/Provider/llmchatco.py +63 -78
  64. webscout/Provider/multichat.py +51 -40
  65. webscout/Provider/oivscode.py +1 -1
  66. webscout/Provider/scira_chat.py +159 -96
  67. webscout/Provider/scnet.py +13 -13
  68. webscout/Provider/searchchat.py +13 -13
  69. webscout/Provider/sonus.py +12 -11
  70. webscout/Provider/toolbaz.py +25 -8
  71. webscout/Provider/turboseek.py +41 -42
  72. webscout/Provider/typefully.py +27 -12
  73. webscout/Provider/typegpt.py +41 -46
  74. webscout/Provider/uncovr.py +55 -90
  75. webscout/Provider/x0gpt.py +33 -17
  76. webscout/Provider/yep.py +79 -96
  77. webscout/auth/__init__.py +55 -0
  78. webscout/auth/api_key_manager.py +189 -0
  79. webscout/auth/auth_system.py +100 -0
  80. webscout/auth/config.py +76 -0
  81. webscout/auth/database.py +400 -0
  82. webscout/auth/exceptions.py +67 -0
  83. webscout/auth/middleware.py +248 -0
  84. webscout/auth/models.py +130 -0
  85. webscout/auth/providers.py +279 -0
  86. webscout/auth/rate_limiter.py +254 -0
  87. webscout/auth/request_models.py +127 -0
  88. webscout/auth/request_processing.py +226 -0
  89. webscout/auth/routes.py +550 -0
  90. webscout/auth/schemas.py +103 -0
  91. webscout/auth/server.py +367 -0
  92. webscout/client.py +121 -70
  93. webscout/litagent/Readme.md +68 -55
  94. webscout/litagent/agent.py +99 -9
  95. webscout/scout/core/scout.py +104 -26
  96. webscout/scout/element.py +139 -18
  97. webscout/swiftcli/core/cli.py +14 -3
  98. webscout/swiftcli/decorators/output.py +59 -9
  99. webscout/update_checker.py +31 -49
  100. webscout/version.py +1 -1
  101. webscout/webscout_search.py +4 -12
  102. webscout/webscout_search_async.py +3 -10
  103. webscout/yep_search.py +2 -11
  104. {webscout-8.3.1.dist-info → webscout-8.3.3.dist-info}/METADATA +141 -99
  105. {webscout-8.3.1.dist-info → webscout-8.3.3.dist-info}/RECORD +109 -83
  106. {webscout-8.3.1.dist-info → webscout-8.3.3.dist-info}/entry_points.txt +1 -1
  107. webscout/Provider/HF_space/__init__.py +0 -0
  108. webscout/Provider/HF_space/qwen_qwen2.py +0 -206
  109. webscout/Provider/OPENAI/api.py +0 -1320
  110. webscout/Provider/TTI/fastflux.py +0 -233
  111. webscout/Provider/Writecream.py +0 -246
  112. {webscout-8.3.1.dist-info → webscout-8.3.3.dist-info}/WHEEL +0 -0
  113. {webscout-8.3.1.dist-info → webscout-8.3.3.dist-info}/licenses/LICENSE.md +0 -0
  114. {webscout-8.3.1.dist-info → webscout-8.3.3.dist-info}/top_level.txt +0 -0
webscout/AIutel.py CHANGED
@@ -1,5 +1,6 @@
1
1
  import codecs
2
2
  import json
3
+ import re
3
4
  from typing import (
4
5
  Any,
5
6
  AsyncGenerator,
@@ -48,17 +49,6 @@ def _process_chunk(
48
49
  error_handler (Optional[Callable[[Exception, str], Optional[Any]]]): An optional callback function that is called when JSON parsing fails.
49
50
  It receives the exception and the sanitized chunk as arguments. It should return a value to yield instead of the raw chunk, or None to ignore.
50
51
 
51
-
52
- Args:
53
- chunk: Chunk of text to process.
54
- intro_value: Prefix to remove from the chunk.
55
- to_json: Parse the chunk as JSON if True.
56
- skip_markers: List of markers to skip.
57
- strip_chars: Characters to strip from the chunk.
58
- yield_raw_on_error: Whether to return the raw chunk on parse errors.
59
- error_handler: Optional callback ``Callable[[Exception, str], Optional[Any]]``
60
- invoked when JSON parsing fails. The callback should return a value to
61
- yield instead of the raw chunk, or ``None`` to ignore.
62
52
  """
63
53
  if not isinstance(chunk, str):
64
54
  return None
@@ -128,11 +118,6 @@ def _decode_byte_stream(
128
118
  Defaults to 'replace'.
129
119
  buffer_size (int): The size of the internal buffer used for decoding.
130
120
 
131
- Args:
132
- byte_iterator: Iterator yielding bytes
133
- encoding: Character encoding to use
134
- errors: How to handle encoding errors ('strict', 'ignore', 'replace')
135
- buffer_size: Size of internal buffer for performance tuning
136
121
  """
137
122
  # Initialize decoder with the specified encoding
138
123
  try:
@@ -252,7 +237,7 @@ def _sanitize_stream_sync(
252
237
  Args:
253
238
  data: String, iterable of strings, or iterable of bytes to process.
254
239
  intro_value: Prefix indicating the start of meaningful data.
255
- to_json: Parse JSON content if ``True``.
240
+ to_json: Parse the chunk as JSON if True.
256
241
  skip_markers: Lines containing any of these markers are skipped.
257
242
  strip_chars: Characters to strip from each line.
258
243
  start_marker: Begin processing only after this marker is found.
@@ -277,69 +262,50 @@ def _sanitize_stream_sync(
277
262
  processing_active = start_marker is None
278
263
  buffer = ""
279
264
  found_start = False if start_marker else True
265
+ line_iterator: Iterable[str]
280
266
 
281
- # Fast path for single string processing
282
267
  if isinstance(data, str):
283
- processed_item = None
284
- if processing_active:
285
- if to_json:
286
- try:
287
- data = data.strip()
288
- if data:
289
- processed_item = json.loads(data)
290
- except Exception as e:
291
- if error_handler:
292
- try:
293
- handled = error_handler(e, data)
294
- if handled is not None:
295
- processed_item = handled
296
-
297
- except Exception:
298
- pass
299
- if processed_item is None:
300
- processed_item = data if yield_raw_on_error else None
268
+ # If data is a string, decide whether to split it into lines
269
+ # or treat it as an iterable containing a single chunk.
270
+ temp_lines: List[str]
271
+ if line_delimiter is None: # Default: split by newlines if present
272
+ if '\n' in data or '\r' in data:
273
+ temp_lines = data.splitlines()
301
274
  else:
302
- processed_item = _process_chunk(
303
- data, intro_value, False, effective_skip_markers,
304
- strip_chars, yield_raw_on_error, error_handler
305
- )
306
- if processed_item is not None:
307
- if content_extractor:
308
- try:
309
- final_content = content_extractor(processed_item)
310
- if final_content is not None:
311
- yield final_content
312
- except Exception:
313
- pass
314
- else:
315
- yield processed_item
316
- return
317
-
318
- # Stream processing path
319
- if not hasattr(data, '__iter__'):
320
- raise TypeError(f"Input must be a string or an iterable, not {type(data).__name__}")
321
-
322
- try:
323
- iterator = iter(data)
324
- first_item = next(iterator, None)
325
- if first_item is None:
275
+ temp_lines = [data] # Treat as a single line/chunk
276
+ elif line_delimiter in data: # Custom delimiter found in string
277
+ temp_lines = data.split(line_delimiter)
278
+ else: # Custom delimiter not found, or string is effectively a single segment
279
+ temp_lines = [data]
280
+ line_iterator = iter(temp_lines)
281
+ elif hasattr(data, '__iter__'): # data is an iterable (but not a string)
282
+ _iter = iter(data)
283
+ first_item = next(_iter, None)
284
+
285
+ if first_item is None: # Iterable was empty
326
286
  return
287
+
327
288
  from itertools import chain
328
- stream = chain([first_item], iterator)
289
+ # Reconstruct the full iterable including the first_item
290
+ stream_input_iterable = chain([first_item], _iter)
329
291
 
330
- # Determine if we're dealing with bytes or strings
331
292
  if isinstance(first_item, bytes):
293
+ # Ensure stream_input_iterable is typed as Iterable[bytes] for _decode_byte_stream
332
294
  line_iterator = _decode_byte_stream(
333
- stream,
295
+ stream_input_iterable, # type: ignore
334
296
  encoding=encoding,
335
297
  errors=encoding_errors,
336
298
  buffer_size=buffer_size
337
299
  )
338
300
  elif isinstance(first_item, str):
339
- line_iterator = stream
301
+ # Ensure stream_input_iterable is typed as Iterable[str]
302
+ line_iterator = stream_input_iterable # type: ignore
340
303
  else:
341
- raise TypeError(f"Stream must yield strings or bytes, not {type(first_item).__name__}")
304
+ raise TypeError(f"Iterable must yield strings or bytes, not {type(first_item).__name__}")
305
+ else: # Not a string and not an iterable
306
+ raise TypeError(f"Input must be a string or an iterable, not {type(data).__name__}")
342
307
 
308
+ try:
343
309
  for line in line_iterator:
344
310
  if not line:
345
311
  continue
@@ -630,10 +596,17 @@ async def _sanitize_stream_async(
630
596
  def sanitize_stream(
631
597
  data: Union[
632
598
  str,
599
+ bytes,
633
600
  Iterable[str],
634
601
  Iterable[bytes],
635
602
  AsyncIterable[str],
636
603
  AsyncIterable[bytes],
604
+ dict,
605
+ list,
606
+ int,
607
+ float,
608
+ bool,
609
+ None,
637
610
  ],
638
611
  intro_value: str = "data:",
639
612
  to_json: bool = True,
@@ -648,19 +621,16 @@ def sanitize_stream(
648
621
  buffer_size: int = 8192,
649
622
  line_delimiter: Optional[str] = None,
650
623
  error_handler: Optional[Callable[[Exception, str], Optional[Any]]] = None,
624
+ object_mode: Literal["as_is", "json", "str"] = "json",
625
+ raw: bool = False,
651
626
  ) -> Union[Generator[Any, None, None], AsyncGenerator[Any, None]]:
652
627
  """
653
628
  Processes streaming data (strings or bytes) in either synchronous or asynchronous mode.
654
-
655
- This function acts as a unified interface for handling both synchronous and
656
- asynchronous data streams. It automatically detects the type of input data and
657
- dispatches it to the appropriate processing function (`_sanitize_stream_sync` or
658
- `_sanitize_stream_async`).
629
+ Now supports non-iterable and miscellaneous input types (dict, list, int, float, bool, None).
659
630
 
660
631
  Args:
661
- data (Union[str, Iterable[str], Iterable[bytes], AsyncIterable[str], AsyncIterable[bytes]]):
662
- The data to be processed. Can be a string, a synchronous iterable of strings or bytes,
663
- or an asynchronous iterable of strings or bytes.
632
+ data: The data to be processed. Can be a string, bytes, a synchronous iterable of strings or bytes,
633
+ an asynchronous iterable of strings or bytes, or a single object (dict, list, int, float, bool, None).
664
634
  intro_value (str): Prefix indicating the start of meaningful data. Defaults to "data:".
665
635
  to_json (bool): Parse JSON content if ``True``. Defaults to True.
666
636
  skip_markers (Optional[List[str]]): Lines containing any of these markers are skipped. Defaults to None.
@@ -678,25 +648,157 @@ def sanitize_stream(
678
648
  error_handler (Optional[Callable[[Exception, str], Optional[Any]]]):
679
649
  Callback invoked with ``(Exception, str)`` when JSON parsing fails.
680
650
  If the callback returns a value, it is yielded in place of the raw line. Defaults to None.
651
+ object_mode (Literal["as_is", "json", "str"]): How to handle non-string, non-iterable objects.
652
+ "json" (default) yields as JSON string, "str" yields as str(obj), "as_is" yields the object as-is.
653
+ raw (bool): If True, yields the raw response as returned by the API, chunk by chunk (no splitting or joining).
681
654
 
682
655
  Returns:
683
656
  Union[Generator[Any, None, None], AsyncGenerator[Any, None]]:
684
- A generator or an asynchronous generator yielding the processed data.
685
- """
657
+ A generator or an asynchronous generator yielding the processed data, or raw data if raw=True.
658
+ """ # --- RAW MODE: yield each chunk exactly as returned by the API ---
659
+ if raw:
660
+ def _raw_passthrough_sync(source_iter):
661
+ for chunk in source_iter:
662
+ if isinstance(chunk, (bytes, bytearray)):
663
+ # Decode bytes preserving all whitespace and newlines
664
+ yield chunk.decode(encoding, encoding_errors)
665
+ elif chunk is not None:
666
+ # Yield string chunks as-is, preserving all formatting
667
+ yield chunk
668
+ # Skip None chunks entirely
669
+ async def _raw_passthrough_async(source_aiter):
670
+ async for chunk in source_aiter:
671
+ if isinstance(chunk, (bytes, bytearray)):
672
+ # Decode bytes preserving all whitespace and newlines
673
+ yield chunk.decode(encoding, encoding_errors)
674
+ elif chunk is not None:
675
+ # Yield string chunks as-is, preserving all formatting
676
+ yield chunk
677
+ # Skip None chunks entirely
678
+ # Sync iterable (but not str/bytes)
679
+ if hasattr(data, "__iter__") and not isinstance(data, (str, bytes)):
680
+ return _raw_passthrough_sync(data)
681
+ # Async iterable
682
+ if hasattr(data, "__aiter__"):
683
+ return _raw_passthrough_async(data)
684
+ # Single string or bytes
685
+ if isinstance(data, (bytes, bytearray)):
686
+ def _yield_single():
687
+ yield data.decode(encoding, encoding_errors)
688
+ return _yield_single()
689
+ else:
690
+ def _yield_single():
691
+ if data is not None:
692
+ yield data
693
+ return _yield_single()
694
+ # --- END RAW MODE ---
695
+
696
+ text_attr = getattr(data, "text", None)
697
+ content_attr = getattr(data, "content", None)
698
+
699
+ # Handle None
700
+ if data is None:
701
+ def _empty_gen():
702
+ if False:
703
+ yield None
704
+ return _empty_gen()
705
+
706
+ # Handle bytes directly
707
+ if isinstance(data, bytes):
708
+ try:
709
+ payload = data.decode(encoding, encoding_errors)
710
+ except Exception:
711
+ payload = str(data)
712
+ return _sanitize_stream_sync(
713
+ payload, intro_value, to_json, skip_markers, strip_chars,
714
+ start_marker, end_marker, content_extractor, yield_raw_on_error,
715
+ encoding, encoding_errors, buffer_size, line_delimiter, error_handler,
716
+ )
717
+
718
+ # Handle string directly
719
+ if isinstance(data, str):
720
+ return _sanitize_stream_sync(
721
+ data, intro_value, to_json, skip_markers, strip_chars,
722
+ start_marker, end_marker, content_extractor, yield_raw_on_error,
723
+ encoding, encoding_errors, buffer_size, line_delimiter, error_handler,
724
+ )
725
+
726
+ # Handle dict, list, int, float, bool (non-iterable, non-string/bytes)
727
+ if isinstance(data, (dict, list, int, float, bool)):
728
+ if object_mode == "as_is":
729
+ def _as_is_gen():
730
+ yield data
731
+ return _as_is_gen()
732
+ elif object_mode == "str":
733
+ return _sanitize_stream_sync(
734
+ str(data), intro_value, to_json, skip_markers, strip_chars,
735
+ start_marker, end_marker, content_extractor, yield_raw_on_error,
736
+ encoding, encoding_errors, buffer_size, line_delimiter, error_handler,
737
+ )
738
+ else: # "json"
739
+ try:
740
+ json_str = json.dumps(data)
741
+ except Exception:
742
+ json_str = str(data)
743
+ return _sanitize_stream_sync(
744
+ json_str, intro_value, to_json, skip_markers, strip_chars,
745
+ start_marker, end_marker, content_extractor, yield_raw_on_error,
746
+ encoding, encoding_errors, buffer_size, line_delimiter, error_handler,
747
+ )
748
+
749
+ # Handle file-like objects (optional, treat as string if .read exists)
750
+ if hasattr(data, "read") and callable(data.read):
751
+ try:
752
+ file_content = data.read()
753
+ if isinstance(file_content, bytes):
754
+ file_content = file_content.decode(encoding, encoding_errors)
755
+ return _sanitize_stream_sync(
756
+ file_content, intro_value, to_json, skip_markers, strip_chars,
757
+ start_marker, end_marker, content_extractor, yield_raw_on_error,
758
+ encoding, encoding_errors, buffer_size, line_delimiter, error_handler,
759
+ )
760
+ except Exception:
761
+ pass # fallback to next
762
+
763
+ # Handle .text or .content attributes
764
+ if isinstance(text_attr, str):
765
+ payload = text_attr
766
+ return _sanitize_stream_sync(
767
+ payload, intro_value, to_json, skip_markers, strip_chars,
768
+ start_marker, end_marker, content_extractor, yield_raw_on_error,
769
+ encoding, encoding_errors, buffer_size, line_delimiter, error_handler,
770
+ )
771
+ elif isinstance(content_attr, bytes):
772
+ try:
773
+ payload = content_attr.decode(encoding, encoding_errors)
774
+ except Exception:
775
+ payload = str(content_attr)
776
+ return _sanitize_stream_sync(
777
+ payload, intro_value, to_json, skip_markers, strip_chars,
778
+ start_marker, end_marker, content_extractor, yield_raw_on_error,
779
+ encoding, encoding_errors, buffer_size, line_delimiter, error_handler,
780
+ )
686
781
 
782
+ # Handle async iterables
687
783
  if hasattr(data, "__aiter__"):
688
784
  return _sanitize_stream_async(
689
785
  data, intro_value, to_json, skip_markers, strip_chars,
690
786
  start_marker, end_marker, content_extractor, yield_raw_on_error,
691
787
  encoding, encoding_errors, buffer_size, line_delimiter, error_handler,
692
788
  )
789
+ # Handle sync iterables (but not strings/bytes)
790
+ if hasattr(data, "__iter__"):
791
+ return _sanitize_stream_sync(
792
+ data, intro_value, to_json, skip_markers, strip_chars,
793
+ start_marker, end_marker, content_extractor, yield_raw_on_error,
794
+ encoding, encoding_errors, buffer_size, line_delimiter, error_handler,
795
+ )
796
+ # Fallback: treat as string
693
797
  return _sanitize_stream_sync(
694
- data, intro_value, to_json, skip_markers, strip_chars,
798
+ str(data), intro_value, to_json, skip_markers, strip_chars,
695
799
  start_marker, end_marker, content_extractor, yield_raw_on_error,
696
800
  encoding, encoding_errors, buffer_size, line_delimiter, error_handler,
697
801
  )
698
-
699
-
700
802
  from .conversation import Conversation # noqa: E402,F401
701
803
  from .Extra.autocoder import AutoCoder # noqa: E402,F401
702
804
  from .optimizers import Optimizers # noqa: E402,F401