webscout-7.1-py3-none-any.whl → webscout-7.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of webscout might be problematic.

Files changed (144)
  1. webscout/AIauto.py +191 -191
  2. webscout/AIbase.py +122 -122
  3. webscout/AIutel.py +440 -440
  4. webscout/Bard.py +343 -161
  5. webscout/DWEBS.py +489 -492
  6. webscout/Extra/YTToolkit/YTdownloader.py +995 -995
  7. webscout/Extra/YTToolkit/__init__.py +2 -2
  8. webscout/Extra/YTToolkit/transcriber.py +476 -479
  9. webscout/Extra/YTToolkit/ytapi/channel.py +307 -307
  10. webscout/Extra/YTToolkit/ytapi/playlist.py +58 -58
  11. webscout/Extra/YTToolkit/ytapi/pool.py +7 -7
  12. webscout/Extra/YTToolkit/ytapi/utils.py +62 -62
  13. webscout/Extra/YTToolkit/ytapi/video.py +103 -103
  14. webscout/Extra/autocoder/__init__.py +9 -9
  15. webscout/Extra/autocoder/autocoder_utiles.py +199 -199
  16. webscout/Extra/autocoder/rawdog.py +5 -7
  17. webscout/Extra/autollama.py +230 -230
  18. webscout/Extra/gguf.py +3 -3
  19. webscout/Extra/weather.py +171 -171
  20. webscout/LLM.py +442 -442
  21. webscout/Litlogger/__init__.py +67 -681
  22. webscout/Litlogger/core/__init__.py +6 -0
  23. webscout/Litlogger/core/level.py +20 -0
  24. webscout/Litlogger/core/logger.py +123 -0
  25. webscout/Litlogger/handlers/__init__.py +12 -0
  26. webscout/Litlogger/handlers/console.py +50 -0
  27. webscout/Litlogger/handlers/file.py +143 -0
  28. webscout/Litlogger/handlers/network.py +174 -0
  29. webscout/Litlogger/styles/__init__.py +7 -0
  30. webscout/Litlogger/styles/colors.py +231 -0
  31. webscout/Litlogger/styles/formats.py +377 -0
  32. webscout/Litlogger/styles/text.py +87 -0
  33. webscout/Litlogger/utils/__init__.py +6 -0
  34. webscout/Litlogger/utils/detectors.py +154 -0
  35. webscout/Litlogger/utils/formatters.py +200 -0
  36. webscout/Provider/AISEARCH/DeepFind.py +250 -250
  37. webscout/Provider/Blackboxai.py +3 -3
  38. webscout/Provider/ChatGPTGratis.py +226 -0
  39. webscout/Provider/Cloudflare.py +3 -4
  40. webscout/Provider/DeepSeek.py +218 -0
  41. webscout/Provider/Deepinfra.py +3 -3
  42. webscout/Provider/Free2GPT.py +131 -124
  43. webscout/Provider/Gemini.py +100 -115
  44. webscout/Provider/Glider.py +3 -3
  45. webscout/Provider/Groq.py +5 -1
  46. webscout/Provider/Jadve.py +3 -3
  47. webscout/Provider/Marcus.py +191 -192
  48. webscout/Provider/Netwrck.py +3 -3
  49. webscout/Provider/PI.py +2 -2
  50. webscout/Provider/PizzaGPT.py +2 -3
  51. webscout/Provider/QwenLM.py +311 -0
  52. webscout/Provider/TTI/AiForce/__init__.py +22 -22
  53. webscout/Provider/TTI/AiForce/async_aiforce.py +257 -257
  54. webscout/Provider/TTI/AiForce/sync_aiforce.py +242 -242
  55. webscout/Provider/TTI/Nexra/__init__.py +22 -22
  56. webscout/Provider/TTI/Nexra/async_nexra.py +286 -286
  57. webscout/Provider/TTI/Nexra/sync_nexra.py +258 -258
  58. webscout/Provider/TTI/PollinationsAI/__init__.py +23 -23
  59. webscout/Provider/TTI/PollinationsAI/async_pollinations.py +330 -330
  60. webscout/Provider/TTI/PollinationsAI/sync_pollinations.py +285 -285
  61. webscout/Provider/TTI/artbit/__init__.py +22 -22
  62. webscout/Provider/TTI/artbit/async_artbit.py +184 -184
  63. webscout/Provider/TTI/artbit/sync_artbit.py +176 -176
  64. webscout/Provider/TTI/blackbox/__init__.py +4 -4
  65. webscout/Provider/TTI/blackbox/async_blackbox.py +212 -212
  66. webscout/Provider/TTI/blackbox/sync_blackbox.py +199 -199
  67. webscout/Provider/TTI/deepinfra/__init__.py +4 -4
  68. webscout/Provider/TTI/deepinfra/async_deepinfra.py +227 -227
  69. webscout/Provider/TTI/deepinfra/sync_deepinfra.py +199 -199
  70. webscout/Provider/TTI/huggingface/__init__.py +22 -22
  71. webscout/Provider/TTI/huggingface/async_huggingface.py +199 -199
  72. webscout/Provider/TTI/huggingface/sync_huggingface.py +195 -195
  73. webscout/Provider/TTI/imgninza/__init__.py +4 -4
  74. webscout/Provider/TTI/imgninza/async_ninza.py +214 -214
  75. webscout/Provider/TTI/imgninza/sync_ninza.py +209 -209
  76. webscout/Provider/TTI/talkai/__init__.py +4 -4
  77. webscout/Provider/TTI/talkai/async_talkai.py +229 -229
  78. webscout/Provider/TTI/talkai/sync_talkai.py +207 -207
  79. webscout/Provider/TTS/deepgram.py +182 -182
  80. webscout/Provider/TTS/elevenlabs.py +136 -136
  81. webscout/Provider/TTS/gesserit.py +150 -150
  82. webscout/Provider/TTS/murfai.py +138 -138
  83. webscout/Provider/TTS/parler.py +133 -134
  84. webscout/Provider/TTS/streamElements.py +360 -360
  85. webscout/Provider/TTS/utils.py +280 -280
  86. webscout/Provider/TTS/voicepod.py +116 -116
  87. webscout/Provider/TextPollinationsAI.py +2 -3
  88. webscout/Provider/WiseCat.py +193 -0
  89. webscout/Provider/__init__.py +144 -134
  90. webscout/Provider/cerebras.py +242 -227
  91. webscout/Provider/chatglm.py +204 -204
  92. webscout/Provider/dgaf.py +2 -3
  93. webscout/Provider/gaurish.py +2 -3
  94. webscout/Provider/geminiapi.py +208 -208
  95. webscout/Provider/granite.py +223 -0
  96. webscout/Provider/hermes.py +218 -218
  97. webscout/Provider/llama3mitril.py +179 -179
  98. webscout/Provider/llamatutor.py +3 -3
  99. webscout/Provider/llmchat.py +2 -3
  100. webscout/Provider/meta.py +794 -794
  101. webscout/Provider/multichat.py +331 -331
  102. webscout/Provider/typegpt.py +359 -359
  103. webscout/Provider/yep.py +2 -2
  104. webscout/__main__.py +5 -5
  105. webscout/cli.py +319 -319
  106. webscout/conversation.py +241 -242
  107. webscout/exceptions.py +328 -328
  108. webscout/litagent/__init__.py +28 -28
  109. webscout/litagent/agent.py +2 -3
  110. webscout/litprinter/__init__.py +0 -58
  111. webscout/scout/__init__.py +8 -8
  112. webscout/scout/core.py +884 -884
  113. webscout/scout/element.py +459 -459
  114. webscout/scout/parsers/__init__.py +69 -69
  115. webscout/scout/parsers/html5lib_parser.py +172 -172
  116. webscout/scout/parsers/html_parser.py +236 -236
  117. webscout/scout/parsers/lxml_parser.py +178 -178
  118. webscout/scout/utils.py +38 -38
  119. webscout/swiftcli/__init__.py +811 -811
  120. webscout/update_checker.py +2 -12
  121. webscout/version.py +1 -1
  122. webscout/webscout_search.py +5 -4
  123. webscout/zeroart/__init__.py +54 -54
  124. webscout/zeroart/base.py +60 -60
  125. webscout/zeroart/effects.py +99 -99
  126. webscout/zeroart/fonts.py +816 -816
  127. {webscout-7.1.dist-info → webscout-7.2.dist-info}/METADATA +4 -3
  128. webscout-7.2.dist-info/RECORD +217 -0
  129. webstoken/__init__.py +30 -30
  130. webstoken/classifier.py +189 -189
  131. webstoken/keywords.py +216 -216
  132. webstoken/language.py +128 -128
  133. webstoken/ner.py +164 -164
  134. webstoken/normalizer.py +35 -35
  135. webstoken/processor.py +77 -77
  136. webstoken/sentiment.py +206 -206
  137. webstoken/stemmer.py +73 -73
  138. webstoken/tagger.py +60 -60
  139. webstoken/tokenizer.py +158 -158
  140. webscout-7.1.dist-info/RECORD +0 -198
  141. {webscout-7.1.dist-info → webscout-7.2.dist-info}/LICENSE.md +0 -0
  142. {webscout-7.1.dist-info → webscout-7.2.dist-info}/WHEEL +0 -0
  143. {webscout-7.1.dist-info → webscout-7.2.dist-info}/entry_points.txt +0 -0
  144. {webscout-7.1.dist-info → webscout-7.2.dist-info}/top_level.txt +0 -0
webscout/Provider/AISEARCH/DeepFind.py
@@ -1,251 +1,251 @@
- from uuid import uuid4
- import requests
- import json
- import re
- from typing import Any, Dict, Generator, Optional
-
- from webscout.AIbase import AISearch
- from webscout import exceptions
- from webscout import LitAgent
-
- class Response:
-     """A wrapper class for DeepFind API responses.
-
-     This class automatically converts response objects to their text representation
-     when printed or converted to string.
-
-     Attributes:
-         text (str): The text content of the response
-
-     Example:
-         >>> response = Response("Hello, world!")
-         >>> print(response)
-         Hello, world!
-         >>> str(response)
-         'Hello, world!'
-     """
-     def __init__(self, text: str):
-         self.text = text
-
-     def __str__(self):
-         return self.text
-
-     def __repr__(self):
-         return self.text
-
- class DeepFind(AISearch):
-     """A class to interact with the DeepFind AI search API.
-
-     DeepFind provides a powerful search interface that returns AI-generated responses
-     based on web content. It supports both streaming and non-streaming responses.
-
-     Basic Usage:
-         >>> from webscout import DeepFind
-         >>> ai = DeepFind()
-         >>> # Non-streaming example
-         >>> response = ai.search("What is Python?")
-         >>> print(response)
-         Python is a high-level programming language...
-
-         >>> # Streaming example
-         >>> for chunk in ai.search("Tell me about AI", stream=True):
-         ...     print(chunk, end="", flush=True)
-         Artificial Intelligence is...
-
-         >>> # Raw response format
-         >>> for chunk in ai.search("Hello", stream=True, raw=True):
-         ...     print(chunk)
-         {'text': 'Hello'}
-         {'text': ' there!'}
-
-     Args:
-         timeout (int, optional): Request timeout in seconds. Defaults to 30.
-         proxies (dict, optional): Proxy configuration for requests. Defaults to None.
-
-     Attributes:
-         api_endpoint (str): The DeepFind API endpoint URL.
-         stream_chunk_size (int): Size of chunks when streaming responses.
-         timeout (int): Request timeout in seconds.
-         headers (dict): HTTP headers used in requests.
-     """
-
-     def __init__(
-         self,
-         timeout: int = 30,
-         proxies: Optional[dict] = None,
-     ):
-         """Initialize the DeepFind API client.
-
-         Args:
-             timeout (int, optional): Request timeout in seconds. Defaults to 30.
-             proxies (dict, optional): Proxy configuration for requests. Defaults to None.
-
-         Example:
-             >>> ai = DeepFind(timeout=60) # Longer timeout
-             >>> ai = DeepFind(proxies={'http': 'http://proxy.com:8080'}) # With proxy
-         """
-         self.session = requests.Session()
-         self.api_endpoint = "https://www.deepfind.co/?q={query}"
-         self.stream_chunk_size = 1024
-         self.timeout = timeout
-         self.last_response = {}
-         self.headers = {
-             "Accept": "text/x-component",
-             "Accept-Encoding": "gzip, deflate, br, zstd",
-             "Accept-Language": "en-US,en;q=0.9,en-IN;q=0.8",
-             "Content-Type": "text/plain;charset=UTF-8",
-             "DNT": "1",
-             "Next-Action": "f354668f23f516a46ad0abe4dedb84b19068bb54",
-             "Next-Router-State-Tree": '%5B%22%22%2C%7B%22children%22%3A%5B%22__PAGE__%3F%7B%5C%22q%5C%22%3A%5C%22hi%5C%22%7D%22%2C%7B%7D%2C%22%2F%3Fq%3Dhi%22%2C%22refresh%22%5D%7D%2Cnull%2Cnull%2Ctrue%5D',
-             "Origin": "https://www.deepfind.co",
-             "Referer": "https://www.deepfind.co/?q=hi",
-             "Sec-Ch-Ua": '"Not A(Brand";v="8", "Chromium";v="132", "Microsoft Edge";v="132"',
-             "Sec-Ch-Ua-Mobile": "?0",
-             "Sec-Ch-Ua-Platform": '"Windows"',
-             "Sec-Fetch-Dest": "empty",
-             "Sec-Fetch-Mode": "cors",
-             "Sec-Fetch-Site": "same-origin",
-             "User-Agent": LitAgent().random(),
-         }
-         self.session.headers.update(self.headers)
-         self.proxies = proxies
-
-     def search(
-         self,
-         prompt: str,
-         stream: bool = False,
-         raw: bool = False,
-     ) -> Dict[str, Any] | Generator[str, None, None]:
-         """Search using the DeepFind API and get AI-generated responses.
-
-         This method sends a search query to DeepFind and returns the AI-generated response.
-         It supports both streaming and non-streaming modes, as well as raw response format.
-
-         Args:
-             prompt (str): The search query or prompt to send to the API.
-             stream (bool, optional): If True, yields response chunks as they arrive.
-                 If False, returns complete response. Defaults to False.
-             raw (bool, optional): If True, returns raw response dictionaries with 'text' key.
-                 If False, returns Response objects that convert to text automatically.
-                 Defaults to False.
-
-         Returns:
-             Union[Dict[str, Any], Generator[str, None, None]]:
-                 - If stream=False: Returns complete response
-                 - If stream=True: Yields response chunks as they arrive
-
-         Raises:
-             APIConnectionError: If the API request fails
-
-         Examples:
-             Basic search:
-             >>> ai = DeepFind()
-             >>> response = ai.search("What is Python?")
-             >>> print(response)
-             Python is a programming language...
-
-             Streaming response:
-             >>> for chunk in ai.search("Tell me about AI", stream=True):
-             ...     print(chunk, end="")
-             Artificial Intelligence...
-
-             Raw response format:
-             >>> for chunk in ai.search("Hello", stream=True, raw=True):
-             ...     print(chunk)
-             {'text': 'Hello'}
-             {'text': ' there!'}
-
-             Error handling:
-             >>> try:
-             ...     response = ai.search("My question")
-             ... except exceptions.APIConnectionError as e:
-             ...     print(f"API error: {e}")
-         """
-         url = self.api_endpoint.format(query=prompt)
-         payload = [
-             [{"role": "user", "id": uuid4().hex, "content": prompt}],
-             uuid4().hex,
-         ]
-
-         def for_stream():
-             try:
-                 with self.session.post(
-                     url,
-                     headers=self.headers,
-                     json=payload,
-                     stream=True,
-                     timeout=self.timeout,
-                 ) as response:
-                     response.raise_for_status()
-                     streaming_text = ""
-                     for line in response.iter_lines(decode_unicode=True):
-                         if line:
-                             content_matches = re.findall(r'"content":"([^"\\]*(?:\\.[^"\\]*)*)"', line)
-                             if content_matches:
-                                 for content in content_matches:
-                                     if len(content) > len(streaming_text):
-                                         delta = content[len(streaming_text):]
-                                         streaming_text = content
-                                         delta = delta.replace('\\"', '"').replace('\\n', '\n')
-                                         delta = re.sub(r'\[REF\]\(https?://[^\s]*\)', '', delta)
-                                         if raw:
-                                             yield {"text": delta}
-                                         else:
-                                             yield Response(delta)
-                             description_matches = re.findall(r'"description":"([^"\\]*(?:\\.[^"\\]*)*)"', line)
-                             if description_matches:
-                                 for description in description_matches:
-                                     if description and len(description) > len(streaming_text):
-                                         delta = description[len(streaming_text):]
-                                         streaming_text = description
-                                         delta = delta.replace('\\"', '"').replace('\\n', '\n')
-                                         delta = re.sub(r'\[REF\]\(https?://[^\s]*\)', '', delta)
-                                         if raw:
-                                             yield {"text": f"{delta}\n"}
-                                         else:
-                                             yield Response(f"{delta}\n")
-                     self.last_response = Response(streaming_text)
-             except requests.exceptions.RequestException as e:
-                 raise exceptions.APIConnectionError(f"Request failed: {e}")
-
-         def for_non_stream():
-             full_response = ""
-             for chunk in for_stream():
-                 if raw:
-                     yield chunk
-                 else:
-                     full_response += str(chunk)
-             if not raw:
-                 self.last_response = Response(full_response)
-             return self.last_response
-
-         return for_stream() if stream else for_non_stream()
-
-     @staticmethod
-     def clean_content(text: str) -> str:
-         """Removes all webblock elements with research or detail classes.
-
-         Args:
-             text (str): The text to clean
-
-         Returns:
-             str: The cleaned text
-
-         Example:
-             >>> text = '<webblock class="research">...</webblock>Other text'
-             >>> cleaned_text = DeepFind.clean_content(text)
-             >>> print(cleaned_text)
-             Other text
-         """
-         cleaned_text = re.sub(
-             r'<webblock class="(?:research|detail)">[^<]*</webblock>', "", text
-         )
-         return cleaned_text
-
-
- if __name__ == "__main__":
-     from rich import print
-     ai = DeepFind()
-     response = ai.search(input(">>> "), stream=True, raw=False)
-     for chunk in response:
+ from uuid import uuid4
+ import requests
+ import json
+ import re
+ from typing import Any, Dict, Generator, Optional
+
+ from webscout.AIbase import AISearch
+ from webscout import exceptions
+ from webscout import LitAgent
+
+ class Response:
+     """A wrapper class for DeepFind API responses.
+
+     This class automatically converts response objects to their text representation
+     when printed or converted to string.
+
+     Attributes:
+         text (str): The text content of the response
+
+     Example:
+         >>> response = Response("Hello, world!")
+         >>> print(response)
+         Hello, world!
+         >>> str(response)
+         'Hello, world!'
+     """
+     def __init__(self, text: str):
+         self.text = text
+
+     def __str__(self):
+         return self.text
+
+     def __repr__(self):
+         return self.text
+
+ class DeepFind(AISearch):
+     """A class to interact with the DeepFind AI search API.
+
+     DeepFind provides a powerful search interface that returns AI-generated responses
+     based on web content. It supports both streaming and non-streaming responses.
+
+     Basic Usage:
+         >>> from webscout import DeepFind
+         >>> ai = DeepFind()
+         >>> # Non-streaming example
+         >>> response = ai.search("What is Python?")
+         >>> print(response)
+         Python is a high-level programming language...
+
+         >>> # Streaming example
+         >>> for chunk in ai.search("Tell me about AI", stream=True):
+         ...     print(chunk, end="", flush=True)
+         Artificial Intelligence is...
+
+         >>> # Raw response format
+         >>> for chunk in ai.search("Hello", stream=True, raw=True):
+         ...     print(chunk)
+         {'text': 'Hello'}
+         {'text': ' there!'}
+
+     Args:
+         timeout (int, optional): Request timeout in seconds. Defaults to 30.
+         proxies (dict, optional): Proxy configuration for requests. Defaults to None.
+
+     Attributes:
+         api_endpoint (str): The DeepFind API endpoint URL.
+         stream_chunk_size (int): Size of chunks when streaming responses.
+         timeout (int): Request timeout in seconds.
+         headers (dict): HTTP headers used in requests.
+     """
+
+     def __init__(
+         self,
+         timeout: int = 30,
+         proxies: Optional[dict] = None,
+     ):
+         """Initialize the DeepFind API client.
+
+         Args:
+             timeout (int, optional): Request timeout in seconds. Defaults to 30.
+             proxies (dict, optional): Proxy configuration for requests. Defaults to None.
+
+         Example:
+             >>> ai = DeepFind(timeout=60) # Longer timeout
+             >>> ai = DeepFind(proxies={'http': 'http://proxy.com:8080'}) # With proxy
+         """
+         self.session = requests.Session()
+         self.api_endpoint = "https://www.deepfind.co/?q={query}"
+         self.stream_chunk_size = 1024
+         self.timeout = timeout
+         self.last_response = {}
+         self.headers = {
+             "Accept": "text/x-component",
+             "Accept-Encoding": "gzip, deflate, br, zstd",
+             "Accept-Language": "en-US,en;q=0.9,en-IN;q=0.8",
+             "Content-Type": "text/plain;charset=UTF-8",
+             "DNT": "1",
+             "Next-Action": "f354668f23f516a46ad0abe4dedb84b19068bb54",
+             "Next-Router-State-Tree": '%5B%22%22%2C%7B%22children%22%3A%5B%22__PAGE__%3F%7B%5C%22q%5C%22%3A%5C%22hi%5C%22%7D%22%2C%7B%7D%2C%22%2F%3Fq%3Dhi%22%2C%22refresh%22%5D%7D%2Cnull%2Cnull%2Ctrue%5D',
+             "Origin": "https://www.deepfind.co",
+             "Referer": "https://www.deepfind.co/?q=hi",
+             "Sec-Ch-Ua": '"Not A(Brand";v="8", "Chromium";v="132", "Microsoft Edge";v="132"',
+             "Sec-Ch-Ua-Mobile": "?0",
+             "Sec-Ch-Ua-Platform": '"Windows"',
+             "Sec-Fetch-Dest": "empty",
+             "Sec-Fetch-Mode": "cors",
+             "Sec-Fetch-Site": "same-origin",
+             "User-Agent": LitAgent().random(),
+         }
+         self.session.headers.update(self.headers)
+         self.proxies = proxies
+
+     def search(
+         self,
+         prompt: str,
+         stream: bool = False,
+         raw: bool = False,
+     ) -> Dict[str, Any] | Generator[str, None, None]:
+         """Search using the DeepFind API and get AI-generated responses.
+
+         This method sends a search query to DeepFind and returns the AI-generated response.
+         It supports both streaming and non-streaming modes, as well as raw response format.
+
+         Args:
+             prompt (str): The search query or prompt to send to the API.
+             stream (bool, optional): If True, yields response chunks as they arrive.
+                 If False, returns complete response. Defaults to False.
+             raw (bool, optional): If True, returns raw response dictionaries with 'text' key.
+                 If False, returns Response objects that convert to text automatically.
+                 Defaults to False.
+
+         Returns:
+             Union[Dict[str, Any], Generator[str, None, None]]:
+                 - If stream=False: Returns complete response
+                 - If stream=True: Yields response chunks as they arrive
+
+         Raises:
+             APIConnectionError: If the API request fails
+
+         Examples:
+             Basic search:
+             >>> ai = DeepFind()
+             >>> response = ai.search("What is Python?")
+             >>> print(response)
+             Python is a programming language...
+
+             Streaming response:
+             >>> for chunk in ai.search("Tell me about AI", stream=True):
+             ...     print(chunk, end="")
+             Artificial Intelligence...
+
+             Raw response format:
+             >>> for chunk in ai.search("Hello", stream=True, raw=True):
+             ...     print(chunk)
+             {'text': 'Hello'}
+             {'text': ' there!'}
+
+             Error handling:
+             >>> try:
+             ...     response = ai.search("My question")
+             ... except exceptions.APIConnectionError as e:
+             ...     print(f"API error: {e}")
+         """
+         url = self.api_endpoint.format(query=prompt)
+         payload = [
+             [{"role": "user", "id": uuid4().hex, "content": prompt}],
+             uuid4().hex,
+         ]
+
+         def for_stream():
+             try:
+                 with self.session.post(
+                     url,
+                     headers=self.headers,
+                     json=payload,
+                     stream=True,
+                     timeout=self.timeout,
+                 ) as response:
+                     response.raise_for_status()
+                     streaming_text = ""
+                     for line in response.iter_lines(decode_unicode=True):
+                         if line:
+                             content_matches = re.findall(r'"content":"([^"\\]*(?:\\.[^"\\]*)*)"', line)
+                             if content_matches:
+                                 for content in content_matches:
+                                     if len(content) > len(streaming_text):
+                                         delta = content[len(streaming_text):]
+                                         streaming_text = content
+                                         delta = delta.replace('\\"', '"').replace('\\n', '\n')
+                                         delta = re.sub(r'\[REF\]\(https?://[^\s]*\)', '', delta)
+                                         if raw:
+                                             yield {"text": delta}
+                                         else:
+                                             yield Response(delta)
+                             description_matches = re.findall(r'"description":"([^"\\]*(?:\\.[^"\\]*)*)"', line)
+                             if description_matches:
+                                 for description in description_matches:
+                                     if description and len(description) > len(streaming_text):
+                                         delta = description[len(streaming_text):]
+                                         streaming_text = description
+                                         delta = delta.replace('\\"', '"').replace('\\n', '\n')
+                                         delta = re.sub(r'\[REF\]\(https?://[^\s]*\)', '', delta)
+                                         if raw:
+                                             yield {"text": f"{delta}\n"}
+                                         else:
+                                             yield Response(f"{delta}\n")
+                     self.last_response = Response(streaming_text)
+             except requests.exceptions.RequestException as e:
+                 raise exceptions.APIConnectionError(f"Request failed: {e}")
+
+         def for_non_stream():
+             full_response = ""
+             for chunk in for_stream():
+                 if raw:
+                     yield chunk
+                 else:
+                     full_response += str(chunk)
+             if not raw:
+                 self.last_response = Response(full_response)
+             return self.last_response
+
+         return for_stream() if stream else for_non_stream()
+
+     @staticmethod
+     def clean_content(text: str) -> str:
+         """Removes all webblock elements with research or detail classes.
+
+         Args:
+             text (str): The text to clean
+
+         Returns:
+             str: The cleaned text
+
+         Example:
+             >>> text = '<webblock class="research">...</webblock>Other text'
+             >>> cleaned_text = DeepFind.clean_content(text)
+             >>> print(cleaned_text)
+             Other text
+         """
+         cleaned_text = re.sub(
+             r'<webblock class="(?:research|detail)">[^<]*</webblock>', "", text
+         )
+         return cleaned_text
+
+
+ if __name__ == "__main__":
+     from rich import print
+     ai = DeepFind()
+     response = ai.search(input(">>> "), stream=True, raw=False)
+     for chunk in response:
          print(chunk, end="", flush=True)
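A minimal usage sketch of the DeepFind provider shown above, mirroring its own docstring examples and __main__ block; the import path and parameters come from the diff, while the query strings are illustrative:

from webscout import DeepFind

ai = DeepFind()

# Streaming search: yields Response chunks that print as plain text.
for chunk in ai.search("Tell me about AI", stream=True):
    print(chunk, end="", flush=True)

# raw=True yields plain dicts of the form {'text': ...} instead of Response objects.
for chunk in ai.search("Hello", stream=True, raw=True):
    print(chunk)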
webscout/Provider/Blackboxai.py
@@ -4,7 +4,7 @@ from typing import Any, Dict, Optional, Union, Generator, List
  from webscout.AIutel import Optimizers, Conversation, AwesomePrompts
  from webscout.AIbase import Provider
  from webscout import exceptions
- from webscout.Litlogger import LitLogger, LogFormat, ColorScheme
+ from webscout.Litlogger import Logger, LogFormat

  class BLACKBOXAI(Provider):
      """
@@ -46,10 +46,10 @@ class BLACKBOXAI(Provider):
          system_message: str = "You are a helpful AI assistant."
      ):
          """Initialize BlackboxAI with enhanced configuration options."""
-         self.logger = LitLogger(
+         self.logger = Logger(
              name="BlackboxAI",
              format=LogFormat.MODERN_EMOJI,
-             color_scheme=ColorScheme.CYBERPUNK
+
          ) if logging else None

          self.session = requests.Session()
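The Blackboxai.py change above is the provider-side counterpart of the Litlogger rewrite listed in the file changes (the old single module is split into core, handlers, styles, and utils). A minimal before-and-after sketch of the logger setup, using only the names visible in the diff; nothing beyond the constructor arguments shown there is asserted:

# webscout 7.1 (old API, removed in 7.2):
# from webscout.Litlogger import LitLogger, LogFormat, ColorScheme
# logger = LitLogger(name="BlackboxAI", format=LogFormat.MODERN_EMOJI, color_scheme=ColorScheme.CYBERPUNK)

# webscout 7.2 (new API, per the hunk above): the class is now Logger and color_scheme is gone.
from webscout.Litlogger import Logger, LogFormat

logger = Logger(name="BlackboxAI", format=LogFormat.MODERN_EMOJI)

Any code that still imports LitLogger or ColorScheme from webscout.Litlogger would need the same substitution when upgrading to 7.2.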