webscout 6.5__py3-none-any.whl → 6.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of webscout might be problematic. Click here for more details.

Files changed (70)
  1. webscout/Extra/autocoder/autocoder_utiles.py +119 -101
  2. webscout/Extra/weather.py +5 -5
  3. webscout/Provider/AISEARCH/__init__.py +2 -0
  4. webscout/Provider/AISEARCH/ooai.py +155 -0
  5. webscout/Provider/Amigo.py +70 -85
  6. webscout/Provider/{prefind.py → Jadve.py} +72 -70
  7. webscout/Provider/Netwrck.py +239 -0
  8. webscout/Provider/Openai.py +4 -3
  9. webscout/Provider/PI.py +2 -2
  10. webscout/Provider/PizzaGPT.py +3 -3
  11. webscout/Provider/TeachAnything.py +15 -2
  12. webscout/Provider/Youchat.py +42 -8
  13. webscout/Provider/__init__.py +134 -147
  14. webscout/Provider/meta.py +1 -1
  15. webscout/Provider/multichat.py +230 -0
  16. webscout/Provider/promptrefine.py +2 -2
  17. webscout/Provider/talkai.py +10 -13
  18. webscout/Provider/turboseek.py +5 -4
  19. webscout/Provider/tutorai.py +8 -112
  20. webscout/Provider/typegpt.py +4 -5
  21. webscout/Provider/x0gpt.py +81 -9
  22. webscout/Provider/yep.py +123 -361
  23. webscout/__init__.py +10 -1
  24. webscout/cli.py +31 -39
  25. webscout/conversation.py +24 -9
  26. webscout/exceptions.py +188 -20
  27. webscout/litprinter/__init__.py +19 -123
  28. webscout/litprinter/colors.py +54 -0
  29. webscout/optimizers.py +335 -185
  30. webscout/scout/__init__.py +2 -5
  31. webscout/scout/core/__init__.py +7 -0
  32. webscout/scout/core/crawler.py +140 -0
  33. webscout/scout/core/scout.py +571 -0
  34. webscout/scout/core/search_result.py +96 -0
  35. webscout/scout/core/text_analyzer.py +63 -0
  36. webscout/scout/core/text_utils.py +277 -0
  37. webscout/scout/core/web_analyzer.py +52 -0
  38. webscout/scout/element.py +6 -5
  39. webscout/update_checker.py +117 -58
  40. webscout/version.py +1 -1
  41. webscout/webscout_search.py +1 -1
  42. webscout/zeroart/base.py +15 -16
  43. webscout/zeroart/effects.py +1 -1
  44. webscout/zeroart/fonts.py +1 -1
  45. {webscout-6.5.dist-info → webscout-6.7.dist-info}/METADATA +9 -172
  46. {webscout-6.5.dist-info → webscout-6.7.dist-info}/RECORD +63 -45
  47. {webscout-6.5.dist-info → webscout-6.7.dist-info}/entry_points.txt +1 -1
  48. webscout-6.7.dist-info/top_level.txt +2 -0
  49. webstoken/__init__.py +30 -0
  50. webstoken/classifier.py +189 -0
  51. webstoken/keywords.py +216 -0
  52. webstoken/language.py +128 -0
  53. webstoken/ner.py +164 -0
  54. webstoken/normalizer.py +35 -0
  55. webstoken/processor.py +77 -0
  56. webstoken/sentiment.py +206 -0
  57. webstoken/stemmer.py +73 -0
  58. webstoken/t.py +75 -0
  59. webstoken/tagger.py +60 -0
  60. webstoken/tokenizer.py +158 -0
  61. webscout/Provider/Perplexity.py +0 -591
  62. webscout/Provider/RoboCoders.py +0 -206
  63. webscout/Provider/genspark.py +0 -225
  64. webscout/Provider/perplexitylabs.py +0 -265
  65. webscout/Provider/twitterclone.py +0 -251
  66. webscout/Provider/upstage.py +0 -230
  67. webscout-6.5.dist-info/top_level.txt +0 -1
  68. /webscout/Provider/{felo_search.py → AISEARCH/felo_search.py} +0 -0
  69. {webscout-6.5.dist-info → webscout-6.7.dist-info}/LICENSE.md +0 -0
  70. {webscout-6.5.dist-info → webscout-6.7.dist-info}/WHEEL +0 -0
@@ -1,147 +1,134 @@
1
- # webscout/providers/__init__.py
2
- from .PI import *
3
- from .Llama import LLAMA
4
- from .Cohere import Cohere
5
- from .Reka import REKA
6
- from .Groq import GROQ
7
- from .Groq import AsyncGROQ
8
- from .Openai import OPENAI
9
- from .Openai import AsyncOPENAI
10
- from .Koboldai import KOBOLDAI
11
- from .Koboldai import AsyncKOBOLDAI
12
- from .RoboCoders import RoboCoders
13
- from .Perplexity import *
14
- from .perplexitylabs import PerplexityLabs
15
- from .Blackboxai import BLACKBOXAI
16
- from .Phind import PhindSearch
17
- from .Phind import Phindv2
18
- from .ai4chat import *
19
- from .Gemini import GEMINI
20
- from .Deepseek import DeepSeek
21
- from .Deepinfra import DeepInfra
22
- from .Farfalle import *
23
- from .cleeai import *
24
- from .OLLAMA import OLLAMA
25
- from .Andi import AndiSearch
26
- from .PizzaGPT import *
27
- from .Llama3 import *
28
- from .DARKAI import *
29
- from .koala import *
30
- from .RUBIKSAI import *
31
- from .meta import *
32
- from .DiscordRocks import *
33
- from .felo_search import *
34
- from .julius import *
35
- from .Youchat import *
36
- from .yep import *
37
- from .Cloudflare import *
38
- from .turboseek import *
39
- from .Free2GPT import *
40
- from .EDITEE import *
41
- from .TeachAnything import *
42
- from .AI21 import *
43
- from .Chatify import *
44
- from .x0gpt import *
45
- from .cerebras import *
46
- from .lepton import *
47
- from .geminiapi import *
48
- from .elmo import *
49
- from .genspark import *
50
- from .upstage import *
51
- from .Bing import *
52
- from .GPTWeb import *
53
- # from .UNFINISHED.aigames import *
54
- from .llamatutor import *
55
- from .promptrefine import *
56
- from .twitterclone import *
57
- from .tutorai import *
58
- from .ChatGPTES import *
59
- from .Amigo import *
60
- from .prefind import *
61
- from .bagoodex import *
62
- # from .UNFINISHED.ChatHub import *
63
- from .aimathgpt import *
64
- from .gaurish import *
65
- from .geminiprorealtime import *
66
- from .NinjaChat import *
67
- from .llmchat import *
68
- from .talkai import *
69
- from .askmyai import *
70
- from .llama3mitril import *
71
- from .Marcus import *
72
- from .typegpt import *
73
- from .mhystical import *
74
- __all__ = [
75
- 'Farfalle',
76
- 'LLAMA',
77
- 'Cohere',
78
- 'REKA',
79
- 'GROQ',
80
- 'AsyncGROQ',
81
- 'OPENAI',
82
- 'AsyncOPENAI',
83
- 'KOBOLDAI',
84
- 'AsyncKOBOLDAI',
85
- 'Perplexity',
86
- 'PerplexityLabs',
87
- 'BLACKBOXAI',
88
- 'PhindSearch',
89
- 'Felo',
90
- 'GEMINI',
91
- 'DeepSeek',
92
- 'DeepInfra',
93
- 'AI4Chat',
94
- 'Phindv2',
95
- 'OLLAMA',
96
- 'AndiSearch',
97
- 'PIZZAGPT',
98
- 'LLAMA3',
99
- 'DARKAI',
100
- 'KOALA',
101
- 'RUBIKSAI',
102
- 'Meta',
103
- 'AskMyAI',
104
- 'DiscordRocks',
105
- 'PiAI',
106
- 'Julius',
107
- 'YouChat',
108
- 'YEPCHAT',
109
- 'Cloudflare',
110
- 'TurboSeek',
111
- 'Editee',
112
- 'TeachAnything',
113
- 'AI21',
114
- 'Chatify',
115
- 'X0GPT',
116
- 'Cerebras',
117
- 'Lepton',
118
- 'GEMINIAPI',
119
- 'Cleeai',
120
- 'Elmo',
121
- 'Genspark',
122
- 'Upstage',
123
- 'Free2GPT',
124
- 'Bing',
125
- 'GPTWeb',
126
- # 'AIGameIO',
127
- 'LlamaTutor',
128
- 'PromptRefine',
129
- 'AIUncensored',
130
- 'TutorAI',
131
- 'ChatGPTES',
132
- 'AmigoChat',
133
- 'PrefindAI',
134
- 'Bagoodex',
135
- # 'ChatHub',
136
- 'AIMathGPT',
137
- 'GaurishCerebras',
138
- 'GeminiPro',
139
- 'NinjaChat',
140
- 'LLMChat',
141
- 'Talkai',
142
- 'Llama3Mitril',
143
- 'Marcus',
144
- 'RoboCoders',
145
- 'TypeGPT',
146
- 'Mhystical',
147
- ]
1
+ # webscout/providers/__init__.py
2
+ from .PI import *
3
+ from .Llama import LLAMA
4
+ from .Cohere import Cohere
5
+ from .Reka import REKA
6
+ from .Groq import GROQ
7
+ from .Groq import AsyncGROQ
8
+ from .Openai import OPENAI
9
+ from .Openai import AsyncOPENAI
10
+ from .Koboldai import KOBOLDAI
11
+ from .Koboldai import AsyncKOBOLDAI
12
+ from .Blackboxai import BLACKBOXAI
13
+ from .Phind import PhindSearch
14
+ from .Phind import Phindv2
15
+ from .ai4chat import *
16
+ from .Gemini import GEMINI
17
+ from .Deepseek import DeepSeek
18
+ from .Deepinfra import DeepInfra
19
+ from .Farfalle import *
20
+ from .cleeai import *
21
+ from .OLLAMA import OLLAMA
22
+ from .Andi import AndiSearch
23
+ from .PizzaGPT import *
24
+ from .Llama3 import *
25
+ from .DARKAI import *
26
+ from .koala import *
27
+ from .RUBIKSAI import *
28
+ from .meta import *
29
+ from .DiscordRocks import *
30
+ from .julius import *
31
+ from .Youchat import *
32
+ from .yep import *
33
+ from .Cloudflare import *
34
+ from .turboseek import *
35
+ from .Free2GPT import *
36
+ from .EDITEE import *
37
+ from .TeachAnything import *
38
+ from .AI21 import *
39
+ from .Chatify import *
40
+ from .x0gpt import *
41
+ from .cerebras import *
42
+ from .lepton import *
43
+ from .geminiapi import *
44
+ from .elmo import *
45
+ from .Bing import *
46
+ from .GPTWeb import *
47
+ from .Netwrck import Netwrck
48
+ from .llamatutor import *
49
+ from .promptrefine import *
50
+ from .tutorai import *
51
+ from .ChatGPTES import *
52
+ from .Amigo import *
53
+ from .bagoodex import *
54
+ from .aimathgpt import *
55
+ from .gaurish import *
56
+ from .geminiprorealtime import *
57
+ from .NinjaChat import *
58
+ from .llmchat import *
59
+ from .talkai import *
60
+ from .askmyai import *
61
+ from .llama3mitril import *
62
+ from .Marcus import *
63
+ from .typegpt import *
64
+ from .mhystical import *
65
+ from .multichat import *
66
+ from .Jadve import *
67
+ __all__ = [
68
+ 'Farfalle',
69
+ 'LLAMA',
70
+ 'Cohere',
71
+ 'REKA',
72
+ 'GROQ',
73
+ 'AsyncGROQ',
74
+ 'OPENAI',
75
+ 'AsyncOPENAI',
76
+ 'KOBOLDAI',
77
+ 'AsyncKOBOLDAI',
78
+ 'BLACKBOXAI',
79
+ 'PhindSearch',
80
+ 'GEMINI',
81
+ 'DeepSeek',
82
+ 'DeepInfra',
83
+ 'AI4Chat',
84
+ 'Phindv2',
85
+ 'OLLAMA',
86
+ 'AndiSearch',
87
+ 'PIZZAGPT',
88
+ 'LLAMA3',
89
+ 'DARKAI',
90
+ 'KOALA',
91
+ 'RUBIKSAI',
92
+ 'Meta',
93
+ 'AskMyAI',
94
+ 'DiscordRocks',
95
+ 'PiAI',
96
+ 'Julius',
97
+ 'YouChat',
98
+ 'YEPCHAT',
99
+ 'Cloudflare',
100
+ 'TurboSeek',
101
+ 'Editee',
102
+ 'TeachAnything',
103
+ 'AI21',
104
+ 'Chatify',
105
+ 'X0GPT',
106
+ 'Cerebras',
107
+ 'Lepton',
108
+ 'GEMINIAPI',
109
+ 'Cleeai',
110
+ 'Elmo',
111
+ 'Free2GPT',
112
+ 'Bing',
113
+ 'GPTWeb',
114
+ 'Netwrck',
115
+ 'LlamaTutor',
116
+ 'PromptRefine',
117
+ 'TutorAI',
118
+ 'ChatGPTES',
119
+ 'AmigoChat',
120
+ 'Bagoodex',
121
+ 'AIMathGPT',
122
+ 'GaurishCerebras',
123
+ 'GeminiPro',
124
+ 'NinjaChat',
125
+ 'LLMChat',
126
+ 'Talkai',
127
+ 'Llama3Mitril',
128
+ 'Marcus',
129
+ 'TypeGPT',
130
+ 'Mhystical',
131
+ 'Netwrck',
132
+ 'MultiChatAI',
133
+ 'JadveOpenAI',
134
+ ]
webscout/Provider/meta.py CHANGED
@@ -248,7 +248,7 @@ def get_fb_session(email, password, proxies=None):
248
248
  return cookies
249
249
 
250
250
 
251
- def get_cookies() -> dict:
251
+ def get_cookies(self) -> dict:
252
252
  """
253
253
  Extracts necessary cookies from the Meta AI main page.
254
254
 
@@ -0,0 +1,230 @@
1
+ import requests
2
+ import json
3
+ from typing import Any, Dict, Optional, Generator
4
+
5
+ from webscout.AIutel import Optimizers
6
+ from webscout.AIutel import Conversation
7
+ from webscout.AIutel import AwesomePrompts
8
+ from webscout.AIbase import Provider
9
+ from webscout import exceptions
10
+
11
+ # Model configurations
12
+ MODEL_CONFIGS = {
13
+ "llama": {
14
+ "endpoint": "https://www.multichatai.com/api/chat/meta",
15
+ "models": {
16
+ "llama-3.1-70b-versatile": {"contextLength": 8192},
17
+ "llama-3.2-90b-vision-preview": {"contextLength": 32768},
18
+ "llama-3.2-11b-vision-preview": {"contextLength": 32768},
19
+ },
20
+ },
21
+ "alibaba": {
22
+ "endpoint": "https://www.multichatai.com/api/chat/alibaba",
23
+ "models": {
24
+ "Qwen/Qwen2.5-72B-Instruct": {"contextLength": 32768},
25
+ "Qwen/Qwen2.5-Coder-32B-Instruct": {"contextLength": 32768},
26
+ },
27
+ },
28
+ "cohere": {
29
+ "endpoint": "https://www.multichatai.com/api/chat/cohere",
30
+ "models": {"command-r": {"contextLength": 128000}},
31
+ },
32
+ }
33
+
34
+ class MultiChatAI(Provider):
35
+ """
36
+ A class to interact with the MultiChatAI API.
37
+ """
38
+
39
+ def __init__(
40
+ self,
41
+ is_conversation: bool = True,
42
+ max_tokens: int = 4000,
43
+ timeout: int = 30,
44
+ intro: str = None,
45
+ filepath: str = None,
46
+ update_file: bool = True,
47
+ proxies: dict = {},
48
+ history_offset: int = 10250,
49
+ act: str = None,
50
+ model: str = "llama-3.1-70b-versatile", # Default model
51
+ system_prompt: str = "You are a helpful assistant.",
52
+ temperature: float = 0.5,
53
+ presence_penalty: int = 0,
54
+ frequency_penalty: int = 0,
55
+ top_p: float = 1,
56
+ ):
57
+ """Initializes the MultiChatAI API client."""
58
+ self.session = requests.Session()
59
+ self.is_conversation = is_conversation
60
+ self.max_tokens_to_sample = max_tokens
61
+ self.timeout = timeout
62
+ self.last_response = {}
63
+ self.model = model
64
+ self.system_prompt = system_prompt
65
+ self.temperature = temperature
66
+ self.presence_penalty = presence_penalty
67
+ self.frequency_penalty = frequency_penalty
68
+ self.top_p = top_p
69
+ self.headers = {
70
+ "accept": "*/*",
71
+ "accept-language": "en-US,en;q=0.9",
72
+ "content-type": "text/plain;charset=UTF-8",
73
+ "origin": "https://www.multichatai.com",
74
+ "referer": "https://www.multichatai.com/",
75
+ "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
76
+ }
77
+ self.session.headers.update(self.headers)
78
+ self.session.proxies = proxies
79
+
80
+ self.__available_optimizers = (
81
+ method
82
+ for method in dir(Optimizers)
83
+ if callable(getattr(Optimizers, method)) and not method.startswith("__")
84
+ )
85
+ Conversation.intro = (
86
+ AwesomePrompts().get_act(
87
+ act, raise_not_found=True, default=None, case_insensitive=True
88
+ )
89
+ if act
90
+ else intro or Conversation.intro
91
+ )
92
+ self.conversation = Conversation(
93
+ is_conversation, self.max_tokens_to_sample, filepath, update_file
94
+ )
95
+ self.conversation.history_offset = history_offset
96
+
97
+ # Parse provider and model name
98
+ self.provider = "llama" # Default provider
99
+ self.model_name = self.model
100
+
101
+ # Check if model exists in any provider
102
+ model_found = False
103
+ for provider, config in MODEL_CONFIGS.items():
104
+ if self.model in config["models"]:
105
+ self.provider = provider
106
+ self.model_name = self.model
107
+ model_found = True
108
+ break
109
+
110
+ if not model_found:
111
+ available_models = []
112
+ for provider, config in MODEL_CONFIGS.items():
113
+ for model in config["models"].keys():
114
+ available_models.append(f"{provider}/{model}")
115
+ raise ValueError(
116
+ f"Invalid model: {self.model}\nAvailable models: {', '.join(available_models)}"
117
+ )
118
+
119
+ def _get_endpoint(self) -> str:
120
+ """Get the API endpoint for the current provider."""
121
+ return MODEL_CONFIGS[self.provider]["endpoint"]
122
+
123
+ def _get_chat_settings(self) -> Dict[str, Any]:
124
+ """Get chat settings for the current model."""
125
+ base_settings = MODEL_CONFIGS[self.provider]["models"][self.model_name]
126
+ return {
127
+ "model": self.model,
128
+ "prompt": self.system_prompt,
129
+ "temperature": self.temperature,
130
+ "contextLength": base_settings["contextLength"],
131
+ "includeProfileContext": True,
132
+ "includeWorkspaceInstructions": True,
133
+ "embeddingsProvider": "openai"
134
+ }
135
+
136
+ def ask(
137
+ self,
138
+ prompt: str,
139
+ stream: bool = False,
140
+ raw: bool = False,
141
+ optimizer: str = None,
142
+ conversationally: bool = False,
143
+ ) -> Dict[str, Any] | Generator:
144
+ """Sends a prompt to the MultiChatAI API and returns the response."""
145
+ conversation_prompt = self.conversation.gen_complete_prompt(prompt)
146
+ if optimizer:
147
+ if optimizer in self.__available_optimizers:
148
+ conversation_prompt = getattr(Optimizers, optimizer)(
149
+ conversation_prompt if conversationally else prompt
150
+ )
151
+ else:
152
+ raise exceptions.FailedToGenerateResponseError(
153
+ f"Optimizer is not one of {self.__available_optimizers}"
154
+ )
155
+
156
+ payload = {
157
+ "chatSettings": self._get_chat_settings(),
158
+ "messages": [
159
+ {"role": "system", "content": self.system_prompt},
160
+ {"role": "user", "content": conversation_prompt},
161
+ ],
162
+ "customModelId": "",
163
+ }
164
+
165
+ try:
166
+ response = self.session.post(
167
+ self._get_endpoint(),
168
+ headers=self.headers,
169
+ json=payload,
170
+ stream=True,
171
+ timeout=self.timeout,
172
+ )
173
+ response.raise_for_status()
174
+
175
+ full_response = ""
176
+ for line in response.iter_lines():
177
+ if line:
178
+ decoded_line = line.decode("utf-8")
179
+ if stream:
180
+ yield {"text": decoded_line}
181
+ full_response += decoded_line
182
+
183
+ self.last_response = {"text": full_response.strip()}
184
+ self.conversation.update_chat_history(prompt, full_response.strip())
185
+
186
+ if not stream:
187
+ return self.last_response
188
+
189
+ except requests.exceptions.RequestException as e:
190
+ raise exceptions.ProviderConnectionError(f"API request failed: {e}") from e
191
+ except json.JSONDecodeError as e:
192
+ raise exceptions.InvalidResponseError(f"Invalid JSON response: {e}") from e
193
+ except Exception as e:
194
+ raise exceptions.FailedToGenerateResponseError(f"Unexpected error: {e}") from e
195
+
196
+ def chat(
197
+ self,
198
+ prompt: str,
199
+ stream: bool = False,
200
+ optimizer: str = None,
201
+ conversationally: bool = False,
202
+ ) -> str | Generator[str, None, None]:
203
+ """Generate response."""
204
+ if stream:
205
+ for chunk in self.ask(
206
+ prompt, stream=True, optimizer=optimizer, conversationally=conversationally
207
+ ):
208
+ if isinstance(chunk, dict):
209
+ yield chunk.get("text", "")
210
+ else:
211
+ yield str(chunk)
212
+ else:
213
+ response = self.ask(
214
+ prompt, stream=False, optimizer=optimizer, conversationally=conversationally
215
+ )
216
+ return response.get("text", "") if isinstance(response, dict) else str(response)
217
+
218
+ def get_message(self, response: Dict[str, Any] | str) -> str:
219
+ """Retrieves message from response."""
220
+ if isinstance(response, dict):
221
+ return response.get("text", "")
222
+ return str(response)
223
+
224
+ if __name__ == "__main__":
225
+ from rich import print
226
+
227
+ ai = MultiChatAI(model="llama-3.1-70b-versatile")
228
+ response = ai.chat("What is the meaning of life?", stream=True)
229
+ for chunk in response:
230
+ print(chunk, end="", flush=True)
@@ -6,7 +6,7 @@ from webscout.AIutel import Optimizers
6
6
  from webscout.AIutel import Conversation
7
7
  from webscout.AIutel import AwesomePrompts
8
8
  from webscout.AIbase import Provider
9
- from fake_useragent import UserAgent
9
+ from webscout import LitAgent as UserAgent
10
10
 
11
11
  class PromptRefine(Provider):
12
12
  """
@@ -55,7 +55,7 @@ class PromptRefine(Provider):
55
55
  self.headers = {
56
56
  'origin': 'https://www.promptrefine.com',
57
57
  'referer': 'https://www.promptrefine.com/prompt/new',
58
- 'user-agent': UserAgent().random
58
+ 'user-agent': UserAgent().random()
59
59
  }
60
60
 
61
61
  self.__available_optimizers = (
@@ -1,5 +1,5 @@
1
1
  import uuid
2
- import requests
2
+ import cloudscraper
3
3
  import json
4
4
  from typing import Any, Dict, Optional, Generator
5
5
 
@@ -8,7 +8,7 @@ from webscout.AIutel import Conversation
8
8
  from webscout.AIutel import AwesomePrompts
9
9
  from webscout.AIbase import Provider
10
10
  from webscout import exceptions
11
-
11
+ from webscout.litagent import LitAgent
12
12
  class Talkai(Provider):
13
13
  """
14
14
  A class to interact with the Talkai.info API.
@@ -30,7 +30,7 @@ class Talkai(Provider):
30
30
  """
31
31
  Initializes the Talkai.info API with given parameters.
32
32
  """
33
- self.session = requests.Session()
33
+ self.session = cloudscraper.create_scraper()
34
34
  self.is_conversation = is_conversation
35
35
  self.max_tokens_to_sample = max_tokens
36
36
  self.api_endpoint = "https://talkai.info/chat/send/"
@@ -43,9 +43,8 @@ class Talkai(Provider):
43
43
  'Content-Type': 'application/json',
44
44
  'Origin': 'https://talkai.info',
45
45
  'Referer': 'https://talkai.info/chat/',
46
- 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36 Edg/130.0.0.0',
47
- 'sec-ch-ua': '"Chromium";v="130", "Microsoft Edge";v="130", "Not?A_Brand";v="99"',
48
- 'sec-ch-ua-platform': '"Windows"'
46
+ 'User-Agent': LitAgent().random(),
47
+ 'Cookie': '_csrf-front=e19e203a958c74e439261f6860535403324c9ab2ede76449e6407e54e1f366afa%3A2%3A%7Bi%3A0%3Bs%3A11%3A%22_csrf-front%22%3Bi%3A1%3Bs%3A32%3A%22QbnGY7XS5q9i3JnDvi6KRzrOk0D6XFnk%22%3B%7D; _ga=GA1.1.1383924142.1734246140; _ym_uid=1723397035198647017; _ym_d=1734246141; _ym_isad=1; _ym_visorc=b; talkai-front=ngbj23of1t0ujg2raoa3l57vqe; _ga_FB7V9WMN30=GS1.1.1734246139.1.1734246143.0.0.0'
49
48
  }
50
49
  self.__available_optimizers = (
51
50
  method
@@ -87,9 +86,7 @@ class Talkai(Provider):
87
86
  conversation_prompt = self.conversation.gen_complete_prompt(prompt)
88
87
  if optimizer:
89
88
  if optimizer in self.__available_optimizers:
90
- conversation_prompt = getattr(Optimizers, optimizer)(
91
- conversation_prompt if conversationally else prompt
92
- )
89
+ conversation_prompt = getattr(Optimizers, optimizer)(conversation_prompt if conversationally else prompt)
93
90
  else:
94
91
  raise exceptions.FailedToGenerateResponseError(
95
92
  f"Optimizer is not one of {self.__available_optimizers}"
@@ -111,7 +108,7 @@ class Talkai(Provider):
111
108
 
112
109
  def for_stream():
113
110
  try:
114
- with requests.post(self.api_endpoint, headers=self.headers, json=payload, stream=True, timeout=self.timeout) as response:
111
+ with self.session.post(self.api_endpoint, headers=self.headers, json=payload, stream=True, timeout=self.timeout) as response:
115
112
  response.raise_for_status()
116
113
 
117
114
  full_response = ""
@@ -120,7 +117,7 @@ class Talkai(Provider):
120
117
  decoded_line = line.decode('utf-8')
121
118
  if 'event: trylimit' in decoded_line:
122
119
  break # Stop if trylimit event is encountered
123
- if decoded_line.startswith('data: '):
120
+ if decoded_line.startswith('data:'):
124
121
  data = decoded_line[6:] # Remove 'data: ' prefix
125
122
  full_response += data
126
123
  yield data if raw else dict(text=data)
@@ -130,7 +127,7 @@ class Talkai(Provider):
130
127
  prompt, self.get_message(self.last_response)
131
128
  )
132
129
 
133
- except requests.exceptions.RequestException as e:
130
+ except cloudscraper.exceptions as e:
134
131
  raise exceptions.FailedToGenerateResponseError(f"Request failed: {e}")
135
132
 
136
133
  def for_non_stream():
@@ -193,4 +190,4 @@ if __name__ == "__main__":
193
190
  t = Talkai()
194
191
  resp = t.chat("write me about AI", stream=True)
195
192
  for chunk in resp:
196
- print(chunk, end="", flush=True)
193
+ print(chunk, end="", flush=True)
@@ -7,7 +7,7 @@ from webscout.AIutel import AwesomePrompts, sanitize_stream
7
7
  from webscout.AIbase import Provider, AsyncProvider
8
8
  from webscout import exceptions
9
9
  from typing import Any, AsyncGenerator, Dict
10
-
10
+ from webscout.litagent import LitAgent
11
11
 
12
12
  class TurboSeek(Provider):
13
13
  """
@@ -65,7 +65,7 @@ class TurboSeek(Provider):
65
65
  "sec-fetch-dest": "empty",
66
66
  "sec-fetch-mode": "cors",
67
67
  "sec-fetch-site": "same-origin",
68
- "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36 Edg/127.0.0.0"
68
+ "user-agent": LitAgent().random(),
69
69
  }
70
70
 
71
71
  self.__available_optimizers = (
@@ -145,7 +145,7 @@ class TurboSeek(Provider):
145
145
  data = json.loads(value[6:].decode('utf-8')) # Decode manually
146
146
  if "text" in data:
147
147
  streaming_text += data["text"]
148
- resp = dict(text=streaming_text)
148
+ resp = dict(text=data["text"])
149
149
  self.last_response.update(resp)
150
150
  yield value if raw else resp
151
151
  except json.decoder.JSONDecodeError:
@@ -210,6 +210,7 @@ class TurboSeek(Provider):
210
210
  if __name__ == '__main__':
211
211
  from rich import print
212
212
  ai = TurboSeek()
213
- response = ai.chat("hi")
213
+ response = ai.chat("hello buddy", stream=True)
214
214
  for chunk in response:
215
215
  print(chunk, end="", flush=True)
216
+