webscout 6.0-py3-none-any.whl → 6.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of webscout might be problematic.

Files changed (63)
  1. webscout/AIauto.py +77 -259
  2. webscout/Agents/Onlinesearcher.py +22 -10
  3. webscout/Agents/functioncall.py +2 -2
  4. webscout/Bard.py +21 -21
  5. webscout/Extra/autollama.py +37 -20
  6. webscout/Local/__init__.py +6 -7
  7. webscout/Local/formats.py +406 -194
  8. webscout/Local/model.py +1074 -477
  9. webscout/Local/samplers.py +108 -144
  10. webscout/Local/thread.py +251 -410
  11. webscout/Local/ui.py +401 -0
  12. webscout/Local/utils.py +338 -136
  13. webscout/Provider/Amigo.py +51 -38
  14. webscout/Provider/Deepseek.py +7 -6
  15. webscout/Provider/EDITEE.py +2 -2
  16. webscout/Provider/GPTWeb.py +1 -1
  17. webscout/Provider/Llama3.py +1 -1
  18. webscout/Provider/NinjaChat.py +200 -0
  19. webscout/Provider/OLLAMA.py +1 -1
  20. webscout/Provider/Perplexity.py +1 -1
  21. webscout/Provider/Reka.py +12 -5
  22. webscout/Provider/TTI/AIuncensored.py +103 -0
  23. webscout/Provider/TTI/Nexra.py +3 -3
  24. webscout/Provider/TTI/__init__.py +4 -2
  25. webscout/Provider/TTI/aiforce.py +2 -2
  26. webscout/Provider/TTI/imgninza.py +136 -0
  27. webscout/Provider/TTI/talkai.py +116 -0
  28. webscout/Provider/TeachAnything.py +0 -3
  29. webscout/Provider/Youchat.py +1 -1
  30. webscout/Provider/__init__.py +16 -12
  31. webscout/Provider/{ChatHub.py → aimathgpt.py} +72 -88
  32. webscout/Provider/cerebras.py +143 -123
  33. webscout/Provider/cleeai.py +1 -1
  34. webscout/Provider/felo_search.py +1 -1
  35. webscout/Provider/gaurish.py +207 -0
  36. webscout/Provider/geminiprorealtime.py +160 -0
  37. webscout/Provider/genspark.py +1 -1
  38. webscout/Provider/julius.py +8 -3
  39. webscout/Provider/learnfastai.py +1 -1
  40. webscout/Provider/{aigames.py → llmchat.py} +74 -84
  41. webscout/Provider/promptrefine.py +3 -1
  42. webscout/Provider/talkai.py +196 -0
  43. webscout/Provider/turboseek.py +3 -8
  44. webscout/Provider/tutorai.py +1 -1
  45. webscout/__init__.py +2 -43
  46. webscout/exceptions.py +5 -1
  47. webscout/tempid.py +4 -73
  48. webscout/utils.py +3 -0
  49. webscout/version.py +1 -1
  50. webscout/webai.py +1 -1
  51. webscout/webscout_search.py +154 -123
  52. {webscout-6.0.dist-info → webscout-6.2.dist-info}/METADATA +164 -245
  53. {webscout-6.0.dist-info → webscout-6.2.dist-info}/RECORD +57 -55
  54. webscout/Local/rawdog.py +0 -946
  55. webscout/Provider/BasedGPT.py +0 -214
  56. webscout/Provider/TTI/amigo.py +0 -148
  57. webscout/Provider/bixin.py +0 -264
  58. webscout/Provider/xdash.py +0 -182
  59. webscout/websx_search.py +0 -19
  60. {webscout-6.0.dist-info → webscout-6.2.dist-info}/LICENSE.md +0 -0
  61. {webscout-6.0.dist-info → webscout-6.2.dist-info}/WHEEL +0 -0
  62. {webscout-6.0.dist-info → webscout-6.2.dist-info}/entry_points.txt +0 -0
  63. {webscout-6.0.dist-info → webscout-6.2.dist-info}/top_level.txt +0 -0
webscout/AIauto.py CHANGED
@@ -1,123 +1,34 @@
-from webscout.AIbase import Provider, AsyncProvider
-from webscout.Provider.ThinkAnyAI import ThinkAnyAI
-from webscout.Provider.Llama import LLAMA
-
-from webscout.Provider.Koboldai import KOBOLDAI
-from webscout.Provider.Koboldai import AsyncKOBOLDAI
-
-from webscout.Provider.Perplexity import Perplexity
-from webscout.Provider.Blackboxai import BLACKBOXAI
-from webscout.Provider.Blackboxai import AsyncBLACKBOXAI
-from webscout.Provider.Phind import PhindSearch
-from webscout.Provider.Phind import Phindv2
-from webscout.Provider.yep import YEPCHAT
-from webscout.Provider.Poe import POE
-from webscout.Provider.BasedGPT import BasedGPT
-from webscout.Provider.Deepseek import DeepSeek
-from webscout.Provider.Deepinfra import DeepInfra, VLM, AsyncDeepInfra
-from webscout.Provider.OLLAMA import OLLAMA
-from webscout.Provider.Andi import AndiSearch
-from webscout.Provider.Llama3 import LLAMA3
-from webscout.Provider.DARKAI import DARKAI
-from webscout.Provider.koala import KOALA
-from webscout.Provider.RUBIKSAI import RUBIKSAI
-from webscout.Provider.meta import Meta
-
-from webscout.Provider.DiscordRocks import DiscordRocks
-from webscout.Provider.felo_search import Felo
-from webscout.Provider.xdash import XDASH
-from webscout.Provider.julius import Julius
-from webscout.Provider.Youchat import YouChat
-from webscout.Provider.Cloudflare import Cloudflare
-from webscout.Provider.turboseek import TurboSeek
-from webscout.Provider.NetFly import NetFly
-from webscout.Provider.EDITEE import Editee
-from webscout.Provider.Chatify import Chatify
-from webscout.Provider.PI import PiAI
-from webscout.g4f import GPT4FREE, AsyncGPT4FREE
-from webscout.g4f import TestProviders
+from webscout.AIbase import Provider
+from webscout.g4f import GPT4FREE, TestProviders
 from webscout.exceptions import AllProvidersFailure
-from typing import AsyncGenerator
-
-from typing import Union
-from typing import Any
+from typing import Union, Any, Dict, Generator
+import importlib
+import pkgutil
 import logging
-
-
-provider_map: dict[
-    str,
-    Union[
-        ThinkAnyAI,
-        LLAMA,
-        KOBOLDAI,
-        Perplexity,
-        BLACKBOXAI,
-        PhindSearch,
-        Phindv2,
-        YEPCHAT,
-        POE,
-        BasedGPT,
-        DeepSeek,
-        DeepInfra,
-        VLM,
-        GPT4FREE,
-        OLLAMA,
-        AndiSearch,
-        LLAMA3,
-        DARKAI,
-        KOALA,
-        RUBIKSAI,
-        Meta,
-
-        DiscordRocks,
-        Felo,
-        XDASH,
-        Julius,
-        YouChat,
-        Cloudflare,
-        TurboSeek,
-        NetFly,
-        Editee,
-        Chatify,
-        PiAI,
-    ],
-] = {
-    "ThinkAnyAI": ThinkAnyAI,
-    "LLAMA2": LLAMA,
-    "KOBOLDAI": KOBOLDAI,
-    "PERPLEXITY": Perplexity,
-    "BLACKBOXAI": BLACKBOXAI,
-    "PhindSearch": PhindSearch,
-    "Phindv2": Phindv2,
-    "YEPCHAT": YEPCHAT,
-
-    "POE": POE,
-    "BasedGPT": BasedGPT,
-    "DeepSeek": DeepSeek,
-    "DeepInfra": DeepInfra,
-    "VLM": VLM,
-    "gpt4free": GPT4FREE,
-    "ollama": OLLAMA,
-    "andi": AndiSearch,
-    "llama3": LLAMA3,
-    "darkai": DARKAI,
-    "koala": KOALA,
-    "rubiksai": RUBIKSAI,
-    "meta": Meta,
-
-    "discordrocks": DiscordRocks,
-    "felo": Felo,
-    "xdash": XDASH,
-    "julius": Julius,
-    "you": YouChat,
-    "cloudflare": Cloudflare,
-    "turboseek": TurboSeek,
-    "netfly": NetFly,
-    "editee": Editee,
-    # "chatify": Chatify,
-    "pi": PiAI,
-}
-
+import random
+import inspect
+
+def load_providers():
+    provider_map = {}
+    api_key_providers = set()
+    provider_package = importlib.import_module("webscout.Provider")
+
+    for _, module_name, _ in pkgutil.iter_modules(provider_package.__path__):
+        try:
+            module = importlib.import_module(f"webscout.Provider.{module_name}")
+            for attr_name in dir(module):
+                attr = getattr(module, attr_name)
+                if isinstance(attr, type) and issubclass(attr, Provider) and attr != Provider:
+                    provider_map[attr_name.upper()] = attr
+                    # Check if the provider needs an API key
+                    if 'api_key' in inspect.signature(attr.__init__).parameters:
+                        api_key_providers.add(attr_name.upper())
+        except Exception as e:
+            logging.warning(f"Failed to load provider {module_name}: {e}")
+
+    return provider_map, api_key_providers
+
+provider_map, api_key_providers = load_providers()

 class AUTO(Provider):
     def __init__(
@@ -133,57 +44,8 @@ class AUTO(Provider):
         act: str = None,
         exclude: list[str] = [],
     ):
-        """Instantiates AUTO
-
-        Args:
-            is_conversation (bool, optional): Flag for chatting conversationally. Defaults to True
-            max_tokens (int, optional): Maximum number of tokens to be generated upon completion. Defaults to 600.
-            timeout (int, optional): Http request timeout. Defaults to 30.
-            intro (str, optional): Conversation introductory prompt. Defaults to None.
-            filepath (str, optional): Path to file containing conversation history. Defaults to None.
-            update_file (bool, optional): Add new prompts and responses to the file. Defaults to True.
-            proxies (dict, optional): Http request proxies. Defaults to {}.
-            history_offset (int, optional): Limit conversation history to this number of last texts. Defaults to 10250.
-            act (str|int, optional): Awesome prompt key or index. (Used as intro). Defaults to None.
-            exclude(list[str], optional): List of providers to be excluded. Defaults to [].
-        """
-        self.provider: Union[
-            ThinkAnyAI,
-            LLAMA,
-            KOBOLDAI,
-            Perplexity,
-            BLACKBOXAI,
-            PhindSearch,
-            Phindv2,
-            YEPCHAT,
-
-            POE,
-            BasedGPT,
-            DeepSeek,
-            DeepInfra,
-            VLM,
-            GPT4FREE,
-            OLLAMA,
-            AndiSearch,
-            LLAMA3,
-            DARKAI,
-            KOALA,
-            RUBIKSAI,
-            Meta,
-
-            DiscordRocks,
-            Felo,
-            XDASH,
-            Julius,
-            YouChat,
-            Cloudflare,
-            TurboSeek,
-            NetFly,
-            Editee,
-            # Chatify,
-            PiAI,
-        ] = None
-        self.provider_name: str = None
+        self.provider = None
+        self.provider_name = None
         self.is_conversation = is_conversation
         self.max_tokens = max_tokens
         self.timeout = timeout
@@ -193,15 +55,15 @@
         self.proxies = proxies
         self.history_offset = history_offset
         self.act = act
-        self.exclude = exclude
+        self.exclude = [e.upper() for e in exclude]

     @property
     def last_response(self) -> dict[str, Any]:
-        return self.provider.last_response
+        return self.provider.last_response if self.provider else {}

     @property
     def conversation(self) -> object:
-        return self.provider.conversation
+        return self.provider.conversation if self.provider else None

     def ask(
         self,
@@ -211,20 +73,8 @@ class AUTO(Provider):
         optimizer: str = None,
         conversationally: bool = False,
         run_new_test: bool = False,
-    ) -> dict:
-        """Chat with AI
-
-        Args:
-            prompt (str): Prompt to be send.
-            stream (bool, optional): Flag for streaming response. Defaults to False.
-            raw (bool, optional): Stream back raw response as received. Defaults to False.
-            optimizer (str, optional): Prompt optimizer name - `[code, shell_command]`. Defaults to None.
-            conversationally (bool, optional): Chat conversationally when using optimizer. Defaults to False.
-            run_new_test (bool, optional): Perform new test on g4f-based providers. Defaults to False.
-        Returns:
-            dict : {}
-        """
-        ask_kwargs: dict[str, Union[str, bool]] = {
+    ) -> Union[Dict, Generator]:
+        ask_kwargs = {
             "prompt": prompt,
             "stream": stream,
             "raw": raw,
@@ -232,14 +82,20 @@
             "conversationally": conversationally,
         }

-        # webscout-based providers
-        for provider_name, provider_obj in provider_map.items():
-            # continue
-            if provider_name in self.exclude:
-                continue
+        # Filter out API key required providers and excluded providers
+        available_providers = [
+            (name, cls) for name, cls in provider_map.items()
+            if name not in api_key_providers and name not in self.exclude
+        ]
+
+        # Shuffle the list of available providers
+        random.shuffle(available_providers)
+
+        # Try webscout-based providers
+        for provider_name, provider_class in available_providers:
             try:
                 self.provider_name = f"webscout-{provider_name}"
-                self.provider = provider_obj(
+                self.provider = provider_class(
                     is_conversation=self.is_conversation,
                     max_tokens=self.max_tokens,
                     timeout=self.timeout,
@@ -251,26 +107,19 @@
                     act=self.act,
                 )

-                def for_stream():
-                    for chunk in self.provider.ask(**ask_kwargs):
-                        yield chunk
-
-                def for_non_stream():
-                    return self.provider.ask(**ask_kwargs)
-
-                return for_stream() if stream else for_non_stream()
+                return self.provider.ask(**ask_kwargs)

             except Exception as e:
                 logging.debug(
                     f"Failed to generate response using provider {provider_name} - {e}"
                 )

-        # g4f-based providers
-
-        for provider_info in TestProviders(timeout=self.timeout).get_results(
-            run=run_new_test
-        ):
-            if provider_info["name"] in self.exclude:
+        # Try GPT4FREE providers
+        gpt4free_providers = TestProviders(timeout=self.timeout).get_results(run=run_new_test)
+        random.shuffle(gpt4free_providers)
+
+        for provider_info in gpt4free_providers:
+            if provider_info["name"].upper() in self.exclude:
                 continue
             try:
                 self.provider_name = f"g4f-{provider_info['name']}"
@@ -286,23 +135,15 @@
                     act=self.act,
                 )

-                def for_stream():
-                    for chunk in self.provider.ask(**ask_kwargs):
-                        yield chunk
-
-                def for_non_stream():
-                    return self.provider.ask(**ask_kwargs)
-
-                return for_stream() if stream else for_non_stream()
+                print(f"Using provider: {self.provider_name}")
+                return self.provider.ask(**ask_kwargs)

             except Exception as e:
                 logging.debug(
-                    f"Failed to generate response using GPT4FREE-base provider {provider_name} - {e}"
+                    f"Failed to generate response using GPT4FREE-based provider {provider_info['name']} - {e}"
                 )

-        raise AllProvidersFailure(
-            "None of the providers generated response successfully."
-        )
+        raise AllProvidersFailure("None of the providers generated response successfully.")

     def chat(
         self,
@@ -311,48 +152,25 @@ class AUTO(Provider):
         optimizer: str = None,
         conversationally: bool = False,
         run_new_test: bool = False,
-    ) -> str:
-        """Generate response `str`
-        Args:
-            prompt (str): Prompt to be send.
-            stream (bool, optional): Flag for streaming response. Defaults to False.
-            optimizer (str, optional): Prompt optimizer name - `[code, shell_command]`. Defaults to None.
-            conversationally (bool, optional): Chat conversationally when using optimizer. Defaults to False.
-            run_new_test (bool, optional): Perform new test on g4f-based providers. Defaults to False.
-        Returns:
-            str: Response generated
-        """
-
-        def for_stream():
-            for response in self.ask(
-                prompt,
-                True,
-                optimizer=optimizer,
-                conversationally=conversationally,
-                run_new_test=run_new_test,
-            ):
-                yield self.get_message(response)
-
-        def for_non_stream():
-            ask_response = self.ask(
-                prompt,
-                False,
-                optimizer=optimizer,
-                conversationally=conversationally,
-                run_new_test=run_new_test,
-            )
-            return self.get_message(ask_response)
-
-        return for_stream() if stream else for_non_stream()
+    ) -> Union[str, Generator[str, None, None]]:
+        response = self.ask(
+            prompt,
+            stream,
+            optimizer=optimizer,
+            conversationally=conversationally,
+            run_new_test=run_new_test,
+        )
+
+        if stream:
+            return (self.get_message(chunk) for chunk in response)
+        else:
+            return self.get_message(response)

     def get_message(self, response: dict) -> str:
-        """Retrieves message only from response
-
-        Args:
-            response (dict): Response generated by `self.ask`
-
-        Returns:
-            str: Message extracted
-        """
         assert self.provider is not None, "Chat with AI first"
         return self.provider.get_message(response)
+if __name__ == "__main__":
+    auto = AUTO()
+
+    response = auto.chat("Hello, how are you?")
+    print(response)
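The AIauto.py rewrite replaces the hand-maintained `provider_map` with runtime discovery (`pkgutil.iter_modules` plus `importlib` and `inspect`), skips any provider whose `__init__` expects an `api_key`, upper-cases the `exclude` list, and makes `chat()` hand back a generator when `stream=True`. A minimal usage sketch under those assumptions follows; the excluded provider name is only an example, and if `AUTO` is no longer re-exported from the package root in 6.2 it can be imported from `webscout.AIauto` instead.

```python
# Sketch only: exercises the new AUTO fallback chain shown in the diff above.
from webscout import AUTO  # or: from webscout.AIauto import AUTO

auto = AUTO(exclude=["editee"])  # exclusions are upper-cased internally, so case does not matter

# Non-streaming: chat() returns a single string.
print(auto.chat("Hello, how are you?"))

# Streaming: chat() now returns a generator of message chunks.
for chunk in auto.chat("Tell me a short joke", stream=True):
    print(chunk, end="", flush=True)
```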
webscout/Agents/Onlinesearcher.py CHANGED
@@ -2,14 +2,14 @@ import json
 import httpx
 from bs4 import BeautifulSoup
 from typing import List, Dict
-from webscout import WEBS, GEMINIAPI
+from webscout import GoogleS, GEMINIAPI
 import re
 from concurrent.futures import ThreadPoolExecutor, as_completed


 class WebSearchAgent:
     def __init__(self):
-        self.webs = WEBS()
+        self.webs = GoogleS()
         self.ai = GEMINIAPI(is_conversation=False, api_key='AIzaSyAYlT5-V0MXZwaLYpXCF1Z-Yvy_tx1jylA')

     def generate_search_queries(self, information: str, num_queries: int = 10) -> List[str]:
@@ -62,16 +62,16 @@ Now, generate the optimal search queries: """
         else:
             return [information]

-    def search(self, information: str, region: str = 'wt-wt', safesearch: str = 'off',
-               timelimit: str = 'y', max_results: int = 10) -> List[Dict]:
+    def search(self, information: str, region: str = 'wt-wt', safe: str = 'off',
+               max_results: int = 10) -> List[Dict]:
         search_queries = self.generate_search_queries(information, num_queries=10)
         all_results = []

         for query in search_queries:
             results = []
             with self.webs as webs:
-                for result in webs.text(query, region=region, safesearch=safesearch,
-                                        timelimit=timelimit, max_results=max_results):
+                for result in webs.search(query, region=region, safe=safe,
+                                          max_results=max_results):
                     results.append(result)
             all_results.extend(results)

@@ -113,7 +113,7 @@ Now, generate the optimal search queries: """
 class OnlineSearcher:
     def __init__(self):
         self.agent = WebSearchAgent()
-        self.ai = GEMINIAPI(is_conversation=False, api_key='AIzaSyAYlT5-V0MXZwaLYpXCF1Z-Yvy_tx1jylA')
+        self.ai = GEMINIAPI(is_conversation=False, api_key='GOOGLE GEMINI API')

     def answer_question(self, question: str) -> None:
         search_results = self.agent.search(question, max_results=10)
@@ -148,7 +148,7 @@ Instructions:
 Your response should be informative, accurate, and properly sourced when possible. Begin your answer now: """

         for chunk in self.ai.chat(prompt, stream=True):
-            print(chunk, end='', flush=True) # Print each chunk in real-time
+            print(chunk, end='', flush=True)



@@ -161,10 +161,22 @@ if __name__ == "__main__":
             if question.lower() == 'quit':
                 break
             print("=" * 50)
-            assistant.answer_question(question) # The answer is printed in real-time
+            assistant.answer_question(question)
             print("=" * 50)
         except KeyboardInterrupt:
             print("\nExiting.")
             break
         except Exception as e:
-            print(f"An error occurred: {e}")
+            print(f"An error occurred: {e}")
+
+"""
+def format_prompt(messages: Messages, add_special_tokens=False) -> str:
+
+    if not add_special_tokens and len(messages) <= 1:
+        return messages[0]["content"]
+    formatted = "\n".join([
+        f'{message["role"].capitalize()}: {message["content"]}'
+        for message in messages
+    ])
+    return f"{formatted}\nAssistant:
+"""
webscout/Agents/functioncall.py CHANGED
@@ -4,12 +4,12 @@ import json
 import time
 from typing import Any, Dict, Optional
 import requests
-from webscout import WEBS, DeepInfra
+from webscout import WEBS, GEMINIAPI

 class FunctionCallingAgent:
     def __init__(self,
                  tools: list = None):
-        self.ai = DeepInfra(timeout=300, intro=None)
+        self.ai = GEMINIAPI(api_key="Gemini api key", timeout=300, intro=None)
         self.tools = tools if tools is not None else []
         self.knowledge_cutoff = "September 2022"

webscout/Bard.py CHANGED
@@ -6,8 +6,7 @@ import random
 import re
 import string
 import sys
-from typing import Dict
-from typing import List
+from typing import Dict, List, Tuple

 import httpx
 from prompt_toolkit import prompt
@@ -48,6 +47,22 @@ def __get_input(
     )


+def load_cookies(cookie_path: str) -> Tuple[str, str]:
+    """Loads cookies from the provided JSON file."""
+    try:
+        with open(cookie_path, 'r') as file:
+            cookies = json.load(file)
+        session_auth1 = next(item['value'] for item in cookies if item['name'] == '__Secure-1PSID')
+        session_auth2 = next(item['value'] for item in cookies if item['name'] == '__Secure-1PSIDTS')
+        return session_auth1, session_auth2
+    except FileNotFoundError:
+        raise Exception(f"Cookie file not found at path: {cookie_path}")
+    except json.JSONDecodeError:
+        raise Exception("Invalid JSON format in the cookie file.")
+    except StopIteration:
+        raise Exception("Required cookies not found in the cookie file.")
+
+
 class Chatbot:
     """
     Synchronous wrapper for the AsyncChatbot class.
@@ -55,14 +70,14 @@ class Chatbot:

     def __init__(
         self,
-        secure_1psid: str,
-        secure_1psidts: str,
+        cookie_path: str,
         proxy: dict = None,
         timeout: int = 20,
     ):
         self.loop = asyncio.get_event_loop()
+        self.secure_1psid, self.secure_1psidts = load_cookies(cookie_path)
         self.async_chatbot = self.loop.run_until_complete(
-            AsyncChatbot.create(secure_1psid, secure_1psidts, proxy, timeout),
+            AsyncChatbot.create(self.secure_1psid, self.secure_1psidts, proxy, timeout),
         )

     def save_conversation(self, file_path: str, conversation_name: str):
@@ -309,22 +324,7 @@ class AsyncChatbot:

 if __name__ == "__main__":
     import sys
-    console = Console()
-    if os.getenv("Gemini_QUICK"):
-        Secure_1PSID = os.getenv("Gemini__Secure_1PSID")
-        secure_1psidts = os.getenv("Gemini__secure_1psidts")
-        if not (Secure_1PSID and secure_1psidts):
-            print(
-                "Gemini__Secure_1PSID or Gemini__secure_1psidts environment variable not set.",
-            )
-            sys.exit(1)
-        chatbot = Chatbot(Secure_1PSID, secure_1psidts)
-        # Join arguments into a single string
-        MESSAGE = " ".join(sys.argv[1:])
-        response = chatbot.ask(MESSAGE)
-        console.print(Markdown(response["content"]))
-        console.print(response["images"] if response.get("images") else "")
-        sys.exit(0)
+    sys.exit(0)
     parser = argparse.ArgumentParser()
     parser.add_argument(
         "--session",