webscout 8.2.3__py3-none-any.whl → 8.2.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (122)
  1. webscout/AIutel.py +226 -14
  2. webscout/Bard.py +579 -206
  3. webscout/DWEBS.py +78 -35
  4. webscout/Extra/gguf.py +2 -0
  5. webscout/Extra/tempmail/base.py +1 -1
  6. webscout/Provider/AISEARCH/hika_search.py +4 -0
  7. webscout/Provider/AISEARCH/scira_search.py +2 -5
  8. webscout/Provider/Aitopia.py +75 -51
  9. webscout/Provider/AllenAI.py +181 -147
  10. webscout/Provider/ChatGPTClone.py +97 -86
  11. webscout/Provider/ChatSandbox.py +342 -0
  12. webscout/Provider/Cloudflare.py +79 -32
  13. webscout/Provider/Deepinfra.py +135 -94
  14. webscout/Provider/ElectronHub.py +103 -39
  15. webscout/Provider/ExaChat.py +36 -20
  16. webscout/Provider/GPTWeb.py +103 -47
  17. webscout/Provider/GithubChat.py +52 -49
  18. webscout/Provider/GizAI.py +283 -0
  19. webscout/Provider/Glider.py +39 -28
  20. webscout/Provider/Groq.py +222 -91
  21. webscout/Provider/HeckAI.py +93 -69
  22. webscout/Provider/HuggingFaceChat.py +113 -106
  23. webscout/Provider/Hunyuan.py +94 -83
  24. webscout/Provider/Jadve.py +104 -79
  25. webscout/Provider/LambdaChat.py +142 -123
  26. webscout/Provider/Llama3.py +94 -39
  27. webscout/Provider/MCPCore.py +315 -0
  28. webscout/Provider/Marcus.py +95 -37
  29. webscout/Provider/Netwrck.py +94 -52
  30. webscout/Provider/OPENAI/__init__.py +4 -1
  31. webscout/Provider/OPENAI/ai4chat.py +286 -0
  32. webscout/Provider/OPENAI/chatgptclone.py +35 -14
  33. webscout/Provider/OPENAI/deepinfra.py +37 -0
  34. webscout/Provider/OPENAI/exachat.py +4 -0
  35. webscout/Provider/OPENAI/groq.py +354 -0
  36. webscout/Provider/OPENAI/heckai.py +6 -2
  37. webscout/Provider/OPENAI/mcpcore.py +376 -0
  38. webscout/Provider/OPENAI/multichat.py +368 -0
  39. webscout/Provider/OPENAI/netwrck.py +3 -1
  40. webscout/Provider/OPENAI/scirachat.py +2 -4
  41. webscout/Provider/OPENAI/textpollinations.py +20 -22
  42. webscout/Provider/OPENAI/toolbaz.py +1 -0
  43. webscout/Provider/OpenGPT.py +48 -38
  44. webscout/Provider/PI.py +178 -93
  45. webscout/Provider/PizzaGPT.py +66 -36
  46. webscout/Provider/StandardInput.py +42 -30
  47. webscout/Provider/TeachAnything.py +95 -52
  48. webscout/Provider/TextPollinationsAI.py +138 -78
  49. webscout/Provider/TwoAI.py +162 -81
  50. webscout/Provider/TypliAI.py +305 -0
  51. webscout/Provider/Venice.py +97 -58
  52. webscout/Provider/VercelAI.py +33 -14
  53. webscout/Provider/WiseCat.py +65 -28
  54. webscout/Provider/Writecream.py +37 -11
  55. webscout/Provider/WritingMate.py +135 -63
  56. webscout/Provider/__init__.py +9 -27
  57. webscout/Provider/ai4chat.py +6 -7
  58. webscout/Provider/asksteve.py +53 -44
  59. webscout/Provider/cerebras.py +77 -31
  60. webscout/Provider/chatglm.py +47 -37
  61. webscout/Provider/copilot.py +0 -3
  62. webscout/Provider/elmo.py +109 -60
  63. webscout/Provider/granite.py +102 -54
  64. webscout/Provider/hermes.py +95 -48
  65. webscout/Provider/koala.py +1 -1
  66. webscout/Provider/learnfastai.py +113 -54
  67. webscout/Provider/llama3mitril.py +86 -51
  68. webscout/Provider/llmchat.py +88 -46
  69. webscout/Provider/llmchatco.py +110 -115
  70. webscout/Provider/meta.py +41 -37
  71. webscout/Provider/multichat.py +67 -28
  72. webscout/Provider/scira_chat.py +49 -30
  73. webscout/Provider/scnet.py +106 -53
  74. webscout/Provider/searchchat.py +87 -88
  75. webscout/Provider/sonus.py +113 -63
  76. webscout/Provider/toolbaz.py +115 -82
  77. webscout/Provider/turboseek.py +90 -43
  78. webscout/Provider/tutorai.py +82 -64
  79. webscout/Provider/typefully.py +85 -35
  80. webscout/Provider/typegpt.py +118 -61
  81. webscout/Provider/uncovr.py +132 -76
  82. webscout/Provider/x0gpt.py +69 -26
  83. webscout/Provider/yep.py +79 -66
  84. webscout/cli.py +256 -0
  85. webscout/conversation.py +34 -22
  86. webscout/exceptions.py +23 -0
  87. webscout/prompt_manager.py +56 -42
  88. webscout/version.py +1 -1
  89. webscout/webscout_search.py +65 -47
  90. webscout/webscout_search_async.py +81 -126
  91. webscout/yep_search.py +93 -43
  92. {webscout-8.2.3.dist-info → webscout-8.2.5.dist-info}/METADATA +183 -50
  93. {webscout-8.2.3.dist-info → webscout-8.2.5.dist-info}/RECORD +97 -113
  94. {webscout-8.2.3.dist-info → webscout-8.2.5.dist-info}/WHEEL +1 -1
  95. webscout-8.2.5.dist-info/entry_points.txt +3 -0
  96. {webscout-8.2.3.dist-info → webscout-8.2.5.dist-info}/top_level.txt +0 -1
  97. inferno/__init__.py +0 -6
  98. inferno/__main__.py +0 -9
  99. inferno/cli.py +0 -6
  100. webscout/Local/__init__.py +0 -12
  101. webscout/Local/__main__.py +0 -9
  102. webscout/Local/api.py +0 -576
  103. webscout/Local/cli.py +0 -516
  104. webscout/Local/config.py +0 -75
  105. webscout/Local/llm.py +0 -287
  106. webscout/Local/model_manager.py +0 -253
  107. webscout/Local/server.py +0 -721
  108. webscout/Local/utils.py +0 -93
  109. webscout/Provider/C4ai.py +0 -432
  110. webscout/Provider/ChatGPTES.py +0 -237
  111. webscout/Provider/Chatify.py +0 -175
  112. webscout/Provider/DeepSeek.py +0 -196
  113. webscout/Provider/Llama.py +0 -200
  114. webscout/Provider/Phind.py +0 -535
  115. webscout/Provider/WebSim.py +0 -228
  116. webscout/Provider/askmyai.py +0 -158
  117. webscout/Provider/gaurish.py +0 -244
  118. webscout/Provider/labyrinth.py +0 -340
  119. webscout/Provider/lepton.py +0 -194
  120. webscout/Provider/llamatutor.py +0 -192
  121. webscout-8.2.3.dist-info/entry_points.txt +0 -5
  122. {webscout-8.2.3.dist-info → webscout-8.2.5.dist-info/licenses}/LICENSE.md +0 -0
webscout/Bard.py CHANGED
@@ -1,5 +1,6 @@
+ # -*- coding: utf-8 -*-
  #########################################
- # Code Generated by o3-mini-high
+ # Code Modified to use curl_cffi
  #########################################
  import asyncio
  import json
@@ -12,11 +13,15 @@ from pathlib import Path
  from datetime import datetime
  from typing import Dict, List, Tuple, Union, Optional

- import httpx
- from httpx import AsyncClient, HTTPStatusError
+ # Use curl_cffi for requests
+ from curl_cffi import CurlError
+ from curl_cffi.requests import AsyncSession
+ # Import common request exceptions (curl_cffi often wraps these)
+ from requests.exceptions import RequestException, Timeout, HTTPError

  # For image models using validation. Adjust based on organization internal pydantic.
- from pydantic import BaseModel, validator
+ # Updated import for Pydantic V2
+ from pydantic import BaseModel, field_validator

  # Rich is retained for logging within image methods.
  from rich.console import Console
@@ -41,7 +46,8 @@ class Headers(Enum):
  "Host": "gemini.google.com",
  "Origin": "https://gemini.google.com",
  "Referer": "https://gemini.google.com/",
- "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
+ # User-Agent will be handled by curl_cffi impersonate
+ # "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
  "X-Same-Domain": "1",
  }
  ROTATE_COOKIES = {
@@ -50,6 +56,7 @@ class Headers(Enum):
  UPLOAD = {"Push-ID": "feeds/mcudyrk2a4khkz"}

  class Model(Enum):
+ # Model definitions remain the same
  UNSPECIFIED = ("unspecified", {}, False)
  G_2_0_FLASH = (
  "gemini-2.0-flash",
@@ -96,34 +103,66 @@ class Model(Enum):
  f"Unknown model name: {name}. Available models: {', '.join([model.model_name for model in cls])}"
  )

- async def upload_file(file: Union[bytes, str, Path], proxy: Optional[str] = None) -> str:
+ async def upload_file(
+ file: Union[bytes, str, Path],
+ proxy: Optional[Union[str, Dict[str, str]]] = None,
+ impersonate: str = "chrome110" # Added impersonate
+ ) -> str:
  """
- Upload a file to Google's server and return its identifier.
+ Upload a file to Google's server and return its identifier using curl_cffi.

  Parameters:
  file: bytes | str | Path
  File data in bytes, or path to the file to be uploaded.
- proxy: str, optional
- Proxy URL.
+ proxy: str | Dict, optional
+ Proxy URL or dictionary.
+ impersonate: str, optional
+ Browser profile for curl_cffi to impersonate. Defaults to "chrome110".

  Returns:
  str: Identifier of the uploaded file.
  Raises:
- httpx.HTTPStatusError: If the upload request failed.
+ HTTPError: If the upload request failed.
+ RequestException: For other network-related errors.
  """
  if not isinstance(file, bytes):
- with open(file, "rb") as f:
- file = f.read()
-
- async with AsyncClient(http2=True, proxies=proxy) as client:
- response = await client.post(
- url=Endpoint.UPLOAD.value,
- headers=Headers.UPLOAD.value,
- files={"file": file},
- follow_redirects=True,
- )
- response.raise_for_status()
- return response.text
+ file_path = Path(file)
+ if not file_path.is_file():
+ raise FileNotFoundError(f"File not found at path: {file}")
+ with open(file_path, "rb") as f:
+ file_content = f.read()
+ else:
+ file_content = file
+
+ # Prepare proxy dictionary for curl_cffi
+ proxies_dict = None
+ if isinstance(proxy, str):
+ proxies_dict = {"http": proxy, "https": proxy} # curl_cffi uses http/https keys
+ elif isinstance(proxy, dict):
+ proxies_dict = proxy # Assume it's already in the correct format
+
+ try:
+ # Use AsyncSession from curl_cffi
+ async with AsyncSession(
+ proxies=proxies_dict,
+ impersonate=impersonate,
+ headers=Headers.UPLOAD.value, # Pass headers directly
+ # follow_redirects=True is default in curl_cffi
+ ) as client:
+ response = await client.post(
+ url=Endpoint.UPLOAD.value,
+ # headers=Headers.UPLOAD.value, # Headers passed in session
+ files={"file": file_content},
+ # follow_redirects=True, # Default
+ )
+ response.raise_for_status() # Raises HTTPError for bad responses
+ return response.text
+ except HTTPError as e:
+ console.log(f"[red]HTTP error during file upload: {e.response.status_code} {e}[/red]")
+ raise # Re-raise HTTPError
+ except (RequestException, CurlError) as e:
+ console.log(f"[red]Network error during file upload: {e}[/red]")
+ raise # Re-raise other request errors

  #########################################
  # Cookie loading and Chatbot classes
@@ -132,17 +171,25 @@ async def upload_file(file: Union[bytes, str, Path], proxy: Optional[str] = None
  def load_cookies(cookie_path: str) -> Tuple[str, str]:
  """Loads cookies from the provided JSON file."""
  try:
- with open(cookie_path, 'r') as file:
+ with open(cookie_path, 'r', encoding='utf-8') as file: # Added encoding
  cookies = json.load(file)
- session_auth1 = next(item['value'] for item in cookies if item['name'] == '__Secure-1PSID')
- session_auth2 = next(item['value'] for item in cookies if item['name'] == '__Secure-1PSIDTS')
+ # Handle potential variations in cookie names (case-insensitivity)
+ session_auth1 = next((item['value'] for item in cookies if item['name'].upper() == '__SECURE-1PSID'), None)
+ session_auth2 = next((item['value'] for item in cookies if item['name'].upper() == '__SECURE-1PSIDTS'), None)
+
+ if not session_auth1 or not session_auth2:
+ raise StopIteration("Required cookies (__Secure-1PSID or __Secure-1PSIDTS) not found.")
+
  return session_auth1, session_auth2
  except FileNotFoundError:
  raise Exception(f"Cookie file not found at path: {cookie_path}")
  except json.JSONDecodeError:
  raise Exception("Invalid JSON format in the cookie file.")
- except StopIteration:
- raise Exception("Required cookies not found in the cookie file.")
+ except StopIteration as e:
+ raise Exception(f"{e} Check the cookie file format and content.")
+ except Exception as e: # Catch other potential errors
+ raise Exception(f"An unexpected error occurred while loading cookies: {e}")
+

  class Chatbot:
  """
@@ -151,14 +198,22 @@ class Chatbot:
  def __init__(
  self,
  cookie_path: str,
- proxy: dict = None,
+ proxy: Optional[Union[str, Dict[str, str]]] = None, # Allow string or dict proxy
  timeout: int = 20,
- model: Model = Model.UNSPECIFIED
+ model: Model = Model.UNSPECIFIED,
+ impersonate: str = "chrome110" # Added impersonate
  ):
- self.loop = asyncio.get_event_loop()
+ # Use asyncio.run() for cleaner async execution in sync context
+ # Handle potential RuntimeError if an event loop is already running
+ try:
+ self.loop = asyncio.get_running_loop()
+ except RuntimeError:
+ self.loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(self.loop)
+
  self.secure_1psid, self.secure_1psidts = load_cookies(cookie_path)
  self.async_chatbot = self.loop.run_until_complete(
- AsyncChatbot.create(self.secure_1psid, self.secure_1psidts, proxy, timeout, model)
+ AsyncChatbot.create(self.secure_1psid, self.secure_1psidts, proxy, timeout, model, impersonate) # Pass impersonate
  )

  def save_conversation(self, file_path: str, conversation_name: str):
@@ -176,23 +231,26 @@ class Chatbot:
  self.async_chatbot.load_conversation(file_path, conversation_name)
  )

- def ask(self, message: str) -> dict:
- return self.loop.run_until_complete(self.async_chatbot.ask(message))
+ def ask(self, message: str, image: Optional[Union[bytes, str, Path]] = None) -> dict: # Added image param
+ # Pass image to async ask method
+ return self.loop.run_until_complete(self.async_chatbot.ask(message, image=image))

  class AsyncChatbot:
  """
- A class to interact with Google Gemini.
+ A class to interact with Google Gemini using curl_cffi.
  Parameters:
  secure_1psid: str
  The __Secure-1PSID cookie.
  secure_1psidts: str
  The __Secure-1PSIDTS cookie.
- proxy: dict
- Http request proxy.
+ proxy: Optional[Union[str, Dict[str, str]]]
+ Proxy URL string or dictionary for curl_cffi.
  timeout: int
  Request timeout in seconds.
  model: Model
  Selected model for the session.
+ impersonate: str
+ Browser profile for curl_cffi to impersonate.
  """
  __slots__ = [
  "headers",
@@ -201,156 +259,328 @@ class AsyncChatbot:
  "conversation_id",
  "response_id",
  "choice_id",
- "proxy",
+ "proxy", # Store the original proxy config
+ "proxies_dict", # Store the curl_cffi-compatible proxy dict
  "secure_1psidts",
  "secure_1psid",
  "session",
  "timeout",
  "model",
+ "impersonate", # Store impersonate setting
  ]

  def __init__(
  self,
  secure_1psid: str,
  secure_1psidts: str,
- proxy: dict = None,
+ proxy: Optional[Union[str, Dict[str, str]]] = None, # Allow string or dict proxy
  timeout: int = 20,
  model: Model = Model.UNSPECIFIED,
+ impersonate: str = "chrome110", # Added impersonate
  ):
  headers = Headers.GEMINI.value.copy()
  if model != Model.UNSPECIFIED:
  headers.update(model.model_header)
- self._reqid = int("".join(random.choices(string.digits, k=4)))
- self.proxy = proxy
+ self._reqid = int("".join(random.choices(string.digits, k=7))) # Increased length for less collision chance
+ self.proxy = proxy # Store original proxy setting
+ self.impersonate = impersonate # Store impersonate setting
+
+ # Prepare proxy dictionary for curl_cffi
+ self.proxies_dict = None
+ if isinstance(proxy, str):
+ self.proxies_dict = {"http": proxy, "https": proxy} # curl_cffi uses http/https keys
+ elif isinstance(proxy, dict):
+ self.proxies_dict = proxy # Assume it's already in the correct format
+
  self.conversation_id = ""
  self.response_id = ""
  self.choice_id = ""
  self.secure_1psid = secure_1psid
  self.secure_1psidts = secure_1psidts
- self.session = httpx.AsyncClient(proxies=self.proxy)
- self.session.headers = headers
- self.session.cookies.set("__Secure-1PSID", secure_1psid)
- self.session.cookies.set("__Secure-1PSIDTS", secure_1psidts)
- self.timeout = timeout
+
+ # Initialize curl_cffi AsyncSession
+ self.session = AsyncSession(
+ headers=headers,
+ cookies={"__Secure-1PSID": secure_1psid, "__Secure-1PSIDTS": secure_1psidts},
+ proxies=self.proxies_dict,
+ timeout=timeout,
+ impersonate=self.impersonate,
+ # verify=True, # Default in curl_cffi
+ # http2=True, # Implicitly handled by curl_cffi if possible
+ )
+ # No need to set proxies/headers/cookies again, done in constructor
+
+ self.timeout = timeout # Store timeout for potential direct use in requests
  self.model = model
+ self.SNlM0e = None # Initialize SNlM0e

  @classmethod
  async def create(
  cls,
  secure_1psid: str,
  secure_1psidts: str,
- proxy: dict = None,
+ proxy: Optional[Union[str, Dict[str, str]]] = None, # Allow string or dict proxy
  timeout: int = 20,
  model: Model = Model.UNSPECIFIED,
+ impersonate: str = "chrome110", # Added impersonate
  ) -> "AsyncChatbot":
- instance = cls(secure_1psid, secure_1psidts, proxy, timeout, model)
- instance.SNlM0e = await instance.__get_snlm0e()
+ """
+ Factory method to create and initialize an AsyncChatbot instance.
+ Fetches the necessary SNlM0e value asynchronously.
+ """
+ instance = cls(secure_1psid, secure_1psidts, proxy, timeout, model, impersonate) # Pass impersonate
+ try:
+ instance.SNlM0e = await instance.__get_snlm0e()
+ except Exception as e:
+ # Log the error and re-raise or handle appropriately
+ console.log(f"[red]Error during AsyncChatbot initialization (__get_snlm0e): {e}[/red]", style="bold red")
+ # Optionally close the session if initialization fails critically
+ await instance.session.close() # Use close() for AsyncSession
+ raise # Re-raise the exception to signal failure
  return instance

  async def save_conversation(self, file_path: str, conversation_name: str) -> None:
+ # Logic remains the same
  conversations = await self.load_conversations(file_path)
- conversation_exists = False
- for conversation in conversations:
- if conversation["conversation_name"] == conversation_name:
- conversation["conversation_name"] = conversation_name
- conversation["_reqid"] = self._reqid
- conversation["conversation_id"] = self.conversation_id
- conversation["response_id"] = self.response_id
- conversation["choice_id"] = self.choice_id
- conversation["SNlM0e"] = self.SNlM0e
- conversation_exists = True
- if not conversation_exists:
- conversation = {
- "conversation_name": conversation_name,
- "_reqid": self._reqid,
- "conversation_id": self.conversation_id,
- "response_id": self.response_id,
- "choice_id": self.choice_id,
- "SNlM0e": self.SNlM0e,
- }
- conversations.append(conversation)
- with open(file_path, "w", encoding="utf-8") as f:
- json.dump(conversations, f, indent=4)
+ conversation_data = {
+ "conversation_name": conversation_name,
+ "_reqid": self._reqid,
+ "conversation_id": self.conversation_id,
+ "response_id": self.response_id,
+ "choice_id": self.choice_id,
+ "SNlM0e": self.SNlM0e,
+ "model_name": self.model.model_name, # Save the model used
+ "timestamp": datetime.now().isoformat(), # Add timestamp
+ }
+
+ found = False
+ for i, conv in enumerate(conversations):
+ if conv.get("conversation_name") == conversation_name:
+ conversations[i] = conversation_data # Update existing
+ found = True
+ break
+ if not found:
+ conversations.append(conversation_data) # Add new
+
+ try:
+ # Ensure directory exists
+ Path(file_path).parent.mkdir(parents=True, exist_ok=True)
+ with open(file_path, "w", encoding="utf-8") as f:
+ json.dump(conversations, f, indent=4, ensure_ascii=False)
+ except IOError as e:
+ console.log(f"[red]Error saving conversation to {file_path}: {e}[/red]")
+ raise

  async def load_conversations(self, file_path: str) -> List[Dict]:
+ # Logic remains the same
  if not os.path.isfile(file_path):
  return []
- with open(file_path, encoding="utf-8") as f:
- return json.load(f)
+ try:
+ with open(file_path, 'r', encoding="utf-8") as f:
+ return json.load(f)
+ except (json.JSONDecodeError, IOError) as e:
+ console.log(f"[red]Error loading conversations from {file_path}: {e}[/red]")
+ return []

  async def load_conversation(self, file_path: str, conversation_name: str) -> bool:
+ # Logic remains the same, but update headers on the session
  conversations = await self.load_conversations(file_path)
  for conversation in conversations:
- if conversation["conversation_name"] == conversation_name:
- self._reqid = conversation["_reqid"]
- self.conversation_id = conversation["conversation_id"]
- self.response_id = conversation["response_id"]
- self.choice_id = conversation["choice_id"]
- self.SNlM0e = conversation["SNlM0e"]
- return True
+ if conversation.get("conversation_name") == conversation_name:
+ try:
+ self._reqid = conversation["_reqid"]
+ self.conversation_id = conversation["conversation_id"]
+ self.response_id = conversation["response_id"]
+ self.choice_id = conversation["choice_id"]
+ self.SNlM0e = conversation["SNlM0e"]
+ if "model_name" in conversation:
+ try:
+ self.model = Model.from_name(conversation["model_name"])
+ # Update headers in the session if model changed
+ self.session.headers.update(self.model.model_header)
+ except ValueError as e:
+ console.log(f"[yellow]Warning: Model '{conversation['model_name']}' from saved conversation not found. Using current model '{self.model.model_name}'. Error: {e}[/yellow]")
+
+ console.log(f"Loaded conversation '{conversation_name}'")
+ return True
+ except KeyError as e:
+ console.log(f"[red]Error loading conversation '{conversation_name}': Missing key {e}[/red]")
+ return False
+ console.log(f"[yellow]Conversation '{conversation_name}' not found in {file_path}[/yellow]")
  return False

  async def __get_snlm0e(self):
- if not (self.secure_1psid and self.secure_1psidts) or self.secure_1psid[:2] != "g.":
- raise Exception("Enter correct __Secure_1PSID and __Secure_1PSIDTS value. __Secure_1PSID value must start with a g.")
- resp = await self.session.get(Endpoint.INIT.value, timeout=10, follow_redirects=True)
- if resp.status_code != 200:
- raise Exception(f"Response code not 200. Response Status is {resp.status_code}")
- snlm0e_match = re.search(r'"SNlM0e":"(.*?)"', resp.text)
- if not snlm0e_match:
- raise Exception("SNlM0e value not found in response. Check __Secure_1PSID value."
- "\nNOTE: The cookies expire after a short period; ensure you update them frequently."
- f" Failed with status {resp.status_code} - {resp.reason_phrase}")
- return snlm0e_match.group(1)
-
- async def ask(self, message: str) -> dict:
+ """Fetches the SNlM0e value required for API requests using curl_cffi."""
+ if not self.secure_1psid or not self.secure_1psidts:
+ raise ValueError("Both __Secure-1PSID and __Secure-1PSIDTS cookies are required.")
+
+ try:
+ # Use the session's get method
+ resp = await self.session.get(
+ Endpoint.INIT.value,
+ timeout=self.timeout, # Timeout is already set in session, but can override
+ # follow_redirects=True # Default in curl_cffi
+ )
+ resp.raise_for_status() # Raise HTTPError for bad responses (4xx or 5xx)
+
+ # Regex logic remains the same
+ snlm0e_match = re.search(r'["\']SNlM0e["\']\s*:\s*["\'](.*?)["\']', resp.text)
+ if not snlm0e_match:
+ error_message = "SNlM0e value not found in response."
+ if "Sign in to continue" in resp.text or "accounts.google.com" in str(resp.url):
+ error_message += " Cookies might be invalid or expired. Please update them."
+ elif resp.status_code == 429:
+ error_message += " Rate limit likely exceeded."
+ else:
+ error_message += f" Response status: {resp.status_code}. Check cookie validity and network."
+ raise ValueError(error_message)
+
+ return snlm0e_match.group(1)
+
+ except Timeout as e: # Catch requests.exceptions.Timeout
+ raise TimeoutError(f"Request timed out while fetching SNlM0e: {e}") from e
+ except (RequestException, CurlError) as e: # Catch general request errors and Curl specific errors
+ raise ConnectionError(f"Network error while fetching SNlM0e: {e}") from e
+ except HTTPError as e: # Catch requests.exceptions.HTTPError
+ if e.response.status_code == 401 or e.response.status_code == 403:
+ raise PermissionError(f"Authentication failed (status {e.response.status_code}). Check cookies. {e}") from e
+ else:
+ raise Exception(f"HTTP error {e.response.status_code} while fetching SNlM0e: {e}") from e
+
+
+ async def ask(self, message: str, image: Optional[Union[bytes, str, Path]] = None) -> dict:
+ """
+ Sends a message to Google Gemini and returns the response using curl_cffi.
+
+ Parameters:
+ message: str
+ The message to send.
+ image: Optional[Union[bytes, str, Path]]
+ Optional image data (bytes) or path to an image file to include.
+
+ Returns:
+ dict: A dictionary containing the response content and metadata.
+ """
+ if self.SNlM0e is None:
+ raise RuntimeError("AsyncChatbot not properly initialized. Call AsyncChatbot.create()")
+
  params = {
- "bl": "boq_assistant-bard-web-server_20230713.13_p0",
+ "bl": "boq_assistant-bard-web-server_20240625.13_p0", # Example, might need updates
  "_reqid": str(self._reqid),
  "rt": "c",
  }
+
+ image_upload_id = None
+ if image:
+ try:
+ # Pass proxy and impersonate settings to upload_file
+ image_upload_id = await upload_file(image, proxy=self.proxies_dict, impersonate=self.impersonate)
+ console.log(f"Image uploaded successfully. ID: {image_upload_id}")
+ except Exception as e:
+ console.log(f"[red]Error uploading image: {e}[/red]")
+ return {"content": f"Error uploading image: {e}", "error": True}
+
+ # Structure logic remains the same
  message_struct = [
  [message],
  None,
  [self.conversation_id, self.response_id, self.choice_id],
  ]
+ if image_upload_id:
+ message_struct = [
+ [message],
+ [[[image_upload_id, 1]]],
+ [self.conversation_id, self.response_id, self.choice_id],
+ ]
+
  data = {
- "f.req": json.dumps([None, json.dumps(message_struct)]),
+ "f.req": json.dumps([None, json.dumps(message_struct, ensure_ascii=False)], ensure_ascii=False),
  "at": self.SNlM0e,
  }
- resp = await self.session.post(
- Endpoint.GENERATE.value,
- params=params,
- data=data,
- timeout=self.timeout,
- )
+
  try:
- chat_data_line = resp.content.splitlines()[3]
+ # Use session.post
+ resp = await self.session.post(
+ Endpoint.GENERATE.value,
+ params=params,
+ data=data, # curl_cffi uses data for form-encoded
+ timeout=self.timeout,
+ )
+ resp.raise_for_status() # Check for HTTP errors
+
+ # Response processing logic remains the same
+ lines = resp.text.splitlines()
+ if len(lines) < 4:
+ raise ValueError(f"Unexpected response format from Gemini API. Status: {resp.status_code}. Content: {resp.text[:200]}...")
+
+ chat_data_line = lines[3]
+ if chat_data_line.startswith(")]}'"):
+ chat_data_line = chat_data_line.split('\n', 1)[-1].strip()
+
  chat_data = json.loads(chat_data_line)[0][2]
- except (IndexError, json.JSONDecodeError):
- return {"content": f"Gemini encountered an error: {resp.content}."}
- if not chat_data:
- return {"content": f"Gemini returned empty response: {resp.content}."}
- json_chat_data = json.loads(chat_data)
- images = []
- if len(json_chat_data) >= 3:
- if len(json_chat_data[4][0]) >= 4 and json_chat_data[4][0][4]:
- for img in json_chat_data[4][0][4]:
- images.append(img[0][0][0])
- results = {
- "content": json_chat_data[4][0][1][0],
- "conversation_id": json_chat_data[1][0],
- "response_id": json_chat_data[1][1],
- "factualityQueries": json_chat_data[3],
- "textQuery": json_chat_data[2][0] if json_chat_data[2] is not None else "",
- "choices": [{"id": i[0], "content": i[1]} for i in json_chat_data[4]],
- "images": images,
- }
- self.conversation_id = results["conversation_id"]
- self.response_id = results["response_id"]
- self.choice_id = results["choices"][0]["id"]
- self._reqid += 100000
- return results
+
+ if not chat_data:
+ return {"content": f"Gemini returned an empty response structure. Status: {resp.status_code}."}
+
+ json_chat_data = json.loads(chat_data)
+
+ # Extraction logic remains the same
+ content = json_chat_data[4][0][1][0] if len(json_chat_data) > 4 and len(json_chat_data[4]) > 0 and len(json_chat_data[4][0]) > 1 and len(json_chat_data[4][0][1]) > 0 else ""
+ conversation_id = json_chat_data[1][0] if len(json_chat_data) > 1 and len(json_chat_data[1]) > 0 else self.conversation_id
+ response_id = json_chat_data[1][1] if len(json_chat_data) > 1 and len(json_chat_data[1]) > 1 else self.response_id
+ factualityQueries = json_chat_data[3] if len(json_chat_data) > 3 else None
+ textQuery = json_chat_data[2][0] if len(json_chat_data) > 2 and json_chat_data[2] else ""
+ choices = [{"id": i[0], "content": i[1]} for i in json_chat_data[4]] if len(json_chat_data) > 4 else []
+ choice_id = choices[0]["id"] if choices else self.choice_id
+
+ images = []
+ if len(json_chat_data) > 4 and len(json_chat_data[4]) > 0 and len(json_chat_data[4][0]) > 4 and json_chat_data[4][0][4]:
+ for img_data in json_chat_data[4][0][4]:
+ try:
+ img_url = img_data[0][0][0]
+ img_alt = img_data[2] if len(img_data) > 2 else ""
+ img_title = img_data[1] if len(img_data) > 1 else "[Image]"
+ images.append({"url": img_url, "alt": img_alt, "title": img_title})
+ except (IndexError, TypeError):
+ console.log("[yellow]Warning: Could not parse image data structure.[/yellow]")
+ continue
+
+ results = {
+ "content": content,
+ "conversation_id": conversation_id,
+ "response_id": response_id,
+ "factualityQueries": factualityQueries,
+ "textQuery": textQuery,
+ "choices": choices,
+ "images": images,
+ "error": False,
+ }
+
+ # Update state
+ self.conversation_id = conversation_id
+ self.response_id = response_id
+ self.choice_id = choice_id
+ self._reqid += random.randint(1000, 9000)
+
+ return results
+
+ # Update exception handling
+ except (IndexError, json.JSONDecodeError, TypeError) as e:
+ console.log(f"[red]Error parsing Gemini response: {e}[/red]")
+ return {"content": f"Error parsing Gemini response: {e}. Response: {resp.text[:200]}...", "error": True}
+ except Timeout as e: # Catch requests.exceptions.Timeout
+ console.log(f"[red]Request timed out: {e}[/red]")
+ return {"content": f"Request timed out: {e}", "error": True}
+ except (RequestException, CurlError) as e: # Catch general request/curl errors
+ console.log(f"[red]Network error: {e}[/red]")
+ return {"content": f"Network error: {e}", "error": True}
+ except HTTPError as e: # Catch requests.exceptions.HTTPError
+ console.log(f"[red]HTTP error {e.response.status_code}: {e}[/red]")
+ return {"content": f"HTTP error {e.response.status_code}: {e}", "error": True}
+ except Exception as e:
+ console.log(f"[red]An unexpected error occurred during ask: {e}[/red]", style="bold red")
+ return {"content": f"An unexpected error occurred: {e}", "error": True}
+

  #########################################
  # New Image classes
@@ -366,20 +596,24 @@ class Image(BaseModel):
  Title of the image (default: "[Image]").
  alt: str, optional
  Optional description.
- proxy: str, optional
+ proxy: Optional[Union[str, Dict[str, str]]] = None # Allow string or dict proxy
  Proxy used when saving the image.
+ impersonate: str = "chrome110" # Added impersonate for saving
+ Browser profile for curl_cffi to impersonate.
  """
  url: str
  title: str = "[Image]"
  alt: str = ""
- proxy: Optional[str] = None
+ proxy: Optional[Union[str, Dict[str, str]]] = None
+ impersonate: str = "chrome110" # Default impersonation for saving

  def __str__(self):
  return f"{self.title}({self.url}) - {self.alt}"

  def __repr__(self):
- short_url = self.url if len(self.url) <= 20 else self.url[:8] + "..." + self.url[-12:]
- return f"Image(title='{self.title}', url='{short_url}', alt='{self.alt}')"
+ short_url = self.url if len(self.url) <= 50 else self.url[:20] + "..." + self.url[-20:]
+ short_alt = self.alt[:30] + "..." if len(self.alt) > 30 else self.alt
+ return f"Image(title='{self.title}', url='{short_url}', alt='{short_alt}')"

  async def save(
  self,
@@ -390,7 +624,7 @@ class Image(BaseModel):
  skip_invalid_filename: bool = False,
  ) -> Optional[str]:
  """
- Save the image to disk.
+ Save the image to disk using curl_cffi.
  Parameters:
  path: str, optional
  Directory to save the image (default "./temp").
@@ -405,56 +639,110 @@
  Returns:
  Absolute path of the saved image if successful; None if skipped.
  Raises:
- httpx.HTTPError if the network request fails.
+ HTTPError if the network request fails.
+ RequestException/CurlError for other network errors.
+ IOError if file writing fails.
  """
- filename = filename or self.url.split("/")[-1].split("?")[0]
+ # Filename generation logic remains the same
+ if not filename:
+ try:
+ # Use httpx.URL temporarily just for parsing, or implement manually
+ # Let's use basic parsing to avoid httpx dependency here
+ from urllib.parse import urlparse, unquote
+ parsed_url = urlparse(self.url)
+ base_filename = os.path.basename(unquote(parsed_url.path))
+ safe_filename = re.sub(r'[<>:"/\\|?*]', '_', base_filename)
+ filename = safe_filename if safe_filename else f"image_{random.randint(1000, 9999)}.jpg"
+ except Exception:
+ filename = f"image_{random.randint(1000, 9999)}.jpg"
+
  try:
- filename = re.search(r"^(.*\.\w+)", filename).group()
- except AttributeError:
- if verbose:
- console.log(f"Invalid filename: {filename}")
+ _ = Path(filename)
+ max_len = 255
+ if len(filename) > max_len:
+ name, ext = os.path.splitext(filename)
+ filename = name[:max_len - len(ext) -1] + ext
+ except (OSError, ValueError):
+ if verbose: console.log(f"[yellow]Invalid filename generated: {filename}[/yellow]")
  if skip_invalid_filename:
+ if verbose: console.log("[yellow]Skipping save due to invalid filename.[/yellow]")
  return None
- async with AsyncClient(http2=True, follow_redirects=True, cookies=cookies, proxies=self.proxy) as client:
- response = await client.get(self.url)
- if response.status_code == 200:
- content_type = response.headers.get("content-type")
- if content_type and "image" not in content_type:
- console.log(f"Warning: Content type of {filename} is {content_type}, not an image.")
+ filename = f"image_{random.randint(1000, 9999)}.jpg"
+ if verbose: console.log(f"[yellow]Using fallback filename: {filename}[/yellow]")
+
+ # Prepare proxy dictionary for curl_cffi
+ proxies_dict = None
+ if isinstance(self.proxy, str):
+ proxies_dict = {"http": self.proxy, "https": self.proxy}
+ elif isinstance(self.proxy, dict):
+ proxies_dict = self.proxy
+
+ try:
+ # Use AsyncSession from curl_cffi
+ async with AsyncSession(
+ follow_redirects=True, # Default
+ cookies=cookies,
+ proxies=proxies_dict,
+ impersonate=self.impersonate # Use stored impersonate setting
+ ) as client:
+ if verbose:
+ console.log(f"Attempting to download image from: {self.url}")
+ response = await client.get(self.url)
+ response.raise_for_status() # Raise HTTPError for bad responses
+
+ content_type = response.headers.get("content-type", "").lower()
+ if "image" not in content_type:
+ console.log(f"[yellow]Warning: Content type is '{content_type}', not an image. Saving anyway.[/yellow]")
+
  dest_path = Path(path)
  dest_path.mkdir(parents=True, exist_ok=True)
  dest = dest_path / filename
+
+ # Use response.content which holds the bytes
  dest.write_bytes(response.content)
  if verbose:
- console.log(f"Image saved as {dest.resolve()}")
+ console.log(f"Image saved successfully as {dest.resolve()}")
  return str(dest.resolve())
- else:
- raise HTTPStatusError(
- f"Error downloading image: {response.status_code} {response.reason_phrase}",
- request=response.request,
- response=response,
- )
+
+ # Update exception handling
+ except HTTPError as e:
+ console.log(f"[red]Error downloading image {self.url}: {e.response.status_code} {e}[/red]")
+ raise
+ except (RequestException, CurlError) as e:
+ console.log(f"[red]Network error downloading image {self.url}: {e}[/red]")
+ raise
+ except IOError as e:
+ console.log(f"[red]Error writing image file to {dest}: {e}[/red]")
+ raise
+ except Exception as e:
+ console.log(f"[red]An unexpected error occurred during image save: {e}[/red]")
+ raise
+

  class WebImage(Image):
  """
- Image retrieved from web.
+ Image retrieved from web search results.
  Returned when asking Gemini to "SEND an image of [something]".
  """
  pass

  class GeneratedImage(Image):
  """
- Image generated by ImageFX (Google's AI image generator).
+ Image generated by Google's AI image generator (e.g., ImageFX).
  Parameters:
  cookies: dict[str, str]
- Cookies used from the GeminiClient.
+ Cookies required for accessing the generated image URL, typically
+ from the GeminiClient/Chatbot instance.
  """
  cookies: Dict[str, str]

- @validator("cookies")
- def validate_cookies(cls, v):
- if not v:
- raise ValueError("GeneratedImage requires cookies from GeminiClient.")
+ # Updated validator for Pydantic V2
+ @field_validator("cookies")
+ @classmethod
+ def validate_cookies(cls, v: Dict[str, str]) -> Dict[str, str]:
+ """Ensures cookies are provided for generated images."""
+ if not v or not isinstance(v, dict):
+ raise ValueError("GeneratedImage requires a dictionary of cookies from the client.")
  return v

  async def save(self, **kwargs) -> Optional[str]:
@@ -462,74 +750,159 @@ class GeneratedImage(Image):
  Save the generated image to disk.
  Parameters:
  filename: str, optional
- Filename to use; generated images are in .png format.
+ Filename to use. If not provided, a default name including
+ a timestamp and part of the URL is used. Generated images
+ are often in .png or .jpg format.
  Additional arguments are passed to Image.save.
  Returns:
- Absolute path of the saved image if successful.
+ Absolute path of the saved image if successful, None if skipped.
  """
- filename = kwargs.pop("filename", None) or f"{datetime.now().strftime('%Y%m%d%H%M%S')}_{self.url[-10:]}.png"
- return await super().save(filename=filename, cookies=self.cookies, **kwargs)
+ if "filename" not in kwargs:
+ ext = ".jpg" if ".jpg" in self.url.lower() else ".png"
+ url_part = self.url.split('/')[-1][:10]
+ kwargs["filename"] = f"{datetime.now().strftime('%Y%m%d%H%M%S')}_{url_part}{ext}"
+
+ # Pass the required cookies and other args (like impersonate) to the parent save method
+ return await super().save(cookies=self.cookies, **kwargs)

  #########################################
  # Main usage demonstration
  #########################################

- if __name__ == "__main__":
- """
- Usage demonstration:
- - Reads cookies from 'cookies.json'
- - Initializes the synchronous Chatbot wrapper.
- - Performs a text query.
- - Performs an image generation query and downloads the generated image.
- - Demonstrates saving a conversation.
- """
- # Define the path to cookies file
- cookies_file = r"cookies.json"
-
- # Create Chatbot instance with a chosen model
+ async def main_async():
+ """Asynchronous main function for demonstration."""
+ cookies_file = "cookies.json"
+ impersonate_profile = "chrome110" # Example browser profile
+
+ bot = None
  try:
- bot = Chatbot(cookie_path=cookies_file, model=Model.G_2_0_FLASH_THINKING_WITH_APPS)
+ bot = await AsyncChatbot.create(
+ *load_cookies(cookies_file),
+ model=Model.G_2_5_PRO,
+ impersonate=impersonate_profile, # Pass impersonate setting
+ # proxy="socks5://127.0.0.1:9050" # Example SOCKS proxy
+ )
+ console.log(f"[green]AsyncChatbot initialized successfully (impersonating {impersonate_profile}).[/green]")
+ except FileNotFoundError:
+ console.log(f"[bold red]Error: Cookie file '{cookies_file}' not found.[/bold red]")
+ console.log("Please export cookies from your browser after logging into Google Gemini and save as cookies.json.")
+ return
  except Exception as e:
- console.log(f"[red]Error initializing Chatbot: {e}[/red]")
- exit(1)
+ console.log(f"[bold red]Error initializing AsyncChatbot: {e}[/bold red]")
+ return

- # Sample text query
- text_message = "How many r's in word strawberry?"
- console.log("[green]Sending text query to Gemini...[/green]")
+ # --- Sample text query ---
+ text_message = "Explain the concept of asynchronous programming in Python in simple terms."
+ console.log(f"\n[cyan]Sending text query:[/cyan] '{text_message}'")
  try:
- response_text = bot.ask(text_message)
- console.log("[blue]Text Response:[/blue]")
- console.print(Markdown(response_text.get("content", "No content received.")))
+ response_text = await bot.ask(text_message)
+ if response_text.get("error"):
+ console.log(f"[red]Error in text response: {response_text.get('content')}[/red]")
+ else:
+ console.log("[blue]Text Response:[/blue]")
+ console.print(Markdown(response_text.get("content", "No content received.")))
  except Exception as e:
- console.log(f"[red]Error sending text query: {e}[/red]")
+ console.log(f"[red]Error during text query: {e}[/red]")

- # Image generation query
- image_message = "Generate an image of a scenic view."
- console.log("[green]Requesting image generation from Gemini...[/green]")
+ # --- Image Generation Query ---
+ image_prompt = "Generate an artistic image of a cat sitting on a crescent moon, starry night background."
+ console.log(f"\n[cyan]Sending image generation query:[/cyan] '{image_prompt}'")
  try:
- response_image = bot.ask(image_message)
- # Check if any image URL is returned in the response
- image_urls = response_image.get("images", [])
- if not image_urls:
- console.log("[red]No image URLs returned in response.[/red]")
+ response_image = await bot.ask(image_prompt)
+
+ if response_image.get("error"):
+ console.log(f"[red]Error in image response: {response_image.get('content')}[/red]")
  else:
- image_url = image_urls[0]
- console.log(f"[blue]Image URL received: {image_url}[/blue]")
- # Use GeneratedImage class to download the generated image
- generated_img = GeneratedImage(
- url=image_url,
- cookies={"__Secure-1PSID": bot.secure_1psid, "__Secure-1PSIDTS": bot.secure_1psidts}
- )
- saved_path = asyncio.run(generated_img.save(path="downloaded_images", verbose=True))
- console.log(f"[blue]Generated image saved at: {saved_path}[/blue]")
+ returned_images = response_image.get("images", [])
+ if not returned_images:
+ console.log("[yellow]No direct image data returned. Response content:[/yellow]")
+ console.print(Markdown(response_image.get("content", "No content received.")))
+ else:
+ console.log(f"[green]Received {len(returned_images)} image(s).[/green]")
+ for i, img_data in enumerate(returned_images):
+ console.log(f"Processing image {i+1}: URL: {img_data.get('url')}")
+ try:
+ # Pass impersonate setting when creating Image object
+ generated_img = GeneratedImage(
+ url=img_data.get('url'),
+ title=img_data.get('title', f"Generated Image {i+1}"),
+ alt=img_data.get('alt', ""),
+ cookies={"__Secure-1PSID": bot.secure_1psid, "__Secure-1PSIDTS": bot.secure_1psidts},
+ proxy=bot.proxy, # Pass proxy settings from bot
+ impersonate=bot.impersonate # Pass impersonate setting from bot
+ )
+ save_path = "downloaded_images"
+ saved_file = await generated_img.save(path=save_path, verbose=True, skip_invalid_filename=True)
+ if saved_file:
+ console.log(f"[blue]Image {i+1} saved to: {saved_file}[/blue]")
+ else:
+ console.log(f"[yellow]Image {i+1} skipped due to filename issue.[/yellow]")
+ except Exception as img_e:
+ console.log(f"[red]Error saving image {i+1}: {img_e}[/red]")
+
  except Exception as e:
- console.log(f"[red]Error processing image generation: {e}[/red]")
-
- # Demonstrate saving a conversation
+ console.log(f"[red]Error during image generation query: {e}[/red]")
+
+ # --- Image Understanding Query ---
+ local_image_path = "path/to/your/local/image.jpg" # <--- CHANGE THIS PATH
+ image_understanding_prompt = "Describe what you see in this image."
+
+ if Path(local_image_path).is_file():
+ console.log(f"\n[cyan]Sending image understanding query with image:[/cyan] '{local_image_path}'")
+ console.log(f"[cyan]Prompt:[/cyan] '{image_understanding_prompt}'")
+ try:
+ response_understanding = await bot.ask(image_understanding_prompt, image=local_image_path)
+ if response_understanding.get("error"):
+ console.log(f"[red]Error in image understanding response: {response_understanding.get('content')}[/red]")
+ else:
+ console.log("[blue]Image Understanding Response:[/blue]")
+ console.print(Markdown(response_understanding.get("content", "No content received.")))
+ except Exception as e:
+ console.log(f"[red]Error during image understanding query: {e}[/red]")
+ else:
+ console.log(f"\n[yellow]Skipping image understanding query: File not found at '{local_image_path}'.[/yellow]")
+ console.log("[yellow]Please update 'local_image_path' in the script to test this feature.[/yellow]")
+
+
+ # --- Save/Load Conversation (logic remains the same) ---
  conversation_file = "conversations.json"
- conversation_name = "Sample Conversation"
+ conversation_name = f"Demo Conversation - {datetime.now().strftime('%Y%m%d_%H%M')}"
+ console.log(f"\n[cyan]Saving conversation as:[/cyan] '{conversation_name}' to '{conversation_file}'")
  try:
- bot.save_conversation(conversation_file, conversation_name)
- console.log(f"[green]Conversation saved to {conversation_file} under the name '{conversation_name}'.[/green]")
+ await bot.save_conversation(conversation_file, conversation_name)
+ console.log(f"[green]Conversation saved successfully.[/green]")
  except Exception as e:
  console.log(f"[red]Error saving conversation: {e}[/red]")
+
+ console.log(f"\n[cyan]Attempting to load conversation:[/cyan] '{conversation_name}' from '{conversation_file}'")
+ try:
+ loaded = await bot.load_conversation(conversation_file, conversation_name)
+ if loaded:
+ console.log("[green]Conversation loaded successfully. Sending a follow-up query.[/green]")
+ follow_up_message = "What was the first question I asked in this session?"
+ console.log(f"[cyan]Sending follow-up query:[/cyan] '{follow_up_message}'")
+ response_follow_up = await bot.ask(follow_up_message)
+ if response_follow_up.get("error"):
+ console.log(f"[red]Error in follow-up response: {response_follow_up.get('content')}[/red]")
+ else:
+ console.log("[blue]Follow-up Response:[/blue]")
+ console.print(Markdown(response_follow_up.get("content", "No content received.")))
+ else:
+ console.log("[yellow]Could not load the conversation.[/yellow]")
+ except Exception as e:
+ console.log(f"[red]Error loading or using loaded conversation: {e}[/red]")
+
+ # --- Cleanup ---
+ if bot and bot.session:
+ await bot.session.close() # Use close() for AsyncSession
+ console.log("\n[grey]HTTP session closed.[/grey]")
+
+
+ if __name__ == "__main__":
+ try:
+ asyncio.run(main_async())
+ except KeyboardInterrupt:
+ console.log("\n[yellow]Operation cancelled by user.[/yellow]")
+ except Exception as main_e:
+ console.log(f"[bold red]An error occurred in the main execution: {main_e}[/bold red]")
+