webscout 8.2.7__py3-none-any.whl → 8.2.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- webscout/AIauto.py +33 -15
- webscout/AIbase.py +96 -37
- webscout/AIutel.py +703 -250
- webscout/Bard.py +441 -323
- webscout/Extra/Act.md +309 -0
- webscout/Extra/GitToolkit/__init__.py +10 -0
- webscout/Extra/GitToolkit/gitapi/README.md +110 -0
- webscout/Extra/GitToolkit/gitapi/__init__.py +12 -0
- webscout/Extra/GitToolkit/gitapi/repository.py +195 -0
- webscout/Extra/GitToolkit/gitapi/user.py +96 -0
- webscout/Extra/GitToolkit/gitapi/utils.py +62 -0
- webscout/Extra/YTToolkit/README.md +375 -0
- webscout/Extra/YTToolkit/YTdownloader.py +957 -0
- webscout/Extra/YTToolkit/__init__.py +3 -0
- webscout/Extra/YTToolkit/transcriber.py +476 -0
- webscout/Extra/YTToolkit/ytapi/README.md +44 -0
- webscout/Extra/YTToolkit/ytapi/__init__.py +6 -0
- webscout/Extra/YTToolkit/ytapi/channel.py +307 -0
- webscout/Extra/YTToolkit/ytapi/errors.py +13 -0
- webscout/Extra/YTToolkit/ytapi/extras.py +118 -0
- webscout/Extra/YTToolkit/ytapi/https.py +88 -0
- webscout/Extra/YTToolkit/ytapi/patterns.py +61 -0
- webscout/Extra/YTToolkit/ytapi/playlist.py +59 -0
- webscout/Extra/YTToolkit/ytapi/pool.py +8 -0
- webscout/Extra/YTToolkit/ytapi/query.py +40 -0
- webscout/Extra/YTToolkit/ytapi/stream.py +63 -0
- webscout/Extra/YTToolkit/ytapi/utils.py +62 -0
- webscout/Extra/YTToolkit/ytapi/video.py +232 -0
- webscout/Extra/__init__.py +7 -0
- webscout/Extra/autocoder/__init__.py +9 -0
- webscout/Extra/autocoder/autocoder.py +1105 -0
- webscout/Extra/autocoder/autocoder_utiles.py +332 -0
- webscout/Extra/gguf.md +430 -0
- webscout/Extra/gguf.py +684 -0
- webscout/Extra/tempmail/README.md +488 -0
- webscout/Extra/tempmail/__init__.py +28 -0
- webscout/Extra/tempmail/async_utils.py +141 -0
- webscout/Extra/tempmail/base.py +161 -0
- webscout/Extra/tempmail/cli.py +187 -0
- webscout/Extra/tempmail/emailnator.py +84 -0
- webscout/Extra/tempmail/mail_tm.py +361 -0
- webscout/Extra/tempmail/temp_mail_io.py +292 -0
- webscout/Extra/weather.md +281 -0
- webscout/Extra/weather.py +194 -0
- webscout/Extra/weather_ascii.py +76 -0
- webscout/Litlogger/README.md +10 -0
- webscout/Litlogger/__init__.py +15 -0
- webscout/Litlogger/formats.py +4 -0
- webscout/Litlogger/handlers.py +103 -0
- webscout/Litlogger/levels.py +13 -0
- webscout/Litlogger/logger.py +92 -0
- webscout/Provider/AI21.py +177 -0
- webscout/Provider/AISEARCH/DeepFind.py +254 -0
- webscout/Provider/AISEARCH/Perplexity.py +333 -0
- webscout/Provider/AISEARCH/README.md +279 -0
- webscout/Provider/AISEARCH/__init__.py +9 -0
- webscout/Provider/AISEARCH/felo_search.py +202 -0
- webscout/Provider/AISEARCH/genspark_search.py +324 -0
- webscout/Provider/AISEARCH/hika_search.py +186 -0
- webscout/Provider/AISEARCH/iask_search.py +410 -0
- webscout/Provider/AISEARCH/monica_search.py +220 -0
- webscout/Provider/AISEARCH/scira_search.py +298 -0
- webscout/Provider/AISEARCH/webpilotai_search.py +255 -0
- webscout/Provider/Aitopia.py +316 -0
- webscout/Provider/AllenAI.py +440 -0
- webscout/Provider/Andi.py +228 -0
- webscout/Provider/Blackboxai.py +791 -0
- webscout/Provider/ChatGPTClone.py +237 -0
- webscout/Provider/ChatGPTGratis.py +194 -0
- webscout/Provider/ChatSandbox.py +342 -0
- webscout/Provider/Cloudflare.py +324 -0
- webscout/Provider/Cohere.py +208 -0
- webscout/Provider/Deepinfra.py +340 -0
- webscout/Provider/ExaAI.py +261 -0
- webscout/Provider/ExaChat.py +358 -0
- webscout/Provider/Flowith.py +217 -0
- webscout/Provider/FreeGemini.py +250 -0
- webscout/Provider/Gemini.py +169 -0
- webscout/Provider/GithubChat.py +369 -0
- webscout/Provider/GizAI.py +295 -0
- webscout/Provider/Glider.py +225 -0
- webscout/Provider/Groq.py +801 -0
- webscout/Provider/HF_space/__init__.py +0 -0
- webscout/Provider/HF_space/qwen_qwen2.py +206 -0
- webscout/Provider/HeckAI.py +375 -0
- webscout/Provider/HuggingFaceChat.py +469 -0
- webscout/Provider/Hunyuan.py +283 -0
- webscout/Provider/Jadve.py +291 -0
- webscout/Provider/Koboldai.py +384 -0
- webscout/Provider/LambdaChat.py +411 -0
- webscout/Provider/Llama3.py +259 -0
- webscout/Provider/MCPCore.py +315 -0
- webscout/Provider/Marcus.py +198 -0
- webscout/Provider/Nemotron.py +218 -0
- webscout/Provider/Netwrck.py +270 -0
- webscout/Provider/OLLAMA.py +396 -0
- webscout/Provider/OPENAI/BLACKBOXAI.py +766 -0
- webscout/Provider/OPENAI/Cloudflare.py +378 -0
- webscout/Provider/OPENAI/FreeGemini.py +283 -0
- webscout/Provider/OPENAI/NEMOTRON.py +232 -0
- webscout/Provider/OPENAI/Qwen3.py +283 -0
- webscout/Provider/OPENAI/README.md +952 -0
- webscout/Provider/OPENAI/TwoAI.py +357 -0
- webscout/Provider/OPENAI/__init__.py +40 -0
- webscout/Provider/OPENAI/ai4chat.py +293 -0
- webscout/Provider/OPENAI/api.py +969 -0
- webscout/Provider/OPENAI/base.py +249 -0
- webscout/Provider/OPENAI/c4ai.py +373 -0
- webscout/Provider/OPENAI/chatgpt.py +556 -0
- webscout/Provider/OPENAI/chatgptclone.py +494 -0
- webscout/Provider/OPENAI/chatsandbox.py +173 -0
- webscout/Provider/OPENAI/copilot.py +242 -0
- webscout/Provider/OPENAI/deepinfra.py +322 -0
- webscout/Provider/OPENAI/e2b.py +1414 -0
- webscout/Provider/OPENAI/exaai.py +417 -0
- webscout/Provider/OPENAI/exachat.py +444 -0
- webscout/Provider/OPENAI/flowith.py +162 -0
- webscout/Provider/OPENAI/freeaichat.py +359 -0
- webscout/Provider/OPENAI/glider.py +326 -0
- webscout/Provider/OPENAI/groq.py +364 -0
- webscout/Provider/OPENAI/heckai.py +308 -0
- webscout/Provider/OPENAI/llmchatco.py +335 -0
- webscout/Provider/OPENAI/mcpcore.py +389 -0
- webscout/Provider/OPENAI/multichat.py +376 -0
- webscout/Provider/OPENAI/netwrck.py +357 -0
- webscout/Provider/OPENAI/oivscode.py +287 -0
- webscout/Provider/OPENAI/opkfc.py +496 -0
- webscout/Provider/OPENAI/pydantic_imports.py +172 -0
- webscout/Provider/OPENAI/scirachat.py +477 -0
- webscout/Provider/OPENAI/sonus.py +304 -0
- webscout/Provider/OPENAI/standardinput.py +433 -0
- webscout/Provider/OPENAI/textpollinations.py +339 -0
- webscout/Provider/OPENAI/toolbaz.py +413 -0
- webscout/Provider/OPENAI/typefully.py +355 -0
- webscout/Provider/OPENAI/typegpt.py +364 -0
- webscout/Provider/OPENAI/uncovrAI.py +463 -0
- webscout/Provider/OPENAI/utils.py +318 -0
- webscout/Provider/OPENAI/venice.py +431 -0
- webscout/Provider/OPENAI/wisecat.py +387 -0
- webscout/Provider/OPENAI/writecream.py +163 -0
- webscout/Provider/OPENAI/x0gpt.py +365 -0
- webscout/Provider/OPENAI/yep.py +382 -0
- webscout/Provider/OpenGPT.py +209 -0
- webscout/Provider/Openai.py +496 -0
- webscout/Provider/PI.py +429 -0
- webscout/Provider/Perplexitylabs.py +415 -0
- webscout/Provider/QwenLM.py +254 -0
- webscout/Provider/Reka.py +214 -0
- webscout/Provider/StandardInput.py +290 -0
- webscout/Provider/TTI/README.md +82 -0
- webscout/Provider/TTI/__init__.py +7 -0
- webscout/Provider/TTI/aiarta.py +365 -0
- webscout/Provider/TTI/artbit.py +0 -0
- webscout/Provider/TTI/base.py +64 -0
- webscout/Provider/TTI/fastflux.py +200 -0
- webscout/Provider/TTI/magicstudio.py +201 -0
- webscout/Provider/TTI/piclumen.py +203 -0
- webscout/Provider/TTI/pixelmuse.py +225 -0
- webscout/Provider/TTI/pollinations.py +221 -0
- webscout/Provider/TTI/utils.py +11 -0
- webscout/Provider/TTS/README.md +192 -0
- webscout/Provider/TTS/__init__.py +10 -0
- webscout/Provider/TTS/base.py +159 -0
- webscout/Provider/TTS/deepgram.py +156 -0
- webscout/Provider/TTS/elevenlabs.py +111 -0
- webscout/Provider/TTS/gesserit.py +128 -0
- webscout/Provider/TTS/murfai.py +113 -0
- webscout/Provider/TTS/openai_fm.py +129 -0
- webscout/Provider/TTS/parler.py +111 -0
- webscout/Provider/TTS/speechma.py +580 -0
- webscout/Provider/TTS/sthir.py +94 -0
- webscout/Provider/TTS/streamElements.py +333 -0
- webscout/Provider/TTS/utils.py +280 -0
- webscout/Provider/TeachAnything.py +229 -0
- webscout/Provider/TextPollinationsAI.py +308 -0
- webscout/Provider/TwoAI.py +475 -0
- webscout/Provider/TypliAI.py +305 -0
- webscout/Provider/UNFINISHED/ChatHub.py +209 -0
- webscout/Provider/UNFINISHED/Youchat.py +330 -0
- webscout/Provider/UNFINISHED/liner_api_request.py +263 -0
- webscout/Provider/UNFINISHED/puterjs.py +635 -0
- webscout/Provider/UNFINISHED/test_lmarena.py +119 -0
- webscout/Provider/Venice.py +258 -0
- webscout/Provider/VercelAI.py +253 -0
- webscout/Provider/WiseCat.py +233 -0
- webscout/Provider/WrDoChat.py +370 -0
- webscout/Provider/Writecream.py +246 -0
- webscout/Provider/WritingMate.py +269 -0
- webscout/Provider/__init__.py +174 -0
- webscout/Provider/ai4chat.py +174 -0
- webscout/Provider/akashgpt.py +335 -0
- webscout/Provider/asksteve.py +220 -0
- webscout/Provider/cerebras.py +290 -0
- webscout/Provider/chatglm.py +215 -0
- webscout/Provider/cleeai.py +213 -0
- webscout/Provider/copilot.py +425 -0
- webscout/Provider/elmo.py +283 -0
- webscout/Provider/freeaichat.py +285 -0
- webscout/Provider/geminiapi.py +208 -0
- webscout/Provider/granite.py +235 -0
- webscout/Provider/hermes.py +266 -0
- webscout/Provider/julius.py +223 -0
- webscout/Provider/koala.py +170 -0
- webscout/Provider/learnfastai.py +325 -0
- webscout/Provider/llama3mitril.py +215 -0
- webscout/Provider/llmchat.py +258 -0
- webscout/Provider/llmchatco.py +306 -0
- webscout/Provider/lmarena.py +198 -0
- webscout/Provider/meta.py +801 -0
- webscout/Provider/multichat.py +364 -0
- webscout/Provider/oivscode.py +309 -0
- webscout/Provider/samurai.py +224 -0
- webscout/Provider/scira_chat.py +299 -0
- webscout/Provider/scnet.py +243 -0
- webscout/Provider/searchchat.py +292 -0
- webscout/Provider/sonus.py +258 -0
- webscout/Provider/talkai.py +194 -0
- webscout/Provider/toolbaz.py +353 -0
- webscout/Provider/turboseek.py +266 -0
- webscout/Provider/typefully.py +202 -0
- webscout/Provider/typegpt.py +289 -0
- webscout/Provider/uncovr.py +368 -0
- webscout/Provider/x0gpt.py +299 -0
- webscout/Provider/yep.py +389 -0
- webscout/__init__.py +4 -2
- webscout/cli.py +3 -28
- webscout/client.py +70 -0
- webscout/conversation.py +35 -35
- webscout/litagent/Readme.md +276 -0
- webscout/litagent/__init__.py +29 -0
- webscout/litagent/agent.py +455 -0
- webscout/litagent/constants.py +60 -0
- webscout/litprinter/__init__.py +59 -0
- webscout/optimizers.py +419 -419
- webscout/scout/README.md +404 -0
- webscout/scout/__init__.py +8 -0
- webscout/scout/core/__init__.py +7 -0
- webscout/scout/core/crawler.py +210 -0
- webscout/scout/core/scout.py +607 -0
- webscout/scout/core/search_result.py +96 -0
- webscout/scout/core/text_analyzer.py +63 -0
- webscout/scout/core/text_utils.py +277 -0
- webscout/scout/core/web_analyzer.py +52 -0
- webscout/scout/element.py +478 -0
- webscout/scout/parsers/__init__.py +69 -0
- webscout/scout/parsers/html5lib_parser.py +172 -0
- webscout/scout/parsers/html_parser.py +236 -0
- webscout/scout/parsers/lxml_parser.py +178 -0
- webscout/scout/utils.py +37 -0
- webscout/swiftcli/Readme.md +323 -0
- webscout/swiftcli/__init__.py +95 -0
- webscout/swiftcli/core/__init__.py +7 -0
- webscout/swiftcli/core/cli.py +297 -0
- webscout/swiftcli/core/context.py +104 -0
- webscout/swiftcli/core/group.py +241 -0
- webscout/swiftcli/decorators/__init__.py +28 -0
- webscout/swiftcli/decorators/command.py +221 -0
- webscout/swiftcli/decorators/options.py +220 -0
- webscout/swiftcli/decorators/output.py +252 -0
- webscout/swiftcli/exceptions.py +21 -0
- webscout/swiftcli/plugins/__init__.py +9 -0
- webscout/swiftcli/plugins/base.py +135 -0
- webscout/swiftcli/plugins/manager.py +269 -0
- webscout/swiftcli/utils/__init__.py +59 -0
- webscout/swiftcli/utils/formatting.py +252 -0
- webscout/swiftcli/utils/parsing.py +267 -0
- webscout/version.py +1 -1
- webscout/webscout_search.py +2 -182
- webscout/webscout_search_async.py +1 -179
- webscout/zeroart/README.md +89 -0
- webscout/zeroart/__init__.py +135 -0
- webscout/zeroart/base.py +66 -0
- webscout/zeroart/effects.py +101 -0
- webscout/zeroart/fonts.py +1239 -0
- {webscout-8.2.7.dist-info → webscout-8.2.9.dist-info}/METADATA +262 -83
- webscout-8.2.9.dist-info/RECORD +289 -0
- {webscout-8.2.7.dist-info → webscout-8.2.9.dist-info}/WHEEL +1 -1
- {webscout-8.2.7.dist-info → webscout-8.2.9.dist-info}/entry_points.txt +1 -0
- webscout-8.2.7.dist-info/RECORD +0 -26
- {webscout-8.2.7.dist-info → webscout-8.2.9.dist-info}/licenses/LICENSE.md +0 -0
- {webscout-8.2.7.dist-info → webscout-8.2.9.dist-info}/top_level.txt +0 -0
webscout/Bard.py
CHANGED
@@ -35,12 +35,29 @@ console = Console()
 #########################################

 class Endpoint(Enum):
+    """
+    Enum for Google Gemini API endpoints.
+
+    Attributes:
+        INIT (str): URL for initializing the Gemini session.
+        GENERATE (str): URL for generating chat responses.
+        ROTATE_COOKIES (str): URL for rotating authentication cookies.
+        UPLOAD (str): URL for uploading files/images.
+    """
     INIT = "https://gemini.google.com/app"
     GENERATE = "https://gemini.google.com/_/BardChatUi/data/assistant.lamda.BardFrontendService/StreamGenerate"
     ROTATE_COOKIES = "https://accounts.google.com/RotateCookies"
     UPLOAD = "https://content-push.googleapis.com/upload"

 class Headers(Enum):
+    """
+    Enum for HTTP headers used in Gemini API requests.
+
+    Attributes:
+        GEMINI (dict): Headers for Gemini chat requests.
+        ROTATE_COOKIES (dict): Headers for rotating cookies.
+        UPLOAD (dict): Headers for file/image upload.
+    """
     GEMINI = {
         "Content-Type": "application/x-www-form-urlencoded;charset=utf-8",
         "Host": "gemini.google.com",
@@ -56,11 +73,19 @@ class Headers(Enum):
     UPLOAD = {"Push-ID": "feeds/mcudyrk2a4khkz"}

 class Model(Enum):
-
+    """
+    Enum for available Gemini model configurations.
+
+    Attributes:
+        model_name (str): Name of the model.
+        model_header (dict): Additional headers required for the model.
+        advanced_only (bool): Whether the model is available only for advanced users.
+    """
+    # Updated model definitions based on reference implementation
     UNSPECIFIED = ("unspecified", {}, False)
     G_2_0_FLASH = (
         "gemini-2.0-flash",
-        {"x-goog-ext-525001261-jspb": '[
+        {"x-goog-ext-525001261-jspb": '[1,null,null,null,"f299729663a2343f"]'},
         False,
     )
     G_2_0_FLASH_THINKING = (
@@ -68,9 +93,14 @@ class Model(Enum):
         {"x-goog-ext-525001261-jspb": '[null,null,null,null,"7ca48d02d802f20a"]'},
         False,
     )
+    G_2_5_FLASH = (
+        "gemini-2.5-flash",
+        {"x-goog-ext-525001261-jspb": '[1,null,null,null,"35609594dbe934d8"]'},
+        False,
+    )
     G_2_5_PRO = (
         "gemini-2.5-pro",
-        {"x-goog-ext-525001261-jspb": '[
+        {"x-goog-ext-525001261-jspb": '[1,null,null,null,"2525e3954d185b3c"]'},
         False,
     )
     G_2_0_EXP_ADVANCED = (
@@ -83,19 +113,34 @@ class Model(Enum):
         {"x-goog-ext-525001261-jspb": '[null,null,null,null,"203e6bb81620bcfe"]'},
         True,
     )
-    G_2_5_FLASH = (
-        "gemini-2.5-flash",
-        {"x-goog-ext-525001261-jspb": '[1,null,null,null,"35609594dbe934d8"]'},
-        False,
-    )

     def __init__(self, name, header, advanced_only):
+        """
+        Initialize a Model enum member.
+
+        Args:
+            name (str): Model name.
+            header (dict): Model-specific headers.
+            advanced_only (bool): If True, model is for advanced users only.
+        """
         self.model_name = name
         self.model_header = header
         self.advanced_only = advanced_only

     @classmethod
     def from_name(cls, name: str):
+        """
+        Get a Model enum member by its model name.
+
+        Args:
+            name (str): Name of the model.
+
+        Returns:
+            Model: Corresponding Model enum member.
+
+        Raises:
+            ValueError: If the model name is not found.
+        """
         for model in cls:
             if model.model_name == name:
                 return model
@@ -106,29 +151,29 @@ class Model(Enum):
 async def upload_file(
     file: Union[bytes, str, Path],
     proxy: Optional[Union[str, Dict[str, str]]] = None,
-    impersonate: str = "chrome110"
+    impersonate: str = "chrome110"
 ) -> str:
     """
-
+    Uploads a file to Google's Gemini server using curl_cffi and returns its identifier.

-
-    file
-
-
-        Proxy URL or dictionary.
-    impersonate: str, optional
-        Browser profile for curl_cffi to impersonate. Defaults to "chrome110".
+    Args:
+        file (bytes | str | Path): File data in bytes or path to the file to be uploaded.
+        proxy (str | dict, optional): Proxy URL or dictionary for the request.
+        impersonate (str, optional): Browser profile for curl_cffi to impersonate. Defaults to "chrome110".

     Returns:
         str: Identifier of the uploaded file.
+
     Raises:
-        HTTPError: If the upload request
+        HTTPError: If the upload request fails.
         RequestException: For other network-related errors.
+        FileNotFoundError: If the file path does not exist.
     """
+    # Handle file input
     if not isinstance(file, bytes):
         file_path = Path(file)
         if not file_path.is_file():
-
+            raise FileNotFoundError(f"File not found at path: {file}")
         with open(file_path, "rb") as f:
             file_content = f.read()
     else:
@@ -146,14 +191,12 @@ async def upload_file(
     async with AsyncSession(
         proxies=proxies_dict,
         impersonate=impersonate,
-        headers=Headers.UPLOAD.value
-        # follow_redirects
+        headers=Headers.UPLOAD.value # Pass headers directly
+        # follow_redirects is handled automatically by curl_cffi
     ) as client:
         response = await client.post(
             url=Endpoint.UPLOAD.value,
-            # headers=Headers.UPLOAD.value, # Headers passed in session
             files={"file": file_content},
-            # follow_redirects=True, # Default
         )
         response.raise_for_status() # Raises HTTPError for bad responses
         return response.text
@@ -169,7 +212,18 @@ async def upload_file(
 #########################################

 def load_cookies(cookie_path: str) -> Tuple[str, str]:
-    """
+    """
+    Loads authentication cookies from a JSON file.
+
+    Args:
+        cookie_path (str): Path to the JSON file containing cookies.
+
+    Returns:
+        tuple[str, str]: Tuple containing __Secure-1PSID and __Secure-1PSIDTS cookie values.
+
+    Raises:
+        Exception: If the file is not found, invalid, or required cookies are missing.
+    """
     try:
         with open(cookie_path, 'r', encoding='utf-8') as file: # Added encoding
             cookies = json.load(file)
@@ -194,6 +248,15 @@ def load_cookies(cookie_path: str) -> Tuple[str, str]:
 class Chatbot:
     """
     Synchronous wrapper for the AsyncChatbot class.
+
+    This class provides a synchronous interface to interact with Google Gemini,
+    handling authentication, conversation management, and message sending.
+
+    Attributes:
+        loop (asyncio.AbstractEventLoop): Event loop for running async tasks.
+        secure_1psid (str): Authentication cookie.
+        secure_1psidts (str): Authentication cookie.
+        async_chatbot (AsyncChatbot): Underlying asynchronous chatbot instance.
     """
     def __init__(
         self,
@@ -237,20 +300,26 @@ class Chatbot:

 class AsyncChatbot:
     """
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    Asynchronous chatbot client for interacting with Google Gemini using curl_cffi.
+
+    This class manages authentication, session state, conversation history,
+    and sending/receiving messages (including images) asynchronously.
+
+    Attributes:
+        headers (dict): HTTP headers for requests.
+        _reqid (int): Request identifier for Gemini API.
+        SNlM0e (str): Session token required for API requests.
+        conversation_id (str): Current conversation ID.
+        response_id (str): Current response ID.
+        choice_id (str): Current choice ID.
+        proxy (str | dict | None): Proxy configuration.
+        proxies_dict (dict | None): Proxy dictionary for curl_cffi.
+        secure_1psid (str): Authentication cookie.
+        secure_1psidts (str): Authentication cookie.
+        session (AsyncSession): curl_cffi session for HTTP requests.
+        timeout (int): Request timeout in seconds.
+        model (Model): Selected Gemini model.
+        impersonate (str): Browser profile for curl_cffi to impersonate.
     """
     __slots__ = [
         "headers",
@@ -304,9 +373,8 @@ class AsyncChatbot:
             cookies={"__Secure-1PSID": secure_1psid, "__Secure-1PSIDTS": secure_1psidts},
             proxies=self.proxies_dict,
             timeout=timeout,
-            impersonate=self.impersonate
-            # verify
-            # http2=True, # Implicitly handled by curl_cffi if possible
+            impersonate=self.impersonate
+            # verify and http2 are handled automatically by curl_cffi
         )
         # No need to set proxies/headers/cookies again, done in constructor

@@ -411,30 +479,41 @@ class AsyncChatbot:

     async def __get_snlm0e(self):
         """Fetches the SNlM0e value required for API requests using curl_cffi."""
-        if not self.secure_1psid
-
+        if not self.secure_1psid:
+            raise ValueError("__Secure-1PSID cookie is required.")

         try:
             # Use the session's get method
             resp = await self.session.get(
                 Endpoint.INIT.value,
-                timeout=self.timeout
-                # follow_redirects
+                timeout=self.timeout # Timeout is already set in session, but can override
+                # follow_redirects is handled automatically by curl_cffi
             )
             resp.raise_for_status() # Raise HTTPError for bad responses (4xx or 5xx)

-            #
+            # Check for authentication issues
+            if "Sign in to continue" in resp.text or "accounts.google.com" in str(resp.url):
+                raise PermissionError("Authentication failed. Cookies might be invalid or expired. Please update them.")
+
+            # Regex to find the SNlM0e value
             snlm0e_match = re.search(r'["\']SNlM0e["\']\s*:\s*["\'](.*?)["\']', resp.text)
             if not snlm0e_match:
                 error_message = "SNlM0e value not found in response."
-                if
-
-                elif resp.status_code == 429:
-                    error_message += " Rate limit likely exceeded."
+                if resp.status_code == 429:
+                    error_message += " Rate limit likely exceeded."
                 else:
-
+                    error_message += f" Response status: {resp.status_code}. Check cookie validity and network."
                 raise ValueError(error_message)

+            # Try to refresh PSIDTS if needed
+            if not self.secure_1psidts and "PSIDTS" not in self.session.cookies:
+                try:
+                    # Attempt to rotate cookies to get a fresh PSIDTS
+                    await self.__rotate_cookies()
+                except Exception as e:
+                    console.log(f"[yellow]Warning: Could not refresh PSIDTS cookie: {e}[/yellow]")
+                    # Continue anyway as some accounts don't need PSIDTS
+
             return snlm0e_match.group(1)

         except Timeout as e: # Catch requests.exceptions.Timeout
@@ -442,10 +521,29 @@ class AsyncChatbot:
         except (RequestException, CurlError) as e: # Catch general request errors and Curl specific errors
             raise ConnectionError(f"Network error while fetching SNlM0e: {e}") from e
         except HTTPError as e: # Catch requests.exceptions.HTTPError
-
-
-
-
+            if e.response.status_code == 401 or e.response.status_code == 403:
+                raise PermissionError(f"Authentication failed (status {e.response.status_code}). Check cookies. {e}") from e
+            else:
+                raise Exception(f"HTTP error {e.response.status_code} while fetching SNlM0e: {e}") from e
+
+    async def __rotate_cookies(self):
+        """Rotates the __Secure-1PSIDTS cookie."""
+        try:
+            response = await self.session.post(
+                Endpoint.ROTATE_COOKIES.value,
+                headers=Headers.ROTATE_COOKIES.value,
+                data='[000,"-0000000000000000000"]',
+                timeout=self.timeout
+            )
+            response.raise_for_status()
+
+            if new_1psidts := response.cookies.get("__Secure-1PSIDTS"):
+                self.secure_1psidts = new_1psidts
+                self.session.cookies.set("__Secure-1PSIDTS", new_1psidts)
+            return new_1psidts
+        except Exception as e:
+            console.log(f"[yellow]Cookie rotation failed: {e}[/yellow]")
+            raise


     async def ask(self, message: str, image: Optional[Union[bytes, str, Path]] = None) -> dict:
@@ -462,14 +560,15 @@ class AsyncChatbot:
             dict: A dictionary containing the response content and metadata.
         """
         if self.SNlM0e is None:
-
+            raise RuntimeError("AsyncChatbot not properly initialized. Call AsyncChatbot.create()")

         params = {
-            "bl": "boq_assistant-bard-web-server_20240625.13_p0",
+            "bl": "boq_assistant-bard-web-server_20240625.13_p0",
            "_reqid": str(self._reqid),
            "rt": "c",
        }

+        # Handle image upload if provided
         image_upload_id = None
         if image:
             try:
@@ -480,106 +579,260 @@ class AsyncChatbot:
                 console.log(f"[red]Error uploading image: {e}[/red]")
                 return {"content": f"Error uploading image: {e}", "error": True}

-        #
-        message_struct = [
-            [message],
-            None,
-            [self.conversation_id, self.response_id, self.choice_id],
-        ]
+        # Prepare message structure
         if image_upload_id:
-
-
-
-
-
+            message_struct = [
+                [message],
+                [[[image_upload_id, 1]]],
+                [self.conversation_id, self.response_id, self.choice_id],
+            ]
+        else:
+            message_struct = [
+                [message],
+                None,
+                [self.conversation_id, self.response_id, self.choice_id],
+            ]

+        # Prepare request data
         data = {
             "f.req": json.dumps([None, json.dumps(message_struct, ensure_ascii=False)], ensure_ascii=False),
             "at": self.SNlM0e,
         }

         try:
-            #
+            # Send request
             resp = await self.session.post(
                 Endpoint.GENERATE.value,
                 params=params,
-                data=data,
+                data=data,
                 timeout=self.timeout,
             )
-            resp.raise_for_status()
+            resp.raise_for_status()

-            #
+            # Process response
             lines = resp.text.splitlines()
-            if len(lines) <
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            if len(lines) < 3:
+                raise ValueError(f"Unexpected response format. Status: {resp.status_code}. Content: {resp.text[:200]}...")
+
+            # Find the line with the response data
+            chat_data_line = None
+            for line in lines:
+                if line.startswith(")]}'"):
+                    chat_data_line = line[4:].strip()
+                    break
+                elif line.startswith("["):
+                    chat_data_line = line
+                    break
+
+            if not chat_data_line:
+                chat_data_line = lines[3] if len(lines) > 3 else lines[-1]
+                if chat_data_line.startswith(")]}'"):
+                    chat_data_line = chat_data_line[4:].strip()
+
+            # Parse the response JSON
+            response_json = json.loads(chat_data_line)
+
+            # Find the main response body
+            body = None
+            body_index = 0
+
+            for part_index, part in enumerate(response_json):
+                try:
+                    if isinstance(part, list) and len(part) > 2:
+                        main_part = json.loads(part[2])
+                        if main_part and len(main_part) > 4 and main_part[4]:
+                            body = main_part
+                            body_index = part_index
+                            break
+                except (IndexError, TypeError, json.JSONDecodeError):
+                    continue
+
+            if not body:
+                return {"content": "Failed to parse response body. No valid data found.", "error": True}
+
+            # Extract data from the response
+            try:
+                # Extract main content
+                content = ""
+                if len(body) > 4 and len(body[4]) > 0 and len(body[4][0]) > 1:
+                    content = body[4][0][1][0] if len(body[4][0][1]) > 0 else ""
+
+                # Extract conversation metadata
+                conversation_id = body[1][0] if len(body) > 1 and len(body[1]) > 0 else self.conversation_id
+                response_id = body[1][1] if len(body) > 1 and len(body[1]) > 1 else self.response_id
+
+                # Extract additional data
+                factualityQueries = body[3] if len(body) > 3 else None
+                textQuery = body[2][0] if len(body) > 2 and body[2] else ""
+
+                # Extract choices
+                choices = []
+                if len(body) > 4:
+                    for candidate in body[4]:
+                        if len(candidate) > 1 and isinstance(candidate[1], list) and len(candidate[1]) > 0:
+                            choices.append({"id": candidate[0], "content": candidate[1][0]})
+
+                choice_id = choices[0]["id"] if choices else self.choice_id
+
+                # Extract images - multiple possible formats
+                images = []
+
+                # Format 1: Regular web images
+                if len(body) > 4 and len(body[4]) > 0 and len(body[4][0]) > 4 and body[4][0][4]:
+                    for img_data in body[4][0][4]:
+                        try:
+                            img_url = img_data[0][0][0]
+                            img_alt = img_data[2] if len(img_data) > 2 else ""
+                            img_title = img_data[1] if len(img_data) > 1 else "[Image]"
+                            images.append({"url": img_url, "alt": img_alt, "title": img_title})
+                        except (IndexError, TypeError):
+                            console.log("[yellow]Warning: Could not parse image data structure (format 1).[/yellow]")
+                            continue
+
+                # Format 2: Generated images in standard location
+                generated_images = []
+                if len(body) > 4 and len(body[4]) > 0 and len(body[4][0]) > 12 and body[4][0][12]:
                     try:
-
-
-
-
+                        # Path 1: Check for images in [12][7][0]
+                        if body[4][0][12][7] and body[4][0][12][7][0]:
+                            # This is the standard path for generated images
+                            for img_index, img_data in enumerate(body[4][0][12][7][0]):
+                                try:
+                                    img_url = img_data[0][3][3]
+                                    img_title = f"[Generated Image {img_index+1}]"
+                                    img_alt = img_data[3][5][0] if len(img_data[3]) > 5 and len(img_data[3][5]) > 0 else ""
+                                    generated_images.append({"url": img_url, "alt": img_alt, "title": img_title})
+                                except (IndexError, TypeError):
+                                    continue
+
+                        # If we found images, but they might be in a different part of the response
+                        if not generated_images:
+                            # Look for image generation data in other response parts
+                            for part_index, part in enumerate(response_json):
+                                if part_index <= body_index:
+                                    continue
+                                try:
+                                    img_part = json.loads(part[2])
+                                    if img_part[4][0][12][7][0]:
+                                        for img_index, img_data in enumerate(img_part[4][0][12][7][0]):
+                                            try:
+                                                img_url = img_data[0][3][3]
+                                                img_title = f"[Generated Image {img_index+1}]"
+                                                img_alt = img_data[3][5][0] if len(img_data[3]) > 5 and len(img_data[3][5]) > 0 else ""
+                                                generated_images.append({"url": img_url, "alt": img_alt, "title": img_title})
+                                            except (IndexError, TypeError):
+                                                continue
+                                        break
+                                except (IndexError, TypeError, json.JSONDecodeError):
+                                    continue
                     except (IndexError, TypeError):
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+                        pass
+
+                # Format 3: Alternative location for generated images
+                if len(generated_images) == 0 and len(body) > 4 and len(body[4]) > 0:
+                    try:
+                        # Try to find images in candidate[4] structure
+                        candidate = body[4][0]
+                        if len(candidate) > 22 and candidate[22]:
+                            # Look for URLs in the candidate[22] field
+                            import re
+                            content = candidate[22][0] if isinstance(candidate[22], list) and len(candidate[22]) > 0 else str(candidate[22])
+                            urls = re.findall(r'https?://[^\s]+', content)
+                            for i, url in enumerate(urls):
+                                # Clean up URL if it ends with punctuation
+                                if url[-1] in ['.', ',', ')', ']', '}', '"', "'"]:
+                                    url = url[:-1]
+                                generated_images.append({
+                                    "url": url,
+                                    "title": f"[Generated Image {i+1}]",
+                                    "alt": ""
+                                })
+                    except (IndexError, TypeError) as e:
+                        console.log(f"[yellow]Warning: Could not parse alternative image structure: {e}[/yellow]")
+
+                # Format 4: Look for image URLs in the text content
+                if len(images) == 0 and len(generated_images) == 0 and content:
+                    try:
+                        import re
+                        # Look for image URLs in the content - try multiple patterns
+
+                        # Pattern 1: Standard image URLs
+                        urls = re.findall(r'(https?://[^\s]+\.(jpg|jpeg|png|gif|webp))', content.lower())
+
+                        # Pattern 2: Google image URLs (which might not have extensions)
+                        google_urls = re.findall(r'(https?://lh\d+\.googleusercontent\.com/[^\s]+)', content)
+
+                        # Pattern 3: General URLs that might be images
+                        general_urls = re.findall(r'(https?://[^\s]+)', content)
+
+                        # Combine all found URLs
+                        all_urls = []
+                        if urls:
+                            all_urls.extend([url_tuple[0] for url_tuple in urls])
+                        if google_urls:
+                            all_urls.extend(google_urls)
+
+                        # Add general URLs only if we didn't find any specific image URLs
+                        if not all_urls and general_urls:
+                            all_urls = general_urls
+
+                        # Process all found URLs
+                        if all_urls:
+                            for i, url in enumerate(all_urls):
+                                # Clean up URL if it ends with punctuation
+                                if url[-1] in ['.', ',', ')', ']', '}', '"', "'"]:
+                                    url = url[:-1]
+                                images.append({
+                                    "url": url,
+                                    "title": f"[Image in Content {i+1}]",
+                                    "alt": ""
+                                })
+                            console.log(f"[green]Found {len(all_urls)} potential image URLs in content.[/green]")
+                    except Exception as e:
+                        console.log(f"[yellow]Warning: Error extracting URLs from content: {e}[/yellow]")
+
+                # Combine all images
+                all_images = images + generated_images
+
+                # Prepare results
+                results = {
+                    "content": content,
+                    "conversation_id": conversation_id,
+                    "response_id": response_id,
+                    "factualityQueries": factualityQueries,
+                    "textQuery": textQuery,
+                    "choices": choices,
+                    "images": all_images,
+                    "error": False,
+                }
+
+                # Update state
+                self.conversation_id = conversation_id
+                self.response_id = response_id
+                self.choice_id = choice_id
+                self._reqid += random.randint(1000, 9000)
+
+                return results
+
+            except (IndexError, TypeError) as e:
+                console.log(f"[red]Error extracting data from response: {e}[/red]")
+                return {"content": f"Error extracting data from response: {e}", "error": True}
+
+        except json.JSONDecodeError as e:
+            console.log(f"[red]Error parsing JSON response: {e}[/red]")
+            return {"content": f"Error parsing JSON response: {e}. Response: {resp.text[:200]}...", "error": True}
+        except Timeout as e:
             console.log(f"[red]Request timed out: {e}[/red]")
             return {"content": f"Request timed out: {e}", "error": True}
-        except (RequestException, CurlError) as e:
+        except (RequestException, CurlError) as e:
             console.log(f"[red]Network error: {e}[/red]")
             return {"content": f"Network error: {e}", "error": True}
-        except HTTPError as e:
-
-
+        except HTTPError as e:
+            console.log(f"[red]HTTP error {e.response.status_code}: {e}[/red]")
+            return {"content": f"HTTP error {e.response.status_code}: {e}", "error": True}
         except Exception as e:
-
-
+            console.log(f"[red]An unexpected error occurred during ask: {e}[/red]", style="bold red")
+            return {"content": f"An unexpected error occurred: {e}", "error": True}


 #########################################
@@ -588,24 +841,20 @@ class AsyncChatbot:

 class Image(BaseModel):
     """
-
-
-
-
-    title
-
-
-
-    proxy: Optional[Union[str, Dict[str, str]]] = None # Allow string or dict proxy
-        Proxy used when saving the image.
-    impersonate: str = "chrome110" # Added impersonate for saving
-        Browser profile for curl_cffi to impersonate.
+    Represents a single image object returned from Gemini.
+
+    Attributes:
+        url (str): URL of the image.
+        title (str): Title of the image (default: "[Image]").
+        alt (str): Optional description of the image.
+        proxy (str | dict | None): Proxy used when saving the image.
+        impersonate (str): Browser profile for curl_cffi to impersonate.
     """
     url: str
     title: str = "[Image]"
     alt: str = ""
     proxy: Optional[Union[str, Dict[str, str]]] = None
-    impersonate: str = "chrome110"
+    impersonate: str = "chrome110"

     def __str__(self):
         return f"{self.title}({self.url}) - {self.alt}"
@@ -617,17 +866,17 @@ class Image(BaseModel):

     async def save(
         self,
-        path: str = "
+        path: str = "downloaded_images",
         filename: Optional[str] = None,
         cookies: Optional[dict] = None,
         verbose: bool = False,
-        skip_invalid_filename: bool =
+        skip_invalid_filename: bool = True,
     ) -> Optional[str]:
         """
         Save the image to disk using curl_cffi.
         Parameters:
             path: str, optional
-                Directory to save the image (default "
+                Directory to save the image (default "downloaded_images").
             filename: str, optional
                 Filename to use; if not provided, inferred from URL.
             cookies: dict, optional
@@ -643,32 +892,38 @@ class Image(BaseModel):
             RequestException/CurlError for other network errors.
             IOError if file writing fails.
         """
-        #
+        # Generate filename from URL if not provided
         if not filename:
-
-
-
-
-
-
-
-
-
-
+            try:
+                from urllib.parse import urlparse, unquote
+                parsed_url = urlparse(self.url)
+                base_filename = os.path.basename(unquote(parsed_url.path))
+                # Remove invalid characters for filenames
+                safe_filename = re.sub(r'[<>:"/\\|?*]', '_', base_filename)
+                if safe_filename and len(safe_filename) > 0:
+                    filename = safe_filename
+                else:
+                    filename = f"image_{random.randint(1000, 9999)}.jpg"
+            except Exception:
+                filename = f"image_{random.randint(1000, 9999)}.jpg"

+        # Validate filename length
         try:
             _ = Path(filename)
             max_len = 255
             if len(filename) > max_len:
-
-
+                name, ext = os.path.splitext(filename)
+                filename = name[:max_len - len(ext) - 1] + ext
         except (OSError, ValueError):
-            if verbose:
+            if verbose:
+                console.log(f"[yellow]Invalid filename generated: {filename}[/yellow]")
             if skip_invalid_filename:
-                if verbose:
+                if verbose:
+                    console.log("[yellow]Skipping save due to invalid filename.[/yellow]")
                 return None
             filename = f"image_{random.randint(1000, 9999)}.jpg"
-            if verbose:
+            if verbose:
+                console.log(f"[yellow]Using fallback filename: {filename}[/yellow]")

         # Prepare proxy dictionary for curl_cffi
         proxies_dict = None
@@ -680,31 +935,35 @@ class Image(BaseModel):
         try:
             # Use AsyncSession from curl_cffi
             async with AsyncSession(
-                follow_redirects=True, # Default
                 cookies=cookies,
                 proxies=proxies_dict,
-                impersonate=self.impersonate
+                impersonate=self.impersonate
+                # follow_redirects is handled automatically by curl_cffi
             ) as client:
                 if verbose:
                     console.log(f"Attempting to download image from: {self.url}")
+
                 response = await client.get(self.url)
-                response.raise_for_status()
+                response.raise_for_status()

+                # Check content type
                 content_type = response.headers.get("content-type", "").lower()
-                if "image" not in content_type:
+                if "image" not in content_type and verbose:
                     console.log(f"[yellow]Warning: Content type is '{content_type}', not an image. Saving anyway.[/yellow]")

+                # Create directory and save file
                 dest_path = Path(path)
                 dest_path.mkdir(parents=True, exist_ok=True)
                 dest = dest_path / filename

-                #
+                # Write image data to file
                 dest.write_bytes(response.content)
+
                 if verbose:
                     console.log(f"Image saved successfully as {dest.resolve()}")
+
                 return str(dest.resolve())

-        # Update exception handling
         except HTTPError as e:
             console.log(f"[red]Error downloading image {self.url}: {e.response.status_code} {e}[/red]")
             raise
@@ -715,24 +974,25 @@ class Image(BaseModel):
             console.log(f"[red]Error writing image file to {dest}: {e}[/red]")
             raise
         except Exception as e:
-
-
+            console.log(f"[red]An unexpected error occurred during image save: {e}[/red]")
+            raise


 class WebImage(Image):
     """
-
+    Represents an image retrieved from web search results.
+
     Returned when asking Gemini to "SEND an image of [something]".
     """
     pass

 class GeneratedImage(Image):
     """
-
-
-
-
-    from the GeminiClient/Chatbot instance.
+    Represents an image generated by Google's AI image generator (e.g., ImageFX).
+
+    Attributes:
+        cookies (dict[str, str]): Cookies required for accessing the generated image URL,
+            typically from the GeminiClient/Chatbot instance.
     """
     cookies: Dict[str, str]

@@ -764,145 +1024,3 @@ class GeneratedImage(Image):

         # Pass the required cookies and other args (like impersonate) to the parent save method
         return await super().save(cookies=self.cookies, **kwargs)
-
-#########################################
-# Main usage demonstration
-#########################################
-
-async def main_async():
-    """Asynchronous main function for demonstration."""
-    cookies_file = "cookies.json"
-    impersonate_profile = "chrome110" # Example browser profile
-
-    bot = None
-    try:
-        bot = await AsyncChatbot.create(
-            *load_cookies(cookies_file),
-            model=Model.G_2_5_PRO,
-            impersonate=impersonate_profile, # Pass impersonate setting
-            # proxy="socks5://127.0.0.1:9050" # Example SOCKS proxy
-        )
-        console.log(f"[green]AsyncChatbot initialized successfully (impersonating {impersonate_profile}).[/green]")
-    except FileNotFoundError:
-        console.log(f"[bold red]Error: Cookie file '{cookies_file}' not found.[/bold red]")
-        console.log("Please export cookies from your browser after logging into Google Gemini and save as cookies.json.")
-        return
-    except Exception as e:
-        console.log(f"[bold red]Error initializing AsyncChatbot: {e}[/bold red]")
-        return
-
-    # --- Sample text query ---
-    text_message = "Explain the concept of asynchronous programming in Python in simple terms."
-    console.log(f"\n[cyan]Sending text query:[/cyan] '{text_message}'")
-    try:
-        response_text = await bot.ask(text_message)
-        if response_text.get("error"):
-            console.log(f"[red]Error in text response: {response_text.get('content')}[/red]")
-        else:
-            console.log("[blue]Text Response:[/blue]")
-            console.print(Markdown(response_text.get("content", "No content received.")))
-    except Exception as e:
-        console.log(f"[red]Error during text query: {e}[/red]")
-
-    # --- Image Generation Query ---
-    image_prompt = "Generate an artistic image of a cat sitting on a crescent moon, starry night background."
-    console.log(f"\n[cyan]Sending image generation query:[/cyan] '{image_prompt}'")
-    try:
-        response_image = await bot.ask(image_prompt)
-
-        if response_image.get("error"):
-            console.log(f"[red]Error in image response: {response_image.get('content')}[/red]")
-        else:
-            returned_images = response_image.get("images", [])
-            if not returned_images:
-                console.log("[yellow]No direct image data returned. Response content:[/yellow]")
-                console.print(Markdown(response_image.get("content", "No content received.")))
-            else:
-                console.log(f"[green]Received {len(returned_images)} image(s).[/green]")
-                for i, img_data in enumerate(returned_images):
-                    console.log(f"Processing image {i+1}: URL: {img_data.get('url')}")
-                    try:
-                        # Pass impersonate setting when creating Image object
-                        generated_img = GeneratedImage(
-                            url=img_data.get('url'),
-                            title=img_data.get('title', f"Generated Image {i+1}"),
-                            alt=img_data.get('alt', ""),
-                            cookies={"__Secure-1PSID": bot.secure_1psid, "__Secure-1PSIDTS": bot.secure_1psidts},
-                            proxy=bot.proxy, # Pass proxy settings from bot
-                            impersonate=bot.impersonate # Pass impersonate setting from bot
-                        )
-                        save_path = "downloaded_images"
-                        saved_file = await generated_img.save(path=save_path, verbose=True, skip_invalid_filename=True)
-                        if saved_file:
-                            console.log(f"[blue]Image {i+1} saved to: {saved_file}[/blue]")
-                        else:
-                            console.log(f"[yellow]Image {i+1} skipped due to filename issue.[/yellow]")
-                    except Exception as img_e:
-                        console.log(f"[red]Error saving image {i+1}: {img_e}[/red]")
-
-    except Exception as e:
-        console.log(f"[red]Error during image generation query: {e}[/red]")
-
-    # --- Image Understanding Query ---
-    local_image_path = "path/to/your/local/image.jpg" # <--- CHANGE THIS PATH
-    image_understanding_prompt = "Describe what you see in this image."
-
-    if Path(local_image_path).is_file():
-        console.log(f"\n[cyan]Sending image understanding query with image:[/cyan] '{local_image_path}'")
-        console.log(f"[cyan]Prompt:[/cyan] '{image_understanding_prompt}'")
-        try:
-            response_understanding = await bot.ask(image_understanding_prompt, image=local_image_path)
-            if response_understanding.get("error"):
-                console.log(f"[red]Error in image understanding response: {response_understanding.get('content')}[/red]")
-            else:
-                console.log("[blue]Image Understanding Response:[/blue]")
-                console.print(Markdown(response_understanding.get("content", "No content received.")))
-        except Exception as e:
-            console.log(f"[red]Error during image understanding query: {e}[/red]")
-    else:
-        console.log(f"\n[yellow]Skipping image understanding query: File not found at '{local_image_path}'.[/yellow]")
-        console.log("[yellow]Please update 'local_image_path' in the script to test this feature.[/yellow]")
-
-
-    # --- Save/Load Conversation (logic remains the same) ---
-    conversation_file = "conversations.json"
-    conversation_name = f"Demo Conversation - {datetime.now().strftime('%Y%m%d_%H%M')}"
-    console.log(f"\n[cyan]Saving conversation as:[/cyan] '{conversation_name}' to '{conversation_file}'")
-    try:
-        await bot.save_conversation(conversation_file, conversation_name)
-        console.log(f"[green]Conversation saved successfully.[/green]")
-    except Exception as e:
-        console.log(f"[red]Error saving conversation: {e}[/red]")
-
-    console.log(f"\n[cyan]Attempting to load conversation:[/cyan] '{conversation_name}' from '{conversation_file}'")
-    try:
-        loaded = await bot.load_conversation(conversation_file, conversation_name)
-        if loaded:
-            console.log("[green]Conversation loaded successfully. Sending a follow-up query.[/green]")
-            follow_up_message = "What was the first question I asked in this session?"
-            console.log(f"[cyan]Sending follow-up query:[/cyan] '{follow_up_message}'")
-            response_follow_up = await bot.ask(follow_up_message)
-            if response_follow_up.get("error"):
-                console.log(f"[red]Error in follow-up response: {response_follow_up.get('content')}[/red]")
-            else:
-                console.log("[blue]Follow-up Response:[/blue]")
-                console.print(Markdown(response_follow_up.get("content", "No content received.")))
-        else:
-            console.log("[yellow]Could not load the conversation.[/yellow]")
-    except Exception as e:
-        console.log(f"[red]Error loading or using loaded conversation: {e}[/red]")
-
-    # --- Cleanup ---
-    if bot and bot.session:
-        await bot.session.close() # Use close() for AsyncSession
-        console.log("\n[grey]HTTP session closed.[/grey]")
-
-
-if __name__ == "__main__":
-    try:
-        asyncio.run(main_async())
-    except KeyboardInterrupt:
-        console.log("\n[yellow]Operation cancelled by user.[/yellow]")
-    except Exception as main_e:
-        console.log(f"[bold red]An error occurred in the main execution: {main_e}[/bold red]")
-