webscout 8.2.9__py3-none-any.whl → 2026.1.19__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- webscout/AIauto.py +524 -251
- webscout/AIbase.py +247 -319
- webscout/AIutel.py +68 -703
- webscout/Bard.py +1072 -1026
- webscout/Extra/GitToolkit/__init__.py +10 -10
- webscout/Extra/GitToolkit/gitapi/__init__.py +20 -12
- webscout/Extra/GitToolkit/gitapi/gist.py +142 -0
- webscout/Extra/GitToolkit/gitapi/organization.py +91 -0
- webscout/Extra/GitToolkit/gitapi/repository.py +308 -195
- webscout/Extra/GitToolkit/gitapi/search.py +162 -0
- webscout/Extra/GitToolkit/gitapi/trending.py +236 -0
- webscout/Extra/GitToolkit/gitapi/user.py +128 -96
- webscout/Extra/GitToolkit/gitapi/utils.py +82 -62
- webscout/Extra/YTToolkit/README.md +443 -375
- webscout/Extra/YTToolkit/YTdownloader.py +953 -957
- webscout/Extra/YTToolkit/__init__.py +3 -3
- webscout/Extra/YTToolkit/transcriber.py +595 -476
- webscout/Extra/YTToolkit/ytapi/README.md +230 -44
- webscout/Extra/YTToolkit/ytapi/__init__.py +22 -6
- webscout/Extra/YTToolkit/ytapi/captions.py +190 -0
- webscout/Extra/YTToolkit/ytapi/channel.py +302 -307
- webscout/Extra/YTToolkit/ytapi/errors.py +13 -13
- webscout/Extra/YTToolkit/ytapi/extras.py +178 -118
- webscout/Extra/YTToolkit/ytapi/hashtag.py +120 -0
- webscout/Extra/YTToolkit/ytapi/https.py +89 -88
- webscout/Extra/YTToolkit/ytapi/patterns.py +61 -61
- webscout/Extra/YTToolkit/ytapi/playlist.py +59 -59
- webscout/Extra/YTToolkit/ytapi/pool.py +8 -8
- webscout/Extra/YTToolkit/ytapi/query.py +143 -40
- webscout/Extra/YTToolkit/ytapi/shorts.py +122 -0
- webscout/Extra/YTToolkit/ytapi/stream.py +68 -63
- webscout/Extra/YTToolkit/ytapi/suggestions.py +97 -0
- webscout/Extra/YTToolkit/ytapi/utils.py +66 -62
- webscout/Extra/YTToolkit/ytapi/video.py +403 -232
- webscout/Extra/__init__.py +2 -3
- webscout/Extra/gguf.py +1298 -684
- webscout/Extra/tempmail/README.md +487 -487
- webscout/Extra/tempmail/__init__.py +28 -28
- webscout/Extra/tempmail/async_utils.py +143 -141
- webscout/Extra/tempmail/base.py +172 -161
- webscout/Extra/tempmail/cli.py +191 -187
- webscout/Extra/tempmail/emailnator.py +88 -84
- webscout/Extra/tempmail/mail_tm.py +378 -361
- webscout/Extra/tempmail/temp_mail_io.py +304 -292
- webscout/Extra/weather.py +196 -194
- webscout/Extra/weather_ascii.py +17 -15
- webscout/Provider/AISEARCH/PERPLEXED_search.py +175 -0
- webscout/Provider/AISEARCH/Perplexity.py +292 -333
- webscout/Provider/AISEARCH/README.md +106 -279
- webscout/Provider/AISEARCH/__init__.py +16 -9
- webscout/Provider/AISEARCH/brave_search.py +298 -0
- webscout/Provider/AISEARCH/iask_search.py +357 -410
- webscout/Provider/AISEARCH/monica_search.py +200 -220
- webscout/Provider/AISEARCH/webpilotai_search.py +242 -255
- webscout/Provider/Algion.py +413 -0
- webscout/Provider/Andi.py +74 -69
- webscout/Provider/Apriel.py +313 -0
- webscout/Provider/Ayle.py +323 -0
- webscout/Provider/ChatSandbox.py +329 -342
- webscout/Provider/ClaudeOnline.py +365 -0
- webscout/Provider/Cohere.py +232 -208
- webscout/Provider/DeepAI.py +367 -0
- webscout/Provider/Deepinfra.py +467 -340
- webscout/Provider/EssentialAI.py +217 -0
- webscout/Provider/ExaAI.py +274 -261
- webscout/Provider/Gemini.py +175 -169
- webscout/Provider/GithubChat.py +385 -369
- webscout/Provider/Gradient.py +286 -0
- webscout/Provider/Groq.py +556 -801
- webscout/Provider/HadadXYZ.py +323 -0
- webscout/Provider/HeckAI.py +392 -375
- webscout/Provider/HuggingFace.py +387 -0
- webscout/Provider/IBM.py +340 -0
- webscout/Provider/Jadve.py +317 -291
- webscout/Provider/K2Think.py +306 -0
- webscout/Provider/Koboldai.py +221 -384
- webscout/Provider/Netwrck.py +273 -270
- webscout/Provider/Nvidia.py +310 -0
- webscout/Provider/OPENAI/DeepAI.py +489 -0
- webscout/Provider/OPENAI/K2Think.py +423 -0
- webscout/Provider/OPENAI/PI.py +463 -0
- webscout/Provider/OPENAI/README.md +890 -952
- webscout/Provider/OPENAI/TogetherAI.py +405 -0
- webscout/Provider/OPENAI/TwoAI.py +255 -357
- webscout/Provider/OPENAI/__init__.py +148 -40
- webscout/Provider/OPENAI/ai4chat.py +348 -293
- webscout/Provider/OPENAI/akashgpt.py +436 -0
- webscout/Provider/OPENAI/algion.py +303 -0
- webscout/Provider/OPENAI/{exachat.py → ayle.py} +365 -444
- webscout/Provider/OPENAI/base.py +253 -249
- webscout/Provider/OPENAI/cerebras.py +296 -0
- webscout/Provider/OPENAI/chatgpt.py +870 -556
- webscout/Provider/OPENAI/chatsandbox.py +233 -173
- webscout/Provider/OPENAI/deepinfra.py +403 -322
- webscout/Provider/OPENAI/e2b.py +2370 -1414
- webscout/Provider/OPENAI/elmo.py +278 -0
- webscout/Provider/OPENAI/exaai.py +452 -417
- webscout/Provider/OPENAI/freeassist.py +446 -0
- webscout/Provider/OPENAI/gradient.py +448 -0
- webscout/Provider/OPENAI/groq.py +380 -364
- webscout/Provider/OPENAI/hadadxyz.py +292 -0
- webscout/Provider/OPENAI/heckai.py +333 -308
- webscout/Provider/OPENAI/huggingface.py +321 -0
- webscout/Provider/OPENAI/ibm.py +425 -0
- webscout/Provider/OPENAI/llmchat.py +253 -0
- webscout/Provider/OPENAI/llmchatco.py +378 -335
- webscout/Provider/OPENAI/meta.py +541 -0
- webscout/Provider/OPENAI/netwrck.py +374 -357
- webscout/Provider/OPENAI/nvidia.py +317 -0
- webscout/Provider/OPENAI/oivscode.py +348 -287
- webscout/Provider/OPENAI/openrouter.py +328 -0
- webscout/Provider/OPENAI/pydantic_imports.py +1 -172
- webscout/Provider/OPENAI/sambanova.py +397 -0
- webscout/Provider/OPENAI/sonus.py +305 -304
- webscout/Provider/OPENAI/textpollinations.py +370 -339
- webscout/Provider/OPENAI/toolbaz.py +375 -413
- webscout/Provider/OPENAI/typefully.py +419 -355
- webscout/Provider/OPENAI/typliai.py +279 -0
- webscout/Provider/OPENAI/utils.py +314 -318
- webscout/Provider/OPENAI/wisecat.py +359 -387
- webscout/Provider/OPENAI/writecream.py +185 -163
- webscout/Provider/OPENAI/x0gpt.py +462 -365
- webscout/Provider/OPENAI/zenmux.py +380 -0
- webscout/Provider/OpenRouter.py +386 -0
- webscout/Provider/Openai.py +337 -496
- webscout/Provider/PI.py +443 -429
- webscout/Provider/QwenLM.py +346 -254
- webscout/Provider/STT/__init__.py +28 -0
- webscout/Provider/STT/base.py +303 -0
- webscout/Provider/STT/elevenlabs.py +264 -0
- webscout/Provider/Sambanova.py +317 -0
- webscout/Provider/TTI/README.md +69 -82
- webscout/Provider/TTI/__init__.py +37 -7
- webscout/Provider/TTI/base.py +147 -64
- webscout/Provider/TTI/claudeonline.py +393 -0
- webscout/Provider/TTI/magicstudio.py +292 -201
- webscout/Provider/TTI/miragic.py +180 -0
- webscout/Provider/TTI/pollinations.py +331 -221
- webscout/Provider/TTI/together.py +334 -0
- webscout/Provider/TTI/utils.py +14 -11
- webscout/Provider/TTS/README.md +186 -192
- webscout/Provider/TTS/__init__.py +43 -10
- webscout/Provider/TTS/base.py +523 -159
- webscout/Provider/TTS/deepgram.py +286 -156
- webscout/Provider/TTS/elevenlabs.py +189 -111
- webscout/Provider/TTS/freetts.py +218 -0
- webscout/Provider/TTS/murfai.py +288 -113
- webscout/Provider/TTS/openai_fm.py +364 -129
- webscout/Provider/TTS/parler.py +203 -111
- webscout/Provider/TTS/qwen.py +334 -0
- webscout/Provider/TTS/sherpa.py +286 -0
- webscout/Provider/TTS/speechma.py +693 -580
- webscout/Provider/TTS/streamElements.py +275 -333
- webscout/Provider/TTS/utils.py +280 -280
- webscout/Provider/TextPollinationsAI.py +331 -308
- webscout/Provider/TogetherAI.py +450 -0
- webscout/Provider/TwoAI.py +309 -475
- webscout/Provider/TypliAI.py +311 -305
- webscout/Provider/UNFINISHED/ChatHub.py +219 -209
- webscout/Provider/{OPENAI/glider.py → UNFINISHED/ChutesAI.py} +331 -326
- webscout/Provider/{GizAI.py → UNFINISHED/GizAI.py} +300 -295
- webscout/Provider/{Marcus.py → UNFINISHED/Marcus.py} +218 -198
- webscout/Provider/UNFINISHED/Qodo.py +481 -0
- webscout/Provider/{MCPCore.py → UNFINISHED/XenAI.py} +330 -315
- webscout/Provider/UNFINISHED/Youchat.py +347 -330
- webscout/Provider/UNFINISHED/aihumanizer.py +41 -0
- webscout/Provider/UNFINISHED/grammerchecker.py +37 -0
- webscout/Provider/UNFINISHED/liner.py +342 -0
- webscout/Provider/UNFINISHED/liner_api_request.py +246 -263
- webscout/Provider/{samurai.py → UNFINISHED/samurai.py} +231 -224
- webscout/Provider/WiseCat.py +256 -233
- webscout/Provider/WrDoChat.py +390 -370
- webscout/Provider/__init__.py +115 -174
- webscout/Provider/ai4chat.py +181 -174
- webscout/Provider/akashgpt.py +330 -335
- webscout/Provider/cerebras.py +397 -290
- webscout/Provider/cleeai.py +236 -213
- webscout/Provider/elmo.py +291 -283
- webscout/Provider/geminiapi.py +343 -208
- webscout/Provider/julius.py +245 -223
- webscout/Provider/learnfastai.py +333 -325
- webscout/Provider/llama3mitril.py +230 -215
- webscout/Provider/llmchat.py +308 -258
- webscout/Provider/llmchatco.py +321 -306
- webscout/Provider/meta.py +996 -801
- webscout/Provider/oivscode.py +332 -309
- webscout/Provider/searchchat.py +316 -292
- webscout/Provider/sonus.py +264 -258
- webscout/Provider/toolbaz.py +359 -353
- webscout/Provider/turboseek.py +332 -266
- webscout/Provider/typefully.py +262 -202
- webscout/Provider/x0gpt.py +332 -299
- webscout/__init__.py +31 -39
- webscout/__main__.py +5 -5
- webscout/cli.py +585 -524
- webscout/client.py +1497 -70
- webscout/conversation.py +140 -436
- webscout/exceptions.py +383 -362
- webscout/litagent/__init__.py +29 -29
- webscout/litagent/agent.py +492 -455
- webscout/litagent/constants.py +60 -60
- webscout/models.py +505 -181
- webscout/optimizers.py +74 -420
- webscout/prompt_manager.py +376 -288
- webscout/sanitize.py +1514 -0
- webscout/scout/README.md +452 -404
- webscout/scout/__init__.py +8 -8
- webscout/scout/core/__init__.py +7 -7
- webscout/scout/core/crawler.py +330 -210
- webscout/scout/core/scout.py +800 -607
- webscout/scout/core/search_result.py +51 -96
- webscout/scout/core/text_analyzer.py +64 -63
- webscout/scout/core/text_utils.py +412 -277
- webscout/scout/core/web_analyzer.py +54 -52
- webscout/scout/element.py +872 -478
- webscout/scout/parsers/__init__.py +70 -69
- webscout/scout/parsers/html5lib_parser.py +182 -172
- webscout/scout/parsers/html_parser.py +238 -236
- webscout/scout/parsers/lxml_parser.py +203 -178
- webscout/scout/utils.py +38 -37
- webscout/search/__init__.py +47 -0
- webscout/search/base.py +201 -0
- webscout/search/bing_main.py +45 -0
- webscout/search/brave_main.py +92 -0
- webscout/search/duckduckgo_main.py +57 -0
- webscout/search/engines/__init__.py +127 -0
- webscout/search/engines/bing/__init__.py +15 -0
- webscout/search/engines/bing/base.py +35 -0
- webscout/search/engines/bing/images.py +114 -0
- webscout/search/engines/bing/news.py +96 -0
- webscout/search/engines/bing/suggestions.py +36 -0
- webscout/search/engines/bing/text.py +109 -0
- webscout/search/engines/brave/__init__.py +19 -0
- webscout/search/engines/brave/base.py +47 -0
- webscout/search/engines/brave/images.py +213 -0
- webscout/search/engines/brave/news.py +353 -0
- webscout/search/engines/brave/suggestions.py +318 -0
- webscout/search/engines/brave/text.py +167 -0
- webscout/search/engines/brave/videos.py +364 -0
- webscout/search/engines/duckduckgo/__init__.py +25 -0
- webscout/search/engines/duckduckgo/answers.py +80 -0
- webscout/search/engines/duckduckgo/base.py +189 -0
- webscout/search/engines/duckduckgo/images.py +100 -0
- webscout/search/engines/duckduckgo/maps.py +183 -0
- webscout/search/engines/duckduckgo/news.py +70 -0
- webscout/search/engines/duckduckgo/suggestions.py +22 -0
- webscout/search/engines/duckduckgo/text.py +221 -0
- webscout/search/engines/duckduckgo/translate.py +48 -0
- webscout/search/engines/duckduckgo/videos.py +80 -0
- webscout/search/engines/duckduckgo/weather.py +84 -0
- webscout/search/engines/mojeek.py +61 -0
- webscout/search/engines/wikipedia.py +77 -0
- webscout/search/engines/yahoo/__init__.py +41 -0
- webscout/search/engines/yahoo/answers.py +19 -0
- webscout/search/engines/yahoo/base.py +34 -0
- webscout/search/engines/yahoo/images.py +323 -0
- webscout/search/engines/yahoo/maps.py +19 -0
- webscout/search/engines/yahoo/news.py +258 -0
- webscout/search/engines/yahoo/suggestions.py +140 -0
- webscout/search/engines/yahoo/text.py +273 -0
- webscout/search/engines/yahoo/translate.py +19 -0
- webscout/search/engines/yahoo/videos.py +302 -0
- webscout/search/engines/yahoo/weather.py +220 -0
- webscout/search/engines/yandex.py +67 -0
- webscout/search/engines/yep/__init__.py +13 -0
- webscout/search/engines/yep/base.py +34 -0
- webscout/search/engines/yep/images.py +101 -0
- webscout/search/engines/yep/suggestions.py +38 -0
- webscout/search/engines/yep/text.py +99 -0
- webscout/search/http_client.py +172 -0
- webscout/search/results.py +141 -0
- webscout/search/yahoo_main.py +57 -0
- webscout/search/yep_main.py +48 -0
- webscout/server/__init__.py +48 -0
- webscout/server/config.py +78 -0
- webscout/server/exceptions.py +69 -0
- webscout/server/providers.py +286 -0
- webscout/server/request_models.py +131 -0
- webscout/server/request_processing.py +404 -0
- webscout/server/routes.py +642 -0
- webscout/server/server.py +351 -0
- webscout/server/ui_templates.py +1171 -0
- webscout/swiftcli/__init__.py +79 -95
- webscout/swiftcli/core/__init__.py +7 -7
- webscout/swiftcli/core/cli.py +574 -297
- webscout/swiftcli/core/context.py +98 -104
- webscout/swiftcli/core/group.py +268 -241
- webscout/swiftcli/decorators/__init__.py +28 -28
- webscout/swiftcli/decorators/command.py +243 -221
- webscout/swiftcli/decorators/options.py +247 -220
- webscout/swiftcli/decorators/output.py +392 -252
- webscout/swiftcli/exceptions.py +21 -21
- webscout/swiftcli/plugins/__init__.py +9 -9
- webscout/swiftcli/plugins/base.py +134 -135
- webscout/swiftcli/plugins/manager.py +269 -269
- webscout/swiftcli/utils/__init__.py +58 -59
- webscout/swiftcli/utils/formatting.py +251 -252
- webscout/swiftcli/utils/parsing.py +368 -267
- webscout/update_checker.py +280 -136
- webscout/utils.py +28 -14
- webscout/version.py +2 -1
- webscout/version.py.bak +3 -0
- webscout/zeroart/__init__.py +218 -135
- webscout/zeroart/base.py +70 -66
- webscout/zeroart/effects.py +155 -101
- webscout/zeroart/fonts.py +1799 -1239
- webscout-2026.1.19.dist-info/METADATA +638 -0
- webscout-2026.1.19.dist-info/RECORD +312 -0
- {webscout-8.2.9.dist-info → webscout-2026.1.19.dist-info}/WHEEL +1 -1
- {webscout-8.2.9.dist-info → webscout-2026.1.19.dist-info}/entry_points.txt +1 -1
- webscout/DWEBS.py +0 -520
- webscout/Extra/Act.md +0 -309
- webscout/Extra/GitToolkit/gitapi/README.md +0 -110
- webscout/Extra/autocoder/__init__.py +0 -9
- webscout/Extra/autocoder/autocoder.py +0 -1105
- webscout/Extra/autocoder/autocoder_utiles.py +0 -332
- webscout/Extra/gguf.md +0 -430
- webscout/Extra/weather.md +0 -281
- webscout/Litlogger/README.md +0 -10
- webscout/Litlogger/__init__.py +0 -15
- webscout/Litlogger/formats.py +0 -4
- webscout/Litlogger/handlers.py +0 -103
- webscout/Litlogger/levels.py +0 -13
- webscout/Litlogger/logger.py +0 -92
- webscout/Provider/AI21.py +0 -177
- webscout/Provider/AISEARCH/DeepFind.py +0 -254
- webscout/Provider/AISEARCH/felo_search.py +0 -202
- webscout/Provider/AISEARCH/genspark_search.py +0 -324
- webscout/Provider/AISEARCH/hika_search.py +0 -186
- webscout/Provider/AISEARCH/scira_search.py +0 -298
- webscout/Provider/Aitopia.py +0 -316
- webscout/Provider/AllenAI.py +0 -440
- webscout/Provider/Blackboxai.py +0 -791
- webscout/Provider/ChatGPTClone.py +0 -237
- webscout/Provider/ChatGPTGratis.py +0 -194
- webscout/Provider/Cloudflare.py +0 -324
- webscout/Provider/ExaChat.py +0 -358
- webscout/Provider/Flowith.py +0 -217
- webscout/Provider/FreeGemini.py +0 -250
- webscout/Provider/Glider.py +0 -225
- webscout/Provider/HF_space/__init__.py +0 -0
- webscout/Provider/HF_space/qwen_qwen2.py +0 -206
- webscout/Provider/HuggingFaceChat.py +0 -469
- webscout/Provider/Hunyuan.py +0 -283
- webscout/Provider/LambdaChat.py +0 -411
- webscout/Provider/Llama3.py +0 -259
- webscout/Provider/Nemotron.py +0 -218
- webscout/Provider/OLLAMA.py +0 -396
- webscout/Provider/OPENAI/BLACKBOXAI.py +0 -766
- webscout/Provider/OPENAI/Cloudflare.py +0 -378
- webscout/Provider/OPENAI/FreeGemini.py +0 -283
- webscout/Provider/OPENAI/NEMOTRON.py +0 -232
- webscout/Provider/OPENAI/Qwen3.py +0 -283
- webscout/Provider/OPENAI/api.py +0 -969
- webscout/Provider/OPENAI/c4ai.py +0 -373
- webscout/Provider/OPENAI/chatgptclone.py +0 -494
- webscout/Provider/OPENAI/copilot.py +0 -242
- webscout/Provider/OPENAI/flowith.py +0 -162
- webscout/Provider/OPENAI/freeaichat.py +0 -359
- webscout/Provider/OPENAI/mcpcore.py +0 -389
- webscout/Provider/OPENAI/multichat.py +0 -376
- webscout/Provider/OPENAI/opkfc.py +0 -496
- webscout/Provider/OPENAI/scirachat.py +0 -477
- webscout/Provider/OPENAI/standardinput.py +0 -433
- webscout/Provider/OPENAI/typegpt.py +0 -364
- webscout/Provider/OPENAI/uncovrAI.py +0 -463
- webscout/Provider/OPENAI/venice.py +0 -431
- webscout/Provider/OPENAI/yep.py +0 -382
- webscout/Provider/OpenGPT.py +0 -209
- webscout/Provider/Perplexitylabs.py +0 -415
- webscout/Provider/Reka.py +0 -214
- webscout/Provider/StandardInput.py +0 -290
- webscout/Provider/TTI/aiarta.py +0 -365
- webscout/Provider/TTI/artbit.py +0 -0
- webscout/Provider/TTI/fastflux.py +0 -200
- webscout/Provider/TTI/piclumen.py +0 -203
- webscout/Provider/TTI/pixelmuse.py +0 -225
- webscout/Provider/TTS/gesserit.py +0 -128
- webscout/Provider/TTS/sthir.py +0 -94
- webscout/Provider/TeachAnything.py +0 -229
- webscout/Provider/UNFINISHED/puterjs.py +0 -635
- webscout/Provider/UNFINISHED/test_lmarena.py +0 -119
- webscout/Provider/Venice.py +0 -258
- webscout/Provider/VercelAI.py +0 -253
- webscout/Provider/Writecream.py +0 -246
- webscout/Provider/WritingMate.py +0 -269
- webscout/Provider/asksteve.py +0 -220
- webscout/Provider/chatglm.py +0 -215
- webscout/Provider/copilot.py +0 -425
- webscout/Provider/freeaichat.py +0 -285
- webscout/Provider/granite.py +0 -235
- webscout/Provider/hermes.py +0 -266
- webscout/Provider/koala.py +0 -170
- webscout/Provider/lmarena.py +0 -198
- webscout/Provider/multichat.py +0 -364
- webscout/Provider/scira_chat.py +0 -299
- webscout/Provider/scnet.py +0 -243
- webscout/Provider/talkai.py +0 -194
- webscout/Provider/typegpt.py +0 -289
- webscout/Provider/uncovr.py +0 -368
- webscout/Provider/yep.py +0 -389
- webscout/litagent/Readme.md +0 -276
- webscout/litprinter/__init__.py +0 -59
- webscout/swiftcli/Readme.md +0 -323
- webscout/tempid.py +0 -128
- webscout/webscout_search.py +0 -1184
- webscout/webscout_search_async.py +0 -654
- webscout/yep_search.py +0 -347
- webscout/zeroart/README.md +0 -89
- webscout-8.2.9.dist-info/METADATA +0 -1033
- webscout-8.2.9.dist-info/RECORD +0 -289
- {webscout-8.2.9.dist-info → webscout-2026.1.19.dist-info}/licenses/LICENSE.md +0 -0
- {webscout-8.2.9.dist-info → webscout-2026.1.19.dist-info}/top_level.txt +0 -0
webscout/DWEBS.py
DELETED
|
@@ -1,520 +0,0 @@
|
|
|
1
|
-
"""
|
|
2
|
-
DWEBS - A Google search library with advanced features
|
|
3
|
-
"""
|
|
4
|
-
import random
|
|
5
|
-
from time import sleep
|
|
6
|
-
from webscout.scout import Scout
|
|
7
|
-
from curl_cffi.requests import Session
|
|
8
|
-
from urllib.parse import unquote, urlencode
|
|
9
|
-
from typing import List, Dict, Optional, Union, Iterator, Any
|
|
10
|
-
from concurrent.futures import ThreadPoolExecutor
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
class SearchResult:
|
|
14
|
-
"""Class to represent a search result with metadata."""
|
|
15
|
-
|
|
16
|
-
def __init__(self, url: str, title: str, description: str):
|
|
17
|
-
"""
|
|
18
|
-
Initialize a search result.
|
|
19
|
-
|
|
20
|
-
Args:
|
|
21
|
-
url: The URL of the search result
|
|
22
|
-
title: The title of the search result
|
|
23
|
-
description: The description/snippet of the search result
|
|
24
|
-
"""
|
|
25
|
-
self.url = url
|
|
26
|
-
self.title = title
|
|
27
|
-
self.description = description
|
|
28
|
-
# Additional metadata that can be populated
|
|
29
|
-
self.metadata: Dict[str, Any] = {}
|
|
30
|
-
|
|
31
|
-
def __repr__(self) -> str:
|
|
32
|
-
"""Return string representation of search result."""
|
|
33
|
-
return f"SearchResult(url={self.url}, title={self.title}, description={self.description})"
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
class GoogleSearch:
|
|
37
|
-
"""Google search implementation with configurable parameters and advanced features."""
|
|
38
|
-
|
|
39
|
-
_executor: ThreadPoolExecutor = ThreadPoolExecutor()
|
|
40
|
-
|
|
41
|
-
def __init__(
|
|
42
|
-
self,
|
|
43
|
-
timeout: int = 10,
|
|
44
|
-
proxies: Optional[Dict[str, str]] = None,
|
|
45
|
-
verify: bool = True,
|
|
46
|
-
lang: str = "en",
|
|
47
|
-
sleep_interval: float = 0.0,
|
|
48
|
-
impersonate: str = "chrome110"
|
|
49
|
-
):
|
|
50
|
-
"""
|
|
51
|
-
Initialize GoogleSearch with custom settings.
|
|
52
|
-
|
|
53
|
-
Args:
|
|
54
|
-
timeout: Request timeout in seconds
|
|
55
|
-
proxies: Proxy configuration for requests
|
|
56
|
-
verify: Whether to verify SSL certificates
|
|
57
|
-
lang: Search language
|
|
58
|
-
sleep_interval: Sleep time between pagination requests
|
|
59
|
-
impersonate: Browser profile for curl_cffi. Defaults to "chrome110".
|
|
60
|
-
"""
|
|
61
|
-
self.timeout = timeout # Keep timeout for potential non-session uses or reference
|
|
62
|
-
self.proxies = proxies if proxies else {}
|
|
63
|
-
self.verify = verify
|
|
64
|
-
self.lang = lang
|
|
65
|
-
self.sleep_interval = sleep_interval
|
|
66
|
-
self.base_url = "https://www.google.com/search"
|
|
67
|
-
# Initialize curl_cffi session
|
|
68
|
-
self.session = Session(
|
|
69
|
-
proxies=self.proxies,
|
|
70
|
-
verify=self.verify,
|
|
71
|
-
timeout=self.timeout,
|
|
72
|
-
impersonate=impersonate
|
|
73
|
-
)
|
|
74
|
-
# Set common headers for the session
|
|
75
|
-
self.session.headers = {
|
|
76
|
-
"User-Agent": self._get_useragent(),
|
|
77
|
-
"Accept-Language": self.lang,
|
|
78
|
-
"Accept-Encoding": "gzip, deflate, br",
|
|
79
|
-
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8",
|
|
80
|
-
}
|
|
81
|
-
# Set default cookies for the session
|
|
82
|
-
self.session.cookies.update({
|
|
83
|
-
'CONSENT': 'PENDING+987',
|
|
84
|
-
'SOCS': 'CAESHAgBEhIaAB',
|
|
85
|
-
})
|
|
86
|
-
|
|
87
|
-
def _get_useragent(self) -> str:
|
|
88
|
-
"""
|
|
89
|
-
Generate a random user agent string.
|
|
90
|
-
|
|
91
|
-
Returns:
|
|
92
|
-
Random user agent string
|
|
93
|
-
"""
|
|
94
|
-
lynx_version = f"Lynx/{random.randint(2, 3)}.{random.randint(8, 9)}.{random.randint(0, 2)}"
|
|
95
|
-
libwww_version = f"libwww-FM/{random.randint(2, 3)}.{random.randint(13, 15)}"
|
|
96
|
-
ssl_mm_version = f"SSL-MM/{random.randint(1, 2)}.{random.randint(3, 5)}"
|
|
97
|
-
openssl_version = f"OpenSSL/{random.randint(1, 3)}.{random.randint(0, 4)}.{random.randint(0, 9)}"
|
|
98
|
-
return f"{lynx_version} {libwww_version} {ssl_mm_version} {openssl_version}"
|
|
99
|
-
|
|
100
|
-
def _make_request(self, term: str, results: int, start: int = 0, search_type: str = None) -> str:
|
|
101
|
-
"""
|
|
102
|
-
Make a request to Google search.
|
|
103
|
-
|
|
104
|
-
Args:
|
|
105
|
-
term: Search query
|
|
106
|
-
results: Number of results to request
|
|
107
|
-
start: Start position for pagination
|
|
108
|
-
search_type: Type of search ('', 'nws', 'isch')
|
|
109
|
-
|
|
110
|
-
Returns:
|
|
111
|
-
HTML response content
|
|
112
|
-
"""
|
|
113
|
-
params = {
|
|
114
|
-
"q": term,
|
|
115
|
-
"num": results + 2, # Request slightly more than needed
|
|
116
|
-
"hl": self.lang,
|
|
117
|
-
"start": start,
|
|
118
|
-
}
|
|
119
|
-
|
|
120
|
-
# Add search type if specified
|
|
121
|
-
if search_type:
|
|
122
|
-
params["tbm"] = search_type
|
|
123
|
-
|
|
124
|
-
try:
|
|
125
|
-
# Use the curl_cffi session
|
|
126
|
-
resp = self.session.get(
|
|
127
|
-
url=self.base_url,
|
|
128
|
-
params=params,
|
|
129
|
-
# Headers and cookies are now part of the session
|
|
130
|
-
# proxies, timeout, verify are handled by the session
|
|
131
|
-
)
|
|
132
|
-
resp.raise_for_status()
|
|
133
|
-
return resp.text
|
|
134
|
-
except Exception as e:
|
|
135
|
-
# Provide more specific error context if possible
|
|
136
|
-
if hasattr(e, 'response') and e.response is not None:
|
|
137
|
-
raise RuntimeError(f"Search request failed with status {e.response.status_code}: {str(e)}")
|
|
138
|
-
else:
|
|
139
|
-
raise RuntimeError(f"Search request failed: {str(e)}")
|
|
140
|
-
|
|
141
|
-
def _extract_url(self, raw_link: str) -> Optional[str]:
|
|
142
|
-
"""
|
|
143
|
-
Extract actual URL from Google redirect URL.
|
|
144
|
-
|
|
145
|
-
Args:
|
|
146
|
-
raw_link: Raw link from Google search
|
|
147
|
-
|
|
148
|
-
Returns:
|
|
149
|
-
Actual URL or None if invalid
|
|
150
|
-
"""
|
|
151
|
-
if not raw_link:
|
|
152
|
-
return None
|
|
153
|
-
|
|
154
|
-
if raw_link.startswith("/url?"):
|
|
155
|
-
try:
|
|
156
|
-
link = unquote(raw_link.split("&")[0].replace("/url?q=", ""))
|
|
157
|
-
return link
|
|
158
|
-
except Exception:
|
|
159
|
-
return None
|
|
160
|
-
elif raw_link.startswith("http"):
|
|
161
|
-
return unquote(raw_link)
|
|
162
|
-
|
|
163
|
-
return None
|
|
164
|
-
|
|
165
|
-
def _is_valid_result(self, link: str, fetched_links: set, unique: bool) -> bool:
|
|
166
|
-
"""
|
|
167
|
-
Check if search result is valid.
|
|
168
|
-
|
|
169
|
-
Args:
|
|
170
|
-
link: URL to check
|
|
171
|
-
fetched_links: Set of already fetched links
|
|
172
|
-
unique: Whether to filter duplicate links
|
|
173
|
-
|
|
174
|
-
Returns:
|
|
175
|
-
Boolean indicating if result is valid
|
|
176
|
-
"""
|
|
177
|
-
if any(x in link for x in ["google.", "/search?", "webcache."]):
|
|
178
|
-
return False
|
|
179
|
-
|
|
180
|
-
if link in fetched_links and unique:
|
|
181
|
-
return False
|
|
182
|
-
|
|
183
|
-
return True
|
|
184
|
-
|
|
185
|
-
def _parse_search_results(
|
|
186
|
-
self,
|
|
187
|
-
html: str,
|
|
188
|
-
num_results: int,
|
|
189
|
-
fetched_links: set,
|
|
190
|
-
unique: bool
|
|
191
|
-
) -> List[SearchResult]:
|
|
192
|
-
"""
|
|
193
|
-
Parse search results from HTML.
|
|
194
|
-
|
|
195
|
-
Args:
|
|
196
|
-
html: HTML content to parse
|
|
197
|
-
num_results: Maximum number of results to return
|
|
198
|
-
fetched_links: Set of already fetched links
|
|
199
|
-
unique: Filter duplicate links
|
|
200
|
-
|
|
201
|
-
Returns:
|
|
202
|
-
List of SearchResult objects
|
|
203
|
-
"""
|
|
204
|
-
results = []
|
|
205
|
-
soup = Scout(html, features="html.parser")
|
|
206
|
-
result_blocks = soup.find_all("div", class_="ezO2md")
|
|
207
|
-
|
|
208
|
-
if not result_blocks:
|
|
209
|
-
# Try alternative class patterns if the main one doesn't match
|
|
210
|
-
result_blocks = soup.find_all("div", attrs={"class": lambda c: c and "g" in c.split()})
|
|
211
|
-
|
|
212
|
-
for result in result_blocks:
|
|
213
|
-
# Find the link - looking for various potential Google result classes
|
|
214
|
-
link_tag = result.find("a", class_=["fuLhoc", "ZWRArf"])
|
|
215
|
-
if not link_tag:
|
|
216
|
-
link_tag = result.find("a")
|
|
217
|
-
if not link_tag:
|
|
218
|
-
continue
|
|
219
|
-
|
|
220
|
-
raw_link = link_tag.get("href", "")
|
|
221
|
-
link = self._extract_url(raw_link)
|
|
222
|
-
|
|
223
|
-
if not link:
|
|
224
|
-
continue
|
|
225
|
-
|
|
226
|
-
if not self._is_valid_result(link, fetched_links, unique):
|
|
227
|
-
continue
|
|
228
|
-
|
|
229
|
-
# Get title - it's the text content of the link tag for these results
|
|
230
|
-
title = link_tag.get_text(strip=True)
|
|
231
|
-
if not title:
|
|
232
|
-
continue
|
|
233
|
-
|
|
234
|
-
# Get description - it's in a span with class FrIlee or potentially other classes
|
|
235
|
-
description_tag = result.find("span", class_="FrIlee")
|
|
236
|
-
if not description_tag:
|
|
237
|
-
description_tag = result.find(["div", "span"], class_=lambda c: c and any(x in c for x in ["snippet", "description", "VwiC3b"]))
|
|
238
|
-
|
|
239
|
-
description = description_tag.get_text(strip=True) if description_tag else ""
|
|
240
|
-
|
|
241
|
-
# Create result object
|
|
242
|
-
search_result = SearchResult(link, title, description)
|
|
243
|
-
|
|
244
|
-
# Add extra metadata if available
|
|
245
|
-
citation = result.find("cite")
|
|
246
|
-
if citation:
|
|
247
|
-
search_result.metadata["source"] = citation.get_text(strip=True)
|
|
248
|
-
|
|
249
|
-
timestamp = result.find("span", class_=lambda c: c and "ZE5qJf" in c)
|
|
250
|
-
if timestamp:
|
|
251
|
-
search_result.metadata["date"] = timestamp.get_text(strip=True)
|
|
252
|
-
|
|
253
|
-
fetched_links.add(link)
|
|
254
|
-
results.append(search_result)
|
|
255
|
-
|
|
256
|
-
if len(results) >= num_results:
|
|
257
|
-
break
|
|
258
|
-
|
|
259
|
-
return results
|
|
260
|
-
|
|
261
|
-
def text(
|
|
262
|
-
self,
|
|
263
|
-
keywords: str,
|
|
264
|
-
region: str = None,
|
|
265
|
-
safesearch: str = "moderate",
|
|
266
|
-
max_results: int = 10,
|
|
267
|
-
start_num: int = 0,
|
|
268
|
-
unique: bool = True
|
|
269
|
-
) -> List[SearchResult]:
|
|
270
|
-
"""
|
|
271
|
-
Search Google for web results.
|
|
272
|
-
|
|
273
|
-
Args:
|
|
274
|
-
keywords: Search query
|
|
275
|
-
region: Region for search results (ISO country code)
|
|
276
|
-
safesearch: SafeSearch setting ("on", "moderate", "off")
|
|
277
|
-
max_results: Maximum number of results to return
|
|
278
|
-
start_num: Starting position for pagination
|
|
279
|
-
unique: Filter duplicate results
|
|
280
|
-
|
|
281
|
-
Returns:
|
|
282
|
-
List of SearchResult objects with search results
|
|
283
|
-
"""
|
|
284
|
-
if not keywords:
|
|
285
|
-
raise ValueError("Search keywords cannot be empty")
|
|
286
|
-
|
|
287
|
-
# Map safesearch values to Google's safe parameter
|
|
288
|
-
safe_map = {
|
|
289
|
-
"on": "active",
|
|
290
|
-
"moderate": "moderate",
|
|
291
|
-
"off": "off"
|
|
292
|
-
}
|
|
293
|
-
safe = safe_map.get(safesearch.lower(), "moderate")
|
|
294
|
-
|
|
295
|
-
# Keep track of unique results
|
|
296
|
-
fetched_results = []
|
|
297
|
-
fetched_links = set()
|
|
298
|
-
start = start_num
|
|
299
|
-
|
|
300
|
-
while len(fetched_results) < max_results:
|
|
301
|
-
# Add safe search parameter to the request
|
|
302
|
-
# Note: This modifies the session params for this specific request type
|
|
303
|
-
# It might be better to pass params directly to session.get if mixing search types
|
|
304
|
-
term_with_safe = f"{keywords} safe:{safe}"
|
|
305
|
-
if region and region.lower() != "all":
|
|
306
|
-
term_with_safe += f" location:{region}" # Example of adding region, adjust as needed
|
|
307
|
-
|
|
308
|
-
response_html = self._make_request(
|
|
309
|
-
term=term_with_safe, # Pass term with safe search
|
|
310
|
-
results=max_results - len(fetched_results),
|
|
311
|
-
start=start
|
|
312
|
-
)
|
|
313
|
-
|
|
314
|
-
results = self._parse_search_results(
|
|
315
|
-
html=response_html,
|
|
316
|
-
num_results=max_results - len(fetched_results),
|
|
317
|
-
fetched_links=fetched_links,
|
|
318
|
-
unique=unique
|
|
319
|
-
)
|
|
320
|
-
|
|
321
|
-
if not results:
|
|
322
|
-
break
|
|
323
|
-
|
|
324
|
-
fetched_results.extend(results)
|
|
325
|
-
|
|
326
|
-
if len(fetched_results) >= max_results:
|
|
327
|
-
break
|
|
328
|
-
|
|
329
|
-
start += 10 # Google typically uses increments of 10
|
|
330
|
-
sleep(self.sleep_interval)
|
|
331
|
-
|
|
332
|
-
return fetched_results[:max_results]
|
|
333
|
-
|
|
334
|
-
def news(
    self,
    keywords: str,
    region: str = None,
    safesearch: str = "moderate",
    max_results: int = 10
) -> List[SearchResult]:
    """Search Google News for news results.

    Args:
        keywords: Search query.
        region: Region for search results (ISO country code).
        safesearch: SafeSearch setting ("on", "moderate", "off").
        max_results: Maximum number of results to return.

    Returns:
        List of SearchResult objects with news results.

    Raises:
        ValueError: If ``keywords`` is empty.
    """
    if not keywords:
        raise ValueError("Search keywords cannot be empty")

    # Translate our safesearch vocabulary into Google's "safe" values,
    # defaulting to "moderate" for anything unrecognized.
    safe_level = {"on": "active", "moderate": "moderate", "off": "off"}.get(
        safesearch.lower(), "moderate"
    )

    # Embed the safe-search hint (and optional region) directly in the
    # query term, mirroring how the text search builds its request.
    query = f"{keywords} safe:{safe_level}"
    if region and region.lower() != "all":
        query += f" location:{region}"  # Example

    # Link-dedup bookkeeping shared with the parser.
    seen_links = set()

    page_html = self._make_request(
        term=query,
        results=max_results,
        search_type="nws",  # "nws" switches Google to the news vertical
    )

    parsed = self._parse_search_results(
        html=page_html,
        num_results=max_results,
        fetched_links=seen_links,
        unique=True  # News results are generally unique per request
    )

    return parsed[:max_results]
|
|
386
|
-
|
|
387
|
-
def suggestions(self, query: str, region: str = None) -> List[str]:
    """Get search suggestions for a query term.

    Args:
        query: Search query.
        region: Region for suggestions (ISO country code).

    Returns:
        List of search suggestions (empty list on any request/parse error).

    Raises:
        ValueError: If ``query`` is empty.
    """
    if not query:
        raise ValueError("Search query cannot be empty")

    try:
        params = {
            "client": "firefox",
            "q": query,
        }

        # Add region if specified
        if region and region.lower() != "all":
            params["gl"] = region

        # BUG FIX: the old code encoded `params` into the URL with
        # urlencode() *and* passed them again via `params=`, so every
        # query parameter appeared twice in the final request. Send the
        # bare endpoint and let the session encode the params once.
        url = "https://www.google.com/complete/search"

        # Use a simpler header set for the suggestions API
        headers = {
            "User-Agent": self._get_useragent(),
            "Accept": "application/json, text/javascript, */*",
            "Accept-Language": self.lang,
        }

        # Use session.get but override headers for this specific request
        # (timeout and verify are handled by the session itself).
        response = self.session.get(
            url=url,
            headers=headers,
            params=params,
        )
        response.raise_for_status()

        # Response format is typically:
        # ["original query", ["suggestion1", "suggestion2", ...]]
        data = response.json()
        if isinstance(data, list) and len(data) > 1 and isinstance(data[1], list):
            return data[1]
        return []

    except Exception as e:
        # Best-effort API: report the failure and return an empty list
        # instead of raising, so callers never have to guard this.
        if hasattr(e, 'response') and e.response is not None:
            print(f"Suggestions request failed with status {e.response.status_code}: {str(e)}")
        else:
            print(f"Suggestions request failed: {str(e)}")
        return []
|
|
444
|
-
|
|
445
|
-
|
|
446
|
-
# Legacy function support for backward compatibility
|
|
447
|
-
def search(term, num_results=10, lang="en", proxy=None, advanced=False, sleep_interval=0, timeout=5, safe="active", ssl_verify=True, region=None, start_num=0, unique=False, impersonate="chrome110"):  # Added impersonate
    """Legacy function for backward compatibility.

    Thin wrapper that builds a GoogleSearch client, runs a text search,
    and (unless ``advanced`` is set) flattens the results to bare URLs.
    """
    proxy_config = {"https": proxy, "http": proxy} if proxy else None

    client = GoogleSearch(
        timeout=timeout,
        proxies=proxy_config,
        verify=ssl_verify,
        lang=lang,
        sleep_interval=sleep_interval,
        impersonate=impersonate,  # Pass impersonate
    )

    # Translate the legacy "safe" vocabulary into the new safesearch values.
    legacy_to_safesearch = {
        "active": "on",
        "moderate": "moderate",
        "off": "off",
    }

    hits = client.text(
        keywords=term,
        region=region,
        safesearch=legacy_to_safesearch.get(safe, "moderate"),
        max_results=num_results,
        start_num=start_num,
        unique=unique,
    )

    # Advanced mode keeps the rich result objects; legacy mode only URLs.
    return hits if advanced else [hit.url for hit in hits]
|
|
479
|
-
|
|
480
|
-
|
|
481
|
-
if __name__ == "__main__":
    from rich import print

    def _show(results):
        # Pretty-print a list of SearchResult objects, one divider per entry.
        for result in results:
            print(f"Title: {result.title}")
            print(f"URL: {result.url}")
            print(f"Description: {result.description}")
            print("---")

    google = GoogleSearch(
        timeout=10,   # Optional: Set custom timeout
        proxies=None, # Optional: Use proxies
        verify=True   # Optional: SSL verification
    )

    # Text Search
    print("TEXT SEARCH RESULTS:")
    _show(google.text(
        keywords="Python programming",
        region="us",            # Optional: Region for results
        safesearch="moderate",  # Optional: "on", "moderate", "off"
        max_results=3           # Optional: Limit number of results
    ))

    # News Search
    print("\nNEWS SEARCH RESULTS:")
    _show(google.news(
        keywords="artificial intelligence",
        region="us",
        safesearch="moderate",
        max_results=2
    ))

    # Search Suggestions
    print("\nSEARCH SUGGESTIONS:")
    print(google.suggestions("how to"))
|