webscout 8.2.2__py3-none-any.whl → 2026.1.19__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (483)
  1. webscout/AIauto.py +524 -143
  2. webscout/AIbase.py +247 -123
  3. webscout/AIutel.py +68 -132
  4. webscout/Bard.py +1072 -535
  5. webscout/Extra/GitToolkit/__init__.py +2 -2
  6. webscout/Extra/GitToolkit/gitapi/__init__.py +20 -12
  7. webscout/Extra/GitToolkit/gitapi/gist.py +142 -0
  8. webscout/Extra/GitToolkit/gitapi/organization.py +91 -0
  9. webscout/Extra/GitToolkit/gitapi/repository.py +308 -195
  10. webscout/Extra/GitToolkit/gitapi/search.py +162 -0
  11. webscout/Extra/GitToolkit/gitapi/trending.py +236 -0
  12. webscout/Extra/GitToolkit/gitapi/user.py +128 -96
  13. webscout/Extra/GitToolkit/gitapi/utils.py +82 -62
  14. webscout/Extra/YTToolkit/README.md +443 -0
  15. webscout/Extra/YTToolkit/YTdownloader.py +953 -957
  16. webscout/Extra/YTToolkit/__init__.py +3 -3
  17. webscout/Extra/YTToolkit/transcriber.py +595 -476
  18. webscout/Extra/YTToolkit/ytapi/README.md +230 -0
  19. webscout/Extra/YTToolkit/ytapi/__init__.py +22 -6
  20. webscout/Extra/YTToolkit/ytapi/captions.py +190 -0
  21. webscout/Extra/YTToolkit/ytapi/channel.py +302 -307
  22. webscout/Extra/YTToolkit/ytapi/errors.py +13 -13
  23. webscout/Extra/YTToolkit/ytapi/extras.py +178 -45
  24. webscout/Extra/YTToolkit/ytapi/hashtag.py +120 -0
  25. webscout/Extra/YTToolkit/ytapi/https.py +89 -88
  26. webscout/Extra/YTToolkit/ytapi/patterns.py +61 -61
  27. webscout/Extra/YTToolkit/ytapi/playlist.py +59 -59
  28. webscout/Extra/YTToolkit/ytapi/pool.py +8 -8
  29. webscout/Extra/YTToolkit/ytapi/query.py +143 -40
  30. webscout/Extra/YTToolkit/ytapi/shorts.py +122 -0
  31. webscout/Extra/YTToolkit/ytapi/stream.py +68 -63
  32. webscout/Extra/YTToolkit/ytapi/suggestions.py +97 -0
  33. webscout/Extra/YTToolkit/ytapi/utils.py +66 -62
  34. webscout/Extra/YTToolkit/ytapi/video.py +189 -18
  35. webscout/Extra/__init__.py +2 -3
  36. webscout/Extra/gguf.py +1298 -682
  37. webscout/Extra/tempmail/README.md +488 -0
  38. webscout/Extra/tempmail/__init__.py +28 -28
  39. webscout/Extra/tempmail/async_utils.py +143 -141
  40. webscout/Extra/tempmail/base.py +172 -161
  41. webscout/Extra/tempmail/cli.py +191 -187
  42. webscout/Extra/tempmail/emailnator.py +88 -84
  43. webscout/Extra/tempmail/mail_tm.py +378 -361
  44. webscout/Extra/tempmail/temp_mail_io.py +304 -292
  45. webscout/Extra/weather.py +196 -194
  46. webscout/Extra/weather_ascii.py +17 -15
  47. webscout/Provider/AISEARCH/PERPLEXED_search.py +175 -0
  48. webscout/Provider/AISEARCH/Perplexity.py +237 -304
  49. webscout/Provider/AISEARCH/README.md +106 -0
  50. webscout/Provider/AISEARCH/__init__.py +16 -10
  51. webscout/Provider/AISEARCH/brave_search.py +298 -0
  52. webscout/Provider/AISEARCH/iask_search.py +130 -209
  53. webscout/Provider/AISEARCH/monica_search.py +200 -246
  54. webscout/Provider/AISEARCH/webpilotai_search.py +242 -281
  55. webscout/Provider/Algion.py +413 -0
  56. webscout/Provider/Andi.py +74 -69
  57. webscout/Provider/Apriel.py +313 -0
  58. webscout/Provider/Ayle.py +323 -0
  59. webscout/Provider/ChatSandbox.py +329 -0
  60. webscout/Provider/ClaudeOnline.py +365 -0
  61. webscout/Provider/Cohere.py +232 -208
  62. webscout/Provider/DeepAI.py +367 -0
  63. webscout/Provider/Deepinfra.py +343 -173
  64. webscout/Provider/EssentialAI.py +217 -0
  65. webscout/Provider/ExaAI.py +274 -261
  66. webscout/Provider/Gemini.py +60 -54
  67. webscout/Provider/GithubChat.py +385 -367
  68. webscout/Provider/Gradient.py +286 -0
  69. webscout/Provider/Groq.py +556 -670
  70. webscout/Provider/HadadXYZ.py +323 -0
  71. webscout/Provider/HeckAI.py +392 -233
  72. webscout/Provider/HuggingFace.py +387 -0
  73. webscout/Provider/IBM.py +340 -0
  74. webscout/Provider/Jadve.py +317 -266
  75. webscout/Provider/K2Think.py +306 -0
  76. webscout/Provider/Koboldai.py +221 -381
  77. webscout/Provider/Netwrck.py +273 -228
  78. webscout/Provider/Nvidia.py +310 -0
  79. webscout/Provider/OPENAI/DeepAI.py +489 -0
  80. webscout/Provider/OPENAI/K2Think.py +423 -0
  81. webscout/Provider/OPENAI/PI.py +463 -0
  82. webscout/Provider/OPENAI/README.md +890 -0
  83. webscout/Provider/OPENAI/TogetherAI.py +405 -0
  84. webscout/Provider/OPENAI/TwoAI.py +255 -0
  85. webscout/Provider/OPENAI/__init__.py +148 -25
  86. webscout/Provider/OPENAI/ai4chat.py +348 -0
  87. webscout/Provider/OPENAI/akashgpt.py +436 -0
  88. webscout/Provider/OPENAI/algion.py +303 -0
  89. webscout/Provider/OPENAI/ayle.py +365 -0
  90. webscout/Provider/OPENAI/base.py +253 -46
  91. webscout/Provider/OPENAI/cerebras.py +296 -0
  92. webscout/Provider/OPENAI/chatgpt.py +514 -193
  93. webscout/Provider/OPENAI/chatsandbox.py +233 -0
  94. webscout/Provider/OPENAI/deepinfra.py +403 -272
  95. webscout/Provider/OPENAI/e2b.py +2370 -1350
  96. webscout/Provider/OPENAI/elmo.py +278 -0
  97. webscout/Provider/OPENAI/exaai.py +186 -138
  98. webscout/Provider/OPENAI/freeassist.py +446 -0
  99. webscout/Provider/OPENAI/gradient.py +448 -0
  100. webscout/Provider/OPENAI/groq.py +380 -0
  101. webscout/Provider/OPENAI/hadadxyz.py +292 -0
  102. webscout/Provider/OPENAI/heckai.py +100 -104
  103. webscout/Provider/OPENAI/huggingface.py +321 -0
  104. webscout/Provider/OPENAI/ibm.py +425 -0
  105. webscout/Provider/OPENAI/llmchat.py +253 -0
  106. webscout/Provider/OPENAI/llmchatco.py +378 -327
  107. webscout/Provider/OPENAI/meta.py +541 -0
  108. webscout/Provider/OPENAI/netwrck.py +110 -84
  109. webscout/Provider/OPENAI/nvidia.py +317 -0
  110. webscout/Provider/OPENAI/oivscode.py +348 -0
  111. webscout/Provider/OPENAI/openrouter.py +328 -0
  112. webscout/Provider/OPENAI/pydantic_imports.py +1 -0
  113. webscout/Provider/OPENAI/sambanova.py +397 -0
  114. webscout/Provider/OPENAI/sonus.py +126 -115
  115. webscout/Provider/OPENAI/textpollinations.py +218 -133
  116. webscout/Provider/OPENAI/toolbaz.py +136 -166
  117. webscout/Provider/OPENAI/typefully.py +419 -0
  118. webscout/Provider/OPENAI/typliai.py +279 -0
  119. webscout/Provider/OPENAI/utils.py +314 -211
  120. webscout/Provider/OPENAI/wisecat.py +103 -125
  121. webscout/Provider/OPENAI/writecream.py +185 -156
  122. webscout/Provider/OPENAI/x0gpt.py +227 -136
  123. webscout/Provider/OPENAI/zenmux.py +380 -0
  124. webscout/Provider/OpenRouter.py +386 -0
  125. webscout/Provider/Openai.py +337 -496
  126. webscout/Provider/PI.py +443 -344
  127. webscout/Provider/QwenLM.py +346 -254
  128. webscout/Provider/STT/__init__.py +28 -0
  129. webscout/Provider/STT/base.py +303 -0
  130. webscout/Provider/STT/elevenlabs.py +264 -0
  131. webscout/Provider/Sambanova.py +317 -0
  132. webscout/Provider/TTI/README.md +69 -0
  133. webscout/Provider/TTI/__init__.py +37 -12
  134. webscout/Provider/TTI/base.py +147 -0
  135. webscout/Provider/TTI/claudeonline.py +393 -0
  136. webscout/Provider/TTI/magicstudio.py +292 -0
  137. webscout/Provider/TTI/miragic.py +180 -0
  138. webscout/Provider/TTI/pollinations.py +331 -0
  139. webscout/Provider/TTI/together.py +334 -0
  140. webscout/Provider/TTI/utils.py +14 -0
  141. webscout/Provider/TTS/README.md +186 -0
  142. webscout/Provider/TTS/__init__.py +43 -7
  143. webscout/Provider/TTS/base.py +523 -0
  144. webscout/Provider/TTS/deepgram.py +286 -156
  145. webscout/Provider/TTS/elevenlabs.py +189 -111
  146. webscout/Provider/TTS/freetts.py +218 -0
  147. webscout/Provider/TTS/murfai.py +288 -113
  148. webscout/Provider/TTS/openai_fm.py +364 -0
  149. webscout/Provider/TTS/parler.py +203 -111
  150. webscout/Provider/TTS/qwen.py +334 -0
  151. webscout/Provider/TTS/sherpa.py +286 -0
  152. webscout/Provider/TTS/speechma.py +693 -180
  153. webscout/Provider/TTS/streamElements.py +275 -333
  154. webscout/Provider/TTS/utils.py +280 -280
  155. webscout/Provider/TextPollinationsAI.py +221 -121
  156. webscout/Provider/TogetherAI.py +450 -0
  157. webscout/Provider/TwoAI.py +309 -199
  158. webscout/Provider/TypliAI.py +311 -0
  159. webscout/Provider/UNFINISHED/ChatHub.py +219 -0
  160. webscout/Provider/{OPENAI/glider.py → UNFINISHED/ChutesAI.py} +160 -145
  161. webscout/Provider/UNFINISHED/GizAI.py +300 -0
  162. webscout/Provider/UNFINISHED/Marcus.py +218 -0
  163. webscout/Provider/UNFINISHED/Qodo.py +481 -0
  164. webscout/Provider/UNFINISHED/XenAI.py +330 -0
  165. webscout/Provider/{Youchat.py → UNFINISHED/Youchat.py} +64 -47
  166. webscout/Provider/UNFINISHED/aihumanizer.py +41 -0
  167. webscout/Provider/UNFINISHED/grammerchecker.py +37 -0
  168. webscout/Provider/UNFINISHED/liner.py +342 -0
  169. webscout/Provider/UNFINISHED/liner_api_request.py +246 -0
  170. webscout/Provider/UNFINISHED/samurai.py +231 -0
  171. webscout/Provider/WiseCat.py +256 -196
  172. webscout/Provider/WrDoChat.py +390 -0
  173. webscout/Provider/__init__.py +115 -198
  174. webscout/Provider/ai4chat.py +181 -202
  175. webscout/Provider/akashgpt.py +330 -342
  176. webscout/Provider/cerebras.py +397 -242
  177. webscout/Provider/cleeai.py +236 -213
  178. webscout/Provider/elmo.py +291 -234
  179. webscout/Provider/geminiapi.py +343 -208
  180. webscout/Provider/julius.py +245 -223
  181. webscout/Provider/learnfastai.py +333 -266
  182. webscout/Provider/llama3mitril.py +230 -180
  183. webscout/Provider/llmchat.py +308 -213
  184. webscout/Provider/llmchatco.py +321 -311
  185. webscout/Provider/meta.py +996 -794
  186. webscout/Provider/oivscode.py +332 -0
  187. webscout/Provider/searchchat.py +316 -293
  188. webscout/Provider/sonus.py +264 -208
  189. webscout/Provider/toolbaz.py +359 -320
  190. webscout/Provider/turboseek.py +332 -219
  191. webscout/Provider/typefully.py +262 -280
  192. webscout/Provider/x0gpt.py +332 -256
  193. webscout/__init__.py +31 -38
  194. webscout/__main__.py +5 -5
  195. webscout/cli.py +585 -293
  196. webscout/client.py +1497 -0
  197. webscout/conversation.py +140 -565
  198. webscout/exceptions.py +383 -339
  199. webscout/litagent/__init__.py +29 -29
  200. webscout/litagent/agent.py +492 -455
  201. webscout/litagent/constants.py +60 -60
  202. webscout/models.py +505 -181
  203. webscout/optimizers.py +32 -378
  204. webscout/prompt_manager.py +376 -274
  205. webscout/sanitize.py +1514 -0
  206. webscout/scout/README.md +452 -0
  207. webscout/scout/__init__.py +8 -8
  208. webscout/scout/core/__init__.py +7 -7
  209. webscout/scout/core/crawler.py +330 -140
  210. webscout/scout/core/scout.py +800 -568
  211. webscout/scout/core/search_result.py +51 -96
  212. webscout/scout/core/text_analyzer.py +64 -63
  213. webscout/scout/core/text_utils.py +412 -277
  214. webscout/scout/core/web_analyzer.py +54 -52
  215. webscout/scout/element.py +872 -460
  216. webscout/scout/parsers/__init__.py +70 -69
  217. webscout/scout/parsers/html5lib_parser.py +182 -172
  218. webscout/scout/parsers/html_parser.py +238 -236
  219. webscout/scout/parsers/lxml_parser.py +203 -178
  220. webscout/scout/utils.py +38 -37
  221. webscout/search/__init__.py +47 -0
  222. webscout/search/base.py +201 -0
  223. webscout/search/bing_main.py +45 -0
  224. webscout/search/brave_main.py +92 -0
  225. webscout/search/duckduckgo_main.py +57 -0
  226. webscout/search/engines/__init__.py +127 -0
  227. webscout/search/engines/bing/__init__.py +15 -0
  228. webscout/search/engines/bing/base.py +35 -0
  229. webscout/search/engines/bing/images.py +114 -0
  230. webscout/search/engines/bing/news.py +96 -0
  231. webscout/search/engines/bing/suggestions.py +36 -0
  232. webscout/search/engines/bing/text.py +109 -0
  233. webscout/search/engines/brave/__init__.py +19 -0
  234. webscout/search/engines/brave/base.py +47 -0
  235. webscout/search/engines/brave/images.py +213 -0
  236. webscout/search/engines/brave/news.py +353 -0
  237. webscout/search/engines/brave/suggestions.py +318 -0
  238. webscout/search/engines/brave/text.py +167 -0
  239. webscout/search/engines/brave/videos.py +364 -0
  240. webscout/search/engines/duckduckgo/__init__.py +25 -0
  241. webscout/search/engines/duckduckgo/answers.py +80 -0
  242. webscout/search/engines/duckduckgo/base.py +189 -0
  243. webscout/search/engines/duckduckgo/images.py +100 -0
  244. webscout/search/engines/duckduckgo/maps.py +183 -0
  245. webscout/search/engines/duckduckgo/news.py +70 -0
  246. webscout/search/engines/duckduckgo/suggestions.py +22 -0
  247. webscout/search/engines/duckduckgo/text.py +221 -0
  248. webscout/search/engines/duckduckgo/translate.py +48 -0
  249. webscout/search/engines/duckduckgo/videos.py +80 -0
  250. webscout/search/engines/duckduckgo/weather.py +84 -0
  251. webscout/search/engines/mojeek.py +61 -0
  252. webscout/search/engines/wikipedia.py +77 -0
  253. webscout/search/engines/yahoo/__init__.py +41 -0
  254. webscout/search/engines/yahoo/answers.py +19 -0
  255. webscout/search/engines/yahoo/base.py +34 -0
  256. webscout/search/engines/yahoo/images.py +323 -0
  257. webscout/search/engines/yahoo/maps.py +19 -0
  258. webscout/search/engines/yahoo/news.py +258 -0
  259. webscout/search/engines/yahoo/suggestions.py +140 -0
  260. webscout/search/engines/yahoo/text.py +273 -0
  261. webscout/search/engines/yahoo/translate.py +19 -0
  262. webscout/search/engines/yahoo/videos.py +302 -0
  263. webscout/search/engines/yahoo/weather.py +220 -0
  264. webscout/search/engines/yandex.py +67 -0
  265. webscout/search/engines/yep/__init__.py +13 -0
  266. webscout/search/engines/yep/base.py +34 -0
  267. webscout/search/engines/yep/images.py +101 -0
  268. webscout/search/engines/yep/suggestions.py +38 -0
  269. webscout/search/engines/yep/text.py +99 -0
  270. webscout/search/http_client.py +172 -0
  271. webscout/search/results.py +141 -0
  272. webscout/search/yahoo_main.py +57 -0
  273. webscout/search/yep_main.py +48 -0
  274. webscout/server/__init__.py +48 -0
  275. webscout/server/config.py +78 -0
  276. webscout/server/exceptions.py +69 -0
  277. webscout/server/providers.py +286 -0
  278. webscout/server/request_models.py +131 -0
  279. webscout/server/request_processing.py +404 -0
  280. webscout/server/routes.py +642 -0
  281. webscout/server/server.py +351 -0
  282. webscout/server/ui_templates.py +1171 -0
  283. webscout/swiftcli/__init__.py +79 -809
  284. webscout/swiftcli/core/__init__.py +7 -0
  285. webscout/swiftcli/core/cli.py +574 -0
  286. webscout/swiftcli/core/context.py +98 -0
  287. webscout/swiftcli/core/group.py +268 -0
  288. webscout/swiftcli/decorators/__init__.py +28 -0
  289. webscout/swiftcli/decorators/command.py +243 -0
  290. webscout/swiftcli/decorators/options.py +247 -0
  291. webscout/swiftcli/decorators/output.py +392 -0
  292. webscout/swiftcli/exceptions.py +21 -0
  293. webscout/swiftcli/plugins/__init__.py +9 -0
  294. webscout/swiftcli/plugins/base.py +134 -0
  295. webscout/swiftcli/plugins/manager.py +269 -0
  296. webscout/swiftcli/utils/__init__.py +58 -0
  297. webscout/swiftcli/utils/formatting.py +251 -0
  298. webscout/swiftcli/utils/parsing.py +368 -0
  299. webscout/update_checker.py +280 -136
  300. webscout/utils.py +28 -14
  301. webscout/version.py +2 -1
  302. webscout/version.py.bak +3 -0
  303. webscout/zeroart/__init__.py +218 -55
  304. webscout/zeroart/base.py +70 -60
  305. webscout/zeroart/effects.py +155 -99
  306. webscout/zeroart/fonts.py +1799 -816
  307. webscout-2026.1.19.dist-info/METADATA +638 -0
  308. webscout-2026.1.19.dist-info/RECORD +312 -0
  309. {webscout-8.2.2.dist-info → webscout-2026.1.19.dist-info}/WHEEL +1 -1
  310. webscout-2026.1.19.dist-info/entry_points.txt +4 -0
  311. webscout-2026.1.19.dist-info/top_level.txt +1 -0
  312. inferno/__init__.py +0 -6
  313. inferno/__main__.py +0 -9
  314. inferno/cli.py +0 -6
  315. webscout/DWEBS.py +0 -477
  316. webscout/Extra/autocoder/__init__.py +0 -9
  317. webscout/Extra/autocoder/autocoder.py +0 -849
  318. webscout/Extra/autocoder/autocoder_utiles.py +0 -332
  319. webscout/LLM.py +0 -442
  320. webscout/Litlogger/__init__.py +0 -67
  321. webscout/Litlogger/core/__init__.py +0 -6
  322. webscout/Litlogger/core/level.py +0 -23
  323. webscout/Litlogger/core/logger.py +0 -165
  324. webscout/Litlogger/handlers/__init__.py +0 -12
  325. webscout/Litlogger/handlers/console.py +0 -33
  326. webscout/Litlogger/handlers/file.py +0 -143
  327. webscout/Litlogger/handlers/network.py +0 -173
  328. webscout/Litlogger/styles/__init__.py +0 -7
  329. webscout/Litlogger/styles/colors.py +0 -249
  330. webscout/Litlogger/styles/formats.py +0 -458
  331. webscout/Litlogger/styles/text.py +0 -87
  332. webscout/Litlogger/utils/__init__.py +0 -6
  333. webscout/Litlogger/utils/detectors.py +0 -153
  334. webscout/Litlogger/utils/formatters.py +0 -200
  335. webscout/Local/__init__.py +0 -12
  336. webscout/Local/__main__.py +0 -9
  337. webscout/Local/api.py +0 -576
  338. webscout/Local/cli.py +0 -516
  339. webscout/Local/config.py +0 -75
  340. webscout/Local/llm.py +0 -287
  341. webscout/Local/model_manager.py +0 -253
  342. webscout/Local/server.py +0 -721
  343. webscout/Local/utils.py +0 -93
  344. webscout/Provider/AI21.py +0 -177
  345. webscout/Provider/AISEARCH/DeepFind.py +0 -250
  346. webscout/Provider/AISEARCH/ISou.py +0 -256
  347. webscout/Provider/AISEARCH/felo_search.py +0 -228
  348. webscout/Provider/AISEARCH/genspark_search.py +0 -208
  349. webscout/Provider/AISEARCH/hika_search.py +0 -194
  350. webscout/Provider/AISEARCH/scira_search.py +0 -324
  351. webscout/Provider/Aitopia.py +0 -292
  352. webscout/Provider/AllenAI.py +0 -413
  353. webscout/Provider/Blackboxai.py +0 -229
  354. webscout/Provider/C4ai.py +0 -432
  355. webscout/Provider/ChatGPTClone.py +0 -226
  356. webscout/Provider/ChatGPTES.py +0 -237
  357. webscout/Provider/ChatGPTGratis.py +0 -194
  358. webscout/Provider/Chatify.py +0 -175
  359. webscout/Provider/Cloudflare.py +0 -273
  360. webscout/Provider/DeepSeek.py +0 -196
  361. webscout/Provider/ElectronHub.py +0 -709
  362. webscout/Provider/ExaChat.py +0 -342
  363. webscout/Provider/Free2GPT.py +0 -241
  364. webscout/Provider/GPTWeb.py +0 -193
  365. webscout/Provider/Glider.py +0 -211
  366. webscout/Provider/HF_space/__init__.py +0 -0
  367. webscout/Provider/HF_space/qwen_qwen2.py +0 -206
  368. webscout/Provider/HuggingFaceChat.py +0 -462
  369. webscout/Provider/Hunyuan.py +0 -272
  370. webscout/Provider/LambdaChat.py +0 -392
  371. webscout/Provider/Llama.py +0 -200
  372. webscout/Provider/Llama3.py +0 -204
  373. webscout/Provider/Marcus.py +0 -148
  374. webscout/Provider/OLLAMA.py +0 -396
  375. webscout/Provider/OPENAI/c4ai.py +0 -367
  376. webscout/Provider/OPENAI/chatgptclone.py +0 -460
  377. webscout/Provider/OPENAI/exachat.py +0 -433
  378. webscout/Provider/OPENAI/freeaichat.py +0 -352
  379. webscout/Provider/OPENAI/opkfc.py +0 -488
  380. webscout/Provider/OPENAI/scirachat.py +0 -463
  381. webscout/Provider/OPENAI/standardinput.py +0 -425
  382. webscout/Provider/OPENAI/typegpt.py +0 -346
  383. webscout/Provider/OPENAI/uncovrAI.py +0 -455
  384. webscout/Provider/OPENAI/venice.py +0 -413
  385. webscout/Provider/OPENAI/yep.py +0 -327
  386. webscout/Provider/OpenGPT.py +0 -199
  387. webscout/Provider/Perplexitylabs.py +0 -415
  388. webscout/Provider/Phind.py +0 -535
  389. webscout/Provider/PizzaGPT.py +0 -198
  390. webscout/Provider/Reka.py +0 -214
  391. webscout/Provider/StandardInput.py +0 -278
  392. webscout/Provider/TTI/AiForce/__init__.py +0 -22
  393. webscout/Provider/TTI/AiForce/async_aiforce.py +0 -224
  394. webscout/Provider/TTI/AiForce/sync_aiforce.py +0 -245
  395. webscout/Provider/TTI/FreeAIPlayground/__init__.py +0 -9
  396. webscout/Provider/TTI/FreeAIPlayground/async_freeaiplayground.py +0 -181
  397. webscout/Provider/TTI/FreeAIPlayground/sync_freeaiplayground.py +0 -180
  398. webscout/Provider/TTI/ImgSys/__init__.py +0 -23
  399. webscout/Provider/TTI/ImgSys/async_imgsys.py +0 -202
  400. webscout/Provider/TTI/ImgSys/sync_imgsys.py +0 -195
  401. webscout/Provider/TTI/MagicStudio/__init__.py +0 -2
  402. webscout/Provider/TTI/MagicStudio/async_magicstudio.py +0 -111
  403. webscout/Provider/TTI/MagicStudio/sync_magicstudio.py +0 -109
  404. webscout/Provider/TTI/Nexra/__init__.py +0 -22
  405. webscout/Provider/TTI/Nexra/async_nexra.py +0 -286
  406. webscout/Provider/TTI/Nexra/sync_nexra.py +0 -258
  407. webscout/Provider/TTI/PollinationsAI/__init__.py +0 -23
  408. webscout/Provider/TTI/PollinationsAI/async_pollinations.py +0 -311
  409. webscout/Provider/TTI/PollinationsAI/sync_pollinations.py +0 -265
  410. webscout/Provider/TTI/aiarta/__init__.py +0 -2
  411. webscout/Provider/TTI/aiarta/async_aiarta.py +0 -482
  412. webscout/Provider/TTI/aiarta/sync_aiarta.py +0 -440
  413. webscout/Provider/TTI/artbit/__init__.py +0 -22
  414. webscout/Provider/TTI/artbit/async_artbit.py +0 -155
  415. webscout/Provider/TTI/artbit/sync_artbit.py +0 -148
  416. webscout/Provider/TTI/fastflux/__init__.py +0 -22
  417. webscout/Provider/TTI/fastflux/async_fastflux.py +0 -261
  418. webscout/Provider/TTI/fastflux/sync_fastflux.py +0 -252
  419. webscout/Provider/TTI/huggingface/__init__.py +0 -22
  420. webscout/Provider/TTI/huggingface/async_huggingface.py +0 -199
  421. webscout/Provider/TTI/huggingface/sync_huggingface.py +0 -195
  422. webscout/Provider/TTI/piclumen/__init__.py +0 -23
  423. webscout/Provider/TTI/piclumen/async_piclumen.py +0 -268
  424. webscout/Provider/TTI/piclumen/sync_piclumen.py +0 -233
  425. webscout/Provider/TTI/pixelmuse/__init__.py +0 -4
  426. webscout/Provider/TTI/pixelmuse/async_pixelmuse.py +0 -249
  427. webscout/Provider/TTI/pixelmuse/sync_pixelmuse.py +0 -182
  428. webscout/Provider/TTI/talkai/__init__.py +0 -4
  429. webscout/Provider/TTI/talkai/async_talkai.py +0 -229
  430. webscout/Provider/TTI/talkai/sync_talkai.py +0 -207
  431. webscout/Provider/TTS/gesserit.py +0 -127
  432. webscout/Provider/TeachAnything.py +0 -187
  433. webscout/Provider/Venice.py +0 -219
  434. webscout/Provider/VercelAI.py +0 -234
  435. webscout/Provider/WebSim.py +0 -228
  436. webscout/Provider/Writecream.py +0 -211
  437. webscout/Provider/WritingMate.py +0 -197
  438. webscout/Provider/aimathgpt.py +0 -189
  439. webscout/Provider/askmyai.py +0 -158
  440. webscout/Provider/asksteve.py +0 -203
  441. webscout/Provider/bagoodex.py +0 -145
  442. webscout/Provider/chatglm.py +0 -205
  443. webscout/Provider/copilot.py +0 -428
  444. webscout/Provider/freeaichat.py +0 -271
  445. webscout/Provider/gaurish.py +0 -244
  446. webscout/Provider/geminiprorealtime.py +0 -160
  447. webscout/Provider/granite.py +0 -187
  448. webscout/Provider/hermes.py +0 -219
  449. webscout/Provider/koala.py +0 -268
  450. webscout/Provider/labyrinth.py +0 -340
  451. webscout/Provider/lepton.py +0 -194
  452. webscout/Provider/llamatutor.py +0 -192
  453. webscout/Provider/multichat.py +0 -325
  454. webscout/Provider/promptrefine.py +0 -193
  455. webscout/Provider/scira_chat.py +0 -277
  456. webscout/Provider/scnet.py +0 -187
  457. webscout/Provider/talkai.py +0 -194
  458. webscout/Provider/tutorai.py +0 -252
  459. webscout/Provider/typegpt.py +0 -232
  460. webscout/Provider/uncovr.py +0 -312
  461. webscout/Provider/yep.py +0 -376
  462. webscout/litprinter/__init__.py +0 -59
  463. webscout/scout/core.py +0 -881
  464. webscout/tempid.py +0 -128
  465. webscout/webscout_search.py +0 -1346
  466. webscout/webscout_search_async.py +0 -877
  467. webscout/yep_search.py +0 -297
  468. webscout-8.2.2.dist-info/METADATA +0 -734
  469. webscout-8.2.2.dist-info/RECORD +0 -309
  470. webscout-8.2.2.dist-info/entry_points.txt +0 -5
  471. webscout-8.2.2.dist-info/top_level.txt +0 -3
  472. webstoken/__init__.py +0 -30
  473. webstoken/classifier.py +0 -189
  474. webstoken/keywords.py +0 -216
  475. webstoken/language.py +0 -128
  476. webstoken/ner.py +0 -164
  477. webstoken/normalizer.py +0 -35
  478. webstoken/processor.py +0 -77
  479. webstoken/sentiment.py +0 -206
  480. webstoken/stemmer.py +0 -73
  481. webstoken/tagger.py +0 -60
  482. webstoken/tokenizer.py +0 -158
  483. {webscout-8.2.2.dist-info → webscout-2026.1.19.dist-info/licenses}/LICENSE.md +0 -0
webscout/Provider/meta.py CHANGED
@@ -1,794 +1,996 @@
1
- import json
2
- import logging
3
- import time
4
- import urllib
5
- import uuid
6
- from typing import Dict, Generator, Iterator, List, Union
7
-
8
- import random
9
- import requests
10
- from webscout.scout import Scout
11
-
12
- from webscout.AIutel import Optimizers
13
- from webscout.AIutel import Conversation
14
- from webscout.AIutel import AwesomePrompts, sanitize_stream
15
- from webscout.AIbase import Provider
16
- from webscout import exceptions
17
- from webscout.litagent import LitAgent as Lit
18
- MAX_RETRIES = 3
19
-
20
- def generate_offline_threading_id() -> str:
21
- """
22
- Generates an offline threading ID.
23
-
24
- Returns:
25
- str: The generated offline threading ID.
26
- """
27
- # Maximum value for a 64-bit integer in Python
28
- max_int = (1 << 64) - 1
29
- mask22_bits = (1 << 22) - 1
30
-
31
- # Function to get the current timestamp in milliseconds
32
- def get_current_timestamp():
33
- return int(time.time() * 1000)
34
-
35
- # Function to generate a random 64-bit integer
36
- def get_random_64bit_int():
37
- return random.getrandbits(64)
38
-
39
- # Combine timestamp and random value
40
- def combine_and_mask(timestamp, random_value):
41
- shifted_timestamp = timestamp << 22
42
- masked_random = random_value & mask22_bits
43
- return (shifted_timestamp | masked_random) & max_int
44
-
45
- timestamp = get_current_timestamp()
46
- random_value = get_random_64bit_int()
47
- threading_id = combine_and_mask(timestamp, random_value)
48
-
49
- return str(threading_id)
50
-
51
-
52
- def extract_value(text: str, start_str: str, end_str: str) -> str:
53
- """
54
- Helper function to extract a specific value from the given text using a key.
55
-
56
- Args:
57
- text (str): The text from which to extract the value.
58
- start_str (str): The starting key.
59
- end_str (str): The ending key.
60
-
61
- Returns:
62
- str: The extracted value.
63
- """
64
- start = text.find(start_str) + len(start_str)
65
- end = text.find(end_str, start)
66
- return text[start:end]
67
-
68
-
69
- def format_response(response: dict) -> str:
70
- """
71
- Formats the response from Meta AI to remove unnecessary characters.
72
-
73
- Args:
74
-         response (dict): The dictionary containing the response to format.
75
-
76
- Returns:
77
- str: The formatted response.
78
- """
79
- text = ""
80
- for content in (
81
- response.get("data", {})
82
- .get("node", {})
83
- .get("bot_response_message", {})
84
- .get("composed_text", {})
85
- .get("content", [])
86
- ):
87
- text += content["text"] + "\n"
88
- return text
89
-
90
-
91
- # Function to perform the login
92
- def get_fb_session(email, password, proxies=None):
93
- login_url = "https://mbasic.facebook.com/login/"
94
- headers = {
95
- "authority": "mbasic.facebook.com",
96
- "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
97
- "accept-language": "en-US,en;q=0.9",
98
- "sec-ch-ua": '"Chromium";v="122", "Not(A:Brand";v="24", "Google Chrome";v="122"',
99
- "sec-ch-ua-mobile": "?0",
100
- "sec-ch-ua-platform": '"macOS"',
101
- "sec-fetch-dest": "document",
102
- "sec-fetch-mode": "navigate",
103
- "sec-fetch-site": "none",
104
- "sec-fetch-user": "?1",
105
- "upgrade-insecure-requests": "1",
106
- "user-agent": Lit().random(),
107
- }
108
- # Send the GET request
109
- response = requests.get(login_url, headers=headers, proxies=proxies)
110
-
111
- # Use Scout for parsing instead of BeautifulSoup
112
- scout = Scout(response.text)
113
-
114
- # Parse necessary parameters from the login form
115
- lsd = scout.find_first('input[name="lsd"]').get('value')
116
- jazoest = scout.find_first('input[name="jazoest"]').get('value')
117
- li = scout.find_first('input[name="li"]').get('value')
118
- m_ts = scout.find_first('input[name="m_ts"]').get('value')
119
-
120
- # Define the URL and body for the POST request to submit the login form
121
- post_url = "https://mbasic.facebook.com/login/device-based/regular/login/?refsrc=deprecated&lwv=100"
122
- data = {
123
- "lsd": lsd,
124
- "jazoest": jazoest,
125
- "m_ts": m_ts,
126
- "li": li,
127
- "try_number": "0",
128
- "unrecognized_tries": "0",
129
- "email": email,
130
- "pass": password,
131
- "login": "Log In",
132
- "bi_xrwh": "0",
133
- }
134
-
135
- headers = {
136
- "authority": "mbasic.facebook.com",
137
- "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
138
- "accept-language": "en-US,en;q=0.9",
139
- "cache-control": "no-cache",
140
- "content-type": "application/x-www-form-urlencoded",
141
- "cookie": f"datr={response.cookies.get('datr')}; sb={response.cookies.get('sb')}; ps_n=1; ps_l=1",
142
- "dpr": "2",
143
- "origin": "https://mbasic.facebook.com",
144
- "pragma": "no-cache",
145
- "referer": "https://mbasic.facebook.com/login/",
146
- "sec-fetch-site": "same-origin",
147
- "sec-fetch-user": "?1",
148
- "upgrade-insecure-requests": "1",
149
- "user-agent": Lit().random(),
150
- "viewport-width": "1728",
151
- }
152
-
153
- # Send the POST request
154
- session = requests.session()
155
- session.proxies = proxies
156
-
157
- result = session.post(post_url, headers=headers, data=data)
158
- if "sb" not in session.cookies:
159
- raise exceptions.FacebookInvalidCredentialsException(
160
- "Was not able to login to Facebook. Please check your credentials. "
161
- "You may also have been rate limited. Try to connect to Facebook manually."
162
- )
163
-
164
- cookies = {
165
- **result.cookies.get_dict(),
166
- "sb": session.cookies["sb"],
167
- "xs": session.cookies["xs"],
168
- "fr": session.cookies["fr"],
169
- "c_user": session.cookies["c_user"],
170
- }
171
-
172
- response_login = {
173
- "cookies": cookies,
174
- "headers": result.headers,
175
- "response": response.text,
176
- }
177
- meta_ai_cookies = get_cookies()
178
-
179
- url = "https://www.meta.ai/state/"
180
-
181
- payload = f'__a=1&lsd={meta_ai_cookies["lsd"]}'
182
- headers = {
183
- "authority": "www.meta.ai",
184
- "accept": "*/*",
185
- "accept-language": "en-US,en;q=0.9",
186
- "cache-control": "no-cache",
187
- "content-type": "application/x-www-form-urlencoded",
188
- "cookie": f'ps_n=1; ps_l=1; dpr=2; _js_datr={meta_ai_cookies["_js_datr"]}; abra_csrf={meta_ai_cookies["abra_csrf"]}; datr={meta_ai_cookies["datr"]};; ps_l=1; ps_n=1',
189
- "origin": "https://www.meta.ai",
190
- "pragma": "no-cache",
191
- "referer": "https://www.meta.ai/",
192
- "sec-fetch-mode": "cors",
193
- "sec-fetch-site": "same-origin",
194
- "user-agent": Lit().random(),
195
- "viewport-width": "1728",
196
- }
197
-
198
- response = requests.request("POST", url, headers=headers, data=payload, proxies=proxies)
199
-
200
- state = extract_value(response.text, start_str='"state":"', end_str='"')
201
-
202
- url = f"https://www.facebook.com/oidc/?app_id=1358015658191005&scope=openid%20linking&response_type=code&redirect_uri=https%3A%2F%2Fwww.meta.ai%2Fauth%2F&no_universal_links=1&deoia=1&state={state}"
203
- payload = {}
204
- headers = {
205
- "authority": "www.facebook.com",
206
- "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
207
- "accept-language": "en-US,en;q=0.9",
208
- "cache-control": "no-cache",
209
- "cookie": f"datr={response_login['cookies']['datr']}; sb={response_login['cookies']['sb']}; c_user={response_login['cookies']['c_user']}; xs={response_login['cookies']['xs']}; fr={response_login['cookies']['fr']}; m_page_voice={response_login['cookies']['m_page_voice']}; abra_csrf={meta_ai_cookies['abra_csrf']};",
210
- "sec-fetch-dest": "document",
211
- "sec-fetch-mode": "navigate",
212
- "sec-fetch-site": "cross-site",
213
- "sec-fetch-user": "?1",
214
- "upgrade-insecure-requests": "1",
215
- "user-agent": Lit().random(),
216
- }
217
- session = requests.session()
218
- session.proxies = proxies
219
- response = session.get(url, headers=headers, data=payload, allow_redirects=False)
220
-
221
- next_url = response.headers["Location"]
222
-
223
- url = next_url
224
-
225
- payload = {}
226
- headers = {
227
- "User-Agent": Lit().random(),
228
- "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8",
229
- "Accept-Language": "en-US,en;q=0.5",
230
- "Accept-Encoding": "gzip, deflate, br",
231
- "Referer": "https://www.meta.ai/",
232
- "Connection": "keep-alive",
233
- "Cookie": f'dpr=2; abra_csrf={meta_ai_cookies["abra_csrf"]}; datr={meta_ai_cookies["_js_datr"]}',
234
- "Upgrade-Insecure-Requests": "1",
235
- "Sec-Fetch-Dest": "document",
236
- "Sec-Fetch-Mode": "navigate",
237
- "Sec-Fetch-Site": "cross-site",
238
- "Sec-Fetch-User": "?1",
239
- "TE": "trailers",
240
- }
241
- session.get(url, headers=headers, data=payload)
242
- cookies = session.cookies.get_dict()
243
- if "abra_sess" not in cookies:
244
- raise exceptions.FacebookInvalidCredentialsException(
245
- "Was not able to login to Facebook. Please check your credentials. "
246
- "You may also have been rate limited. Try to connect to Facebook manually."
247
- )
248
- logging.info("Successfully logged in to Facebook.")
249
- return cookies
250
-
251
-
252
- def get_cookies(self) -> dict:
253
- """
254
- Extracts necessary cookies from the Meta AI main page.
255
-
256
- Returns:
257
- dict: A dictionary containing essential cookies.
258
- """
259
- headers = {}
260
- if self.fb_email is not None and self.fb_password is not None:
261
- fb_session = get_fb_session(self.fb_email, self.fb_password, self.proxy)
262
- headers = {"cookie": f"abra_sess={fb_session['abra_sess']}"}
263
-
264
- response = requests.get(
265
- "https://www.meta.ai/",
266
- headers=headers,
267
- proxies=self.proxy,
268
- )
269
-
270
- cookies = {
271
- "_js_datr": extract_value(
272
- response.text, start_str='_js_datr":{"value":"', end_str='",'
273
- ),
274
- "datr": extract_value(
275
- response.text, start_str='datr":{"value":"', end_str='",'
276
- ),
277
- "lsd": extract_value(
278
- response.text, start_str='"LSD",[],{"token":"', end_str='"}'
279
- ),
280
- "fb_dtsg": extract_value(
281
- response.text, start_str='DTSGInitData",[],{"token":"', end_str='"'
282
- ),
283
- }
284
-
285
- if len(headers) > 0:
286
- cookies["abra_sess"] = fb_session["abra_sess"]
287
- else:
288
- cookies["abra_csrf"] = extract_value(
289
- response.text, start_str='abra_csrf":{"value":"', end_str='",'
290
- )
291
- return cookies
292
-
293
- class Meta(Provider):
294
- """
295
- A class to interact with the Meta AI API to obtain and use access tokens for sending
296
- and receiving messages from the Meta AI Chat API.
297
- """
298
-
299
- def __init__(
300
- self,
301
- fb_email: str = None,
302
- fb_password: str = None,
303
- proxy: dict = None,
304
- is_conversation: bool = True,
305
- max_tokens: int = 600,
306
- timeout: int = 30,
307
- intro: str = None,
308
- filepath: str = None,
309
- update_file: bool = True,
310
- proxies: dict = {},
311
- history_offset: int = 10250,
312
- act: str = None,
313
- ):
314
- """
315
- Initializes the Meta AI API with given parameters.
316
-
317
- Args:
318
- fb_email (str, optional): Your Facebook email address. Defaults to None.
319
- fb_password (str, optional): Your Facebook password. Defaults to None.
320
- proxy (dict, optional): Proxy settings for requests. Defaults to None.
321
- is_conversation (bool, optional): Flag for chatting conversationally. Defaults to True.
322
- max_tokens (int, optional): Maximum number of tokens to be generated upon completion. Defaults to 600.
323
- timeout (int, optional): Http request timeout. Defaults to 30.
324
- intro (str, optional): Conversation introductory prompt. Defaults to None.
325
- filepath (str, optional): Path to file containing conversation history. Defaults to None.
326
- update_file (bool, optional): Add new prompts and responses to the file. Defaults to True.
327
- proxies (dict, optional): Http request proxies. Defaults to {}.
328
- history_offset (int, optional): Limit conversation history to this number of last texts. Defaults to 10250.
329
- act (str|int, optional): Awesome prompt key or index. (Used as intro). Defaults to None.
330
- """
331
- self.session = requests.Session()
332
- self.session.headers.update(
333
- {
334
- "user-agent": Lit().random(),
335
- }
336
- )
337
- self.access_token = None
338
- self.fb_email = fb_email
339
- self.fb_password = fb_password
340
- self.proxy = proxy
341
- if self.proxy and not self.check_proxy():
342
- raise ConnectionError(
343
- "Unable to connect to proxy. Please check your proxy settings."
344
- )
345
- self.is_conversation = is_conversation
346
- self.max_tokens_to_sample = max_tokens
347
- self.timeout = timeout
348
- self.last_response = {}
349
- self.is_authed = fb_password is not None and fb_email is not None
350
- self.cookies = self.get_cookies()
351
- self.external_conversation_id = None
352
- self.offline_threading_id = None
353
-
354
- self.__available_optimizers = (
355
- method
356
- for method in dir(Optimizers)
357
- if callable(getattr(Optimizers, method)) and not method.startswith("__")
358
- )
359
- Conversation.intro = (
360
- AwesomePrompts().get_act(
361
- act, raise_not_found=True, default=None, case_insensitive=True
362
- )
363
- if act
364
- else intro or Conversation.intro
365
- )
366
- self.conversation = Conversation(
367
- is_conversation, self.max_tokens_to_sample, filepath, update_file
368
- )
369
- self.conversation.history_offset = history_offset
370
- self.session.proxies = proxies
371
-
372
- def check_proxy(self, test_url: str = "https://api.ipify.org/?format=json") -> bool:
373
- """
374
- Checks the proxy connection by making a request to a test URL.
375
-
376
- Args:
377
- test_url (str): A test site from which we check that the proxy is installed correctly.
378
-
379
- Returns:
380
- bool: True if the proxy is working, False otherwise.
381
- """
382
- try:
383
- response = self.session.get(test_url, proxies=self.proxy, timeout=10)
384
- if response.status_code == 200:
385
- self.session.proxies = self.proxy
386
- return True
387
- return False
388
- except requests.RequestException:
389
- return False
390
-
391
- def get_access_token(self) -> str:
392
- """
393
- Retrieves an access token using Meta's authentication API.
394
-
395
- Returns:
396
- str: A valid access token.
397
- """
398
-
399
- if self.access_token:
400
- return self.access_token
401
-
402
- url = "https://www.meta.ai/api/graphql/"
403
- payload = {
404
- "lsd": self.cookies["lsd"],
405
- "fb_api_caller_class": "RelayModern",
406
- "fb_api_req_friendly_name": "useAbraAcceptTOSForTempUserMutation",
407
- "variables": {
408
- "dob": "1999-01-01",
409
- "icebreaker_type": "TEXT",
410
- "__relay_internal__pv__WebPixelRatiorelayprovider": 1,
411
- },
412
- "doc_id": "7604648749596940",
413
- }
414
- payload = urllib.parse.urlencode(payload) # noqa
415
- headers = {
416
- "content-type": "application/x-www-form-urlencoded",
417
- "cookie": f'_js_datr={self.cookies["_js_datr"]}; '
418
- f'abra_csrf={self.cookies["abra_csrf"]}; datr={self.cookies["datr"]};',
419
- "sec-fetch-site": "same-origin",
420
- "x-fb-friendly-name": "useAbraAcceptTOSForTempUserMutation",
421
- }
422
-
423
- response = self.session.post(url, headers=headers, data=payload)
424
-
425
- try:
426
- auth_json = response.json()
427
- except json.JSONDecodeError:
428
- raise exceptions.FacebookRegionBlocked(
429
- "Unable to receive a valid response from Meta AI. This is likely due to your region being blocked. "
430
- "Try manually accessing https://www.meta.ai/ to confirm."
431
- )
432
-
433
- access_token = auth_json["data"]["xab_abra_accept_terms_of_service"][
434
- "new_temp_user_auth"
435
- ]["access_token"]
436
-
437
- # Need to sleep for a bit, for some reason the API doesn't like it when we send request too quickly
438
- # (maybe Meta needs to register Cookies on their side?)
439
- time.sleep(1)
440
-
441
- return access_token
442
-
443
- def ask(
444
- self,
445
- prompt: str,
446
- stream: bool = False,
447
- raw: bool = False,
448
- optimizer: str = None,
449
- conversationally: bool = False,
450
- ) -> Union[Dict, Generator[Dict, None, None]]:
451
- """
452
- Sends a message to the Meta AI and returns the response.
453
-
454
- Args:
455
- prompt (str): The prompt to send.
456
- stream (bool): Whether to stream the response or not. Defaults to False.
457
- raw (bool, optional): Stream back raw response as received. Defaults to False.
458
- optimizer (str, optional): Prompt optimizer name - `[code, shell_command]`. Defaults to None.
459
- conversationally (bool, optional): Chat conversationally when using optimizer. Defaults to False.
460
- Returns:
461
- Union[Dict, Generator[Dict, None, None]]: A dictionary containing the response message and sources, or a generator yielding such dictionaries.
462
- """
463
- conversation_prompt = self.conversation.gen_complete_prompt(prompt)
464
- if optimizer:
465
- if optimizer in self.__available_optimizers:
466
- conversation_prompt = getattr(Optimizers, optimizer)(
467
- conversation_prompt if conversationally else prompt
468
- )
469
- else:
470
- raise Exception(
471
- f"Optimizer is not one of {self.__available_optimizers}"
472
- )
473
-
474
- if not self.is_authed:
475
- self.access_token = self.get_access_token()
476
- auth_payload = {"access_token": self.access_token}
477
- url = "https://graph.meta.ai/graphql?locale=user"
478
-
479
- else:
480
- auth_payload = {"fb_dtsg": self.cookies["fb_dtsg"]}
481
- url = "https://www.meta.ai/api/graphql/"
482
-
483
- if not self.external_conversation_id:
484
- external_id = str(uuid.uuid4())
485
- self.external_conversation_id = external_id
486
- payload = {
487
- **auth_payload,
488
- "fb_api_caller_class": "RelayModern",
489
- "fb_api_req_friendly_name": "useAbraSendMessageMutation",
490
- "variables": json.dumps(
491
- {
492
- "message": {"sensitive_string_value": conversation_prompt},
493
- "externalConversationId": self.external_conversation_id,
494
- "offlineThreadingId": generate_offline_threading_id(),
495
- "suggestedPromptIndex": None,
496
- "flashVideoRecapInput": {"images": []},
497
- "flashPreviewInput": None,
498
- "promptPrefix": None,
499
- "entrypoint": "ABRA__CHAT__TEXT",
500
- "icebreaker_type": "TEXT",
501
- "__relay_internal__pv__AbraDebugDevOnlyrelayprovider": False,
502
- "__relay_internal__pv__WebPixelRatiorelayprovider": 1,
503
- }
504
- ),
505
- "server_timestamps": "true",
506
- "doc_id": "7783822248314888",
507
- }
508
- payload = urllib.parse.urlencode(payload) # noqa
509
- headers = {
510
- "content-type": "application/x-www-form-urlencoded",
511
- "x-fb-friendly-name": "useAbraSendMessageMutation",
512
- }
513
- if self.is_authed:
514
- headers["cookie"] = f'abra_sess={self.cookies["abra_sess"]}'
515
- # Recreate the session to avoid cookie leakage when user is authenticated
516
- self.session = requests.Session()
517
- self.session.proxies = self.proxy
518
-
519
- if stream:
520
-
521
- def for_stream():
522
- response = self.session.post(
523
- url, headers=headers, data=payload, stream=True, timeout=self.timeout
524
- )
525
- if not response.ok:
526
- raise exceptions.FailedToGenerateResponseError(
527
- f"Failed to generate response - ({response.status_code}, {response.reason}) - {response.text}"
528
- )
529
-
530
- lines = response.iter_lines()
531
- is_error = json.loads(next(lines))
532
- if len(is_error.get("errors", [])) > 0:
533
- raise exceptions.FailedToGenerateResponseError(
534
- f"Failed to generate response - {response.text}"
535
- )
536
- for line in lines:
537
- if line:
538
- json_line = json.loads(line)
539
- extracted_data = self.extract_data(json_line)
540
- if not extracted_data.get("message"):
541
- continue
542
- self.last_response.update(extracted_data)
543
- yield line if raw else extracted_data
544
- self.conversation.update_chat_history(
545
- prompt, self.get_message(self.last_response)
546
- )
547
-
548
- return for_stream()
549
- else:
550
- response = self.session.post(
551
- url, headers=headers, data=payload, timeout=self.timeout
552
- )
553
- if not response.ok:
554
- raise exceptions.FailedToGenerateResponseError(
555
- f"Failed to generate response - ({response.status_code}, {response.reason}) - {response.text}"
556
- )
557
- raw_response = response.text
558
- last_streamed_response = self.extract_last_response(raw_response)
559
- if not last_streamed_response:
560
- raise exceptions.FailedToGenerateResponseError(
561
- f"Failed to generate response - {response.text}"
562
- )
563
-
564
- extracted_data = self.extract_data(last_streamed_response)
565
- self.last_response.update(extracted_data)
566
- self.conversation.update_chat_history(
567
- prompt, self.get_message(self.last_response)
568
- )
569
- return extracted_data
570
-
571
- def chat(
572
- self,
573
- prompt: str,
574
- stream: bool = False,
575
- optimizer: str = None,
576
- conversationally: bool = False,
577
- ) -> str:
578
- """
579
- Sends a message to the Meta AI and returns the response.
580
-
581
- Args:
582
- prompt (str): The message to send.
583
- stream (bool): Whether to stream the response or not. Defaults to False.
584
- optimizer (str, optional): Prompt optimizer name - `[code, shell_command]`. Defaults to None.
585
- conversationally (bool, optional): Chat conversationally when using optimizer. Defaults to False.
586
-
587
- Returns:
588
- str: The response message.
589
- """
590
-
591
- def for_stream():
592
- for response in self.ask(
593
- prompt, True, optimizer=optimizer, conversationally=conversationally
594
- ):
595
- yield self.get_message(response)
596
-
597
- def for_non_stream():
598
- return self.get_message(
599
- self.ask(
600
- prompt,
601
- False,
602
- optimizer=optimizer,
603
- conversationally=conversationally,
604
- )
605
- )
606
-
607
- return for_stream() if stream else for_non_stream()
608
-
609
- def extract_last_response(self, response: str) -> Dict:
610
- """
611
- Extracts the last response from the Meta AI API.
612
-
613
- Args:
614
- response (str): The response to extract the last response from.
615
-
616
- Returns:
617
- dict: A dictionary containing the last response.
618
- """
619
- last_streamed_response = None
620
- for line in response.split("\n"):
621
- try:
622
- json_line = json.loads(line)
623
- except json.JSONDecodeError:
624
- continue
625
-
626
- bot_response_message = (
627
- json_line.get("data", {})
628
- .get("node", {})
629
- .get("bot_response_message", {})
630
- )
631
- chat_id = bot_response_message.get("id")
632
- if chat_id:
633
- external_conversation_id, offline_threading_id, _ = chat_id.split("_")
634
- self.external_conversation_id = external_conversation_id
635
- self.offline_threading_id = offline_threading_id
636
-
637
- streaming_state = bot_response_message.get("streaming_state")
638
- if streaming_state == "OVERALL_DONE":
639
- last_streamed_response = json_line
640
-
641
- return last_streamed_response
642
-
643
- def extract_data(self, json_line: dict) -> Dict:
644
- """
645
- Extract data and sources from a parsed JSON line.
646
-
647
- Args:
648
- json_line (dict): Parsed JSON line.
649
-
650
- Returns:
651
- dict: A dictionary containing the response message, sources, and media.
652
- """
653
- bot_response_message = (
654
- json_line.get("data", {}).get("node", {}).get("bot_response_message", {})
655
- )
656
- response = format_response(response=json_line)
657
- fetch_id = bot_response_message.get("fetch_id")
658
- sources = self.fetch_sources(fetch_id) if fetch_id else []
659
- medias = self.extract_media(bot_response_message)
660
- return {"message": response, "sources": sources, "media": medias}
661
-
662
- def extract_media(self, json_line: dict) -> List[Dict]:
663
- """
664
- Extract media from a parsed JSON line.
665
-
666
- Args:
667
- json_line (dict): Parsed JSON line.
668
-
669
- Returns:
670
- list: A list of dictionaries containing the extracted media.
671
- """
672
- medias = []
673
- imagine_card = json_line.get("imagine_card", {})
674
- session = imagine_card.get("session", {}) if imagine_card else {}
675
- media_sets = (
676
- (json_line.get("imagine_card", {}).get("session", {}).get("media_sets", []))
677
- if imagine_card and session
678
- else []
679
- )
680
- for media_set in media_sets:
681
- imagine_media = media_set.get("imagine_media", [])
682
- for media in imagine_media:
683
- medias.append(
684
- {
685
- "url": media.get("uri"),
686
- "type": media.get("media_type"),
687
- "prompt": media.get("prompt"),
688
- }
689
- )
690
- return medias
691
-
692
- def get_cookies(self) -> dict:
693
- """
694
- Extracts necessary cookies from the Meta AI main page.
695
-
696
- Returns:
697
- dict: A dictionary containing essential cookies.
698
- """
699
- headers = {}
700
- if self.fb_email is not None and self.fb_password is not None:
701
- fb_session = get_fb_session(self.fb_email, self.fb_password, self.proxy)
702
- headers = {"cookie": f"abra_sess={fb_session['abra_sess']}"}
703
-
704
- response = requests.get(
705
- "https://www.meta.ai/",
706
- headers=headers,
707
- proxies=self.proxy,
708
- )
709
-
710
- cookies = {
711
- "_js_datr": extract_value(
712
- response.text, start_str='_js_datr":{"value":"', end_str='",'
713
- ),
714
- "datr": extract_value(
715
- response.text, start_str='datr":{"value":"', end_str='",'
716
- ),
717
- "lsd": extract_value(
718
- response.text, start_str='"LSD",[],{"token":"', end_str='"}'
719
- ),
720
- "fb_dtsg": extract_value(
721
- response.text, start_str='DTSGInitData",[],{"token":"', end_str='"'
722
- ),
723
- }
724
-
725
- if len(headers) > 0:
726
- cookies["abra_sess"] = fb_session["abra_sess"]
727
- else:
728
- cookies["abra_csrf"] = extract_value(
729
- response.text, start_str='abra_csrf":{"value":"', end_str='",'
730
- )
731
- return cookies
732
-
733
- def fetch_sources(self, fetch_id: str) -> List[Dict]:
734
- """
735
- Fetches sources from the Meta AI API based on the given query.
736
-
737
- Args:
738
- fetch_id (str): The fetch ID to use for the query.
739
-
740
- Returns:
741
- list: A list of dictionaries containing the fetched sources.
742
- """
743
-
744
- url = "https://graph.meta.ai/graphql?locale=user"
745
- payload = {
746
- "access_token": self.access_token,
747
- "fb_api_caller_class": "RelayModern",
748
- "fb_api_req_friendly_name": "AbraSearchPluginDialogQuery",
749
- "variables": json.dumps({"abraMessageFetchID": fetch_id}),
750
- "server_timestamps": "true",
751
- "doc_id": "6946734308765963",
752
- }
753
-
754
- payload = urllib.parse.urlencode(payload) # noqa
755
-
756
- headers = {
757
- "authority": "graph.meta.ai",
758
- "accept-language": "en-US,en;q=0.9,fr-FR;q=0.8,fr;q=0.7",
759
- "content-type": "application/x-www-form-urlencoded",
760
- "cookie": f'dpr=2; abra_csrf={self.cookies.get("abra_csrf")}; datr={self.cookies.get("datr")}; ps_n=1; ps_l=1',
761
- "x-fb-friendly-name": "AbraSearchPluginDialogQuery",
762
- }
763
-
764
- response = self.session.post(url, headers=headers, data=payload)
765
- response_json = response.json()
766
- message = response_json.get("data", {}).get("message", {})
767
- search_results = (
768
- (response_json.get("data", {}).get("message", {}).get("searchResults"))
769
- if message
770
- else None
771
- )
772
- if search_results is None:
773
- return []
774
-
775
- references = search_results["references"]
776
- return references
777
-
778
- def get_message(self, response: dict) -> str:
779
- """Retrieves message only from response
780
-
781
- Args:
782
- response (dict): Response generated by `self.ask`
783
-
784
- Returns:
785
- str: Message extracted
786
- """
787
- assert isinstance(response, dict), "Response should be of dict data-type only"
788
- return response["message"]
789
-
790
- if __name__ == "__main__":
791
- Meta = Meta()
792
- ai = Meta.chat("hi")
793
- for chunk in ai:
794
- print(chunk, end="", flush=True)
1
+ import json
2
+ import random
3
+ import time
4
+ import urllib.parse
5
+ import uuid
6
+ from typing import Any, Dict, Generator, List, Optional, Union, cast
7
+
8
+ from curl_cffi import CurlError
9
+ from curl_cffi.requests import Session
10
+ from litprinter import ic
11
+
12
+ from webscout import exceptions
13
+ from webscout.AIbase import Provider, Response
14
+ from webscout.AIutel import AwesomePrompts, Conversation, Optimizers, retry
15
+ from webscout.litagent import LitAgent as Lit
16
+ from webscout.scout import Scout
17
+
18
+ MAX_RETRIES = 3
19
+ HTTP2_STREAM_ERRORS = [92, 18, 7, 35, 36] # Common curl HTTP/2 stream errors
20
+
21
+
22
+ def generate_offline_threading_id() -> str:
23
+ """
24
+ Generates an offline threading ID.
25
+
26
+ Returns:
27
+ str: The generated offline threading ID.
28
+ """
29
+ # Maximum value for a 64-bit integer in Python
30
+ max_int = (1 << 64) - 1
31
+ mask22_bits = (1 << 22) - 1
32
+
33
+ # Function to get the current timestamp in milliseconds
34
+ def get_current_timestamp():
35
+ return int(time.time() * 1000)
36
+
37
+ # Function to generate a random 64-bit integer
38
+ def get_random_64bit_int():
39
+ return random.getrandbits(64)
40
+
41
+ # Combine timestamp and random value
42
+ def combine_and_mask(timestamp, random_value):
43
+ shifted_timestamp = timestamp << 22
44
+ masked_random = random_value & mask22_bits
45
+ return (shifted_timestamp | masked_random) & max_int
46
+
47
+ timestamp = get_current_timestamp()
48
+ random_value = get_random_64bit_int()
49
+ threading_id = combine_and_mask(timestamp, random_value)
50
+
51
+ return str(threading_id)
52
+
53
+
54
+ def extract_value(text: str, start_str: str, end_str: str) -> str:
55
+ """
56
+ Helper function to extract a specific value from the given text using a key.
57
+
58
+ Args:
59
+ text (str): The text from which to extract the value.
60
+ start_str (str): The starting key.
61
+ end_str (str): The ending key.
62
+
63
+ Returns:
64
+ str: The extracted value or empty string if not found.
65
+ """
66
+ start_idx = text.find(start_str)
67
+ if start_idx == -1:
68
+ return ""
69
+ start = start_idx + len(start_str)
70
+ end = text.find(end_str, start)
71
+ if end == -1:
72
+ # If end not found, return rest of string (but caller should validate)
73
+ return text[start:]
74
+ return text[start:end]
75
+
76
+
77
+ def format_response(response: dict) -> str:
78
+ """
79
+ Formats the response from Meta AI to remove unnecessary characters.
80
+
81
+ Args:
82
+ response (dict): The dictionnary containing the response to format.
83
+
84
+ Returns:
85
+ str: The formatted response.
86
+ """
87
+ text = ""
88
+ for content in (
89
+ response.get("data", {})
90
+ .get("node", {})
91
+ .get("bot_response_message", {})
92
+ .get("composed_text", {})
93
+ .get("content", [])
94
+ ):
95
+ text += content["text"] + "\n"
96
+ return text
97
+
98
+
99
+ # Function to perform the login
100
+ def get_fb_session(email, password, proxies=None):
101
+ login_url = "https://mbasic.facebook.com/login/"
102
+ headers = {
103
+ "authority": "mbasic.facebook.com",
104
+ "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
105
+ "accept-language": "en-US,en;q=0.9",
106
+ "sec-ch-ua": '"Chromium";v="122", "Not(A:Brand";v="24", "Google Chrome";v="122"',
107
+ "sec-ch-ua-mobile": "?0",
108
+ "sec-ch-ua-platform": '"macOS"',
109
+ "sec-fetch-dest": "document",
110
+ "sec-fetch-mode": "navigate",
111
+ "sec-fetch-site": "none",
112
+ "sec-fetch-user": "?1",
113
+ "upgrade-insecure-requests": "1",
114
+ "user-agent": Lit().random(),
115
+ }
116
+ # Create a session
117
+ session = Session()
118
+ if proxies:
119
+ session.proxies = proxies
120
+
121
+ # Send the GET request
122
+ response = session.get(login_url, headers=headers)
123
+
124
+ # Use Scout for parsing instead of BeautifulSoup
125
+ scout = Scout(response.text)
126
+
127
+ # Parse necessary parameters from the login form
128
+ lsd_tag = scout.find('input', attrs={'name': 'lsd'})
129
+ lsd = lsd_tag.get('value') if lsd_tag else None
130
+ jazoest_tag = scout.find('input', attrs={'name': 'jazoest'})
131
+ jazoest = jazoest_tag.get('value') if jazoest_tag else None
132
+ li_tag = scout.find('input', attrs={'name': 'li'})
133
+ li = li_tag.get('value') if li_tag else None
134
+ m_ts_tag = scout.find('input', attrs={'name': 'm_ts'})
135
+ m_ts = m_ts_tag.get('value') if m_ts_tag else None
136
+
137
+ # Define the URL and body for the POST request to submit the login form
138
+ post_url = "https://mbasic.facebook.com/login/device-based/regular/login/?refsrc=deprecated&lwv=100"
139
+ data = {
140
+ "lsd": lsd,
141
+ "jazoest": jazoest,
142
+ "m_ts": m_ts,
143
+ "li": li,
144
+ "try_number": "0",
145
+ "unrecognized_tries": "0",
146
+ "email": email,
147
+ "pass": password,
148
+ "login": "Log In",
149
+ "bi_xrwh": "0",
150
+ }
151
+
152
+ headers = {
153
+ "authority": "mbasic.facebook.com",
154
+ "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
155
+ "accept-language": "en-US,en;q=0.9",
156
+ "cache-control": "no-cache",
157
+ "content-type": "application/x-www-form-urlencoded",
158
+ "cookie": f"datr={response.cookies.get('datr')}; sb={response.cookies.get('sb')}; ps_n=1; ps_l=1",
159
+ "dpr": "2",
160
+ "origin": "https://mbasic.facebook.com",
161
+ "pragma": "no-cache",
162
+ "referer": "https://mbasic.facebook.com/login/",
163
+ "sec-fetch-site": "same-origin",
164
+ "sec-fetch-user": "?1",
165
+ "upgrade-insecure-requests": "1",
166
+ "user-agent": Lit().random(),
167
+ "viewport-width": "1728",
168
+ }
169
+
170
+ # Send the POST request
171
+ result = session.post(post_url, headers=headers, data=data)
172
+ if "sb" not in session.cookies:
173
+ raise exceptions.FacebookInvalidCredentialsException(
174
+ "Was not able to login to Facebook. Please check your credentials. "
175
+ "You may also have been rate limited. Try to connect to Facebook manually."
176
+ )
177
+
178
+ cookies = {
179
+ **result.cookies.get_dict(),
180
+ "sb": session.cookies["sb"],
181
+ "xs": session.cookies["xs"],
182
+ "fr": session.cookies["fr"],
183
+ "c_user": session.cookies["c_user"],
184
+ }
185
+
186
+ response_login = {
187
+ "cookies": cookies,
188
+ "headers": result.headers,
189
+ "response": response.text,
190
+ }
191
+ meta_ai_cookies = get_cookies()
192
+
193
+ url = "https://www.meta.ai/state/"
194
+
195
+ payload = f'__a=1&lsd={meta_ai_cookies["lsd"]}'
196
+ headers = {
197
+ "authority": "www.meta.ai",
198
+ "accept": "*/*",
199
+ "accept-language": "en-US,en;q=0.9",
200
+ "cache-control": "no-cache",
201
+ "content-type": "application/x-www-form-urlencoded",
202
+ "cookie": f'ps_n=1; ps_l=1; dpr=2; _js_datr={meta_ai_cookies["_js_datr"]}; abra_csrf={meta_ai_cookies["abra_csrf"]}; datr={meta_ai_cookies["datr"]};; ps_l=1; ps_n=1',
203
+ "origin": "https://www.meta.ai",
204
+ "pragma": "no-cache",
205
+ "referer": "https://www.meta.ai/",
206
+ "sec-fetch-mode": "cors",
207
+ "sec-fetch-site": "same-origin",
208
+ "user-agent": Lit().random(),
209
+ "viewport-width": "1728",
210
+ }
211
+
212
+ # Create a new session for this request
213
+ req_session = Session()
214
+ if proxies:
215
+ req_session.proxies = proxies
216
+
217
+ response = req_session.post(url, headers=headers, data=payload)
218
+
219
+ state = extract_value(response.text, start_str='"state":"', end_str='"')
220
+
221
+ url = f"https://www.facebook.com/oidc/?app_id=1358015658191005&scope=openid%20linking&response_type=code&redirect_uri=https%3A%2F%2Fwww.meta.ai%2Fauth%2F&no_universal_links=1&deoia=1&state={state}"
222
+ payload = {}
223
+ headers = {
224
+ "authority": "www.facebook.com",
225
+ "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
226
+ "accept-language": "en-US,en;q=0.9",
227
+ "cache-control": "no-cache",
228
+ "cookie": f"datr={response_login['cookies']['datr']}; sb={response_login['cookies']['sb']}; c_user={response_login['cookies']['c_user']}; xs={response_login['cookies']['xs']}; fr={response_login['cookies']['fr']}; m_page_voice={response_login['cookies']['m_page_voice']}; abra_csrf={meta_ai_cookies['abra_csrf']};",
229
+ "sec-fetch-dest": "document",
230
+ "sec-fetch-mode": "navigate",
231
+ "sec-fetch-site": "cross-site",
232
+ "sec-fetch-user": "?1",
233
+ "upgrade-insecure-requests": "1",
234
+ "user-agent": Lit().random(),
235
+ }
236
+
237
+ # Create a new session for Facebook
238
+ fb_session = Session()
239
+ if proxies:
240
+ fb_session.proxies = proxies
241
+
242
+ response = fb_session.get(url, headers=headers, data=payload, allow_redirects=False)
243
+
244
+ next_url = response.headers["Location"]
245
+
246
+ url = next_url
247
+
248
+ payload = {}
249
+ headers = {
250
+ "User-Agent": Lit().random(),
251
+ "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8",
252
+ "Accept-Language": "en-US,en;q=0.5",
253
+ "Accept-Encoding": "gzip, deflate, br",
254
+ "Referer": "https://www.meta.ai/",
255
+ "Connection": "keep-alive",
256
+ "Cookie": f'dpr=2; abra_csrf={meta_ai_cookies["abra_csrf"]}; datr={meta_ai_cookies["_js_datr"]}',
257
+ "Upgrade-Insecure-Requests": "1",
258
+ "Sec-Fetch-Dest": "document",
259
+ "Sec-Fetch-Mode": "navigate",
260
+ "Sec-Fetch-Site": "cross-site",
261
+ "Sec-Fetch-User": "?1",
262
+ "TE": "trailers",
263
+ }
264
+ fb_session.get(url, headers=headers, data=payload)
265
+ cookies = fb_session.cookies.get_dict()
266
+ if "abra_sess" not in cookies:
267
+ raise exceptions.FacebookInvalidCredentialsException(
268
+ "Was not able to login to Facebook. Please check your credentials. "
269
+ "You may also have been rate limited. Try to connect to Facebook manually."
270
+ )
271
+ ic.configureOutput(prefix='INFO| ')
272
+ ic("Successfully logged in to Facebook.")
273
+ return cookies
274
+
275
+
276
+ def get_cookies() -> dict:
277
+ """
278
+ Extracts necessary cookies from the Meta AI main page.
279
+
280
+ Returns:
281
+ dict: A dictionary containing essential cookies.
282
+ """
283
+ # Create a session
284
+ session = Session()
285
+
286
+ response = session.get(
287
+ "https://www.meta.ai/",
288
+ )
289
+
290
+ cookies = {
291
+ "_js_datr": extract_value(
292
+ response.text, start_str='_js_datr":{"value":"', end_str='",'
293
+ ),
294
+ "datr": extract_value(
295
+ response.text, start_str='datr":{"value":"', end_str='",'
296
+ ),
297
+ "lsd": extract_value(
298
+ response.text, start_str='"LSD",[],{"token":"', end_str='"}'
299
+ ),
300
+ "fb_dtsg": extract_value(
301
+ response.text, start_str='DTSGInitData",[],{"token":"', end_str='"'
302
+ ),
303
+ "abra_csrf": extract_value(
304
+ response.text, start_str='abra_csrf":{"value":"', end_str='",'
305
+ )
306
+ }
307
+ return cookies
308
+
309
class Meta(Provider):
    """
    A class to interact with the Meta AI API to obtain and use access tokens for sending
    and receiving messages from the Meta AI Chat API.
    """
    # Anonymous (temp-user) access works, so authentication is optional.
    required_auth = False

    def __init__(
        self,
        fb_email: Optional[str] = None,
        fb_password: Optional[str] = None,
        proxy: Optional[dict] = None,
        is_conversation: bool = True,
        max_tokens: int = 600,
        timeout: int = 30,
        intro: Optional[str] = None,
        filepath: Optional[str] = None,
        update_file: bool = True,
        proxies: Optional[dict] = None,
        history_offset: int = 10250,
        act: Optional[str] = None,
        skip_init: bool = False,
    ):
        """
        Initializes the Meta AI API with given parameters.

        Args:
            fb_email (str, optional): Your Facebook email address. Defaults to None.
            fb_password (str, optional): Your Facebook password. Defaults to None.
            proxy (dict, optional): Proxy settings for requests. Defaults to None.
            is_conversation (bool, optional): Flag for chatting conversationally. Defaults to True.
            max_tokens (int, optional): Maximum number of tokens to be generated upon completion. Defaults to 600.
            timeout (int, optional): Http request timeout. Defaults to 30.
            intro (str, optional): Conversation introductory prompt. Defaults to None.
            filepath (str, optional): Path to file containing conversation history. Defaults to None.
            update_file (bool, optional): Add new prompts and responses to the file. Defaults to True.
            proxies (dict, optional): Http request proxies. Defaults to None.
            history_offset (int, optional): Limit conversation history to this number of last texts. Defaults to 10250.
            act (str|int, optional): Awesome prompt key or index. (Used as intro). Defaults to None.
            skip_init (bool, optional): Skip the initial cookie fetch, for offline/testing use. Defaults to False.

        Raises:
            ConnectionError: If the supplied proxy cannot be reached.
        """
        self.session = Session()
        self.session.headers.update(
            {
                "user-agent": Lit().random(),
            }
        )

        # Configure session for better HTTP/2 handling
        self.session.timeout = timeout
        self.session.curl_options.update({
            # Increase connection timeout for slow networks
            155: 60,  # CURLOPT_CONNECTTIMEOUT
            # Increase total timeout
            13: max(30, timeout * 2),  # CURLOPT_TIMEOUT
            # Enable TCP keep-alive to maintain connections
            213: 60,  # CURLOPT_TCP_KEEPALIVE
            214: 30,  # CURLOPT_TCP_KEEPIDLE
            215: 10,  # CURLOPT_TCP_KEEPINTVL
            # Disable SSL verification issues
            64: 0,  # CURLOPT_SSL_VERIFYPEER
            81: 0,  # CURLOPT_SSL_VERIFYHOST
        })

        # Create a backup session for fallback
        self.backup_session = Session()
        self.backup_session.headers.update({"user-agent": Lit().random()})
        self.backup_session.curl_options.update(self.session.curl_options)

        # Add HTTP/2 error tracking
        self.http2_error_count = 0
        self.max_http2_errors = 3
        self.last_successful_request = time.time()
        self.retry_count = 0

        self.access_token = None
        self.fb_email = fb_email
        self.fb_password = fb_password
        self.proxy = proxy
        if self.proxy and not self.check_proxy():
            raise ConnectionError(
                "Unable to connect to proxy. Please check your proxy settings."
            )
        self.is_conversation = is_conversation
        self.max_tokens_to_sample = max_tokens
        self.timeout = timeout
        self.last_response = {}
        self.is_authed = fb_password is not None and fb_email is not None
        # For testing or offline environments, allow skipping the initial cookie fetch
        self.skip_init = skip_init
        self.cookies = {} if skip_init else self.get_cookies()
        self.external_conversation_id = None
        self.offline_threading_id = None

        # BUGFIX: this used to be a one-shot generator expression; the first
        # `optimizer in ...` membership test in ask() exhausted it, so every
        # later optimizer lookup silently failed. A tuple supports repeated
        # membership tests and renders its contents in error messages.
        self.__available_optimizers = tuple(
            method
            for method in dir(Optimizers)
            if callable(getattr(Optimizers, method)) and not method.startswith("__")
        )
        self.conversation = Conversation(
            is_conversation, self.max_tokens_to_sample, filepath, update_file
        )
        act_prompt = (
            AwesomePrompts().get_act(
                cast(Union[str, int], act), default=None, case_insensitive=True
            )
            if act
            else intro
        )
        if act_prompt:
            self.conversation.intro = act_prompt
        self.conversation.history_offset = history_offset
        if proxies:
            self.session.proxies.update(proxies)
        # If skip_init was True we won't have cookies yet — some methods will fetch them lazily
        if self.skip_init:
            ic.configureOutput(prefix='WARNING| ')
            ic('Meta initialized in skip_init mode: cookies not fetched. Some operations will fail until cookies are obtained.')
425
+
426
+ def check_proxy(self, test_url: str = "https://api.ipify.org/?format=json") -> bool:
427
+ """
428
+ Checks the proxy connection by making a request to a test URL.
429
+
430
+ Args:
431
+ test_url (str): A test site from which we check that the proxy is installed correctly.
432
+
433
+ Returns:
434
+ bool: True if the proxy is working, False otherwise.
435
+ """
436
+ try:
437
+ response = self.session.get(test_url, proxies=self.proxy, timeout=10)
438
+ if response.status_code == 200:
439
+ self.session.proxies = cast(Any, self.proxy)
440
+ return True
441
+ return False
442
+ except CurlError:
443
+ return False
444
+
445
+ @retry(retries=3, delay=2.0)
446
+ def get_access_token(self) -> str:
447
+ """
448
+ Retrieves an access token using Meta's authentication API.
449
+ Handles HTTP/2 errors with fallback strategies.
450
+
451
+ Returns:
452
+ str: A valid access token.
453
+ """
454
+
455
+ if self.access_token:
456
+ return self.access_token
457
+
458
+ url = "https://www.meta.ai/api/graphql/"
459
+ payload = {
460
+ "lsd": self.cookies["lsd"],
461
+ "fb_api_caller_class": "RelayModern",
462
+ "fb_api_req_friendly_name": "useAbraAcceptTOSForTempUserMutation",
463
+ "variables": {
464
+ "dob": "1999-01-01",
465
+ "icebreaker_type": "TEXT",
466
+ "__relay_internal__pv__WebPixelRatiorelayprovider": 1,
467
+ },
468
+ "doc_id": "7604648749596940",
469
+ }
470
+ payload = urllib.parse.urlencode(payload) # noqa
471
+ # Build cookie header safely - avoid inserting None values
472
+ cookie_parts = []
473
+ if self.cookies.get("_js_datr"):
474
+ cookie_parts.append(f"_js_datr={self.cookies.get('_js_datr')}")
475
+ if self.cookies.get("abra_csrf"):
476
+ cookie_parts.append(f"abra_csrf={self.cookies.get('abra_csrf')}")
477
+ if self.cookies.get("datr"):
478
+ cookie_parts.append(f"datr={self.cookies.get('datr')}")
479
+
480
+ headers = {
481
+ "content-type": "application/x-www-form-urlencoded",
482
+ "cookie": "; ".join(cookie_parts),
483
+ "sec-fetch-site": "same-origin",
484
+ "x-fb-friendly-name": "useAbraAcceptTOSForTempUserMutation",
485
+ }
486
+
487
+ # Try once with the normal session
488
+ try:
489
+ response = self.session.post(url, headers=headers, data=payload, timeout=self.timeout)
490
+ except Exception as e:
491
+ # Some Curl errors are wrapped in requests.HTTPError from curl_cffi; inspect message
492
+ err_str = str(e)
493
+ if 'HTTP/2 stream' in err_str or 'stream' in err_str:
494
+ ic.configureOutput(prefix='WARNING| ')
495
+ ic(f"Detected HTTP/2 stream issue when getting access token: {err_str}. Attempting HTTP/1.1 fallback via requests")
496
+ try:
497
+ import requests
498
+ resp = requests.post(url, headers=headers, data=payload, timeout=self.timeout, verify=False)
499
+ from types import SimpleNamespace
500
+ response = SimpleNamespace(status_code=resp.status_code, text=resp.text, json=resp.json)
501
+ except Exception as e2:
502
+ raise exceptions.FailedToGenerateResponseError(
503
+ f"Failed to get access token after HTTP/1.1 fallback: {e2}"
504
+ )
505
+ else:
506
+ raise exceptions.FailedToGenerateResponseError(
507
+ f"Failed to get access token: {e}"
508
+ )
509
+
510
+ try:
511
+ auth_json = response.json()
512
+ except json.JSONDecodeError:
513
+ raise exceptions.FacebookRegionBlocked(
514
+ "Unable to receive a valid response from Meta AI. This is likely due to your region being blocked. "
515
+ "Try manually accessing https://www.meta.ai/ to confirm."
516
+ )
517
+
518
+ access_token = auth_json["data"]["xab_abra_accept_terms_of_service"][
519
+ "new_temp_user_auth"
520
+ ]["access_token"]
521
+
522
+ # Need to sleep for a bit, for some reason the API doesn't like it when we send request too quickly
523
+ # (maybe Meta needs to register Cookies on their side?)
524
+ time.sleep(1)
525
+
526
+ return access_token
527
+
528
+ @retry(retries=3, delay=2.0)
529
+ def ask(
530
+ self,
531
+ prompt: str,
532
+ stream: bool = False,
533
+ raw: bool = False,
534
+ optimizer: Optional[str] = None,
535
+ conversationally: bool = False,
536
+ ) -> Union[Dict, Generator[Dict, None, None]]:
537
+ """
538
+ Sends a message to the Meta AI and returns the response.
539
+
540
+ Args:
541
+ prompt (str): The prompt to send.
542
+ stream (bool): Whether to stream the response or not. Defaults to False.
543
+ raw (bool, optional): Stream back raw response as received. Defaults to False.
544
+ optimizer (str, optional): Prompt optimizer name - `[code, shell_command]`. Defaults to None.
545
+ conversationally (bool, optional): Chat conversationally when using optimizer. Defaults to False.
546
+ Returns:
547
+ Union[Dict, Generator[Dict, None, None]]: A dictionary containing the response message and sources, or a generator yielding such dictionaries.
548
+ """
549
+ conversation_prompt = self.conversation.gen_complete_prompt(prompt)
550
+ if optimizer:
551
+ if optimizer in self.__available_optimizers:
552
+ conversation_prompt = getattr(Optimizers, optimizer)(
553
+ conversation_prompt if conversationally else prompt
554
+ )
555
+ else:
556
+ raise Exception(
557
+ f"Optimizer is not one of {self.__available_optimizers}"
558
+ )
559
+
560
+ if not self.is_authed:
561
+ # Lazily obtain access token/cookies if skip_init was used
562
+ if not self.cookies:
563
+ self.cookies = self.get_cookies()
564
+ self.access_token = self.get_access_token()
565
+ auth_payload = {"access_token": self.access_token}
566
+ url = "https://graph.meta.ai/graphql?locale=user"
567
+
568
+ else:
569
+ if not self.cookies:
570
+ self.cookies = self.get_cookies()
571
+ auth_payload = {"fb_dtsg": self.cookies["fb_dtsg"]}
572
+ url = "https://www.meta.ai/api/graphql/"
573
+
574
+ if not self.external_conversation_id:
575
+ external_id = str(uuid.uuid4())
576
+ self.external_conversation_id = external_id
577
+ payload = {
578
+ **auth_payload,
579
+ "fb_api_caller_class": "RelayModern",
580
+ "fb_api_req_friendly_name": "useAbraSendMessageMutation",
581
+ "variables": json.dumps(
582
+ {
583
+ "message": {"sensitive_string_value": conversation_prompt},
584
+ "externalConversationId": self.external_conversation_id,
585
+ "offlineThreadingId": generate_offline_threading_id(),
586
+ "suggestedPromptIndex": None,
587
+ "flashVideoRecapInput": {"images": []},
588
+ "flashPreviewInput": None,
589
+ "promptPrefix": None,
590
+ "entrypoint": "ABRA__CHAT__TEXT",
591
+ "icebreaker_type": "TEXT",
592
+ "__relay_internal__pv__AbraDebugDevOnlyrelayprovider": False,
593
+ "__relay_internal__pv__WebPixelRatiorelayprovider": 1,
594
+ }
595
+ ),
596
+ "server_timestamps": "true",
597
+ "doc_id": "7783822248314888",
598
+ }
599
+ payload = urllib.parse.urlencode(payload) # noqa
600
+ headers = {
601
+ "content-type": "application/x-www-form-urlencoded",
602
+ "x-fb-friendly-name": "useAbraSendMessageMutation",
603
+ }
604
+ if self.is_authed:
605
+ headers["cookie"] = f'abra_sess={self.cookies["abra_sess"]}'
606
+ # Recreate the session to avoid cookie leakage when user is authenticated
607
+ self.session = Session()
608
+ self.session.proxies = cast(Any, self.proxy)
609
+
610
+ if stream:
611
+
612
+ def for_stream():
613
+ try:
614
+ try:
615
+ response = self.session.post(
616
+ url, headers=headers, data=payload, stream=True, timeout=self.timeout
617
+ )
618
+ except CurlError as e:
619
+ # Try HTTP/1.1 fallback once
620
+ if hasattr(e, 'errno') and e.errno in HTTP2_STREAM_ERRORS:
621
+ ic.configureOutput(prefix='WARNING| ')
622
+ ic("HTTP/2 stream error on streaming request, attempting HTTP/1.1 fallback")
623
+ try:
624
+ self.session.curl_options.update({84: 1}) # force HTTP/1.1
625
+ response = self.session.post(url, headers=headers, data=payload, stream=True, timeout=self.timeout)
626
+ except Exception:
627
+ raise
628
+ else:
629
+ raise
630
+
631
+ if not response.ok:
632
+ raise exceptions.FailedToGenerateResponseError(
633
+ f"Failed to generate response - ({response.status_code}, {response.reason}) - {response.text}"
634
+ )
635
+
636
+ lines = response.iter_lines()
637
+ is_error = json.loads(next(lines))
638
+ if len(is_error.get("errors", [])) > 0:
639
+ raise exceptions.FailedToGenerateResponseError(
640
+ f"Failed to generate response - {response.text}"
641
+ )
642
+ final_message = None
643
+ try:
644
+ for line in lines:
645
+ if line:
646
+ try:
647
+ json_line = json.loads(line)
648
+ extracted_data = self.extract_data(json_line)
649
+ if not extracted_data.get("message"):
650
+ continue
651
+ self.last_response.update(extracted_data)
652
+ final_message = extracted_data # Always keep the latest
653
+ yield final_message if not raw else json.dumps(final_message)
654
+ except json.JSONDecodeError:
655
+ # Skip malformed JSON lines
656
+ continue
657
+ except CurlError as e:
658
+ # Handle HTTP/2 stream closure during iteration
659
+ if hasattr(e, 'errno') and e.errno in HTTP2_STREAM_ERRORS:
660
+ ic.configureOutput(prefix='WARNING| ')
661
+ ic(f"HTTP/2 stream closed during iteration (errno: {e.errno})")
662
+ if final_message:
663
+ # Yield the last complete message before the stream closed
664
+ yield final_message if not raw else json.dumps(final_message)
665
+ break
666
+ else:
667
+ raise
668
+ except (ConnectionError, TimeoutError) as e:
669
+ ic.configureOutput(prefix='WARNING| ')
670
+ ic(f"Connection error during streaming: {e}")
671
+ if final_message:
672
+ # Yield the last complete message before the connection was lost
673
+ yield final_message if not raw else json.dumps(final_message)
674
+ self.conversation.update_chat_history(
675
+ prompt, self.get_message(self.last_response)
676
+ )
677
+ return
678
+
679
+ if final_message:
680
+ self.conversation.update_chat_history(
681
+ prompt, self.get_message(self.last_response)
682
+ )
683
+
684
+ except CurlError as e:
685
+ if hasattr(e, 'errno') and e.errno in HTTP2_STREAM_ERRORS:
686
+ raise exceptions.FailedToGenerateResponseError(
687
+ f"HTTP/2 stream error in stream mode: {e}"
688
+ )
689
+ else:
690
+ raise
691
+ except Exception as e:
692
+ raise exceptions.FailedToGenerateResponseError(
693
+ f"Unexpected error in stream mode: {e}"
694
+ )
695
+
696
+ return for_stream()
697
+ else:
698
+ try:
699
+ response = self.session.post(url, headers=headers, data=payload, timeout=self.timeout)
700
+ except CurlError as e:
701
+ # Try HTTP/1.1 fallback for non-stream requests
702
+ if hasattr(e, 'errno') and e.errno in HTTP2_STREAM_ERRORS:
703
+ ic.configureOutput(prefix='WARNING| ')
704
+ ic("HTTP/2 error on non-stream request, attempting HTTP/1.1 fallback")
705
+ try:
706
+ self.session.curl_options.update({84: 1}) # force HTTP/1.1
707
+ response = self.session.post(url, headers=headers, data=payload, timeout=self.timeout)
708
+ except Exception as e2:
709
+ raise exceptions.FailedToGenerateResponseError(
710
+ f"Failed to generate response after HTTP/1.1 fallback: {e2}"
711
+ )
712
+ else:
713
+ raise
714
+
715
+ if not response.ok:
716
+ raise exceptions.FailedToGenerateResponseError(
717
+ f"Failed to generate response - ({response.status_code}, {response.reason}) - {response.text}"
718
+ )
719
+ raw_response = response.text
720
+ last_streamed_response = self.extract_last_response(raw_response)
721
+ if not last_streamed_response:
722
+ raise exceptions.FailedToGenerateResponseError(
723
+ f"Failed to generate response - {response.text}"
724
+ )
725
+
726
+ extracted_data = self.extract_data(last_streamed_response)
727
+ self.last_response.update(extracted_data)
728
+ self.conversation.update_chat_history(
729
+ prompt, self.get_message(self.last_response)
730
+ )
731
+ return extracted_data
732
+
733
+ def chat(
734
+ self,
735
+ prompt: str,
736
+ stream: bool = False,
737
+ optimizer: Optional[str] = None,
738
+ conversationally: bool = False,
739
+ **kwargs: Any,
740
+ ) -> Union[str, Generator[str, None, None]]:
741
+ """
742
+ Sends a message to the Meta AI and returns the response.
743
+
744
+ Args:
745
+ prompt (str): The message to send.
746
+ stream (bool): Whether to stream the response or not. Defaults to False.
747
+ optimizer (str, optional): Prompt optimizer name - `[code, shell_command]`. Defaults to None.
748
+ conversationally (bool, optional): Chat conversationally when using optimizer. Defaults to False.
749
+
750
+ Returns:
751
+ str: The response message.
752
+ """
753
+
754
+ def for_stream():
755
+ for response in self.ask(
756
+ prompt, True, optimizer=optimizer, conversationally=conversationally
757
+ ):
758
+ yield self.get_message(response)
759
+
760
+ def for_non_stream():
761
+ return self.get_message(
762
+ self.ask(
763
+ prompt,
764
+ False,
765
+ optimizer=optimizer,
766
+ conversationally=conversationally,
767
+ )
768
+ )
769
+
770
+ return for_stream() if stream else for_non_stream()
771
+
772
+ def extract_last_response(self, response: str) -> Optional[Dict]:
773
+ """
774
+ Extracts the last response from the Meta AI API.
775
+
776
+ Args:
777
+ response (str): The response to extract the last response from.
778
+
779
+ Returns:
780
+ dict: A dictionary containing the last response.
781
+ """
782
+ last_streamed_response = None
783
+ for line in response.split("\n"):
784
+ try:
785
+ json_line = json.loads(line)
786
+ except json.JSONDecodeError:
787
+ continue
788
+
789
+ bot_response_message = (
790
+ json_line.get("data", {})
791
+ .get("node", {})
792
+ .get("bot_response_message", {})
793
+ )
794
+ chat_id = bot_response_message.get("id")
795
+ if chat_id:
796
+ external_conversation_id, offline_threading_id, _ = chat_id.split("_")
797
+ self.external_conversation_id = external_conversation_id
798
+ self.offline_threading_id = offline_threading_id
799
+
800
+ streaming_state = bot_response_message.get("streaming_state")
801
+ if streaming_state == "OVERALL_DONE":
802
+ last_streamed_response = json_line
803
+
804
+ return last_streamed_response
805
+
806
+ def extract_data(self, json_line: dict) -> Dict:
807
+ """
808
+ Extract data and sources from a parsed JSON line.
809
+
810
+ Args:
811
+ json_line (dict): Parsed JSON line.
812
+
813
+ Returns:
814
+ dict: A dictionary containing the response message, sources, and media.
815
+ """
816
+ bot_response_message = (
817
+ json_line.get("data", {}).get("node", {}).get("bot_response_message", {})
818
+ )
819
+ response = format_response(response=json_line)
820
+ fetch_id = bot_response_message.get("fetch_id")
821
+ sources = self.fetch_sources(fetch_id) if fetch_id else []
822
+ medias = self.extract_media(bot_response_message)
823
+ return {"message": response, "sources": sources, "media": medias}
824
+
825
+ def extract_media(self, json_line: dict) -> List[Dict]:
826
+ """
827
+ Extract media from a parsed JSON line.
828
+
829
+ Args:
830
+ json_line (dict): Parsed JSON line.
831
+
832
+ Returns:
833
+ list: A list of dictionaries containing the extracted media.
834
+ """
835
+ medias = []
836
+ imagine_card = json_line.get("imagine_card", {})
837
+ session = imagine_card.get("session", {}) if imagine_card else {}
838
+ media_sets = (
839
+ (json_line.get("imagine_card", {}).get("session", {}).get("media_sets", []))
840
+ if imagine_card and session
841
+ else []
842
+ )
843
+ for media_set in media_sets:
844
+ imagine_media = media_set.get("imagine_media", [])
845
+ for media in imagine_media:
846
+ medias.append(
847
+ {
848
+ "url": media.get("uri"),
849
+ "type": media.get("media_type"),
850
+ "prompt": media.get("prompt"),
851
+ }
852
+ )
853
+ return medias
854
+
855
    def get_cookies(self) -> dict:
        """
        Extracts necessary cookies from the Meta AI main page.

        When Facebook credentials were supplied, a Facebook session is created
        first and its ``abra_sess`` cookie is sent along with the page request.

        Returns:
            dict: A dictionary containing essential cookies.

        Raises:
            exceptions.FailedToGenerateResponseError: If the page cannot be
                fetched after retries, or essential cookie values are missing.
        """
        headers = {}
        if self.fb_email is not None and self.fb_password is not None:
            # Authenticated flow: log in to Facebook to obtain abra_sess.
            fb_session = get_fb_session(self.fb_email, self.fb_password, self.proxy)
            headers = {"cookie": f"abra_sess={fb_session['abra_sess']}"}

        # Try fetching the page with a few retries in case of transient errors
        last_response = None
        for attempt in range(3):
            try:
                response = self.session.get(
                    url="https://www.meta.ai/",
                    headers=headers,
                    proxies=self.proxy,
                    timeout=self.timeout,
                )
                last_response = response
                break
            except Exception as e:
                ic.configureOutput(prefix='WARNING| ')
                ic(f"Attempt {attempt+1} to fetch meta.ai failed: {e}. Retrying...")
                # Exponential backoff between attempts: 1s, 2s, 4s.
                time.sleep(1 * (2 ** attempt))
        if last_response is None:
            raise exceptions.FailedToGenerateResponseError(
                "Failed to fetch https://www.meta.ai/ after multiple attempts"
            )
        response = last_response

        # Each value is embedded in the page's inline JSON blobs.
        cookies = {
            "_js_datr": extract_value(
                response.text, start_str='_js_datr":{"value":"', end_str='",'
            ),
            "datr": extract_value(
                response.text, start_str='datr":{"value":"', end_str='",'
            ),
            "lsd": extract_value(
                response.text, start_str='"LSD",[],{"token":"', end_str='"}'
            ),
            "fb_dtsg": extract_value(
                response.text, start_str='DTSGInitData",[],{"token":"', end_str='"'
            ),
        }
        # Also check cookie jar for values
        jar = response.cookies.get_dict()
        for key in ["_js_datr", "datr", "lsd", "fb_dtsg"]:
            if not cookies.get(key) and jar.get(key):
                cookies[key] = jar.get(key)
        if len(headers) > 0:
            # Authenticated: carry the Facebook session cookie forward.
            cookies["abra_sess"] = fb_session["abra_sess"]
        else:
            # Anonymous: the CSRF token is needed for the temp-user flow.
            cookies["abra_csrf"] = extract_value(
                response.text, start_str='abra_csrf":{"value":"', end_str='",'
            )

        # Validate extracted cookies - ensure essential ones are present
        essential = ["_js_datr", "datr", "lsd"]
        missing = [k for k in essential if not cookies.get(k)]
        if missing:
            # Include a snippet of the page to aid debugging blocked responses.
            snippet = (response.text[:500] + '...') if response and hasattr(response, 'text') else ''
            raise exceptions.FailedToGenerateResponseError(
                f"Unable to extract necessary cookies from meta.ai page - missing: {missing}. Response may be blocked or returning an error page. Snippet: {snippet}"
            )

        return cookies
925
+
926
+ def fetch_sources(self, fetch_id: str) -> List[Dict]:
927
+ """
928
+ Fetches sources from the Meta AI API based on the given query.
929
+
930
+ Args:
931
+ fetch_id (str): The fetch ID to use for the query.
932
+
933
+ Returns:
934
+ list: A list of dictionaries containing the fetched sources.
935
+ """
936
+
937
+ url = "https://graph.meta.ai/graphql?locale=user"
938
+ payload = {
939
+ "access_token": self.access_token,
940
+ "fb_api_caller_class": "RelayModern",
941
+ "fb_api_req_friendly_name": "AbraSearchPluginDialogQuery",
942
+ "variables": json.dumps({"abraMessageFetchID": fetch_id}),
943
+ "server_timestamps": "true",
944
+ "doc_id": "6946734308765963",
945
+ }
946
+
947
+ payload = urllib.parse.urlencode(payload) # noqa
948
+
949
+ headers = {
950
+ "authority": "graph.meta.ai",
951
+ "accept-language": "en-US,en;q=0.9,fr-FR;q=0.8,fr;q=0.7",
952
+ "content-type": "application/x-www-form-urlencoded",
953
+ "cookie": f'dpr=2; abra_csrf={self.cookies.get("abra_csrf")}; datr={self.cookies.get("datr")}; ps_n=1; ps_l=1',
954
+ "x-fb-friendly-name": "AbraSearchPluginDialogQuery",
955
+ }
956
+
957
+ response = self.session.post(url, headers=headers, data=payload)
958
+ response_json = response.json()
959
+ message = response_json.get("data", {}).get("message", {})
960
+ search_results = (
961
+ (response_json.get("data", {}).get("message", {}).get("searchResults"))
962
+ if message
963
+ else None
964
+ )
965
+ if search_results is None:
966
+ return []
967
+
968
+ references = search_results["references"]
969
+ return references
970
+
971
+ def get_message(self, response: Response) -> str:
972
+ """Retrieves message only from response
973
+
974
+ Args:
975
+ response (dict): Response generated by `self.ask`
976
+
977
+ Returns:
978
+ str: Message extracted
979
+ """
980
+ assert isinstance(response, dict), "Response should be of dict data-type only"
981
+ return response["message"]
982
+
983
if __name__ == "__main__":
    # Smoke test: send a greeting and print whatever comes back.
    try:
        provider = Meta()
        reply = provider.chat("hi")
        for piece in reply:
            print(piece, end="", flush=True)
    except exceptions.FailedToGenerateResponseError as e:
        ic.configureOutput(prefix='ERROR| ')
        ic(f"Meta provider failed to initialize or run: {e}")
        ic("Possible causes: network connectivity issues, region blocking, or site returning error pages.")
        ic("For offline testing, re-run with: Meta(skip_init=True)")
    except Exception as e:
        ic.configureOutput(prefix='ERROR| ')
        ic(f"Unexpected error running meta provider: {e}")