metaai_sdk-2.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
metaai_api/__init__.py ADDED
@@ -0,0 +1,20 @@
+ """MetaAI API - Python SDK for Meta AI powered by Llama 3.
+
+ A modern, feature-rich Python SDK providing seamless access to Meta AI's capabilities:
+ - Chat with Llama 3 (with real-time internet access)
+ - Generate AI images
+ - Create AI videos from text prompts
+ - No API key required
+ """
+
+ __version__ = "2.0.0"
+ __author__ = "Meta AI SDK Team"
+ __license__ = "MIT"
+ __url__ = "https://github.com/mir-ashiq/metaai-api"
+
+ from .main import MetaAI  # noqa
+ from .client import send_animate_request
+ from .video_generation import VideoGenerator  # noqa
+
+ __all__ = ["MetaAI", "send_animate_request", "VideoGenerator"]
+
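For orientation, a minimal usage sketch of the surface this `__init__.py` exports. The import path and the `__all__` names come from the file above; the `MetaAI()` constructor arguments and `prompt()` call shape are assumptions based on the package docstring, not confirmed by this diff:

    from metaai_api import MetaAI

    ai = MetaAI()  # the docstring says no API key is required
    reply = ai.prompt("Summarize today's AI news")  # assumed signature
    print(reply)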
@@ -0,0 +1,256 @@
+ import asyncio
+ import contextlib
+ import logging
+ import os
+ import time
+ import uuid
+ from pathlib import Path
+ from typing import Any, Dict, Optional
+
+ from dotenv import load_dotenv
+ from fastapi import Depends, FastAPI, HTTPException
+ from fastapi.concurrency import run_in_threadpool
+ from pydantic import BaseModel, Field
+
+ from metaai_api import MetaAI
+
+ logger = logging.getLogger(__name__)
+
+ # Load .env file if it exists
+ env_path = Path(__file__).parent.parent.parent / ".env"
+ if env_path.exists():
+     load_dotenv(env_path)
+     logger.info(f"Loaded environment variables from {env_path}")
+
+ # Refresh interval (seconds) for keeping lsd/fb_dtsg/cookies fresh
+ DEFAULT_REFRESH_SECONDS = 3600
+ REFRESH_SECONDS = int(os.getenv("META_AI_REFRESH_INTERVAL_SECONDS", DEFAULT_REFRESH_SECONDS))
+
+
+ class TokenCache:
+     """Thread-safe cache for Meta cookies and tokens."""
+
+     def __init__(self) -> None:
+         self._lock = asyncio.Lock()
+         self._cookies: Dict[str, str] = {}
+         self._last_refresh: float = 0.0
+
+     async def load_seed(self) -> None:
+         seed = {
+             "datr": os.getenv("META_AI_DATR", ""),
+             "abra_sess": os.getenv("META_AI_ABRA_SESS", ""),
+             "dpr": os.getenv("META_AI_DPR", ""),
+             "wd": os.getenv("META_AI_WD", ""),
+             "_js_datr": os.getenv("META_AI_JS_DATR", ""),
+             "abra_csrf": os.getenv("META_AI_ABRA_CSRF", ""),
+         }
+         missing = [k for k in ("datr", "abra_sess") if not seed.get(k)]
+         if missing:
+             raise RuntimeError(f"Missing required seed cookies: {', '.join(missing)}")
+         async with self._lock:
+             self._cookies = {k: v for k, v in seed.items() if v}
+             self._last_refresh = 0.0
+
+     async def refresh_if_needed(self, force: bool = False) -> None:
+         now = time.time()
+         if not force and (now - self._last_refresh) < REFRESH_SECONDS:
+             return
+         async with self._lock:
+             # Double-checked: another coroutine may have refreshed while we waited for the lock
+             if not force and (time.time() - self._last_refresh) < REFRESH_SECONDS:
+                 return
+             try:
+                 # MetaAI may fetch lsd/fb_dtsg over blocking HTTP, so construct it
+                 # off the event loop; capture any cookie updates it makes
+                 ai = await run_in_threadpool(MetaAI, cookies=dict(self._cookies))
+                 self._cookies = getattr(ai, "cookies", self._cookies)
+                 self._last_refresh = time.time()
+             except Exception as exc:  # noqa: BLE001
+                 logger.warning("Cookie refresh failed: %s", exc)
+                 if force:
+                     raise
+
+     async def refresh_after_error(self) -> None:
+         await self.refresh_if_needed(force=True)
+
+     async def snapshot(self) -> Dict[str, str]:
+         async with self._lock:
+             return dict(self._cookies)
+
+
+ cache = TokenCache()
+ refresh_task: Optional[asyncio.Task] = None
+ app = FastAPI(title="Meta AI API Service", version="0.1.0")
+
+
+ def _get_proxies() -> Optional[Dict[str, str]]:
+     http_proxy = os.getenv("META_AI_PROXY_HTTP")
+     https_proxy = os.getenv("META_AI_PROXY_HTTPS")
+     if not http_proxy and not https_proxy:
+         return None
+     proxies: Dict[str, str] = {}
+     if http_proxy:
+         proxies["http"] = http_proxy
+     if https_proxy:
+         proxies["https"] = https_proxy
+     return proxies
+
+
+ class ChatRequest(BaseModel):
+     message: str
+     stream: bool = False
+     new_conversation: bool = False
+
+
+ class VideoRequest(BaseModel):
+     prompt: str
+     wait_before_poll: int = Field(10, ge=0, le=60)
+     max_attempts: int = Field(30, ge=1, le=60)
+     wait_seconds: int = Field(5, ge=1, le=30)
+     verbose: bool = False
+
+
+ class JobStatus(BaseModel):
+     job_id: str
+     status: str
+     created_at: float
+     updated_at: float
+     result: Optional[Dict[str, Any]] = None
+     error: Optional[str] = None
+
+
+ class JobStore:
+     def __init__(self) -> None:
+         self._jobs: Dict[str, JobStatus] = {}
+         self._lock = asyncio.Lock()
+
+     async def create(self) -> JobStatus:
+         now = time.time()
+         job_id = str(uuid.uuid4())
+         job = JobStatus(job_id=job_id, status="pending", created_at=now, updated_at=now)
+         async with self._lock:
+             self._jobs[job_id] = job
+         return job
+
+     async def set_running(self, job_id: str) -> None:
+         await self._update(job_id, status="running")
+
+     async def set_result(self, job_id: str, result: Dict[str, Any]) -> None:
+         await self._update(job_id, status="succeeded", result=result, error=None)
+
+     async def set_error(self, job_id: str, error: str) -> None:
+         await self._update(job_id, status="failed", error=error)
+
+     async def get(self, job_id: str) -> JobStatus:
+         async with self._lock:
+             if job_id not in self._jobs:
+                 raise KeyError(job_id)
+             return self._jobs[job_id]
+
+     async def _update(self, job_id: str, **fields: Any) -> None:
+         async with self._lock:
+             if job_id not in self._jobs:
+                 raise KeyError(job_id)
+             job = self._jobs[job_id].copy(update=fields)
+             job.updated_at = time.time()
+             self._jobs[job_id] = job
+
+
+ jobs = JobStore()
+
+
+ async def get_cookies() -> Dict[str, str]:
+     await cache.refresh_if_needed()
+     return await cache.snapshot()
+
+
+ @app.on_event("startup")
+ async def _startup() -> None:
+     await cache.load_seed()
+     await cache.refresh_if_needed(force=True)
+     global refresh_task
+     refresh_task = asyncio.create_task(_refresh_loop())
+
+
+ @app.on_event("shutdown")
+ async def _shutdown() -> None:
+     global refresh_task
+     if refresh_task:
+         refresh_task.cancel()
+         with contextlib.suppress(asyncio.CancelledError):
+             await refresh_task
+
+
+ @app.post("/chat")
+ async def chat(body: ChatRequest, cookies: Dict[str, str] = Depends(get_cookies)) -> Dict[str, Any]:
+     if body.stream:
+         raise HTTPException(status_code=400, detail="Streaming not supported via HTTP JSON; set stream=false")
+     try:
+         # MetaAI construction and prompt() are blocking; keep them off the event
+         # loop, and inside the try so constructor failures also trigger a refresh
+         ai = await run_in_threadpool(MetaAI, cookies=cookies, proxy=_get_proxies())
+         return await run_in_threadpool(ai.prompt, body.message, stream=False, new_conversation=body.new_conversation)
+     except Exception as exc:  # noqa: BLE001
+         await cache.refresh_after_error()
+         raise HTTPException(status_code=502, detail=str(exc)) from exc
+
+
+ @app.post("/video")
+ async def video(body: VideoRequest, cookies: Dict[str, str] = Depends(get_cookies)) -> Dict[str, Any]:
+     try:
+         # Constructor may perform blocking HTTP, so it also goes through the threadpool
+         ai = await run_in_threadpool(MetaAI, cookies=cookies, proxy=_get_proxies())
+         return await run_in_threadpool(
+             ai.generate_video,
+             body.prompt,
+             body.wait_before_poll,
+             body.max_attempts,
+             body.wait_seconds,
+             body.verbose,
+         )
+     except Exception as exc:  # noqa: BLE001
+         await cache.refresh_after_error()
+         raise HTTPException(status_code=502, detail=str(exc)) from exc
+
+
+ _video_tasks: set = set()
+
+
+ @app.post("/video/async")
+ async def video_async(body: VideoRequest, cookies: Dict[str, str] = Depends(get_cookies)) -> Dict[str, str]:
+     job = await jobs.create()
+     # Hold a reference so the fire-and-forget task cannot be garbage-collected mid-run
+     task = asyncio.create_task(_run_video_job(job.job_id, body, cookies))
+     _video_tasks.add(task)
+     task.add_done_callback(_video_tasks.discard)
+     return {"job_id": job.job_id, "status": "pending"}
+
+
+ @app.get("/video/jobs/{job_id}")
+ async def video_job_status(job_id: str) -> Dict[str, Any]:
+     try:
+         job = await jobs.get(job_id)
+         return job.dict()
+     except KeyError as exc:
+         raise HTTPException(status_code=404, detail="Job not found") from exc
+
+
+ @app.get("/healthz")
+ async def health() -> Dict[str, str]:
+     return {"status": "ok"}
+
+
+ async def _run_video_job(job_id: str, body: VideoRequest, cookies: Dict[str, str]) -> None:
+     await jobs.set_running(job_id)
+     try:
+         # Construct inside the try so a constructor failure cannot leave the job
+         # stuck in "running"; blocking work runs in the threadpool
+         ai = await run_in_threadpool(MetaAI, cookies=cookies, proxy=_get_proxies())
+         result = await run_in_threadpool(
+             ai.generate_video,
+             body.prompt,
+             body.wait_before_poll,
+             body.max_attempts,
+             body.wait_seconds,
+             body.verbose,
+         )
+         await jobs.set_result(job_id, result)
+     except Exception as exc:  # noqa: BLE001
+         await cache.refresh_after_error()
+         await jobs.set_error(job_id, str(exc))
+
+
+ async def _refresh_loop() -> None:
+     while True:
+         try:
+             await cache.refresh_if_needed(force=True)
+         except Exception as exc:  # noqa: BLE001
+             logger.warning("Background refresh failed: %s", exc)
+         await asyncio.sleep(REFRESH_SECONDS)
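A hedged sketch of driving the asynchronous video flow exposed above. The `/video/async` and `/video/jobs/{job_id}` paths and the status values come from the code; the host/port and polling cadence are assumptions:

    import time
    import requests

    BASE = "http://localhost:8000"  # assumed local uvicorn deployment

    job = requests.post(f"{BASE}/video/async", json={"prompt": "a koala surfing at sunset"}).json()
    while True:
        status = requests.get(f"{BASE}/video/jobs/{job['job_id']}").json()
        if status["status"] in ("succeeded", "failed"):
            break
        time.sleep(5)  # arbitrary poll interval
    print(status.get("result") or status.get("error"))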
metaai_api/client.py ADDED
@@ -0,0 +1,140 @@
+ import json
+ from typing import Dict, List
+
+ import requests
+
+
+ def _parse_cookie_header(raw_cookie: str) -> Dict[str, str]:
+     parts = [p.strip() for p in raw_cookie.split(";") if p.strip()]
+     cookies: Dict[str, str] = {}
+     for part in parts:
+         if "=" not in part:
+             continue
+         name, value = part.split("=", 1)
+         cookies[name.strip()] = value.strip()
+     return cookies
+
+
17
+
18
+ def send_animate_request(user_cookie_header: str, prompt: str) -> Dict:
19
+ """Send a prompt to Meta AI's animate endpoint using provided cookies."""
20
+ cookies = _parse_cookie_header(user_cookie_header)
21
+
22
+ headers = {
23
+ "accept": "*/*",
24
+ "accept-language": "en-US,en;q=0.5",
25
+ "content-type": "multipart/form-data; boundary=----WebKitFormBoundarybkOB5PgK5hbMvG6A",
26
+ "origin": "https://www.meta.ai",
27
+ "priority": "u=1, i",
28
+ "referer": "https://www.meta.ai/",
29
+ "sec-ch-ua": '"Brave";v="141", "Not?A_Brand";v="8", "Chromium";v="141"',
30
+ "sec-ch-ua-full-version-list": '"Brave";v="141.0.0.0", "Not?A_Brand";v="8.0.0.0", "Chromium";v="141.0.0.0"',
31
+ "sec-ch-ua-mobile": "?0",
32
+ "sec-ch-ua-model": '""',
33
+ "sec-ch-ua-platform": '"Windows"',
34
+ "sec-ch-ua-platform-version": '"19.0.0"',
35
+ "sec-fetch-dest": "empty",
36
+ "sec-fetch-mode": "cors",
37
+ "sec-fetch-site": "same-origin",
38
+ "sec-gpc": "1",
39
+ "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/141.0.0.0 Safari/537.36",
40
+ "x-asbd-id": "359341",
41
+ "x-fb-lsd": "MMUBfnMuJ_zHq68M_QsA9p",
42
+ "cookie": user_cookie_header,
43
+ }
44
+
45
+ params = {
46
+ "fb_dtsg": "NAfs8i5CfuTxSgY049krPWfh6MLk1zW--f6qnzvqgeEaPvOWcpH_esA:2:1763623145",
47
+ "jazoest": "25561",
48
+ "lsd": "MMUBfnMuJ_zHq68M_QsA9p",
49
+ }
50
+
51
+ variables = json.dumps({"message": {"sensitive_string_value": prompt}})
52
+
53
+ data = {
54
+ "av": (None, "813590375178585"),
55
+ "__user": (None, "0"),
56
+ "__a": (None, "1"),
57
+ "__req": (None, "1b"),
58
+ "__hs": (None, "20412.HYP:kadabra_pkg.2.1...0"),
59
+ "dpr": (None, "1"),
60
+ "__ccg": (None, "GOOD"),
61
+ "__rev": (None, "1030167105"),
62
+ "__s": (None, "pfvlzb:j08r3n:u2o3ge"),
63
+ "__hsi": (None, "7574782986293208112"),
64
+ "__dyn": (None, "7xeUjG1mxu1syUqxemh0no6u5U4e2C1vzEdE98K360CEbo19oe8hw2nVEtwMw6ywaq221FwpUO0n24oaEnxO0Bo7O2l0Fwqo31w9O1lwlE-U2zxe2GewbS361qw82dUlwhE-15wmo423-0j52oS0Io5d0bS1LBwNwKG0WE8oC1IwGw-wlUcE2-G2O7E5y1rwa211wo84y1ix-0QU4G"),
65
+ "__csr": (None, ""),
66
+ "__hsdp": (None, ""),
67
+ "__hblp": (None, ""),
68
+ "__sjsp": (None, ""),
69
+ "__comet_req": (None, "72"),
70
+ "fb_dtsg": (None, "NAfs8i5CfuTxSgY049krPWfh6MLk1zW--f6qnzvqgeEaPvOWcpH_esA:2:1763623145"),
71
+ "jazoest": (None, "25561"),
72
+ "lsd": (None, "MMUBfnMuJ_zHq68M_QsA9p"),
73
+ "__spin_r": (None, "1030167105"),
74
+ "__spin_b": (None, "trunk"),
75
+ "__spin_t": (None, "1763641598"),
76
+ "__jssesw": (None, "1"),
77
+ "__crn": (None, "comet.kadabra.KadabraPromptRoute"),
78
+ "fb_api_caller_class": (None, "RelayModern"),
79
+ "fb_api_req_friendly_name": (None, "useKadabraSendMessageMutation"),
80
+ "server_timestamps": (None, "true"),
81
+ "variables": (None, variables),
82
+ "doc_id": (None, "26069859009269605"),
83
+ }
84
+
85
+ response = requests.post(
86
+ "https://www.meta.ai/api/graphql/",
87
+ params=params,
88
+ cookies=cookies,
89
+ headers=headers,
90
+ data=data,
91
+ )
92
+ response.raise_for_status()
93
+ return response.json()
94
+
+
+ def extract_video_urls_from_fetch_response(fetch_response: Dict) -> List[str]:
+     urls: List[str] = []
+
+     def _collect(video: Dict) -> None:
+         # Accept either a direct URL field or progressive-delivery URLs
+         uri = video.get("video_url") or video.get("uri")
+         if uri:
+             urls.append(uri)
+         delivery = video.get("videoDeliveryResponseResult") or {}
+         for p in delivery.get("progressive_urls", []):
+             pu = p.get("progressive_url")
+             if pu:
+                 urls.append(pu)
+
+     data = fetch_response.get("data", {})
+     fetch_post = data.get("xfb_genai_fetch_post") or data.get("xab_abra__xfb_genai_fetch_post") or {}
+
+     for edge in fetch_post.get("messages", {}).get("edges", []):
+         node = edge.get("node", {})
+         content = node.get("content", {})
+         imagine_video = content.get("imagine_video") or {}
+
+         for video in imagine_video.get("videos", {}).get("nodes", []):
+             _collect(video)
+
+         single_video = imagine_video.get("video") or {}
+         if isinstance(single_video, dict):
+             _collect(single_video)
+
+     # Deduplicate while preserving order
+     seen = set()
+     unique_urls: List[str] = []
+     for u in urls:
+         if u not in seen:
+             seen.add(u)
+             unique_urls.append(u)
+     return unique_urls
+
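How the two public helpers in this file relate, as a hedged sketch: `send_animate_request` fires the GraphQL mutation, while `extract_video_urls_from_fetch_response` walks an `xfb_genai_fetch_post` payload, which suggests it parses a separate polling/fetch response not shown in this file. Feeding the mutation response straight into the extractor is therefore an assumption, as is the cookie value:

    from metaai_api.client import send_animate_request, extract_video_urls_from_fetch_response

    raw_cookie = "datr=...; abra_sess=..."  # copied from an authenticated www.meta.ai session
    resp = send_animate_request(raw_cookie, "a paper plane gliding over dunes")
    # The extractor expects a fetch-post payload; passing the mutation response
    # directly only works if the shapes match (assumption)
    print(extract_video_urls_from_fetch_response(resp))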
@@ -0,0 +1,6 @@
+ class FacebookInvalidCredentialsException(Exception):
+     """Raised when the supplied Facebook/Meta session cookies or tokens are rejected."""
+
+
+ class FacebookRegionBlocked(Exception):
+     """Raised when Meta AI is not available in the current region."""
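Nothing in this diff shows where these exceptions are raised, so the following guard is a hedged sketch whose trigger conditions are inferred only from the class names (and which assumes the unlabeled hunk above is `metaai_api/exceptions.py`):

    from metaai_api import MetaAI
    from metaai_api.exceptions import FacebookInvalidCredentialsException, FacebookRegionBlocked

    try:
        MetaAI().prompt("hello")  # assumed signature, as above
    except FacebookInvalidCredentialsException:
        print("Cookies/tokens were rejected; refresh them from a logged-in session")
    except FacebookRegionBlocked:
        print("Meta AI is not available in this region")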