universal-mcp-applications 0.1.22__py3-none-any.whl → 0.1.39rc8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of universal-mcp-applications might be problematic.
- universal_mcp/applications/ahrefs/app.py +92 -238
- universal_mcp/applications/airtable/app.py +23 -122
- universal_mcp/applications/apollo/app.py +122 -475
- universal_mcp/applications/asana/app.py +605 -1755
- universal_mcp/applications/aws_s3/app.py +36 -103
- universal_mcp/applications/bill/app.py +644 -2055
- universal_mcp/applications/box/app.py +1246 -4159
- universal_mcp/applications/braze/app.py +410 -1476
- universal_mcp/applications/browser_use/README.md +15 -1
- universal_mcp/applications/browser_use/__init__.py +1 -0
- universal_mcp/applications/browser_use/app.py +94 -37
- universal_mcp/applications/cal_com_v2/app.py +207 -625
- universal_mcp/applications/calendly/app.py +103 -242
- universal_mcp/applications/canva/app.py +75 -140
- universal_mcp/applications/clickup/app.py +331 -798
- universal_mcp/applications/coda/app.py +240 -520
- universal_mcp/applications/confluence/app.py +497 -1285
- universal_mcp/applications/contentful/app.py +36 -151
- universal_mcp/applications/crustdata/app.py +42 -121
- universal_mcp/applications/dialpad/app.py +451 -924
- universal_mcp/applications/digitalocean/app.py +2071 -6082
- universal_mcp/applications/domain_checker/app.py +3 -54
- universal_mcp/applications/e2b/app.py +14 -64
- universal_mcp/applications/elevenlabs/app.py +9 -47
- universal_mcp/applications/exa/README.md +8 -4
- universal_mcp/applications/exa/app.py +408 -186
- universal_mcp/applications/falai/app.py +24 -101
- universal_mcp/applications/figma/app.py +91 -175
- universal_mcp/applications/file_system/app.py +2 -13
- universal_mcp/applications/firecrawl/app.py +186 -163
- universal_mcp/applications/fireflies/app.py +59 -281
- universal_mcp/applications/fpl/app.py +92 -529
- universal_mcp/applications/fpl/utils/fixtures.py +15 -49
- universal_mcp/applications/fpl/utils/helper.py +25 -89
- universal_mcp/applications/fpl/utils/league_utils.py +20 -64
- universal_mcp/applications/ghost_content/app.py +66 -175
- universal_mcp/applications/github/app.py +28 -65
- universal_mcp/applications/gong/app.py +140 -300
- universal_mcp/applications/google_calendar/app.py +26 -78
- universal_mcp/applications/google_docs/app.py +324 -354
- universal_mcp/applications/google_drive/app.py +194 -793
- universal_mcp/applications/google_gemini/app.py +29 -64
- universal_mcp/applications/google_mail/README.md +1 -0
- universal_mcp/applications/google_mail/app.py +93 -214
- universal_mcp/applications/google_searchconsole/app.py +25 -58
- universal_mcp/applications/google_sheet/app.py +174 -623
- universal_mcp/applications/google_sheet/helper.py +26 -53
- universal_mcp/applications/hashnode/app.py +57 -269
- universal_mcp/applications/heygen/app.py +77 -155
- universal_mcp/applications/http_tools/app.py +10 -32
- universal_mcp/applications/hubspot/README.md +1 -1
- universal_mcp/applications/hubspot/app.py +7508 -99
- universal_mcp/applications/jira/app.py +2419 -8334
- universal_mcp/applications/klaviyo/app.py +737 -1619
- universal_mcp/applications/linkedin/README.md +23 -4
- universal_mcp/applications/linkedin/app.py +861 -155
- universal_mcp/applications/mailchimp/app.py +696 -1851
- universal_mcp/applications/markitdown/app.py +8 -20
- universal_mcp/applications/miro/app.py +333 -815
- universal_mcp/applications/ms_teams/app.py +85 -207
- universal_mcp/applications/neon/app.py +144 -250
- universal_mcp/applications/notion/app.py +36 -51
- universal_mcp/applications/onedrive/README.md +24 -0
- universal_mcp/applications/onedrive/__init__.py +1 -0
- universal_mcp/applications/onedrive/app.py +316 -0
- universal_mcp/applications/openai/app.py +42 -165
- universal_mcp/applications/outlook/README.md +22 -9
- universal_mcp/applications/outlook/app.py +606 -262
- universal_mcp/applications/perplexity/README.md +2 -1
- universal_mcp/applications/perplexity/app.py +162 -20
- universal_mcp/applications/pipedrive/app.py +1021 -3331
- universal_mcp/applications/posthog/app.py +272 -541
- universal_mcp/applications/reddit/app.py +88 -204
- universal_mcp/applications/resend/app.py +41 -107
- universal_mcp/applications/retell/app.py +23 -50
- universal_mcp/applications/rocketlane/app.py +250 -963
- universal_mcp/applications/scraper/README.md +7 -4
- universal_mcp/applications/scraper/app.py +245 -283
- universal_mcp/applications/semanticscholar/app.py +36 -78
- universal_mcp/applications/semrush/app.py +43 -77
- universal_mcp/applications/sendgrid/app.py +826 -1576
- universal_mcp/applications/sentry/app.py +444 -1079
- universal_mcp/applications/serpapi/app.py +40 -143
- universal_mcp/applications/sharepoint/README.md +16 -14
- universal_mcp/applications/sharepoint/app.py +245 -154
- universal_mcp/applications/shopify/app.py +1743 -4479
- universal_mcp/applications/shortcut/app.py +272 -534
- universal_mcp/applications/slack/app.py +58 -109
- universal_mcp/applications/spotify/app.py +206 -405
- universal_mcp/applications/supabase/app.py +174 -283
- universal_mcp/applications/tavily/app.py +2 -2
- universal_mcp/applications/trello/app.py +853 -2816
- universal_mcp/applications/twilio/app.py +14 -50
- universal_mcp/applications/twitter/api_segments/compliance_api.py +4 -14
- universal_mcp/applications/twitter/api_segments/dm_conversations_api.py +6 -18
- universal_mcp/applications/twitter/api_segments/likes_api.py +1 -3
- universal_mcp/applications/twitter/api_segments/lists_api.py +5 -15
- universal_mcp/applications/twitter/api_segments/trends_api.py +1 -3
- universal_mcp/applications/twitter/api_segments/tweets_api.py +9 -31
- universal_mcp/applications/twitter/api_segments/usage_api.py +1 -5
- universal_mcp/applications/twitter/api_segments/users_api.py +14 -42
- universal_mcp/applications/whatsapp/app.py +35 -186
- universal_mcp/applications/whatsapp/audio.py +2 -6
- universal_mcp/applications/whatsapp/whatsapp.py +17 -51
- universal_mcp/applications/whatsapp_business/app.py +86 -299
- universal_mcp/applications/wrike/app.py +80 -153
- universal_mcp/applications/yahoo_finance/app.py +19 -65
- universal_mcp/applications/youtube/app.py +120 -306
- universal_mcp/applications/zenquotes/app.py +4 -4
- {universal_mcp_applications-0.1.22.dist-info → universal_mcp_applications-0.1.39rc8.dist-info}/METADATA +4 -2
- {universal_mcp_applications-0.1.22.dist-info → universal_mcp_applications-0.1.39rc8.dist-info}/RECORD +113 -117
- {universal_mcp_applications-0.1.22.dist-info → universal_mcp_applications-0.1.39rc8.dist-info}/WHEEL +1 -1
- universal_mcp/applications/hubspot/api_segments/__init__.py +0 -0
- universal_mcp/applications/hubspot/api_segments/api_segment_base.py +0 -54
- universal_mcp/applications/hubspot/api_segments/crm_api.py +0 -7337
- universal_mcp/applications/hubspot/api_segments/marketing_api.py +0 -1467
- universal_mcp/applications/unipile/README.md +0 -28
- universal_mcp/applications/unipile/__init__.py +0 -1
- universal_mcp/applications/unipile/app.py +0 -1077
- {universal_mcp_applications-0.1.22.dist-info → universal_mcp_applications-0.1.39rc8.dist-info}/licenses/LICENSE +0 -0
universal_mcp/applications/reddit/app.py

```diff
@@ -1,5 +1,4 @@
 from typing import Any
-
 import httpx
 from loguru import logger
 from universal_mcp.applications.application import APIApplication
@@ -38,15 +37,9 @@ class RedditApp(APIApplication):
         if "access_token" not in credentials:
             logger.error("Reddit credentials found but missing 'access_token'.")
             raise ValueError("Invalid Reddit credentials format.")
+        return {"Authorization": f"Bearer {credentials['access_token']}", "User-Agent": "agentr-reddit-app/0.1 by AgentR"}

-        return {
-            "Authorization": f"Bearer {credentials['access_token']}",
-            "User-Agent": "agentr-reddit-app/0.1 by AgentR",
-        }
-
-    def get_subreddit_posts(
-        self, subreddit: str, limit: int = 5, timeframe: str = "day"
-    ) -> dict[str, Any]:
+    async def get_subreddit_posts(self, subreddit: str, limit: int = 5, timeframe: str = "day") -> dict[str, Any]:
         """
         Fetches a specified number of top-rated posts from a particular subreddit, allowing results to be filtered by a specific timeframe (e.g., 'day', 'week'). This is a simplified version compared to `get_subreddit_top_posts`, which uses more complex pagination parameters instead of a direct time filter.

@@ -56,7 +49,7 @@ class RedditApp(APIApplication):
             timeframe: The time period for top posts. Valid options: 'hour', 'day', 'week', 'month', 'year', 'all' (default: 'day')

         Returns:
-            A
+            A dictionary containing a list of top posts with their details, or an error message if the request fails.

         Raises:
             RequestException: When the HTTP request to the Reddit API fails
@@ -69,34 +62,29 @@ class RedditApp(APIApplication):
         if timeframe not in valid_timeframes:
             return f"Error: Invalid timeframe '{timeframe}'. Please use one of: {', '.join(valid_timeframes)}"
         if not 1 <= limit <= 100:
-            return (
-                f"Error: Invalid limit '{limit}'. Please use a value between 1 and 100."
-            )
+            return f"Error: Invalid limit '{limit}'. Please use a value between 1 and 100."
         url = f"{self.base_api_url}/r/{subreddit}/top"
         params = {"limit": limit, "t": timeframe}
-        logger.info(
-            f"Requesting top {limit} posts from r/{subreddit} for timeframe '{timeframe}'"
-        )
-        response = self._get(url, params=params)
+        logger.info(f"Requesting top {limit} posts from r/{subreddit} for timeframe '{timeframe}'")
+        response = await self._aget(url, params=params)
         return self._handle_response(response)

-    def search_subreddits(
-        self, query: str, limit: int = 5, sort: str = "relevance"
-    ) -> str:
+    async def search_subreddits(self, query: str, limit: int = 5, sort: str = "relevance") -> dict[str, Any]:
         """
-
+        Finds subreddits based on a query string, searching their names and descriptions.
+        Results can be sorted by relevance or activity. This function is for discovering communities and does not search for posts or users, unlike the more general `search_reddit` function.

         Args:
-            query: The
-            limit: The maximum number of subreddits to return
-            sort: The order
+            query: The search query for subreddit names and descriptions.
+            limit: The maximum number of subreddits to return (1-100, default is 5).
+            sort: The sorting order for results. Can be 'relevance' or 'activity' (default is 'relevance').

         Returns:
-            A
+            A dictionary containing a list of matching subreddits, including their names, subscriber counts, and descriptions. Returns an error message on failure.

         Raises:
-            RequestException:
-            JSONDecodeError:
+            RequestException: If the API request to Reddit fails.
+            JSONDecodeError: If the API response is not valid JSON.

         Tags:
             search, important, reddit, api, query, format, list, validation
@@ -105,22 +93,14 @@ class RedditApp(APIApplication):
         if sort not in valid_sorts:
             return f"Error: Invalid sort option '{sort}'. Please use one of: {', '.join(valid_sorts)}"
         if not 1 <= limit <= 100:
-            return (
-                f"Error: Invalid limit '{limit}'. Please use a value between 1 and 100."
-            )
+            return f"Error: Invalid limit '{limit}'. Please use a value between 1 and 100."
         url = f"{self.base_api_url}/subreddits/search"
-        params = {
-            "q": query,
-            "limit": limit,
-            "sort": sort,
-        }
-        logger.info(
-            f"Searching for subreddits matching '{query}' (limit: {limit}, sort: {sort})"
-        )
-        response = self._get(url, params=params)
+        params = {"q": query, "limit": limit, "sort": sort}
+        logger.info(f"Searching for subreddits matching '{query}' (limit: {limit}, sort: {sort})")
+        response = await self._aget(url, params=params)
         return self._handle_response(response)

-    def get_post_flairs(self, subreddit: str):
+    async def get_post_flairs(self, subreddit: str):
         """
         Fetches a list of available post flairs (tags) for a specified subreddit. This is primarily used to discover the correct `flair_id` needed to categorize a new submission when using the `create_post` function. It returns flair details or a message if none are available.

@@ -139,21 +119,13 @@ class RedditApp(APIApplication):
         """
         url = f"{self.base_api_url}/r/{subreddit}/api/link_flair_v2"
         logger.info(f"Fetching post flairs for subreddit: r/{subreddit}")
-        response = self._get(url)
+        response = await self._aget(url)
         flairs = response.json()
         if not flairs:
             return f"No post flairs available for r/{subreddit}."
         return flairs

-    def create_post(
-        self,
-        subreddit: str,
-        title: str,
-        kind: str = "self",
-        text: str = None,
-        url: str = None,
-        flair_id: str = None,
-    ):
+    async def create_post(self, subreddit: str, title: str, kind: str = "self", text: str = None, url: str = None, flair_id: str = None):
         """
         Creates a new Reddit post in a specified subreddit. It supports text ('self') or link posts, requiring a title and corresponding content (text or URL). An optional flair can be assigned. Returns the API response or a formatted error message on failure.

@@ -176,37 +148,24 @@ class RedditApp(APIApplication):
         """
         if kind not in ["self", "link"]:
             raise ValueError("Invalid post kind. Must be one of 'self' or 'link'.")
-        if kind == "self" and not text:
+        if kind == "self" and (not text):
             raise ValueError("Text content is required for text posts.")
-        if kind == "link" and not url:
+        if kind == "link" and (not url):
             raise ValueError("URL is required for link posts (including images).")
-        data = {
-            "sr": subreddit,
-            "title": title,
-            "kind": kind,
-            "text": text,
-            "url": url,
-            "flair_id": flair_id,
-        }
+        data = {"sr": subreddit, "title": title, "kind": kind, "text": text, "url": url, "flair_id": flair_id}
         data = {k: v for k, v in data.items() if v is not None}
         url_api = f"{self.base_api_url}/api/submit"
         logger.info(f"Submitting a new post to r/{subreddit}")
-        response = self._post(url_api, data=data)
+        response = await self._apost(url_api, data=data)
         response_json = response.json()
-        if (
-            response_json
-            and "json" in response_json
-            and "errors" in response_json["json"]
-        ):
+        if response_json and "json" in response_json and ("errors" in response_json["json"]):
             errors = response_json["json"]["errors"]
             if errors:
-                error_message = ", ".join(
-                    [f"{code}: {message}" for code, message in errors]
-                )
+                error_message = ", ".join([f"{code}: {message}" for code, message in errors])
                 return f"Reddit API error: {error_message}"
         return response_json

-    def get_comment_by_id(self, comment_id: str) -> dict:
+    async def get_comment_by_id(self, comment_id: str) -> dict:
         """
         Retrieves a single Reddit comment's data, such as author and score, using its unique 't1_' prefixed ID. Unlike `get_post_comments_details` which fetches all comments for a post, this function targets one specific comment directly, returning an error dictionary if it is not found.

@@ -224,7 +183,7 @@ class RedditApp(APIApplication):
             retrieve, get, reddit, comment, api, fetch, single-item, important
         """
         url = f"https://oauth.reddit.com/api/info.json?id={comment_id}"
-        response = self._get(url)
+        response = await self._aget(url)
         data = response.json()
         comments = data.get("data", {}).get("children", [])
         if comments:
@@ -232,7 +191,7 @@ class RedditApp(APIApplication):
         else:
             return {"error": "Comment not found."}

-    def post_comment(self, parent_id: str, text: str) -> dict:
+    async def post_comment(self, parent_id: str, text: str) -> dict:
         """
         Posts a new comment as a reply to a specified Reddit post or another comment. Using the parent's full ID and the desired text, it submits the comment via the API and returns the response containing the new comment's details.

@@ -251,15 +210,12 @@ class RedditApp(APIApplication):
             post, comment, social, reddit, api, important
         """
         url = f"{self.base_api_url}/api/comment"
-        data = {
-            "parent": parent_id,
-            "text": text,
-        }
+        data = {"parent": parent_id, "text": text}
         logger.info(f"Posting comment to {parent_id}")
-        response = self._post(url, data=data)
+        response = await self._apost(url, data=data)
         return response.json()

-    def edit_content(self, content_id: str, text: str) -> dict:
+    async def edit_content(self, content_id: str, text: str) -> dict:
         """
         Modifies the text of a specific Reddit post or comment via its unique ID. Unlike creation or deletion functions, this method specifically handles updates to existing user-generated content, submitting the new text to the API and returning a JSON response detailing the edited item.

@@ -278,15 +234,12 @@ class RedditApp(APIApplication):
             edit, update, content, reddit, api, important
         """
         url = f"{self.base_api_url}/api/editusertext"
-        data = {
-            "thing_id": content_id,
-            "text": text,
-        }
+        data = {"thing_id": content_id, "text": text}
         logger.info(f"Editing content {content_id}")
-        response = self._post(url, data=data)
+        response = await self._apost(url, data=data)
         return response.json()

-    def delete_content(self, content_id: str) -> dict:
+    async def delete_content(self, content_id: str) -> dict:
         """
         Deletes a specified Reddit post or comment using its full identifier (`content_id`). It sends a POST request to the `/api/del` endpoint for permanent removal, unlike `edit_content` which only modifies. On success, it returns a confirmation message.

@@ -304,73 +257,64 @@ class RedditApp(APIApplication):
             delete, content-management, api, reddit, important
         """
         url = f"{self.base_api_url}/api/del"
-        data = {
-            "id": content_id,
-        }
+        data = {"id": content_id}
         logger.info(f"Deleting content {content_id}")
-        response = self._post(url, data=data)
+        response = await self._apost(url, data=data)
         response.raise_for_status()
         return {"message": f"Content {content_id} deleted successfully."}

-    def get_current_user_info(self) -> Any:
+    async def get_current_user_info(self) -> Any:
         """
         Retrieves the full profile information for the currently authenticated user by making a GET request to the `/api/v1/me` Reddit API endpoint. This differs from `get_user_profile`, which requires a username, and `get_current_user_karma`, which specifically fetches karma data.

         Returns:
-
+            A dictionary containing the authenticated user's profile information.

         Tags:
             users
         """
         url = f"{self.base_url}/api/v1/me"
         query_params = {}
-        response = self._get(url, params=query_params)
+        response = await self._aget(url, params=query_params)
         response.raise_for_status()
         return response.json()

-    def get_current_user_karma(self) -> Any:
+    async def get_current_user_karma(self) -> Any:
         """
         Fetches the karma breakdown for the authenticated user from the Reddit API. This function specifically targets the `/api/v1/me/karma` endpoint, returning karma statistics per subreddit, which is more specific than `get_current_user_info` that retrieves general profile information.

         Returns:
-
+            A dictionary containing the authenticated user's karma breakdown by subreddit.

         Tags:
             account
         """
         url = f"{self.base_url}/api/v1/me/karma"
         query_params = {}
-        response = self._get(url, params=query_params)
+        response = await self._aget(url, params=query_params)
         response.raise_for_status()
         return response.json()

-    def get_post_comments_details(self, post_id: str) -> Any:
+    async def get_post_comments_details(self, post_id: str) -> Any:
         """
         Fetches a specific Reddit post's details and its complete comment tree using the post's unique ID. This function returns the entire discussion, including the original post and all associated comments, providing broader context than `get_comment_by_id` which only retrieves a single comment.

         Args:
-            post_id (
+            post_id (str): The Reddit post ID ( e.g. '1m734tx' for https://www.reddit.com/r/mcp/comments/1m734tx/comment/n4occ77/)

         Returns:
-
+            A dictionary containing the post details and its comment tree.

         Tags:
             listings, comments, posts, important
         """
-
         url = f"{self.base_url}/comments/{post_id}.json"
         query_params = {}
-        response = self._get(url, params=query_params)
+        response = await self._aget(url, params=query_params)
         return self._handle_response(response)

-    def get_controversial_posts(
-        self,
-        after: str = None,
-        before: str = None,
-        count: int = None,
-        limit: int = None,
-        show: str = None,
-        sr_detail: str = None,
+    async def get_controversial_posts(
+        self, after: str = None, before: str = None, count: int = None, limit: int = None, show: str = None, sr_detail: str = None
     ) -> Any:
         """
         Fetches a global list of the most controversial posts from across all of Reddit, distinct from subreddit-specific queries. Optional parameters allow for pagination and customization of the results, returning the direct API response data with the post listings.
@@ -384,7 +328,7 @@ class RedditApp(APIApplication):
             sr_detail: Optional. Expand subreddit details.

         Returns:
-
+            A dictionary containing a listing of controversial posts.

         Tags:
             listings, posts, controversial, read-only
@@ -392,21 +336,14 @@ class RedditApp(APIApplication):
         url = f"{self.base_url}/controversial"
         query_params = {
             k: v
-            for k, v in [
-                ("after", after),
-                ("before", before),
-                ("count", count),
-                ("limit", limit),
-                ("show", show),
-                ("sr_detail", sr_detail),
-            ]
+            for k, v in [("after", after), ("before", before), ("count", count), ("limit", limit), ("show", show), ("sr_detail", sr_detail)]
             if v is not None
         }
-        response = self._get(url, params=query_params)
+        response = await self._aget(url, params=query_params)
         response.raise_for_status()
         return response.json()

-    def get_hot_posts(
+    async def get_hot_posts(
         self,
         g: str = None,
         after: str = None,
@@ -429,7 +366,7 @@ class RedditApp(APIApplication):
             sr_detail: Optional. Expand subreddit details.

         Returns:
-
+            A dictionary containing a listing of hot posts.

         Tags:
             listings, posts, hot, read-only
@@ -448,18 +385,12 @@ class RedditApp(APIApplication):
             ]
             if v is not None
         }
-        response = self._get(url, params=query_params)
+        response = await self._aget(url, params=query_params)
         response.raise_for_status()
         return response.json()

-    def get_new_posts(
-        self,
-        after: str = None,
-        before: str = None,
-        count: int = None,
-        limit: int = None,
-        show: str = None,
-        sr_detail: str = None,
+    async def get_new_posts(
+        self, after: str = None, before: str = None, count: int = None, limit: int = None, show: str = None, sr_detail: str = None
     ) -> Any:
         """
         Fetches a list of the newest posts from across all of Reddit, not limited to a specific subreddit. This function supports optional pagination and filtering parameters to customize the API response, differentiating it from `get_subreddit_new_posts` which targets a single subreddit.
@@ -473,7 +404,7 @@ class RedditApp(APIApplication):
             sr_detail: Optional. Expand subreddit details.

         Returns:
-
+            A dictionary containing a listing of new posts.

         Tags:
             listings, posts, new, read-only
@@ -481,21 +412,14 @@ class RedditApp(APIApplication):
         url = f"{self.base_url}/new"
         query_params = {
             k: v
-            for k, v in [
-                ("after", after),
-                ("before", before),
-                ("count", count),
-                ("limit", limit),
-                ("show", show),
-                ("sr_detail", sr_detail),
-            ]
+            for k, v in [("after", after), ("before", before), ("count", count), ("limit", limit), ("show", show), ("sr_detail", sr_detail)]
             if v is not None
         }
-        response = self._get(url, params=query_params)
+        response = await self._aget(url, params=query_params)
         response.raise_for_status()
         return response.json()

-    def get_subreddit_hot_posts(
+    async def get_subreddit_hot_posts(
         self,
         subreddit: str,
         g: str = None,
@@ -520,7 +444,7 @@ class RedditApp(APIApplication):
             sr_detail: Optional. Expand subreddit details.

         Returns:
-
+            A dictionary containing a listing of hot posts from the specified subreddit.

         Tags:
             listings, posts, subreddit, hot, read-only
@@ -541,11 +465,11 @@ class RedditApp(APIApplication):
             ]
             if v is not None
         }
-        response = self._get(url, params=query_params)
+        response = await self._aget(url, params=query_params)
         response.raise_for_status()
         return response.json()

-    def get_subreddit_new_posts(
+    async def get_subreddit_new_posts(
         self,
         subreddit: str,
         after: str = None,
@@ -568,7 +492,7 @@ class RedditApp(APIApplication):
             sr_detail: Optional. Expand subreddit details.

         Returns:
-
+            A dictionary containing a listing of new posts from the specified subreddit.

         Tags:
             listings, posts, subreddit, new, read-only
@@ -578,21 +502,14 @@ class RedditApp(APIApplication):
         url = f"{self.base_url}/r/{subreddit}/new"
         query_params = {
             k: v
-            for k, v in [
-                ("after", after),
-                ("before", before),
-                ("count", count),
-                ("limit", limit),
-                ("show", show),
-                ("sr_detail", sr_detail),
-            ]
+            for k, v in [("after", after), ("before", before), ("count", count), ("limit", limit), ("show", show), ("sr_detail", sr_detail)]
             if v is not None
         }
-        response = self._get(url, params=query_params)
+        response = await self._aget(url, params=query_params)
         response.raise_for_status()
         return response.json()

-    def get_subreddit_top_posts(
+    async def get_subreddit_top_posts(
         self,
         subreddit: str,
         after: str = None,
@@ -615,7 +532,7 @@ class RedditApp(APIApplication):
             sr_detail: Optional. Expand subreddit details.

         Returns:
-
+            A dictionary containing a listing of top posts from the specified subreddit.

         Tags:
             listings, posts, subreddit, top, read-only
@@ -625,28 +542,15 @@ class RedditApp(APIApplication):
         url = f"{self.base_url}/r/{subreddit}/top"
         query_params = {
             k: v
-            for k, v in [
-                ("after", after),
-                ("before", before),
-                ("count", count),
-                ("limit", limit),
-                ("show", show),
-                ("sr_detail", sr_detail),
-            ]
+            for k, v in [("after", after), ("before", before), ("count", count), ("limit", limit), ("show", show), ("sr_detail", sr_detail)]
             if v is not None
         }
-        response = self._get(url, params=query_params)
+        response = await self._aget(url, params=query_params)
        response.raise_for_status()
         return response.json()

-    def get_rising_posts(
-        self,
-        after: str = None,
-        before: str = None,
-        count: int = None,
-        limit: int = None,
-        show: str = None,
-        sr_detail: str = None,
+    async def get_rising_posts(
+        self, after: str = None, before: str = None, count: int = None, limit: int = None, show: str = None, sr_detail: str = None
     ) -> Any:
         """
         Retrieves a list of rising posts from across all of Reddit. Unlike subreddit-specific listing functions (e.g., `get_subreddit_hot_posts`), this operates globally. It supports optional pagination and filtering parameters, such as `limit` and `after`, to customize the API response and navigate through results.
@@ -660,7 +564,7 @@ class RedditApp(APIApplication):
             sr_detail: Optional. Expand subreddit details.

         Returns:
-
+            A dictionary containing a listing of rising posts.

         Tags:
             listings, posts, rising, read-only
@@ -668,28 +572,15 @@ class RedditApp(APIApplication):
         url = f"{self.base_url}/rising"
         query_params = {
             k: v
-            for k, v in [
-                ("after", after),
-                ("before", before),
-                ("count", count),
-                ("limit", limit),
-                ("show", show),
-                ("sr_detail", sr_detail),
-            ]
+            for k, v in [("after", after), ("before", before), ("count", count), ("limit", limit), ("show", show), ("sr_detail", sr_detail)]
             if v is not None
         }
-        response = self._get(url, params=query_params)
+        response = await self._aget(url, params=query_params)
         response.raise_for_status()
         return response.json()

-    def get_top_posts(
-        self,
-        after: str = None,
-        before: str = None,
-        count: int = None,
-        limit: int = None,
-        show: str = None,
-        sr_detail: str = None,
+    async def get_top_posts(
+        self, after: str = None, before: str = None, count: int = None, limit: int = None, show: str = None, sr_detail: str = None
     ) -> Any:
         """
         Fetches top-rated posts from across all of Reddit, distinct from `get_subreddit_top_posts`, which operates on a specific subreddit. The function supports standard API pagination parameters like `limit`, `after`, and `before` to navigate results, providing a broad, site-wide view of top content.
@@ -703,7 +594,7 @@ class RedditApp(APIApplication):
             sr_detail: Optional. Expand subreddit details.

         Returns:
-
+            A dictionary containing a listing of top posts.

         Tags:
             listings, posts, top, read-only
@@ -711,21 +602,14 @@ class RedditApp(APIApplication):
         url = f"{self.base_url}/top"
         query_params = {
             k: v
-            for k, v in [
-                ("after", after),
-                ("before", before),
-                ("count", count),
-                ("limit", limit),
-                ("show", show),
-                ("sr_detail", sr_detail),
-            ]
+            for k, v in [("after", after), ("before", before), ("count", count), ("limit", limit), ("show", show), ("sr_detail", sr_detail)]
             if v is not None
         }
-        response = self._get(url, params=query_params)
+        response = await self._aget(url, params=query_params)
         response.raise_for_status()
         return response.json()

-    def search_reddit(
+    async def search_reddit(
         self,
         after: str = None,
         before: str = None,
@@ -760,7 +644,7 @@ class RedditApp(APIApplication):
             type: Optional. A comma-separated list of result types ('sr', 'link', 'user').

         Returns:
-
+            A dictionary containing the search results.

         Tags:
             search, reddit, posts, comments, users, read-only
@@ -785,11 +669,11 @@ class RedditApp(APIApplication):
             ]
             if v is not None
         }
-        response = self._get(url, params=query_params)
+        response = await self._aget(url, params=query_params)
         response.raise_for_status()
         return response.json()

-    def get_user_profile(self, username: str) -> Any:
+    async def get_user_profile(self, username: str) -> Any:
         """
         Retrieves public profile information for a specified Reddit user via the `/user/{username}/about` endpoint. Unlike `get_current_user_info`, which targets the authenticated user, this function fetches data like karma and account age for any user identified by their username.

@@ -806,7 +690,7 @@ class RedditApp(APIApplication):
             raise ValueError("Missing required parameter 'username'")
         url = f"{self.base_url}/user/{username}/about"
         query_params = {k: v for k, v in [("username", username)] if v is not None}
-        response = self._get(url, params=query_params)
+        response = await self._aget(url, params=query_params)
         response.raise_for_status()
         return response.json()

```
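
Every `RedditApp` tool method in this diff is converted from a synchronous call (`self._get`, `self._post`) to an awaited one (`await self._aget(...)`, `await self._apost(...)`), so in 0.1.39rc8 these methods are coroutines and must be awaited from an event loop. The sketch below shows the new calling convention under that assumption; how the `integration` object that supplies the OAuth credentials is constructed is not covered by this diff, so it is passed in here as an opaque, hypothetical argument.

```python
import asyncio

from universal_mcp.applications.reddit.app import RedditApp


async def demo(integration) -> dict:
    # `integration` stands in for whatever credential/integration object
    # RedditApp expects; building it is outside the scope of this diff.
    app = RedditApp(integration=integration)

    # In 0.1.22 this was a plain method; in 0.1.39rc8 it is a coroutine
    # (it now calls `await self._aget(...)` internally), so it must be awaited.
    return await app.get_subreddit_posts("python", limit=5, timeframe="week")


def run_demo(integration) -> dict:
    # Drive the coroutine from synchronous code with an event loop.
    return asyncio.run(demo(integration))
```

The same pattern applies to the write paths (`post_comment`, `create_post`, `edit_content`, `delete_content`), which now go through `await self._apost(...)` and likewise need to be awaited.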