universal-mcp 0.1.13rc1__py3-none-any.whl → 0.1.13rc3__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only and reflects the changes between the two versions exactly as published.
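A listing like this can be reproduced locally by unpacking both wheels and diffing their members. The sketch below is illustrative only (it is not part of the package) and assumes the two wheels have already been downloaded, e.g. with `pip download universal-mcp==0.1.13rc1 --no-deps` and the rc3 equivalent, under their standard filenames.

```python
# Illustrative sketch: rebuild a per-file unified diff between two wheel versions.
# The wheel filenames below assume the standard naming convention for this package.
import difflib
import zipfile

OLD_WHEEL = "universal_mcp-0.1.13rc1-py3-none-any.whl"
NEW_WHEEL = "universal_mcp-0.1.13rc3-py3-none-any.whl"


def read_members(wheel_path: str) -> dict[str, list[str]]:
    """Map each archive member name to its decoded lines."""
    with zipfile.ZipFile(wheel_path) as zf:
        return {
            name: zf.read(name).decode("utf-8", errors="replace").splitlines()
            for name in zf.namelist()
        }


old_files = read_members(OLD_WHEEL)
new_files = read_members(NEW_WHEEL)

for name in sorted(old_files.keys() | new_files.keys()):
    old_lines = old_files.get(name, [])
    new_lines = new_files.get(name, [])
    if old_lines == new_lines:
        continue  # identical content, skip
    # Note: unlike the registry view above, this simple version does not pair
    # renamed paths (e.g. the versioned .dist-info directories).
    diff = difflib.unified_diff(
        old_lines, new_lines, fromfile=name, tofile=name, lineterm=""
    )
    print("\n".join(diff))
```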
Files changed (100)
  1. universal_mcp/utils/installation.py +8 -8
  2. {universal_mcp-0.1.13rc1.dist-info → universal_mcp-0.1.13rc3.dist-info}/METADATA +40 -1
  3. universal_mcp-0.1.13rc3.dist-info/RECORD +38 -0
  4. universal_mcp/applications/ahrefs/README.md +0 -76
  5. universal_mcp/applications/ahrefs/__init__.py +0 -0
  6. universal_mcp/applications/ahrefs/app.py +0 -2291
  7. universal_mcp/applications/cal_com_v2/README.md +0 -175
  8. universal_mcp/applications/cal_com_v2/__init__.py +0 -0
  9. universal_mcp/applications/cal_com_v2/app.py +0 -5390
  10. universal_mcp/applications/calendly/README.md +0 -78
  11. universal_mcp/applications/calendly/__init__.py +0 -0
  12. universal_mcp/applications/calendly/app.py +0 -1195
  13. universal_mcp/applications/clickup/README.md +0 -160
  14. universal_mcp/applications/clickup/__init__.py +0 -0
  15. universal_mcp/applications/clickup/app.py +0 -5009
  16. universal_mcp/applications/coda/README.md +0 -133
  17. universal_mcp/applications/coda/__init__.py +0 -0
  18. universal_mcp/applications/coda/app.py +0 -3671
  19. universal_mcp/applications/curstdata/README.md +0 -50
  20. universal_mcp/applications/curstdata/__init__.py +0 -0
  21. universal_mcp/applications/curstdata/app.py +0 -551
  22. universal_mcp/applications/e2b/README.md +0 -37
  23. universal_mcp/applications/e2b/app.py +0 -65
  24. universal_mcp/applications/elevenlabs/README.md +0 -84
  25. universal_mcp/applications/elevenlabs/__init__.py +0 -0
  26. universal_mcp/applications/elevenlabs/app.py +0 -1402
  27. universal_mcp/applications/falai/README.md +0 -42
  28. universal_mcp/applications/falai/__init__.py +0 -0
  29. universal_mcp/applications/falai/app.py +0 -332
  30. universal_mcp/applications/figma/README.md +0 -74
  31. universal_mcp/applications/figma/__init__.py +0 -0
  32. universal_mcp/applications/figma/app.py +0 -1261
  33. universal_mcp/applications/firecrawl/README.md +0 -45
  34. universal_mcp/applications/firecrawl/app.py +0 -268
  35. universal_mcp/applications/github/README.md +0 -47
  36. universal_mcp/applications/github/app.py +0 -429
  37. universal_mcp/applications/gong/README.md +0 -88
  38. universal_mcp/applications/gong/__init__.py +0 -0
  39. universal_mcp/applications/gong/app.py +0 -2297
  40. universal_mcp/applications/google_calendar/app.py +0 -442
  41. universal_mcp/applications/google_docs/README.md +0 -40
  42. universal_mcp/applications/google_docs/app.py +0 -88
  43. universal_mcp/applications/google_drive/README.md +0 -44
  44. universal_mcp/applications/google_drive/app.py +0 -286
  45. universal_mcp/applications/google_mail/README.md +0 -47
  46. universal_mcp/applications/google_mail/app.py +0 -664
  47. universal_mcp/applications/google_sheet/README.md +0 -42
  48. universal_mcp/applications/google_sheet/app.py +0 -150
  49. universal_mcp/applications/heygen/README.md +0 -69
  50. universal_mcp/applications/heygen/__init__.py +0 -0
  51. universal_mcp/applications/heygen/app.py +0 -956
  52. universal_mcp/applications/mailchimp/README.md +0 -306
  53. universal_mcp/applications/mailchimp/__init__.py +0 -0
  54. universal_mcp/applications/mailchimp/app.py +0 -10937
  55. universal_mcp/applications/markitdown/app.py +0 -44
  56. universal_mcp/applications/neon/README.md +0 -99
  57. universal_mcp/applications/neon/__init__.py +0 -0
  58. universal_mcp/applications/neon/app.py +0 -1924
  59. universal_mcp/applications/notion/README.md +0 -55
  60. universal_mcp/applications/notion/__init__.py +0 -0
  61. universal_mcp/applications/notion/app.py +0 -527
  62. universal_mcp/applications/perplexity/README.md +0 -37
  63. universal_mcp/applications/perplexity/app.py +0 -65
  64. universal_mcp/applications/reddit/README.md +0 -45
  65. universal_mcp/applications/reddit/app.py +0 -379
  66. universal_mcp/applications/replicate/README.md +0 -65
  67. universal_mcp/applications/replicate/__init__.py +0 -0
  68. universal_mcp/applications/replicate/app.py +0 -980
  69. universal_mcp/applications/resend/README.md +0 -38
  70. universal_mcp/applications/resend/app.py +0 -37
  71. universal_mcp/applications/retell_ai/README.md +0 -46
  72. universal_mcp/applications/retell_ai/__init__.py +0 -0
  73. universal_mcp/applications/retell_ai/app.py +0 -333
  74. universal_mcp/applications/rocketlane/README.md +0 -42
  75. universal_mcp/applications/rocketlane/__init__.py +0 -0
  76. universal_mcp/applications/rocketlane/app.py +0 -194
  77. universal_mcp/applications/serpapi/README.md +0 -37
  78. universal_mcp/applications/serpapi/app.py +0 -73
  79. universal_mcp/applications/shortcut/README.md +0 -153
  80. universal_mcp/applications/shortcut/__init__.py +0 -0
  81. universal_mcp/applications/shortcut/app.py +0 -3880
  82. universal_mcp/applications/spotify/README.md +0 -116
  83. universal_mcp/applications/spotify/__init__.py +0 -0
  84. universal_mcp/applications/spotify/app.py +0 -2526
  85. universal_mcp/applications/supabase/README.md +0 -112
  86. universal_mcp/applications/supabase/__init__.py +0 -0
  87. universal_mcp/applications/supabase/app.py +0 -2970
  88. universal_mcp/applications/tavily/README.md +0 -38
  89. universal_mcp/applications/tavily/app.py +0 -51
  90. universal_mcp/applications/wrike/README.md +0 -71
  91. universal_mcp/applications/wrike/__init__.py +0 -0
  92. universal_mcp/applications/wrike/app.py +0 -1372
  93. universal_mcp/applications/youtube/README.md +0 -82
  94. universal_mcp/applications/youtube/__init__.py +0 -0
  95. universal_mcp/applications/youtube/app.py +0 -1428
  96. universal_mcp/applications/zenquotes/README.md +0 -37
  97. universal_mcp/applications/zenquotes/app.py +0 -31
  98. universal_mcp-0.1.13rc1.dist-info/RECORD +0 -132
  99. {universal_mcp-0.1.13rc1.dist-info → universal_mcp-0.1.13rc3.dist-info}/WHEEL +0 -0
  100. {universal_mcp-0.1.13rc1.dist-info → universal_mcp-0.1.13rc3.dist-info}/entry_points.txt +0 -0
--- a/universal_mcp/applications/firecrawl/README.md
+++ /dev/null
@@ -1,45 +0,0 @@
-
-# Firecrawl MCP Server
-
-An MCP Server for the Firecrawl API.
-
-## Supported Integrations
-
-- AgentR
-- API Key (Coming Soon)
-- OAuth (Coming Soon)
-
-## Tools
-
-This is automatically generated from OpenAPI schema for the Firecrawl API.
-
-## Supported Integrations
-
-This tool can be integrated with any service that supports HTTP requests.
-
-## Tool List
-
-| Tool | Description |
-|------|-------------|
-| scrape_url | Scrapes a single URL using Firecrawl and returns the extracted data. |
-| search | Performs a web search using Firecrawl's search capability. |
-| start_crawl | Starts a crawl job for a given URL using Firecrawl. Returns the job ID immediately. |
-| check_crawl_status | Checks the status of a previously initiated Firecrawl crawl job. |
-| cancel_crawl | Cancels a currently running Firecrawl crawl job. |
-| start_batch_scrape | Starts a batch scrape job for multiple URLs using Firecrawl. |
-| check_batch_scrape_status | Checks the status of a previously initiated Firecrawl batch scrape job. |
-| start_extract | Starts an extraction job for one or more URLs using Firecrawl. |
-| check_extract_status | Checks the status of a previously initiated Firecrawl extraction job. |
-
-
-
-## Usage
-
-- Login to AgentR
-- Follow the quickstart guide to setup MCP Server for your client
-- Visit Apps Store and enable the Firecrawl app
-- Restart the MCP Server
-
-### Local Development
-
-- Follow the README to test with the local MCP Server
--- a/universal_mcp/applications/firecrawl/app.py
+++ /dev/null
@@ -1,268 +0,0 @@
-from typing import Any
-
-from firecrawl import FirecrawlApp as FirecrawlApiClient
-
-from universal_mcp.applications.application import APIApplication
-from universal_mcp.integrations import Integration
-
-
-class FirecrawlApp(APIApplication):
-    """
-    Application for interacting with the Firecrawl service (firecrawl.dev)
-    to scrape web pages, perform searches, and manage crawl/batch scrape/extract jobs.
-    Requires a Firecrawl API key configured via integration.
-    """
-
-    def __init__(self, integration: Integration | None = None) -> None:
-        super().__init__(name="firecrawl", integration=integration)
-
-    def _get_client(self) -> FirecrawlApiClient:
-        """Initializes and returns the Firecrawl client after ensuring API key is set."""
-        api_key = self.integration.get_credentials().get("api_key")
-        return FirecrawlApiClient(api_key=api_key)
-
-    def scrape_url(
-        self, url: str, params: dict[str, Any] | None = None
-    ) -> dict[str, Any] | str:
-        """
-        Scrapes a single URL using Firecrawl and returns the extracted data.
-
-        Args:
-            url: The URL of the web page to scrape.
-            params: Optional dictionary of parameters to customize the scrape.
-
-        Returns:
-            A dictionary containing the scraped data on success,
-            or a string containing an error message on failure.
-
-        Tags:
-            scrape, important
-        """
-        try:
-            client = self._get_client()
-            response_data = client.scrape_url(url=url, params=params)
-            return response_data
-
-        except Exception as e:
-            return f"Error scraping URL {url}: {type(e).__name__} - {e}"
-
-    def search(
-        self, query: str, params: dict[str, Any] | None = None
-    ) -> dict[str, Any] | str:
-        """
-        Performs a web search using Firecrawl's search capability.
-
-        Args:
-            query: The search query string.
-            params: Optional dictionary of search parameters.
-
-        Returns:
-            A dictionary containing the search results on success,
-            or a string containing an error message on failure.
-
-        Tags:
-            search, important
-        """
-        try:
-            client = self._get_client()
-            response = client.search(query=query, params=params)
-            return response
-        except Exception as e:
-            return f"Error performing search for '{query}': {type(e).__name__} - {e}"
-
-    def start_crawl(
-        self,
-        url: str,
-        params: dict[str, Any] | None = None,
-        idempotency_key: str | None = None,
-    ) -> dict[str, Any] | str:
-        """
-        Starts a crawl job for a given URL using Firecrawl. Returns the job ID immediately.
-
-        Args:
-            url: The starting URL for the crawl.
-            params: Optional dictionary of parameters to customize the crawl.
-            idempotency_key: Optional unique key to prevent duplicate jobs.
-
-        Returns:
-            A dictionary containing the job initiation response on success,
-            or a string containing an error message on failure.
-
-        Tags:
-            crawl, async_job, start
-        """
-        try:
-            client = self._get_client()
-            response = client.async_crawl_url(
-                url=url, params=params, idempotency_key=idempotency_key
-            )
-            return response
-
-        except Exception as e:
-            return f"Error starting crawl for URL {url}: {type(e).__name__} - {e}"
-
-    def check_crawl_status(self, job_id: str) -> dict[str, Any] | str:
-        """
-        Checks the status of a previously initiated Firecrawl crawl job.
-
-        Args:
-            job_id: The ID of the crawl job to check.
-
-        Returns:
-            A dictionary containing the job status details on success,
-            or a string containing an error message on failure.
-
-        Tags:
-            crawl, async_job, status
-        """
-        try:
-            client = self._get_client()
-            status = client.check_crawl_status(id=job_id)
-            return status
-
-        except Exception as e:
-            return f"Error checking crawl status for job ID {job_id}: {type(e).__name__} - {e}"
-
-    def cancel_crawl(self, job_id: str) -> dict[str, Any] | str:
-        """
-        Cancels a currently running Firecrawl crawl job.
-
-        Args:
-            job_id: The ID of the crawl job to cancel.
-
-        Returns:
-            A dictionary confirming the cancellation status on success,
-            or a string containing an error message on failure.
-
-        Tags:
-            crawl, async_job, management, cancel
-        """
-        try:
-            client = self._get_client()
-            response = client.cancel_crawl(id=job_id)
-
-            return response
-
-        except Exception as e:
-            return f"Error cancelling crawl job ID {job_id}: {type(e).__name__} - {e}"
-
-    def start_batch_scrape(
-        self,
-        urls: list[str],
-        params: dict[str, Any] | None = None,
-        idempotency_key: str | None = None,
-    ) -> dict[str, Any] | str:
-        """
-        Starts a batch scrape job for multiple URLs using Firecrawl.
-
-        Args:
-            urls: A list of URLs to scrape.
-            params: Optional dictionary of parameters applied to all scrapes.
-            idempotency_key: Optional unique key to prevent duplicate jobs.
-
-        Returns:
-            A dictionary containing the job initiation response on success,
-            or a string containing an error message on failure.
-
-        Tags:
-            scrape, batch, async_job, start
-        """
-        try:
-            client = self._get_client()
-            response = client.async_batch_scrape_urls(
-                urls=urls, params=params, idempotency_key=idempotency_key
-            )
-            return response
-
-        except Exception as e:
-            return f"Error starting batch scrape: {type(e).__name__} - {e}"
-
-    def check_batch_scrape_status(self, job_id: str) -> dict[str, Any] | str:
-        """
-        Checks the status of a previously initiated Firecrawl batch scrape job.
-
-        Args:
-            job_id: The ID of the batch scrape job to check.
-
-        Returns:
-            A dictionary containing the job status details on success,
-            or a string containing an error message on failure.
-
-        Tags:
-            scrape, batch, async_job, status
-        """
-        try:
-            client = self._get_client()
-            status = client.check_batch_scrape_status(id=job_id)
-            return status
-
-        except Exception as e:
-            return f"Error checking batch scrape status for job ID {job_id}: {type(e).__name__} - {e}"
-
-    def start_extract(
-        self,
-        urls: list[str],
-        params: dict[str, Any] | None = None,
-        idempotency_key: str | None = None,
-    ) -> dict[str, Any] | str:
-        """
-        Starts an extraction job for one or more URLs using Firecrawl.
-
-        Args:
-            urls: A list of URLs to extract data from.
-            params: Dictionary of parameters. MUST include 'prompt' or 'schema'.
-            idempotency_key: Optional unique key to prevent duplicate jobs.
-
-        Returns:
-            A dictionary containing the job initiation response on success,
-            or a string containing an error message on failure.
-
-        Tags:
-            extract, ai, async_job, start
-        """
-
-        try:
-            client = self._get_client()
-            response = client.async_extract(
-                urls=urls, params=params, idempotency_key=idempotency_key
-            )
-            return response
-
-        except Exception as e:
-            return f"Error starting extraction: {type(e).__name__} - {e}"
-
-    def check_extract_status(self, job_id: str) -> dict[str, Any] | str:
-        """
-        Checks the status of a previously initiated Firecrawl extraction job.
-
-        Args:
-            job_id: The ID of the extraction job to check.
-
-        Returns:
-            A dictionary containing the job status details on success,
-            or a string containing an error message on failure.
-
-        Tags:
-            extract, ai, async_job, status
-        """
-        try:
-            client = self._get_client()
-            status = client.get_extract_status(job_id=job_id)
-            return status
-
-        except Exception as e:
-            return f"Error checking extraction status for job ID {job_id}: {type(e).__name__} - {e}"
-
-    def list_tools(self):
-        """Returns a list of methods exposed as tools."""
-        return [
-            self.scrape_url,
-            self.search,
-            self.start_crawl,
-            self.check_crawl_status,
-            self.cancel_crawl,
-            self.start_batch_scrape,
-            self.check_batch_scrape_status,
-            self.start_extract,
-            self.check_extract_status,
-        ]
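For context on what rc3 drops here, the following is a hedged usage sketch of the removed FirecrawlApp, valid only against 0.1.13rc1. The `StubIntegration` class and the `FIRECRAWL_API_KEY` environment variable are illustrative assumptions; in the real package an `Integration` implementation supplies the credentials.

```python
# Hypothetical driver for the FirecrawlApp shown above (0.1.13rc1 only).
import os

from universal_mcp.applications.firecrawl.app import FirecrawlApp


class StubIntegration:
    """Illustrative stand-in: _get_client() above only calls get_credentials()."""

    def get_credentials(self) -> dict:
        # Assumes the key is exported as FIRECRAWL_API_KEY.
        return {"api_key": os.environ["FIRECRAWL_API_KEY"]}


app = FirecrawlApp(integration=StubIntegration())

# Methods return a dict on success or an error string on failure,
# so callers branch on the returned type rather than catching exceptions.
result = app.scrape_url("https://example.com")
if isinstance(result, str):
    print("Scrape failed:", result)
else:
    print(result)

# list_tools() exposes the nine bound methods that the MCP server registers.
print([tool.__name__ for tool in app.list_tools()])
```

The same dict-or-error-string pattern appears in the other removed application modules in this release, so the sketch generalizes beyond Firecrawl.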
--- a/universal_mcp/applications/github/README.md
+++ /dev/null
@@ -1,47 +0,0 @@
-
-# Github MCP Server
-
-An MCP Server for the Github API.
-
-## Supported Integrations
-
-- AgentR
-- API Key (Coming Soon)
-- OAuth (Coming Soon)
-
-## Tools
-
-This is automatically generated from OpenAPI schema for the Github API.
-
-## Supported Integrations
-
-This tool can be integrated with any service that supports HTTP requests.
-
-## Tool List
-
-| Tool | Description |
-|------|-------------|
-| create_issue | Create a new issue in a GitHub repository |
-| create_pull_request | Create a new pull request for a GitHub repository |
-| get_pull_request | Get a specific pull request for a GitHub repository |
-| list_branches | List branches for a GitHub repository |
-| list_commits | List recent commits for a GitHub repository |
-| list_issues | List issues for a GitHub repository |
-| list_pull_requests | List pull requests for a GitHub repository |
-| list_repo_activities | List activities for a GitHub repository |
-| star_repository | Star a GitHub repository |
-| update_issue | Update an issue in a GitHub repository |
-| validate | Function for validate |
-
-
-
-## Usage
-
-- Login to AgentR
-- Follow the quickstart guide to setup MCP Server for your client
-- Visit Apps Store and enable the Github app
-- Restart the MCP Server
-
-### Local Development
-
-- Follow the README to test with the local MCP Server