universal-mcp-applications 0.1.30rc1__py3-none-any.whl → 0.1.30rc2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- universal_mcp/applications/linkedin/README.md +16 -4
- universal_mcp/applications/scraper/app.py +143 -70
- {universal_mcp_applications-0.1.30rc1.dist-info → universal_mcp_applications-0.1.30rc2.dist-info}/METADATA +1 -1
- {universal_mcp_applications-0.1.30rc1.dist-info → universal_mcp_applications-0.1.30rc2.dist-info}/RECORD +6 -7
- universal_mcp/applications/scraper/scraper_testers.py +0 -17
- {universal_mcp_applications-0.1.30rc1.dist-info → universal_mcp_applications-0.1.30rc2.dist-info}/WHEEL +0 -0
- {universal_mcp_applications-0.1.30rc1.dist-info → universal_mcp_applications-0.1.30rc2.dist-info}/licenses/LICENSE +0 -0
```diff
--- universal_mcp/applications/linkedin/README.md
+++ universal_mcp/applications/linkedin/README.md
@@ -9,7 +9,19 @@ This is automatically generated from OpenAPI schema for the LinkedinApp API.
 
 | Tool | Description |
 |------|-------------|
-| `
-| `
-| `
-| `
+| `list_all_chats` | Retrieves a paginated list of all chat conversations across linked accounts. Supports filtering by unread status, date range, and account provider, distinguishing it from functions listing messages within a single chat. |
+| `list_chat_messages` | Retrieves messages from a specific chat identified by `chat_id`. Supports pagination and filtering by date or sender. Unlike `list_all_messages`, which fetches from all chats, this function targets the contents of a single conversation. |
+| `send_chat_message` | Sends a text message to a specific chat conversation using its `chat_id`. This function creates a new message via a POST request, distinguishing it from read-only functions like `list_chat_messages`. It returns the API's response, which typically confirms the successful creation of the message. |
+| `retrieve_chat` | Retrieves a single chat's details using its Unipile or provider-specific ID. This function is distinct from `list_all_chats`, which returns a collection, by targeting one specific conversation. |
+| `list_all_messages` | Retrieves a paginated list of messages from all chats associated with the account. Unlike `list_chat_messages` which targets a specific conversation, this function provides a global message view, filterable by sender and date range. |
+| `list_all_accounts` | Retrieves a paginated list of all social media accounts linked to the Unipile service. This is crucial for obtaining the `account_id` required by other methods to specify which user account should perform an action, like sending a message or retrieving user-specific posts. |
+| `list_profile_posts` | Retrieves a paginated list of posts from a specific user or company profile using their provider ID. An authorizing `account_id` is required, and the `is_company` flag must specify the entity type, distinguishing this from `retrieve_post` which fetches a single post by its own ID. |
+| `retrieve_own_profile` | Retrieves the profile details for the user associated with the Unipile account. This function targets the API's 'me' endpoint to fetch the authenticated user's profile, distinct from `retrieve_user_profile` which fetches profiles of other users by their public identifier. |
+| `retrieve_user_profile` | Retrieves a specific LinkedIn user's profile using their public or internal ID. Unlike `retrieve_own_profile`, which fetches the authenticated user's details, this function targets and returns data for any specified third-party user profile on the platform. |
+| `retrieve_post` | Fetches a specific post's details by its unique ID. Unlike `list_profile_posts`, which retrieves a collection of posts from a user or company profile, this function targets one specific post and returns its full object. |
+| `list_post_comments` | Fetches comments for a specific post. Providing an optional `comment_id` retrieves threaded replies instead of top-level comments. This read-only operation contrasts with `create_post_comment`, which publishes new comments, and `list_content_reactions`, which retrieves 'likes'. |
+| `create_post` | Publishes a new top-level post from the account, including text, user mentions, and an external link. This function creates original content, distinguishing it from `create_post_comment` which adds replies to existing posts. |
+| `list_content_reactions` | Retrieves a paginated list of reactions for a given post or, optionally, a specific comment. This read-only operation uses the account for the request, distinguishing it from the `create_reaction` function which adds new reactions. |
+| `create_post_comment` | Publishes a comment on a specified post. By providing an optional `comment_id`, it creates a threaded reply to an existing comment instead of a new top-level one. This function's dual capability distinguishes it from `list_post_comments`, which only retrieves comments and their replies. |
+| `create_reaction` | Adds a specified reaction (e.g., 'like', 'love') to a LinkedIn post or, optionally, to a specific comment. This function performs a POST request to create the reaction, differentiating it from `list_content_reactions` which only retrieves existing ones. |
+| `search` | Performs a comprehensive LinkedIn search for people, companies, posts, or jobs using keywords. |
```
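The table above is the contract for the regenerated LinkedinApp tools. A minimal, hedged sketch of how they might be invoked follows; it assumes each tool is exposed as a method of the same name on `LinkedinApp` (whose `integration=` constructor argument appears later in this diff), and the parameter names are guesses taken from the descriptions, not the generated signatures.

```python
# Hedged sketch only: method and parameter names are assumptions inferred from
# the tool table above, not the generated OpenAPI client code.
from universal_mcp.applications.linkedin import LinkedinApp

app = LinkedinApp(integration=integration)  # `integration` configured elsewhere

accounts = app.list_all_accounts()                     # discover linked account ids
chats = app.list_all_chats(account_id="<account-id>")  # account_id per the descriptions above
post = app.retrieve_post(post_id="<post-id>")          # parameter name is a guess
```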
```diff
--- universal_mcp/applications/scraper/app.py
+++ universal_mcp/applications/scraper/app.py
@@ -1,3 +1,4 @@
+import os
 from dotenv import load_dotenv
 
 load_dotenv()
@@ -8,9 +9,6 @@ from loguru import logger
 from universal_mcp.applications.application import APIApplication
 from universal_mcp.integrations import Integration
 
-from universal_mcp.applications.linkedin import LinkedinApp
-
-
 class ScraperApp(APIApplication):
     """
     Application for interacting with LinkedIn API.
@@ -22,11 +20,56 @@ class ScraperApp(APIApplication):
         if self.integration:
             credentials = self.integration.get_credentials()
             self.account_id = credentials.get("account_id")
-            self._unipile_app = LinkedinApp(integration=self.integration)
         else:
             logger.warning("Integration not found")
             self.account_id = None
-
+
+    @property
+    def base_url(self) -> str:
+        if not self._base_url:
+            unipile_dsn = os.getenv("UNIPILE_DSN")
+            if not unipile_dsn:
+                logger.error(
+                    "UnipileApp: UNIPILE_DSN environment variable is not set."
+                )
+                raise ValueError(
+                    "UnipileApp: UNIPILE_DSN environment variable is required."
+                )
+            self._base_url = f"https://{unipile_dsn}"
+        return self._base_url
+
+    @base_url.setter
+    def base_url(self, base_url: str) -> None:
+        self._base_url = base_url
+        logger.info(f"UnipileApp: Base URL set to {self._base_url}")
+
+    def _get_headers(self) -> dict[str, str]:
+        """
+        Get the headers for Unipile API requests.
+        Overrides the base class method to use X-Api-Key.
+        """
+        if not self.integration:
+            logger.warning(
+                "UnipileApp: No integration configured, returning empty headers."
+            )
+            return {}
+
+        api_key = os.getenv("UNIPILE_API_KEY")
+        if not api_key:
+            logger.error(
+                "UnipileApp: API key not found in integration credentials for Unipile."
+            )
+            return {  # Or return minimal headers if some calls might not need auth (unlikely for Unipile)
+                "Content-Type": "application/json",
+                "Cache-Control": "no-cache",
+            }
+
+        logger.debug("UnipileApp: Using X-Api-Key for authentication.")
+        return {
+            "x-api-key": api_key,
+            "Content-Type": "application/json",
+            "Cache-Control": "no-cache",  # Often good practice for APIs
+        }
 
     def linkedin_search(
         self,
```
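With the `LinkedinApp` wrapper removed from `__init__`, the scraper now talks to Unipile directly: `base_url` is built lazily from the `UNIPILE_DSN` environment variable, and `_get_headers` sends `UNIPILE_API_KEY` as an `x-api-key` header on every request. A minimal setup sketch follows; the DSN and keys are placeholders, the construction mirrors the removed `scraper_testers.py`, and it assumes the base `APIApplication` leaves `_base_url` unset until first access.

```python
# Hedged sketch: placeholder credentials; setup mirrors the removed scraper_testers.py.
import os

os.environ["UNIPILE_DSN"] = "api8.unipile.com:13851"      # hypothetical DSN
os.environ["UNIPILE_API_KEY"] = "<your-unipile-api-key>"  # hypothetical key

from universal_mcp.agentr import AgentrClient, AgentrIntegration
from universal_mcp.applications.scraper import ScraperApp

client = AgentrClient(api_key="<your-agentr-api-key>")  # as in the removed tester, minus the real key
integration = AgentrIntegration(name="scraper", client=client)
scraper = ScraperApp(integration=integration)

# The property builds f"https://{UNIPILE_DSN}" on first access; subsequent
# requests made through self._get / self._post carry the x-api-key header.
print(scraper.base_url)
```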
universal_mcp/applications/scraper/app.py (continued):

```diff
@@ -34,13 +77,15 @@ class ScraperApp(APIApplication):
         cursor: str | None = None,
         limit: int | None = None,
         keywords: str | None = None,
-        date_posted:
+        date_posted: Literal["past_day", "past_week", "past_month"] | None = None,
         sort_by: Literal["relevance", "date"] = "relevance",
         minimum_salary_value: int = 40,
     ) -> dict[str, Any]:
         """
-        Performs a
-
+        Performs a comprehensive LinkedIn search for people, companies, posts, or jobs using keywords.
+        Supports pagination and targets either the classic or Sales Navigator API for posts.
+        For people, companies, and jobs, it uses the classic API.
+
         Args:
             category: Type of search to perform. Valid values are "people", "companies", "posts", or "jobs".
             cursor: Pagination cursor for the next page of entries.
@@ -49,82 +94,107 @@ class ScraperApp(APIApplication):
             date_posted: Filter by when the post was posted (posts only). Valid values are "past_day", "past_week", or "past_month".
             sort_by: How to sort the results (for posts and jobs). Valid values are "relevance" or "date".
             minimum_salary_value: The minimum salary to filter for (jobs only).
-
+
         Returns:
             A dictionary containing search results and pagination details.
-
+
         Raises:
             httpx.HTTPError: If the API request fails.
-
+            ValueError: If the category is empty.
+
         Tags:
-            linkedin, search,
+            linkedin, search, people, companies, posts, jobs, api, important
         """
-
-
-
-
-
-
-
-
-
-
+        if not category:
+            raise ValueError("Category cannot be empty.")
+
+        url = f"{self.base_url}/api/v1/linkedin/search"
+
+        params: dict[str, Any] = {"account_id": self.account_id}
+        if cursor:
+            params["cursor"] = cursor
+        if limit is not None:
+            params["limit"] = limit
+
+        payload: dict[str, Any] = {"api": "classic", "category": category}
+
+        if keywords:
+            payload["keywords"] = keywords
+
+        if category == "posts":
+            if date_posted:
+                payload["date_posted"] = date_posted
+            if sort_by:
+                payload["sort_by"] = sort_by
+
+        elif category == "jobs":
+            payload["minimum_salary"] = {
+                "currency": "USD",
+                "value": minimum_salary_value,
+            }
+            if sort_by:
+                payload["sort_by"] = sort_by
+
+        response = self._post(url, params=params, data=payload)
+        return self._handle_response(response)
 
     def linkedin_list_profile_posts(
         self,
-        identifier: str,
+        identifier: str,  # User or Company provider internal ID
         cursor: str | None = None,
-        limit: int | None = None,
+        limit: int | None = None,  # 1-100 (spec says max 250)
+        is_company: bool | None = None,
     ) -> dict[str, Any]:
         """
-
-
+        Retrieves a paginated list of posts from a specific user or company profile using their provider ID. An authorizing `account_id` is required, and the `is_company` flag must specify the entity type, distinguishing this from `retrieve_post` which fetches a single post by its own ID.
+
         Args:
-            identifier: The entity's provider internal ID (LinkedIn ID).
-            cursor: Pagination cursor
-            limit: Number of items to return (1-100, though spec allows up to 250).
-
+            identifier: The entity's provider internal ID (LinkedIn ID).
+            cursor: Pagination cursor.
+            limit: Number of items to return (1-100, as per Unipile example, though spec allows up to 250).
+            is_company: Boolean indicating if the identifier is for a company.
+
         Returns:
             A dictionary containing a list of post objects and pagination details.
-
+
         Raises:
             httpx.HTTPError: If the API request fails.
-
+
         Tags:
             linkedin, post, list, user_posts, company_posts, content, api, important
         """
-
-
-
-
-
-            limit=limit
-
-
-
-            self,
-
-
+        url = f"{self.base_url}/api/v1/users/{identifier}/posts"
+        params: dict[str, Any] = {"account_id": self.account_id}
+        if cursor:
+            params["cursor"] = cursor
+        if limit:
+            params["limit"] = limit
+        if is_company is not None:
+            params["is_company"] = is_company
+
+        response = self._get(url, params=params)
+        return response.json()
+
+    def linkedin_retrieve_profile(self, identifier: str) -> dict[str, Any]:
         """
-        Retrieves a specific LinkedIn user's profile using their
-
+        Retrieves a specific LinkedIn user's profile using their public or internal ID. Unlike `retrieve_own_profile`, which fetches the authenticated user's details, this function targets and returns data for any specified third-party user profile on the platform.
+
         Args:
-            identifier:
-
+            identifier: Can be the provider's internal id OR the provider's public id of the requested user.For example, for https://www.linkedin.com/in/manojbajaj95/, the identifier is "manojbajaj95".
+
         Returns:
             A dictionary containing the user's profile details.
-
+
         Raises:
             httpx.HTTPError: If the API request fails.
-
+
         Tags:
             linkedin, user, profile, retrieve, get, api, important
         """
-
-
-
-
-        )
+        url = f"{self.base_url}/api/v1/users/{identifier}"
+        params: dict[str, Any] = {"account_id": self.account_id}
+        response = self._get(url, params=params)
+        return self._handle_response(response)
 
     def linkedin_list_post_comments(
         self,
```
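The rebuilt `linkedin_search` posts a classic-API payload to `/api/v1/linkedin/search`, while `linkedin_list_profile_posts` (now with an `is_company` flag) and `linkedin_retrieve_profile` call the Unipile user endpoints directly. A short usage sketch, adapted from the removed `scraper_testers.py` and the docstring examples, assuming the `scraper` instance from the setup sketch above; the provider ID is a placeholder:

```python
# Hedged sketch; `scraper` is the instance configured in the earlier setup sketch.
jobs = scraper.linkedin_search(keywords="ai", limit=2, category="jobs")

profile = scraper.linkedin_retrieve_profile(identifier="manojbajaj95")  # public id from the docstring example

posts = scraper.linkedin_list_profile_posts(
    identifier="<provider-internal-id>",  # placeholder provider ID
    limit=2,
    is_company=False,  # new in this release; distinguishes user vs. company profiles
)
```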
universal_mcp/applications/scraper/app.py (continued):

```diff
@@ -134,31 +204,34 @@
         limit: int | None = None,
     ) -> dict[str, Any]:
         """
-        Fetches
-
+        Fetches comments for a specific post. Providing an optional `comment_id` retrieves threaded replies instead of top-level comments. This read-only operation contrasts with `create_post_comment`, which publishes new comments, and `list_content_reactions`, which retrieves 'likes'.
+
         Args:
-            post_id: The social ID of the post.
+            post_id: The social ID of the post.
             comment_id: If provided, retrieves replies to this comment ID instead of top-level comments.
             cursor: Pagination cursor.
-            limit: Number of comments to return.
-
+            limit: Number of comments to return. (OpenAPI spec shows type string, passed as string if provided).
+
         Returns:
             A dictionary containing a list of comment objects and pagination details.
-
+
         Raises:
             httpx.HTTPError: If the API request fails.
-
+
         Tags:
             linkedin, post, comment, list, content, api, important
         """
-
-
-
-
-
-
-
-
+        url = f"{self.base_url}/api/v1/posts/{post_id}/comments"
+        params: dict[str, Any] = {"account_id": self.account_id}
+        if cursor:
+            params["cursor"] = cursor
+        if limit is not None:
+            params["limit"] = str(limit)
+        if comment_id:
+            params["comment_id"] = comment_id
+
+        response = self._get(url, params=params)
+        return response.json()
 
     def list_tools(self):
         """
```
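`linkedin_list_post_comments` follows the same pattern, issuing the GET itself and stringifying `limit` per the OpenAPI note. A usage sketch based on the commented-out call in the removed `scraper_testers.py`, using the same `scraper` instance as above:

```python
# Hedged sketch: fetch top-level comments for a post by its social/URN ID.
comments = scraper.linkedin_list_post_comments(
    post_id="urn:li:ugcPost:7386500271624896512",  # example URN from the removed tester
    limit=10,  # forwarded as a string, per the docstring note
)
```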
```diff
--- universal_mcp_applications-0.1.30rc1.dist-info/METADATA
+++ universal_mcp_applications-0.1.30rc2.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: universal-mcp-applications
-Version: 0.1.30rc1
+Version: 0.1.30rc2
 Summary: A Universal MCP Application: universal_mcp_applications
 Project-URL: Homepage, https://github.com/universal-mcp/applications
 Project-URL: Repository, https://github.com/universal-mcp/applications
```
```diff
--- universal_mcp_applications-0.1.30rc1.dist-info/RECORD
+++ universal_mcp_applications-0.1.30rc2.dist-info/RECORD
@@ -144,7 +144,7 @@ universal_mcp/applications/jira/app.py,sha256=V9GunK_s4gEuCw6Qnf45lDXaXwjVrNAhWm
 universal_mcp/applications/klaviyo/README.md,sha256=6f2wbhCFF-BbgLr0nEE2Nb-Pz0M0b7U42Dp-ZILXz0I,53609
 universal_mcp/applications/klaviyo/__init__.py,sha256=YS2GhW7my_I1tfyLlxlkeTFmlz2WD8vIpMThtdBflqQ,28
 universal_mcp/applications/klaviyo/app.py,sha256=xHQxEZFVIWPCBmL6YoYxuVREibwPRH21izw0psmOzFc,423692
-universal_mcp/applications/linkedin/README.md,sha256=
+universal_mcp/applications/linkedin/README.md,sha256=gwbNgrPKUsGzQEsta3kp8SK5RJJ5mPg23WGG548Y6no,4476
 universal_mcp/applications/linkedin/__init__.py,sha256=Yj-713vb4ZYykIlXlwOkKkIXIOB3opCW8wvp_CCqlKk,29
 universal_mcp/applications/linkedin/app.py,sha256=jiKf5A6BwTKeShU0JcSd4Q1iTi282tUqSREyhSPsIIM,27304
 universal_mcp/applications/mailchimp/README.md,sha256=xOR32HA8h-WMS9ntcBxyllM3UOBYiyvZ6tJBHlAuU7k,33802
@@ -197,8 +197,7 @@ universal_mcp/applications/rocketlane/__init__.py,sha256=jl3PjnTvPdjnbFXJgLywSlE
 universal_mcp/applications/rocketlane/app.py,sha256=Ae2hQFI5PylCLtNPJkTqWMLGsLx5fDd4wRFDhxTzTXQ,240689
 universal_mcp/applications/scraper/README.md,sha256=JUNLshHABs4T1f24nvQeee62YIElSkxpU-zs2kuS0Gw,1497
 universal_mcp/applications/scraper/__init__.py,sha256=W5Buzq8QbetUQm5m9xXCHeWcvVObU2vZ4xbvYtZImJo,28
-universal_mcp/applications/scraper/app.py,sha256
-universal_mcp/applications/scraper/scraper_testers.py,sha256=4OXq1ga7YeM5MmR8J78XdVKWjBX6qiDhBWh04WLXcHY,778
+universal_mcp/applications/scraper/app.py,sha256=B5toOQGPEoaFHAZXaMbtCu3VsYJLJmtw54cys7c4Lug,9691
 universal_mcp/applications/semanticscholar/README.md,sha256=JpLY_698pvstgoNfQ5Go8C8ehQ-o68uFDX5kr86upK0,2834
 universal_mcp/applications/semanticscholar/__init__.py,sha256=eR36chrc0pbBsSE1GadvmQH0OmtKnSC91xbE7HcDPf0,36
 universal_mcp/applications/semanticscholar/app.py,sha256=OHTFkR-IwRU5Rvb1bEu7XmRHikht3hEgZxszLQu6kFI,22234
@@ -277,7 +276,7 @@ universal_mcp/applications/youtube/app.py,sha256=eqgqe0b53W9Mj0FZGW3ZqY3xkGF4NbO
 universal_mcp/applications/zenquotes/README.md,sha256=FJyoTGRCaZjF_bsCBqg1CrYcvIfuUG_Qk616G1wjhF8,512
 universal_mcp/applications/zenquotes/__init__.py,sha256=C5nEHZ3Xy6nYUarq0BqQbbJnHs0UtSlqhk0DqmvWiHk,58
 universal_mcp/applications/zenquotes/app.py,sha256=7xIEnSZWAGYu5583Be2ZjSCtLUAfMWRzucSpp7hw_h4,1299
-universal_mcp_applications-0.1.
-universal_mcp_applications-0.1.
-universal_mcp_applications-0.1.
-universal_mcp_applications-0.1.
+universal_mcp_applications-0.1.30rc2.dist-info/METADATA,sha256=i_qlUYtOcqPi-4_uzCUzgFOXmDk4P8SBUThPoWlUKcg,2959
+universal_mcp_applications-0.1.30rc2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+universal_mcp_applications-0.1.30rc2.dist-info/licenses/LICENSE,sha256=NweDZVPslBAZFzlgByF158b85GR0f5_tLQgq1NS48To,1063
+universal_mcp_applications-0.1.30rc2.dist-info/RECORD,,
```
```diff
--- universal_mcp/applications/scraper/scraper_testers.py
+++ /dev/null
@@ -1,17 +0,0 @@
-from universal_mcp.applications.scraper import ScraperApp
-
-
-from universal_mcp.agentr import AgentrClient, AgentrRegistry, AgentrIntegration
-
-client = AgentrClient(api_key="key-1f5983ba-f433-4337-b652-dc40f8712660")
-registry = AgentrRegistry(client=client)
-
-integration = AgentrIntegration(name="scraper",client=client)
-
-scraper = ScraperApp(integration=integration)
-
-if __name__ == "__main__":
-    print(scraper.linkedin_search(keywords="ai", limit=2, category="jobs"))
-    # print(scraper.linkedin_retrieve_profile(identifier="ACoAAAEkwwAB9KEc2TrQgOLEQ-vzRyZeCDyc6DQ"))
-    # print(scraper.linkedin_list_profile_posts(identifier="ACoAAAEkwwAB9KEc2TrQgOLEQ-vzRyZeCDyc6DQ", limit=2))
-    # print(scraper.linkedin_list_post_comments(post_id="urn:li:ugcPost:7386500271624896512"))
```
Files renamed without content changes:
- {universal_mcp_applications-0.1.30rc1.dist-info → universal_mcp_applications-0.1.30rc2.dist-info}/WHEEL
- {universal_mcp_applications-0.1.30rc1.dist-info → universal_mcp_applications-0.1.30rc2.dist-info}/licenses/LICENSE