universal-mcp-applications 0.1.13__py3-none-any.whl → 0.1.15__py3-none-any.whl
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registries.
Potentially problematic release: this version of universal-mcp-applications might be problematic.
- universal_mcp/applications/aws_s3/app.py +71 -71
- universal_mcp/applications/calendly/app.py +199 -199
- universal_mcp/applications/canva/app.py +189 -189
- universal_mcp/applications/domain_checker/app.py +31 -24
- universal_mcp/applications/e2b/app.py +6 -7
- universal_mcp/applications/elevenlabs/app.py +24 -20
- universal_mcp/applications/exa/app.py +25 -20
- universal_mcp/applications/falai/app.py +44 -41
- universal_mcp/applications/file_system/app.py +20 -12
- universal_mcp/applications/firecrawl/app.py +46 -47
- universal_mcp/applications/fireflies/app.py +79 -79
- universal_mcp/applications/fpl/app.py +83 -74
- universal_mcp/applications/github/README.md +0 -1028
- universal_mcp/applications/github/app.py +55 -50227
- universal_mcp/applications/google_calendar/app.py +63 -65
- universal_mcp/applications/google_docs/app.py +78 -78
- universal_mcp/applications/google_drive/app.py +361 -440
- universal_mcp/applications/google_gemini/app.py +34 -17
- universal_mcp/applications/google_mail/app.py +117 -117
- universal_mcp/applications/google_searchconsole/app.py +41 -47
- universal_mcp/applications/google_sheet/app.py +157 -164
- universal_mcp/applications/http_tools/app.py +16 -16
- universal_mcp/applications/linkedin/app.py +26 -31
- universal_mcp/applications/ms_teams/app.py +190 -190
- universal_mcp/applications/openai/app.py +55 -56
- universal_mcp/applications/outlook/app.py +71 -71
- universal_mcp/applications/perplexity/app.py +17 -17
- universal_mcp/applications/reddit/app.py +225 -4053
- universal_mcp/applications/replicate/app.py +40 -42
- universal_mcp/applications/resend/app.py +157 -154
- universal_mcp/applications/scraper/app.py +24 -24
- universal_mcp/applications/serpapi/app.py +18 -20
- universal_mcp/applications/sharepoint/app.py +46 -36
- universal_mcp/applications/slack/app.py +66 -66
- universal_mcp/applications/tavily/app.py +7 -7
- universal_mcp/applications/twitter/api_segments/compliance_api.py +17 -20
- universal_mcp/applications/twitter/api_segments/dm_conversations_api.py +35 -40
- universal_mcp/applications/twitter/api_segments/dm_events_api.py +18 -21
- universal_mcp/applications/twitter/api_segments/likes_api.py +19 -22
- universal_mcp/applications/twitter/api_segments/lists_api.py +59 -68
- universal_mcp/applications/twitter/api_segments/spaces_api.py +36 -42
- universal_mcp/applications/twitter/api_segments/trends_api.py +7 -8
- universal_mcp/applications/twitter/api_segments/tweets_api.py +159 -185
- universal_mcp/applications/twitter/api_segments/usage_api.py +5 -6
- universal_mcp/applications/twitter/api_segments/users_api.py +230 -264
- universal_mcp/applications/unipile/app.py +99 -105
- universal_mcp/applications/whatsapp/app.py +86 -82
- universal_mcp/applications/whatsapp_business/app.py +147 -147
- universal_mcp/applications/youtube/app.py +290 -290
- universal_mcp/applications/zenquotes/app.py +6 -6
- {universal_mcp_applications-0.1.13.dist-info → universal_mcp_applications-0.1.15.dist-info}/METADATA +2 -2
- {universal_mcp_applications-0.1.13.dist-info → universal_mcp_applications-0.1.15.dist-info}/RECORD +54 -54
- {universal_mcp_applications-0.1.13.dist-info → universal_mcp_applications-0.1.15.dist-info}/WHEEL +0 -0
- {universal_mcp_applications-0.1.13.dist-info → universal_mcp_applications-0.1.15.dist-info}/licenses/LICENSE +0 -0
universal_mcp/applications/scraper/app.py

@@ -47,8 +47,8 @@ class ScraperApp(APIApplication):
         content_type: Optional[str] = None,
     ) -> dict[str, Any]:
         """
-        Performs a LinkedIn search for posts.
-
+        Performs a general LinkedIn search for posts using keywords and filters like date and content type. It supports pagination and can utilize either the 'classic' or 'sales_navigator' API, searching broadly across the platform rather than fetching posts from a specific user's profile.
+
         Args:
             category: Type of search to perform (defaults to "posts").
             api: Which LinkedIn API to use - "classic" or "sales_navigator".
@@ -58,13 +58,13 @@ class ScraperApp(APIApplication):
             sort_by: How to sort the results, e.g., "relevance" or "date".
             date_posted: Filter posts by when they were posted.
             content_type: Filter by the type of content in the post. Example: "videos", "images", "live_videos", "collaborative_articles", "documents"
-
+
         Returns:
             A dictionary containing search results and pagination details.
-
+
         Raises:
             httpx.HTTPError: If the API request fails.
-
+
         Tags:
             linkedin, search, posts, api, scrapper, important
         """
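
Usage sketch (illustrative, not part of the diff): the renamed search tool can be driven as below, assuming an already-configured ScraperApp instance. The helper name and argument values are examples, and only parameters visible in this diff are used.

from typing import Any

def search_recent_video_posts(scraper) -> dict[str, Any]:
    # `scraper` is assumed to be a configured ScraperApp instance (construction is not shown in this diff).
    return scraper.linkedin_post_search(
        api="classic",          # or "sales_navigator"
        category="posts",
        sort_by="date",
        content_type="videos",
    )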
@@ -81,31 +81,31 @@ class ScraperApp(APIApplication):
             content_type=content_type,
         )

-    def
+    def linkedin_list_profile_posts(
         self,
         identifier: str,
         cursor: Optional[str] = None,
         limit: Optional[int] = None,
     ) -> dict[str, Any]:
         """
-
-
+        Fetches a paginated list of all LinkedIn posts from a specific user or company profile using their unique identifier. This function retrieves content directly from a profile, unlike `linkedin_post_search` which finds posts across LinkedIn based on keywords and other filters.
+
         Args:
             identifier: The entity's provider internal ID (LinkedIn ID).starts with ACo for users, while for companies it's a series of numbers.
             cursor: Pagination cursor for the next page of entries.
             limit: Number of items to return (1-100, though spec allows up to 250).
-
+
         Returns:
             A dictionary containing a list of post objects and pagination details.
-
+
         Raises:
             httpx.HTTPError: If the API request fails.
-
+
         Tags:
             linkedin, post, list, user_posts, company_posts, content, api, important
         """

-        return self._unipile_app.
+        return self._unipile_app.list_profile_posts(
             identifier=identifier,
             account_id=self.account_id,
             cursor=cursor,
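
Pagination sketch (illustrative): the cursor/limit parameters suggest a loop like the one below, again assuming a configured ScraperApp instance. The "items" and "cursor" response keys are assumptions; the diff only states that the return value contains posts and pagination details.

from typing import Any

def fetch_all_profile_posts(scraper, identifier: str) -> list[dict[str, Any]]:
    posts: list[dict[str, Any]] = []
    cursor = None
    while True:
        page = scraper.linkedin_list_profile_posts(identifier=identifier, cursor=cursor, limit=100)
        posts.extend(page.get("items", []))   # "items" is an assumed key name
        cursor = page.get("cursor")           # assumed pagination key name
        if not cursor:
            return posts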
@@ -117,23 +117,23 @@ class ScraperApp(APIApplication):
         identifier: str,
     ) -> dict[str, Any]:
         """
-        Retrieves a specific LinkedIn user profile by
-
+        Retrieves a specific LinkedIn user's profile by their unique identifier, which can be an internal provider ID or a public username. This function simplifies data access by delegating the actual profile retrieval request to the integrated Unipile application, distinct from functions that list posts or comments.
+
         Args:
             identifier: Can be the provider's internal id OR the provider's public id of the requested user.
                 For example, for https://www.linkedin.com/in/manojbajaj95/, the identifier is "manojbajaj95".
-
+
         Returns:
             A dictionary containing the user's profile details.
-
+
         Raises:
             httpx.HTTPError: If the API request fails.
-
+
         Tags:
             linkedin, user, profile, retrieve, get, api, important
         """

-        return self._unipile_app.
+        return self._unipile_app.retrieve_user_profile(
             identifier=identifier,
             account_id=self.account_id,
         )
@@ -147,20 +147,20 @@ class ScraperApp(APIApplication):
         limit: Optional[int] = None,
     ) -> dict[str, Any]:
         """
-
-
+        Fetches comments for a specified LinkedIn post. If a `comment_id` is provided, it retrieves replies to that comment instead of top-level comments. This function supports pagination and specifically targets comments, unlike others in the class that search for or list entire posts.
+
         Args:
             post_id: The social ID of the post. Example rn:li:activity:7342082869034393600
             comment_id: If provided, retrieves replies to this comment ID instead of top-level comments.
             cursor: Pagination cursor.
             limit: Number of comments to return.
-
+
         Returns:
             A dictionary containing a list of comment objects and pagination details.
-
+
         Raises:
             httpx.HTTPError: If the API request fails.
-
+
         Tags:
             linkedin, post, comment, list, content, api, important
         """
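
Replies sketch (illustrative): per the new docstring, passing comment_id switches the tool from top-level comments to replies. The helper below assumes a configured ScraperApp instance.

from typing import Any

def fetch_comment_replies(scraper, post_id: str, comment_id: str) -> dict[str, Any]:
    # post_id is the post's social ID, e.g. an "urn:li:activity:..." value.
    return scraper.linkedin_list_post_comments(
        post_id=post_id,
        comment_id=comment_id,   # omit this argument to get top-level comments instead
        limit=50,
    )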
@@ -183,7 +183,7 @@ class ScraperApp(APIApplication):
         """
         return [
             self.linkedin_post_search,
-            self.
+            self.linkedin_list_profile_posts,
             self.linkedin_retrieve_profile,
             self.linkedin_list_post_comments,
         ]
universal_mcp/applications/serpapi/app.py

@@ -18,8 +18,7 @@ class SerpapiApp(APIApplication):
     @property
     def serpapi_api_key(self) -> str:
         """
-
-        Raises NotAuthorizedError if the key cannot be obtained.
+        A property that lazily retrieves the SerpApi API key from the integration and caches it for future use. It fetches credentials on first access, raising a `NotAuthorizedError` if the key is missing. Subsequent calls efficiently return the cached key.
         """
         if self._serpapi_api_key is None:
             if not self.integration:
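
The new docstring describes a lazy, cached credential lookup. A generic sketch of that pattern is shown below; this is not the package's code, and the integration interface and exception type are stand-ins.

class LazyKeyExample:
    def __init__(self, integration=None):
        self.integration = integration
        self._api_key: str | None = None

    @property
    def api_key(self) -> str:
        # Fetch once, cache, and fail loudly when credentials are unavailable.
        if self._api_key is None:
            if not self.integration:
                raise RuntimeError("integration not configured")   # the real code raises NotAuthorizedError
            credentials = self.integration.get_credentials()        # assumed integration interface
            key = credentials.get("api_key")
            if not key:
                raise RuntimeError("API key missing from credentials")
            self._api_key = key
        return self._api_key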
@@ -76,21 +75,20 @@ class SerpapiApp(APIApplication):
             logger.info("SerpApi API Key successfully retrieved and cached.")
         return self._serpapi_api_key

-    async def
+    async def web_search(self, params: dict[str, Any] | None = None) -> str:
         """
-        Performs a search
-
-
+        Performs a general web search via SerpApi, defaulting to the 'google_light' engine. It accepts custom parameters, retrieves organic results, and formats them into a string with titles, links, and snippets. It also handles API authentication and raises `NotAuthorizedError` for credential-related issues.
+
         Args:
             params: Dictionary of engine-specific parameters (e.g., {'q': 'Coffee', 'engine': 'google_light', 'location': 'Austin, TX'}). Defaults to None.
-
+
         Returns:
             A formatted string containing search results with titles, links, and snippets, or an error message if the search fails.
-
+
         Raises:
             NotAuthorizedError: If the API key cannot be retrieved or is invalid/rejected by SerpApi.
             Exception: For other unexpected errors during the search process. (Specific HTTP errors or SerpApiErrors are caught and returned as strings or raise NotAuthorizedError).
-
+
         Tags:
             search, async, web-scraping, api, serpapi, important
         """
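
Call sketch (illustrative): web_search is async and takes the engine parameters given in its own docstring. The wrapper below assumes a configured SerpapiApp instance.

import asyncio

async def run_example_search(serpapi_app) -> str:
    # Parameters mirror the docstring's example; the engine defaults to 'google_light' if omitted.
    return await serpapi_app.web_search(
        params={"q": "Coffee", "engine": "google_light", "location": "Austin, TX"}
    )

# asyncio.run(run_example_search(app))  # given a configured `app` instance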
@@ -195,20 +193,20 @@ class SerpapiApp(APIApplication):
         place_id: str | None = None,
     ) -> dict[str, Any]:
         """
-
-
+        Executes a Google Maps search via SerpApi using a query, coordinates, or place ID. It enhances the results by adding a `google_maps_url` to each location, distinguishing it from `get_google_maps_reviews` which retrieves reviews for a known place.
+
         Args:
             q (string, optional): The search query for Google Maps (e.g., "Coffee", "Restaurants", "Gas stations").
             ll (string, optional): Latitude and longitude with zoom level in format "@lat,lng,zoom" (e.g., "@40.7455096,-74.0083012,14z"). The zoom attribute ranges from 3z (map completely zoomed out) to 21z (map completely zoomed in). Results are not guaranteed to be within the requested geographic location.
             place_id (string, optional): The unique reference to a place in Google Maps. Place IDs are available for most locations, including businesses, landmarks, parks, and intersections. You can find the place_id using our Google Maps API. place_id can be used without any other optional parameter. place_id and data_cid can't be used together.
-
+
         Returns:
             dict[str, Any]: Formatted Google Maps search results with place names, addresses, ratings, and other details.
-
+
         Raises:
             ValueError: Raised when required parameters are missing.
             HTTPStatusError: Raised when the API request fails with detailed error information including status code and response body.
-
+
         Tags:
             google-maps, search, location, places, important
         """
@@ -250,19 +248,19 @@ class SerpapiApp(APIApplication):
         hl: str | None = None,
     ) -> dict[str, Any]:
         """
-
-
+        Fetches Google Maps reviews for a specific location via SerpApi using its unique `data_id`. This function uses the `google_maps_reviews` engine, unlike `google_maps_search` which finds locations. Results can be returned in a specified language, defaulting to English.
+
         Args:
             data_id (string): The data ID of the place to get reviews for (e.g., "0x89c259af336b3341:0xa4969e07ce3108de").
             hl (string, optional): Language parameter for the search results. Defaults to "en".
-
+
         Returns:
             dict[str, Any]: Google Maps reviews data with ratings, comments, and other review details.
-
+
         Raises:
             ValueError: Raised when required parameters are missing.
             HTTPStatusError: Raised when the API request fails with detailed error information including status code and response body.
-
+
         Tags:
             google-maps, reviews, ratings, places, important
         """
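
Combined sketch (illustrative): find places, then pull reviews for one of them. The methods are written synchronously for brevity (the diff does not show whether they are coroutines), and the `local_results`/`data_id` response fields are assumptions about the SerpApi payload.

from typing import Any

def search_then_fetch_reviews(serpapi_app) -> dict[str, Any]:
    places = serpapi_app.google_maps_search(
        q="Coffee",
        ll="@40.7455096,-74.0083012,14z",   # "@lat,lng,zoom", per the docstring
    )
    data_id = places["local_results"][0]["data_id"]   # assumed response structure
    return serpapi_app.get_google_maps_reviews(data_id=data_id, hl="en")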
@@ -287,7 +285,7 @@ class SerpapiApp(APIApplication):

     def list_tools(self) -> list[callable]:
         return [
-            self.
+            self.web_search,
             self.google_maps_search,
             self.get_google_maps_reviews,
         ]
universal_mcp/applications/sharepoint/app.py

@@ -35,8 +35,9 @@ class SharepointApp(BaseApplication):

     @property
     def client(self):
-        """
-
+        """
+        A lazy-loaded property that gets or creates an authenticated GraphClient instance. On its first call, it uses integration credentials to initialize the client, fetches the user's profile and root site ID, and caches the instance for subsequent use. This ensures efficient connection management.
+
         Returns:
             GraphClient: The authenticated GraphClient instance.
         """
@@ -49,7 +50,10 @@ class SharepointApp(BaseApplication):
         if not credentials.get("access_token"):
             raise ValueError("No access token found")

-        def
+        def _acquire_token():
+            """
+            Formats stored credentials for the `GraphClient` authentication callback. It packages existing access and refresh tokens from the integration into the specific dictionary structure required by the client library for authentication, including a hardcoded 'Bearer' token type.
+            """
             access_token = credentials.get("access_token")
             refresh_token = credentials.get("refresh_token")
             return {
@@ -59,7 +63,7 @@ class SharepointApp(BaseApplication):
             }

         if self._client is None:
-            self._client = GraphClient(token_callback=
+            self._client = GraphClient(token_callback=_acquire_token)
             # Get me
             me = self._client.me.get().execute_query()
             logger.debug(me.properties)
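
Shape sketch (illustrative): the `_acquire_token` docstring describes the dictionary handed to the GraphClient token callback. The key names below follow the described access/refresh tokens and hardcoded 'Bearer' type, but the exact keys are not visible in this diff.

def acquire_token_example(credentials: dict) -> dict:
    # Packages already-issued tokens for the callback; no new token is requested here.
    return {
        "access_token": credentials.get("access_token"),
        "refresh_token": credentials.get("refresh_token"),
        "token_type": "Bearer",   # hardcoded, as the docstring states
    }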
@@ -69,14 +73,15 @@ class SharepointApp(BaseApplication):
         return self._client

     def list_folders(self, folder_path: str | None = None) -> list[dict[str, Any]]:
-        """
-
+        """
+        Retrieves the names of all immediate subfolders within a specified directory. If a path is not provided, it defaults to listing folders in the root of the user's drive. This function is distinct from `list_documents`, which lists files.
+
         Args:
             folder_path (Optional[str], optional): The path to the parent folder. If None, lists folders in the root.
-
+
         Returns:
             List[Dict[str, Any]]: A list of folder names in the specified directory.
-
+
         Tags:
             important
         """
@@ -88,18 +93,19 @@ class SharepointApp(BaseApplication):

         return [folder.properties.get("name") for folder in folders]

-    def
+    def create_folder_and_list(
         self, folder_name: str, folder_path: str | None = None
     ) -> dict[str, Any]:
-        """
-
+        """
+        Creates a new folder with a given name inside a specified parent directory on SharePoint. If no path is provided, the folder is created in the root. It then returns an updated list of all folder names within that parent directory.
+
         Args:
             folder_name (str): The name of the folder to create.
             folder_path (str | None, optional): The path to the parent folder. If None, creates in the root.
-
+
         Returns:
             Dict[str, Any]: The updated list of folders in the target directory.
-
+
         Tags:
             important
         """
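
Usage sketch (illustrative): create a folder only if it is missing, then return the refreshed listing, mirroring what create_folder_and_list does. A configured SharepointApp instance is assumed, and the helper name is hypothetical.

def ensure_folder(sharepoint_app, name: str, parent: str | None = None):
    # list_folders returns folder names; create_folder_and_list creates and then re-lists.
    existing = sharepoint_app.list_folders(parent)
    if name in existing:
        return existing
    return sharepoint_app.create_folder_and_list(name, parent)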
@@ -110,15 +116,16 @@ class SharepointApp(BaseApplication):
         folder.create_folder(folder_name).execute_query()
         return self.list_folders(folder_path)

-    def
-        """
-
+    def list_files(self, folder_path: str) -> list[dict[str, Any]]:
+        """
+        Retrieves files from a specified folder path. For each file, it returns a dictionary containing key metadata like its name, URL, size, creation date, and last modified date. This function specifically lists files, distinct from `list_folders` which only lists directories.
+
         Args:
             folder_path (str): The path to the folder whose documents are to be listed.
-
+
         Returns:
             List[Dict[str, Any]]: A list of dictionaries containing document metadata.
-
+
         Tags:
             important
         """
@@ -136,19 +143,20 @@ class SharepointApp(BaseApplication):
             for f in files
         ]

-    def
+    def upload_text_file(
         self, file_path: str, file_name: str, content: str
     ) -> dict[str, Any]:
-        """
-
+        """
+        Uploads string content to a new file within a specified SharePoint folder path. After creation, it returns an updated list of all documents and their metadata residing in that folder, effectively confirming the file was added successfully.
+
         Args:
             file_path (str): The path to the folder where the document will be created.
             file_name (str): The name of the document to create.
             content (str): The content to write into the document.
-
+
         Returns:
             Dict[str, Any]: The updated list of documents in the folder.
-
+
         Tags: important
         """
         file = self.client.me.drive.root.get_by_path(file_path)
@@ -158,14 +166,15 @@ class SharepointApp(BaseApplication):
         return self.list_documents(file_path)

     def get_document_content(self, file_path: str) -> dict[str, Any]:
-        """
-
+        """
+        Retrieves a document's content from SharePoint. It returns a dictionary with the content, name, and size. Content is decoded as a string for text files or Base64-encoded for binary files. This is distinct from `list_documents` which only returns metadata without content.
+
         Args:
             file_path (str): The path to the document.
-
+
         Returns:
             Dict[str, Any]: A dictionary containing the document's name, content type, content (as text or base64), and size.
-
+
         Tags: important
         """
         file = self.client.me.drive.root.get_by_path(file_path).get().execute_query()
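
Round-trip sketch (illustrative): write a text file, then read it back. A configured SharepointApp instance is assumed, and the plain slash join of folder and file name is an assumption about how paths are addressed.

from typing import Any

def upload_and_read_back(sharepoint_app, folder: str, name: str, text: str) -> dict[str, Any]:
    sharepoint_app.upload_text_file(file_path=folder, file_name=name, content=text)
    return sharepoint_app.get_document_content(f"{folder}/{name}")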
@@ -189,15 +198,16 @@ class SharepointApp(BaseApplication):
             "size": len(content),
         }

-    def
-        """
-
+    def delete_document(self, file_path: str):
+        """
+        Permanently deletes a specified file from SharePoint/OneDrive. The function takes the file path as an argument and returns True upon successful deletion. An exception is raised if the file is not found or the deletion fails.
+
         Args:
             file_path (str): The path to the file to delete.
-
+
         Returns:
             bool: True if the file was deleted successfully.
-
+
         Tags:
             important
         """
@@ -208,9 +218,9 @@ class SharepointApp(BaseApplication):
     def list_tools(self):
         return [
             self.list_folders,
-            self.
-            self.
-            self.
+            self.create_folder_and_list,
+            self.list_files,
+            self.upload_text_file,
             self.get_document_content,
-            self.
+            self.delete_document,
         ]
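
Since each app exposes its callables through list_tools(), the renames above can be confirmed at runtime from the registered method names (assuming an instantiated app object):

def tool_names(app) -> list[str]:
    # Each list_tools() entry is a bound method, so __name__ gives the registered tool name.
    return [tool.__name__ for tool in app.list_tools()]

# e.g. tool_names(sharepoint_app) should now include "list_files" and "upload_text_file"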