mcp-sharepoint-us 2.0.13__py3-none-any.whl → 2.0.15__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mcp-sharepoint-us might be problematic; see the registry page for details.
- mcp_sharepoint/__init__.py +37 -13
- mcp_sharepoint/auth.py +9 -1
- mcp_sharepoint/graph_api.py +280 -64
- {mcp_sharepoint_us-2.0.13.dist-info → mcp_sharepoint_us-2.0.15.dist-info}/METADATA +1 -1
- mcp_sharepoint_us-2.0.15.dist-info/RECORD +10 -0
- mcp_sharepoint_us-2.0.13.dist-info/RECORD +0 -10
- {mcp_sharepoint_us-2.0.13.dist-info → mcp_sharepoint_us-2.0.15.dist-info}/WHEEL +0 -0
- {mcp_sharepoint_us-2.0.13.dist-info → mcp_sharepoint_us-2.0.15.dist-info}/entry_points.txt +0 -0
- {mcp_sharepoint_us-2.0.13.dist-info → mcp_sharepoint_us-2.0.15.dist-info}/licenses/LICENSE +0 -0
- {mcp_sharepoint_us-2.0.13.dist-info → mcp_sharepoint_us-2.0.15.dist-info}/top_level.txt +0 -0
mcp_sharepoint/__init__.py
CHANGED
@@ -35,6 +35,7 @@ def ensure_context(func):
     global graph_client, authenticator
     if graph_client is None:
         try:
+            logger.info("Initializing Graph API client...")
            from .auth import SharePointAuthenticator

            # Get credentials
@@ -44,6 +45,11 @@ def ensure_context(func):
            tenant_id = os.getenv("SHP_TENANT_ID")
            cloud = "government" if ".sharepoint.us" in site_url else "commercial"

+            logger.info(f"Site URL: {site_url}")
+            logger.info(f"Tenant ID: {tenant_id}")
+            logger.info(f"Client ID: {client_id}")
+            logger.info(f"Cloud: {cloud}")
+
            # Create shared authenticator
            authenticator = SharePointAuthenticator(
                site_url=site_url,
@@ -52,11 +58,15 @@ def ensure_context(func):
                tenant_id=tenant_id,
                cloud=cloud
            )
+            logger.info("Authenticator created successfully")

            # Create Graph API client with direct token access
            def get_token():
                """Get access token for Graph API"""
-
+                logger.debug("Token callback invoked")
+                token = authenticator.get_access_token()
+                logger.debug(f"Token acquired (length: {len(token)})")
+                return token

            graph_client = GraphAPIClient(
                site_url=site_url,
@@ -65,7 +75,7 @@ def ensure_context(func):
            logger.info("Graph API client initialized successfully")

        except Exception as e:
-            logger.error(f"Failed to initialize Graph API client: {e}")
+            logger.error(f"Failed to initialize Graph API client: {e}", exc_info=True)
            raise RuntimeError(
                f"Graph API authentication failed: {e}. "
                "Please check your environment variables and ensure:\n"
@@ -321,28 +331,42 @@ async def call_tool(name: str, arguments: dict) -> list[TextContent]:


 async def test_connection() -> list[TextContent]:
-    """Test SharePoint connection"""
+    """Test SharePoint connection using Microsoft Graph API"""
     try:
-
-
-
+        logger.info("Testing Graph API connection...")
+
+        # Try to get site ID and drive ID
+        site_id = await asyncio.to_thread(graph_client._get_site_id)
+        drive_id = await asyncio.to_thread(graph_client._get_drive_id)
+
+        auth_method = "msal (Microsoft Graph API)"
+
+        logger.info(f"✓ Connection test successful - Site ID: {site_id}, Drive ID: {drive_id}")
+
        return [TextContent(
            type="text",
-            text=f"✓ Successfully connected to SharePoint!\n\n"
-                 f"Site
-                 f"
-                 f"
+            text=f"✓ Successfully connected to SharePoint via Microsoft Graph API!\n\n"
+                 f"Site URL: {graph_client.site_url}\n"
+                 f"Graph Endpoint: {graph_client.graph_endpoint}\n"
+                 f"Site ID: {site_id}\n"
+                 f"Drive ID: {drive_id}\n"
+                 f"Authentication Method: {auth_method}\n"
                 f"Tenant ID: {os.getenv('SHP_TENANT_ID')}\n\n"
-                 f"Connection is working correctly with
+                 f"Connection is working correctly with Microsoft Graph API."
        )]
    except Exception as e:
+        logger.error(f"✗ Connection test failed: {str(e)}", exc_info=True)
        return [TextContent(
            type="text",
            text=f"✗ Connection failed: {str(e)}\n\n"
                f"This usually means:\n"
                f"1. Your credentials are incorrect\n"
-                f"2. Your app doesn't have proper
-                f"3.
+                f"2. Your app doesn't have proper Microsoft Graph permissions\n"
+                f"3. Network connectivity issues\n"
+                f"4. Azure AD app registration is missing required permissions:\n"
+                f"   - Sites.Read.All\n"
+                f"   - Files.ReadWrite.All\n\n"
+                f"Check the logs for more details."
        )]

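Taken together, these hunks wrap logging around a recognizable pattern: a module-level client initialized lazily on the first tool call, with a get_token callback so the Graph client never stores credentials itself. A minimal standalone sketch of that shape, not the package's exact code (the token_callback keyword is inferred from self.token_callback in graph_api.py):

import logging
import os

logger = logging.getLogger(__name__)

_client = None  # shared instance, mirrors the module-level graph_client above


def get_client(authenticator_cls, client_cls):
    """Lazily build one shared Graph client wired to a token callback."""
    global _client
    if _client is None:
        logger.info("Initializing Graph API client...")
        auth = authenticator_cls(
            site_url=os.getenv("SHP_SITE_URL"),
            tenant_id=os.getenv("SHP_TENANT_ID"),
        )

        def get_token():
            # Called on every request, so refresh logic stays in the authenticator.
            return auth.get_access_token()

        _client = client_cls(site_url=os.getenv("SHP_SITE_URL"), token_callback=get_token)
    return _client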
mcp_sharepoint/auth.py
CHANGED
@@ -226,16 +226,22 @@ class SharePointAuthenticator:

        now = int(time.time())
        if self._access_token and now < (self._access_token_exp - 60):
+            logger.debug("Using cached access token")
            return self._access_token

+        logger.info(f"Acquiring new access token from {self._authority_url}")
+        logger.debug(f"Scopes: {self._scopes}")
+
        last_err = None
        for attempt in range(1, 6):  # 5 attempts
            try:
+                logger.debug(f"Token acquisition attempt {attempt}/5")
                result = self._msal_app.acquire_token_for_client(scopes=self._scopes)

                if "access_token" not in result:
                    error_desc = result.get("error_description", "Unknown error")
                    error = result.get("error", "Unknown")
+                    logger.error(f"Token acquisition failed: {error} - {error_desc}")
                    raise ValueError(
                        f"Failed to acquire token: {error} - {error_desc}\n"
                        f"Authority: {self._authority_url}\n"
@@ -249,11 +255,13 @@ class SharePointAuthenticator:
                self._access_token = token
                self._access_token_exp = int(time.time()) + expires_in

-                logger.info(f"Successfully acquired Graph API token")
+                logger.info(f"Successfully acquired Graph API token (expires in {expires_in}s)")
+                logger.debug(f"Token length: {len(token)}, starts with: {token[:20]}...")
                return token

            except Exception as e:
                last_err = e
+                logger.error(f"Token acquisition attempt {attempt}/5 failed: {type(e).__name__}: {e}")
                # Exponential backoff with jitter
                sleep_s = min(8.0, (2 ** (attempt - 1)) * 0.5) + random.random() * 0.25
                logger.warning(
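The retry delay comes straight from the sleep_s line above: exponential growth from 0.5 s, capped at 8 s, plus up to 0.25 s of jitter. A standalone sketch of just that schedule:

import random


def backoff_schedule(attempts: int = 5) -> list[float]:
    """Delays between token attempts: 0.5, 1, 2, 4, 8 seconds (capped), plus jitter."""
    return [
        min(8.0, (2 ** (attempt - 1)) * 0.5) + random.random() * 0.25
        for attempt in range(1, attempts + 1)
    ]


print([round(d, 2) for d in backoff_schedule()])  # e.g. [0.52, 1.21, 2.03, 4.11, 8.24]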
mcp_sharepoint/graph_api.py
CHANGED
@@ -5,9 +5,13 @@ Primary API for all SharePoint operations in Azure Government Cloud.
 import os
 import logging
 import asyncio
+import socket
+import ssl
 from typing import Optional, Dict, Any, List
 from urllib.parse import urlparse, quote
 import requests
+from requests.adapters import HTTPAdapter
+from urllib3.util.retry import Retry

 logger = logging.getLogger(__name__)

@@ -40,8 +44,113 @@ class GraphAPIClient:
            self.graph_endpoint = "https://graph.microsoft.com/v1.0"
            logger.info("Using Microsoft Graph Commercial endpoint")

+        # Create a requests session with retry logic
+        self._session = self._create_session()
+
+    def _create_session(self) -> requests.Session:
+        """
+        Create a requests session with retry logic and connection pooling.
+        """
+        session = requests.Session()
+
+        # Configure retry strategy for transient errors
+        retry_strategy = Retry(
+            total=3,  # Total number of retries
+            backoff_factor=1,  # Wait 1, 2, 4 seconds between retries
+            status_forcelist=[429, 500, 502, 503, 504],  # Retry on these HTTP status codes
+            allowed_methods=["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE", "POST"]
+        )
+
+        adapter = HTTPAdapter(max_retries=retry_strategy, pool_connections=10, pool_maxsize=10)
+        session.mount("http://", adapter)
+        session.mount("https://", adapter)
+
+        logger.debug("Created requests session with retry logic and connection pooling")
+        return session
+
+    def _diagnose_connectivity(self, url: str) -> None:
+        """
+        Perform detailed connectivity diagnostics for a URL.
+
+        Args:
+            url: The URL to diagnose
+        """
+        parsed = urlparse(url)
+        hostname = parsed.hostname
+        port = parsed.port or (443 if parsed.scheme == "https" else 80)
+
+        logger.info(f"=== CONNECTIVITY DIAGNOSTICS for {hostname} ===")
+
+        # 1. DNS Resolution
+        try:
+            logger.info(f"[DNS] Resolving {hostname}...")
+            ip_addresses = socket.getaddrinfo(hostname, port, socket.AF_UNSPEC, socket.SOCK_STREAM)
+            for family, socktype, proto, canonname, sockaddr in ip_addresses:
+                family_name = "IPv4" if family == socket.AF_INET else "IPv6"
+                logger.info(f"[DNS] ✓ Resolved to {sockaddr[0]} ({family_name})")
+        except socket.gaierror as e:
+            logger.error(f"[DNS] ✗ DNS resolution failed: {e}")
+            return
+        except Exception as e:
+            logger.error(f"[DNS] ✗ Unexpected error during DNS resolution: {e}")
+            return
+
+        # 2. TCP Connection Test
+        try:
+            logger.info(f"[TCP] Testing TCP connection to {hostname}:{port}...")
+            with socket.create_connection((hostname, port), timeout=10) as sock:
+                logger.info(f"[TCP] ✓ TCP connection successful")
+                peer_name = sock.getpeername()
+                logger.info(f"[TCP] Connected to {peer_name[0]}:{peer_name[1]}")
+
+                # 3. SSL/TLS Test (if HTTPS)
+                if parsed.scheme == "https":
+                    logger.info(f"[TLS] Testing TLS handshake...")
+                    context = ssl.create_default_context()
+                    try:
+                        with context.wrap_socket(sock, server_hostname=hostname) as ssock:
+                            logger.info(f"[TLS] ✓ TLS handshake successful")
+                            logger.info(f"[TLS] Protocol: {ssock.version()}")
+                            cipher = ssock.cipher()
+                            if cipher:
+                                logger.info(f"[TLS] Cipher: {cipher[0]} (bits: {cipher[2]})")
+
+                            # Get certificate info
+                            cert = ssock.getpeercert()
+                            if cert:
+                                subject = dict(x[0] for x in cert['subject'])
+                                logger.info(f"[TLS] Certificate subject: {subject.get('commonName', 'N/A')}")
+                                logger.info(f"[TLS] Certificate issuer: {dict(x[0] for x in cert['issuer']).get('organizationName', 'N/A')}")
+                    except ssl.SSLError as e:
+                        logger.error(f"[TLS] ✗ TLS handshake failed: {e}")
+                        return
+        except socket.timeout:
+            logger.error(f"[TCP] ✗ Connection timeout after 10 seconds")
+            return
+        except ConnectionRefusedError:
+            logger.error(f"[TCP] ✗ Connection refused by server")
+            return
+        except ConnectionResetError:
+            logger.error(f"[TCP] ✗ Connection reset by peer during TCP handshake")
+            return
+        except Exception as e:
+            logger.error(f"[TCP] ✗ Connection failed: {type(e).__name__}: {e}")
+            return
+
+        # 4. HTTP Basic Connectivity Test
+        try:
+            logger.info(f"[HTTP] Testing basic HTTP GET to {parsed.scheme}://{hostname}/")
+            test_url = f"{parsed.scheme}://{hostname}/"
+            response = self._session.get(test_url, timeout=10)
+            logger.info(f"[HTTP] ✓ Basic HTTP request successful (status: {response.status_code})")
+        except requests.exceptions.RequestException as e:
+            logger.error(f"[HTTP] ✗ Basic HTTP request failed: {type(e).__name__}: {e}")
+
+        logger.info(f"=== END DIAGNOSTICS ===\n")
+
    def _get_headers(self) -> Dict[str, str]:
        """Get authorization headers with access token."""
+        logger.debug("Getting authorization headers...")
        token_obj = self.token_callback()
        # Handle both TokenResponse objects and plain strings
        if hasattr(token_obj, 'accessToken'):
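The _create_session helper is standard requests/urllib3 machinery and can be reproduced outside the client for testing; the retry parameters below are copied from the hunk, and the target URL is only an example. Note that listing POST in allowed_methods retries non-idempotent requests:

import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

retry_strategy = Retry(
    total=3,
    backoff_factor=1,  # exponential backoff between attempts
    status_forcelist=[429, 500, 502, 503, 504],
    allowed_methods=["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE", "POST"],
)
adapter = HTTPAdapter(max_retries=retry_strategy, pool_connections=10, pool_maxsize=10)

session = requests.Session()
session.mount("https://", adapter)
session.mount("http://", adapter)

# Every request through this session now retries transparently on 429/5xx.
response = session.get("https://graph.microsoft.com/v1.0/", timeout=30)
print(response.status_code)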
@@ -49,6 +158,8 @@ class GraphAPIClient:
        else:
            token = str(token_obj)

+        logger.debug(f"Token acquired for headers (length: {len(token)}, starts with: {token[:20]}...)")
+
        return {
            "Authorization": f"Bearer {token}",
            "Accept": "application/json",
@@ -91,6 +202,7 @@ class GraphAPIClient:
        Caches the result for reuse.
        """
        if self._site_id:
+            logger.debug(f"Using cached site ID: {self._site_id}")
            return self._site_id

        parsed = urlparse(self.site_url)
@@ -104,12 +216,52 @@ class GraphAPIClient:
        else:
            url = f"{self.graph_endpoint}/sites/{hostname}:/{path}"

-
-
+        logger.info(f"Fetching site ID from: {url}")
+
+        # Get headers and log sanitized version
+        headers = self._get_headers()
+        sanitized_headers = {k: (v[:20] + "..." if k == "Authorization" else v) for k, v in headers.items()}
+        logger.debug(f"Request headers: {sanitized_headers}")
+
+        try:
+            # Make the request
+            logger.debug(f"Sending GET request to: {url}")
+            logger.debug(f"Timeout: 30 seconds")
+
+            response = self._session.get(url, headers=headers, timeout=30)

-
-
-
+            logger.debug(f"Response received - Status: {response.status_code}")
+            logger.debug(f"Response headers: {dict(response.headers)}")
+            logger.debug(f"Response encoding: {response.encoding}")
+
+            self._handle_response(response)
+
+            self._site_id = response.json()["id"]
+            logger.info(f"✓ Retrieved site ID: {self._site_id}")
+            return self._site_id
+
+        except requests.exceptions.ConnectionError as e:
+            logger.error(f"✗ ConnectionError getting site ID: {e}", exc_info=True)
+            logger.error("This indicates the connection was established but then dropped.")
+            logger.error("Running comprehensive diagnostics...")
+
+            # Run diagnostics to help identify the issue
+            self._diagnose_connectivity(url)
+
+            logger.error("\nPossible causes:")
+            logger.error("1. Firewall is blocking graph.microsoft.us")
+            logger.error("2. Proxy configuration needed")
+            logger.error("3. SSL/TLS version mismatch")
+            logger.error("4. Network instability")
+            raise
+
+        except requests.exceptions.Timeout:
+            logger.error(f"✗ Request timeout after 30 seconds", exc_info=True)
+            raise
+
+        except requests.exceptions.RequestException as e:
+            logger.error(f"✗ Network error getting site ID: {type(e).__name__}: {e}", exc_info=True)
+            raise

    def _get_drive_id(self) -> str:
        """
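One detail worth noting in this hunk: the Authorization header is truncated before it reaches the logs. The same redaction as a tiny helper (the function name is ours):

def sanitize_headers(headers: dict) -> dict:
    """Truncate bearer tokens so debug logs never contain a usable credential."""
    return {k: (v[:20] + "..." if k == "Authorization" else v) for k, v in headers.items()}


print(sanitize_headers({
    "Authorization": "Bearer eyJ0eXAiOiJKV1QiLCJhbGciOi",
    "Accept": "application/json",
}))
# {'Authorization': 'Bearer eyJ0eXAiOiJKV...', 'Accept': 'application/json'}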
@@ -117,17 +269,32 @@ class GraphAPIClient:
        Caches the result for reuse.
        """
        if self._drive_id:
+            logger.debug(f"Using cached drive ID: {self._drive_id}")
            return self._drive_id

        site_id = self._get_site_id()
        url = f"{self.graph_endpoint}/sites/{site_id}/drive"

-
-
+        logger.info(f"Fetching drive ID from: {url}")
+
+        try:
+            logger.debug(f"Sending GET request to: {url}")
+            response = self._session.get(url, headers=self._get_headers(), timeout=30)
+
+            logger.debug(f"Response received - Status: {response.status_code}")
+            self._handle_response(response)

-
-
-
+            self._drive_id = response.json()["id"]
+            logger.info(f"✓ Retrieved drive ID: {self._drive_id}")
+            return self._drive_id
+
+        except requests.exceptions.ConnectionError as e:
+            logger.error(f"✗ ConnectionError getting drive ID: {e}", exc_info=True)
+            raise
+
+        except requests.exceptions.RequestException as e:
+            logger.error(f"✗ Network error getting drive ID: {type(e).__name__}: {e}", exc_info=True)
+            raise

    def list_folders(self, folder_path: str = "") -> List[Dict[str, Any]]:
        """
@@ -139,6 +306,7 @@ class GraphAPIClient:
        Returns:
            List of folder objects with name, id, webUrl
        """
+        logger.info(f"Listing folders in '{folder_path}'")
        site_id = self._get_site_id()
        drive_id = self._get_drive_id()

@@ -149,23 +317,29 @@ class GraphAPIClient:
        else:
            url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root/children"

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        logger.info(f"Fetching folders from: {url}")
+        try:
+            response = self._session.get(url, headers=self._get_headers(), timeout=30)
+            logger.debug(f"Response status: {response.status_code}")
+            self._handle_response(response)
+
+            items = response.json().get("value", [])
+            # Filter to only folders
+            folders = [
+                {
+                    "name": item["name"],
+                    "id": item["id"],
+                    "webUrl": item.get("webUrl", ""),
+                }
+                for item in items
+                if "folder" in item
+            ]
+
+            logger.info(f"Found {len(folders)} folders in '{folder_path}'")
+            return folders
+        except requests.exceptions.RequestException as e:
+            logger.error(f"Network error listing folders: {type(e).__name__}: {e}", exc_info=True)
+            raise

    def list_documents(self, folder_path: str = "") -> List[Dict[str, Any]]:
        """
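The `if "folder" in item` test works because Microsoft Graph driveItems carry a folder or file facet; the mocked /children payload below shows the split both list methods rely on:

# Trimmed shape of a /children response, limited to the fields the hunk reads.
items = [
    {"name": "Reports", "id": "01A", "webUrl": "https://example.test/Reports", "folder": {"childCount": 3}},
    {"name": "notes.txt", "id": "01B", "webUrl": "https://example.test/notes.txt", "size": 12, "file": {}},
]

folders = [item["name"] for item in items if "folder" in item]
files = [item["name"] for item in items if "file" in item]
assert folders == ["Reports"]
assert files == ["notes.txt"]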
@@ -177,6 +351,7 @@ class GraphAPIClient:
        Returns:
            List of file objects with name, id, size, webUrl
        """
+        logger.info(f"Listing documents in '{folder_path}'")
        site_id = self._get_site_id()
        drive_id = self._get_drive_id()

@@ -186,24 +361,30 @@ class GraphAPIClient:
        else:
            url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root/children"

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        logger.info(f"Fetching documents from: {url}")
+        try:
+            response = self._session.get(url, headers=self._get_headers(), timeout=30)
+            logger.debug(f"Response status: {response.status_code}")
+            self._handle_response(response)
+
+            items = response.json().get("value", [])
+            # Filter to only files
+            files = [
+                {
+                    "name": item["name"],
+                    "id": item["id"],
+                    "size": item.get("size", 0),
+                    "webUrl": item.get("webUrl", ""),
+                }
+                for item in items
+                if "file" in item
+            ]
+
+            logger.info(f"Found {len(files)} files in '{folder_path}'")
+            return files
+        except requests.exceptions.RequestException as e:
+            logger.error(f"Network error listing documents: {type(e).__name__}: {e}", exc_info=True)
+            raise

    def get_file_content(self, file_path: str) -> bytes:
        """
@@ -215,17 +396,24 @@ class GraphAPIClient:
        Returns:
            File content as bytes
        """
+        logger.info(f"Getting content for file '{file_path}'")
        site_id = self._get_site_id()
        drive_id = self._get_drive_id()

        encoded_path = quote(file_path)
        url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root:/{encoded_path}:/content"

-
-
+        logger.info(f"Fetching file content from: {url}")
+        try:
+            response = self._session.get(url, headers=self._get_headers(), timeout=60)
+            logger.debug(f"Response status: {response.status_code}")
+            self._handle_response(response)

-
-
+            logger.info(f"Retrieved content for '{file_path}' ({len(response.content)} bytes)")
+            return response.content
+        except requests.exceptions.RequestException as e:
+            logger.error(f"Network error getting file content: {type(e).__name__}: {e}", exc_info=True)
+            raise

    def upload_file(self, folder_path: str, file_name: str, content: bytes) -> Dict[str, Any]:
        """
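The path handling here leans on urllib.parse.quote, which escapes spaces and punctuation but leaves '/' alone by default, so nested drive paths survive intact inside the /root:/{path}:/content address:

from urllib.parse import quote

file_path = "Shared Documents/Q3 report (final).xlsx"
print(quote(file_path))
# Shared%20Documents/Q3%20report%20%28final%29.xlsx
# '/' is kept by default (safe='/'), so folder structure is preserved in the URL.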
@@ -239,6 +427,7 @@ class GraphAPIClient:
        Returns:
            File metadata
        """
+        logger.info(f"Uploading file '{file_name}' to '{folder_path}' ({len(content)} bytes)")
        site_id = self._get_site_id()
        drive_id = self._get_drive_id()

@@ -250,14 +439,20 @@ class GraphAPIClient:
        encoded_path = quote(full_path)
        url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root:/{encoded_path}:/content"

+        logger.info(f"Uploading to: {url}")
        headers = self._get_headers()
        headers["Content-Type"] = "application/octet-stream"

-
-
+        try:
+            response = self._session.put(url, headers=headers, data=content, timeout=120)
+            logger.debug(f"Response status: {response.status_code}")
+            self._handle_response(response)

-
-
+            logger.info(f"Successfully uploaded '{file_name}' to '{folder_path}'")
+            return response.json()
+        except requests.exceptions.RequestException as e:
+            logger.error(f"Network error uploading file: {type(e).__name__}: {e}", exc_info=True)
+            raise

    def delete_file(self, file_path: str) -> None:
        """
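Stripped of logging and error handling, the upload path is a single PUT of raw bytes. A bare-bones sketch of the same request, with raise_for_status standing in for the package's _handle_response:

import requests


def upload(session: requests.Session, endpoint: str, site_id: str, drive_id: str,
           encoded_path: str, content: bytes, token: str) -> dict:
    """Minimal re-statement of upload_file: PUT raw bytes to /root:/{path}:/content."""
    url = f"{endpoint}/sites/{site_id}/drives/{drive_id}/root:/{encoded_path}:/content"
    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/octet-stream",  # raw bytes, as in the hunk
    }
    response = session.put(url, headers=headers, data=content, timeout=120)
    response.raise_for_status()
    return response.json()  # Graph returns the created/updated driveItem metadata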
@@ -266,16 +461,23 @@ class GraphAPIClient:
        Args:
            file_path: Relative path to the file
        """
+        logger.info(f"Deleting file '{file_path}'")
        site_id = self._get_site_id()
        drive_id = self._get_drive_id()

        encoded_path = quote(file_path)
        url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root:/{encoded_path}"

-
-
+        logger.info(f"Deleting from: {url}")
+        try:
+            response = self._session.delete(url, headers=self._get_headers(), timeout=30)
+            logger.debug(f"Response status: {response.status_code}")
+            self._handle_response(response)

-
+            logger.info(f"Successfully deleted '{file_path}'")
+        except requests.exceptions.RequestException as e:
+            logger.error(f"Network error deleting file: {type(e).__name__}: {e}", exc_info=True)
+            raise

    def create_folder(self, parent_path: str, folder_name: str) -> Dict[str, Any]:
        """
@@ -288,6 +490,7 @@ class GraphAPIClient:
        Returns:
            Folder metadata
        """
+        logger.info(f"Creating folder '{folder_name}' in '{parent_path}'")
        site_id = self._get_site_id()
        drive_id = self._get_drive_id()

@@ -297,17 +500,23 @@ class GraphAPIClient:
        else:
            url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root/children"

+        logger.info(f"Creating folder at: {url}")
        payload = {
            "name": folder_name,
            "folder": {},
            "@microsoft.graph.conflictBehavior": "fail"
        }

-
-
+        try:
+            response = self._session.post(url, headers=self._get_headers(), json=payload, timeout=30)
+            logger.debug(f"Response status: {response.status_code}")
+            self._handle_response(response)

-
-
+            logger.info(f"Successfully created folder '{folder_name}' in '{parent_path}'")
+            return response.json()
+        except requests.exceptions.RequestException as e:
+            logger.error(f"Network error creating folder: {type(e).__name__}: {e}", exc_info=True)
+            raise

    def delete_folder(self, folder_path: str) -> None:
        """
@@ -316,13 +525,20 @@ class GraphAPIClient:
        Args:
            folder_path: Relative path to the folder
        """
+        logger.info(f"Deleting folder '{folder_path}'")
        site_id = self._get_site_id()
        drive_id = self._get_drive_id()

        encoded_path = quote(folder_path)
        url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root:/{encoded_path}"

-
-
-
-
+        logger.info(f"Deleting folder from: {url}")
+        try:
+            response = self._session.delete(url, headers=self._get_headers(), timeout=30)
+            logger.debug(f"Response status: {response.status_code}")
+            self._handle_response(response)
+
+            logger.info(f"Successfully deleted folder '{folder_path}'")
+        except requests.exceptions.RequestException as e:
+            logger.error(f"Network error deleting folder: {type(e).__name__}: {e}", exc_info=True)
+            raise
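Across these hunks, _diagnose_connectivity boils down to three ordered probes (DNS, TCP, TLS) plus a plain GET. A trimmed standalone version for reproducing the same checks outside the package:

import socket
import ssl


def probe(hostname: str, port: int = 443) -> None:
    """DNS, TCP and TLS checks in the same order as _diagnose_connectivity."""
    for family, *_, sockaddr in socket.getaddrinfo(hostname, port, socket.AF_UNSPEC, socket.SOCK_STREAM):
        kind = "IPv4" if family == socket.AF_INET else "IPv6"
        print(f"[DNS] {hostname} -> {sockaddr[0]} ({kind})")

    with socket.create_connection((hostname, port), timeout=10) as sock:
        print(f"[TCP] connected to {sock.getpeername()}")
        context = ssl.create_default_context()
        with context.wrap_socket(sock, server_hostname=hostname) as ssock:
            cipher = ssock.cipher()
            print(f"[TLS] {ssock.version()}, cipher {cipher[0] if cipher else 'n/a'}")


probe("graph.microsoft.com")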
mcp_sharepoint_us-2.0.15.dist-info/RECORD
ADDED
@@ -0,0 +1,10 @@
+mcp_sharepoint/__init__.py,sha256=sSJtlX91mBQ4fM12R8XK7Vrkkr3YPJqriE8LZP157vM,20969
+mcp_sharepoint/__main__.py,sha256=4iVDdDZx4rQ4Zo-x0RaCrT-NKeGObIz_ks3YF8di2nA,132
+mcp_sharepoint/auth.py,sha256=fwOCsg1pv0cN26hNlsHhJhGckeDkJCiXZrMmiBn9jf4,18156
+mcp_sharepoint/graph_api.py,sha256=csVdyg0W9-WnDjP-rACPoym1LF8SLjpJFjolaYaCwpU,21181
+mcp_sharepoint_us-2.0.15.dist-info/licenses/LICENSE,sha256=SRM8juGH4GjIqnl5rrp-P-S5mW5h2mINOPx5-wOZG6s,1112
+mcp_sharepoint_us-2.0.15.dist-info/METADATA,sha256=R3i9IT0YzEyfNIKZ4jmFe1q6WSA_ybHoN3-Oe-TdeTQ,11402
+mcp_sharepoint_us-2.0.15.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+mcp_sharepoint_us-2.0.15.dist-info/entry_points.txt,sha256=UZOa_7OLI41rmsErbvnSz9RahPMGQVcqZUFMphOcjbY,57
+mcp_sharepoint_us-2.0.15.dist-info/top_level.txt,sha256=R6mRoWe61lz4kUSKGV6S2XVbE7825xfC_J-ouZIYpuo,15
+mcp_sharepoint_us-2.0.15.dist-info/RECORD,,
mcp_sharepoint_us-2.0.13.dist-info/RECORD
REMOVED
@@ -1,10 +0,0 @@
-mcp_sharepoint/__init__.py,sha256=318-XBsPcTt2EH_B9j-ym_GFs91_0Kb-0WiLKfEV-L0,19744
-mcp_sharepoint/__main__.py,sha256=4iVDdDZx4rQ4Zo-x0RaCrT-NKeGObIz_ks3YF8di2nA,132
-mcp_sharepoint/auth.py,sha256=03p8ylIkrlNoVuVSJ96nnqUd8n7QnwWXWXkkV7y01AU,17598
-mcp_sharepoint/graph_api.py,sha256=y3Q5OHkitAsp7QN1PFIf_sh7g5DShLEfWUlHzIHeS24,10571
-mcp_sharepoint_us-2.0.13.dist-info/licenses/LICENSE,sha256=SRM8juGH4GjIqnl5rrp-P-S5mW5h2mINOPx5-wOZG6s,1112
-mcp_sharepoint_us-2.0.13.dist-info/METADATA,sha256=YNzf3j597kLjomxk9NDhQSHVTc41XGWx-vzVnnFWQHg,11402
-mcp_sharepoint_us-2.0.13.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
-mcp_sharepoint_us-2.0.13.dist-info/entry_points.txt,sha256=UZOa_7OLI41rmsErbvnSz9RahPMGQVcqZUFMphOcjbY,57
-mcp_sharepoint_us-2.0.13.dist-info/top_level.txt,sha256=R6mRoWe61lz4kUSKGV6S2XVbE7825xfC_J-ouZIYpuo,15
-mcp_sharepoint_us-2.0.13.dist-info/RECORD,,
{mcp_sharepoint_us-2.0.13.dist-info → mcp_sharepoint_us-2.0.15.dist-info}/WHEEL
File without changes
{mcp_sharepoint_us-2.0.13.dist-info → mcp_sharepoint_us-2.0.15.dist-info}/entry_points.txt
File without changes
{mcp_sharepoint_us-2.0.13.dist-info → mcp_sharepoint_us-2.0.15.dist-info}/licenses/LICENSE
File without changes
{mcp_sharepoint_us-2.0.13.dist-info → mcp_sharepoint_us-2.0.15.dist-info}/top_level.txt
File without changes