mcp-sharepoint-us 2.0.13__tar.gz → 2.0.15__tar.gz

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release.


This version of mcp-sharepoint-us might be problematic. Click here for more details.

@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: mcp-sharepoint-us
3
- Version: 2.0.13
3
+ Version: 2.0.15
4
4
  Summary: SharePoint MCP Server with Microsoft Graph API
5
5
  License: MIT
6
6
  Project-URL: Homepage, https://github.com/mdev26/mcp-sharepoint-us
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "mcp-sharepoint-us"
7
- version = "2.0.13"
7
+ version = "2.0.15"
8
8
  description = "SharePoint MCP Server with Microsoft Graph API"
9
9
  readme = "README.md"
10
10
  requires-python = ">=3.10"
@@ -35,6 +35,7 @@ def ensure_context(func):
35
35
  global graph_client, authenticator
36
36
  if graph_client is None:
37
37
  try:
38
+ logger.info("Initializing Graph API client...")
38
39
  from .auth import SharePointAuthenticator
39
40
 
40
41
  # Get credentials
@@ -44,6 +45,11 @@ def ensure_context(func):
44
45
  tenant_id = os.getenv("SHP_TENANT_ID")
45
46
  cloud = "government" if ".sharepoint.us" in site_url else "commercial"
46
47
 
48
+ logger.info(f"Site URL: {site_url}")
49
+ logger.info(f"Tenant ID: {tenant_id}")
50
+ logger.info(f"Client ID: {client_id}")
51
+ logger.info(f"Cloud: {cloud}")
52
+
47
53
  # Create shared authenticator
48
54
  authenticator = SharePointAuthenticator(
49
55
  site_url=site_url,
@@ -52,11 +58,15 @@ def ensure_context(func):
52
58
  tenant_id=tenant_id,
53
59
  cloud=cloud
54
60
  )
61
+ logger.info("Authenticator created successfully")
55
62
 
56
63
  # Create Graph API client with direct token access
57
64
  def get_token():
58
65
  """Get access token for Graph API"""
59
- return authenticator.get_access_token()
66
+ logger.debug("Token callback invoked")
67
+ token = authenticator.get_access_token()
68
+ logger.debug(f"Token acquired (length: {len(token)})")
69
+ return token
60
70
 
61
71
  graph_client = GraphAPIClient(
62
72
  site_url=site_url,
@@ -65,7 +75,7 @@ def ensure_context(func):
65
75
  logger.info("Graph API client initialized successfully")
66
76
 
67
77
  except Exception as e:
68
- logger.error(f"Failed to initialize Graph API client: {e}")
78
+ logger.error(f"Failed to initialize Graph API client: {e}", exc_info=True)
69
79
  raise RuntimeError(
70
80
  f"Graph API authentication failed: {e}. "
71
81
  "Please check your environment variables and ensure:\n"
@@ -321,28 +331,42 @@ async def call_tool(name: str, arguments: dict) -> list[TextContent]:
321
331
 
322
332
 
323
333
  async def test_connection() -> list[TextContent]:
324
- """Test SharePoint connection"""
334
+ """Test SharePoint connection using Microsoft Graph API"""
325
335
  try:
326
- web = ctx.web.get().execute_query()
327
- auth_method = os.getenv("SHP_AUTH_METHOD", "msal")
328
-
336
+ logger.info("Testing Graph API connection...")
337
+
338
+ # Try to get site ID and drive ID
339
+ site_id = await asyncio.to_thread(graph_client._get_site_id)
340
+ drive_id = await asyncio.to_thread(graph_client._get_drive_id)
341
+
342
+ auth_method = "msal (Microsoft Graph API)"
343
+
344
+ logger.info(f"✓ Connection test successful - Site ID: {site_id}, Drive ID: {drive_id}")
345
+
329
346
  return [TextContent(
330
347
  type="text",
331
- text=f"✓ Successfully connected to SharePoint!\n\n"
332
- f"Site Title: {web.title}\n"
333
- f"Site URL: {web.url}\n"
334
- f"Authentication Method: {auth_method.upper()}\n"
348
+ text=f"✓ Successfully connected to SharePoint via Microsoft Graph API!\n\n"
349
+ f"Site URL: {graph_client.site_url}\n"
350
+ f"Graph Endpoint: {graph_client.graph_endpoint}\n"
351
+ f"Site ID: {site_id}\n"
352
+ f"Drive ID: {drive_id}\n"
353
+ f"Authentication Method: {auth_method}\n"
335
354
  f"Tenant ID: {os.getenv('SHP_TENANT_ID')}\n\n"
336
- f"Connection is working correctly with modern Azure AD authentication."
355
+ f"Connection is working correctly with Microsoft Graph API."
337
356
  )]
338
357
  except Exception as e:
358
+ logger.error(f"✗ Connection test failed: {str(e)}", exc_info=True)
339
359
  return [TextContent(
340
360
  type="text",
341
361
  text=f"✗ Connection failed: {str(e)}\n\n"
342
362
  f"This usually means:\n"
343
363
  f"1. Your credentials are incorrect\n"
344
- f"2. Your app doesn't have proper SharePoint permissions\n"
345
- f"3. You're using legacy auth on a new tenant (set SHP_AUTH_METHOD=msal)"
364
+ f"2. Your app doesn't have proper Microsoft Graph permissions\n"
365
+ f"3. Network connectivity issues\n"
366
+ f"4. Azure AD app registration is missing required permissions:\n"
367
+ f" - Sites.Read.All\n"
368
+ f" - Files.ReadWrite.All\n\n"
369
+ f"Check the logs for more details."
346
370
  )]
347
371
 
348
372
 
@@ -226,16 +226,22 @@ class SharePointAuthenticator:
226
226
 
227
227
  now = int(time.time())
228
228
  if self._access_token and now < (self._access_token_exp - 60):
229
+ logger.debug("Using cached access token")
229
230
  return self._access_token
230
231
 
232
+ logger.info(f"Acquiring new access token from {self._authority_url}")
233
+ logger.debug(f"Scopes: {self._scopes}")
234
+
231
235
  last_err = None
232
236
  for attempt in range(1, 6): # 5 attempts
233
237
  try:
238
+ logger.debug(f"Token acquisition attempt {attempt}/5")
234
239
  result = self._msal_app.acquire_token_for_client(scopes=self._scopes)
235
240
 
236
241
  if "access_token" not in result:
237
242
  error_desc = result.get("error_description", "Unknown error")
238
243
  error = result.get("error", "Unknown")
244
+ logger.error(f"Token acquisition failed: {error} - {error_desc}")
239
245
  raise ValueError(
240
246
  f"Failed to acquire token: {error} - {error_desc}\n"
241
247
  f"Authority: {self._authority_url}\n"
@@ -249,11 +255,13 @@ class SharePointAuthenticator:
249
255
  self._access_token = token
250
256
  self._access_token_exp = int(time.time()) + expires_in
251
257
 
252
- logger.info(f"Successfully acquired Graph API token")
258
+ logger.info(f"Successfully acquired Graph API token (expires in {expires_in}s)")
259
+ logger.debug(f"Token length: {len(token)}, starts with: {token[:20]}...")
253
260
  return token
254
261
 
255
262
  except Exception as e:
256
263
  last_err = e
264
+ logger.error(f"Token acquisition attempt {attempt}/5 failed: {type(e).__name__}: {e}")
257
265
  # Exponential backoff with jitter
258
266
  sleep_s = min(8.0, (2 ** (attempt - 1)) * 0.5) + random.random() * 0.25
259
267
  logger.warning(
@@ -0,0 +1,544 @@
1
+ """
2
+ Microsoft Graph API implementation for SharePoint operations.
3
+ Primary API for all SharePoint operations in Azure Government Cloud.
4
+ """
5
+ import os
6
+ import logging
7
+ import asyncio
8
+ import socket
9
+ import ssl
10
+ from typing import Optional, Dict, Any, List
11
+ from urllib.parse import urlparse, quote
12
+ import requests
13
+ from requests.adapters import HTTPAdapter
14
+ from urllib3.util.retry import Retry
15
+
16
+ logger = logging.getLogger(__name__)
17
+
18
+
19
+ class GraphAPIClient:
20
+ """
21
+ Microsoft Graph API client for SharePoint operations.
22
+ Primary client for all SharePoint operations, especially in Azure Government Cloud
23
+ where SharePoint REST API may not support app-only authentication.
24
+ """
25
+
26
+ def __init__(self, site_url: str, token_callback):
27
+ """
28
+ Initialize Graph API client.
29
+
30
+ Args:
31
+ site_url: SharePoint site URL (e.g., https://tenant.sharepoint.us/sites/SiteName)
32
+ token_callback: Function that returns access token
33
+ """
34
+ self.site_url = site_url.rstrip("/")
35
+ self.token_callback = token_callback
36
+ self._site_id = None
37
+ self._drive_id = None # Cache drive ID to avoid repeated API calls
38
+
39
+ # Determine Graph API endpoint based on cloud
40
+ if ".sharepoint.us" in site_url:
41
+ self.graph_endpoint = "https://graph.microsoft.us/v1.0"
42
+ logger.info("Using Microsoft Graph US Government endpoint")
43
+ else:
44
+ self.graph_endpoint = "https://graph.microsoft.com/v1.0"
45
+ logger.info("Using Microsoft Graph Commercial endpoint")
46
+
47
+ # Create a requests session with retry logic
48
+ self._session = self._create_session()
49
+
50
+ def _create_session(self) -> requests.Session:
51
+ """
52
+ Create a requests session with retry logic and connection pooling.
53
+ """
54
+ session = requests.Session()
55
+
56
+ # Configure retry strategy for transient errors
57
+ retry_strategy = Retry(
58
+ total=3, # Total number of retries
59
+ backoff_factor=1, # Wait 1, 2, 4 seconds between retries
60
+ status_forcelist=[429, 500, 502, 503, 504], # Retry on these HTTP status codes
61
+ allowed_methods=["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE", "POST"]
62
+ )
63
+
64
+ adapter = HTTPAdapter(max_retries=retry_strategy, pool_connections=10, pool_maxsize=10)
65
+ session.mount("http://", adapter)
66
+ session.mount("https://", adapter)
67
+
68
+ logger.debug("Created requests session with retry logic and connection pooling")
69
+ return session
70
+
71
+ def _diagnose_connectivity(self, url: str) -> None:
72
+ """
73
+ Perform detailed connectivity diagnostics for a URL.
74
+
75
+ Args:
76
+ url: The URL to diagnose
77
+ """
78
+ parsed = urlparse(url)
79
+ hostname = parsed.hostname
80
+ port = parsed.port or (443 if parsed.scheme == "https" else 80)
81
+
82
+ logger.info(f"=== CONNECTIVITY DIAGNOSTICS for {hostname} ===")
83
+
84
+ # 1. DNS Resolution
85
+ try:
86
+ logger.info(f"[DNS] Resolving {hostname}...")
87
+ ip_addresses = socket.getaddrinfo(hostname, port, socket.AF_UNSPEC, socket.SOCK_STREAM)
88
+ for family, socktype, proto, canonname, sockaddr in ip_addresses:
89
+ family_name = "IPv4" if family == socket.AF_INET else "IPv6"
90
+ logger.info(f"[DNS] ✓ Resolved to {sockaddr[0]} ({family_name})")
91
+ except socket.gaierror as e:
92
+ logger.error(f"[DNS] ✗ DNS resolution failed: {e}")
93
+ return
94
+ except Exception as e:
95
+ logger.error(f"[DNS] ✗ Unexpected error during DNS resolution: {e}")
96
+ return
97
+
98
+ # 2. TCP Connection Test
99
+ try:
100
+ logger.info(f"[TCP] Testing TCP connection to {hostname}:{port}...")
101
+ with socket.create_connection((hostname, port), timeout=10) as sock:
102
+ logger.info(f"[TCP] ✓ TCP connection successful")
103
+ peer_name = sock.getpeername()
104
+ logger.info(f"[TCP] Connected to {peer_name[0]}:{peer_name[1]}")
105
+
106
+ # 3. SSL/TLS Test (if HTTPS)
107
+ if parsed.scheme == "https":
108
+ logger.info(f"[TLS] Testing TLS handshake...")
109
+ context = ssl.create_default_context()
110
+ try:
111
+ with context.wrap_socket(sock, server_hostname=hostname) as ssock:
112
+ logger.info(f"[TLS] ✓ TLS handshake successful")
113
+ logger.info(f"[TLS] Protocol: {ssock.version()}")
114
+ cipher = ssock.cipher()
115
+ if cipher:
116
+ logger.info(f"[TLS] Cipher: {cipher[0]} (bits: {cipher[2]})")
117
+
118
+ # Get certificate info
119
+ cert = ssock.getpeercert()
120
+ if cert:
121
+ subject = dict(x[0] for x in cert['subject'])
122
+ logger.info(f"[TLS] Certificate subject: {subject.get('commonName', 'N/A')}")
123
+ logger.info(f"[TLS] Certificate issuer: {dict(x[0] for x in cert['issuer']).get('organizationName', 'N/A')}")
124
+ except ssl.SSLError as e:
125
+ logger.error(f"[TLS] ✗ TLS handshake failed: {e}")
126
+ return
127
+ except socket.timeout:
128
+ logger.error(f"[TCP] ✗ Connection timeout after 10 seconds")
129
+ return
130
+ except ConnectionRefusedError:
131
+ logger.error(f"[TCP] ✗ Connection refused by server")
132
+ return
133
+ except ConnectionResetError:
134
+ logger.error(f"[TCP] ✗ Connection reset by peer during TCP handshake")
135
+ return
136
+ except Exception as e:
137
+ logger.error(f"[TCP] ✗ Connection failed: {type(e).__name__}: {e}")
138
+ return
139
+
140
+ # 4. HTTP Basic Connectivity Test
141
+ try:
142
+ logger.info(f"[HTTP] Testing basic HTTP GET to {parsed.scheme}://{hostname}/")
143
+ test_url = f"{parsed.scheme}://{hostname}/"
144
+ response = self._session.get(test_url, timeout=10)
145
+ logger.info(f"[HTTP] ✓ Basic HTTP request successful (status: {response.status_code})")
146
+ except requests.exceptions.RequestException as e:
147
+ logger.error(f"[HTTP] ✗ Basic HTTP request failed: {type(e).__name__}: {e}")
148
+
149
+ logger.info(f"=== END DIAGNOSTICS ===\n")
150
+
151
+ def _get_headers(self) -> Dict[str, str]:
152
+ """Get authorization headers with access token."""
153
+ logger.debug("Getting authorization headers...")
154
+ token_obj = self.token_callback()
155
+ # Handle both TokenResponse objects and plain strings
156
+ if hasattr(token_obj, 'accessToken'):
157
+ token = token_obj.accessToken
158
+ else:
159
+ token = str(token_obj)
160
+
161
+ logger.debug(f"Token acquired for headers (length: {len(token)}, starts with: {token[:20]}...)")
162
+
163
+ return {
164
+ "Authorization": f"Bearer {token}",
165
+ "Accept": "application/json",
166
+ }
167
+
168
+ def _handle_response(self, response: requests.Response) -> None:
169
+ """
170
+ Handle Graph API response and raise detailed errors if needed.
171
+
172
+ Graph API returns errors in format:
173
+ {
174
+ "error": {
175
+ "code": "itemNotFound",
176
+ "message": "The resource could not be found."
177
+ }
178
+ }
179
+ """
180
+ if response.ok:
181
+ return
182
+
183
+ try:
184
+ error_data = response.json()
185
+ if "error" in error_data:
186
+ error = error_data["error"]
187
+ code = error.get("code", "Unknown")
188
+ message = error.get("message", "Unknown error")
189
+ raise requests.HTTPError(
190
+ f"Graph API error [{code}]: {message}",
191
+ response=response
192
+ )
193
+ except (ValueError, KeyError):
194
+ # If we can't parse the error, fall back to standard handling
195
+ pass
196
+
197
+ self._handle_response(response)
198
+
199
+ def _get_site_id(self) -> str:
200
+ """
201
+ Get the site ID from the site URL.
202
+ Caches the result for reuse.
203
+ """
204
+ if self._site_id:
205
+ logger.debug(f"Using cached site ID: {self._site_id}")
206
+ return self._site_id
207
+
208
+ parsed = urlparse(self.site_url)
209
+ hostname = parsed.netloc
210
+ path = parsed.path.strip("/")
211
+
212
+ # For root site: https://tenant.sharepoint.us
213
+ if not path or path == "sites":
214
+ url = f"{self.graph_endpoint}/sites/{hostname}"
215
+ # For subsite: https://tenant.sharepoint.us/sites/SiteName
216
+ else:
217
+ url = f"{self.graph_endpoint}/sites/{hostname}:/{path}"
218
+
219
+ logger.info(f"Fetching site ID from: {url}")
220
+
221
+ # Get headers and log sanitized version
222
+ headers = self._get_headers()
223
+ sanitized_headers = {k: (v[:20] + "..." if k == "Authorization" else v) for k, v in headers.items()}
224
+ logger.debug(f"Request headers: {sanitized_headers}")
225
+
226
+ try:
227
+ # Make the request
228
+ logger.debug(f"Sending GET request to: {url}")
229
+ logger.debug(f"Timeout: 30 seconds")
230
+
231
+ response = self._session.get(url, headers=headers, timeout=30)
232
+
233
+ logger.debug(f"Response received - Status: {response.status_code}")
234
+ logger.debug(f"Response headers: {dict(response.headers)}")
235
+ logger.debug(f"Response encoding: {response.encoding}")
236
+
237
+ self._handle_response(response)
238
+
239
+ self._site_id = response.json()["id"]
240
+ logger.info(f"✓ Retrieved site ID: {self._site_id}")
241
+ return self._site_id
242
+
243
+ except requests.exceptions.ConnectionError as e:
244
+ logger.error(f"✗ ConnectionError getting site ID: {e}", exc_info=True)
245
+ logger.error("This indicates the connection was established but then dropped.")
246
+ logger.error("Running comprehensive diagnostics...")
247
+
248
+ # Run diagnostics to help identify the issue
249
+ self._diagnose_connectivity(url)
250
+
251
+ logger.error("\nPossible causes:")
252
+ logger.error("1. Firewall is blocking graph.microsoft.us")
253
+ logger.error("2. Proxy configuration needed")
254
+ logger.error("3. SSL/TLS version mismatch")
255
+ logger.error("4. Network instability")
256
+ raise
257
+
258
+ except requests.exceptions.Timeout:
259
+ logger.error(f"✗ Request timeout after 30 seconds", exc_info=True)
260
+ raise
261
+
262
+ except requests.exceptions.RequestException as e:
263
+ logger.error(f"✗ Network error getting site ID: {type(e).__name__}: {e}", exc_info=True)
264
+ raise
265
+
266
+ def _get_drive_id(self) -> str:
267
+ """
268
+ Get the default document library drive ID.
269
+ Caches the result for reuse.
270
+ """
271
+ if self._drive_id:
272
+ logger.debug(f"Using cached drive ID: {self._drive_id}")
273
+ return self._drive_id
274
+
275
+ site_id = self._get_site_id()
276
+ url = f"{self.graph_endpoint}/sites/{site_id}/drive"
277
+
278
+ logger.info(f"Fetching drive ID from: {url}")
279
+
280
+ try:
281
+ logger.debug(f"Sending GET request to: {url}")
282
+ response = self._session.get(url, headers=self._get_headers(), timeout=30)
283
+
284
+ logger.debug(f"Response received - Status: {response.status_code}")
285
+ self._handle_response(response)
286
+
287
+ self._drive_id = response.json()["id"]
288
+ logger.info(f"✓ Retrieved drive ID: {self._drive_id}")
289
+ return self._drive_id
290
+
291
+ except requests.exceptions.ConnectionError as e:
292
+ logger.error(f"✗ ConnectionError getting drive ID: {e}", exc_info=True)
293
+ raise
294
+
295
+ except requests.exceptions.RequestException as e:
296
+ logger.error(f"✗ Network error getting drive ID: {type(e).__name__}: {e}", exc_info=True)
297
+ raise
298
+
299
+ def list_folders(self, folder_path: str = "") -> List[Dict[str, Any]]:
300
+ """
301
+ List folders in the specified path.
302
+
303
+ Args:
304
+ folder_path: Relative path from document library root
305
+
306
+ Returns:
307
+ List of folder objects with name, id, webUrl
308
+ """
309
+ logger.info(f"Listing folders in '{folder_path}'")
310
+ site_id = self._get_site_id()
311
+ drive_id = self._get_drive_id()
312
+
313
+ if folder_path:
314
+ # URL encode the path
315
+ encoded_path = quote(folder_path)
316
+ url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root:/{encoded_path}:/children"
317
+ else:
318
+ url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root/children"
319
+
320
+ logger.info(f"Fetching folders from: {url}")
321
+ try:
322
+ response = self._session.get(url, headers=self._get_headers(), timeout=30)
323
+ logger.debug(f"Response status: {response.status_code}")
324
+ self._handle_response(response)
325
+
326
+ items = response.json().get("value", [])
327
+ # Filter to only folders
328
+ folders = [
329
+ {
330
+ "name": item["name"],
331
+ "id": item["id"],
332
+ "webUrl": item.get("webUrl", ""),
333
+ }
334
+ for item in items
335
+ if "folder" in item
336
+ ]
337
+
338
+ logger.info(f"Found {len(folders)} folders in '{folder_path}'")
339
+ return folders
340
+ except requests.exceptions.RequestException as e:
341
+ logger.error(f"Network error listing folders: {type(e).__name__}: {e}", exc_info=True)
342
+ raise
343
+
344
+ def list_documents(self, folder_path: str = "") -> List[Dict[str, Any]]:
345
+ """
346
+ List documents in the specified folder.
347
+
348
+ Args:
349
+ folder_path: Relative path from document library root
350
+
351
+ Returns:
352
+ List of file objects with name, id, size, webUrl
353
+ """
354
+ logger.info(f"Listing documents in '{folder_path}'")
355
+ site_id = self._get_site_id()
356
+ drive_id = self._get_drive_id()
357
+
358
+ if folder_path:
359
+ encoded_path = quote(folder_path)
360
+ url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root:/{encoded_path}:/children"
361
+ else:
362
+ url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root/children"
363
+
364
+ logger.info(f"Fetching documents from: {url}")
365
+ try:
366
+ response = self._session.get(url, headers=self._get_headers(), timeout=30)
367
+ logger.debug(f"Response status: {response.status_code}")
368
+ self._handle_response(response)
369
+
370
+ items = response.json().get("value", [])
371
+ # Filter to only files
372
+ files = [
373
+ {
374
+ "name": item["name"],
375
+ "id": item["id"],
376
+ "size": item.get("size", 0),
377
+ "webUrl": item.get("webUrl", ""),
378
+ }
379
+ for item in items
380
+ if "file" in item
381
+ ]
382
+
383
+ logger.info(f"Found {len(files)} files in '{folder_path}'")
384
+ return files
385
+ except requests.exceptions.RequestException as e:
386
+ logger.error(f"Network error listing documents: {type(e).__name__}: {e}", exc_info=True)
387
+ raise
388
+
389
+ def get_file_content(self, file_path: str) -> bytes:
390
+ """
391
+ Get the content of a file.
392
+
393
+ Args:
394
+ file_path: Relative path to the file
395
+
396
+ Returns:
397
+ File content as bytes
398
+ """
399
+ logger.info(f"Getting content for file '{file_path}'")
400
+ site_id = self._get_site_id()
401
+ drive_id = self._get_drive_id()
402
+
403
+ encoded_path = quote(file_path)
404
+ url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root:/{encoded_path}:/content"
405
+
406
+ logger.info(f"Fetching file content from: {url}")
407
+ try:
408
+ response = self._session.get(url, headers=self._get_headers(), timeout=60)
409
+ logger.debug(f"Response status: {response.status_code}")
410
+ self._handle_response(response)
411
+
412
+ logger.info(f"Retrieved content for '{file_path}' ({len(response.content)} bytes)")
413
+ return response.content
414
+ except requests.exceptions.RequestException as e:
415
+ logger.error(f"Network error getting file content: {type(e).__name__}: {e}", exc_info=True)
416
+ raise
417
+
418
+ def upload_file(self, folder_path: str, file_name: str, content: bytes) -> Dict[str, Any]:
419
+ """
420
+ Upload a file to SharePoint.
421
+
422
+ Args:
423
+ folder_path: Destination folder path
424
+ file_name: Name of the file
425
+ content: File content as bytes
426
+
427
+ Returns:
428
+ File metadata
429
+ """
430
+ logger.info(f"Uploading file '{file_name}' to '{folder_path}' ({len(content)} bytes)")
431
+ site_id = self._get_site_id()
432
+ drive_id = self._get_drive_id()
433
+
434
+ if folder_path:
435
+ full_path = f"{folder_path}/{file_name}"
436
+ else:
437
+ full_path = file_name
438
+
439
+ encoded_path = quote(full_path)
440
+ url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root:/{encoded_path}:/content"
441
+
442
+ logger.info(f"Uploading to: {url}")
443
+ headers = self._get_headers()
444
+ headers["Content-Type"] = "application/octet-stream"
445
+
446
+ try:
447
+ response = self._session.put(url, headers=headers, data=content, timeout=120)
448
+ logger.debug(f"Response status: {response.status_code}")
449
+ self._handle_response(response)
450
+
451
+ logger.info(f"Successfully uploaded '{file_name}' to '{folder_path}'")
452
+ return response.json()
453
+ except requests.exceptions.RequestException as e:
454
+ logger.error(f"Network error uploading file: {type(e).__name__}: {e}", exc_info=True)
455
+ raise
456
+
457
+ def delete_file(self, file_path: str) -> None:
458
+ """
459
+ Delete a file.
460
+
461
+ Args:
462
+ file_path: Relative path to the file
463
+ """
464
+ logger.info(f"Deleting file '{file_path}'")
465
+ site_id = self._get_site_id()
466
+ drive_id = self._get_drive_id()
467
+
468
+ encoded_path = quote(file_path)
469
+ url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root:/{encoded_path}"
470
+
471
+ logger.info(f"Deleting from: {url}")
472
+ try:
473
+ response = self._session.delete(url, headers=self._get_headers(), timeout=30)
474
+ logger.debug(f"Response status: {response.status_code}")
475
+ self._handle_response(response)
476
+
477
+ logger.info(f"Successfully deleted '{file_path}'")
478
+ except requests.exceptions.RequestException as e:
479
+ logger.error(f"Network error deleting file: {type(e).__name__}: {e}", exc_info=True)
480
+ raise
481
+
482
+ def create_folder(self, parent_path: str, folder_name: str) -> Dict[str, Any]:
483
+ """
484
+ Create a new folder.
485
+
486
+ Args:
487
+ parent_path: Path to parent folder
488
+ folder_name: Name of the new folder
489
+
490
+ Returns:
491
+ Folder metadata
492
+ """
493
+ logger.info(f"Creating folder '{folder_name}' in '{parent_path}'")
494
+ site_id = self._get_site_id()
495
+ drive_id = self._get_drive_id()
496
+
497
+ if parent_path:
498
+ encoded_path = quote(parent_path)
499
+ url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root:/{encoded_path}:/children"
500
+ else:
501
+ url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root/children"
502
+
503
+ logger.info(f"Creating folder at: {url}")
504
+ payload = {
505
+ "name": folder_name,
506
+ "folder": {},
507
+ "@microsoft.graph.conflictBehavior": "fail"
508
+ }
509
+
510
+ try:
511
+ response = self._session.post(url, headers=self._get_headers(), json=payload, timeout=30)
512
+ logger.debug(f"Response status: {response.status_code}")
513
+ self._handle_response(response)
514
+
515
+ logger.info(f"Successfully created folder '{folder_name}' in '{parent_path}'")
516
+ return response.json()
517
+ except requests.exceptions.RequestException as e:
518
+ logger.error(f"Network error creating folder: {type(e).__name__}: {e}", exc_info=True)
519
+ raise
520
+
521
+ def delete_folder(self, folder_path: str) -> None:
522
+ """
523
+ Delete a folder.
524
+
525
+ Args:
526
+ folder_path: Relative path to the folder
527
+ """
528
+ logger.info(f"Deleting folder '{folder_path}'")
529
+ site_id = self._get_site_id()
530
+ drive_id = self._get_drive_id()
531
+
532
+ encoded_path = quote(folder_path)
533
+ url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root:/{encoded_path}"
534
+
535
+ logger.info(f"Deleting folder from: {url}")
536
+ try:
537
+ response = self._session.delete(url, headers=self._get_headers(), timeout=30)
538
+ logger.debug(f"Response status: {response.status_code}")
539
+ self._handle_response(response)
540
+
541
+ logger.info(f"Successfully deleted folder '{folder_path}'")
542
+ except requests.exceptions.RequestException as e:
543
+ logger.error(f"Network error deleting folder: {type(e).__name__}: {e}", exc_info=True)
544
+ raise
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: mcp-sharepoint-us
3
- Version: 2.0.13
3
+ Version: 2.0.15
4
4
  Summary: SharePoint MCP Server with Microsoft Graph API
5
5
  License: MIT
6
6
  Project-URL: Homepage, https://github.com/mdev26/mcp-sharepoint-us
@@ -1,328 +0,0 @@
1
- """
2
- Microsoft Graph API implementation for SharePoint operations.
3
- Primary API for all SharePoint operations in Azure Government Cloud.
4
- """
5
- import os
6
- import logging
7
- import asyncio
8
- from typing import Optional, Dict, Any, List
9
- from urllib.parse import urlparse, quote
10
- import requests
11
-
12
- logger = logging.getLogger(__name__)
13
-
14
-
15
- class GraphAPIClient:
16
- """
17
- Microsoft Graph API client for SharePoint operations.
18
- Primary client for all SharePoint operations, especially in Azure Government Cloud
19
- where SharePoint REST API may not support app-only authentication.
20
- """
21
-
22
- def __init__(self, site_url: str, token_callback):
23
- """
24
- Initialize Graph API client.
25
-
26
- Args:
27
- site_url: SharePoint site URL (e.g., https://tenant.sharepoint.us/sites/SiteName)
28
- token_callback: Function that returns access token
29
- """
30
- self.site_url = site_url.rstrip("/")
31
- self.token_callback = token_callback
32
- self._site_id = None
33
- self._drive_id = None # Cache drive ID to avoid repeated API calls
34
-
35
- # Determine Graph API endpoint based on cloud
36
- if ".sharepoint.us" in site_url:
37
- self.graph_endpoint = "https://graph.microsoft.us/v1.0"
38
- logger.info("Using Microsoft Graph US Government endpoint")
39
- else:
40
- self.graph_endpoint = "https://graph.microsoft.com/v1.0"
41
- logger.info("Using Microsoft Graph Commercial endpoint")
42
-
43
- def _get_headers(self) -> Dict[str, str]:
44
- """Get authorization headers with access token."""
45
- token_obj = self.token_callback()
46
- # Handle both TokenResponse objects and plain strings
47
- if hasattr(token_obj, 'accessToken'):
48
- token = token_obj.accessToken
49
- else:
50
- token = str(token_obj)
51
-
52
- return {
53
- "Authorization": f"Bearer {token}",
54
- "Accept": "application/json",
55
- }
56
-
57
- def _handle_response(self, response: requests.Response) -> None:
58
- """
59
- Handle Graph API response and raise detailed errors if needed.
60
-
61
- Graph API returns errors in format:
62
- {
63
- "error": {
64
- "code": "itemNotFound",
65
- "message": "The resource could not be found."
66
- }
67
- }
68
- """
69
- if response.ok:
70
- return
71
-
72
- try:
73
- error_data = response.json()
74
- if "error" in error_data:
75
- error = error_data["error"]
76
- code = error.get("code", "Unknown")
77
- message = error.get("message", "Unknown error")
78
- raise requests.HTTPError(
79
- f"Graph API error [{code}]: {message}",
80
- response=response
81
- )
82
- except (ValueError, KeyError):
83
- # If we can't parse the error, fall back to standard handling
84
- pass
85
-
86
- self._handle_response(response)
87
-
88
- def _get_site_id(self) -> str:
89
- """
90
- Get the site ID from the site URL.
91
- Caches the result for reuse.
92
- """
93
- if self._site_id:
94
- return self._site_id
95
-
96
- parsed = urlparse(self.site_url)
97
- hostname = parsed.netloc
98
- path = parsed.path.strip("/")
99
-
100
- # For root site: https://tenant.sharepoint.us
101
- if not path or path == "sites":
102
- url = f"{self.graph_endpoint}/sites/{hostname}"
103
- # For subsite: https://tenant.sharepoint.us/sites/SiteName
104
- else:
105
- url = f"{self.graph_endpoint}/sites/{hostname}:/{path}"
106
-
107
- response = requests.get(url, headers=self._get_headers())
108
- self._handle_response(response)
109
-
110
- self._site_id = response.json()["id"]
111
- logger.info(f"Retrieved site ID: {self._site_id}")
112
- return self._site_id
113
-
114
- def _get_drive_id(self) -> str:
115
- """
116
- Get the default document library drive ID.
117
- Caches the result for reuse.
118
- """
119
- if self._drive_id:
120
- return self._drive_id
121
-
122
- site_id = self._get_site_id()
123
- url = f"{self.graph_endpoint}/sites/{site_id}/drive"
124
-
125
- response = requests.get(url, headers=self._get_headers())
126
- self._handle_response(response)
127
-
128
- self._drive_id = response.json()["id"]
129
- logger.info(f"Retrieved drive ID: {self._drive_id}")
130
- return self._drive_id
131
-
132
- def list_folders(self, folder_path: str = "") -> List[Dict[str, Any]]:
133
- """
134
- List folders in the specified path.
135
-
136
- Args:
137
- folder_path: Relative path from document library root
138
-
139
- Returns:
140
- List of folder objects with name, id, webUrl
141
- """
142
- site_id = self._get_site_id()
143
- drive_id = self._get_drive_id()
144
-
145
- if folder_path:
146
- # URL encode the path
147
- encoded_path = quote(folder_path)
148
- url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root:/{encoded_path}:/children"
149
- else:
150
- url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root/children"
151
-
152
- response = requests.get(url, headers=self._get_headers())
153
- self._handle_response(response)
154
-
155
- items = response.json().get("value", [])
156
- # Filter to only folders
157
- folders = [
158
- {
159
- "name": item["name"],
160
- "id": item["id"],
161
- "webUrl": item.get("webUrl", ""),
162
- }
163
- for item in items
164
- if "folder" in item
165
- ]
166
-
167
- logger.info(f"Found {len(folders)} folders in '{folder_path}'")
168
- return folders
169
-
170
- def list_documents(self, folder_path: str = "") -> List[Dict[str, Any]]:
171
- """
172
- List documents in the specified folder.
173
-
174
- Args:
175
- folder_path: Relative path from document library root
176
-
177
- Returns:
178
- List of file objects with name, id, size, webUrl
179
- """
180
- site_id = self._get_site_id()
181
- drive_id = self._get_drive_id()
182
-
183
- if folder_path:
184
- encoded_path = quote(folder_path)
185
- url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root:/{encoded_path}:/children"
186
- else:
187
- url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root/children"
188
-
189
- response = requests.get(url, headers=self._get_headers())
190
- self._handle_response(response)
191
-
192
- items = response.json().get("value", [])
193
- # Filter to only files
194
- files = [
195
- {
196
- "name": item["name"],
197
- "id": item["id"],
198
- "size": item.get("size", 0),
199
- "webUrl": item.get("webUrl", ""),
200
- }
201
- for item in items
202
- if "file" in item
203
- ]
204
-
205
- logger.info(f"Found {len(files)} files in '{folder_path}'")
206
- return files
207
-
208
- def get_file_content(self, file_path: str) -> bytes:
209
- """
210
- Get the content of a file.
211
-
212
- Args:
213
- file_path: Relative path to the file
214
-
215
- Returns:
216
- File content as bytes
217
- """
218
- site_id = self._get_site_id()
219
- drive_id = self._get_drive_id()
220
-
221
- encoded_path = quote(file_path)
222
- url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root:/{encoded_path}:/content"
223
-
224
- response = requests.get(url, headers=self._get_headers())
225
- self._handle_response(response)
226
-
227
- logger.info(f"Retrieved content for '{file_path}' ({len(response.content)} bytes)")
228
- return response.content
229
-
230
- def upload_file(self, folder_path: str, file_name: str, content: bytes) -> Dict[str, Any]:
231
- """
232
- Upload a file to SharePoint.
233
-
234
- Args:
235
- folder_path: Destination folder path
236
- file_name: Name of the file
237
- content: File content as bytes
238
-
239
- Returns:
240
- File metadata
241
- """
242
- site_id = self._get_site_id()
243
- drive_id = self._get_drive_id()
244
-
245
- if folder_path:
246
- full_path = f"{folder_path}/{file_name}"
247
- else:
248
- full_path = file_name
249
-
250
- encoded_path = quote(full_path)
251
- url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root:/{encoded_path}:/content"
252
-
253
- headers = self._get_headers()
254
- headers["Content-Type"] = "application/octet-stream"
255
-
256
- response = requests.put(url, headers=headers, data=content)
257
- self._handle_response(response)
258
-
259
- logger.info(f"Uploaded '{file_name}' to '{folder_path}'")
260
- return response.json()
261
-
262
- def delete_file(self, file_path: str) -> None:
263
- """
264
- Delete a file.
265
-
266
- Args:
267
- file_path: Relative path to the file
268
- """
269
- site_id = self._get_site_id()
270
- drive_id = self._get_drive_id()
271
-
272
- encoded_path = quote(file_path)
273
- url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root:/{encoded_path}"
274
-
275
- response = requests.delete(url, headers=self._get_headers())
276
- self._handle_response(response)
277
-
278
- logger.info(f"Deleted '{file_path}'")
279
-
280
- def create_folder(self, parent_path: str, folder_name: str) -> Dict[str, Any]:
281
- """
282
- Create a new folder.
283
-
284
- Args:
285
- parent_path: Path to parent folder
286
- folder_name: Name of the new folder
287
-
288
- Returns:
289
- Folder metadata
290
- """
291
- site_id = self._get_site_id()
292
- drive_id = self._get_drive_id()
293
-
294
- if parent_path:
295
- encoded_path = quote(parent_path)
296
- url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root:/{encoded_path}:/children"
297
- else:
298
- url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root/children"
299
-
300
- payload = {
301
- "name": folder_name,
302
- "folder": {},
303
- "@microsoft.graph.conflictBehavior": "fail"
304
- }
305
-
306
- response = requests.post(url, headers=self._get_headers(), json=payload)
307
- self._handle_response(response)
308
-
309
- logger.info(f"Created folder '{folder_name}' in '{parent_path}'")
310
- return response.json()
311
-
312
- def delete_folder(self, folder_path: str) -> None:
313
- """
314
- Delete a folder.
315
-
316
- Args:
317
- folder_path: Relative path to the folder
318
- """
319
- site_id = self._get_site_id()
320
- drive_id = self._get_drive_id()
321
-
322
- encoded_path = quote(folder_path)
323
- url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root:/{encoded_path}"
324
-
325
- response = requests.delete(url, headers=self._get_headers())
326
- self._handle_response(response)
327
-
328
- logger.info(f"Deleted folder '{folder_path}'")