mcp-sharepoint-us 2.0.14__tar.gz → 2.0.15__tar.gz

This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.

Potentially problematic release: this version of mcp-sharepoint-us might be problematic.

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: mcp-sharepoint-us
-Version: 2.0.14
+Version: 2.0.15
 Summary: SharePoint MCP Server with Microsoft Graph API
 License: MIT
 Project-URL: Homepage, https://github.com/mdev26/mcp-sharepoint-us
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "mcp-sharepoint-us"
-version = "2.0.14"
+version = "2.0.15"
 description = "SharePoint MCP Server with Microsoft Graph API"
 readme = "README.md"
 requires-python = ">=3.10"
@@ -5,9 +5,13 @@ Primary API for all SharePoint operations in Azure Government Cloud.
 import os
 import logging
 import asyncio
+import socket
+import ssl
 from typing import Optional, Dict, Any, List
 from urllib.parse import urlparse, quote
 import requests
+from requests.adapters import HTTPAdapter
+from urllib3.util.retry import Retry
 
 logger = logging.getLogger(__name__)
 
@@ -40,6 +44,110 @@ class GraphAPIClient:
             self.graph_endpoint = "https://graph.microsoft.com/v1.0"
             logger.info("Using Microsoft Graph Commercial endpoint")
 
+        # Create a requests session with retry logic
+        self._session = self._create_session()
+
+    def _create_session(self) -> requests.Session:
+        """
+        Create a requests session with retry logic and connection pooling.
+        """
+        session = requests.Session()
+
+        # Configure retry strategy for transient errors
+        retry_strategy = Retry(
+            total=3,  # Total number of retries
+            backoff_factor=1,  # Wait 1, 2, 4 seconds between retries
+            status_forcelist=[429, 500, 502, 503, 504],  # Retry on these HTTP status codes
+            allowed_methods=["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE", "POST"]
+        )
+
+        adapter = HTTPAdapter(max_retries=retry_strategy, pool_connections=10, pool_maxsize=10)
+        session.mount("http://", adapter)
+        session.mount("https://", adapter)
+
+        logger.debug("Created requests session with retry logic and connection pooling")
+        return session
+
+    def _diagnose_connectivity(self, url: str) -> None:
+        """
+        Perform detailed connectivity diagnostics for a URL.
+
+        Args:
+            url: The URL to diagnose
+        """
+        parsed = urlparse(url)
+        hostname = parsed.hostname
+        port = parsed.port or (443 if parsed.scheme == "https" else 80)
+
+        logger.info(f"=== CONNECTIVITY DIAGNOSTICS for {hostname} ===")
+
+        # 1. DNS Resolution
+        try:
+            logger.info(f"[DNS] Resolving {hostname}...")
+            ip_addresses = socket.getaddrinfo(hostname, port, socket.AF_UNSPEC, socket.SOCK_STREAM)
+            for family, socktype, proto, canonname, sockaddr in ip_addresses:
+                family_name = "IPv4" if family == socket.AF_INET else "IPv6"
+                logger.info(f"[DNS] ✓ Resolved to {sockaddr[0]} ({family_name})")
+        except socket.gaierror as e:
+            logger.error(f"[DNS] ✗ DNS resolution failed: {e}")
+            return
+        except Exception as e:
+            logger.error(f"[DNS] ✗ Unexpected error during DNS resolution: {e}")
+            return
+
+        # 2. TCP Connection Test
+        try:
+            logger.info(f"[TCP] Testing TCP connection to {hostname}:{port}...")
+            with socket.create_connection((hostname, port), timeout=10) as sock:
+                logger.info(f"[TCP] ✓ TCP connection successful")
+                peer_name = sock.getpeername()
+                logger.info(f"[TCP] Connected to {peer_name[0]}:{peer_name[1]}")
+
+                # 3. SSL/TLS Test (if HTTPS)
+                if parsed.scheme == "https":
+                    logger.info(f"[TLS] Testing TLS handshake...")
+                    context = ssl.create_default_context()
+                    try:
+                        with context.wrap_socket(sock, server_hostname=hostname) as ssock:
+                            logger.info(f"[TLS] ✓ TLS handshake successful")
+                            logger.info(f"[TLS] Protocol: {ssock.version()}")
+                            cipher = ssock.cipher()
+                            if cipher:
+                                logger.info(f"[TLS] Cipher: {cipher[0]} (bits: {cipher[2]})")
+
+                            # Get certificate info
+                            cert = ssock.getpeercert()
+                            if cert:
+                                subject = dict(x[0] for x in cert['subject'])
+                                logger.info(f"[TLS] Certificate subject: {subject.get('commonName', 'N/A')}")
+                                logger.info(f"[TLS] Certificate issuer: {dict(x[0] for x in cert['issuer']).get('organizationName', 'N/A')}")
+                    except ssl.SSLError as e:
+                        logger.error(f"[TLS] ✗ TLS handshake failed: {e}")
+                        return
+        except socket.timeout:
+            logger.error(f"[TCP] ✗ Connection timeout after 10 seconds")
+            return
+        except ConnectionRefusedError:
+            logger.error(f"[TCP] ✗ Connection refused by server")
+            return
+        except ConnectionResetError:
+            logger.error(f"[TCP] ✗ Connection reset by peer during TCP handshake")
+            return
+        except Exception as e:
+            logger.error(f"[TCP] ✗ Connection failed: {type(e).__name__}: {e}")
+            return
+
+        # 4. HTTP Basic Connectivity Test
+        try:
+            logger.info(f"[HTTP] Testing basic HTTP GET to {parsed.scheme}://{hostname}/")
+            test_url = f"{parsed.scheme}://{hostname}/"
+            response = self._session.get(test_url, timeout=10)
+            logger.info(f"[HTTP] ✓ Basic HTTP request successful (status: {response.status_code})")
+        except requests.exceptions.RequestException as e:
+            logger.error(f"[HTTP] ✗ Basic HTTP request failed: {type(e).__name__}: {e}")
+
+        logger.info(f"=== END DIAGNOSTICS ===\n")
+
     def _get_headers(self) -> Dict[str, str]:
         """Get authorization headers with access token."""
         logger.debug("Getting authorization headers...")
@@ -109,16 +217,50 @@ class GraphAPIClient:
         url = f"{self.graph_endpoint}/sites/{hostname}:/{path}"
 
         logger.info(f"Fetching site ID from: {url}")
+
+        # Get headers and log sanitized version
+        headers = self._get_headers()
+        sanitized_headers = {k: (v[:20] + "..." if k == "Authorization" else v) for k, v in headers.items()}
+        logger.debug(f"Request headers: {sanitized_headers}")
+
         try:
-            response = requests.get(url, headers=self._get_headers(), timeout=30)
-            logger.debug(f"Response status: {response.status_code}")
+            # Make the request
+            logger.debug(f"Sending GET request to: {url}")
+            logger.debug(f"Timeout: 30 seconds")
+
+            response = self._session.get(url, headers=headers, timeout=30)
+
+            logger.debug(f"Response received - Status: {response.status_code}")
+            logger.debug(f"Response headers: {dict(response.headers)}")
+            logger.debug(f"Response encoding: {response.encoding}")
+
             self._handle_response(response)
 
             self._site_id = response.json()["id"]
-            logger.info(f"Retrieved site ID: {self._site_id}")
+            logger.info(f"Retrieved site ID: {self._site_id}")
             return self._site_id
+
+        except requests.exceptions.ConnectionError as e:
+            logger.error(f"✗ ConnectionError getting site ID: {e}", exc_info=True)
+            logger.error("This indicates the connection was established but then dropped.")
+            logger.error("Running comprehensive diagnostics...")
+
+            # Run diagnostics to help identify the issue
+            self._diagnose_connectivity(url)
+
+            logger.error("\nPossible causes:")
+            logger.error("1. Firewall is blocking graph.microsoft.us")
+            logger.error("2. Proxy configuration needed")
+            logger.error("3. SSL/TLS version mismatch")
+            logger.error("4. Network instability")
+            raise
+
+        except requests.exceptions.Timeout:
+            logger.error(f"✗ Request timeout after 30 seconds", exc_info=True)
+            raise
+
         except requests.exceptions.RequestException as e:
-            logger.error(f"Network error getting site ID: {type(e).__name__}: {e}", exc_info=True)
+            logger.error(f"Network error getting site ID: {type(e).__name__}: {e}", exc_info=True)
             raise
 
     def _get_drive_id(self) -> str:
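The URL fetched in _get_site_id uses Microsoft Graph's site-by-path addressing, GET /sites/{hostname}:/{server-relative-path}. The derivation of hostname and path sits outside this hunk; a hypothetical sketch of how they are presumably built from the configured SharePoint site URL, using the urlparse and quote imports already in the module, would be:

from urllib.parse import urlparse, quote

# Illustrative tenant URL; the real value comes from the package's configuration.
site_url = "https://contoso.sharepoint.us/sites/TeamDocs"
parsed = urlparse(site_url)
hostname = parsed.hostname             # "contoso.sharepoint.us"
path = quote(parsed.path.lstrip("/"))  # "sites/TeamDocs"

graph_endpoint = "https://graph.microsoft.us/v1.0"  # US Government cloud endpoint
url = f"{graph_endpoint}/sites/{hostname}:/{path}"
# -> https://graph.microsoft.us/v1.0/sites/contoso.sharepoint.us:/sites/TeamDocs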
@@ -134,16 +276,24 @@ class GraphAPIClient:
         url = f"{self.graph_endpoint}/sites/{site_id}/drive"
 
         logger.info(f"Fetching drive ID from: {url}")
+
         try:
-            response = requests.get(url, headers=self._get_headers(), timeout=30)
-            logger.debug(f"Response status: {response.status_code}")
+            logger.debug(f"Sending GET request to: {url}")
+            response = self._session.get(url, headers=self._get_headers(), timeout=30)
+
+            logger.debug(f"Response received - Status: {response.status_code}")
             self._handle_response(response)
 
             self._drive_id = response.json()["id"]
-            logger.info(f"Retrieved drive ID: {self._drive_id}")
+            logger.info(f"Retrieved drive ID: {self._drive_id}")
             return self._drive_id
+
+        except requests.exceptions.ConnectionError as e:
+            logger.error(f"✗ ConnectionError getting drive ID: {e}", exc_info=True)
+            raise
+
         except requests.exceptions.RequestException as e:
-            logger.error(f"Network error getting drive ID: {type(e).__name__}: {e}", exc_info=True)
+            logger.error(f"Network error getting drive ID: {type(e).__name__}: {e}", exc_info=True)
             raise
 
     def list_folders(self, folder_path: str = "") -> List[Dict[str, Any]]:
@@ -169,7 +319,7 @@
 
         logger.info(f"Fetching folders from: {url}")
         try:
-            response = requests.get(url, headers=self._get_headers(), timeout=30)
+            response = self._session.get(url, headers=self._get_headers(), timeout=30)
             logger.debug(f"Response status: {response.status_code}")
             self._handle_response(response)
 
@@ -213,7 +363,7 @@
 
         logger.info(f"Fetching documents from: {url}")
         try:
-            response = requests.get(url, headers=self._get_headers(), timeout=30)
+            response = self._session.get(url, headers=self._get_headers(), timeout=30)
             logger.debug(f"Response status: {response.status_code}")
             self._handle_response(response)
 
@@ -255,7 +405,7 @@
 
         logger.info(f"Fetching file content from: {url}")
         try:
-            response = requests.get(url, headers=self._get_headers(), timeout=60)
+            response = self._session.get(url, headers=self._get_headers(), timeout=60)
             logger.debug(f"Response status: {response.status_code}")
             self._handle_response(response)
 
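This download path appears to buffer the whole response body in memory (requests' default behavior). One possible refinement, not part of this release, would be to stream large files to disk using the same session; a sketch:

def download_to_file(session, url, headers, dest_path):
    """Stream a download to disk in 64 KiB chunks instead of buffering it."""
    with session.get(url, headers=headers, timeout=60, stream=True) as response:
        response.raise_for_status()
        with open(dest_path, "wb") as fh:
            for chunk in response.iter_content(chunk_size=64 * 1024):
                fh.write(chunk)  # write each chunk as it arrives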
@@ -294,7 +444,7 @@
         headers["Content-Type"] = "application/octet-stream"
 
         try:
-            response = requests.put(url, headers=headers, data=content, timeout=120)
+            response = self._session.put(url, headers=headers, data=content, timeout=120)
             logger.debug(f"Response status: {response.status_code}")
             self._handle_response(response)
 
@@ -320,7 +470,7 @@
 
         logger.info(f"Deleting from: {url}")
         try:
-            response = requests.delete(url, headers=self._get_headers(), timeout=30)
+            response = self._session.delete(url, headers=self._get_headers(), timeout=30)
             logger.debug(f"Response status: {response.status_code}")
             self._handle_response(response)
 
@@ -358,7 +508,7 @@
         }
 
         try:
-            response = requests.post(url, headers=self._get_headers(), json=payload, timeout=30)
+            response = self._session.post(url, headers=self._get_headers(), json=payload, timeout=30)
             logger.debug(f"Response status: {response.status_code}")
             self._handle_response(response)
 
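The payload posted here is defined just above this hunk and is not shown in the diff. For context, a create-folder request against Graph's children collection is documented to take the shape below; this is the documented Graph shape with placeholder IDs, not necessarily the exact payload this package sends:

# Placeholder IDs; the client resolves real values via _get_drive_id and related lookups.
drive_id = "DRIVE_ID"
parent_id = "PARENT_ITEM_ID"

url = f"https://graph.microsoft.us/v1.0/drives/{drive_id}/items/{parent_id}/children"
payload = {
    "name": "New Folder",
    "folder": {},  # an empty facet marks the new DriveItem as a folder
    "@microsoft.graph.conflictBehavior": "rename",  # documented conflict option
}
# response = self._session.post(url, headers=self._get_headers(), json=payload, timeout=30)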
@@ -384,7 +534,7 @@
 
         logger.info(f"Deleting folder from: {url}")
         try:
-            response = requests.delete(url, headers=self._get_headers(), timeout=30)
+            response = self._session.delete(url, headers=self._get_headers(), timeout=30)
             logger.debug(f"Response status: {response.status_code}")
             self._handle_response(response)
 
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: mcp-sharepoint-us
-Version: 2.0.14
+Version: 2.0.15
 Summary: SharePoint MCP Server with Microsoft Graph API
 License: MIT
 Project-URL: Homepage, https://github.com/mdev26/mcp-sharepoint-us