mcp-sharepoint-us 2.0.12__py3-none-any.whl → 2.0.14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


mcp_sharepoint/__init__.py CHANGED
@@ -14,11 +14,6 @@ from mcp.types import Resource, Tool, TextContent, ImageContent, EmbeddedResourc
  from pydantic import AnyUrl
  import mcp.server.stdio

- from office365.sharepoint.files.file import File
- from office365.sharepoint.folders.folder import Folder
- from office365.sharepoint.client_context import ClientContext
-
- from .auth import create_sharepoint_context
  from .graph_api import GraphAPIClient

  # Setup logging
@@ -28,78 +23,65 @@ logger = logging.getLogger(__name__)
  # Initialize MCP server
  app = Server("mcp-sharepoint")

- # Global SharePoint context, Graph API client, and authenticator
- ctx: Optional[ClientContext] = None
+ # Global Graph API client and authenticator
  graph_client: Optional[GraphAPIClient] = None
  authenticator = None


  def ensure_context(func):
-     """Decorator to ensure SharePoint context and Graph API client are available"""
+     """Decorator to ensure Graph API client is available"""
      @wraps(func)
      async def wrapper(*args, **kwargs):
-         global ctx, graph_client, authenticator
-         if ctx is None:
+         global graph_client, authenticator
+         if graph_client is None:
              try:
-                 ctx = create_sharepoint_context()
-                 logger.info("SharePoint context initialized successfully")
+                 logger.info("Initializing Graph API client...")
+                 from .auth import SharePointAuthenticator

-                 # Get site URL for Graph client
+                 # Get credentials
                  site_url = os.getenv("SHP_SITE_URL")
+                 client_id = os.getenv("SHP_ID_APP")
+                 client_secret = os.getenv("SHP_ID_APP_SECRET")
+                 tenant_id = os.getenv("SHP_TENANT_ID")
+                 cloud = "government" if ".sharepoint.us" in site_url else "commercial"
+
+                 logger.info(f"Site URL: {site_url}")
+                 logger.info(f"Tenant ID: {tenant_id}")
+                 logger.info(f"Client ID: {client_id}")
+                 logger.info(f"Cloud: {cloud}")
+
+                 # Create shared authenticator
+                 authenticator = SharePointAuthenticator(
+                     site_url=site_url,
+                     client_id=client_id,
+                     client_secret=client_secret,
+                     tenant_id=tenant_id,
+                     cloud=cloud
+                 )
+                 logger.info("Authenticator created successfully")

-                 # Create Graph API client
-                 # The token callback will use the same MSAL authenticator internally
-                 def token_callback():
-                     """Simple token callback that gets a fresh token using MSAL"""
-                     from .auth import SharePointAuthenticator
-                     from urllib.parse import urlparse
-
-                     site_url = os.getenv("SHP_SITE_URL")
-                     client_id = os.getenv("SHP_ID_APP")
-                     client_secret = os.getenv("SHP_ID_APP_SECRET")
-                     tenant_id = os.getenv("SHP_TENANT_ID")
-                     cloud = "government" if ".sharepoint.us" in site_url else "commercial"
-
-                     import msal
-                     from office365.runtime.auth.token_response import TokenResponse
-
-                     # Build authority URL
-                     if cloud in ("government", "us"):
-                         authority_url = f"https://login.microsoftonline.us/{tenant_id}"
-                     else:
-                         authority_url = f"https://login.microsoftonline.com/{tenant_id}"
-
-                     # Create MSAL app
-                     msal_app = msal.ConfidentialClientApplication(
-                         authority=authority_url,
-                         client_id=client_id,
-                         client_credential=client_secret,
-                         validate_authority=False if cloud in ("government", "us") else True
-                     )
-
-                     # Get scope
-                     parsed = urlparse(site_url)
-                     sharepoint_root = f"{parsed.scheme}://{parsed.netloc}"
-                     scopes = [f"{sharepoint_root}/.default"]
-
-                     # Acquire token
-                     result = msal_app.acquire_token_for_client(scopes=scopes)
-                     return TokenResponse.from_json(result)
+                 # Create Graph API client with direct token access
+                 def get_token():
+                     """Get access token for Graph API"""
+                     logger.debug("Token callback invoked")
+                     token = authenticator.get_access_token()
+                     logger.debug(f"Token acquired (length: {len(token)})")
+                     return token

                  graph_client = GraphAPIClient(
                      site_url=site_url,
-                     token_callback=token_callback
+                     token_callback=get_token
                  )
                  logger.info("Graph API client initialized successfully")

              except Exception as e:
-                 logger.error(f"Failed to initialize SharePoint context: {e}")
+                 logger.error(f"Failed to initialize Graph API client: {e}", exc_info=True)
                  raise RuntimeError(
-                     f"SharePoint authentication failed: {e}. "
+                     f"Graph API authentication failed: {e}. "
                      "Please check your environment variables and ensure:\n"
                      "1. SHP_TENANT_ID is set correctly\n"
-                     "2. Your Azure AD app has the correct API permissions\n"
-                     "3. If using a new tenant, make sure you're using modern auth (MSAL)"
+                     "2. Your Azure AD app has Microsoft Graph API permissions\n"
+                     "3. The app registration has 'Sites.Read.All' and 'Files.ReadWrite.All' permissions"
                  )
          return await func(*args, **kwargs)
      return wrapper
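
The new ensure_context() path above is driven entirely by environment variables rather than a pre-built SharePoint ClientContext. A minimal, illustrative sketch of the configuration it reads (variable names come from the diff; the values here are placeholders, not real credentials):

    import os

    # Placeholders only; ensure_context() calls os.getenv() for each of these.
    os.environ["SHP_SITE_URL"] = "https://contoso.sharepoint.us/sites/ExampleSite"
    os.environ["SHP_ID_APP"] = "00000000-0000-0000-0000-000000000000"       # Azure AD app (client) ID
    os.environ["SHP_ID_APP_SECRET"] = "<client-secret>"
    os.environ["SHP_TENANT_ID"] = "11111111-1111-1111-1111-111111111111"

    # The cloud is inferred from the site URL, exactly as in the decorator above.
    cloud = "government" if ".sharepoint.us" in os.environ["SHP_SITE_URL"] else "commercial"
    print(cloud)  # -> "government" for *.sharepoint.us sites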
@@ -349,127 +331,88 @@ async def call_tool(name: str, arguments: dict) -> list[TextContent]:


  async def test_connection() -> list[TextContent]:
-     """Test SharePoint connection"""
+     """Test SharePoint connection using Microsoft Graph API"""
      try:
-         web = ctx.web.get().execute_query()
-         auth_method = os.getenv("SHP_AUTH_METHOD", "msal")
-
+         logger.info("Testing Graph API connection...")
+
+         # Try to get site ID and drive ID
+         site_id = await asyncio.to_thread(graph_client._get_site_id)
+         drive_id = await asyncio.to_thread(graph_client._get_drive_id)
+
+         auth_method = "msal (Microsoft Graph API)"
+
+         logger.info(f"✓ Connection test successful - Site ID: {site_id}, Drive ID: {drive_id}")
+
          return [TextContent(
              type="text",
-             text=f"✓ Successfully connected to SharePoint!\n\n"
-                  f"Site Title: {web.title}\n"
-                  f"Site URL: {web.url}\n"
-                  f"Authentication Method: {auth_method.upper()}\n"
+             text=f"✓ Successfully connected to SharePoint via Microsoft Graph API!\n\n"
+                  f"Site URL: {graph_client.site_url}\n"
+                  f"Graph Endpoint: {graph_client.graph_endpoint}\n"
+                  f"Site ID: {site_id}\n"
+                  f"Drive ID: {drive_id}\n"
+                  f"Authentication Method: {auth_method}\n"
                   f"Tenant ID: {os.getenv('SHP_TENANT_ID')}\n\n"
-                  f"Connection is working correctly with modern Azure AD authentication."
+                  f"Connection is working correctly with Microsoft Graph API."
          )]
      except Exception as e:
+         logger.error(f"✗ Connection test failed: {str(e)}", exc_info=True)
          return [TextContent(
              type="text",
              text=f"✗ Connection failed: {str(e)}\n\n"
                   f"This usually means:\n"
                   f"1. Your credentials are incorrect\n"
-                  f"2. Your app doesn't have proper SharePoint permissions\n"
-                  f"3. You're using legacy auth on a new tenant (set SHP_AUTH_METHOD=msal)"
+                  f"2. Your app doesn't have proper Microsoft Graph permissions\n"
+                  f"3. Network connectivity issues\n"
+                  f"4. Azure AD app registration is missing required permissions:\n"
+                  f" - Sites.Read.All\n"
+                  f" - Files.ReadWrite.All\n\n"
+                  f"Check the logs for more details."
          )]


  async def list_folders(folder_path: str = "") -> list[TextContent]:
-     """List folders in specified path"""
+     """List folders in specified path using Microsoft Graph API"""
      doc_lib = get_document_library_path()
      full_path = f"{doc_lib}/{folder_path}" if folder_path else doc_lib

      try:
-         # Try SharePoint REST API first
-         folder = ctx.web.get_folder_by_server_relative_path(full_path)
-         folders = folder.folders.get().execute_query()
-
-         folder_list = []
-         for f in folders:
-             folder_list.append(f"📁 {f.name}")
+         # Use Graph API directly
+         folders = await asyncio.to_thread(graph_client.list_folders, folder_path)
+         folder_list = [f"📁 {f['name']}" for f in folders]

          result = f"Folders in '{full_path}':\n\n" + "\n".join(folder_list) if folder_list else f"No folders found in '{full_path}'"
          return [TextContent(type="text", text=result)]

      except Exception as e:
-         error_msg = str(e).lower()
-
-         # Check if it's an app-only token error
-         if "unsupported app only token" in error_msg or "401" in error_msg:
-             logger.warning(f"SharePoint REST API failed with app-only token error, falling back to Graph API")
-
-             try:
-                 # Fallback to Graph API
-                 folders = await asyncio.to_thread(graph_client.list_folders, folder_path)
-
-                 folder_list = [f"📁 {f['name']}" for f in folders]
-
-                 result = f"Folders in '{full_path}' (via Graph API):\n\n" + "\n".join(folder_list) if folder_list else f"No folders found in '{full_path}'"
-                 return [TextContent(type="text", text=result)]
-
-             except Exception as graph_error:
-                 return [TextContent(type="text", text=f"Error with both APIs - REST: {e}, Graph: {graph_error}")]
-
-         # Other errors
          return [TextContent(type="text", text=f"Error listing folders: {str(e)}")]


  async def list_documents(folder_path: str = "") -> list[TextContent]:
-     """List documents in specified folder"""
+     """List documents in specified folder using Microsoft Graph API"""
      doc_lib = get_document_library_path()
      full_path = f"{doc_lib}/{folder_path}" if folder_path else doc_lib

      try:
-         # Try SharePoint REST API first
-         folder = ctx.web.get_folder_by_server_relative_path(full_path)
-         files = folder.files.get().execute_query()
+         # Use Graph API directly
+         files = await asyncio.to_thread(graph_client.list_documents, folder_path)

          file_list = []
          for f in files:
-             size_kb = f.length / 1024
-             file_list.append(f"📄 {f.name} ({size_kb:.2f} KB)")
+             size_kb = f['size'] / 1024
+             file_list.append(f"📄 {f['name']} ({size_kb:.2f} KB)")

          result = f"Documents in '{full_path}':\n\n" + "\n".join(file_list) if file_list else f"No documents found in '{full_path}'"
          return [TextContent(type="text", text=result)]

      except Exception as e:
-         error_msg = str(e).lower()
-
-         # Check if it's an app-only token error
-         if "unsupported app only token" in error_msg or "401" in error_msg:
-             logger.warning(f"SharePoint REST API failed with app-only token error, falling back to Graph API")
-
-             try:
-                 # Fallback to Graph API
-                 files = await asyncio.to_thread(graph_client.list_documents, folder_path)
-
-                 file_list = []
-                 for f in files:
-                     size_kb = f['size'] / 1024
-                     file_list.append(f"📄 {f['name']} ({size_kb:.2f} KB)")
-
-                 result = f"Documents in '{full_path}' (via Graph API):\n\n" + "\n".join(file_list) if file_list else f"No documents found in '{full_path}'"
-                 return [TextContent(type="text", text=result)]
-
-             except Exception as graph_error:
-                 return [TextContent(type="text", text=f"Error with both APIs - REST: {e}, Graph: {graph_error}")]
-
-         # Other errors
          return [TextContent(type="text", text=f"Error listing documents: {str(e)}")]


  async def get_document_content(file_path: str) -> list[TextContent]:
-     """Get document content"""
+     """Get document content using Microsoft Graph API"""
      try:
-         doc_lib = get_document_library_path()
-         full_path = f"{doc_lib}/{file_path}"
-
-         def _read_bytes():
-             sp_file = ctx.web.get_file_by_server_relative_path(full_path)
-             # IMPORTANT: execute the request
-             return sp_file.read().execute_query()
-
-         content = await asyncio.to_thread(_read_bytes)
+         # Use Graph API to get file content
+         content = await asyncio.to_thread(graph_client.get_file_content, file_path)

          ext = os.path.splitext(file_path)[1].lower()
          text_extensions = {'.txt', '.md', '.json', '.xml', '.html', '.csv', '.log'}
@@ -494,141 +437,127 @@ async def get_document_content(file_path: str) -> list[TextContent]:


  async def upload_document(folder_path: str, file_name: str, content: str, is_binary: bool = False) -> list[TextContent]:
-     """Upload a document"""
+     """Upload a document using Microsoft Graph API"""
      try:
-         doc_lib = get_document_library_path()
-         full_path = f"{doc_lib}/{folder_path}" if folder_path else doc_lib
-
-         folder = ctx.web.get_folder_by_server_relative_path(full_path)
-
          if is_binary:
              file_content = base64.b64decode(content)
          else:
             file_content = content.encode('utf-8')
-
-         uploaded_file = folder.upload_file(file_name, file_content).execute_query()
-
+
+         # Use Graph API to upload file
+         result = await asyncio.to_thread(
+             graph_client.upload_file,
+             folder_path,
+             file_name,
+             file_content
+         )
+
          return [TextContent(
              type="text",
-             text=f"✓ Successfully uploaded '{file_name}' to '{full_path}'"
+             text=f"✓ Successfully uploaded '{file_name}' to '{folder_path or 'root'}'"
          )]
-
+
      except Exception as e:
          return [TextContent(type="text", text=f"Error uploading document: {str(e)}")]


  async def update_document(file_path: str, content: str, is_binary: bool = False) -> list[TextContent]:
-     """Update a document"""
+     """Update a document using Microsoft Graph API"""
      try:
-         doc_lib = get_document_library_path()
-         full_path = f"{doc_lib}/{file_path}"
-
          if is_binary:
              file_content = base64.b64decode(content)
          else:
             file_content = content.encode('utf-8')
-
-         file = ctx.web.get_file_by_server_relative_path(full_path)
-         file.write(file_content).execute_query()
-
+
+         # Split file_path into folder and filename
+         folder_path = os.path.dirname(file_path)
+         file_name = os.path.basename(file_path)
+
+         # Use Graph API to upload/update file (PUT overwrites)
+         await asyncio.to_thread(
+             graph_client.upload_file,
+             folder_path,
+             file_name,
+             file_content
+         )
+
          return [TextContent(
              type="text",
              text=f"✓ Successfully updated '{file_path}'"
          )]
-
+
      except Exception as e:
          return [TextContent(type="text", text=f"Error updating document: {str(e)}")]


  async def delete_document(file_path: str) -> list[TextContent]:
-     """Delete a document"""
+     """Delete a document using Microsoft Graph API"""
      try:
-         doc_lib = get_document_library_path()
-         full_path = f"{doc_lib}/{file_path}"
-
-         file = ctx.web.get_file_by_server_relative_path(full_path)
-         file.delete_object().execute_query()
-
+         # Use Graph API to delete file
+         await asyncio.to_thread(graph_client.delete_file, file_path)
+
          return [TextContent(
              type="text",
              text=f"✓ Successfully deleted '{file_path}'"
          )]
-
+
      except Exception as e:
          return [TextContent(type="text", text=f"Error deleting document: {str(e)}")]


  async def create_folder(folder_path: str, folder_name: str) -> list[TextContent]:
-     """Create a folder"""
+     """Create a folder using Microsoft Graph API"""
      try:
-         doc_lib = get_document_library_path()
-         full_path = f"{doc_lib}/{folder_path}" if folder_path else doc_lib
-
-         parent_folder = ctx.web.get_folder_by_server_relative_path(full_path)
-         new_folder = parent_folder.folders.add(folder_name).execute_query()
-
+         # Use Graph API to create folder
+         await asyncio.to_thread(
+             graph_client.create_folder,
+             folder_path,
+             folder_name
+         )
+
          return [TextContent(
              type="text",
-             text=f"✓ Successfully created folder '{folder_name}' in '{full_path}'"
+             text=f"✓ Successfully created folder '{folder_name}' in '{folder_path or 'root'}'"
          )]
-
+
      except Exception as e:
          return [TextContent(type="text", text=f"Error creating folder: {str(e)}")]


  async def delete_folder(folder_path: str) -> list[TextContent]:
-     """Delete a folder"""
+     """Delete a folder using Microsoft Graph API"""
      try:
-         doc_lib = get_document_library_path()
-         full_path = f"{doc_lib}/{folder_path}"
-
-         folder = ctx.web.get_folder_by_server_relative_path(full_path)
-         folder.delete_object().execute_query()
-
+         # Use Graph API to delete folder
+         await asyncio.to_thread(graph_client.delete_folder, folder_path)
+
          return [TextContent(
              type="text",
              text=f"✓ Successfully deleted folder '{folder_path}'"
          )]
-
+
      except Exception as e:
          return [TextContent(type="text", text=f"Error deleting folder: {str(e)}")]


  async def get_tree(folder_path: str = "", max_depth: int = 5, current_depth: int = 0) -> list[TextContent]:
-     """Get folder tree structure"""
+     """Get folder tree structure using Microsoft Graph API"""
      if current_depth >= max_depth:
          return [TextContent(type="text", text="Max depth reached")]

      try:
-         doc_lib = get_document_library_path()
-         full_path = f"{doc_lib}/{folder_path}" if folder_path else doc_lib
-
-         folder = ctx.web.get_folder_by_server_relative_path(full_path)
-         folders = folder.folders.get().execute_query()
+         # Use Graph API to list folders
+         folders = await asyncio.to_thread(graph_client.list_folders, folder_path)

          indent = " " * current_depth
          tree_lines = [f"{indent}📁 {folder_path or 'Root'}"]

          for f in folders:
-             sub_path = f"{folder_path}/{f.name}" if folder_path else f.name
+             sub_path = f"{folder_path}/{f['name']}" if folder_path else f['name']
              sub_tree = await get_tree(sub_path, max_depth, current_depth + 1)
              tree_lines.append(sub_tree[0].text)

          return [TextContent(type="text", text="\n".join(tree_lines))]

-     except TypeError as e:
-         if "can't compare offset-naive and offset-aware datetimes" in str(e):
-             logger.error(
-                 f"DateTime comparison error occurred despite patch. "
-                 f"This may indicate a new code path in the library. Error: {e}"
-             )
-             return [TextContent(
-                 type="text",
-                 text=f"Encountered a datetime comparison issue. "
-                      f"A workaround patch is applied, but this specific code path may need attention.\n"
-                      f"Alternative: Use List_SharePoint_Folders for folder navigation."
-             )]
-         raise
      except Exception as e:
          return [TextContent(type="text", text=f"Error getting tree: {str(e)}")]
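
All of the tool handlers above follow the same pattern: the synchronous GraphAPIClient call runs inside asyncio.to_thread so the blocking HTTP request does not stall the MCP server's event loop. A self-contained sketch of that pattern, with a hypothetical fetch_folders() standing in for the real client call:

    import asyncio
    import time

    def fetch_folders(folder_path: str) -> list[dict]:
        # Stand-in for a blocking GraphAPIClient.list_folders() call (illustrative only).
        time.sleep(0.1)  # simulate network latency
        return [{"name": f"{folder_path or 'root'}-subfolder"}]

    async def list_folders_tool(folder_path: str = "") -> str:
        # Run the blocking call in a worker thread, as the 2.0.14 handlers do.
        folders = await asyncio.to_thread(fetch_folders, folder_path)
        return "\n".join(f"📁 {f['name']}" for f in folders)

    if __name__ == "__main__":
        print(asyncio.run(list_folders_tool("Reports")))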
 
mcp_sharepoint/auth.py CHANGED
@@ -7,7 +7,6 @@ import logging
  import time
  import random
  from typing import Optional
- from urllib.parse import urlparse
  from datetime import datetime, timezone
  from office365.sharepoint.client_context import ClientContext
  from office365.runtime.auth.client_credential import ClientCredential
@@ -85,7 +84,17 @@ class SharePointAuthenticator:
          self.cert_path = cert_path
          self.cert_thumbprint = cert_thumbprint
          self.cloud = cloud.lower()
-
+
+         # Initialize token cache
+         self._access_token = None
+         self._access_token_exp = 0
+
+         # Set Graph API scope based on cloud environment
+         if self.cloud in ("government", "us"):
+             self._scopes = ["https://graph.microsoft.us/.default"]
+         else:
+             self._scopes = ["https://graph.microsoft.com/.default"]
+
      def get_context_with_msal(self) -> ClientContext:
          """
          Get ClientContext using MSAL for modern Azure AD authentication.
@@ -123,19 +132,7 @@ class SharePointAuthenticator:
          self._msal_app = msal.ConfidentialClientApplication(**msal_params)
          self._authority_url = authority_url

-         # Small in-memory access-token cache (avoid repeated acquire calls)
-         # MSAL caches too, but keeping the raw token avoids extra work in Office365 callbacks.
-         if not hasattr(self, "_access_token"):
-             self._access_token = None
-             self._access_token_exp = 0
-
-         # Extract root SharePoint URL for scope
-         # For https://tenant.sharepoint.us/sites/SiteName -> https://tenant.sharepoint.us
-         parsed = urlparse(self.site_url)
-         sharepoint_root = f"{parsed.scheme}://{parsed.netloc}"
-         scopes = [f"{sharepoint_root}/.default"]
-
-         logger.info(f"Using SharePoint root scope: {sharepoint_root}/.default")
+         logger.info(f"Using Graph API scope: {self._scopes[0]}")

          def acquire_token():
              """
@@ -153,7 +150,7 @@ class SharePointAuthenticator:
              last_err = None
              for attempt in range(1, 6):  # 5 attempts
                  try:
-                     result = self._msal_app.acquire_token_for_client(scopes=scopes)
+                     result = self._msal_app.acquire_token_for_client(scopes=self._scopes)

                      if "access_token" not in result:
                          error_desc = result.get("error_description", "Unknown error")
@@ -161,7 +158,7 @@ class SharePointAuthenticator:
                          raise ValueError(
                              f"Failed to acquire token: {error} - {error_desc}\n"
                              f"Authority: {self._authority_url}\n"
-                             f"Scopes: {scopes}"
+                             f"Scopes: {self._scopes}"
                          )

                      token = result["access_token"]
@@ -191,7 +188,91 @@ class SharePointAuthenticator:
          logger.info("Successfully authenticated using MSAL (Modern Azure AD)")
          return ctx

-
+     def get_access_token(self) -> str:
+         """
+         Get access token directly for use with Microsoft Graph API.
+         Uses the same retry logic as get_context_with_msal() but returns just the token string.
+
+         Returns:
+             Access token as string
+
+         Raises:
+             RuntimeError: If token acquisition fails after retries
+         """
+         # Initialize MSAL app if not already done
+         if not hasattr(self, "_msal_app"):
+             if self.cloud in ("government", "us"):
+                 authority_url = f"https://login.microsoftonline.us/{self.tenant_id}"
+                 logger.info("Using Azure US Government Cloud endpoints")
+             else:
+                 authority_url = f"https://login.microsoftonline.com/{self.tenant_id}"
+                 logger.info("Using Azure Commercial Cloud endpoints")
+
+             self._token_cache = msal.SerializableTokenCache()
+
+             msal_params = {
+                 "authority": authority_url,
+                 "client_id": self.client_id,
+                 "client_credential": self.client_secret,
+                 "token_cache": self._token_cache,
+             }
+
+             if self.cloud in ("government", "us"):
+                 msal_params["validate_authority"] = False
+                 logger.info("Disabled authority validation for government cloud")
+
+             self._msal_app = msal.ConfidentialClientApplication(**msal_params)
+             self._authority_url = authority_url
+
+         now = int(time.time())
+         if self._access_token and now < (self._access_token_exp - 60):
+             logger.debug("Using cached access token")
+             return self._access_token
+
+         logger.info(f"Acquiring new access token from {self._authority_url}")
+         logger.debug(f"Scopes: {self._scopes}")
+
+         last_err = None
+         for attempt in range(1, 6):  # 5 attempts
+             try:
+                 logger.debug(f"Token acquisition attempt {attempt}/5")
+                 result = self._msal_app.acquire_token_for_client(scopes=self._scopes)
+
+                 if "access_token" not in result:
+                     error_desc = result.get("error_description", "Unknown error")
+                     error = result.get("error", "Unknown")
+                     logger.error(f"Token acquisition failed: {error} - {error_desc}")
+                     raise ValueError(
+                         f"Failed to acquire token: {error} - {error_desc}\n"
+                         f"Authority: {self._authority_url}\n"
+                         f"Scopes: {self._scopes}"
+                     )
+
+                 token = result["access_token"]
+
+                 # MSAL returns expires_in (seconds) for client credential tokens
+                 expires_in = int(result.get("expires_in", 3600))
+                 self._access_token = token
+                 self._access_token_exp = int(time.time()) + expires_in
+
+                 logger.info(f"Successfully acquired Graph API token (expires in {expires_in}s)")
+                 logger.debug(f"Token length: {len(token)}, starts with: {token[:20]}...")
+                 return token
+
+             except Exception as e:
+                 last_err = e
+                 logger.error(f"Token acquisition attempt {attempt}/5 failed: {type(e).__name__}: {e}")
+                 # Exponential backoff with jitter
+                 sleep_s = min(8.0, (2 ** (attempt - 1)) * 0.5) + random.random() * 0.25
+                 logger.warning(
+                     f"Token acquisition attempt {attempt}/5 failed: {e}. Retrying in {sleep_s:.2f}s"
+                 )
+                 time.sleep(sleep_s)
+
+         # If we get here, all retries failed
+         raise RuntimeError(f"Token acquisition failed after retries: {last_err}")
+
+
      def get_context_with_certificate(self) -> ClientContext:
          """
          Get ClientContext using certificate-based authentication.
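
The new get_access_token() method above caches the raw token in memory, refreshes it roughly 60 seconds before expiry, and retries acquisition with exponential backoff. A hedged usage sketch, assuming the keyword signature used by __init__.py (all credential values are placeholders):

    from mcp_sharepoint.auth import SharePointAuthenticator

    auth = SharePointAuthenticator(
        site_url="https://contoso.sharepoint.us/sites/ExampleSite",
        client_id="00000000-0000-0000-0000-000000000000",
        client_secret="<client-secret>",
        tenant_id="11111111-1111-1111-1111-111111111111",
        cloud="government",  # selects login.microsoftonline.us and the graph.microsoft.us/.default scope
    )

    token = auth.get_access_token()        # acquired via the MSAL client-credentials flow, with retries
    token_again = auth.get_access_token()  # served from the in-memory cache until ~60s before expiry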
mcp_sharepoint/graph_api.py CHANGED
@@ -1,6 +1,6 @@
  """
- Microsoft Graph API implementation for SharePoint operations
- Used as a fallback when SharePoint REST API doesn't support app-only tokens
+ Microsoft Graph API implementation for SharePoint operations.
+ Primary API for all SharePoint operations in Azure Government Cloud.
  """
  import os
  import logging
@@ -15,7 +15,8 @@ logger = logging.getLogger(__name__)
  class GraphAPIClient:
      """
      Microsoft Graph API client for SharePoint operations.
-     Fallback for when SharePoint REST API doesn't support app-only authentication.
+     Primary client for all SharePoint operations, especially in Azure Government Cloud
+     where SharePoint REST API may not support app-only authentication.
      """

      def __init__(self, site_url: str, token_callback):
@@ -29,6 +30,7 @@ class GraphAPIClient:
          self.site_url = site_url.rstrip("/")
          self.token_callback = token_callback
          self._site_id = None
+         self._drive_id = None  # Cache drive ID to avoid repeated API calls

          # Determine Graph API endpoint based on cloud
          if ".sharepoint.us" in site_url:
@@ -40,6 +42,7 @@ class GraphAPIClient:

      def _get_headers(self) -> Dict[str, str]:
          """Get authorization headers with access token."""
+         logger.debug("Getting authorization headers...")
          token_obj = self.token_callback()
          # Handle both TokenResponse objects and plain strings
          if hasattr(token_obj, 'accessToken'):
@@ -47,17 +50,51 @@
          else:
              token = str(token_obj)

+         logger.debug(f"Token acquired for headers (length: {len(token)}, starts with: {token[:20]}...)")
+
          return {
              "Authorization": f"Bearer {token}",
              "Accept": "application/json",
          }

+     def _handle_response(self, response: requests.Response) -> None:
+         """
+         Handle Graph API response and raise detailed errors if needed.
+
+         Graph API returns errors in format:
+         {
+             "error": {
+                 "code": "itemNotFound",
+                 "message": "The resource could not be found."
+             }
+         }
+         """
+         if response.ok:
+             return
+
+         try:
+             error_data = response.json()
+             if "error" in error_data:
+                 error = error_data["error"]
+                 code = error.get("code", "Unknown")
+                 message = error.get("message", "Unknown error")
+                 raise requests.HTTPError(
+                     f"Graph API error [{code}]: {message}",
+                     response=response
+                 )
+         except (ValueError, KeyError):
+             # If we can't parse the error, fall back to standard handling
+             pass
+
+         response.raise_for_status()
+
      def _get_site_id(self) -> str:
          """
          Get the site ID from the site URL.
          Caches the result for reuse.
          """
          if self._site_id:
+             logger.debug(f"Using cached site ID: {self._site_id}")
              return self._site_id

          parsed = urlparse(self.site_url)
@@ -71,24 +108,43 @@ class GraphAPIClient:
          else:
              url = f"{self.graph_endpoint}/sites/{hostname}:/{path}"

-         response = requests.get(url, headers=self._get_headers())
-         response.raise_for_status()
+         logger.info(f"Fetching site ID from: {url}")
+         try:
+             response = requests.get(url, headers=self._get_headers(), timeout=30)
+             logger.debug(f"Response status: {response.status_code}")
+             self._handle_response(response)

-         self._site_id = response.json()["id"]
-         logger.info(f"Retrieved site ID: {self._site_id}")
-         return self._site_id
+             self._site_id = response.json()["id"]
+             logger.info(f"Retrieved site ID: {self._site_id}")
+             return self._site_id
+         except requests.exceptions.RequestException as e:
+             logger.error(f"Network error getting site ID: {type(e).__name__}: {e}", exc_info=True)
+             raise

      def _get_drive_id(self) -> str:
-         """Get the default document library drive ID."""
+         """
+         Get the default document library drive ID.
+         Caches the result for reuse.
+         """
+         if self._drive_id:
+             logger.debug(f"Using cached drive ID: {self._drive_id}")
+             return self._drive_id
+
          site_id = self._get_site_id()
          url = f"{self.graph_endpoint}/sites/{site_id}/drive"

-         response = requests.get(url, headers=self._get_headers())
-         response.raise_for_status()
+         logger.info(f"Fetching drive ID from: {url}")
+         try:
+             response = requests.get(url, headers=self._get_headers(), timeout=30)
+             logger.debug(f"Response status: {response.status_code}")
+             self._handle_response(response)

-         drive_id = response.json()["id"]
-         logger.info(f"Retrieved drive ID: {drive_id}")
-         return drive_id
+             self._drive_id = response.json()["id"]
+             logger.info(f"Retrieved drive ID: {self._drive_id}")
+             return self._drive_id
+         except requests.exceptions.RequestException as e:
+             logger.error(f"Network error getting drive ID: {type(e).__name__}: {e}", exc_info=True)
+             raise

      def list_folders(self, folder_path: str = "") -> List[Dict[str, Any]]:
          """
@@ -100,6 +156,7 @@ class GraphAPIClient:
          Returns:
              List of folder objects with name, id, webUrl
          """
+         logger.info(f"Listing folders in '{folder_path}'")
          site_id = self._get_site_id()
          drive_id = self._get_drive_id()

@@ -110,23 +167,29 @@ class GraphAPIClient:
          else:
              url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root/children"

-         response = requests.get(url, headers=self._get_headers())
-         response.raise_for_status()
-
-         items = response.json().get("value", [])
-         # Filter to only folders
-         folders = [
-             {
-                 "name": item["name"],
-                 "id": item["id"],
-                 "webUrl": item.get("webUrl", ""),
-             }
-             for item in items
-             if "folder" in item
-         ]
-
-         logger.info(f"Found {len(folders)} folders in '{folder_path}'")
-         return folders
+         logger.info(f"Fetching folders from: {url}")
+         try:
+             response = requests.get(url, headers=self._get_headers(), timeout=30)
+             logger.debug(f"Response status: {response.status_code}")
+             self._handle_response(response)
+
+             items = response.json().get("value", [])
+             # Filter to only folders
+             folders = [
+                 {
+                     "name": item["name"],
+                     "id": item["id"],
+                     "webUrl": item.get("webUrl", ""),
+                 }
+                 for item in items
+                 if "folder" in item
+             ]
+
+             logger.info(f"Found {len(folders)} folders in '{folder_path}'")
+             return folders
+         except requests.exceptions.RequestException as e:
+             logger.error(f"Network error listing folders: {type(e).__name__}: {e}", exc_info=True)
+             raise

      def list_documents(self, folder_path: str = "") -> List[Dict[str, Any]]:
          """
@@ -138,6 +201,7 @@ class GraphAPIClient:
          Returns:
              List of file objects with name, id, size, webUrl
          """
+         logger.info(f"Listing documents in '{folder_path}'")
          site_id = self._get_site_id()
          drive_id = self._get_drive_id()

@@ -147,24 +211,30 @@ class GraphAPIClient:
          else:
              url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root/children"

-         response = requests.get(url, headers=self._get_headers())
-         response.raise_for_status()
-
-         items = response.json().get("value", [])
-         # Filter to only files
-         files = [
-             {
-                 "name": item["name"],
-                 "id": item["id"],
-                 "size": item.get("size", 0),
-                 "webUrl": item.get("webUrl", ""),
-             }
-             for item in items
-             if "file" in item
-         ]
-
-         logger.info(f"Found {len(files)} files in '{folder_path}'")
-         return files
+         logger.info(f"Fetching documents from: {url}")
+         try:
+             response = requests.get(url, headers=self._get_headers(), timeout=30)
+             logger.debug(f"Response status: {response.status_code}")
+             self._handle_response(response)
+
+             items = response.json().get("value", [])
+             # Filter to only files
+             files = [
+                 {
+                     "name": item["name"],
+                     "id": item["id"],
+                     "size": item.get("size", 0),
+                     "webUrl": item.get("webUrl", ""),
+                 }
+                 for item in items
+                 if "file" in item
+             ]
+
+             logger.info(f"Found {len(files)} files in '{folder_path}'")
+             return files
+         except requests.exceptions.RequestException as e:
+             logger.error(f"Network error listing documents: {type(e).__name__}: {e}", exc_info=True)
+             raise

      def get_file_content(self, file_path: str) -> bytes:
          """
@@ -176,17 +246,24 @@ class GraphAPIClient:
          Returns:
              File content as bytes
          """
+         logger.info(f"Getting content for file '{file_path}'")
          site_id = self._get_site_id()
          drive_id = self._get_drive_id()

          encoded_path = quote(file_path)
          url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root:/{encoded_path}:/content"

-         response = requests.get(url, headers=self._get_headers())
-         response.raise_for_status()
+         logger.info(f"Fetching file content from: {url}")
+         try:
+             response = requests.get(url, headers=self._get_headers(), timeout=60)
+             logger.debug(f"Response status: {response.status_code}")
+             self._handle_response(response)

-         logger.info(f"Retrieved content for '{file_path}' ({len(response.content)} bytes)")
-         return response.content
+             logger.info(f"Retrieved content for '{file_path}' ({len(response.content)} bytes)")
+             return response.content
+         except requests.exceptions.RequestException as e:
+             logger.error(f"Network error getting file content: {type(e).__name__}: {e}", exc_info=True)
+             raise

      def upload_file(self, folder_path: str, file_name: str, content: bytes) -> Dict[str, Any]:
          """
@@ -200,6 +277,7 @@ class GraphAPIClient:
          Returns:
              File metadata
          """
+         logger.info(f"Uploading file '{file_name}' to '{folder_path}' ({len(content)} bytes)")
          site_id = self._get_site_id()
          drive_id = self._get_drive_id()

@@ -211,14 +289,20 @@ class GraphAPIClient:
          encoded_path = quote(full_path)
          url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root:/{encoded_path}:/content"

+         logger.info(f"Uploading to: {url}")
          headers = self._get_headers()
          headers["Content-Type"] = "application/octet-stream"

-         response = requests.put(url, headers=headers, data=content)
-         response.raise_for_status()
+         try:
+             response = requests.put(url, headers=headers, data=content, timeout=120)
+             logger.debug(f"Response status: {response.status_code}")
+             self._handle_response(response)

-         logger.info(f"Uploaded '{file_name}' to '{folder_path}'")
-         return response.json()
+             logger.info(f"Successfully uploaded '{file_name}' to '{folder_path}'")
+             return response.json()
+         except requests.exceptions.RequestException as e:
+             logger.error(f"Network error uploading file: {type(e).__name__}: {e}", exc_info=True)
+             raise

      def delete_file(self, file_path: str) -> None:
          """
@@ -227,16 +311,23 @@ class GraphAPIClient:
          Args:
              file_path: Relative path to the file
          """
+         logger.info(f"Deleting file '{file_path}'")
          site_id = self._get_site_id()
          drive_id = self._get_drive_id()

          encoded_path = quote(file_path)
          url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root:/{encoded_path}"

-         response = requests.delete(url, headers=self._get_headers())
-         response.raise_for_status()
+         logger.info(f"Deleting from: {url}")
+         try:
+             response = requests.delete(url, headers=self._get_headers(), timeout=30)
+             logger.debug(f"Response status: {response.status_code}")
+             self._handle_response(response)

-         logger.info(f"Deleted '{file_path}'")
+             logger.info(f"Successfully deleted '{file_path}'")
+         except requests.exceptions.RequestException as e:
+             logger.error(f"Network error deleting file: {type(e).__name__}: {e}", exc_info=True)
+             raise

      def create_folder(self, parent_path: str, folder_name: str) -> Dict[str, Any]:
          """
@@ -249,6 +340,7 @@ class GraphAPIClient:
          Returns:
              Folder metadata
          """
+         logger.info(f"Creating folder '{folder_name}' in '{parent_path}'")
          site_id = self._get_site_id()
          drive_id = self._get_drive_id()

@@ -258,17 +350,23 @@ class GraphAPIClient:
          else:
              url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root/children"

+         logger.info(f"Creating folder at: {url}")
          payload = {
              "name": folder_name,
              "folder": {},
              "@microsoft.graph.conflictBehavior": "fail"
          }

-         response = requests.post(url, headers=self._get_headers(), json=payload)
-         response.raise_for_status()
+         try:
+             response = requests.post(url, headers=self._get_headers(), json=payload, timeout=30)
+             logger.debug(f"Response status: {response.status_code}")
+             self._handle_response(response)

-         logger.info(f"Created folder '{folder_name}' in '{parent_path}'")
-         return response.json()
+             logger.info(f"Successfully created folder '{folder_name}' in '{parent_path}'")
+             return response.json()
+         except requests.exceptions.RequestException as e:
+             logger.error(f"Network error creating folder: {type(e).__name__}: {e}", exc_info=True)
+             raise

      def delete_folder(self, folder_path: str) -> None:
          """
@@ -277,13 +375,20 @@ class GraphAPIClient:
          Args:
              folder_path: Relative path to the folder
          """
+         logger.info(f"Deleting folder '{folder_path}'")
          site_id = self._get_site_id()
          drive_id = self._get_drive_id()

          encoded_path = quote(folder_path)
          url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root:/{encoded_path}"

-         response = requests.delete(url, headers=self._get_headers())
-         response.raise_for_status()
+         logger.info(f"Deleting folder from: {url}")
+         try:
+             response = requests.delete(url, headers=self._get_headers(), timeout=30)
+             logger.debug(f"Response status: {response.status_code}")
+             self._handle_response(response)

-         logger.info(f"Deleted folder '{folder_path}'")
+             logger.info(f"Successfully deleted folder '{folder_path}'")
+         except requests.exceptions.RequestException as e:
+             logger.error(f"Network error deleting folder: {type(e).__name__}: {e}", exc_info=True)
+             raise
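
Taken together, the changes above make GraphAPIClient self-sufficient: every request adds a bearer token obtained from the callback, uses an explicit timeout, and funnels failures through _handle_response(). An illustrative sketch of driving the client directly, with a placeholder token callback standing in for SharePointAuthenticator.get_access_token():

    from mcp_sharepoint.graph_api import GraphAPIClient

    def get_token() -> str:
        # Placeholder; the package wires this to SharePointAuthenticator.get_access_token().
        return "<access-token>"

    client = GraphAPIClient(
        site_url="https://contoso.sharepoint.us/sites/ExampleSite",
        token_callback=get_token,  # invoked by _get_headers() on every request
    )

    # Resolves the site and drive IDs, then lists child folders of the given path.
    folders = client.list_folders("Shared Documents")
    print([f["name"] for f in folders])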
@@ -1,7 +1,7 @@
  Metadata-Version: 2.4
  Name: mcp-sharepoint-us
- Version: 2.0.12
- Summary: SharePoint MCP Server with Modern Azure AD Authentication
+ Version: 2.0.14
+ Summary: SharePoint MCP Server with Microsoft Graph API
  License: MIT
  Project-URL: Homepage, https://github.com/mdev26/mcp-sharepoint-us
  Project-URL: Repository, https://github.com/mdev26/mcp-sharepoint-us
@@ -0,0 +1,10 @@
+ mcp_sharepoint/__init__.py,sha256=sSJtlX91mBQ4fM12R8XK7Vrkkr3YPJqriE8LZP157vM,20969
+ mcp_sharepoint/__main__.py,sha256=4iVDdDZx4rQ4Zo-x0RaCrT-NKeGObIz_ks3YF8di2nA,132
+ mcp_sharepoint/auth.py,sha256=fwOCsg1pv0cN26hNlsHhJhGckeDkJCiXZrMmiBn9jf4,18156
+ mcp_sharepoint/graph_api.py,sha256=GNZXKTyTKqSL9t4AAyUSszmhulyJ5cX4e5tlxpYUuYM,14510
+ mcp_sharepoint_us-2.0.14.dist-info/licenses/LICENSE,sha256=SRM8juGH4GjIqnl5rrp-P-S5mW5h2mINOPx5-wOZG6s,1112
+ mcp_sharepoint_us-2.0.14.dist-info/METADATA,sha256=reWorSnGr5fY68_KV1OK4kTj2Me8aYBRzM-lgt6Jykw,11402
+ mcp_sharepoint_us-2.0.14.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+ mcp_sharepoint_us-2.0.14.dist-info/entry_points.txt,sha256=UZOa_7OLI41rmsErbvnSz9RahPMGQVcqZUFMphOcjbY,57
+ mcp_sharepoint_us-2.0.14.dist-info/top_level.txt,sha256=R6mRoWe61lz4kUSKGV6S2XVbE7825xfC_J-ouZIYpuo,15
+ mcp_sharepoint_us-2.0.14.dist-info/RECORD,,
@@ -1,10 +0,0 @@
- mcp_sharepoint/__init__.py,sha256=bPS8QLq2U83JjAIr76_cEwQfZkv92K8MMWR7hbuo-9s,24298
- mcp_sharepoint/__main__.py,sha256=4iVDdDZx4rQ4Zo-x0RaCrT-NKeGObIz_ks3YF8di2nA,132
- mcp_sharepoint/auth.py,sha256=Tve5y-m1WwL6eTVpofeDv3zFSIDhwo1s26gJSy3F_1s,14729
- mcp_sharepoint/graph_api.py,sha256=63ZCx4G5BqimkYcYbibJtRYiU2UhsjN8nXp-qzPGBfA,9273
- mcp_sharepoint_us-2.0.12.dist-info/licenses/LICENSE,sha256=SRM8juGH4GjIqnl5rrp-P-S5mW5h2mINOPx5-wOZG6s,1112
- mcp_sharepoint_us-2.0.12.dist-info/METADATA,sha256=UP72g9PzIBBNJFY_Se2ECqiuocwUhj8aiwEu7ur1yY0,11413
- mcp_sharepoint_us-2.0.12.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
- mcp_sharepoint_us-2.0.12.dist-info/entry_points.txt,sha256=UZOa_7OLI41rmsErbvnSz9RahPMGQVcqZUFMphOcjbY,57
- mcp_sharepoint_us-2.0.12.dist-info/top_level.txt,sha256=R6mRoWe61lz4kUSKGV6S2XVbE7825xfC_J-ouZIYpuo,15
- mcp_sharepoint_us-2.0.12.dist-info/RECORD,,