mcp-sharepoint-us 2.0.12-py3-none-any.whl → 2.0.13-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mcp_sharepoint/__init__.py +91 -186
- mcp_sharepoint/auth.py +91 -18
- mcp_sharepoint/graph_api.py +55 -16
- {mcp_sharepoint_us-2.0.12.dist-info → mcp_sharepoint_us-2.0.13.dist-info}/METADATA +2 -2
- mcp_sharepoint_us-2.0.13.dist-info/RECORD +10 -0
- mcp_sharepoint_us-2.0.12.dist-info/RECORD +0 -10
- {mcp_sharepoint_us-2.0.12.dist-info → mcp_sharepoint_us-2.0.13.dist-info}/WHEEL +0 -0
- {mcp_sharepoint_us-2.0.12.dist-info → mcp_sharepoint_us-2.0.13.dist-info}/entry_points.txt +0 -0
- {mcp_sharepoint_us-2.0.12.dist-info → mcp_sharepoint_us-2.0.13.dist-info}/licenses/LICENSE +0 -0
- {mcp_sharepoint_us-2.0.12.dist-info → mcp_sharepoint_us-2.0.13.dist-info}/top_level.txt +0 -0
mcp_sharepoint/__init__.py
CHANGED
@@ -14,11 +14,6 @@ from mcp.types import Resource, Tool, TextContent, ImageContent, EmbeddedResourc
 from pydantic import AnyUrl
 import mcp.server.stdio
 
-from office365.sharepoint.files.file import File
-from office365.sharepoint.folders.folder import Folder
-from office365.sharepoint.client_context import ClientContext
-
-from .auth import create_sharepoint_context
 from .graph_api import GraphAPIClient
 
 # Setup logging
@@ -28,78 +23,55 @@ logger = logging.getLogger(__name__)
 # Initialize MCP server
 app = Server("mcp-sharepoint")
 
-# Global
-ctx: Optional[ClientContext] = None
+# Global Graph API client and authenticator
 graph_client: Optional[GraphAPIClient] = None
 authenticator = None
 
 
 def ensure_context(func):
-    """Decorator to ensure
+    """Decorator to ensure Graph API client is available"""
     @wraps(func)
     async def wrapper(*args, **kwargs):
-        global
-        if
+        global graph_client, authenticator
+        if graph_client is None:
             try:
-
-                logger.info("SharePoint context initialized successfully")
+                from .auth import SharePointAuthenticator
 
-                # Get
+                # Get credentials
                 site_url = os.getenv("SHP_SITE_URL")
+                client_id = os.getenv("SHP_ID_APP")
+                client_secret = os.getenv("SHP_ID_APP_SECRET")
+                tenant_id = os.getenv("SHP_TENANT_ID")
+                cloud = "government" if ".sharepoint.us" in site_url else "commercial"
+
+                # Create shared authenticator
+                authenticator = SharePointAuthenticator(
+                    site_url=site_url,
+                    client_id=client_id,
+                    client_secret=client_secret,
+                    tenant_id=tenant_id,
+                    cloud=cloud
+                )
 
-                # Create Graph API client
-
-
-
-                from .auth import SharePointAuthenticator
-                from urllib.parse import urlparse
-
-                site_url = os.getenv("SHP_SITE_URL")
-                client_id = os.getenv("SHP_ID_APP")
-                client_secret = os.getenv("SHP_ID_APP_SECRET")
-                tenant_id = os.getenv("SHP_TENANT_ID")
-                cloud = "government" if ".sharepoint.us" in site_url else "commercial"
-
-                import msal
-                from office365.runtime.auth.token_response import TokenResponse
-
-                # Build authority URL
-                if cloud in ("government", "us"):
-                    authority_url = f"https://login.microsoftonline.us/{tenant_id}"
-                else:
-                    authority_url = f"https://login.microsoftonline.com/{tenant_id}"
-
-                # Create MSAL app
-                msal_app = msal.ConfidentialClientApplication(
-                    authority=authority_url,
-                    client_id=client_id,
-                    client_credential=client_secret,
-                    validate_authority=False if cloud in ("government", "us") else True
-                )
-
-                # Get scope
-                parsed = urlparse(site_url)
-                sharepoint_root = f"{parsed.scheme}://{parsed.netloc}"
-                scopes = [f"{sharepoint_root}/.default"]
-
-                # Acquire token
-                result = msal_app.acquire_token_for_client(scopes=scopes)
-                return TokenResponse.from_json(result)
+                # Create Graph API client with direct token access
+                def get_token():
+                    """Get access token for Graph API"""
+                    return authenticator.get_access_token()
 
                 graph_client = GraphAPIClient(
                     site_url=site_url,
-                    token_callback=
+                    token_callback=get_token
                 )
                 logger.info("Graph API client initialized successfully")
 
             except Exception as e:
-                logger.error(f"Failed to initialize
+                logger.error(f"Failed to initialize Graph API client: {e}")
                 raise RuntimeError(
-                    f"
+                    f"Graph API authentication failed: {e}. "
                     "Please check your environment variables and ensure:\n"
                     "1. SHP_TENANT_ID is set correctly\n"
-                    "2. Your Azure AD app has
-                    "3.
+                    "2. Your Azure AD app has Microsoft Graph API permissions\n"
+                    "3. The app registration has 'Sites.Read.All' and 'Files.ReadWrite.All' permissions"
                 )
         return await func(*args, **kwargs)
    return wrapper
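The hunk above replaces the old per-request SharePoint context with a module-level Graph client that is built lazily on the first decorated call. A simplified, standalone sketch of that lazy-initialization pattern is shown below; the names here are hypothetical stand-ins, not the package's API, and the real decorator builds SharePointAuthenticator and GraphAPIClient from the SHP_* environment variables shown in the diff.

```python
import asyncio
from functools import wraps
from typing import Optional


class FakeClient:
    """Stand-in for GraphAPIClient: built once, reused by every wrapped call."""
    def list_folders(self, path: str = ""):
        return [{"name": "Reports"}, {"name": "Archive"}]


client: Optional[FakeClient] = None


def ensure_client(func):
    @wraps(func)
    async def wrapper(*args, **kwargs):
        global client
        if client is None:           # first call pays the setup cost
            client = FakeClient()    # real code builds the authenticator + Graph client here
        return await func(*args, **kwargs)
    return wrapper


@ensure_client
async def list_folders(path: str = ""):
    # the real handlers offload the blocking HTTP call with asyncio.to_thread(...)
    return await asyncio.to_thread(client.list_folders, path)


print(asyncio.run(list_folders("")))  # [{'name': 'Reports'}, {'name': 'Archive'}]
```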
@@ -375,101 +347,48 @@ async def test_connection() -> list[TextContent]:
 
 
 async def list_folders(folder_path: str = "") -> list[TextContent]:
-    """List folders in specified path"""
+    """List folders in specified path using Microsoft Graph API"""
     doc_lib = get_document_library_path()
     full_path = f"{doc_lib}/{folder_path}" if folder_path else doc_lib
 
     try:
-        #
-
-
-
-        folder_list = []
-        for f in folders:
-            folder_list.append(f"📁 {f.name}")
+        # Use Graph API directly
+        folders = await asyncio.to_thread(graph_client.list_folders, folder_path)
+        folder_list = [f"📁 {f['name']}" for f in folders]
 
         result = f"Folders in '{full_path}':\n\n" + "\n".join(folder_list) if folder_list else f"No folders found in '{full_path}'"
         return [TextContent(type="text", text=result)]
 
     except Exception as e:
-        error_msg = str(e).lower()
-
-        # Check if it's an app-only token error
-        if "unsupported app only token" in error_msg or "401" in error_msg:
-            logger.warning(f"SharePoint REST API failed with app-only token error, falling back to Graph API")
-
-            try:
-                # Fallback to Graph API
-                folders = await asyncio.to_thread(graph_client.list_folders, folder_path)
-
-                folder_list = [f"📁 {f['name']}" for f in folders]
-
-                result = f"Folders in '{full_path}' (via Graph API):\n\n" + "\n".join(folder_list) if folder_list else f"No folders found in '{full_path}'"
-                return [TextContent(type="text", text=result)]
-
-            except Exception as graph_error:
-                return [TextContent(type="text", text=f"Error with both APIs - REST: {e}, Graph: {graph_error}")]
-
-        # Other errors
         return [TextContent(type="text", text=f"Error listing folders: {str(e)}")]
 
 
 async def list_documents(folder_path: str = "") -> list[TextContent]:
-    """List documents in specified folder"""
+    """List documents in specified folder using Microsoft Graph API"""
     doc_lib = get_document_library_path()
     full_path = f"{doc_lib}/{folder_path}" if folder_path else doc_lib
 
     try:
-        #
-
-        files = folder.files.get().execute_query()
+        # Use Graph API directly
+        files = await asyncio.to_thread(graph_client.list_documents, folder_path)
 
         file_list = []
         for f in files:
-            size_kb = f
-            file_list.append(f"📄 {f
+            size_kb = f['size'] / 1024
+            file_list.append(f"📄 {f['name']} ({size_kb:.2f} KB)")
 
         result = f"Documents in '{full_path}':\n\n" + "\n".join(file_list) if file_list else f"No documents found in '{full_path}'"
         return [TextContent(type="text", text=result)]
 
     except Exception as e:
-        error_msg = str(e).lower()
-
-        # Check if it's an app-only token error
-        if "unsupported app only token" in error_msg or "401" in error_msg:
-            logger.warning(f"SharePoint REST API failed with app-only token error, falling back to Graph API")
-
-            try:
-                # Fallback to Graph API
-                files = await asyncio.to_thread(graph_client.list_documents, folder_path)
-
-                file_list = []
-                for f in files:
-                    size_kb = f['size'] / 1024
-                    file_list.append(f"📄 {f['name']} ({size_kb:.2f} KB)")
-
-                result = f"Documents in '{full_path}' (via Graph API):\n\n" + "\n".join(file_list) if file_list else f"No documents found in '{full_path}'"
-                return [TextContent(type="text", text=result)]
-
-            except Exception as graph_error:
-                return [TextContent(type="text", text=f"Error with both APIs - REST: {e}, Graph: {graph_error}")]
-
-        # Other errors
         return [TextContent(type="text", text=f"Error listing documents: {str(e)}")]
 
 
 async def get_document_content(file_path: str) -> list[TextContent]:
-    """Get document content"""
+    """Get document content using Microsoft Graph API"""
     try:
-
-
-
-        def _read_bytes():
-            sp_file = ctx.web.get_file_by_server_relative_path(full_path)
-            # IMPORTANT: execute the request
-            return sp_file.read().execute_query()
-
-        content = await asyncio.to_thread(_read_bytes)
+        # Use Graph API to get file content
+        content = await asyncio.to_thread(graph_client.get_file_content, file_path)
 
         ext = os.path.splitext(file_path)[1].lower()
         text_extensions = {'.txt', '.md', '.json', '.xml', '.html', '.csv', '.log'}
@@ -494,141 +413,127 @@ async def get_document_content(file_path: str) -> list[TextContent]:
 
 
 async def upload_document(folder_path: str, file_name: str, content: str, is_binary: bool = False) -> list[TextContent]:
-    """Upload a document"""
+    """Upload a document using Microsoft Graph API"""
     try:
-        doc_lib = get_document_library_path()
-        full_path = f"{doc_lib}/{folder_path}" if folder_path else doc_lib
-
-        folder = ctx.web.get_folder_by_server_relative_path(full_path)
-
         if is_binary:
             file_content = base64.b64decode(content)
         else:
             file_content = content.encode('utf-8')
-
-
-
+
+        # Use Graph API to upload file
+        result = await asyncio.to_thread(
+            graph_client.upload_file,
+            folder_path,
+            file_name,
+            file_content
+        )
+
         return [TextContent(
             type="text",
-            text=f"✓ Successfully uploaded '{file_name}' to '{
+            text=f"✓ Successfully uploaded '{file_name}' to '{folder_path or 'root'}'"
         )]
-
+
     except Exception as e:
         return [TextContent(type="text", text=f"Error uploading document: {str(e)}")]
 
 
 async def update_document(file_path: str, content: str, is_binary: bool = False) -> list[TextContent]:
-    """Update a document"""
+    """Update a document using Microsoft Graph API"""
     try:
-        doc_lib = get_document_library_path()
-        full_path = f"{doc_lib}/{file_path}"
-
         if is_binary:
             file_content = base64.b64decode(content)
         else:
             file_content = content.encode('utf-8')
-
-
-
-
+
+        # Split file_path into folder and filename
+        folder_path = os.path.dirname(file_path)
+        file_name = os.path.basename(file_path)
+
+        # Use Graph API to upload/update file (PUT overwrites)
+        await asyncio.to_thread(
+            graph_client.upload_file,
+            folder_path,
+            file_name,
+            file_content
+        )
+
        return [TextContent(
            type="text",
            text=f"✓ Successfully updated '{file_path}'"
        )]
-
+
     except Exception as e:
         return [TextContent(type="text", text=f"Error updating document: {str(e)}")]
 
 
 async def delete_document(file_path: str) -> list[TextContent]:
-    """Delete a document"""
+    """Delete a document using Microsoft Graph API"""
     try:
-
-
-
-        file = ctx.web.get_file_by_server_relative_path(full_path)
-        file.delete_object().execute_query()
-
+        # Use Graph API to delete file
+        await asyncio.to_thread(graph_client.delete_file, file_path)
+
         return [TextContent(
             type="text",
             text=f"✓ Successfully deleted '{file_path}'"
         )]
-
+
     except Exception as e:
         return [TextContent(type="text", text=f"Error deleting document: {str(e)}")]
 
 
 async def create_folder(folder_path: str, folder_name: str) -> list[TextContent]:
-    """Create a folder"""
+    """Create a folder using Microsoft Graph API"""
     try:
-
-
-
-
-
-
+        # Use Graph API to create folder
+        await asyncio.to_thread(
+            graph_client.create_folder,
+            folder_path,
+            folder_name
+        )
+
         return [TextContent(
             type="text",
-            text=f"✓ Successfully created folder '{folder_name}' in '{
+            text=f"✓ Successfully created folder '{folder_name}' in '{folder_path or 'root'}'"
         )]
-
+
     except Exception as e:
         return [TextContent(type="text", text=f"Error creating folder: {str(e)}")]
 
 
 async def delete_folder(folder_path: str) -> list[TextContent]:
-    """Delete a folder"""
+    """Delete a folder using Microsoft Graph API"""
    try:
-
-
-
-        folder = ctx.web.get_folder_by_server_relative_path(full_path)
-        folder.delete_object().execute_query()
-
+        # Use Graph API to delete folder
+        await asyncio.to_thread(graph_client.delete_folder, folder_path)
+
         return [TextContent(
            type="text",
            text=f"✓ Successfully deleted folder '{folder_path}'"
        )]
-
+
     except Exception as e:
         return [TextContent(type="text", text=f"Error deleting folder: {str(e)}")]
 
 
 async def get_tree(folder_path: str = "", max_depth: int = 5, current_depth: int = 0) -> list[TextContent]:
-    """Get folder tree structure"""
+    """Get folder tree structure using Microsoft Graph API"""
     if current_depth >= max_depth:
         return [TextContent(type="text", text="Max depth reached")]
 
     try:
-
-
-
-        folder = ctx.web.get_folder_by_server_relative_path(full_path)
-        folders = folder.folders.get().execute_query()
+        # Use Graph API to list folders
+        folders = await asyncio.to_thread(graph_client.list_folders, folder_path)
 
         indent = " " * current_depth
         tree_lines = [f"{indent}📁 {folder_path or 'Root'}"]
 
         for f in folders:
-            sub_path = f"{folder_path}/{f
+            sub_path = f"{folder_path}/{f['name']}" if folder_path else f['name']
             sub_tree = await get_tree(sub_path, max_depth, current_depth + 1)
             tree_lines.append(sub_tree[0].text)
 
         return [TextContent(type="text", text="\n".join(tree_lines))]
 
-    except TypeError as e:
-        if "can't compare offset-naive and offset-aware datetimes" in str(e):
-            logger.error(
-                f"DateTime comparison error occurred despite patch. "
-                f"This may indicate a new code path in the library. Error: {e}"
-            )
-            return [TextContent(
-                type="text",
-                text=f"Encountered a datetime comparison issue. "
-                f"A workaround patch is applied, but this specific code path may need attention.\n"
-                f"Alternative: Use List_SharePoint_Folders for folder navigation."
-            )]
-        raise
     except Exception as e:
         return [TextContent(type="text", text=f"Error getting tree: {str(e)}")]
 
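All of the rewritten handlers follow the same shape: the Graph client's methods are synchronous `requests` calls, so each async handler hands them to a worker thread instead of blocking the event loop. A minimal sketch of that pattern, with a made-up blocking function standing in for a GraphAPIClient method:

```python
import asyncio
import time


def blocking_fetch(path: str) -> list[str]:
    time.sleep(0.1)  # stands in for a synchronous requests.get(...) inside GraphAPIClient
    return [f"{path}/a.txt", f"{path}/b.txt"]


async def handler(path: str) -> list[str]:
    # same shape as `await asyncio.to_thread(graph_client.list_documents, folder_path)` in the diff
    return await asyncio.to_thread(blocking_fetch, path)


print(asyncio.run(handler("Shared Documents")))
```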
mcp_sharepoint/auth.py
CHANGED
@@ -7,7 +7,6 @@ import logging
 import time
 import random
 from typing import Optional
-from urllib.parse import urlparse
 from datetime import datetime, timezone
 from office365.sharepoint.client_context import ClientContext
 from office365.runtime.auth.client_credential import ClientCredential
@@ -85,7 +84,17 @@ class SharePointAuthenticator:
         self.cert_path = cert_path
         self.cert_thumbprint = cert_thumbprint
         self.cloud = cloud.lower()
-
+
+        # Initialize token cache
+        self._access_token = None
+        self._access_token_exp = 0
+
+        # Set Graph API scope based on cloud environment
+        if self.cloud in ("government", "us"):
+            self._scopes = ["https://graph.microsoft.us/.default"]
+        else:
+            self._scopes = ["https://graph.microsoft.com/.default"]
+
     def get_context_with_msal(self) -> ClientContext:
         """
         Get ClientContext using MSAL for modern Azure AD authentication.
@@ -123,19 +132,7 @@ class SharePointAuthenticator:
         self._msal_app = msal.ConfidentialClientApplication(**msal_params)
         self._authority_url = authority_url
 
-
-        # MSAL caches too, but keeping the raw token avoids extra work in Office365 callbacks.
-        if not hasattr(self, "_access_token"):
-            self._access_token = None
-            self._access_token_exp = 0
-
-        # Extract root SharePoint URL for scope
-        # For https://tenant.sharepoint.us/sites/SiteName -> https://tenant.sharepoint.us
-        parsed = urlparse(self.site_url)
-        sharepoint_root = f"{parsed.scheme}://{parsed.netloc}"
-        scopes = [f"{sharepoint_root}/.default"]
-
-        logger.info(f"Using SharePoint root scope: {sharepoint_root}/.default")
+        logger.info(f"Using Graph API scope: {self._scopes[0]}")
 
         def acquire_token():
             """
@@ -153,7 +150,7 @@ class SharePointAuthenticator:
             last_err = None
             for attempt in range(1, 6):  # 5 attempts
                 try:
-                    result = self._msal_app.acquire_token_for_client(scopes=
+                    result = self._msal_app.acquire_token_for_client(scopes=self._scopes)
 
                     if "access_token" not in result:
                         error_desc = result.get("error_description", "Unknown error")
@@ -161,7 +158,7 @@ class SharePointAuthenticator:
                         raise ValueError(
                             f"Failed to acquire token: {error} - {error_desc}\n"
                             f"Authority: {self._authority_url}\n"
-                            f"Scopes: {
+                            f"Scopes: {self._scopes}"
                         )
 
                     token = result["access_token"]
@@ -191,7 +188,83 @@ class SharePointAuthenticator:
         logger.info("Successfully authenticated using MSAL (Modern Azure AD)")
         return ctx
 
-
+    def get_access_token(self) -> str:
+        """
+        Get access token directly for use with Microsoft Graph API.
+        Uses the same retry logic as get_context_with_msal() but returns just the token string.
+
+        Returns:
+            Access token as string
+
+        Raises:
+            RuntimeError: If token acquisition fails after retries
+        """
+        # Initialize MSAL app if not already done
+        if not hasattr(self, "_msal_app"):
+            if self.cloud in ("government", "us"):
+                authority_url = f"https://login.microsoftonline.us/{self.tenant_id}"
+                logger.info("Using Azure US Government Cloud endpoints")
+            else:
+                authority_url = f"https://login.microsoftonline.com/{self.tenant_id}"
+                logger.info("Using Azure Commercial Cloud endpoints")
+
+            self._token_cache = msal.SerializableTokenCache()
+
+            msal_params = {
+                "authority": authority_url,
+                "client_id": self.client_id,
+                "client_credential": self.client_secret,
+                "token_cache": self._token_cache,
+            }
+
+            if self.cloud in ("government", "us"):
+                msal_params["validate_authority"] = False
+                logger.info("Disabled authority validation for government cloud")
+
+            self._msal_app = msal.ConfidentialClientApplication(**msal_params)
+            self._authority_url = authority_url
+
+        now = int(time.time())
+        if self._access_token and now < (self._access_token_exp - 60):
+            return self._access_token
+
+        last_err = None
+        for attempt in range(1, 6):  # 5 attempts
+            try:
+                result = self._msal_app.acquire_token_for_client(scopes=self._scopes)
+
+                if "access_token" not in result:
+                    error_desc = result.get("error_description", "Unknown error")
+                    error = result.get("error", "Unknown")
+                    raise ValueError(
+                        f"Failed to acquire token: {error} - {error_desc}\n"
+                        f"Authority: {self._authority_url}\n"
+                        f"Scopes: {self._scopes}"
+                    )
+
+                token = result["access_token"]
+
+                # MSAL returns expires_in (seconds) for client credential tokens
+                expires_in = int(result.get("expires_in", 3600))
+                self._access_token = token
+                self._access_token_exp = int(time.time()) + expires_in
+
+                logger.info(f"Successfully acquired Graph API token")
+                return token
+
+            except Exception as e:
+                last_err = e
+                # Exponential backoff with jitter
+                sleep_s = min(8.0, (2 ** (attempt - 1)) * 0.5) + random.random() * 0.25
+                logger.warning(
+                    f"Token acquisition attempt {attempt}/5 failed: {e}. Retrying in {sleep_s:.2f}s"
+                )
+                time.sleep(sleep_s)
+
+        # If we get here, all retries failed
+        raise RuntimeError(f"Token acquisition failed after retries: {last_err}")
+
+
     def get_context_with_certificate(self) -> ClientContext:
         """
         Get ClientContext using certificate-based authentication.
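The new get_access_token() puts a small token cache (reuse until roughly 60 seconds before expiry) in front of MSAL's client-credential flow, with up to five acquisition attempts and exponential backoff plus jitter. A standalone sketch of just that caching and retry behavior, with illustrative names rather than the package's classes:

```python
import random
import time


class TokenCache:
    def __init__(self, acquire):
        self._acquire = acquire  # callable returning {"access_token": ..., "expires_in": ...}
        self._token = None
        self._exp = 0

    def get(self) -> str:
        now = int(time.time())
        if self._token and now < self._exp - 60:  # 60 s safety margin, as in the diff
            return self._token
        last_err = None
        for attempt in range(1, 6):  # 5 attempts
            try:
                result = self._acquire()
                self._token = result["access_token"]
                self._exp = int(time.time()) + int(result.get("expires_in", 3600))
                return self._token
            except Exception as e:  # broad retry, mirroring the diff
                last_err = e
                # exponential backoff capped at 8 s, plus jitter
                time.sleep(min(8.0, (2 ** (attempt - 1)) * 0.5) + random.random() * 0.25)
        raise RuntimeError(f"Token acquisition failed after retries: {last_err}")


cache = TokenCache(lambda: {"access_token": "demo-token", "expires_in": 3600})
print(cache.get())
```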
mcp_sharepoint/graph_api.py
CHANGED
@@ -1,6 +1,6 @@
 """
-Microsoft Graph API implementation for SharePoint operations
-
+Microsoft Graph API implementation for SharePoint operations.
+Primary API for all SharePoint operations in Azure Government Cloud.
 """
 import os
 import logging
@@ -15,7 +15,8 @@ logger = logging.getLogger(__name__)
 class GraphAPIClient:
     """
     Microsoft Graph API client for SharePoint operations.
-
+    Primary client for all SharePoint operations, especially in Azure Government Cloud
+    where SharePoint REST API may not support app-only authentication.
     """
 
     def __init__(self, site_url: str, token_callback):
@@ -29,6 +30,7 @@ class GraphAPIClient:
         self.site_url = site_url.rstrip("/")
         self.token_callback = token_callback
         self._site_id = None
+        self._drive_id = None  # Cache drive ID to avoid repeated API calls
 
         # Determine Graph API endpoint based on cloud
         if ".sharepoint.us" in site_url:
@@ -52,6 +54,37 @@
             "Accept": "application/json",
         }
 
+    def _handle_response(self, response: requests.Response) -> None:
+        """
+        Handle Graph API response and raise detailed errors if needed.
+
+        Graph API returns errors in format:
+        {
+            "error": {
+                "code": "itemNotFound",
+                "message": "The resource could not be found."
+            }
+        }
+        """
+        if response.ok:
+            return
+
+        try:
+            error_data = response.json()
+            if "error" in error_data:
+                error = error_data["error"]
+                code = error.get("code", "Unknown")
+                message = error.get("message", "Unknown error")
+                raise requests.HTTPError(
+                    f"Graph API error [{code}]: {message}",
+                    response=response
+                )
+        except (ValueError, KeyError):
+            # If we can't parse the error, fall back to standard handling
+            pass
+
+        self._handle_response(response)
+
     def _get_site_id(self) -> str:
         """
         Get the site ID from the site URL.
@@ -72,23 +105,29 @@
         url = f"{self.graph_endpoint}/sites/{hostname}:/{path}"
 
         response = requests.get(url, headers=self._get_headers())
-
+        self._handle_response(response)
 
         self._site_id = response.json()["id"]
         logger.info(f"Retrieved site ID: {self._site_id}")
         return self._site_id
 
     def _get_drive_id(self) -> str:
-        """
+        """
+        Get the default document library drive ID.
+        Caches the result for reuse.
+        """
+        if self._drive_id:
+            return self._drive_id
+
         site_id = self._get_site_id()
         url = f"{self.graph_endpoint}/sites/{site_id}/drive"
 
         response = requests.get(url, headers=self._get_headers())
-
+        self._handle_response(response)
 
-
-        logger.info(f"Retrieved drive ID: {
-        return
+        self._drive_id = response.json()["id"]
+        logger.info(f"Retrieved drive ID: {self._drive_id}")
+        return self._drive_id
 
     def list_folders(self, folder_path: str = "") -> List[Dict[str, Any]]:
         """
@@ -111,7 +150,7 @@
         url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root/children"
 
         response = requests.get(url, headers=self._get_headers())
-
+        self._handle_response(response)
 
         items = response.json().get("value", [])
         # Filter to only folders
@@ -148,7 +187,7 @@
         url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root/children"
 
         response = requests.get(url, headers=self._get_headers())
-
+        self._handle_response(response)
 
         items = response.json().get("value", [])
         # Filter to only files
@@ -183,7 +222,7 @@
         url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root:/{encoded_path}:/content"
 
         response = requests.get(url, headers=self._get_headers())
-
+        self._handle_response(response)
 
         logger.info(f"Retrieved content for '{file_path}' ({len(response.content)} bytes)")
         return response.content
@@ -215,7 +254,7 @@
         headers["Content-Type"] = "application/octet-stream"
 
         response = requests.put(url, headers=headers, data=content)
-
+        self._handle_response(response)
 
         logger.info(f"Uploaded '{file_name}' to '{folder_path}'")
         return response.json()
@@ -234,7 +273,7 @@
         url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root:/{encoded_path}"
 
         response = requests.delete(url, headers=self._get_headers())
-
+        self._handle_response(response)
 
         logger.info(f"Deleted '{file_path}'")
 
@@ -265,7 +304,7 @@
         }
 
         response = requests.post(url, headers=self._get_headers(), json=payload)
-
+        self._handle_response(response)
 
         logger.info(f"Created folder '{folder_name}' in '{parent_path}'")
         return response.json()
@@ -284,6 +323,6 @@
         url = f"{self.graph_endpoint}/sites/{site_id}/drives/{drive_id}/root:/{encoded_path}"
 
         response = requests.delete(url, headers=self._get_headers())
-
+        self._handle_response(response)
 
         logger.info(f"Deleted folder '{folder_path}'")
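With _handle_response() in place, failed Graph calls surface as requests.HTTPError carrying the Graph error code and message in the exception text. A hypothetical caller-side sketch follows; the wrapper function and the idea of swallowing the error into an empty list are made up for illustration, while GraphAPIClient and list_folders come from the diff above.

```python
import requests

from mcp_sharepoint.graph_api import GraphAPIClient


def list_folders_safely(client: GraphAPIClient, path: str):
    """Call list_folders and log Graph error details instead of propagating them."""
    try:
        return client.list_folders(path)
    except requests.HTTPError as e:
        # e.g. "Graph API error [itemNotFound]: The resource could not be found."
        print(f"Graph call failed: {e}")
        return []
```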
{mcp_sharepoint_us-2.0.12.dist-info → mcp_sharepoint_us-2.0.13.dist-info}/METADATA
CHANGED

@@ -1,7 +1,7 @@
 Metadata-Version: 2.4
 Name: mcp-sharepoint-us
-Version: 2.0.12
-Summary: SharePoint MCP Server with
+Version: 2.0.13
+Summary: SharePoint MCP Server with Microsoft Graph API
 License: MIT
 Project-URL: Homepage, https://github.com/mdev26/mcp-sharepoint-us
 Project-URL: Repository, https://github.com/mdev26/mcp-sharepoint-us
mcp_sharepoint_us-2.0.13.dist-info/RECORD
ADDED

@@ -0,0 +1,10 @@
+mcp_sharepoint/__init__.py,sha256=318-XBsPcTt2EH_B9j-ym_GFs91_0Kb-0WiLKfEV-L0,19744
+mcp_sharepoint/__main__.py,sha256=4iVDdDZx4rQ4Zo-x0RaCrT-NKeGObIz_ks3YF8di2nA,132
+mcp_sharepoint/auth.py,sha256=03p8ylIkrlNoVuVSJ96nnqUd8n7QnwWXWXkkV7y01AU,17598
+mcp_sharepoint/graph_api.py,sha256=y3Q5OHkitAsp7QN1PFIf_sh7g5DShLEfWUlHzIHeS24,10571
+mcp_sharepoint_us-2.0.13.dist-info/licenses/LICENSE,sha256=SRM8juGH4GjIqnl5rrp-P-S5mW5h2mINOPx5-wOZG6s,1112
+mcp_sharepoint_us-2.0.13.dist-info/METADATA,sha256=YNzf3j597kLjomxk9NDhQSHVTc41XGWx-vzVnnFWQHg,11402
+mcp_sharepoint_us-2.0.13.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+mcp_sharepoint_us-2.0.13.dist-info/entry_points.txt,sha256=UZOa_7OLI41rmsErbvnSz9RahPMGQVcqZUFMphOcjbY,57
+mcp_sharepoint_us-2.0.13.dist-info/top_level.txt,sha256=R6mRoWe61lz4kUSKGV6S2XVbE7825xfC_J-ouZIYpuo,15
+mcp_sharepoint_us-2.0.13.dist-info/RECORD,,

mcp_sharepoint_us-2.0.12.dist-info/RECORD
REMOVED

@@ -1,10 +0,0 @@
-mcp_sharepoint/__init__.py,sha256=bPS8QLq2U83JjAIr76_cEwQfZkv92K8MMWR7hbuo-9s,24298
-mcp_sharepoint/__main__.py,sha256=4iVDdDZx4rQ4Zo-x0RaCrT-NKeGObIz_ks3YF8di2nA,132
-mcp_sharepoint/auth.py,sha256=Tve5y-m1WwL6eTVpofeDv3zFSIDhwo1s26gJSy3F_1s,14729
-mcp_sharepoint/graph_api.py,sha256=63ZCx4G5BqimkYcYbibJtRYiU2UhsjN8nXp-qzPGBfA,9273
-mcp_sharepoint_us-2.0.12.dist-info/licenses/LICENSE,sha256=SRM8juGH4GjIqnl5rrp-P-S5mW5h2mINOPx5-wOZG6s,1112
-mcp_sharepoint_us-2.0.12.dist-info/METADATA,sha256=UP72g9PzIBBNJFY_Se2ECqiuocwUhj8aiwEu7ur1yY0,11413
-mcp_sharepoint_us-2.0.12.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
-mcp_sharepoint_us-2.0.12.dist-info/entry_points.txt,sha256=UZOa_7OLI41rmsErbvnSz9RahPMGQVcqZUFMphOcjbY,57
-mcp_sharepoint_us-2.0.12.dist-info/top_level.txt,sha256=R6mRoWe61lz4kUSKGV6S2XVbE7825xfC_J-ouZIYpuo,15
-mcp_sharepoint_us-2.0.12.dist-info/RECORD,,
{mcp_sharepoint_us-2.0.12.dist-info → mcp_sharepoint_us-2.0.13.dist-info}/WHEEL
File without changes

{mcp_sharepoint_us-2.0.12.dist-info → mcp_sharepoint_us-2.0.13.dist-info}/entry_points.txt
File without changes

{mcp_sharepoint_us-2.0.12.dist-info → mcp_sharepoint_us-2.0.13.dist-info}/licenses/LICENSE
File without changes

{mcp_sharepoint_us-2.0.12.dist-info → mcp_sharepoint_us-2.0.13.dist-info}/top_level.txt
File without changes