kailash 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69)
  1. kailash/__init__.py +31 -0
  2. kailash/__main__.py +11 -0
  3. kailash/cli/__init__.py +5 -0
  4. kailash/cli/commands.py +563 -0
  5. kailash/manifest.py +778 -0
  6. kailash/nodes/__init__.py +23 -0
  7. kailash/nodes/ai/__init__.py +26 -0
  8. kailash/nodes/ai/agents.py +417 -0
  9. kailash/nodes/ai/models.py +488 -0
  10. kailash/nodes/api/__init__.py +52 -0
  11. kailash/nodes/api/auth.py +567 -0
  12. kailash/nodes/api/graphql.py +480 -0
  13. kailash/nodes/api/http.py +598 -0
  14. kailash/nodes/api/rate_limiting.py +572 -0
  15. kailash/nodes/api/rest.py +665 -0
  16. kailash/nodes/base.py +1032 -0
  17. kailash/nodes/base_async.py +128 -0
  18. kailash/nodes/code/__init__.py +32 -0
  19. kailash/nodes/code/python.py +1021 -0
  20. kailash/nodes/data/__init__.py +125 -0
  21. kailash/nodes/data/readers.py +496 -0
  22. kailash/nodes/data/sharepoint_graph.py +623 -0
  23. kailash/nodes/data/sql.py +380 -0
  24. kailash/nodes/data/streaming.py +1168 -0
  25. kailash/nodes/data/vector_db.py +964 -0
  26. kailash/nodes/data/writers.py +529 -0
  27. kailash/nodes/logic/__init__.py +6 -0
  28. kailash/nodes/logic/async_operations.py +702 -0
  29. kailash/nodes/logic/operations.py +551 -0
  30. kailash/nodes/transform/__init__.py +5 -0
  31. kailash/nodes/transform/processors.py +379 -0
  32. kailash/runtime/__init__.py +6 -0
  33. kailash/runtime/async_local.py +356 -0
  34. kailash/runtime/docker.py +697 -0
  35. kailash/runtime/local.py +434 -0
  36. kailash/runtime/parallel.py +557 -0
  37. kailash/runtime/runner.py +110 -0
  38. kailash/runtime/testing.py +347 -0
  39. kailash/sdk_exceptions.py +307 -0
  40. kailash/tracking/__init__.py +7 -0
  41. kailash/tracking/manager.py +885 -0
  42. kailash/tracking/metrics_collector.py +342 -0
  43. kailash/tracking/models.py +535 -0
  44. kailash/tracking/storage/__init__.py +0 -0
  45. kailash/tracking/storage/base.py +113 -0
  46. kailash/tracking/storage/database.py +619 -0
  47. kailash/tracking/storage/filesystem.py +543 -0
  48. kailash/utils/__init__.py +0 -0
  49. kailash/utils/export.py +924 -0
  50. kailash/utils/templates.py +680 -0
  51. kailash/visualization/__init__.py +62 -0
  52. kailash/visualization/api.py +732 -0
  53. kailash/visualization/dashboard.py +951 -0
  54. kailash/visualization/performance.py +808 -0
  55. kailash/visualization/reports.py +1471 -0
  56. kailash/workflow/__init__.py +15 -0
  57. kailash/workflow/builder.py +245 -0
  58. kailash/workflow/graph.py +827 -0
  59. kailash/workflow/mermaid_visualizer.py +628 -0
  60. kailash/workflow/mock_registry.py +63 -0
  61. kailash/workflow/runner.py +302 -0
  62. kailash/workflow/state.py +238 -0
  63. kailash/workflow/visualization.py +588 -0
  64. kailash-0.1.0.dist-info/METADATA +710 -0
  65. kailash-0.1.0.dist-info/RECORD +69 -0
  66. kailash-0.1.0.dist-info/WHEEL +5 -0
  67. kailash-0.1.0.dist-info/entry_points.txt +2 -0
  68. kailash-0.1.0.dist-info/licenses/LICENSE +21 -0
  69. kailash-0.1.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,623 @@
1
+ """SharePoint Graph API connector nodes for the Kailash SDK.
2
+
3
+ This module provides nodes for connecting to SharePoint using Microsoft Graph API.
4
+ It supports modern authentication with MSAL and provides better compatibility
5
+ with Azure AD app registrations.
6
+
7
+ Design purpose:
8
+ - Enable seamless integration with SharePoint via Graph API
9
+ - Support app-only authentication with client credentials
10
+ - Provide operations for file management and search
11
+ - Align with database persistence requirements for orchestration
12
+
13
+ Upstream dependencies:
14
+ - Base node classes from kailash.nodes.base
15
+ - MSAL library for authentication
16
+ - Microsoft Graph API
17
+
18
+ Downstream consumers:
19
+ - Workflows that need to interact with SharePoint
20
+ - Orchestration systems reading from MongoDB
21
+ - Long-running workflows with state persistence
22
+ """
23
+
24
import os
from pathlib import Path
from typing import Any, Dict, List, Optional
from urllib.parse import quote

import requests

from kailash.nodes.base import Node, NodeMetadata, NodeParameter
from kailash.sdk_exceptions import (
    NodeConfigurationError,
    NodeExecutionError,
    NodeValidationError,
)
36
+
37
+
38
class SharePointGraphReader(Node):
    """Node for reading files from SharePoint using Microsoft Graph API.

    This node uses Microsoft Graph API with MSAL authentication, providing
    better compatibility with modern Azure AD app registrations compared
    to the legacy SharePoint REST API.

    Key features:
        1. Modern authentication with MSAL
        2. Support for listing, downloading, and searching files
        3. Folder navigation and library support
        4. Stateless design for orchestration compatibility
        5. JSON-serializable outputs for database persistence

    Usage patterns:
        1. List files in document libraries
        2. Download files to local storage
        3. Search for files by name
        4. Navigate folder structures

    Example:
        ```python
        reader = SharePointGraphReader()
        result = reader.execute(
            tenant_id="your-tenant-id",
            client_id="your-client-id",
            client_secret="your-secret",
            site_url="https://company.sharepoint.com/sites/project",
            operation="list_files",
            library_name="Documents",
            folder_path="Reports/2024",
        )
        ```
    """

    # Base URL for all Microsoft Graph v1.0 endpoints.
    _GRAPH_BASE = "https://graph.microsoft.com/v1.0"

    # Seconds before any Graph HTTP call is abandoned. Without an explicit
    # timeout, `requests` waits indefinitely, which can hang long-running
    # orchestrated workflows on a stalled connection.
    _REQUEST_TIMEOUT = 60

    def get_metadata(self) -> NodeMetadata:
        """Get node metadata for discovery and orchestration."""
        return NodeMetadata(
            name="SharePoint Graph Reader",
            description="Read files from SharePoint using Microsoft Graph API",
            tags={"sharepoint", "graph", "reader", "cloud", "microsoft"},
            version="2.0.0",
            author="Kailash SDK",
        )

    def get_parameters(self) -> Dict[str, NodeParameter]:
        """Define input parameters for SharePoint Graph operations.

        All parameters are declared optional so workflows can inject them at
        runtime; `run` enforces which ones are actually required per operation.
        """
        return {
            "tenant_id": NodeParameter(
                name="tenant_id",
                type=str,
                required=False,
                description="Azure AD tenant ID",
            ),
            "client_id": NodeParameter(
                name="client_id",
                type=str,
                required=False,
                description="Azure AD app client ID",
            ),
            "client_secret": NodeParameter(
                name="client_secret",
                type=str,
                required=False,
                description="Azure AD app client secret",
            ),
            "site_url": NodeParameter(
                name="site_url",
                type=str,
                required=False,
                description="SharePoint site URL",
            ),
            "operation": NodeParameter(
                name="operation",
                type=str,
                required=False,
                default="list_files",
                description="Operation: list_files, download_file, search_files, list_libraries",
            ),
            "library_name": NodeParameter(
                name="library_name",
                type=str,
                required=False,
                default="Documents",
                description="Document library name",
            ),
            "folder_path": NodeParameter(
                name="folder_path",
                type=str,
                required=False,
                default="",
                description="Folder path within library",
            ),
            "file_name": NodeParameter(
                name="file_name",
                type=str,
                required=False,
                description="File name for download operation",
            ),
            "local_path": NodeParameter(
                name="local_path",
                type=str,
                required=False,
                description="Local path to save downloaded file",
            ),
            "search_query": NodeParameter(
                name="search_query",
                type=str,
                required=False,
                description="Search query for finding files",
            ),
        }

    def _authenticate(
        self, tenant_id: str, client_id: str, client_secret: str
    ) -> Dict[str, Any]:
        """Authenticate with Microsoft Graph API using MSAL client credentials.

        Returns:
            Dict with the raw access token and ready-to-use request headers,
            keeping the node stateless (no token cached on the instance).

        Raises:
            NodeConfigurationError: If the optional ``msal`` dependency is missing.
            NodeExecutionError: If token acquisition fails.
        """
        try:
            import msal
        except ImportError as exc:
            # Chain the original ImportError so the root cause stays visible.
            raise NodeConfigurationError(
                "MSAL library not installed. Install with: pip install msal"
            ) from exc

        app = msal.ConfidentialClientApplication(
            client_id=client_id,
            client_credential=client_secret,
            authority=f"https://login.microsoftonline.com/{tenant_id}",
        )

        result = app.acquire_token_for_client(
            scopes=["https://graph.microsoft.com/.default"]
        )

        if "access_token" not in result:
            error_msg = result.get("error_description", "Unknown authentication error")
            raise NodeExecutionError(f"Authentication failed: {error_msg}")

        return {
            "token": result["access_token"],
            "headers": {
                "Authorization": f"Bearer {result['access_token']}",
                "Accept": "application/json",
                "Content-Type": "application/json",
            },
        }

    def _get_site_data(self, site_url: str, headers: Dict[str, str]) -> Dict[str, Any]:
        """Get SharePoint site data from Graph API.

        Raises:
            NodeExecutionError: If the site lookup returns a non-200 status.
        """
        # Convert a SharePoint URL such as
        # "https://company.sharepoint.com/sites/project" into the
        # "hostname:/server-relative-path" site-ID form Graph expects.
        site_id = site_url.replace("https://", "").replace(
            ".sharepoint.com", ".sharepoint.com:"
        )
        site_endpoint = f"{self._GRAPH_BASE}/sites/{site_id}"

        response = requests.get(
            site_endpoint, headers=headers, timeout=self._REQUEST_TIMEOUT
        )
        if response.status_code == 200:
            return response.json()
        raise NodeExecutionError(
            f"Failed to get site data: {response.status_code} - {response.text}"
        )

    def _list_libraries(
        self, site_id: str, headers: Dict[str, str]
    ) -> List[Dict[str, Any]]:
        """List all document libraries (drives) in the site.

        Raises:
            NodeExecutionError: If the drives lookup fails.
        """
        drives_url = f"{self._GRAPH_BASE}/sites/{site_id}/drives"
        response = requests.get(
            drives_url, headers=headers, timeout=self._REQUEST_TIMEOUT
        )

        if response.status_code == 200:
            return response.json()["value"]
        raise NodeExecutionError(
            f"Failed to get libraries: {response.status_code} - {response.text}"
        )

    def _get_drive_id(
        self, site_id: str, library_name: str, headers: Dict[str, str]
    ) -> Optional[str]:
        """Get the drive ID for a specific library.

        Matching is a case-insensitive substring test, so "Documents" also
        matches e.g. "Shared Documents". Returns None when nothing matches.
        """
        libraries = self._list_libraries(site_id, headers)
        for lib in libraries:
            if library_name.lower() in lib["name"].lower():
                return lib["id"]
        return None

    def _list_files(
        self, site_id: str, library_name: str, folder_path: str, headers: Dict[str, str]
    ) -> Dict[str, Any]:
        """List files and folders in a specific library and folder.

        Returns:
            JSON-serializable dict with file and folder summaries.

        Raises:
            NodeExecutionError: If the library is not found or the request fails.
        """
        drive_id = self._get_drive_id(site_id, library_name, headers)
        if not drive_id:
            raise NodeExecutionError(f"Library '{library_name}' not found")

        # Build URL based on folder path; percent-encode path segments so
        # names with spaces or special characters form a valid URL
        # ("/" stays literal as the segment separator).
        if folder_path:
            folder_path = folder_path.strip("/")
            encoded_path = quote(folder_path, safe="/")
            files_url = f"{self._GRAPH_BASE}/drives/{drive_id}/root:/{encoded_path}:/children"
        else:
            files_url = f"{self._GRAPH_BASE}/drives/{drive_id}/root/children"

        response = requests.get(
            files_url, headers=headers, timeout=self._REQUEST_TIMEOUT
        )

        if response.status_code != 200:
            raise NodeExecutionError(
                f"Failed to list files: {response.status_code} - {response.text}"
            )

        items = response.json()["value"]

        files = []
        folders = []

        for item in items:
            # Graph marks drive items with a "file" or "folder" facet.
            if "file" in item:
                files.append(
                    {
                        "name": item["name"],
                        "id": item["id"],
                        "size": item["size"],
                        "modified": item["lastModifiedDateTime"],
                        "download_url": item.get("@microsoft.graph.downloadUrl"),
                    }
                )
            elif "folder" in item:
                folders.append(
                    {
                        "name": item["name"],
                        "id": item["id"],
                        "child_count": item.get("folder", {}).get("childCount", 0),
                    }
                )

        return {
            "library_name": library_name,
            "folder_path": folder_path,
            "file_count": len(files),
            "folder_count": len(folders),
            "files": files,
            "folders": folders,
        }

    def _download_file(
        self,
        site_id: str,
        library_name: str,
        file_name: str,
        folder_path: str,
        local_path: str,
        headers: Dict[str, str],
    ) -> Dict[str, Any]:
        """Download a file from SharePoint to a local path.

        Raises:
            NodeExecutionError: If the library/file is not found or the
                download fails.
        """
        drive_id = self._get_drive_id(site_id, library_name, headers)
        if not drive_id:
            raise NodeExecutionError(f"Library '{library_name}' not found")

        # Build the drive-relative file path.
        if folder_path:
            folder_path = folder_path.strip("/")
            file_path = f"{folder_path}/{file_name}"
        else:
            file_path = file_name

        # Get file metadata (includes the short-lived download URL).
        encoded_file_path = quote(file_path, safe="/")
        file_url = f"{self._GRAPH_BASE}/drives/{drive_id}/root:/{encoded_file_path}"
        response = requests.get(
            file_url, headers=headers, timeout=self._REQUEST_TIMEOUT
        )

        if response.status_code != 200:
            raise NodeExecutionError(
                f"File '{file_name}' not found: {response.status_code} - {response.text}"
            )

        file_data = response.json()
        download_url = file_data.get("@microsoft.graph.downloadUrl")
        if not download_url:
            # Defensive: the facet is usually present on file items, but a
            # missing key would otherwise surface as a raw KeyError.
            raise NodeExecutionError(
                f"No download URL available for file '{file_name}'"
            )

        # Download the file content. The pre-authenticated download URL does
        # not require the Authorization header.
        file_response = requests.get(download_url, timeout=self._REQUEST_TIMEOUT)

        if file_response.status_code != 200:
            raise NodeExecutionError(
                f"Failed to download file: {file_response.status_code}"
            )

        # Default the local destination to the SharePoint file name.
        if not local_path:
            local_path = file_name

        # Ensure the destination directory exists.
        Path(local_path).parent.mkdir(parents=True, exist_ok=True)

        with open(local_path, "wb") as f:
            f.write(file_response.content)

        return {
            "file_name": file_name,
            "file_path": file_path,
            "local_path": local_path,
            "file_size": len(file_response.content),
            "downloaded": True,
        }

    def _search_files(
        self, site_id: str, library_name: str, query: str, headers: Dict[str, str]
    ) -> Dict[str, Any]:
        """Search for files in a library by name/content query.

        Raises:
            NodeExecutionError: If the library is not found or the search fails.
        """
        drive_id = self._get_drive_id(site_id, library_name, headers)
        if not drive_id:
            raise NodeExecutionError(f"Library '{library_name}' not found")

        # Escape single quotes per OData convention ('' inside a quoted
        # literal) and percent-encode the result so queries containing
        # quotes, spaces, or reserved characters form a valid URL.
        safe_query = quote(query.replace("'", "''"), safe="")
        search_url = (
            f"{self._GRAPH_BASE}/drives/{drive_id}/root/search(q='{safe_query}')"
        )
        response = requests.get(
            search_url, headers=headers, timeout=self._REQUEST_TIMEOUT
        )

        if response.status_code != 200:
            raise NodeExecutionError(
                f"Search failed: {response.status_code} - {response.text}"
            )

        items = response.json()["value"]

        files = []
        for item in items:
            if "file" in item:
                files.append(
                    {
                        "name": item["name"],
                        "id": item["id"],
                        "size": item["size"],
                        "modified": item["lastModifiedDateTime"],
                        "parent_path": item.get("parentReference", {}).get(
                            "path", ""
                        ),
                    }
                )

        return {
            "query": query,
            "library_name": library_name,
            "result_count": len(files),
            "files": files,
        }

    def run(self, **kwargs) -> Dict[str, Any]:
        """Execute a SharePoint Graph operation.

        This method is stateless and returns JSON-serializable results
        suitable for database persistence and orchestration.

        Raises:
            NodeValidationError: If required parameters are missing or the
                operation name is invalid.
            NodeExecutionError: If authentication or the Graph call fails.
        """
        # Validate required credentials and target site.
        tenant_id = kwargs.get("tenant_id")
        client_id = kwargs.get("client_id")
        client_secret = kwargs.get("client_secret")
        site_url = kwargs.get("site_url")

        if not all([tenant_id, client_id, client_secret, site_url]):
            raise NodeValidationError(
                "tenant_id, client_id, client_secret, and site_url are required"
            )

        # Validate the requested operation before doing any network work.
        operation = kwargs.get("operation", "list_files")
        valid_operations = [
            "list_files",
            "download_file",
            "search_files",
            "list_libraries",
        ]
        if operation not in valid_operations:
            raise NodeValidationError(
                f"Invalid operation '{operation}'. Must be one of: {', '.join(valid_operations)}"
            )

        # Authenticate and resolve the target site.
        auth_data = self._authenticate(tenant_id, client_id, client_secret)
        headers = auth_data["headers"]
        site_data = self._get_site_data(site_url, headers)
        site_id = site_data["id"]

        # Dispatch to the requested operation.
        if operation == "list_libraries":
            libraries = self._list_libraries(site_id, headers)
            return {
                "site_name": site_data["displayName"],
                "library_count": len(libraries),
                "libraries": [
                    {"name": lib["name"], "id": lib["id"], "web_url": lib.get("webUrl")}
                    for lib in libraries
                ],
            }

        elif operation == "list_files":
            library_name = kwargs.get("library_name", "Documents")
            folder_path = kwargs.get("folder_path", "")
            return self._list_files(site_id, library_name, folder_path, headers)

        elif operation == "download_file":
            if not kwargs.get("file_name"):
                raise NodeValidationError(
                    "file_name is required for download_file operation"
                )

            library_name = kwargs.get("library_name", "Documents")
            file_name = kwargs["file_name"]
            folder_path = kwargs.get("folder_path", "")
            local_path = kwargs.get("local_path")

            return self._download_file(
                site_id, library_name, file_name, folder_path, local_path, headers
            )

        elif operation == "search_files":
            if not kwargs.get("search_query"):
                raise NodeValidationError(
                    "search_query is required for search_files operation"
                )

            library_name = kwargs.get("library_name", "Documents")
            query = kwargs["search_query"]

            return self._search_files(site_id, library_name, query, headers)
465
+
466
+
467
class SharePointGraphWriter(Node):
    """Node for uploading files to SharePoint using Microsoft Graph API.

    This node handles file uploads to SharePoint document libraries,
    supporting folder structures and metadata.

    NOTE(review): this uses the Graph simple-upload endpoint
    (`PUT .../root:/{path}:/content`), which has a service-imposed size cap
    (documented as 4 MB by Microsoft Graph); larger files need an upload
    session — confirm against the Graph documentation before uploading
    large files.

    Example:
        ```python
        writer = SharePointGraphWriter()
        result = writer.execute(
            tenant_id="your-tenant-id",
            client_id="your-client-id",
            client_secret="your-secret",
            site_url="https://company.sharepoint.com/sites/project",
            local_path="report.pdf",
            library_name="Documents",
            folder_path="Reports/2024",
            sharepoint_name="Q4_Report_2024.pdf",
        )
        ```
    """

    # Seconds before the upload HTTP call is abandoned; prevents a stalled
    # connection from hanging the workflow indefinitely.
    _REQUEST_TIMEOUT = 60

    def get_metadata(self) -> NodeMetadata:
        """Get node metadata for discovery and orchestration."""
        return NodeMetadata(
            name="SharePoint Graph Writer",
            description="Upload files to SharePoint using Microsoft Graph API",
            tags={"sharepoint", "graph", "writer", "upload", "microsoft"},
            version="2.0.0",
            author="Kailash SDK",
        )

    def get_parameters(self) -> Dict[str, NodeParameter]:
        """Define input parameters for SharePoint upload operations.

        All parameters are declared optional so workflows can inject them at
        runtime; `run` enforces which ones are actually required.
        """
        return {
            "tenant_id": NodeParameter(
                name="tenant_id",
                type=str,
                required=False,
                description="Azure AD tenant ID",
            ),
            "client_id": NodeParameter(
                name="client_id",
                type=str,
                required=False,
                description="Azure AD app client ID",
            ),
            "client_secret": NodeParameter(
                name="client_secret",
                type=str,
                required=False,
                description="Azure AD app client secret",
            ),
            "site_url": NodeParameter(
                name="site_url",
                type=str,
                required=False,
                description="SharePoint site URL",
            ),
            "local_path": NodeParameter(
                name="local_path",
                type=str,
                required=False,
                description="Local file path to upload",
            ),
            "sharepoint_name": NodeParameter(
                name="sharepoint_name",
                type=str,
                required=False,
                description="Name for file in SharePoint (defaults to local filename)",
            ),
            "library_name": NodeParameter(
                name="library_name",
                type=str,
                required=False,
                default="Documents",
                description="Target document library",
            ),
            "folder_path": NodeParameter(
                name="folder_path",
                type=str,
                required=False,
                default="",
                description="Target folder path within library",
            ),
        }

    def run(self, **kwargs) -> Dict[str, Any]:
        """Execute a SharePoint upload operation.

        Raises:
            NodeValidationError: If required parameters are missing or the
                local file does not exist.
            NodeExecutionError: If authentication, library lookup, or the
                upload itself fails.
        """
        # Validate required credentials, target site, and source file.
        tenant_id = kwargs.get("tenant_id")
        client_id = kwargs.get("client_id")
        client_secret = kwargs.get("client_secret")
        site_url = kwargs.get("site_url")
        local_path = kwargs.get("local_path")

        if not all([tenant_id, client_id, client_secret, site_url, local_path]):
            raise NodeValidationError(
                "tenant_id, client_id, client_secret, site_url, and local_path are required"
            )

        if not os.path.exists(local_path):
            raise NodeValidationError(f"Local file '{local_path}' not found")

        # Reuse authentication and site-resolution logic from the reader node.
        reader = SharePointGraphReader()
        auth_data = reader._authenticate(tenant_id, client_id, client_secret)
        headers = auth_data["headers"]
        site_data = reader._get_site_data(site_url, headers)
        site_id = site_data["id"]

        # Resolve upload target parameters.
        library_name = kwargs.get("library_name", "Documents")
        folder_path = kwargs.get("folder_path", "")
        sharepoint_name = kwargs.get("sharepoint_name") or os.path.basename(local_path)

        # Resolve the target library's drive ID.
        drive_id = reader._get_drive_id(site_id, library_name, headers)
        if not drive_id:
            raise NodeExecutionError(f"Library '{library_name}' not found")

        # Build the drive-relative upload path.
        if folder_path:
            folder_path = folder_path.strip("/")
            upload_path = f"{folder_path}/{sharepoint_name}"
        else:
            upload_path = sharepoint_name

        # Percent-encode path segments so names with spaces or special
        # characters form a valid URL ("/" stays literal as the separator).
        encoded_upload_path = quote(upload_path, safe="/")
        upload_url = (
            f"https://graph.microsoft.com/v1.0/drives/{drive_id}"
            f"/root:/{encoded_upload_path}:/content"
        )

        upload_headers = {
            "Authorization": headers["Authorization"],
            "Content-Type": "application/octet-stream",
        }

        # Pass the open file object so `requests` streams it from disk
        # instead of loading the whole file into memory at once.
        with open(local_path, "rb") as file_content:
            response = requests.put(
                upload_url,
                headers=upload_headers,
                data=file_content,
                timeout=self._REQUEST_TIMEOUT,
            )

        # 200 = existing file replaced, 201 = new file created.
        if response.status_code in [200, 201]:
            result = response.json()
            return {
                "uploaded": True,
                "file_name": sharepoint_name,
                "file_id": result["id"],
                "file_path": upload_path,
                "library_name": library_name,
                "web_url": result.get("webUrl"),
                "size": result["size"],
                "created": result["createdDateTime"],
            }
        else:
            raise NodeExecutionError(
                f"Failed to upload file: {response.status_code} - {response.text}"
            )