kailash 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +31 -0
- kailash/__main__.py +11 -0
- kailash/cli/__init__.py +5 -0
- kailash/cli/commands.py +563 -0
- kailash/manifest.py +778 -0
- kailash/nodes/__init__.py +23 -0
- kailash/nodes/ai/__init__.py +26 -0
- kailash/nodes/ai/agents.py +417 -0
- kailash/nodes/ai/models.py +488 -0
- kailash/nodes/api/__init__.py +52 -0
- kailash/nodes/api/auth.py +567 -0
- kailash/nodes/api/graphql.py +480 -0
- kailash/nodes/api/http.py +598 -0
- kailash/nodes/api/rate_limiting.py +572 -0
- kailash/nodes/api/rest.py +665 -0
- kailash/nodes/base.py +1032 -0
- kailash/nodes/base_async.py +128 -0
- kailash/nodes/code/__init__.py +32 -0
- kailash/nodes/code/python.py +1021 -0
- kailash/nodes/data/__init__.py +125 -0
- kailash/nodes/data/readers.py +496 -0
- kailash/nodes/data/sharepoint_graph.py +623 -0
- kailash/nodes/data/sql.py +380 -0
- kailash/nodes/data/streaming.py +1168 -0
- kailash/nodes/data/vector_db.py +964 -0
- kailash/nodes/data/writers.py +529 -0
- kailash/nodes/logic/__init__.py +6 -0
- kailash/nodes/logic/async_operations.py +702 -0
- kailash/nodes/logic/operations.py +551 -0
- kailash/nodes/transform/__init__.py +5 -0
- kailash/nodes/transform/processors.py +379 -0
- kailash/runtime/__init__.py +6 -0
- kailash/runtime/async_local.py +356 -0
- kailash/runtime/docker.py +697 -0
- kailash/runtime/local.py +434 -0
- kailash/runtime/parallel.py +557 -0
- kailash/runtime/runner.py +110 -0
- kailash/runtime/testing.py +347 -0
- kailash/sdk_exceptions.py +307 -0
- kailash/tracking/__init__.py +7 -0
- kailash/tracking/manager.py +885 -0
- kailash/tracking/metrics_collector.py +342 -0
- kailash/tracking/models.py +535 -0
- kailash/tracking/storage/__init__.py +0 -0
- kailash/tracking/storage/base.py +113 -0
- kailash/tracking/storage/database.py +619 -0
- kailash/tracking/storage/filesystem.py +543 -0
- kailash/utils/__init__.py +0 -0
- kailash/utils/export.py +924 -0
- kailash/utils/templates.py +680 -0
- kailash/visualization/__init__.py +62 -0
- kailash/visualization/api.py +732 -0
- kailash/visualization/dashboard.py +951 -0
- kailash/visualization/performance.py +808 -0
- kailash/visualization/reports.py +1471 -0
- kailash/workflow/__init__.py +15 -0
- kailash/workflow/builder.py +245 -0
- kailash/workflow/graph.py +827 -0
- kailash/workflow/mermaid_visualizer.py +628 -0
- kailash/workflow/mock_registry.py +63 -0
- kailash/workflow/runner.py +302 -0
- kailash/workflow/state.py +238 -0
- kailash/workflow/visualization.py +588 -0
- kailash-0.1.0.dist-info/METADATA +710 -0
- kailash-0.1.0.dist-info/RECORD +69 -0
- kailash-0.1.0.dist-info/WHEEL +5 -0
- kailash-0.1.0.dist-info/entry_points.txt +2 -0
- kailash-0.1.0.dist-info/licenses/LICENSE +21 -0
- kailash-0.1.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,710 @@
Metadata-Version: 2.4
Name: kailash
Version: 0.1.0
Summary: Python SDK for the Kailash container-node architecture
Home-page: https://github.com/integrum/kailash-python-sdk
Author: Integrum
Author-email: Integrum <info@integrum.com>
Project-URL: Homepage, https://github.com/integrum/kailash-python-sdk
Project-URL: Bug Tracker, https://github.com/integrum/kailash-python-sdk/issues
Classifier: Development Status :: 3 - Alpha
Classifier: Intended Audience :: Developers
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Requires-Python: >=3.11
Description-Content-Type: text/markdown
License-File: LICENSE
Requires-Dist: networkx>=2.7
Requires-Dist: pydantic>=1.9
Requires-Dist: matplotlib>=3.5
Requires-Dist: pyyaml>=6.0
Requires-Dist: click>=8.0
Requires-Dist: pytest>=8.3.5
Requires-Dist: mcp[cli]>=1.9.0
Requires-Dist: pandas>=2.2.3
Requires-Dist: numpy>=2.2.5
Requires-Dist: scipy>=1.15.3
Requires-Dist: scikit-learn>=1.6.1
Requires-Dist: requests>=2.32.3
Requires-Dist: pytest-cov>=6.1.1
Requires-Dist: isort>=6.0.1
Requires-Dist: aiohttp>=3.12.4
Requires-Dist: ruff>=0.11.12
Requires-Dist: msal>=1.32.3
Requires-Dist: sphinx>=8.2.3
Requires-Dist: sphinx-rtd-theme>=3.0.2
Requires-Dist: sphinx-copybutton>=0.5.2
Requires-Dist: sphinxcontrib-mermaid>=1.0.0
Requires-Dist: sphinx-autobuild>=2024.10.3
Requires-Dist: autodoc>=0.5.0
Requires-Dist: myst-parser>=4.0.1
Requires-Dist: black>=25.1.0
Requires-Dist: psutil>=7.0.0
Requires-Dist: fastapi[all]>=0.115.12
Requires-Dist: pytest-asyncio>=1.0.0
Requires-Dist: pre-commit>=4.2.0
Provides-Extra: dev
Requires-Dist: pytest>=7.0; extra == "dev"
Requires-Dist: pytest-cov>=3.0; extra == "dev"
Requires-Dist: black>=22.0; extra == "dev"
Requires-Dist: isort>=5.10; extra == "dev"
Requires-Dist: mypy>=0.9; extra == "dev"
Dynamic: author
Dynamic: home-page
Dynamic: license-file
Dynamic: requires-python

# Kailash Python SDK

<p align="center">
<img src="https://img.shields.io/badge/python-3.11+-blue.svg" alt="Python 3.8+">
|
63
|
+
<img src="https://img.shields.io/badge/license-MIT-green.svg" alt="MIT License">
|
64
|
+
<img src="https://img.shields.io/badge/code%20style-black-000000.svg" alt="Code style: black">
|
65
|
+
<img src="https://img.shields.io/badge/tests-544%20passing-brightgreen.svg" alt="Tests: 544 passing">
|
66
|
+
<img src="https://img.shields.io/badge/coverage-100%25-brightgreen.svg" alt="Coverage: 100%">
|
67
|
+
</p>
|
68
|
+
|
69
|
+
<p align="center">
|
70
|
+
<strong>A Pythonic SDK for the Kailash container-node architecture</strong>
|
71
|
+
</p>
|
72
|
+
|
73
|
+
<p align="center">
|
74
|
+
Build workflows that seamlessly integrate with Kailash's production environment while maintaining the flexibility to prototype quickly and iterate locally.
|
75
|
+
</p>
|
76
|
+
|
77
|
+
---
|
78
|
+
|
79
|
+
## ✨ Highlights
|
80
|
+
|
81
|
+
- 🚀 **Rapid Prototyping**: Create and test workflows locally without containerization
|
82
|
+
- 🏗️ **Architecture-Aligned**: Automatically ensures compliance with Kailash standards
|
83
|
+
- 🔄 **Seamless Handoff**: Export prototypes directly to production-ready formats
|
84
|
+
- 📊 **Real-time Monitoring**: Live dashboards with WebSocket streaming and performance metrics
|
85
|
+
- 🧩 **Extensible**: Easy to create custom nodes for domain-specific operations
|
86
|
+
- ⚡ **Fast Installation**: Uses `uv` for lightning-fast Python package management
|
87
|
+
|
88
|
+
## 🎯 Who Is This For?
|
89
|
+
|
90
|
+
The Kailash Python SDK is designed for:
|
91
|
+
|
92
|
+
- **AI Business Coaches (ABCs)** who need to prototype workflows quickly
|
93
|
+
- **Data Scientists** building ML pipelines compatible with production infrastructure
|
94
|
+
- **Engineers** who want to test Kailash workflows locally before deployment
|
95
|
+
- **Teams** looking to standardize their workflow development process
|
96
|
+
|
97
|
+
## 🚀 Quick Start
|
98
|
+
|
99
|
+
### Installation
|
100
|
+
|
101
|
+
```bash
|
102
|
+
# Install uv if you haven't already
|
103
|
+
curl -LsSf https://astral.sh/uv/install.sh | sh
|
104
|
+
|
105
|
+
# For users: Install from PyPI
|
106
|
+
uv pip install kailash
|
107
|
+
|
108
|
+
# For developers: Clone and sync
|
109
|
+
git clone https://github.com/integrum/kailash-python-sdk.git
|
110
|
+
cd kailash-python-sdk
|
111
|
+
uv sync
|
112
|
+
```
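
A quick way to confirm the install worked (assuming the `kailash` console script shipped with this package is on your `PATH`) is to ask the CLI for its help text:

```bash
# Should list the available subcommands if the package installed correctly
kailash --help
```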

### Your First Workflow

```python
from kailash.workflow import Workflow
from kailash.nodes.data import CSVReader
from kailash.nodes.code import PythonCodeNode
from kailash.runtime.local import LocalRuntime
import pandas as pd

# Create a workflow
workflow = Workflow("customer_analysis", name="customer_analysis")

# Add data reader
reader = CSVReader(file_path="customers.csv")
workflow.add_node("read_customers", reader)

# Add custom processing using Python code
def analyze_customers(data):
    """Analyze customer data and compute metrics."""
    df = pd.DataFrame(data)
    # Convert total_spent to numeric
    df['total_spent'] = pd.to_numeric(df['total_spent'])
    return {
        "total_customers": len(df),
        "avg_spend": df["total_spent"].mean(),
        "top_customers": df.nlargest(10, "total_spent").to_dict("records")
    }

analyzer = PythonCodeNode.from_function(analyze_customers, name="analyzer")
workflow.add_node("analyze", analyzer)

# Connect nodes
workflow.connect("read_customers", "analyze", {"data": "data"})

# Run locally
runtime = LocalRuntime()
results, run_id = runtime.execute(workflow)
print(f"Analysis complete! Results: {results}")

# Export for production
from kailash.utils.export import WorkflowExporter
exporter = WorkflowExporter()
workflow.save("customer_analysis.yaml", format="yaml")
```
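
If you want to run this example without an existing dataset, a throwaway script like the one below can generate a `customers.csv` to feed it. Only the `total_spent` column is required by `analyze_customers` above; the other column names are illustrative assumptions.

```python
# generate_sample_customers.py -- build a small CSV for the Quick Start example
import pandas as pd

# Hypothetical schema; only "total_spent" is actually used by the analysis step
sample = pd.DataFrame({
    "customer_id": range(1, 21),
    "name": [f"Customer {i}" for i in range(1, 21)],
    "total_spent": [round(50 + i * 17.5, 2) for i in range(1, 21)],
})
sample.to_csv("customers.csv", index=False)
print(f"Wrote customers.csv with {len(sample)} rows")
```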

### SharePoint Integration Example

```python
from kailash.workflow import Workflow
from kailash.nodes.data import SharePointGraphReader, CSVWriter
import os

# Create workflow for SharePoint file processing
workflow = Workflow("sharepoint_processor", name="sharepoint_processor")

# Configure SharePoint reader (using environment variables)
sharepoint = SharePointGraphReader()
workflow.add_node("read_sharepoint", sharepoint)

# Process downloaded files
csv_writer = CSVWriter()
workflow.add_node("save_locally", csv_writer)

# Connect nodes
workflow.connect("read_sharepoint", "save_locally")

# Execute with credentials
from kailash.runtime.local import LocalRuntime

inputs = {
    "read_sharepoint": {
        "tenant_id": os.getenv("SHAREPOINT_TENANT_ID"),
        "client_id": os.getenv("SHAREPOINT_CLIENT_ID"),
        "client_secret": os.getenv("SHAREPOINT_CLIENT_SECRET"),
        "site_url": "https://yourcompany.sharepoint.com/sites/YourSite",
        "operation": "list_files",
        "library_name": "Documents"
    }
}

runtime = LocalRuntime()
results, run_id = runtime.execute(workflow, inputs=inputs)
```

## 📚 Documentation

| Resource | Description |
|----------|-------------|
| 📖 [User Guide](docs/user-guide.md) | Comprehensive guide for using the SDK |
| 🏛️ [Architecture](docs/adr/) | Architecture Decision Records |
| 📋 [API Reference](docs/api/) | Detailed API documentation |
| 🌐 [API Integration Guide](examples/API_INTEGRATION_README.md) | Complete API integration documentation |
| 🎓 [Examples](examples/) | Working examples and tutorials |
| 🤝 [Contributing](CONTRIBUTING.md) | Contribution guidelines |

## 🛠️ Features

### 📦 Pre-built Nodes

The SDK includes a rich set of pre-built nodes for common operations:

<table>
<tr>
<td width="50%">

**Data Operations**
- `CSVReader` - Read CSV files
- `JSONReader` - Read JSON files
- `SQLDatabaseNode` - Query databases
- `CSVWriter` - Write CSV files
- `JSONWriter` - Write JSON files

</td>
<td width="50%">

**Processing Nodes**
- `PythonCodeNode` - Custom Python logic
- `DataTransformer` - Transform data
- `Filter` - Filter records
- `Aggregator` - Aggregate data
- `TextProcessor` - Process text

</td>
</tr>
<tr>
<td width="50%">

**AI/ML Nodes**
- `EmbeddingNode` - Generate embeddings
- `VectorDatabaseNode` - Vector search
- `ModelPredictorNode` - ML predictions
- `LLMNode` - LLM integration

</td>
<td width="50%">

**API Integration Nodes**
- `HTTPRequestNode` - HTTP requests
- `RESTAPINode` - REST API client
- `GraphQLClientNode` - GraphQL queries
- `OAuth2AuthNode` - OAuth 2.0 authentication
- `RateLimitedAPINode` - Rate-limited API calls

**Other Integration Nodes**
- `KafkaConsumerNode` - Kafka streaming
- `WebSocketNode` - WebSocket connections
- `EmailNode` - Send emails

**SharePoint Integration**
- `SharePointGraphReader` - Read SharePoint files
- `SharePointGraphWriter` - Upload to SharePoint

**Real-time Monitoring**
- `RealTimeDashboard` - Live workflow monitoring
- `WorkflowPerformanceReporter` - Comprehensive reports
- `SimpleDashboardAPI` - REST API for metrics
- `DashboardAPIServer` - WebSocket streaming server

</td>
</tr>
</table>
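
Pre-built nodes plug into the same `Workflow` / `add_node` / `connect` pattern shown in the Quick Start. The sketch below chains a reader, a custom transform, and a writer; constructor arguments such as the writer's `file_path` and the connection mappings are assumptions based on the examples above rather than a full API reference.

```python
from kailash.workflow import Workflow
from kailash.nodes.data import CSVReader, CSVWriter
from kailash.nodes.code import PythonCodeNode
from kailash.runtime.local import LocalRuntime

def keep_big_spenders(data):
    """Illustrative transform: keep rows whose total_spent exceeds 100."""
    return {"data": [row for row in data if float(row.get("total_spent", 0)) > 100]}

workflow = Workflow("filter_pipeline", name="filter_pipeline")
workflow.add_node("read", CSVReader(file_path="customers.csv"))
workflow.add_node("filter", PythonCodeNode.from_function(keep_big_spenders, name="filter"))
workflow.add_node("write", CSVWriter(file_path="big_spenders.csv"))  # file_path is an assumed parameter

workflow.connect("read", "filter", {"data": "data"})
workflow.connect("filter", "write", {"data": "data"})

results, run_id = LocalRuntime().execute(workflow)
```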

### 🔧 Core Capabilities

#### Workflow Management
```python
from kailash.workflow import Workflow

# Create complex workflows with branching logic
workflow = Workflow("data_pipeline", name="data_pipeline")

# Add conditional branching
validator = ValidationNode()
workflow.add_node("validate", validator)

# Different paths based on validation
workflow.add_node("process_valid", processor_a)
workflow.add_node("handle_errors", error_handler)

# Connect with conditions
workflow.connect("validate", "process_valid", condition="is_valid")
workflow.connect("validate", "handle_errors", condition="has_errors")
```

#### Immutable State Management
```python
from kailash.workflow.state import WorkflowStateWrapper
from pydantic import BaseModel

# Define state model
class MyStateModel(BaseModel):
    counter: int = 0
    status: str = "pending"
    nested: dict = {}

# Create and wrap state object
state = MyStateModel()
state_wrapper = workflow.create_state_wrapper(state)

# Single path-based update
updated_wrapper = state_wrapper.update_in(
    ["counter"],
    42
)

# Batch update multiple fields atomically
updated_wrapper = state_wrapper.batch_update([
    (["counter"], 10),
    (["status"], "processing")
])

# Execute workflow with state management
final_state, results = workflow.execute_with_state(state_model=state)
```

#### Task Tracking
```python
from kailash.tracking import TaskManager

# Initialize task manager
task_manager = TaskManager()

# Create a sample workflow
from kailash.workflow import Workflow
workflow = Workflow("sample_workflow", name="Sample Workflow")

# Run workflow with tracking
from kailash.runtime.local import LocalRuntime
runtime = LocalRuntime()
results, run_id = runtime.execute(workflow, task_manager=task_manager)

# Query execution history
runs = task_manager.list_runs(status="completed", limit=10)
details = task_manager.get_run(run_id)
```

#### Local Testing
```python
from kailash.runtime.local import LocalRuntime

# Create test runtime with debugging enabled
runtime = LocalRuntime(debug=True)

# Execute with test data
test_data = {"customers": [...]}
results, run_id = runtime.execute(workflow, inputs=test_data)

# Validate results
assert results["node_id"]["output_key"] == expected_value
```
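
The same pattern drops into an automated pytest check. This is only a sketch built from the calls documented above; the exact input/output key conventions are assumptions drawn from the earlier snippets, and the SDK's dedicated `kailash.runtime.testing` helpers are not shown here.

```python
# test_quickstart_workflow.py -- hypothetical pytest wrapper around a LocalRuntime run
from kailash.workflow import Workflow
from kailash.nodes.code import PythonCodeNode
from kailash.runtime.local import LocalRuntime

def double_values(data):
    """Toy transform that gives the test something deterministic to assert on."""
    return {"doubled": [x * 2 for x in data]}

def test_workflow_executes_and_returns_run_id():
    workflow = Workflow("test_pipeline", name="test_pipeline")
    workflow.add_node("double", PythonCodeNode.from_function(double_values, name="double"))

    runtime = LocalRuntime(debug=True)
    results, run_id = runtime.execute(workflow, inputs={"double": {"data": [1, 2, 3]}})

    assert run_id is not None
    assert results["double"]["doubled"] == [2, 4, 6]
```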

#### Performance Monitoring & Real-time Dashboards
```python
from kailash.visualization.performance import PerformanceVisualizer
from kailash.visualization.dashboard import RealTimeDashboard, DashboardConfig
from kailash.visualization.reports import WorkflowPerformanceReporter, ReportFormat  # ReportFormat assumed to be exported here
from kailash.tracking import TaskManager
from kailash.runtime.local import LocalRuntime

# Run workflow with task tracking
task_manager = TaskManager()
runtime = LocalRuntime()
results, run_id = runtime.execute(workflow, task_manager=task_manager)

# Static performance analysis
perf_viz = PerformanceVisualizer(task_manager)
outputs = perf_viz.create_run_performance_summary(run_id, output_dir="performance_report")
perf_viz.compare_runs([run_id_1, run_id_2], output_path="comparison.png")

# Real-time monitoring dashboard
config = DashboardConfig(
    update_interval=1.0,
    max_history_points=100,
    auto_refresh=True,
    theme="light"
)

dashboard = RealTimeDashboard(task_manager, config)
dashboard.start_monitoring(run_id)

# Add real-time callbacks
def on_metrics_update(metrics):
    print(f"Tasks: {metrics.completed_tasks} completed, {metrics.active_tasks} active")

dashboard.add_metrics_callback(on_metrics_update)

# Generate live HTML dashboard
dashboard.generate_live_report("live_dashboard.html", include_charts=True)
dashboard.stop_monitoring()

# Comprehensive performance reports
reporter = WorkflowPerformanceReporter(task_manager)
report_path = reporter.generate_report(
    run_id,
    output_path="workflow_report.html",
    format=ReportFormat.HTML,
    compare_runs=[run_id_1, run_id_2]
)
```

**Real-time Dashboard Features**:
- ⚡ **Live Metrics Streaming**: Real-time task progress and resource monitoring
- 📊 **Interactive Charts**: CPU, memory, and throughput visualizations with Chart.js
- 🔌 **API Endpoints**: REST and WebSocket APIs for custom integrations
- 📈 **Performance Reports**: Multi-format reports (HTML, Markdown, JSON) with insights
- 🎯 **Bottleneck Detection**: Automatic identification of performance issues
- 📱 **Responsive Design**: Mobile-friendly dashboards with auto-refresh

**Performance Metrics Collected**:
- **Execution Timeline**: Gantt charts showing node execution order and duration
- **Resource Usage**: Real-time CPU and memory consumption
- **I/O Analysis**: Read/write operations and data transfer volumes
- **Performance Heatmaps**: Identify bottlenecks across workflow runs
- **Throughput Metrics**: Tasks per minute and completion rates
- **Error Tracking**: Failed task analysis and error patterns

#### API Integration
```python
from kailash.nodes.api import (
    HTTPRequestNode as RESTAPINode,
    # OAuth2AuthNode,
    # RateLimitedAPINode,
    # RateLimitConfig
)

# OAuth 2.0 authentication
# auth_node = OAuth2AuthNode(
#     client_id="your_client_id",
#     client_secret="your_client_secret",
#     token_url="https://api.example.com/oauth/token"
# )

# Rate-limited API client
rate_config = None  # RateLimitConfig(
#     max_requests=100,
#     time_window=60.0,
#     strategy="token_bucket"
# )

api_client = RESTAPINode(
    base_url="https://api.example.com"
    # auth_node=auth_node
)

# rate_limited_client = RateLimitedAPINode(
#     wrapped_node=api_client,
#     rate_limit_config=rate_config
# )
```
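
Like any other node, the client above can be added to a workflow and driven with per-node inputs at execution time, mirroring the SharePoint example. The input keys below (`method`, `endpoint`) are purely illustrative assumptions; see the API Integration Guide for the node's actual parameters.

```python
from kailash.workflow import Workflow
from kailash.nodes.api import HTTPRequestNode
from kailash.runtime.local import LocalRuntime

# Wrap the HTTP client in a one-node workflow
workflow = Workflow("api_pipeline", name="api_pipeline")
workflow.add_node("fetch_users", HTTPRequestNode(base_url="https://api.example.com"))

# Hypothetical per-node inputs; the real parameter names may differ
inputs = {"fetch_users": {"method": "GET", "endpoint": "/users"}}

results, run_id = LocalRuntime().execute(workflow, inputs=inputs)
print(results["fetch_users"])
```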

#### Export Formats
```python
from kailash.utils.export import WorkflowExporter, ExportConfig

exporter = WorkflowExporter()

# Export to different formats
workflow.save("workflow.yaml", format="yaml")  # Kailash YAML format
workflow.save("workflow.json", format="json")  # JSON representation

# Export with custom configuration
config = ExportConfig(
    include_metadata=True,
    container_tag="latest"
)
workflow.save("deployment.yaml", format="yaml")
```

### 🎨 Visualization

```python
from kailash.workflow.visualization import WorkflowVisualizer

# Visualize workflow structure
visualizer = WorkflowVisualizer(workflow)
visualizer.visualize(output_path="workflow.png")

# Show in Jupyter notebook
visualizer.show()
```

## 💻 CLI Commands

The SDK includes a comprehensive CLI for workflow management:

```bash
# Project initialization
kailash init my-project --template data-pipeline

# Workflow operations
kailash validate workflow.yaml
kailash run workflow.yaml --inputs data.json
kailash export workflow.py --format kubernetes

# Task management
kailash tasks list --status running
kailash tasks show run-123
kailash tasks cancel run-123

# Development tools
kailash test workflow.yaml --data test_data.json
kailash debug workflow.yaml --breakpoint node-id
```

## 🏗️ Architecture

The SDK follows a clean, modular architecture:

```
kailash/
├── nodes/                 # Node implementations and base classes
│   ├── base.py            # Abstract Node class
│   ├── data/              # Data I/O nodes
│   ├── transform/         # Transformation nodes
│   ├── logic/             # Business logic nodes
│   └── ai/                # AI/ML nodes
├── workflow/              # Workflow management
│   ├── graph.py           # DAG representation
│   └── visualization.py   # Visualization tools
├── visualization/         # Performance visualization
│   └── performance.py     # Performance metrics charts
├── runtime/               # Execution engines
│   ├── local.py           # Local execution
│   └── docker.py          # Docker execution (planned)
├── tracking/              # Monitoring and tracking
│   ├── manager.py         # Task management
│   ├── metrics_collector.py  # Performance metrics
│   └── storage/           # Storage backends
├── cli/                   # Command-line interface
└── utils/                 # Utilities and helpers
```

## 🧪 Testing

The SDK is thoroughly tested with comprehensive test suites:

```bash
# Run all tests
uv run pytest

# Run with coverage
uv run pytest --cov=kailash --cov-report=html

# Run specific test categories
uv run pytest tests/unit/
uv run pytest tests/integration/
uv run pytest tests/e2e/
```

## 🤝 Contributing

We welcome contributions! Please see our [Contributing Guide](CONTRIBUTING.md) for details.

### Development Setup

```bash
# Clone the repository
git clone https://github.com/integrum/kailash-python-sdk.git
cd kailash-python-sdk

# Install uv if you haven't already
curl -LsSf https://astral.sh/uv/install.sh | sh

# Sync dependencies (creates venv automatically and installs everything)
uv sync

# Run commands using uv (no need to activate venv)
uv run pytest
uv run kailash --help

# Or activate the venv if you prefer
source .venv/bin/activate  # On Windows: .venv\Scripts\activate

# Install development dependencies
uv add --dev pre-commit detect-secrets doc8

# Install Trivy (macOS with Homebrew)
brew install trivy

# Set up pre-commit hooks
pre-commit install
pre-commit install --hook-type pre-push

# Run initial setup (formats code and fixes issues)
pre-commit run --all-files
```

### Code Quality & Pre-commit Hooks

We use automated pre-commit hooks to ensure code quality:

**Hooks Include:**
- **Black**: Code formatting
- **isort**: Import sorting
- **Ruff**: Fast Python linting
- **pytest**: Unit tests
- **Trivy**: Security vulnerability scanning
- **detect-secrets**: Secret detection
- **doc8**: Documentation linting
- **mypy**: Type checking

**Manual Quality Checks:**
```bash
# Format code
black src/ tests/
isort src/ tests/

# Linting and fixes
ruff check src/ tests/ --fix

# Type checking
mypy src/

# Run all pre-commit hooks manually
pre-commit run --all-files

# Run specific hooks
pre-commit run black
pre-commit run pytest-check
```

## 📈 Project Status

<table>
<tr>
<td width="40%">

### ✅ Completed
- Core node system with 15+ node types
- Workflow builder with DAG validation
- Local & async execution engines
- Task tracking with metrics
- Multiple storage backends
- Export functionality (YAML/JSON)
- CLI interface
- Immutable state management
- API integration with rate limiting
- OAuth 2.0 authentication
- SharePoint Graph API integration
- **Real-time performance metrics collection**
- **Performance visualization dashboards**
- **Real-time monitoring dashboard with WebSocket streaming**
- **Comprehensive performance reports (HTML, Markdown, JSON)**
- **100% test coverage (544 tests)**
- **15 test categories all passing**
- 21+ working examples

</td>
<td width="30%">

### 🚧 In Progress
- Comprehensive API documentation
- Security audit & hardening
- Performance optimizations
- Docker runtime finalization

</td>
<td width="30%">

### 📋 Planned
- Cloud deployment templates
- Visual workflow editor
- Plugin system
- Additional integrations

</td>
</tr>
</table>

### 🎯 Test Suite Status
- **Total Tests**: 544 passing (100%)
- **Test Categories**: 15/15 at 100%
- **Integration Tests**: 65 passing
- **Examples**: 21/21 working
- **Code Coverage**: Comprehensive

## 📄 License

This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.

## 🙏 Acknowledgments

- The Integrum team for the Kailash architecture
- All contributors who have helped shape this SDK
- The Python community for excellent tools and libraries

## 📞 Support

- 📋 [GitHub Issues](https://github.com/integrum/kailash-python-sdk/issues)
- 📧 Email: support@integrum.com
- 💬 Slack: [Join our community](https://integrum.slack.com/kailash-sdk)

---

<p align="center">
  Made with ❤️ by the Integrum Team
</p>