kailash 0.3.2__py3-none-any.whl → 0.4.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +33 -1
- kailash/access_control/__init__.py +129 -0
- kailash/access_control/managers.py +461 -0
- kailash/access_control/rule_evaluators.py +467 -0
- kailash/access_control_abac.py +825 -0
- kailash/config/__init__.py +27 -0
- kailash/config/database_config.py +359 -0
- kailash/database/__init__.py +28 -0
- kailash/database/execution_pipeline.py +499 -0
- kailash/middleware/__init__.py +306 -0
- kailash/middleware/auth/__init__.py +33 -0
- kailash/middleware/auth/access_control.py +436 -0
- kailash/middleware/auth/auth_manager.py +422 -0
- kailash/middleware/auth/jwt_auth.py +477 -0
- kailash/middleware/auth/kailash_jwt_auth.py +616 -0
- kailash/middleware/communication/__init__.py +37 -0
- kailash/middleware/communication/ai_chat.py +989 -0
- kailash/middleware/communication/api_gateway.py +802 -0
- kailash/middleware/communication/events.py +470 -0
- kailash/middleware/communication/realtime.py +710 -0
- kailash/middleware/core/__init__.py +21 -0
- kailash/middleware/core/agent_ui.py +890 -0
- kailash/middleware/core/schema.py +643 -0
- kailash/middleware/core/workflows.py +396 -0
- kailash/middleware/database/__init__.py +63 -0
- kailash/middleware/database/base.py +113 -0
- kailash/middleware/database/base_models.py +525 -0
- kailash/middleware/database/enums.py +106 -0
- kailash/middleware/database/migrations.py +12 -0
- kailash/{api/database.py → middleware/database/models.py} +183 -291
- kailash/middleware/database/repositories.py +685 -0
- kailash/middleware/database/session_manager.py +19 -0
- kailash/middleware/mcp/__init__.py +38 -0
- kailash/middleware/mcp/client_integration.py +585 -0
- kailash/middleware/mcp/enhanced_server.py +576 -0
- kailash/nodes/__init__.py +27 -3
- kailash/nodes/admin/__init__.py +42 -0
- kailash/nodes/admin/audit_log.py +794 -0
- kailash/nodes/admin/permission_check.py +864 -0
- kailash/nodes/admin/role_management.py +823 -0
- kailash/nodes/admin/security_event.py +1523 -0
- kailash/nodes/admin/user_management.py +944 -0
- kailash/nodes/ai/a2a.py +24 -7
- kailash/nodes/ai/ai_providers.py +248 -40
- kailash/nodes/ai/embedding_generator.py +11 -11
- kailash/nodes/ai/intelligent_agent_orchestrator.py +99 -11
- kailash/nodes/ai/llm_agent.py +436 -5
- kailash/nodes/ai/self_organizing.py +85 -10
- kailash/nodes/ai/vision_utils.py +148 -0
- kailash/nodes/alerts/__init__.py +26 -0
- kailash/nodes/alerts/base.py +234 -0
- kailash/nodes/alerts/discord.py +499 -0
- kailash/nodes/api/auth.py +287 -6
- kailash/nodes/api/rest.py +151 -0
- kailash/nodes/auth/__init__.py +17 -0
- kailash/nodes/auth/directory_integration.py +1228 -0
- kailash/nodes/auth/enterprise_auth_provider.py +1328 -0
- kailash/nodes/auth/mfa.py +2338 -0
- kailash/nodes/auth/risk_assessment.py +872 -0
- kailash/nodes/auth/session_management.py +1093 -0
- kailash/nodes/auth/sso.py +1040 -0
- kailash/nodes/base.py +344 -13
- kailash/nodes/base_cycle_aware.py +4 -2
- kailash/nodes/base_with_acl.py +1 -1
- kailash/nodes/code/python.py +283 -10
- kailash/nodes/compliance/__init__.py +9 -0
- kailash/nodes/compliance/data_retention.py +1888 -0
- kailash/nodes/compliance/gdpr.py +2004 -0
- kailash/nodes/data/__init__.py +22 -2
- kailash/nodes/data/async_connection.py +469 -0
- kailash/nodes/data/async_sql.py +757 -0
- kailash/nodes/data/async_vector.py +598 -0
- kailash/nodes/data/readers.py +767 -0
- kailash/nodes/data/retrieval.py +360 -1
- kailash/nodes/data/sharepoint_graph.py +397 -21
- kailash/nodes/data/sql.py +94 -5
- kailash/nodes/data/streaming.py +68 -8
- kailash/nodes/data/vector_db.py +54 -4
- kailash/nodes/enterprise/__init__.py +13 -0
- kailash/nodes/enterprise/batch_processor.py +741 -0
- kailash/nodes/enterprise/data_lineage.py +497 -0
- kailash/nodes/logic/convergence.py +31 -9
- kailash/nodes/logic/operations.py +14 -3
- kailash/nodes/mixins/__init__.py +8 -0
- kailash/nodes/mixins/event_emitter.py +201 -0
- kailash/nodes/mixins/mcp.py +9 -4
- kailash/nodes/mixins/security.py +165 -0
- kailash/nodes/monitoring/__init__.py +7 -0
- kailash/nodes/monitoring/performance_benchmark.py +2497 -0
- kailash/nodes/rag/__init__.py +284 -0
- kailash/nodes/rag/advanced.py +1615 -0
- kailash/nodes/rag/agentic.py +773 -0
- kailash/nodes/rag/conversational.py +999 -0
- kailash/nodes/rag/evaluation.py +875 -0
- kailash/nodes/rag/federated.py +1188 -0
- kailash/nodes/rag/graph.py +721 -0
- kailash/nodes/rag/multimodal.py +671 -0
- kailash/nodes/rag/optimized.py +933 -0
- kailash/nodes/rag/privacy.py +1059 -0
- kailash/nodes/rag/query_processing.py +1335 -0
- kailash/nodes/rag/realtime.py +764 -0
- kailash/nodes/rag/registry.py +547 -0
- kailash/nodes/rag/router.py +837 -0
- kailash/nodes/rag/similarity.py +1854 -0
- kailash/nodes/rag/strategies.py +566 -0
- kailash/nodes/rag/workflows.py +575 -0
- kailash/nodes/security/__init__.py +19 -0
- kailash/nodes/security/abac_evaluator.py +1411 -0
- kailash/nodes/security/audit_log.py +103 -0
- kailash/nodes/security/behavior_analysis.py +1893 -0
- kailash/nodes/security/credential_manager.py +401 -0
- kailash/nodes/security/rotating_credentials.py +760 -0
- kailash/nodes/security/security_event.py +133 -0
- kailash/nodes/security/threat_detection.py +1103 -0
- kailash/nodes/testing/__init__.py +9 -0
- kailash/nodes/testing/credential_testing.py +499 -0
- kailash/nodes/transform/__init__.py +10 -2
- kailash/nodes/transform/chunkers.py +592 -1
- kailash/nodes/transform/processors.py +484 -14
- kailash/nodes/validation.py +321 -0
- kailash/runtime/access_controlled.py +1 -1
- kailash/runtime/async_local.py +41 -7
- kailash/runtime/docker.py +1 -1
- kailash/runtime/local.py +474 -55
- kailash/runtime/parallel.py +1 -1
- kailash/runtime/parallel_cyclic.py +1 -1
- kailash/runtime/testing.py +210 -2
- kailash/security.py +1 -1
- kailash/utils/migrations/__init__.py +25 -0
- kailash/utils/migrations/generator.py +433 -0
- kailash/utils/migrations/models.py +231 -0
- kailash/utils/migrations/runner.py +489 -0
- kailash/utils/secure_logging.py +342 -0
- kailash/workflow/__init__.py +16 -0
- kailash/workflow/cyclic_runner.py +3 -4
- kailash/workflow/graph.py +70 -2
- kailash/workflow/resilience.py +249 -0
- kailash/workflow/templates.py +726 -0
- {kailash-0.3.2.dist-info → kailash-0.4.1.dist-info}/METADATA +256 -20
- kailash-0.4.1.dist-info/RECORD +227 -0
- kailash/api/__init__.py +0 -17
- kailash/api/__main__.py +0 -6
- kailash/api/studio_secure.py +0 -893
- kailash/mcp/__main__.py +0 -13
- kailash/mcp/server_new.py +0 -336
- kailash/mcp/servers/__init__.py +0 -12
- kailash-0.3.2.dist-info/RECORD +0 -136
- {kailash-0.3.2.dist-info → kailash-0.4.1.dist-info}/WHEEL +0 -0
- {kailash-0.3.2.dist-info → kailash-0.4.1.dist-info}/entry_points.txt +0 -0
- {kailash-0.3.2.dist-info → kailash-0.4.1.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.3.2.dist-info → kailash-0.4.1.dist-info}/top_level.txt +0 -0
kailash/middleware/core/schema.py (new file, +643)
@@ -0,0 +1,643 @@
+"""
+Dynamic Schema Generation for Kailash Middleware
+
+Provides schema generation for nodes, workflows, and UI components to enable
+dynamic frontend form generation and validation.
+"""
+
+import inspect
+import logging
+from datetime import datetime, timezone
+from enum import Enum
+from typing import Any, Dict, List, Optional, Type, Union, get_type_hints
+
+from ...nodes.base import Node, NodeParameter
+from ...workflow import Workflow
+
+logger = logging.getLogger(__name__)
+
+
+class SchemaType(str, Enum):
+    """Schema field types for frontend UI generation."""
+
+    STRING = "string"
+    INTEGER = "integer"
+    FLOAT = "float"
+    BOOLEAN = "boolean"
+    ARRAY = "array"
+    OBJECT = "object"
+    ENUM = "enum"
+    FILE = "file"
+    COLOR = "color"
+    DATE = "date"
+    DATETIME = "datetime"
+    EMAIL = "email"
+    URL = "url"
+    PASSWORD = "password"
+    TEXTAREA = "textarea"
+    SELECT = "select"
+    MULTISELECT = "multiselect"
+    SLIDER = "slider"
+    TOGGLE = "toggle"
+
+
+class UIWidget(str, Enum):
+    """UI widget types for enhanced form rendering."""
+
+    INPUT = "input"
+    TEXTAREA = "textarea"
+    SELECT = "select"
+    MULTISELECT = "multiselect"
+    CHECKBOX = "checkbox"
+    RADIO = "radio"
+    SLIDER = "slider"
+    TOGGLE = "toggle"
+    FILE_UPLOAD = "file_upload"
+    COLOR_PICKER = "color_picker"
+    DATE_PICKER = "date_picker"
+    DATETIME_PICKER = "datetime_picker"
+    JSON_EDITOR = "json_editor"
+    CODE_EDITOR = "code_editor"
+
+
+class FieldSchema:
+    """Schema for a single form field."""
+
+    def __init__(
+        self,
+        name: str,
+        type: SchemaType,
+        widget: UIWidget = None,
+        label: str = None,
+        description: str = None,
+        required: bool = False,
+        default: Any = None,
+        options: List[Dict[str, Any]] = None,
+        validation: Dict[str, Any] = None,
+        ui_hints: Dict[str, Any] = None,
+    ):
+        self.name = name
+        self.type = type
+        self.widget = widget or self._default_widget_for_type(type)
+        self.label = label or name.replace("_", " ").title()
+        self.description = description
+        self.required = required
+        self.default = default
+        self.options = options or []
+        self.validation = validation or {}
+        self.ui_hints = ui_hints or {}
+
+    def _default_widget_for_type(self, schema_type: SchemaType) -> UIWidget:
+        """Get default widget for schema type."""
+        widget_map = {
+            SchemaType.STRING: UIWidget.INPUT,
+            SchemaType.INTEGER: UIWidget.INPUT,
+            SchemaType.FLOAT: UIWidget.INPUT,
+            SchemaType.BOOLEAN: UIWidget.TOGGLE,
+            SchemaType.ARRAY: UIWidget.MULTISELECT,
+            SchemaType.OBJECT: UIWidget.JSON_EDITOR,
+            SchemaType.ENUM: UIWidget.SELECT,
+            SchemaType.FILE: UIWidget.FILE_UPLOAD,
+            SchemaType.COLOR: UIWidget.COLOR_PICKER,
+            SchemaType.DATE: UIWidget.DATE_PICKER,
+            SchemaType.DATETIME: UIWidget.DATETIME_PICKER,
+            SchemaType.EMAIL: UIWidget.INPUT,
+            SchemaType.URL: UIWidget.INPUT,
+            SchemaType.PASSWORD: UIWidget.INPUT,
+            SchemaType.TEXTAREA: UIWidget.TEXTAREA,
+        }
+        return widget_map.get(schema_type, UIWidget.INPUT)
+
+    def to_dict(self) -> Dict[str, Any]:
+        """Convert field schema to dictionary."""
+        return {
+            "name": self.name,
+            "type": self.type.value,
+            "widget": self.widget.value,
+            "label": self.label,
+            "description": self.description,
+            "required": self.required,
+            "default": self.default,
+            "options": self.options,
+            "validation": self.validation,
+            "ui_hints": self.ui_hints,
+        }
+
+
+class NodeSchemaGenerator:
+    """Generates schemas for nodes to enable dynamic UI creation."""
+
+    def __init__(self):
+        self.type_mapping = {
+            str: SchemaType.STRING,
+            int: SchemaType.INTEGER,
+            float: SchemaType.FLOAT,
+            bool: SchemaType.BOOLEAN,
+            list: SchemaType.ARRAY,
+            dict: SchemaType.OBJECT,
+        }
+
+    def generate_node_schema(self, node_class: Type[Node]) -> Dict[str, Any]:
+        """Generate complete schema for a node class."""
+        try:
+            # Get node metadata
+            node_metadata = self._extract_node_metadata(node_class)
+
+            # Generate parameter schemas
+            parameter_schemas = self._generate_parameter_schemas(node_class)
+
+            # Generate input/output schemas
+            input_schemas = self._generate_input_schemas(node_class)
+            output_schemas = self._generate_output_schemas(node_class)
+
+            # Combine into complete schema
+            schema = {
+                "node_type": node_class.__name__,
+                "category": getattr(node_class, "category", "general"),
+                "description": node_metadata["description"],
+                "version": node_metadata["version"],
+                "tags": node_metadata["tags"],
+                "parameters": parameter_schemas,
+                "inputs": input_schemas,
+                "outputs": output_schemas,
+                "ui_config": self._generate_ui_config(node_class),
+                "validation_rules": self._generate_validation_rules(node_class),
+                "examples": self._extract_examples(node_class),
+            }
+
+            return schema
+
+        except Exception as e:
+            logger.error(f"Error generating schema for {node_class.__name__}: {e}")
+            return self._fallback_schema(node_class)
+
+    def _extract_node_metadata(self, node_class: Type[Node]) -> Dict[str, Any]:
+        """Extract metadata from node class."""
+        doc = inspect.getdoc(node_class) or ""
+
+        # Parse docstring for structured metadata
+        description = doc.split("\n")[0] if doc else node_class.__name__
+
+        return {
+            "description": description,
+            "version": getattr(node_class, "__version__", "1.0.0"),
+            "tags": getattr(node_class, "tags", []),
+            "author": getattr(node_class, "__author__", ""),
+            "documentation": doc,
+        }
+
+    def _generate_parameter_schemas(
+        self, node_class: Type[Node]
+    ) -> List[Dict[str, Any]]:
+        """Generate schemas for node parameters."""
+        schemas = []
+
+        try:
+            # Try to get parameters from node class
+            if hasattr(node_class, "get_parameters"):
+                # Create a temporary instance to get parameters
+                try:
+                    temp_instance = node_class("temp")
+                    parameters = temp_instance.get_parameters()
+
+                    for param_name, param in parameters.items():
+                        if isinstance(param, NodeParameter):
+                            field_schema = self._convert_node_parameter_to_schema(
+                                param_name, param
+                            )
+                            schemas.append(field_schema.to_dict())
+                except Exception as e:
+                    logger.warning(
+                        f"Could not instantiate {node_class.__name__} for parameter extraction: {e}"
+                    )
+
+            # Fallback: extract from __init__ signature
+            if not schemas:
+                schemas = self._extract_from_init_signature(node_class)
+
+        except Exception as e:
+            logger.error(
+                f"Error generating parameter schemas for {node_class.__name__}: {e}"
+            )
+
+        return schemas
+
+    def _convert_node_parameter_to_schema(
+        self, name: str, param: NodeParameter
+    ) -> FieldSchema:
+        """Convert NodeParameter to FieldSchema."""
+        # Map NodeParameter type to SchemaType
+        schema_type = self._map_type_to_schema_type(param.type)
+
+        # Determine widget based on parameter properties
+        widget = None
+        if hasattr(param, "widget"):
+            widget = param.widget
+        elif param.type is bool:
+            widget = UIWidget.TOGGLE
+        elif hasattr(param, "choices") and param.choices:
+            widget = UIWidget.SELECT
+
+        # Extract validation rules
+        validation = {}
+        if hasattr(param, "min_value") and param.min_value is not None:
+            validation["min"] = param.min_value
+        if hasattr(param, "max_value") and param.max_value is not None:
+            validation["max"] = param.max_value
+        if hasattr(param, "pattern") and param.pattern:
+            validation["pattern"] = param.pattern
+
+        # Extract options for enums/choices
+        options = []
+        if hasattr(param, "choices") and param.choices:
+            options = [
+                {"value": choice, "label": str(choice)} for choice in param.choices
+            ]
+
+        return FieldSchema(
+            name=name,
+            type=schema_type,
+            widget=widget,
+            label=getattr(param, "label", None),
+            description=getattr(param, "description", None),
+            required=param.required,
+            default=param.default,
+            options=options,
+            validation=validation,
+        )
+
+    def _map_type_to_schema_type(self, python_type: Type) -> SchemaType:
+        """Map Python type to SchemaType."""
+        if python_type in self.type_mapping:
+            return self.type_mapping[python_type]
+
+        # Handle special types
+        if python_type is str:
+            return SchemaType.STRING
+        elif python_type in (int, float):
+            return SchemaType.INTEGER if python_type is int else SchemaType.FLOAT
+        elif python_type is bool:
+            return SchemaType.BOOLEAN
+        elif hasattr(python_type, "__origin__"):
+            # Handle generic types like List[str], Dict[str, Any]
+            origin = python_type.__origin__
+            if origin in (list, List):
+                return SchemaType.ARRAY
+            elif origin in (dict, Dict):
+                return SchemaType.OBJECT
+
+        # Default to string for unknown types
+        return SchemaType.STRING
+
+    def _extract_from_init_signature(
+        self, node_class: Type[Node]
+    ) -> List[Dict[str, Any]]:
+        """Extract parameter schemas from __init__ signature as fallback."""
+        schemas = []
+
+        try:
+            sig = inspect.signature(node_class.__init__)
+            type_hints = get_type_hints(node_class.__init__)
+
+            for param_name, param in sig.parameters.items():
+                if param_name in ("self", "name"):  # Skip self and name parameters
+                    continue
+
+                param_type = type_hints.get(param_name, str)
+                schema_type = self._map_type_to_schema_type(param_type)
+
+                field_schema = FieldSchema(
+                    name=param_name,
+                    type=schema_type,
+                    required=param.default == param.empty,
+                    default=param.default if param.default != param.empty else None,
+                )
+
+                schemas.append(field_schema.to_dict())
+
+        except Exception as e:
+            logger.error(f"Error extracting from init signature: {e}")
+
+        return schemas
+
+    def _generate_input_schemas(self, node_class: Type[Node]) -> List[Dict[str, Any]]:
+        """Generate schemas for node inputs."""
+        # This would analyze the node's process method to determine inputs
+        # For now, return a generic input schema
+        return [
+            {
+                "name": "input",
+                "type": "object",
+                "description": "Input data for the node",
+                "required": True,
+            }
+        ]
+
+    def _generate_output_schemas(self, node_class: Type[Node]) -> List[Dict[str, Any]]:
+        """Generate schemas for node outputs."""
+        # This would analyze the node's process method to determine outputs
+        # For now, return a generic output schema
+        return [
+            {
+                "name": "output",
+                "type": "object",
+                "description": "Output data from the node",
+            }
+        ]
+
+    def _generate_ui_config(self, node_class: Type[Node]) -> Dict[str, Any]:
+        """Generate UI configuration for the node."""
+        return {
+            "icon": getattr(node_class, "icon", "🔧"),
+            "color": getattr(node_class, "color", "#3498db"),
+            "size": getattr(node_class, "size", {"width": 200, "height": 100}),
+            "ports": {
+                "input": {"position": "left", "color": "#2ecc71"},
+                "output": {"position": "right", "color": "#e74c3c"},
+            },
+        }
+
+    def _generate_validation_rules(self, node_class: Type[Node]) -> Dict[str, Any]:
+        """Generate validation rules for the node."""
+        return {
+            "required_parameters": [],
+            "parameter_dependencies": {},
+            "custom_validation": None,
+        }
+
+    def _extract_examples(self, node_class: Type[Node]) -> List[Dict[str, Any]]:
+        """Extract usage examples from node documentation."""
+        # This would parse docstrings or look for example attributes
+        return []
+
+    def _fallback_schema(self, node_class: Type[Node]) -> Dict[str, Any]:
+        """Generate minimal fallback schema when normal generation fails."""
+        return {
+            "node_type": node_class.__name__,
+            "category": "unknown",
+            "description": f"Node: {node_class.__name__}",
+            "version": "1.0.0",
+            "tags": [],
+            "parameters": [],
+            "inputs": [{"name": "input", "type": "object"}],
+            "outputs": [{"name": "output", "type": "object"}],
+            "ui_config": {"icon": "❓", "color": "#95a5a6"},
+            "validation_rules": {},
+            "examples": [],
+        }
+
+
+class WorkflowSchemaGenerator:
+    """Generates schemas for workflows."""
+
+    def __init__(self, node_schema_generator: NodeSchemaGenerator = None):
+        self.node_generator = node_schema_generator or NodeSchemaGenerator()
+
+    def generate_workflow_schema(self, workflow: Workflow) -> Dict[str, Any]:
+        """Generate schema for a workflow."""
+        try:
+            # Get workflow metadata
+            workflow_metadata = {
+                "workflow_id": workflow.workflow_id,
+                "name": workflow.name,
+                "description": workflow.description,
+                "version": workflow.version,
+                "created_at": datetime.now(timezone.utc).isoformat(),
+                "tags": getattr(workflow, "tags", []),
+            }
+
+            # Generate node schemas
+            node_schemas = {}
+            for node_id, node in workflow.nodes.items():
+                node_schemas[node_id] = self.node_generator.generate_node_schema(
+                    type(node)
+                )
+                node_schemas[node_id]["instance_id"] = node_id
+                node_schemas[node_id]["instance_name"] = getattr(node, "name", node_id)
+
+            # Generate connection schemas
+            connection_schemas = []
+            for connection in workflow.connections:
+                connection_schemas.append(
+                    {
+                        "source_node": connection.source_node,
+                        "source_output": connection.source_output,
+                        "target_node": connection.target_node,
+                        "target_input": connection.target_input,
+                        "mapping": getattr(connection, "mapping", {}),
+                    }
+                )
+
+            # Generate execution schema
+            execution_schema = {
+                "input_parameters": self._extract_workflow_inputs(workflow),
+                "output_parameters": self._extract_workflow_outputs(workflow),
+                "execution_order": self._determine_execution_order(workflow),
+            }
+
+            return {
+                "metadata": workflow_metadata,
+                "nodes": node_schemas,
+                "connections": connection_schemas,
+                "execution": execution_schema,
+                "ui_layout": self._generate_ui_layout(workflow),
+            }
+
+        except Exception as e:
+            logger.error(f"Error generating workflow schema: {e}")
+            return self._fallback_workflow_schema(workflow)
+
+    def _extract_workflow_inputs(self, workflow: Workflow) -> List[Dict[str, Any]]:
+        """Extract input parameters for the workflow."""
+        # Find nodes with no incoming connections
+        input_nodes = []
+        for node_id, node in workflow.nodes.items():
+            has_incoming = any(
+                conn.target_node == node_id for conn in workflow.connections
+            )
+            if not has_incoming:
+                input_nodes.append(
+                    {
+                        "node_id": node_id,
+                        "node_type": type(node).__name__,
+                        "parameters": [],  # Would extract from node schema
+                    }
+                )
+        return input_nodes
+
+    def _extract_workflow_outputs(self, workflow: Workflow) -> List[Dict[str, Any]]:
+        """Extract output parameters for the workflow."""
+        # Find nodes with no outgoing connections
+        output_nodes = []
+        for node_id, node in workflow.nodes.items():
+            has_outgoing = any(
+                conn.source_node == node_id for conn in workflow.connections
+            )
+            if not has_outgoing:
+                output_nodes.append(
+                    {
+                        "node_id": node_id,
+                        "node_type": type(node).__name__,
+                        "outputs": [],  # Would extract from node schema
+                    }
+                )
+        return output_nodes
+
+    def _determine_execution_order(self, workflow: Workflow) -> List[str]:
+        """Determine execution order of nodes."""
+        # Simple topological sort (would be more sophisticated in practice)
+        order = []
+        remaining_nodes = set(workflow.nodes.keys())
+
+        while remaining_nodes:
+            # Find nodes with no pending dependencies
+            ready_nodes = []
+            for node_id in remaining_nodes:
+                dependencies = [
+                    conn.source_node
+                    for conn in workflow.connections
+                    if conn.target_node == node_id
+                    and conn.source_node in remaining_nodes
+                ]
+                if not dependencies:
+                    ready_nodes.append(node_id)
+
+            if not ready_nodes:
+                # Circular dependency - add remaining arbitrarily
+                ready_nodes = list(remaining_nodes)
+
+            for node_id in ready_nodes:
+                order.append(node_id)
+                remaining_nodes.remove(node_id)
+
+        return order
+
+    def _generate_ui_layout(self, workflow: Workflow) -> Dict[str, Any]:
+        """Generate UI layout information for the workflow."""
+        return {
+            "type": "directed_graph",
+            "auto_layout": True,
+            "node_spacing": {"x": 250, "y": 150},
+            "grid": {"enabled": True, "size": 20},
+            "zoom": {"min": 0.1, "max": 3.0, "default": 1.0},
+        }
+
+    def _fallback_workflow_schema(self, workflow: Workflow) -> Dict[str, Any]:
+        """Generate minimal fallback schema for workflow."""
+        return {
+            "metadata": {
+                "workflow_id": workflow.workflow_id,
+                "name": workflow.name,
+                "description": workflow.description
+                or "Workflow schema generation failed",
+                "version": "1.0.0",
+            },
+            "nodes": {},
+            "connections": [],
+            "execution": {
+                "input_parameters": [],
+                "output_parameters": [],
+                "execution_order": [],
+            },
+            "ui_layout": {"type": "directed_graph"},
+        }
+
+
+class DynamicSchemaRegistry:
+    """Registry for managing and caching generated schemas."""
+
+    def __init__(self):
+        self.node_generator = NodeSchemaGenerator()
+        self.workflow_generator = WorkflowSchemaGenerator(self.node_generator)
+        self.node_schemas_cache: Dict[str, Dict[str, Any]] = {}
+        self.workflow_schemas_cache: Dict[str, Dict[str, Any]] = {}
+        self.schema_metadata = {
+            "created_at": datetime.now(timezone.utc),
+            "schemas_generated": 0,
+            "cache_hits": 0,
+            "cache_misses": 0,
+        }
+
+    def get_node_schema(
+        self, node_class: Type[Node], use_cache: bool = True
+    ) -> Dict[str, Any]:
+        """Get schema for a node class with caching."""
+        class_name = node_class.__name__
+
+        if use_cache and class_name in self.node_schemas_cache:
+            self.schema_metadata["cache_hits"] += 1
+            return self.node_schemas_cache[class_name]
+
+        self.schema_metadata["cache_misses"] += 1
+        schema = self.node_generator.generate_node_schema(node_class)
+
+        if use_cache:
+            self.node_schemas_cache[class_name] = schema
+
+        self.schema_metadata["schemas_generated"] += 1
+        return schema
+
+    def get_workflow_schema(
+        self, workflow: Workflow, use_cache: bool = True
+    ) -> Dict[str, Any]:
+        """Get schema for a workflow with caching."""
+        workflow_id = workflow.workflow_id
+
+        if use_cache and workflow_id in self.workflow_schemas_cache:
+            self.schema_metadata["cache_hits"] += 1
+            return self.workflow_schemas_cache[workflow_id]
+
+        self.schema_metadata["cache_misses"] += 1
+        schema = self.workflow_generator.generate_workflow_schema(workflow)
+
+        if use_cache:
+            self.workflow_schemas_cache[workflow_id] = schema
+
+        self.schema_metadata["schemas_generated"] += 1
+        return schema
+
+    def get_all_node_schemas(
+        self, node_classes: List[Type[Node]]
+    ) -> Dict[str, Dict[str, Any]]:
+        """Get schemas for multiple node classes."""
+        schemas = {}
+        for node_class in node_classes:
+            schemas[node_class.__name__] = self.get_node_schema(node_class)
+        return schemas
+
+    def invalidate_cache(self, node_class: Type[Node] = None, workflow_id: str = None):
+        """Invalidate cached schemas."""
+        if node_class:
+            class_name = node_class.__name__
+            if class_name in self.node_schemas_cache:
+                del self.node_schemas_cache[class_name]
+
+        if workflow_id:
+            if workflow_id in self.workflow_schemas_cache:
+                del self.workflow_schemas_cache[workflow_id]
+
+        if node_class is None and workflow_id is None:
+            # Clear all caches
+            self.node_schemas_cache.clear()
+            self.workflow_schemas_cache.clear()
+
+    def get_stats(self) -> Dict[str, Any]:
+        """Get schema registry statistics."""
+        return {
+            **self.schema_metadata,
+            "cached_node_schemas": len(self.node_schemas_cache),
+            "cached_workflow_schemas": len(self.workflow_schemas_cache),
+            "cache_hit_rate": (
+                self.schema_metadata["cache_hits"]
+                / (
+                    self.schema_metadata["cache_hits"]
+                    + self.schema_metadata["cache_misses"]
+                )
+                if (
+                    self.schema_metadata["cache_hits"]
+                    + self.schema_metadata["cache_misses"]
+                )
+                > 0
+                else 0
+            ),
+        }
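
The sketch below is a minimal illustration of how this new module could be exercised, not an official example from the package. It assumes kailash 0.4.1 is installed, the import path is inferred from the file list above (kailash/middleware/core/schema.py), and the field name "batch_size" and its values are purely hypothetical. It only calls classes and methods defined in the diff above.

# Hypothetical usage sketch for the new schema module.
from kailash.middleware.core.schema import (
    DynamicSchemaRegistry,
    FieldSchema,
    SchemaType,
)

# Build a single form-field schema by hand; INTEGER falls back to the
# default INPUT widget via _default_widget_for_type().
field = FieldSchema(
    name="batch_size",                       # hypothetical parameter name
    type=SchemaType.INTEGER,
    description="Records processed per run",
    required=True,
    default=100,
    validation={"min": 1, "max": 10_000},
)
print(field.to_dict())  # JSON-serializable dict for the frontend form renderer

# The registry caches generated node/workflow schemas and tracks hit rates.
registry = DynamicSchemaRegistry()
print(registry.get_stats())  # schemas_generated, cache_hits, cache_misses, cache_hit_rate, ...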