kailash 0.6.6__py3-none-any.whl → 0.8.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +35 -5
- kailash/access_control.py +64 -46
- kailash/adapters/__init__.py +5 -0
- kailash/adapters/mcp_platform_adapter.py +273 -0
- kailash/api/workflow_api.py +34 -3
- kailash/channels/__init__.py +21 -0
- kailash/channels/api_channel.py +409 -0
- kailash/channels/base.py +271 -0
- kailash/channels/cli_channel.py +661 -0
- kailash/channels/event_router.py +496 -0
- kailash/channels/mcp_channel.py +648 -0
- kailash/channels/session.py +423 -0
- kailash/mcp_server/discovery.py +57 -18
- kailash/middleware/communication/api_gateway.py +23 -3
- kailash/middleware/communication/realtime.py +83 -0
- kailash/middleware/core/agent_ui.py +1 -1
- kailash/middleware/gateway/storage_backends.py +393 -0
- kailash/middleware/mcp/enhanced_server.py +22 -16
- kailash/nexus/__init__.py +21 -0
- kailash/nexus/cli/__init__.py +5 -0
- kailash/nexus/cli/__main__.py +6 -0
- kailash/nexus/cli/main.py +176 -0
- kailash/nexus/factory.py +413 -0
- kailash/nexus/gateway.py +545 -0
- kailash/nodes/__init__.py +8 -5
- kailash/nodes/ai/iterative_llm_agent.py +988 -17
- kailash/nodes/ai/llm_agent.py +29 -9
- kailash/nodes/api/__init__.py +2 -2
- kailash/nodes/api/monitoring.py +1 -1
- kailash/nodes/base.py +29 -5
- kailash/nodes/base_async.py +54 -14
- kailash/nodes/code/async_python.py +1 -1
- kailash/nodes/code/python.py +50 -6
- kailash/nodes/data/async_sql.py +90 -0
- kailash/nodes/data/bulk_operations.py +939 -0
- kailash/nodes/data/query_builder.py +373 -0
- kailash/nodes/data/query_cache.py +512 -0
- kailash/nodes/monitoring/__init__.py +10 -0
- kailash/nodes/monitoring/deadlock_detector.py +964 -0
- kailash/nodes/monitoring/performance_anomaly.py +1078 -0
- kailash/nodes/monitoring/race_condition_detector.py +1151 -0
- kailash/nodes/monitoring/transaction_metrics.py +790 -0
- kailash/nodes/monitoring/transaction_monitor.py +931 -0
- kailash/nodes/security/behavior_analysis.py +414 -0
- kailash/nodes/system/__init__.py +17 -0
- kailash/nodes/system/command_parser.py +820 -0
- kailash/nodes/transaction/__init__.py +48 -0
- kailash/nodes/transaction/distributed_transaction_manager.py +983 -0
- kailash/nodes/transaction/saga_coordinator.py +652 -0
- kailash/nodes/transaction/saga_state_storage.py +411 -0
- kailash/nodes/transaction/saga_step.py +467 -0
- kailash/nodes/transaction/transaction_context.py +756 -0
- kailash/nodes/transaction/two_phase_commit.py +978 -0
- kailash/nodes/transform/processors.py +17 -1
- kailash/nodes/validation/__init__.py +21 -0
- kailash/nodes/validation/test_executor.py +532 -0
- kailash/nodes/validation/validation_nodes.py +447 -0
- kailash/resources/factory.py +1 -1
- kailash/runtime/access_controlled.py +9 -7
- kailash/runtime/async_local.py +84 -21
- kailash/runtime/local.py +21 -2
- kailash/runtime/parameter_injector.py +187 -31
- kailash/runtime/runner.py +6 -4
- kailash/runtime/testing.py +1 -1
- kailash/security.py +22 -3
- kailash/servers/__init__.py +32 -0
- kailash/servers/durable_workflow_server.py +430 -0
- kailash/servers/enterprise_workflow_server.py +522 -0
- kailash/servers/gateway.py +183 -0
- kailash/servers/workflow_server.py +293 -0
- kailash/utils/data_validation.py +192 -0
- kailash/workflow/builder.py +382 -15
- kailash/workflow/cyclic_runner.py +102 -10
- kailash/workflow/validation.py +144 -8
- kailash/workflow/visualization.py +99 -27
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/METADATA +3 -2
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/RECORD +81 -40
- kailash/workflow/builder_improvements.py +0 -207
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/WHEEL +0 -0
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/entry_points.txt +0 -0
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/top_level.txt +0 -0
kailash/workflow/validation.py
CHANGED
@@ -390,6 +390,49 @@ class CycleLinter:
                     )
                 )
 
+                # Check for potentially problematic mappings
+                if source_param in [
+                    "result",
+                    "output",
+                    "data",
+                ] and target_param in ["result", "output", "data"]:
+                    if source_param != target_param:
+                        self.issues.append(
+                            ValidationIssue(
+                                severity=IssueSeverity.INFO,
+                                category="parameter_mapping",
+                                code="CYC010A",
+                                message=f"Generic parameter mapping '{source_param}' -> '{target_param}' in cycle {cycle_id}",
+                                cycle_id=cycle_id,
+                                suggestion="Consider using more specific parameter names for clarity",
+                                documentation_link="guide/mistakes/063-cyclic-parameter-propagation-multi-fix.md",
+                            )
+                        )
+
+                # Check for dot notation in mappings
+                if (
+                    "." in source_param
+                    and target_param == source_param.split(".")[-1]
+                ):
+                    # This is actually a good pattern - dot notation to specific field
+                    pass
+                elif "." not in source_param and "." not in target_param:
+                    # Simple mapping - check if it makes sense
+                    if source_param.startswith(
+                        "temp_"
+                    ) or target_param.startswith("temp_"):
+                        self.issues.append(
+                            ValidationIssue(
+                                severity=IssueSeverity.INFO,
+                                category="parameter_mapping",
+                                code="CYC010B",
+                                message=f"Temporary parameter mapping '{source_param}' -> '{target_param}' in cycle {cycle_id}",
+                                cycle_id=cycle_id,
+                                suggestion="Consider using permanent parameter names for production workflows",
+                                documentation_link="guide/mistakes/063-cyclic-parameter-propagation-multi-fix.md",
+                            )
+                        )
+
         # Check for missing parameter propagation
         if not mapping and len(cycle_nodes) > 1:
             self.issues.append(
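The new CYC010A/CYC010B checks only emit INFO-level hints about parameter-mapping style inside cycles. The standalone sketch below mirrors the logic added above so you can see which mappings each code would flag; the `check_mapping` helper and its return format are illustrative only, not part of the SDK API.

```python
# Illustrative re-implementation of the mapping heuristics added to CycleLinter.
GENERIC_NAMES = {"result", "output", "data"}

def check_mapping(source_param: str, target_param: str) -> list[str]:
    codes = []
    # CYC010A: two different generic names mapped onto each other
    if source_param in GENERIC_NAMES and target_param in GENERIC_NAMES:
        if source_param != target_param:
            codes.append("CYC010A")
    # Dot-notation mapping onto its trailing field is accepted silently
    if "." in source_param and target_param == source_param.split(".")[-1]:
        pass
    elif "." not in source_param and "." not in target_param:
        # CYC010B: temp_-prefixed names in a simple mapping
        if source_param.startswith("temp_") or target_param.startswith("temp_"):
            codes.append("CYC010B")
    return codes

print(check_mapping("result", "output"))        # ['CYC010A']
print(check_mapping("temp_count", "count"))     # ['CYC010B']
print(check_mapping("metrics.score", "score"))  # [] - dot notation is fine
```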
@@ -589,20 +632,53 @@ class CycleLinter:
 
     def _has_unsafe_parameter_access(self, code: str) -> bool:
         """Check if PythonCodeNode has unsafe parameter access."""
-
+        import re
+
+        # Look for direct parameter access without try/except or safety checks
         lines = code.split("\n")
 
+        # Common parameter names that might be unsafe
+        unsafe_patterns = [
+            r"\b(data|input|params|context|kwargs|args)\[",  # Direct indexing
+            r"\b(data|input|params|context|kwargs|args)\.",  # Direct attribute access
+            r"\b(data|input|params|context|kwargs|args)\.get\(",  # .get() without default
+        ]
+
+        # Safety patterns that indicate safe access
+        safety_patterns = [
+            r"try\s*:",
+            r"except\s*:",
+            r"if\s+.*\s+is\s+not\s+None\s*:",
+            r"if\s+.*\s+in\s+",
+            r"\.get\(.*,.*\)",  # .get() with default value
+            r"isinstance\s*\(",
+            r"hasattr\s*\(",
+        ]
+
+        has_unsafe_access = False
+        has_safety_checks = False
+
         for line in lines:
             line = line.strip()
             if line and not line.startswith("#"):
-                # Check for ...
-                ...
-                return True
+                # Check for unsafe patterns
+                for pattern in unsafe_patterns:
+                    if re.search(pattern, line):
+                        has_unsafe_access = True
+                        break
 
-                ...
+                # Check for safety patterns
+                for pattern in safety_patterns:
+                    if re.search(pattern, line):
+                        has_safety_checks = True
+                        break
+
+        # Also check for undefined variables (potential parameters)
+        undefined_vars = self._find_undefined_variables(code)
+        if undefined_vars:
+            has_unsafe_access = True
+
+        return has_unsafe_access and not has_safety_checks
 
     def _is_defined_before_use(self, var_name: str, code: str) -> bool:
         """Check if variable is defined before use in code."""
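The rewritten heuristic is purely regex-based: a line counts as unsafe if it touches one of the common parameter names directly, and the whole snippet is excused as soon as any safety idiom appears anywhere in it. A quick standalone sketch of the same behaviour (the `looks_unsafe` helper is illustrative, not the SDK's API) also shows the main blind spot:

```python
import re

# Mirrors the pattern lists added to _has_unsafe_parameter_access (illustrative helper).
UNSAFE = [r"\b(data|input|params|context|kwargs|args)\[",
          r"\b(data|input|params|context|kwargs|args)\."]
SAFE = [r"try\s*:", r"\.get\(.*,.*\)", r"isinstance\s*\(", r"hasattr\s*\("]

def looks_unsafe(code: str) -> bool:
    unsafe = any(re.search(p, line) for line in code.splitlines() for p in UNSAFE)
    safe = any(re.search(p, line) for line in code.splitlines() for p in SAFE)
    return unsafe and not safe

print(looks_unsafe('value = data["count"] + 1'))     # True: direct indexing, no guard
print(looks_unsafe('value = data.get("count", 0)'))  # False: .get() with a default
print(looks_unsafe('if isinstance(data, dict):\n    v = data["count"]'))
# False: a safety idiom anywhere in the snippet suppresses the flag
```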
@@ -619,6 +695,66 @@ class CycleLinter:
 
         return True
 
+    def _find_undefined_variables(self, code: str) -> list[str]:
+        """Find variables that are used but not defined in the code."""
+        import re
+
+        lines = code.split("\n")
+        defined_vars = set()
+        used_vars = set()
+
+        # Built-in variables and functions that don't need definition
+        builtin_vars = {
+            "len",
+            "sum",
+            "min",
+            "max",
+            "dict",
+            "list",
+            "set",
+            "str",
+            "int",
+            "float",
+            "bool",
+            "sorted",
+            "print",
+            "isinstance",
+            "type",
+            "hasattr",
+            "getattr",
+            "True",
+            "False",
+            "None",
+            "range",
+            "enumerate",
+            "zip",
+            "any",
+            "all",
+        }
+
+        for line in lines:
+            line = line.strip()
+            if line and not line.startswith("#"):
+                # Find variable definitions
+                if (
+                    "=" in line
+                    and not line.startswith("if")
+                    and not line.startswith("elif")
+                ):
+                    var_match = re.match(r"^([a-zA-Z_]\w*)\s*=", line)
+                    if var_match:
+                        defined_vars.add(var_match.group(1))
+
+                # Find variable uses
+                variables = re.findall(r"\b([a-zA-Z_]\w*)\b", line)
+                for var in variables:
+                    if var not in builtin_vars and not var.startswith("_"):
+                        used_vars.add(var)
+
+        # Return variables that are used but not defined
+        undefined = used_vars - defined_vars
+        return list(undefined)
+
     def _is_valid_condition_syntax(self, condition: str) -> bool:
         """Check if convergence condition has valid Python syntax."""
         try:
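Because `_find_undefined_variables` scans with regexes rather than parsing an AST, it is order-insensitive: any name assigned anywhere in the snippet counts as defined everywhere, and only simple `name = ...` assignments register as definitions. A compact sketch of the same idea (the `undefined_names` function is illustrative only):

```python
import re

BUILTINS = {"len", "print", "range", "True", "False", "None"}

def undefined_names(code: str) -> set[str]:
    """Regex approximation of CycleLinter._find_undefined_variables (illustrative)."""
    defined, used = set(), set()
    for line in code.splitlines():
        line = line.strip()
        if not line or line.startswith("#"):
            continue
        m = re.match(r"^([a-zA-Z_]\w*)\s*=", line)
        if m and not line.startswith(("if", "elif")):
            defined.add(m.group(1))
        used.update(v for v in re.findall(r"\b([a-zA-Z_]\w*)\b", line)
                    if v not in BUILTINS and not v.startswith("_"))
    return used - defined

print(undefined_names("total = count + 1"))  # {'count'}: likely an injected parameter
print(undefined_names("x = 1\ny = x + 1"))   # set(): everything is assigned somewhere
```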
kailash/workflow/visualization.py
CHANGED
@@ -18,7 +18,7 @@ class WorkflowVisualizer:
 
     def __init__(
         self,
-        workflow: Workflow,
+        workflow: Workflow | None = None,
         node_colors: dict[str, str] | None = None,
         edge_colors: dict[str, str] | None = None,
         layout: str = "hierarchical",
@@ -26,7 +26,7 @@ class WorkflowVisualizer:
         """Initialize visualizer.
 
         Args:
-            workflow: Workflow to visualize
+            workflow: Workflow to visualize (can be set later)
            node_colors: Custom node color map
            edge_colors: Custom edge color map
            layout: Layout algorithm to use
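With `workflow` now optional, a visualizer can be constructed up front and pointed at a workflow afterwards. A hypothetical usage sketch, assuming `my_workflow` is an already-built `Workflow` and that `visualize()` keeps its `title` keyword (adjust to the actual signature):

```python
from kailash.workflow.visualization import WorkflowVisualizer

# Construct without a workflow, attach one later, then render.
viz = WorkflowVisualizer(layout="hierarchical")
viz.workflow = my_workflow                 # set later, as the updated docstring allows
viz.visualize(title="Pipeline overview")   # raises ValueError if no workflow was set
```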
@@ -70,9 +70,12 @@ class WorkflowVisualizer:
         """Get colors for all nodes in workflow."""
         colors = []
         for node_id in self.workflow.graph.nodes():
-            node_instance = ...
-            ...
-            ...
+            node_instance = target_workflow.nodes.get(node_id)
+            if node_instance:
+                node_type = node_instance.node_type
+                colors.append(self._get_node_color(node_type))
+            else:
+                colors.append(self.node_colors["default"])
         return colors
 
     def _get_node_labels(self) -> dict[str, str]:
@@ -119,11 +122,15 @@ class WorkflowVisualizer:
 
         return edge_labels
 
-    def _calculate_layout(self) -> dict[str, tuple[float, float]]:
+    def _calculate_layout(self, workflow: 'Workflow' = None) -> dict[str, tuple[float, float]]:
         """Calculate node positions for visualization."""
+        target_workflow = workflow or self.workflow
+        if not target_workflow:
+            return {}
+
         # Try to use stored positions first
         pos = {}
-        for node_id, node_instance in ...
+        for node_id, node_instance in target_workflow.nodes.items():
             if node_instance.position != (0, 0):
                 pos[node_id] = node_instance.position
 
@@ -133,32 +140,69 @@ class WorkflowVisualizer:
             # Use hierarchical layout for DAGs
             try:
                 # Create layers based on topological order
-                layers = self._create_layers()
+                layers = self._create_layers(target_workflow)
                 pos = self._hierarchical_layout(layers)
             except Exception:
                 # Fallback to spring layout
-                pos = nx.spring_layout(...)
+                pos = nx.spring_layout(target_workflow.graph, k=3, iterations=50)
         elif self.layout == "circular":
-            pos = nx.circular_layout(...)
+            pos = nx.circular_layout(target_workflow.graph)
         elif self.layout == "spring":
-            pos = nx.spring_layout(...)
+            pos = nx.spring_layout(target_workflow.graph, k=2, iterations=100)
         else:
             # Default to spring layout
-            pos = nx.spring_layout(...)
+            pos = nx.spring_layout(target_workflow.graph)
 
         return pos
+
+    def _get_layout_positions(self, workflow: Workflow) -> dict[str, tuple[float, float]]:
+        """Get layout positions for workflow nodes."""
+        # Temporarily store workflow and calculate layout
+        original_workflow = self.workflow
+        self.workflow = workflow
+        try:
+            return self._calculate_layout()
+        finally:
+            self.workflow = original_workflow
+
+    def _get_node_colors(self, workflow: Workflow) -> list[str]:
+        """Get node colors for workflow."""
+        colors = []
+        for node_id in workflow.graph.nodes():
+            node_instance = workflow.get_node(node_id)
+            if node_instance:
+                node_type = node_instance.__class__.__name__.lower()
+                # Map node types to color categories
+                if "data" in node_type or "csv" in node_type or "json" in node_type:
+                    color_key = "data"
+                elif "transform" in node_type or "python" in node_type:
+                    color_key = "transform"
+                elif "switch" in node_type or "merge" in node_type:
+                    color_key = "logic"
+                elif "llm" in node_type or "ai" in node_type:
+                    color_key = "ai"
+                else:
+                    color_key = "default"
+                colors.append(self.node_colors.get(color_key, self.node_colors["default"]))
+            else:
+                colors.append(self.node_colors["default"])
+        return colors
 
-    def _create_layers(self) -> dict[int, list]:
+    def _create_layers(self, workflow: 'Workflow' = None) -> dict[int, list]:
         """Create layers of nodes for hierarchical layout."""
+        target_workflow = workflow or self.workflow
+        if not target_workflow:
+            return {}
+
         layers = {}
-        remaining = set(...)
+        remaining = set(target_workflow.graph.nodes())
         layer = 0
 
         while remaining:
             # Find nodes with no dependencies in remaining set
             current_layer = []
             for node in remaining:
-                predecessors = set(...)
+                predecessors = set(target_workflow.graph.predecessors(node))
                 if not predecessors.intersection(remaining):
                     current_layer.append(node)
 
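The new `_get_node_colors(workflow)` overload classifies nodes by substring-matching the lowercased class name. A tiny standalone sketch of that classification rule (the helper and the sample class names are illustrative):

```python
def color_category(node_class_name: str) -> str:
    """Substring-based category used by _get_node_colors (illustrative helper)."""
    t = node_class_name.lower()
    if "data" in t or "csv" in t or "json" in t:
        return "data"
    if "transform" in t or "python" in t:
        return "transform"
    if "switch" in t or "merge" in t:
        return "logic"
    if "llm" in t or "ai" in t:
        return "ai"
    return "default"

print(color_category("CSVReaderNode"))   # data
print(color_category("PythonCodeNode"))  # transform
print(color_category("SwitchNode"))      # logic
print(color_category("LLMAgentNode"))    # ai
```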
@@ -196,20 +240,34 @@ class WorkflowVisualizer:
 
     def _draw_graph(
         self,
-        ...
-        ...
-        ...
-        ...
+        workflow: Workflow | None = None,
+        pos: dict[str, tuple[float, float]] | None = None,
+        node_colors: list[str] | None = None,
+        show_labels: bool = True,
+        show_connections: bool = True,
     ) -> None:
         """Draw the graph with given positions and options."""
+        # Use provided workflow or fall back to instance workflow
+        target_workflow = workflow or self.workflow
+        if not target_workflow:
+            raise ValueError("No workflow provided to draw")
+
+        # Use default position if not provided
+        if pos is None:
+            pos = self._get_layout_positions(target_workflow)
+
+        # Use default colors if not provided
+        if node_colors is None:
+            node_colors = self._get_node_colors(target_workflow)
+
         # Draw nodes
         nx.draw_networkx_nodes(
-            ...
+            target_workflow.graph, pos, node_color=node_colors, node_size=3000, alpha=0.9
         )
 
         # Draw edges
         nx.draw_networkx_edges(
-            ...
+            target_workflow.graph,
             pos,
             edge_color=self.edge_colors["default"],
             width=2,
@@ -221,16 +279,26 @@ class WorkflowVisualizer:
 
         # Draw labels
         if show_labels:
+            # Temporarily set workflow for label generation
+            old_workflow = self.workflow
+            self.workflow = target_workflow
             labels = self._get_node_labels()
+            self.workflow = old_workflow
+
             nx.draw_networkx_labels(
-                ...
+                target_workflow.graph, pos, labels, font_size=10, font_weight="bold"
             )
 
         # Draw connection labels
         if show_connections:
+            # Temporarily set workflow for edge label generation
+            old_workflow = self.workflow
+            self.workflow = target_workflow
             edge_labels = self._get_edge_labels()
+            self.workflow = old_workflow
+
             nx.draw_networkx_edge_labels(
-                ...
+                target_workflow.graph, pos, edge_labels, font_size=8
             )
 
     def visualize(
@@ -255,10 +323,14 @@ class WorkflowVisualizer:
             **kwargs: Additional options passed to plt.savefig
         """
         try:
+            # Check if workflow is available
+            if not self.workflow:
+                raise ValueError("No workflow to visualize. Set workflow property or create visualizer with workflow.")
+
             plt.figure(figsize=figsize)
 
             # Calculate node positions
-            pos = self._calculate_layout()
+            pos = self._calculate_layout(self.workflow)
 
             # Handle empty workflow case
             if not self.workflow.graph.nodes():
@@ -266,11 +338,11 @@ class WorkflowVisualizer:
                 node_colors = []
             else:
                 # Draw the graph with colors
-                node_colors = self._get_node_colors()
+                node_colors = self._get_node_colors(self.workflow)
 
             # Draw the graph components
-            if pos ...
-                self._draw_graph(pos, node_colors, show_labels, show_connections)
+            if pos:
+                self._draw_graph(workflow=self.workflow, pos=pos, node_colors=node_colors, show_labels=show_labels, show_connections=show_connections)
 
             # Set title
             if title is None:
{kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: kailash
-Version: 0.6.6
+Version: 0.8.0
 Summary: Python SDK for the Kailash container-node architecture
 Home-page: https://github.com/integrum/kailash-python-sdk
 Author: Integrum
@@ -21,7 +21,7 @@ Requires-Dist: matplotlib>=3.5
 Requires-Dist: pyyaml>=6.0
 Requires-Dist: click>=8.0
 Requires-Dist: pytest>=8.3.5
-Requires-Dist: mcp[cli]
+Requires-Dist: mcp[cli]==1.11.0
 Requires-Dist: pandas>=2.2.3
 Requires-Dist: numpy>=2.2.5
 Requires-Dist: scipy>=1.15.3
@@ -78,6 +78,7 @@ Requires-Dist: passlib>=1.7.4
 Requires-Dist: pyotp>=2.9.0
 Requires-Dist: opentelemetry-instrumentation-fastapi>=0.55b1
 Requires-Dist: seaborn>=0.13.2
+Requires-Dist: sqlparse>=0.5.3
 Provides-Extra: dev
 Requires-Dist: pytest>=7.0; extra == "dev"
 Requires-Dist: pytest-cov>=3.0; extra == "dev"