kailash 0.9.18__py3-none-any.whl → 0.9.20__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +1 -1
- kailash/mcp_server/subscriptions.py +23 -23
- kailash/nodes/__init__.py +1 -0
- kailash/nodes/ai/ai_providers.py +237 -25
- kailash/nodes/ai/llm_agent.py +73 -15
- kailash/nodes/code/python.py +178 -15
- kailash/nodes/data/async_sql.py +93 -26
- kailash/runtime/local.py +4 -1
- kailash/workflow/builder.py +1 -1
- kailash/workflow/templates.py +2 -1
- {kailash-0.9.18.dist-info → kailash-0.9.20.dist-info}/METADATA +20 -19
- {kailash-0.9.18.dist-info → kailash-0.9.20.dist-info}/RECORD +17 -18
- kailash/nodes/ai/a2a_backup.py +0 -1807
- {kailash-0.9.18.dist-info → kailash-0.9.20.dist-info}/WHEEL +0 -0
- {kailash-0.9.18.dist-info → kailash-0.9.20.dist-info}/entry_points.txt +0 -0
- {kailash-0.9.18.dist-info → kailash-0.9.20.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.9.18.dist-info → kailash-0.9.20.dist-info}/licenses/NOTICE +0 -0
- {kailash-0.9.18.dist-info → kailash-0.9.20.dist-info}/top_level.txt +0 -0
kailash/nodes/code/python.py
CHANGED
@@ -49,11 +49,14 @@ Examples:
 import ast
 import importlib.util
 import inspect
+import json
 import logging
 import os
 import resource
 import traceback
 from collections.abc import Callable
+from datetime import date, datetime
+from decimal import Decimal
 from pathlib import Path
 from typing import Any, get_type_hints

@@ -617,6 +620,37 @@ class CodeExecutor:
         logger.error(error_msg)
         raise NodeExecutionError(error_msg)

+    def _ensure_serializable(self, data: Any) -> Any:
+        """Ensure data is JSON-serializable following AsyncSQL pattern."""
+        if data is None:
+            return None
+        elif isinstance(data, (str, int, float, bool)):
+            return data
+        elif isinstance(data, (datetime, date)):
+            return data.isoformat()
+        elif isinstance(data, Decimal):
+            return float(data)
+        elif isinstance(data, dict):
+            return {k: self._ensure_serializable(v) for k, v in data.items()}
+        elif isinstance(data, (list, tuple)):
+            return [self._ensure_serializable(item) for item in data]
+        else:
+            try:
+                json.dumps(data)
+                return data
+            except (TypeError, ValueError):
+                # Check if object has .to_dict() method for enhanced validation
+                if hasattr(data, "to_dict") and callable(getattr(data, "to_dict")):
+                    try:
+                        # Convert object to dict using its to_dict() method
+                        dict_result = data.to_dict()
+                        # Recursively ensure the dict result is also serializable
+                        return self._ensure_json_serializable(dict_result)
+                    except (TypeError, ValueError, AttributeError):
+                        # If .to_dict() exists but fails, fall back to string
+                        return str(data)
+                return str(data)
+

 class FunctionWrapper:
     """Wrapper for converting Python functions to nodes.

@@ -651,6 +685,37 @@ class FunctionWrapper:
         # Handle cases where type hints can't be resolved
         self.type_hints = {}

+    def _ensure_serializable(self, data: Any) -> Any:
+        """Ensure data is JSON-serializable."""
+        if data is None:
+            return None
+        elif isinstance(data, (str, int, float, bool)):
+            return data
+        elif isinstance(data, (datetime, date)):
+            return data.isoformat()
+        elif isinstance(data, Decimal):
+            return float(data)
+        elif isinstance(data, dict):
+            return {k: self._ensure_serializable(v) for k, v in data.items()}
+        elif isinstance(data, (list, tuple)):
+            return [self._ensure_serializable(item) for item in data]
+        else:
+            try:
+                json.dumps(data)
+                return data
+            except (TypeError, ValueError):
+                # Check if object has .to_dict() method for enhanced validation
+                if hasattr(data, "to_dict") and callable(getattr(data, "to_dict")):
+                    try:
+                        # Convert object to dict using its to_dict() method
+                        dict_result = data.to_dict()
+                        # Recursively ensure the dict result is also serializable
+                        return self._ensure_json_serializable(dict_result)
+                    except (TypeError, ValueError, AttributeError):
+                        # If .to_dict() exists but fails, fall back to string
+                        return str(data)
+                return str(data)
+
     def get_input_types(self) -> dict[str, type]:
         """Extract input types from function signature.

@@ -710,18 +775,51 @@ class FunctionWrapper:
         return self.type_hints.get("return", Any)

     def execute(self, inputs: dict[str, Any]) -> dict[str, Any]:
-        """Execute the wrapped function."""
+        """Execute the wrapped function with proper serialization."""
         result = self.executor.execute_function(self.func, inputs)

-        #
-
-        if not isinstance(result, dict):
-            result = {"result": result}
-        else:
-            # For dict results, wrap the entire dict in "result" key
-            result = {"result": result}
+        # Ensure JSON serializability inline
+        result = self._ensure_json_serializable(result)

-
+        # Smart wrapping: only wrap if result doesn't already have expected structure
+        # If function already returns {"result": value}, don't double-wrap
+        if isinstance(result, dict) and len(result) == 1 and "result" in result:
+            # Function already returned properly formatted result
+            return result
+        else:
+            # Wrap result for consistent schema validation
+            return {"result": result}
+
+    def _ensure_json_serializable(self, data: Any) -> Any:
+        """Convert data to JSON-serializable format."""
+        if data is None:
+            return None
+        elif isinstance(data, (str, int, float, bool)):
+            return data
+        elif isinstance(data, (datetime, date)):
+            return data.isoformat()
+        elif isinstance(data, Decimal):
+            return float(data)
+        elif isinstance(data, dict):
+            return {k: self._ensure_json_serializable(v) for k, v in data.items()}
+        elif isinstance(data, (list, tuple)):
+            return [self._ensure_json_serializable(item) for item in data]
+        else:
+            try:
+                json.dumps(data)
+                return data
+            except (TypeError, ValueError):
+                # Check if object has .to_dict() method for enhanced validation
+                if hasattr(data, "to_dict") and callable(getattr(data, "to_dict")):
+                    try:
+                        # Convert object to dict using its to_dict() method
+                        dict_result = data.to_dict()
+                        # Recursively ensure the dict result is also serializable
+                        return self._ensure_json_serializable(dict_result)
+                    except (TypeError, ValueError, AttributeError):
+                        # If .to_dict() exists but fails, fall back to string
+                        return str(data)
+                return str(data)

     def to_node(
         self,

@@ -837,6 +935,37 @@ class ClassWrapper:
         # Handle descriptor objects like properties
         self.type_hints = {}

+    def _ensure_serializable(self, data: Any) -> Any:
+        """Ensure data is JSON-serializable."""
+        if data is None:
+            return None
+        elif isinstance(data, (str, int, float, bool)):
+            return data
+        elif isinstance(data, (datetime, date)):
+            return data.isoformat()
+        elif isinstance(data, Decimal):
+            return float(data)
+        elif isinstance(data, dict):
+            return {k: self._ensure_serializable(v) for k, v in data.items()}
+        elif isinstance(data, (list, tuple)):
+            return [self._ensure_serializable(item) for item in data]
+        else:
+            try:
+                json.dumps(data)
+                return data
+            except (TypeError, ValueError):
+                # Check if object has .to_dict() method for enhanced validation
+                if hasattr(data, "to_dict") and callable(getattr(data, "to_dict")):
+                    try:
+                        # Convert object to dict using its to_dict() method
+                        dict_result = data.to_dict()
+                        # Recursively ensure the dict result is also serializable
+                        return self._ensure_json_serializable(dict_result)
+                    except (TypeError, ValueError, AttributeError):
+                        # If .to_dict() exists but fails, fall back to string
+                        return str(data)
+                return str(data)
+
     def get_input_types(self) -> dict[str, type]:
         """Extract input types from method signature."""
         input_types = {}

@@ -900,16 +1029,48 @@ class ClassWrapper:
         # Execute the method
         result = self.executor.execute_function(method, inputs)

-        #
-
+        # Ensure JSON serializability inline
+        result = self._ensure_json_serializable(result)
+
+        # Smart wrapping: only wrap non-dict results in "result" key
+        # Dict results are returned as-is to avoid double wrapping
         if not isinstance(result, dict):
             result = {"result": result}
-
-            # For dict results, wrap the entire dict in "result" key
-            result = {"result": result}
+        # Dict results are already properly structured, no wrapping needed

         return result

+    def _ensure_json_serializable(self, data: Any) -> Any:
+        """Convert data to JSON-serializable format."""
+        if data is None:
+            return None
+        elif isinstance(data, (str, int, float, bool)):
+            return data
+        elif isinstance(data, (datetime, date)):
+            return data.isoformat()
+        elif isinstance(data, Decimal):
+            return float(data)
+        elif isinstance(data, dict):
+            return {k: self._ensure_json_serializable(v) for k, v in data.items()}
+        elif isinstance(data, (list, tuple)):
+            return [self._ensure_json_serializable(item) for item in data]
+        else:
+            try:
+                json.dumps(data)
+                return data
+            except (TypeError, ValueError):
+                # Check if object has .to_dict() method for enhanced validation
+                if hasattr(data, "to_dict") and callable(getattr(data, "to_dict")):
+                    try:
+                        # Convert object to dict using its to_dict() method
+                        dict_result = data.to_dict()
+                        # Recursively ensure the dict result is also serializable
+                        return self._ensure_json_serializable(dict_result)
+                    except (TypeError, ValueError, AttributeError):
+                        # If .to_dict() exists but fails, fall back to string
+                        return str(data)
+                return str(data)
+
     def to_node(
         self,
         name: str | None = None,

@@ -1272,7 +1433,9 @@ class PythonCodeNode(Node):
         elif self.function:
             # Execute function
             wrapper = FunctionWrapper(self.function, self.executor)
-
+            result = wrapper.execute(kwargs)
+            # FunctionWrapper.execute() already handles result wrapping
+            return result

         elif self.class_type:
             # Execute class method
kailash/nodes/data/async_sql.py
CHANGED
@@ -1530,15 +1530,28 @@ class SQLiteAdapter(DatabaseAdapter):

             if fetch_mode == FetchMode.ONE:
                 row = await cursor.fetchone()
-
+                result = self._convert_row(dict(row)) if row else None
             elif fetch_mode == FetchMode.ALL:
                 rows = await cursor.fetchall()
-
+                result = [self._convert_row(dict(row)) for row in rows]
             elif fetch_mode == FetchMode.MANY:
                 if not fetch_size:
                     raise ValueError("fetch_size required for MANY mode")
                 rows = await cursor.fetchmany(fetch_size)
-
+                result = [self._convert_row(dict(row)) for row in rows]
+            else:
+                result = []
+
+            # Check if this was an INSERT and capture lastrowid for SQLite
+            if query.strip().upper().startswith("INSERT") and (
+                not result or result == [] or result is None
+            ):
+                # For INSERT without RETURNING, capture lastrowid
+                lastrowid = cursor.lastrowid if hasattr(cursor, "lastrowid") else None
+                if lastrowid is not None:
+                    return {"lastrowid": lastrowid}
+
+            return result
         else:
             # Create new connection for non-transactional queries
             if self._is_memory_db:

@@ -1557,6 +1570,19 @@ class SQLiteAdapter(DatabaseAdapter):
                         raise ValueError("fetch_size required for MANY mode")
                     rows = await cursor.fetchmany(fetch_size)
                     result = [self._convert_row(dict(row)) for row in rows]
+                else:
+                    result = []
+
+                # Check if this was an INSERT and capture lastrowid for SQLite
+                if query.strip().upper().startswith("INSERT") and (
+                    not result or result == []
+                ):
+                    # For INSERT without RETURNING, capture lastrowid
+                    lastrowid = (
+                        cursor.lastrowid if hasattr(cursor, "lastrowid") else None
+                    )
+                    if lastrowid is not None:
+                        result = {"lastrowid": lastrowid}

                 # Commit for memory databases (needed for INSERT/UPDATE/DELETE)
                 await db.commit()

@@ -1577,9 +1603,24 @@ class SQLiteAdapter(DatabaseAdapter):
                         if not fetch_size:
                             raise ValueError("fetch_size required for MANY mode")
                         rows = await cursor.fetchmany(fetch_size)
-
+                        result = [self._convert_row(dict(row)) for row in rows]
+                    else:
+                        result = []

-
+                    # Check if this was an INSERT and capture lastrowid for SQLite
+                    if query.strip().upper().startswith("INSERT") and (
+                        not result or result == []
+                    ):
+                        # For INSERT without RETURNING, capture lastrowid
+                        lastrowid = (
+                            cursor.lastrowid if hasattr(cursor, "lastrowid") else None
+                        )
+                        if lastrowid is not None:
+                            await db.commit()  # Commit before returning
+                            return {"lastrowid": lastrowid}
+
+                    await db.commit()
+                    return result

     async def execute_many(
         self,

@@ -3421,28 +3462,37 @@ class AsyncSQLDatabaseNode(AsyncNode):
                 parameter_types=parameter_types,
             )

-            #
-            result
+            # Check for special SQLite lastrowid result
+            if isinstance(result, dict) and "lastrowid" in result:
+                # This is a special SQLite INSERT result
+                formatted_data = result  # Keep as-is
+                row_count = 1  # One row was inserted
+            else:
+                # Ensure all data is JSON-serializable (safety net for adapter inconsistencies)
+                result = self._ensure_serializable(result)

-
-
+                # Format results based on requested format
+                formatted_data = self._format_results(result, result_format)
+                row_count = None  # Will be calculated below

             # For DataFrame, we need special handling for row count
-            row_count
-
-
-
-
-
+            if row_count is None:  # Only calculate if not already set
+                if result_format == "dataframe":
+                    try:
+                        row_count = len(formatted_data)
+                    except:
+                        # If pandas isn't available, formatted_data is still a list
+                        row_count = (
+                            len(result)
+                            if isinstance(result, list)
+                            else (1 if result else 0)
+                        )
+                else:
                     row_count = (
                         len(result)
                         if isinstance(result, list)
                         else (1 if result else 0)
                     )
-            else:
-                row_count = (
-                    len(result) if isinstance(result, list) else (1 if result else 0)
-                )

             # Extract column names if available
             columns = []

@@ -4677,13 +4727,30 @@ class AsyncSQLDatabaseNode(AsyncNode):
         self._adapter = None

     def __del__(self):
-        """Ensure connections are closed."""
+        """Ensure connections are closed safely."""
         if self._adapter and self._connected:
-            #
+            # Try to schedule cleanup, but be resilient to event loop issues
             try:
-
-
-
-
-
+                import asyncio
+
+                # Check if there's a running event loop that's not closed
+                try:
+                    loop = asyncio.get_running_loop()
+                    if loop and not loop.is_closed():
+                        # Create cleanup task only if loop is healthy
+                        try:
+                            loop.create_task(self.cleanup())
+                        except RuntimeError as e:
+                            # Loop might be closing, ignore gracefully
+                            logger.debug(f"Could not schedule cleanup task: {e}")
+                    else:
+                        logger.debug("Event loop is closed, skipping async cleanup")
+                except RuntimeError:
+                    # No running event loop - this is normal during shutdown
+                    logger.debug(
+                        "No running event loop for cleanup, connections will be cleaned by GC"
+                    )
+            except Exception as e:
+                # Complete fallback - any unexpected error should not crash __del__
+                logger.debug(f"Error during connection cleanup: {e}")
                 pass
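For context, a minimal sketch using the standard-library sqlite3 module (not the SDK adapter) of the behaviour the lastrowid change addresses: a plain INSERT without RETURNING yields no rows, so the new row's id has to come from cursor.lastrowid, which the adapter now surfaces as {"lastrowid": ...}:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT)")
    cur = conn.execute("INSERT INTO users (name) VALUES (?)", ("alice",))
    print(cur.fetchall())   # [] -- a plain INSERT yields no rows to fetch
    print(cur.lastrowid)    # 1  -- the id the adapter now returns as {"lastrowid": 1}
    conn.commit()
    conn.close()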
kailash/runtime/local.py
CHANGED
@@ -2333,7 +2333,10 @@ class LocalRuntime:
                 else:
                     # Standard node execution (backward compatibility)
                     try:
-                        if hasattr(node, "
+                        if hasattr(node, "execute_async"):
+                            # For AsyncNode and its subclasses, use execute_async which handles event loop properly
+                            node_result = await node.execute_async(**inputs)
+                        elif hasattr(node, "async_run"):
                             node_result = await node.async_run(**inputs)
                         else:
                             node_result = node.execute(**inputs)
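For context, a hedged sketch of the dispatch order this change introduces; the node class here is a stand-in, not an SDK type:

    import asyncio

    class AsyncLikeNode:
        # Stand-in for an AsyncNode subclass exposing execute_async.
        async def execute_async(self, **inputs):
            return {"result": inputs["x"] * 2}

    async def run_node(node, **inputs):
        # Mirrors the new preference order: execute_async, then async_run, then sync execute.
        if hasattr(node, "execute_async"):
            return await node.execute_async(**inputs)
        if hasattr(node, "async_run"):
            return await node.async_run(**inputs)
        return node.execute(**inputs)

    print(asyncio.run(run_node(AsyncLikeNode(), x=21)))  # {'result': 42}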
kailash/workflow/builder.py
CHANGED
@@ -558,7 +558,7 @@ class WorkflowBuilder:
         Raises:
             WorkflowValidationError: If node_id is already used or instance is invalid
         """
-        return self.
+        return self._add_node_instance(node_instance, node_id)

     def add_node_type(
         self,
kailash/workflow/templates.py
CHANGED
@@ -171,7 +171,8 @@ class CycleTemplates:
             ... )
         """
         if cycle_id is None:
-
+            # Use timestamp with milliseconds for ID generation to ensure uniqueness
+            cycle_id = f"optimization_cycle_{int(time.time() * 1000)}"

         # Connect processor to evaluator
         workflow.connect(processor_node, evaluator_node)
{kailash-0.9.18.dist-info → kailash-0.9.20.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: kailash
-Version: 0.9.18
+Version: 0.9.20
 Summary: Python SDK for the Kailash container-node architecture
 Home-page: https://github.com/integrum/kailash-python-sdk
 Author: Integrum

@@ -53,7 +53,6 @@ Requires-Dist: pre-commit>=4.2.0
 Requires-Dist: twine>=6.1.0
 Requires-Dist: ollama>=0.5.1
 Requires-Dist: sqlalchemy>=2.0.0
-Requires-Dist: psycopg2-binary>=2.9.0
 Requires-Dist: pymysql>=1.1.0
 Requires-Dist: aiosqlite>=0.19.0
 Requires-Dist: websockets>=12.0

@@ -70,7 +69,7 @@ Requires-Dist: qrcode>=8.2
 Requires-Dist: aiofiles>=24.1.0
 Requires-Dist: bcrypt>=4.3.0
 Requires-Dist: plotly>=6.2.0
-Requires-Dist: redis
+Requires-Dist: redis>=6.2.0
 Requires-Dist: faker>=37.4.0
 Requires-Dist: structlog>=25.4.0
 Requires-Dist: authlib>=1.6.0

@@ -86,6 +85,8 @@ Requires-Dist: seaborn>=0.13.2
 Requires-Dist: sqlparse>=0.5.3
 Requires-Dist: jsonschema>=4.24.0
 Requires-Dist: openai>=1.97.1
+Requires-Dist: pymongo>=4.15.2
+Requires-Dist: psycopg>=3.2.10
 Provides-Extra: dev
 Requires-Dist: pytest>=7.0; extra == "dev"
 Requires-Dist: pytest-cov>=3.0; extra == "dev"

@@ -121,28 +122,28 @@ Dynamic: requires-python

 ---

-## 🔥 Latest Release: v0.9.
+## 🔥 Latest Release: v0.9.20 (October 6, 2025)

-**
+**Provider Registry Fix & Multi-Modal Support**

-###
-- **
-- **
-- **
-- **Patent Grant**: Includes Apache 2.0 patent protection clauses
+### 🐛 Critical Bug Fixes
+- **Mock Provider Bypass**: Removed hardcoded `if provider == "mock"` logic from LLMAgentNode
+- **Tool Execution Flow**: Unified provider response generation for all providers
+- **Provider Registry**: All providers now use consistent registry path

-###
-- **
-- **
-- **
-- **
+### ✨ Enhancements
+- **Custom Mock Providers**: Enables signature-aware mock providers (e.g., KaizenMockProvider)
+- **Multi-Modal Foundation**: Foundation for vision/audio processing in Kaizen AI framework
+- **Tool Call Generation**: MockProvider generates mock tool_calls for action-oriented messages
+- **Enhanced Testing**: 510+ tests passing with custom mock providers

 ### 📦 Package Updates
-- **kailash**: v0.9.
-- **kailash-
-- **kailash-
+- **kailash**: v0.9.20 - Provider registry fix & multi-modal support
+- **kailash-kaizen**: v0.1.1 - AI agent framework (NEW!)
+- **kailash-nexus**: v1.0.6 - Multi-channel platform
+- **kailash-dataflow**: v0.5.0 - Database framework

-[Full Changelog](sdk-users/6-reference/changelogs/releases/v0.9.
+[Full Changelog](sdk-users/6-reference/changelogs/releases/v0.9.20-provider-registry-fix.md) | [Core SDK 0.9.20](https://pypi.org/project/kailash/0.9.20/) | [Kaizen 0.1.1](https://pypi.org/project/kailash-kaizen/0.1.1/) | [Nexus 1.0.6](https://pypi.org/project/kailash-nexus/1.0.6/) | [DataFlow 0.5.0](https://pypi.org/project/kailash-dataflow/0.5.0/)

 ## 🎯 What Makes Kailash Different
{kailash-0.9.18.dist-info → kailash-0.9.20.dist-info}/RECORD
CHANGED
@@ -1,4 +1,4 @@
-kailash/__init__.py,sha256=
+kailash/__init__.py,sha256=YNnr4CZ7_iAp9097f3LS6alh0M8H9Ls5XxI3w8aKL10,2928
 kailash/__main__.py,sha256=vr7TVE5o16V6LsTmRFKG6RDKUXHpIWYdZ6Dok2HkHnI,198
 kailash/access_control.py,sha256=MjKtkoQ2sg1Mgfe7ovGxVwhAbpJKvaepPWr8dxOueMA,26058
 kailash/access_control_abac.py,sha256=FPfa_8PuDP3AxTjdWfiH3ntwWO8NodA0py9W8SE5dno,30263

@@ -93,7 +93,7 @@ kailash/mcp_server/oauth.py,sha256=GFC2O2ueiTTI6V-91Huevhc3K8CxrHe22knuHfuCTqY,5
 kailash/mcp_server/protocol.py,sha256=NIdEwJT21JT9ItajXniPNvCbZtTbpqyOC_ZezqsguGE,35694
 kailash/mcp_server/registry_integration.py,sha256=B8CSLq_O1ea3cXrbVjC3bB_OFgHIP-KS9dk77mNM02I,19791
 kailash/mcp_server/server.py,sha256=yFp1F4QQl6gkTY_9JJWmiMiwfT-zACLJLubz-NR5sCw,108675
-kailash/mcp_server/subscriptions.py,sha256
+kailash/mcp_server/subscriptions.py,sha256=-SWy1RnKUrBQ1y8ooeTiA2ODhxBbzfpsrtb8HtkhTYg,58591
 kailash/mcp_server/transports.py,sha256=fBa7CTVYTDb0ZbBQTsZ2d8rKvcVuqBIteczq8eqarr4,49919
 kailash/mcp_server/servers/ai_registry.py,sha256=IdF_keUuJlMsvjLjSAykxxbm46K4qA7eCj7T-lYSrzk,10007
 kailash/mcp_server/utils/__init__.py,sha256=R20N-iiKXUPxc9MOh6vPO1vIfkPmwhEQ5KNFgGd4xSs,771

@@ -155,7 +155,7 @@ kailash/monitoring/__init__.py,sha256=w7We20bpBdcYR3PTfN9lkep8fPEc3T2eenUkNwjdw_
 kailash/monitoring/alerts.py,sha256=Hk3Xs0EEkOIBH2ZhlejJBOsLYaPlvRejAAEGqNQISc0,21400
 kailash/monitoring/asyncsql_metrics.py,sha256=jj9M8D5qHoS3zEFfZYsUCWsy5kb-J5-iYVacmNUaGjE,9577
 kailash/monitoring/metrics.py,sha256=SiAnL3o6K0QaJHgfAuWBa-0pTkW5zymhuPEsj4bgOgM,22022
-kailash/nodes/__init__.py,sha256=
+kailash/nodes/__init__.py,sha256=dBnEwrop0cPblHxSOtVWAKCDzhRtcyQVv9j_YGWxczQ,6410
 kailash/nodes/__init___original.py,sha256=p2KSo0dyUBCLClU123qpQ0tyv5S_36PTxosNyW58nyY,1031
 kailash/nodes/base.py,sha256=GR2E1fWf8j1yMvJic7m2NAih7kjY1NtoDi47hHwoZ40,85437
 kailash/nodes/base_async.py,sha256=whxepCiVplrltfzEQuabmnGCpEV5WgfqwgxbLdCyiDk,8864

@@ -177,14 +177,13 @@ kailash/nodes/admin/transaction_utils.py,sha256=IAAdIiQ5Q4kTT5ul4hv1iSfoXG3CeFAk
 kailash/nodes/admin/user_management.py,sha256=Al9mKgTx3ASDSefZaIcPxL4zSjASI3cGdZIs33uwpvg,54707
 kailash/nodes/ai/__init__.py,sha256=1mH94Ap5Zo0sUFNNZbMQf3h_KmyS2fquCTFMPvdJLoc,2725
 kailash/nodes/ai/a2a.py,sha256=g3QIeCefWpcmQ3RpuoTAnCj9Q8145ieV3NyEfGsyOno,138552
-kailash/nodes/ai/a2a_backup.py,sha256=fDSnihMFQ6MuhKSzL9ueWGAQLrKZQRkq9HqArdotf_w,70048
 kailash/nodes/ai/agents.py,sha256=CRA3cdapQjpuvOniXUh6ZVWAlRxUIepVw1BROW6QzdY,20373
-kailash/nodes/ai/ai_providers.py,sha256=
+kailash/nodes/ai/ai_providers.py,sha256=XdkZlG7TJcQWoPEYLBy65jjtkOTxOpjl7MFO5MfZc6U,76627
 kailash/nodes/ai/embedding_generator.py,sha256=akGCzz7zLRSziqEQCiPwL2qWhRWxuM_1RQh-YtVEddw,31879
 kailash/nodes/ai/hybrid_search.py,sha256=k26uDDP_bwrIpv7Yl7PBCPvWSyQEmTlBjI1IpbgDsO4,35446
 kailash/nodes/ai/intelligent_agent_orchestrator.py,sha256=LvBqMKc64zSxFWVCjbLKKel2QwEzoTeJAEgna7rZw00,83097
 kailash/nodes/ai/iterative_llm_agent.py,sha256=h8iP1KFhB_eCDs7UvmY_9y0OUBuprYMj2MLM6dR0W2c,100287
-kailash/nodes/ai/llm_agent.py,sha256
+kailash/nodes/ai/llm_agent.py,sha256=-3K5BuMsPk_-0gIc-_pg95-cgh8yXZ5CaQvE91Wu3nw,99307
 kailash/nodes/ai/models.py,sha256=wsEeUTuegy87mnLtKgSTg7ggCXvC1n3MsL-iZ4qujHs,16393
 kailash/nodes/ai/self_organizing.py,sha256=B7NwKaBW8OHQBf5b0F9bSs8Wm-5BDJ9IjIkxS9h00mg,62885
 kailash/nodes/ai/semantic_memory.py,sha256=ZTXIgxwMheux712cN__cNrQ3VgHaKcDyfQv_Gto7MRM,18644

@@ -214,13 +213,13 @@ kailash/nodes/cache/cache_invalidation.py,sha256=IUvxrRj3K5EF29Z2EaKl7t6Uze_cssn
 kailash/nodes/cache/redis_pool_manager.py,sha256=GR82GCWxo_gAzRE-091OB6AhKre8CTwM3OoePLb2gvE,21574
 kailash/nodes/code/__init__.py,sha256=yhEwuMjUEPFfe6hMGMd4E4gZdLUuf2JEQ7knYapiM4o,1283
 kailash/nodes/code/async_python.py,sha256=Ai-iMpmz-sAori73JBk0wZtqmwtmF2GNPDxqB04I2Ck,37058
-kailash/nodes/code/python.py,sha256=
+kailash/nodes/code/python.py,sha256=rHBynVhV16G9AOQMZtLnpwTkp-wxsGe69BAjVZ4X6hg,70224
 kailash/nodes/compliance/__init__.py,sha256=6a_FL4ofc8MAVuZ-ARW5uYenZLS4mBFVM9AI2QsnoF8,214
 kailash/nodes/compliance/data_retention.py,sha256=90bH_eGwlcDzUdklAJeXQM-RcuLUGQFQ5fgHOK8a4qk,69443
 kailash/nodes/compliance/gdpr.py,sha256=ZMoHZjAo4QtGwtFCzGMrAUBFV3TbZOnJ5DZGZS87Bas,70548
 kailash/nodes/data/__init__.py,sha256=f0h4ysvXxlyFcNJLvDyXrgJ0ixwDF1cS0pJ2QNPakhg,5213
 kailash/nodes/data/async_connection.py,sha256=wfArHs9svU48bxGZIiixSV2YVn9cukNgEjagwTRu6J4,17250
-kailash/nodes/data/async_sql.py,sha256=
+kailash/nodes/data/async_sql.py,sha256=9C-XRTDrzpVwFRrI13ym539UajT0Qgh9jmIjisDPi28,188864
 kailash/nodes/data/async_vector.py,sha256=HtwQLO25IXu8Vq80qzU8rMkUAKPQ2qM0x8YxjXHlygU,21005
 kailash/nodes/data/bulk_operations.py,sha256=WVopmosVkIlweFxVt3boLdCPc93EqpYyQ1Ez9mCIt0c,34453
 kailash/nodes/data/directory.py,sha256=fbfLqD_ijRubk-4xew3604QntPsyDxqaF4k6TpfyjDg,9923

@@ -341,7 +340,7 @@ kailash/runtime/async_local.py,sha256=sYNggSU0R-oo8cCvU5ayodDBqASzUhxu994ZvZxDSC
 kailash/runtime/compatibility_reporter.py,sha256=TOQD0ODnJdsxEPyNSYOV_zQxu60X_yvHeu26seFOMEA,19807
 kailash/runtime/docker.py,sha256=sZknVl1PCGfAZeyc0-exTuKlllSyjYlFIgJoiB3CRNs,23500
 kailash/runtime/hierarchical_switch_executor.py,sha256=k6aPGbpf6z2m6dTbHrEyuDR8ZCvOqUanBGYp70arQn0,20782
-kailash/runtime/local.py,sha256=
+kailash/runtime/local.py,sha256=nIQRWUwSHVg2Daafq_JggBLf-zTDBaGMcwObBzVI0po,201389
 kailash/runtime/parallel.py,sha256=-M9VVG36RxnrrmdbcBe9IjQWb58tAEEo76RQQ2uIXaE,21084
 kailash/runtime/parallel_cyclic.py,sha256=yANZHnePjhCPuCFbq3lFQA1K6jbCv5Of5-vIKbCsmZk,19863
 kailash/runtime/parameter_injection.py,sha256=kG4GhmarsRr5t3VDFbc2G1HSbsZJg6UmienHCE2Ru7o,14852

@@ -399,7 +398,7 @@ kailash/visualization/reports.py,sha256=D7kJ0flHr16d-qSEq8vnw20N8u_dgTrXtKVSXVm8
 kailash/workflow/__init__.py,sha256=DDQDE9K6RmbX6479guNLLgjiVVV-gQERRvCEJWSVlsM,1836
 kailash/workflow/async_builder.py,sha256=iv8bDJHdWAUZ77SyMo6sucd92dTdtXesdxycrSE7mM4,20613
 kailash/workflow/async_patterns.py,sha256=X0ZDXwr6UAu0WC1xnCB7-0V1-tRbKs9UI4JqaBCB6tE,22824
-kailash/workflow/builder.py,sha256=
+kailash/workflow/builder.py,sha256=mHWfoHsq4W5DvJmv3b_vMDeVXqqpNHXO6U3Kuvk54tI,51079
 kailash/workflow/contracts.py,sha256=Uch-s2SC-NYrg0n2zgljgkyFHf4bufY4OydFuIfAk7E,13442
 kailash/workflow/convergence.py,sha256=vfIDR-uNaQE-LVUEzrRtfgKPgX9gL0nLNH-nTg5ra-c,10031
 kailash/workflow/cycle_analyzer.py,sha256=BGBpgdB-g0-KRI65sVAvHV4lxfoCzMt4uKOHbw8GXT4,32596

@@ -420,14 +419,14 @@ kailash/workflow/resilience.py,sha256=Ecef4gBg-QWP369a_xfzQnVWhHryvEcO2RSFVSriLJ
 kailash/workflow/runner.py,sha256=l6jb-H7DwbRlvQ3H3SuTs70rut-u7H3Gi8nybKCEjZU,10795
 kailash/workflow/safety.py,sha256=pS5GKu7UdkzFZcb16Dn-0jBxjULDU-59_M0CbUVMVyw,11298
 kailash/workflow/state.py,sha256=UTZxs5-Ona6uvBhx1__i6-RX8gB4qazkBIWE7uyRmWQ,7600
-kailash/workflow/templates.py,sha256=
+kailash/workflow/templates.py,sha256=aZQzEPQD368nN0x0ICQlRKmAr2FqTxIOUa-7rb7EUWI,48578
 kailash/workflow/type_inference.py,sha256=i1F7Yd_Z3elTXrthsLpqGbOnQBIVVVEjhRpI0HrIjd0,24492
 kailash/workflow/validation.py,sha256=LdbIPQSokCqSLfWTBhJR82pa_0va44pcVu9dpEM4rvY,45177
 kailash/workflow/visualization.py,sha256=nHBW-Ai8QBMZtn2Nf3EE1_aiMGi9S6Ui_BfpA5KbJPU,23187
-kailash-0.9.
-kailash-0.9.
-kailash-0.9.
-kailash-0.9.
-kailash-0.9.
-kailash-0.9.
-kailash-0.9.
+kailash-0.9.20.dist-info/licenses/LICENSE,sha256=9GYZHXVUmx6FdFRNzOeE_w7a_aEGeYbqTVmFtJlrbGk,13438
+kailash-0.9.20.dist-info/licenses/NOTICE,sha256=9ssIK4LcHSTFqriXGdteMpBPTS1rSLlYtjppZ_bsjZ0,723
+kailash-0.9.20.dist-info/METADATA,sha256=m_PViYPw0xnQ2Ijzwu778vb98XIpvZOksgeuMrq2tRY,23678
+kailash-0.9.20.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+kailash-0.9.20.dist-info/entry_points.txt,sha256=M_q3b8PG5W4XbhSgESzIJjh3_4OBKtZFYFsOdkr2vO4,45
+kailash-0.9.20.dist-info/top_level.txt,sha256=z7GzH2mxl66498pVf5HKwo5wwfPtt9Aq95uZUpH6JV0,8
+kailash-0.9.20.dist-info/RECORD,,