ibm-watsonx-orchestrate 1.3.0__py3-none-any.whl → 1.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ibm_watsonx_orchestrate/__init__.py +2 -1
- ibm_watsonx_orchestrate/agent_builder/agents/types.py +2 -0
- ibm_watsonx_orchestrate/agent_builder/knowledge_bases/types.py +10 -2
- ibm_watsonx_orchestrate/agent_builder/toolkits/base_toolkit.py +32 -0
- ibm_watsonx_orchestrate/agent_builder/toolkits/types.py +42 -0
- ibm_watsonx_orchestrate/agent_builder/tools/openapi_tool.py +10 -1
- ibm_watsonx_orchestrate/agent_builder/tools/python_tool.py +4 -2
- ibm_watsonx_orchestrate/agent_builder/tools/types.py +2 -1
- ibm_watsonx_orchestrate/cli/commands/agents/agents_command.py +29 -0
- ibm_watsonx_orchestrate/cli/commands/agents/agents_controller.py +271 -12
- ibm_watsonx_orchestrate/cli/commands/knowledge_bases/knowledge_bases_controller.py +17 -2
- ibm_watsonx_orchestrate/cli/commands/models/env_file_model_provider_mapper.py +180 -0
- ibm_watsonx_orchestrate/cli/commands/models/models_command.py +199 -8
- ibm_watsonx_orchestrate/cli/commands/server/server_command.py +117 -48
- ibm_watsonx_orchestrate/cli/commands/server/types.py +105 -0
- ibm_watsonx_orchestrate/cli/commands/toolkit/toolkit_command.py +55 -7
- ibm_watsonx_orchestrate/cli/commands/toolkit/toolkit_controller.py +123 -42
- ibm_watsonx_orchestrate/cli/commands/tools/tools_command.py +22 -1
- ibm_watsonx_orchestrate/cli/commands/tools/tools_controller.py +197 -12
- ibm_watsonx_orchestrate/client/agents/agent_client.py +4 -1
- ibm_watsonx_orchestrate/client/agents/assistant_agent_client.py +5 -1
- ibm_watsonx_orchestrate/client/agents/external_agent_client.py +5 -1
- ibm_watsonx_orchestrate/client/analytics/llm/analytics_llm_client.py +2 -6
- ibm_watsonx_orchestrate/client/base_api_client.py +5 -2
- ibm_watsonx_orchestrate/client/connections/connections_client.py +3 -9
- ibm_watsonx_orchestrate/client/model_policies/__init__.py +0 -0
- ibm_watsonx_orchestrate/client/model_policies/model_policies_client.py +47 -0
- ibm_watsonx_orchestrate/client/model_policies/types.py +36 -0
- ibm_watsonx_orchestrate/client/models/__init__.py +0 -0
- ibm_watsonx_orchestrate/client/models/models_client.py +46 -0
- ibm_watsonx_orchestrate/client/models/types.py +189 -0
- ibm_watsonx_orchestrate/client/toolkit/toolkit_client.py +20 -6
- ibm_watsonx_orchestrate/client/tools/tempus_client.py +40 -0
- ibm_watsonx_orchestrate/client/tools/tool_client.py +8 -0
- ibm_watsonx_orchestrate/docker/compose-lite.yml +68 -13
- ibm_watsonx_orchestrate/docker/default.env +22 -12
- ibm_watsonx_orchestrate/docker/tempus/common-config.yaml +1 -1
- ibm_watsonx_orchestrate/experimental/flow_builder/__init__.py +0 -0
- ibm_watsonx_orchestrate/experimental/flow_builder/data_map.py +19 -0
- ibm_watsonx_orchestrate/experimental/flow_builder/flows/__init__.py +42 -0
- ibm_watsonx_orchestrate/experimental/flow_builder/flows/constants.py +19 -0
- ibm_watsonx_orchestrate/experimental/flow_builder/flows/decorators.py +144 -0
- ibm_watsonx_orchestrate/experimental/flow_builder/flows/events.py +72 -0
- ibm_watsonx_orchestrate/experimental/flow_builder/flows/flow.py +1310 -0
- ibm_watsonx_orchestrate/experimental/flow_builder/node.py +116 -0
- ibm_watsonx_orchestrate/experimental/flow_builder/resources/flow_status.openapi.yml +66 -0
- ibm_watsonx_orchestrate/experimental/flow_builder/types.py +765 -0
- ibm_watsonx_orchestrate/experimental/flow_builder/utils.py +115 -0
- ibm_watsonx_orchestrate/utils/utils.py +5 -2
- {ibm_watsonx_orchestrate-1.3.0.dist-info → ibm_watsonx_orchestrate-1.5.0.dist-info}/METADATA +4 -1
- {ibm_watsonx_orchestrate-1.3.0.dist-info → ibm_watsonx_orchestrate-1.5.0.dist-info}/RECORD +54 -32
- {ibm_watsonx_orchestrate-1.3.0.dist-info → ibm_watsonx_orchestrate-1.5.0.dist-info}/WHEEL +0 -0
- {ibm_watsonx_orchestrate-1.3.0.dist-info → ibm_watsonx_orchestrate-1.5.0.dist-info}/entry_points.txt +0 -0
- {ibm_watsonx_orchestrate-1.3.0.dist-info → ibm_watsonx_orchestrate-1.5.0.dist-info}/licenses/LICENSE +0 -0
ibm_watsonx_orchestrate/docker/default.env

@@ -46,38 +46,46 @@ CELERY_RESULTS_TTL="3600"
 EVENT_BROKER_TTL="-1"
 
 # START -- IMAGE REGISTRIES AND TAGS
-#
+# The registry URL to pull the private images from, including the name of the repository in the registry.
+# e.g. cp.icr.io/cp/wxo-lite
+# If the registry URL is not set here or by the user, then it will be set automatically based on the value of WO_DEVELOPER_EDITION_SOURCE
+# The *_REGISTRY variables are used to set the registry URL for each component. If not set, the URL here will be used.
+# See get_default_registry_env_vars_by_dev_edition_source() in src/ibm_watsonx_orchestrate/cli/commands/server/server_command.py for more details.
 REGISTRY_URL=
 
-SERVER_TAG=
+SERVER_TAG=30-05-2025
 SERVER_REGISTRY=
 
-WORKER_TAG=
+WORKER_TAG=30-05-2025
 WORKER_REGISTRY=
 
+AI_GATEWAY_TAG=30-05-2025
+AI_GATEWAY_REGISTRY=
+
 DB_REGISTRY=
 # If you build multiarch set all three of these to the same, we have a pr against main
 # to not have this separation, but we can merge it later
-DBTAG=
-AMDDBTAG=
-ARM64DBTAG=
+DBTAG=30-05-2025
+AMDDBTAG=30-05-2025
+ARM64DBTAG=30-05-2025
 
 UI_REGISTRY=
-UITAG=
+UITAG=23-05-2025
 
 CM_REGISTRY=
-CM_TAG=
+CM_TAG=13-05-2025
 
-TRM_TAG=
+TRM_TAG=25-05-2025
 TRM_REGISTRY=
 
-TR_TAG=
+TR_TAG=25-05-2025
 TR_REGISTRY=
 
+BUILDER_TAG=27-05-2025
 BUILDER_REGISTRY=
-BUILDER_TAG=02-05-2025
 
-
+
+FLOW_RUNTIME_TAG=13-05-2025
 FLOW_RUMTIME_REGISTRY=
 
 # END -- IMAGE REGISTRIES AND TAGS
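The new comments spell out a fallback scheme for image registries: each component reads its own *_REGISTRY variable and, when that is empty, falls back to the shared REGISTRY_URL, which in turn may be derived from WO_DEVELOPER_EDITION_SOURCE by get_default_registry_env_vars_by_dev_edition_source() in server_command.py. A minimal sketch of that lookup, using a hypothetical helper rather than the package's own function:

    import os

    def resolve_registry(component: str) -> str | None:
        # Hypothetical illustration of the documented fallback:
        # a per-component override (e.g. SERVER_REGISTRY) wins,
        # otherwise the shared REGISTRY_URL is used.
        return os.getenv(f"{component}_REGISTRY") or os.getenv("REGISTRY_URL")

    server_registry = resolve_registry("SERVER")  # e.g. "cp.icr.io/cp/wxo-lite"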
@@ -110,6 +118,8 @@ WXO_BASE_URL=http://wxo-server:4321
 RUNTIME_MANAGER_API_KEY="testapikey"
 TOOLS_RUNTIME_MANAGER_BASE_URL="http://tools-runtime-manager:8080"
 CONNECTION_SERVICE_BASE_URL="http://wxo-server-connection-manager:3001"
+AI_GATEWAY_BASE_URL="http://ai-gateway:8787/v1"
+AI_GATEWAY_ENABLED=True
 
 #To Prevent warnings
 VECTOR_STORE_PROVIDER=
ibm_watsonx_orchestrate/docker/tempus/common-config.yaml

@@ -1 +1 @@
-NODE_LOG_LEVEL:
+NODE_LOG_LEVEL: audit

ibm_watsonx_orchestrate/experimental/flow_builder/__init__.py

File without changes
ibm_watsonx_orchestrate/experimental/flow_builder/data_map.py

@@ -0,0 +1,19 @@
+from typing import Any, Optional, Self
+from pydantic import BaseModel, Field, SerializeAsAny
+
+from .types import (
+    Assignment
+)
+
+
+class DataMap(BaseModel):
+    maps: Optional[list[Assignment]] = Field(default_factory=list)
+
+    def to_json(self) -> dict[str, Any]:
+        model_spec = {}
+        if self.maps and len(self.maps) > 0:
+            model_spec["maps"] = [assignment.model_dump() for assignment in self.maps]
+
+    def add(self, line: Assignment) -> Self:
+        self.maps.append(line)
+
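A brief usage sketch of the new DataMap model. Assignment is defined in flow_builder/types.py, which is not part of this hunk, so the constructor keywords below are hypothetical placeholders; note also that, as shipped, to_json() builds model_spec without returning it and add() does not return self despite its Self annotation.

    from ibm_watsonx_orchestrate.experimental.flow_builder.flows import Assignment, DataMap

    # The Assignment fields are placeholders; the real model lives in
    # flow_builder/types.py and is not shown in this diff.
    assignment = Assignment(target_variable="output.name", value_expression="input.name")

    data_map = DataMap()
    data_map.add(assignment)   # appends to data_map.maps
    spec = data_map.to_json()  # currently returns None (no return statement)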
ibm_watsonx_orchestrate/experimental/flow_builder/flows/__init__.py

@@ -0,0 +1,42 @@
+from .constants import START, END, RESERVED
+from ..types import FlowContext, TaskData, TaskEventType
+from ..node import UserNode, AgentNode, StartNode, EndNode, PromptNode
+from .flow import Flow, CompiledFlow, FlowRun, FlowEvent, FlowEventType, FlowFactory, MatchPolicy, WaitPolicy, ForeachPolicy, Branch, Foreach, Loop
+from .decorators import user, flow_spec, flow
+from ..data_map import Assignment, DataMap
+
+
+__all__ = [
+    "START",
+    "END",
+    "RESERVED",
+
+    "FlowContext",
+    "TaskData",
+    "TaskEventType",
+
+    "UserNode",
+    "AgentNode",
+    "StartNode",
+    "EndNode",
+    "PromptNode",
+    "Assignment",
+    "DataMap",
+
+    "Flow",
+    "CompiledFlow",
+    "FlowRun",
+    "FlowEvent",
+    "FlowEventType",
+    "FlowFactory",
+    "MatchPolicy",
+    "WaitPolicy",
+    "ForeachPolicy",
+    "Branch",
+    "Foreach",
+    "Loop",
+
+    "user",
+    "flow_spec",
+    "flow"
+]
ibm_watsonx_orchestrate/experimental/flow_builder/flows/constants.py

@@ -0,0 +1,19 @@
+"""
+Predefined constants for Flow.
+"""
+import sys
+
+START = sys.intern("__start__")
+END = sys.intern("__end__")
+
+ANY_USER = sys.intern("__any_user__")
+CURRENT_USER = sys.intern("__current_user__")
+FLOW_CONTEXT = sys.intern("FlowContext")
+
+RESERVED = {
+    START,
+    END,
+    FLOW_CONTEXT,
+    ANY_USER,
+    CURRENT_USER
+}
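Because these constants are interned with sys.intern, the reserved names are canonical string objects, so identity comparisons and membership tests against RESERVED are cheap. A small illustration; the guard shown is a hypothetical caller-side check, not code from this package:

    import sys
    from ibm_watsonx_orchestrate.experimental.flow_builder.flows import RESERVED, START

    node_name = sys.intern("__start__")
    assert node_name is START          # interned strings share one object

    # Hypothetical guard against using a reserved name for a node
    if node_name in RESERVED:
        raise ValueError(f"'{node_name}' is a reserved flow name")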
ibm_watsonx_orchestrate/experimental/flow_builder/flows/decorators.py

@@ -0,0 +1,144 @@
+"""
+A set of decorators to help define different Flow constructs.
+"""
+
+import asyncio
+from functools import wraps
+import logging
+import inspect
+from typing import Callable, Optional, Sequence
+from pydantic import BaseModel
+from ..types import extract_node_spec, UserNodeSpec, FlowSpec
+
+from .flow import FlowFactory, Flow
+
+logger = logging.getLogger(__name__)
+
+
+class FlowWrapper:
+    def __init__(self, func, a_model):
+        self.func = func
+        self.a_model = a_model
+        wraps(func)(self)  # Preserve metadata
+
+    def __call__(self, *args, **kwargs):
+        result = self.func(self.a_model)
+        if not isinstance(result, Flow):
+            raise ValueError("Return value must be of type Flow")
+        return result
+
+def user(*args, name: str|None=None, description: str|None=None, owners: Sequence[str]|None = None, message: str | None = None):
+    """Decorator to mark a function as a user node specification."""
+
+    def decorator(func: Callable):
+        node_spec = extract_node_spec(func, name, description)
+        func.__user_spec__ = UserNodeSpec(type = "user",
+                                          name = node_spec.name,
+                                          display_name = node_spec.display_name,
+                                          description = node_spec.description,
+                                          input_schema = node_spec.input_schema,
+                                          output_schema = node_spec.output_schema,
+                                          output_schema_object = node_spec.output_schema_object,
+                                          text=message,
+                                          owners=owners)
+
+        @wraps(func)
+        def wrapper(*args, **kwargs):
+            logger.error(f"User node {name} is not supported yet.")
+            r = func(*args, **kwargs)
+            return r
+
+        return wrapper
+
+    if len(args) == 1 and callable(args[0]):
+        return decorator(args[0])
+    else:
+        return decorator
+
+
+def flow_spec(*args,
+              name: Optional[str]=None,
+              description: str|None=None,
+              initiators: Sequence[str] = ()):
+    """Decorator to mark a function as a flow specification."""
+
+    def decorator(func: Callable):
+        node_spec = extract_node_spec(func, name, description)
+        a_spec = FlowSpec(type = "flow",
+                          name = node_spec.name,
+                          display_name = node_spec.display_name,
+                          description = node_spec.description,
+                          input_schema = node_spec.input_schema,
+                          output_schema = node_spec.output_schema,
+                          output_schema_object = node_spec.output_schema_object,
+                          initiators = initiators)
+
+        # we should also check a flow is async
+        if not asyncio.iscoroutinefunction(func):
+            raise ValueError("Flow must be asynchronous.")
+
+        logger.info("Generated flow spec: %s", a_spec)
+        func.__flow_spec__ = a_spec
+
+        @wraps(func)
+        def wrapper(*args, **kwargs):
+            logger.info("Creating flow spec: %s", name)
+            r = func(*args, **kwargs)
+            logger.info("Flow spec %s returned: %s", name, r)
+            return r
+
+        return wrapper
+
+    if len(args) == 1 and callable(args[0]):
+        return decorator(args[0])
+    else:
+        return decorator
+
+def flow(*args,
+         name: Optional[str]=None,
+         display_name: Optional[str]=None,
+         description: str|None=None,
+         input_schema: type[BaseModel] | None = None,
+         output_schema: type[BaseModel] | None = None,
+         initiators: Sequence[str] = ()):
+    """Decorator to mark a function as a flow model builder."""
+
+    def decorator(func: Callable):
+        """
+        Decorator that takes a function as an argument and returns a wrapper function.
+        The wrapper function takes a single argument of type Flow and calls the original function with the created flow as an argument.
+        """
+
+        sig = inspect.signature(func)
+        if len(sig.parameters) != 1:
+            raise ValueError("Only one argument is allowed")
+        param = list(sig.parameters.values())[0]
+        if param.annotation != Flow:
+            raise ValueError("Argument must be of type Flow")
+        if sig.return_annotation != Flow:
+            raise ValueError("Return value must be of type Flow")
+
+        node_spec = extract_node_spec(func, name, description)
+        a_model = FlowFactory.create_flow(
+            name = node_spec.name,
+            display_name = display_name,
+            description = node_spec.description,
+            input_schema = input_schema,
+            output_schema = output_schema,
+            initiators = initiators)
+
+        # logger.info("Creating flow model: %s", a_model.spec.name)
+
+        # @wraps(func)
+        # def wrapper(*args, **kwargs):
+        #     result = func(a_model)
+        #     if not isinstance(result, Flow):
+        #         raise ValueError("Return value must be of type Flow")
+        #     return result
+
+        return FlowWrapper(func, a_model)
+
+    if len(args) == 1 and callable(args[0]):
+        return decorator(args[0])
+    else:
+        return decorator
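Given the checks above, a function decorated with @flow must accept exactly one parameter annotated as Flow and declare Flow as its return type; the decorator pre-creates the flow model via FlowFactory.create_flow(), and calling the wrapped function runs FlowWrapper.__call__, which passes that model in and returns it. A minimal sketch (TicketRequest is a hypothetical schema, and the body is left empty because the node-building API lives in flow.py, outside this hunk):

    from pydantic import BaseModel
    from ibm_watsonx_orchestrate.experimental.flow_builder.flows import Flow, flow

    class TicketRequest(BaseModel):
        summary: str

    @flow(name="triage_ticket", description="Route a support ticket", input_schema=TicketRequest)
    def build_triage_flow(aflow: Flow) -> Flow:
        # Nodes and edges would be added to `aflow` here via the Flow API in flow.py.
        return aflow

    triage_flow = build_triage_flow()  # FlowWrapper calls build_triage_flow(a_model) and returns the Flow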
ibm_watsonx_orchestrate/experimental/flow_builder/flows/events.py

@@ -0,0 +1,72 @@
+import redis
+import time
+import json
+from dotenv import load_dotenv
+import os
+
+from typing import (
+    AsyncIterator, Union
+)
+
+from ..types import (
+    FlowEventType, TaskEventType, FlowEvent, FlowContext
+)
+
+class StreamConsumer:
+    def __init__(self, instance_id: str):
+
+        load_dotenv()
+        self.redis_host = os.getenv("REDIS_HOST", "localhost")
+        self.redis_port = os.getenv("REDIS_PORT", 6379)
+        self.redis_db = os.getenv("REDIS_DB", 0)
+        self.redis = redis.Redis(host=self.redis_host, port=self.redis_port, db=self.redis_db)
+        self.instance_id = instance_id
+        self.stream_name = f"tempus:{self.instance_id}"
+        self.last_processed_id = 0
+
+
+    async def consume(self) -> AsyncIterator[FlowEvent]:
+
+        while True:
+            try:
+                # XREAD command: Read new messages from the stream
+                messages = self.redis.xread({self.stream_name: self.last_processed_id}, block=5000, count=10)
+
+                for stream, events in messages:
+                    for event_id, event_data in events:
+                        self.last_processed_id = event_id  # Update the last read event ID
+                        flow_event = deserialize_flow_event(event_data)
+                        yield flow_event
+
+            except Exception as e:
+                print(f"Error occurred: {e}")
+                time.sleep(5)  # Wait for 5 seconds before retrying
+
+            time.sleep(1)  # Sleep for 1 second before checking for new messages
+
+def deserialize_flow_event(byte_data: bytes) -> FlowEvent:
+    """Deserialize byte data into a FlowEvent object."""
+    # Decode the byte data
+    decoded_data = byte_data[b'message'].decode('utf-8')
+
+    # Parse the JSON string into a dictionary
+    parsed_data = json.loads(decoded_data)
+
+    # Deserialize into FlowEvent using Pydantic's parsing
+    flow_event = FlowEvent(
+        kind=get_event_type(parsed_data["kind"]),
+        context=FlowContext(**parsed_data["context"]) if "context" in parsed_data and parsed_data["context"] != {} else None,
+        error=json.loads(parsed_data["error"]) if "error" in parsed_data and parsed_data["error"] != {} and len(parsed_data["error"]) > 0 else None,
+    )
+
+    return flow_event
+
+def get_event_type(selected_event_type: str) -> Union[FlowEventType, TaskEventType]:
+    """Selects the right event type from the corresponding enumerator"""
+    eventKind = selected_event_type.upper()
+    if eventKind in FlowEventType.__members__:
+        return FlowEventType(selected_event_type)
+    elif eventKind in TaskEventType.__members__:
+        return TaskEventType(selected_event_type)
+    else:
+        raise ValueError(f"Invalid event type: {eventKind}")
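consume() is an async generator that polls the tempus:<instance_id> Redis stream with XREAD and yields deserialized FlowEvent objects, so callers iterate it with async for. A usage sketch, assuming Redis is reachable via the REDIS_HOST/REDIS_PORT/REDIS_DB environment variables and a flow instance id obtained elsewhere (the id below is a placeholder):

    import asyncio

    from ibm_watsonx_orchestrate.experimental.flow_builder.flows.events import StreamConsumer

    async def main() -> None:
        consumer = StreamConsumer("flow-instance-123")  # placeholder instance id
        async for event in consumer.consume():          # reads the "tempus:flow-instance-123" stream
            print(event.kind, event.context)

    asyncio.run(main())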