ibm-watsonx-orchestrate 1.6.2__py3-none-any.whl → 1.7.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. ibm_watsonx_orchestrate/__init__.py +2 -1
  2. ibm_watsonx_orchestrate/agent_builder/agents/agent.py +3 -3
  3. ibm_watsonx_orchestrate/agent_builder/agents/assistant_agent.py +3 -2
  4. ibm_watsonx_orchestrate/agent_builder/agents/external_agent.py +3 -2
  5. ibm_watsonx_orchestrate/agent_builder/agents/types.py +9 -8
  6. ibm_watsonx_orchestrate/agent_builder/connections/connections.py +4 -3
  7. ibm_watsonx_orchestrate/agent_builder/knowledge_bases/knowledge_base_requests.py +1 -22
  8. ibm_watsonx_orchestrate/agent_builder/knowledge_bases/types.py +1 -17
  9. ibm_watsonx_orchestrate/agent_builder/tools/base_tool.py +2 -1
  10. ibm_watsonx_orchestrate/agent_builder/tools/openapi_tool.py +14 -13
  11. ibm_watsonx_orchestrate/agent_builder/tools/python_tool.py +136 -92
  12. ibm_watsonx_orchestrate/agent_builder/tools/types.py +10 -9
  13. ibm_watsonx_orchestrate/cli/commands/agents/agents_command.py +7 -7
  14. ibm_watsonx_orchestrate/cli/commands/agents/agents_controller.py +4 -3
  15. ibm_watsonx_orchestrate/cli/commands/environment/environment_controller.py +5 -5
  16. ibm_watsonx_orchestrate/cli/commands/environment/types.py +2 -0
  17. ibm_watsonx_orchestrate/cli/commands/knowledge_bases/knowledge_bases_command.py +0 -18
  18. ibm_watsonx_orchestrate/cli/commands/knowledge_bases/knowledge_bases_controller.py +33 -19
  19. ibm_watsonx_orchestrate/cli/commands/models/models_command.py +1 -1
  20. ibm_watsonx_orchestrate/cli/commands/server/server_command.py +100 -36
  21. ibm_watsonx_orchestrate/cli/commands/toolkit/toolkit_command.py +1 -1
  22. ibm_watsonx_orchestrate/cli/commands/tools/tools_controller.py +11 -4
  23. ibm_watsonx_orchestrate/cli/config.py +3 -3
  24. ibm_watsonx_orchestrate/cli/init_helper.py +10 -1
  25. ibm_watsonx_orchestrate/cli/main.py +2 -0
  26. ibm_watsonx_orchestrate/client/knowledge_bases/knowledge_base_client.py +1 -1
  27. ibm_watsonx_orchestrate/client/local_service_instance.py +3 -1
  28. ibm_watsonx_orchestrate/client/service_instance.py +33 -7
  29. ibm_watsonx_orchestrate/docker/compose-lite.yml +177 -2
  30. ibm_watsonx_orchestrate/docker/default.env +22 -2
  31. ibm_watsonx_orchestrate/flow_builder/flows/__init__.py +3 -1
  32. ibm_watsonx_orchestrate/flow_builder/flows/decorators.py +4 -2
  33. ibm_watsonx_orchestrate/flow_builder/flows/events.py +10 -9
  34. ibm_watsonx_orchestrate/flow_builder/flows/flow.py +91 -20
  35. ibm_watsonx_orchestrate/flow_builder/node.py +12 -1
  36. ibm_watsonx_orchestrate/flow_builder/types.py +169 -16
  37. ibm_watsonx_orchestrate/flow_builder/utils.py +120 -5
  38. ibm_watsonx_orchestrate/utils/exceptions.py +23 -0
  39. {ibm_watsonx_orchestrate-1.6.2.dist-info → ibm_watsonx_orchestrate-1.7.0b1.dist-info}/METADATA +2 -4
  40. {ibm_watsonx_orchestrate-1.6.2.dist-info → ibm_watsonx_orchestrate-1.7.0b1.dist-info}/RECORD +43 -43
  41. ibm_watsonx_orchestrate/flow_builder/resources/flow_status.openapi.yml +0 -66
  42. {ibm_watsonx_orchestrate-1.6.2.dist-info → ibm_watsonx_orchestrate-1.7.0b1.dist-info}/WHEEL +0 -0
  43. {ibm_watsonx_orchestrate-1.6.2.dist-info → ibm_watsonx_orchestrate-1.7.0b1.dist-info}/entry_points.txt +0 -0
  44. {ibm_watsonx_orchestrate-1.6.2.dist-info → ibm_watsonx_orchestrate-1.7.0b1.dist-info}/licenses/LICENSE +0 -0
@@ -2,6 +2,7 @@ from enum import Enum
 from typing import List, Any, Dict, Literal, Optional, Union
 
 from pydantic import BaseModel, model_validator, ConfigDict, Field, AliasChoices
+from ibm_watsonx_orchestrate.utils.exceptions import BadRequest
 
 
 class ToolPermission(str, Enum):
@@ -74,19 +75,19 @@ class OpenApiSecurityScheme(BaseModel):
     @model_validator(mode='after')
     def validate_security_scheme(self) -> 'OpenApiSecurityScheme':
         if self.type == 'http' and self.scheme is None:
-            raise ValueError("'scheme' is required when type is 'http'")
+            raise BadRequest("'scheme' is required when type is 'http'")
 
         if self.type == 'oauth2' and self.flows is None:
-            raise ValueError("'flows' is required when type is 'oauth2'")
+            raise BadRequest("'flows' is required when type is 'oauth2'")
 
         if self.type == 'openIdConnect' and self.open_id_connect_url is None:
-            raise ValueError("'open_id_connect_url' is required when type is 'openIdConnect'")
+            raise BadRequest("'open_id_connect_url' is required when type is 'openIdConnect'")
 
         if self.type == 'apiKey':
             if self.name is None:
-                raise ValueError("'name' is required when type is 'apiKey'")
+                raise BadRequest("'name' is required when type is 'apiKey'")
             if self.in_field is None:
-                raise ValueError("'in_field' is required when type is 'apiKey'")
+                raise BadRequest("'in_field' is required when type is 'apiKey'")
 
         return self
 
@@ -111,7 +112,7 @@ class OpenApiToolBinding(BaseModel):
     @model_validator(mode='after')
     def validate_openapi_tool_binding(self):
         if len(self.servers) != 1:
-            raise ValueError("OpenAPI definition must include exactly one server")
+            raise BadRequest("OpenAPI definition must include exactly one server")
         return self
 
 
@@ -129,7 +130,7 @@ class WxFlowsToolBinding(BaseModel):
     @model_validator(mode='after')
     def validate_security_scheme(self) -> 'WxFlowsToolBinding':
         if self.security.type != 'apiKey':
-            raise ValueError("'security' scheme must be of type 'apiKey'")
+            raise BadRequest("'security' scheme must be of type 'apiKey'")
         return self
 
 
@@ -173,9 +174,9 @@ class ToolBinding(BaseModel):
             self.flow is not None
         ]
         if sum(bindings) == 0:
-            raise ValueError("One binding must be set")
+            raise BadRequest("One binding must be set")
         if sum(bindings) > 1:
-            raise ValueError("Only one binding can be set")
+            raise BadRequest("Only one binding can be set")
         return self
 
 
 class ToolSpec(BaseModel):
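These validators now raise a new BadRequest exception instead of ValueError; the exception lives in ibm_watsonx_orchestrate/utils/exceptions.py (added with +23 lines in this release) but its body is not shown in this diff. A minimal sketch of what such a class could look like — everything beyond the BadRequest name is an assumption:

```python
# Hypothetical sketch only -- the real utils/exceptions.py is not included in this diff.
class BadRequest(Exception):
    """Raised when user-supplied input fails validation (an HTTP 400-style error)."""

    def __init__(self, message: str):
        super().__init__(message)
        self.message = message

# The pydantic validators above would then raise it exactly like ValueError:
#     raise BadRequest("'scheme' is required when type is 'http'")
```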
@@ -31,6 +31,13 @@ def agent_create(
         str,
         typer.Option("--name", "-n", help="Name of the agent you wish to create"),
     ],
+    description: Annotated[
+        str,
+        typer.Option(
+            "--description",
+            help="Description of the agent",
+        ),
+    ],
     title: Annotated[
         str,
         typer.Option("--title", "-t", help="Title of the agent you wish to create. Only needed for External and Assistant Agents"),
@@ -87,13 +94,6 @@ def agent_create(
         str,
         typer.Option("--app-id", help="Application ID for the agent"),
     ] = None,
-    description: Annotated[
-        str,
-        typer.Option(
-            "--description",
-            help="Description of the agent",
-        ),
-    ] = None,
     llm: Annotated[
         str,
         typer.Option(
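Moving --description ahead of the defaulted parameters makes it a required option when creating an agent from the CLI, rather than an optional one. A standalone illustration of the typer pattern involved (not the ADK's actual command):

```python
# Standalone illustration: an Annotated typer option with no default is required,
# while "= None" keeps it optional.
import typer
from typing_extensions import Annotated

app = typer.Typer()

@app.command()
def create(
    name: Annotated[str, typer.Option("--name", "-n", help="Name of the agent")],
    description: Annotated[str, typer.Option("--description", help="Description of the agent")],
    llm: Annotated[str, typer.Option("--llm", help="LLM used by the agent")] = None,
):
    print(name, description, llm)

if __name__ == "__main__":
    app()
```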
@@ -29,6 +29,7 @@ from ibm_watsonx_orchestrate.client.agents.agent_client import AgentClient, Agen
 from ibm_watsonx_orchestrate.client.agents.external_agent_client import ExternalAgentClient
 from ibm_watsonx_orchestrate.client.agents.assistant_agent_client import AssistantAgentClient
 from ibm_watsonx_orchestrate.client.tools.tool_client import ToolClient
+from ibm_watsonx_orchestrate.utils.exceptions import BadRequest
 from ibm_watsonx_orchestrate.client.connections import get_connections_client
 from ibm_watsonx_orchestrate.client.knowledge_bases.knowledge_base_client import KnowledgeBaseClient
 
@@ -71,7 +72,7 @@ def create_agent_from_spec(file:str, kind:str) -> Agent | ExternalAgent | Assist
         case AgentKind.ASSISTANT:
             agent = AssistantAgent.from_spec(file)
         case _:
-            raise ValueError("'kind' must be either 'native' or 'external'")
+            raise BadRequest("'kind' must be either 'native' or 'external'")
 
     return agent
 
@@ -88,7 +89,7 @@ def parse_file(file: str) -> List[Agent | ExternalAgent | AssistantAgent]:
         agents = import_python_agent(file)
         return agents
     else:
-        raise ValueError("file must end in .json, .yaml, .yml or .py")
+        raise BadRequest("file must end in .json, .yaml, .yml or .py")
 
 def parse_create_native_args(name: str, kind: AgentKind, description: str | None, **args) -> dict:
     agent_details = {
@@ -950,7 +951,7 @@ class AgentsController:
         elif kind == AgentKind.ASSISTANT:
             client = self.get_assistant_client()
         else:
-            raise ValueError("'kind' must be 'native'")
+            raise BadRequest("'kind' must be 'native'")
 
         draft_agents = client.get_draft_by_name(name)
         if len(draft_agents) > 1:
@@ -23,7 +23,7 @@ from ibm_watsonx_orchestrate.cli.config import (
 )
 from ibm_watsonx_orchestrate.client.client import Client
 from ibm_watsonx_orchestrate.client.client_errors import ClientError
-from ibm_watsonx_orchestrate.client.agents.agent_client import AgentClient, ClientAPIException
+from ibm_watsonx_orchestrate.client.knowledge_bases.knowledge_base_client import KnowledgeBaseClient, ClientAPIException
 from ibm_watsonx_orchestrate.client.credentials import Credentials
 from threading import Lock
 from ibm_watsonx_orchestrate.client.utils import is_local_dev, check_token_validity, is_cpd_env
@@ -55,13 +55,13 @@ def _validate_token_functionality(token: str, url: str) -> None:
     '''
     is_cpd = is_cpd_env(url)
     if is_cpd is True:
-        agent_client = AgentClient(base_url=url, api_key=token, is_local=is_local_dev(url), verify=False)
+        knowledge_base_client = KnowledgeBaseClient(base_url=url, api_key=token, is_local=is_local_dev(url), verify=False)
     else:
-        agent_client = AgentClient(base_url=url, api_key=token, is_local=is_local_dev(url))
-    agent_client.api_key = token
+        knowledge_base_client = KnowledgeBaseClient(base_url=url, api_key=token, is_local=is_local_dev(url))
+    knowledge_base_client.api_key = token
 
     try:
-        agent_client.get()
+        knowledge_base_client.get()
     except ClientAPIException as e:
         if e.response.status_code >= 400:
             reason = e.response.reason
@@ -4,6 +4,8 @@ from enum import Enum
 class EnvironmentAuthType(str, Enum):
     IBM_CLOUD_IAM = 'ibm_iam'
     MCSP = 'mcsp'
+    MCSP_V1 = 'mcsp_v1'
+    MCSP_V2 = 'mcsp_v2'
     CPD = 'cpd'
 
     def __str__(self):
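The environment auth types gain explicit MCSP token-service versions alongside the generic mcsp value. Because the enum subclasses str, a raw value can be converted directly; the snippet below is a minimal sketch (the CLI wiring that consumes it is not shown in this diff):

```python
# Minimal sketch: str-backed enums accept their raw values directly.
from ibm_watsonx_orchestrate.cli.commands.environment.types import EnvironmentAuthType

auth_type = EnvironmentAuthType("mcsp_v2")    # -> EnvironmentAuthType.MCSP_V2
assert auth_type == "mcsp_v2"                 # str subclass, so plain comparison works
```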
@@ -21,24 +21,6 @@ def knowledge_base_import(
     controller = KnowledgeBaseController()
     controller.import_knowledge_base(file=file, app_id=app_id)
 
-@knowledge_bases_app.command(name="patch", help="Patch a knowledge base by uploading documents, or providing an external vector index")
-def knowledge_base_patch(
-    file: Annotated[
-        str,
-        typer.Option("--file", "-f", help="YAML or JSON file with knowledge base definition"),
-    ],
-    name: Annotated[
-        str,
-        typer.Option("--name", "-n", help="Name of the knowledge base you wish to update"),
-    ]=None,
-    id: Annotated[
-        str,
-        typer.Option("--id", "-i", help="ID of the knowledge base you wish to update"),
-    ]=None
-):
-    controller = KnowledgeBaseController()
-    controller.update_knowledge_base(id=id, name=name, file=file)
-
 
 @knowledge_bases_app.command(name="list", help="List all knowledge bases")
 def list_knowledge_bases(
@@ -8,7 +8,6 @@ import inspect
 from pathlib import Path
 from typing import List
 
-from ibm_watsonx_orchestrate.agent_builder.knowledge_bases.knowledge_base_requests import KnowledgeBaseUpdateRequest
 from ibm_watsonx_orchestrate.agent_builder.knowledge_bases.knowledge_base import KnowledgeBase
 from ibm_watsonx_orchestrate.client.knowledge_bases.knowledge_base_client import KnowledgeBaseClient
 from ibm_watsonx_orchestrate.client.base_api_client import ClientAPIException
@@ -72,11 +71,20 @@ class KnowledgeBaseController:
         client = self.get_client()
 
         knowledge_bases = parse_file(file=file)
+        existing_knowledge_bases = client.get_by_names([kb.name for kb in knowledge_bases])
+
         for kb in knowledge_bases:
             try:
+                file_dir = "/".join(file.split("/")[:-1])
+
+                existing = list(filter(lambda ex: ex.get('name') == kb.name, existing_knowledge_bases))
+                if len(existing) > 0:
+                    logger.info(f"Existing knowledge base '{kb.name}' found. Updating...")
+                    self.update_knowledge_base(existing[0].get("id"), kb=kb, file_dir=file_dir)
+                    continue
+
                 kb.validate_documents_or_index_exists()
                 if kb.documents:
-                    file_dir = "/".join(file.split("/")[:-1])
                     files = [('files', (get_file_name(file_path), open(get_relative_file_path(file_path, file_dir), 'rb'))) for file_path in kb.documents]
 
                     kb.prioritize_built_in_index = True
@@ -106,10 +114,7 @@ class KnowledgeBaseController:
 
             logger.info(f"Successfully imported knowledge base '{kb.name}'")
         except ClientAPIException as e:
-            if "duplicate key value violates unique constraint" in e.response.text:
-                logger.error(f"A knowledge base with the name '{kb.name}' already exists. Failed to import knowledge base")
-            else:
-                logger.error(f"Error importing knowledge base '{kb.name}\n' {e.response.text}")
+            logger.error(f"Error importing knowledge base '{kb.name}\n' {e.response.text}")
 
     def get_id(
         self, id: str, name: str
@@ -131,27 +136,36 @@ class KnowledgeBaseController:
 
 
     def update_knowledge_base(
-        self, id: str, name: str, file: str
+        self, knowledge_base_id: str, kb: KnowledgeBase, file_dir: str
     ) -> None:
-        knowledge_base_id = self.get_id(id, name)
-        update_request = KnowledgeBaseUpdateRequest.from_spec(file=file)
+        filtered_files = []
+
+        if kb.documents:
+            status = self.get_client().status(knowledge_base_id)
+            existing_docs = [doc.get("metadata", {}).get("original_file_name", "") for doc in status.get("documents", [])]
+
+            for filepath in kb.documents:
+                filename = get_file_name(filepath)
 
-        if update_request.documents:
-            file_dir = "/".join(file.split("/")[:-1])
-            files = [('files', (get_file_name(file_path), open(get_relative_file_path(file_path, file_dir), 'rb'))) for file_path in update_request.documents]
+                if filename in existing_docs:
+                    logger.warning(f'Document \"{filename}\" already exists in knowledge base, skipping.')
+                else:
+                    filtered_files.append(filepath)
+
+        if filtered_files:
+            files = [('files', (get_file_name(file_path), open(get_relative_file_path(file_path, file_dir), 'rb'))) for file_path in filtered_files]
 
-            update_request.prioritize_built_in_index = True
-            payload = update_request.model_dump(exclude_none=True);
+            kb.prioritize_built_in_index = True
+            payload = kb.model_dump(exclude_none=True);
             payload.pop('documents');
 
             self.get_client().update_with_documents(knowledge_base_id, payload=payload, files=files)
         else:
-            if update_request.conversational_search_tool and update_request.conversational_search_tool.index_config:
-                update_request.prioritize_built_in_index = False
-            self.get_client().update(knowledge_base_id, update_request.model_dump(exclude_none=True))
+            if kb.conversational_search_tool and kb.conversational_search_tool.index_config:
+                kb.prioritize_built_in_index = False
+            self.get_client().update(knowledge_base_id, kb.model_dump(exclude_none=True))
 
-        logEnding = f"with ID '{id}'" if id else f"'{name}'"
-        logger.info(f"Successfully updated knowledge base {logEnding}")
+        logger.info(f"Knowledge base '{kb.name}' updated successfully")
 
 
     def knowledge_base_status( self, id: str, name: str) -> None:
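With the standalone patch command removed above, the knowledge base import becomes an upsert: an existing knowledge base with the same name is updated in place, and documents whose original_file_name already appears in the knowledge base status are skipped. A minimal sketch of driving the same path programmatically — the YAML path is illustrative:

```python
# Minimal sketch; this mirrors what the knowledge base import CLI command invokes.
from ibm_watsonx_orchestrate.cli.commands.knowledge_bases.knowledge_bases_controller import (
    KnowledgeBaseController,
)

controller = KnowledgeBaseController()

# First run creates the knowledge base; re-running with the same file now finds it
# by name, updates it, and skips documents that were already uploaded.
controller.import_knowledge_base(file="./knowledge_bases/faq_kb.yaml", app_id=None)
```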
@@ -149,7 +149,7 @@ def models_policy_add(
     retry_attempts: Annotated[
         int,
         typer.Option('--retry-attempts', help='The number of attempts to retry'),
-    ],
+    ] = None,
     strategy_on_code: Annotated[
         List[int],
         typer.Option('--strategy-on-code', help='The http status to consider invoking the strategy'),
@@ -33,6 +33,29 @@ logger = logging.getLogger(__name__)
 server_app = typer.Typer(no_args_is_help=True)
 
 
+_ALWAYS_UNSET: set[str] = {
+    "WO_API_KEY",
+    "WO_INSTANCE",
+    "DOCKER_IAM_KEY",
+    "WO_DEVELOPER_EDITION_SOURCE",
+    "WATSONX_SPACE_ID",
+    "WATSONX_APIKEY",
+    "WO_USERNAME",
+    "WO_PASSWORD",
+}
+
+def define_saas_wdu_runtime(value: str = "none") -> None:
+    cfg = Config()
+
+    current_config_file_values = cfg.get(USER_ENV_CACHE_HEADER)
+    current_config_file_values["SAAS_WDU_RUNTIME"] = value
+
+    cfg.save(
+        {
+            USER_ENV_CACHE_HEADER: current_config_file_values
+        }
+    )
+
 def ensure_docker_installed() -> None:
     try:
         subprocess.run(["docker", "--version"], check=True, capture_output=True)
@@ -261,6 +284,13 @@ def _check_exclusive_observibility(langfuse_enabled: bool, ibm_tele_enabled: boo
         return False
     return True
 
+def _prepare_clean_env(env_file: Path) -> None:
+    """Remove env vars so terminal definitions don't override"""
+    keys_from_file = set(dotenv_values(str(env_file)).keys())
+    keys_to_unset = keys_from_file | _ALWAYS_UNSET
+    for key in keys_to_unset:
+        os.environ.pop(key, None)
+
 def write_merged_env_file(merged_env: dict) -> Path:
     tmp = tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".env")
     with tmp:
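The new _prepare_clean_env helper pops every variable named in the merged env file (plus the hard-coded _ALWAYS_UNSET set) out of os.environ, so stray shell exports can no longer override what the env file specifies. A standalone sketch of the same pattern:

```python
# Standalone illustration of the pattern; the ADK's helper additionally always
# clears the _ALWAYS_UNSET names shown above.
import os
from dotenv import dotenv_values

def clear_shell_overrides(env_file: str, always_unset: set[str]) -> None:
    keys = set(dotenv_values(env_file).keys()) | always_unset
    for key in keys:
        os.environ.pop(key, None)   # after this, values from the file win

clear_shell_overrides(".env", {"WO_API_KEY", "WO_INSTANCE"})
```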
@@ -293,7 +323,8 @@ NON_SECRET_ENV_ITEMS = {
     "WO_INSTANCE",
     "USE_SAAS_ML_TOOLS_RUNTIME",
     "AUTHORIZATION_URL",
-    "OPENSOURCE_REGISTRY_PROXY"
+    "OPENSOURCE_REGISTRY_PROXY",
+    "SAAS_WDU_RUNTIME"
 }
 def persist_user_env(env: dict, include_secrets: bool = False) -> None:
     if include_secrets:
@@ -313,10 +344,10 @@ def get_persisted_user_env() -> dict | None:
     user_env = cfg.get(USER_ENV_CACHE_HEADER) if cfg.get(USER_ENV_CACHE_HEADER) else None
     return user_env
 
-
-def run_compose_lite(final_env_file: Path, experimental_with_langfuse=False, experimental_with_ibm_telemetry=False) -> None:
+def run_compose_lite(final_env_file: Path, experimental_with_langfuse=False, experimental_with_ibm_telemetry=False, with_doc_processing=False) -> None:
     compose_path = get_compose_file()
     compose_command = ensure_docker_compose_installed()
+    _prepare_clean_env(final_env_file)
     db_tag = read_env_file(final_env_file).get('DBTAG', None)
     logger.info(f"Detected architecture: {platform.machine()}, using DBTAG: {db_tag}")
 
@@ -341,19 +372,17 @@ def run_compose_lite(final_env_file: Path, experimental_with_langfuse=False, exp
 
 
     # Step 2: Start all remaining services (except DB)
+    profiles = []
     if experimental_with_langfuse:
-        command = compose_command + [
-            '--profile',
-            'langfuse'
-        ]
-    elif experimental_with_ibm_telemetry:
-        command = compose_command + [
-            '--profile',
-            'ibm-telemetry'
-        ]
-    else:
-        command = compose_command
+        profiles.append("langfuse")
+    if experimental_with_ibm_telemetry:
+        profiles.append("ibm-telemetry")
+    if with_doc_processing:
+        profiles.append("docproc")
 
+    command = compose_command[:]
+    for profile in profiles:
+        command += ["--profile", profile]
 
     command += [
         "-f", str(compose_path),
@@ -431,6 +460,7 @@ def wait_for_wxo_ui_health_check(timeout_seconds=45, interval_seconds=2):
 def run_compose_lite_ui(user_env_file: Path) -> bool:
     compose_path = get_compose_file()
     compose_command = ensure_docker_compose_installed()
+    _prepare_clean_env(user_env_file)
     ensure_docker_installed()
 
     default_env = read_env_file(get_default_env_file())
@@ -525,6 +555,7 @@ def run_compose_lite_ui(user_env_file: Path) -> bool:
 def run_compose_lite_down_ui(user_env_file: Path, is_reset: bool = False) -> None:
     compose_path = get_compose_file()
     compose_command = ensure_docker_compose_installed()
+    _prepare_clean_env(user_env_file)
 
 
     ensure_docker_installed()
@@ -568,6 +599,7 @@ def run_compose_lite_down_ui(user_env_file: Path, is_reset: bool = False) -> Non
 def run_compose_lite_down(final_env_file: Path, is_reset: bool = False) -> None:
     compose_path = get_compose_file()
     compose_command = ensure_docker_compose_installed()
+    _prepare_clean_env(final_env_file)
 
     command = compose_command + [
         '--profile', '*',
@@ -600,6 +632,7 @@ def run_compose_lite_down(final_env_file: Path, is_reset: bool = False) -> None:
 def run_compose_lite_logs(final_env_file: Path, is_reset: bool = False) -> None:
     compose_path = get_compose_file()
     compose_command = ensure_docker_compose_installed()
+    _prepare_clean_env(final_env_file)
 
     command = compose_command + [
         "-f", str(compose_path),
@@ -655,63 +688,63 @@ def confirm_accepts_license_agreement(accepts_by_argument: bool):
 def auto_configure_callback_ip(merged_env_dict: dict) -> dict:
     """
     Automatically detect and configure CALLBACK_HOST_URL if it's empty.
-
+
     Args:
         merged_env_dict: The merged environment dictionary
-
+
     Returns:
         Updated environment dictionary with CALLBACK_HOST_URL set
     """
     callback_url = merged_env_dict.get('CALLBACK_HOST_URL', '').strip()
-
+
     # Only auto-configure if CALLBACK_HOST_URL is empty
     if not callback_url:
         logger.info("Auto-detecting local IP address for async tool callbacks...")
-
+
         system = platform.system()
         ip = None
-
+
         try:
             if system in ("Linux", "Darwin"):
                 result = subprocess.run(["ifconfig"], capture_output=True, text=True, check=True)
                 lines = result.stdout.splitlines()
-
+
                 for line in lines:
                     line = line.strip()
                     # Unix ifconfig output format: "inet 192.168.1.100 netmask 0xffffff00 broadcast 192.168.1.255"
                     if line.startswith("inet ") and "127.0.0.1" not in line:
                         candidate_ip = line.split()[1]
                         # Validate IP is not loopback or link-local
-                        if (candidate_ip and
-                            not candidate_ip.startswith("127.") and
+                        if (candidate_ip and
+                            not candidate_ip.startswith("127.") and
                             not candidate_ip.startswith("169.254")):
                             ip = candidate_ip
                             break
-
+
             elif system == "Windows":
                 result = subprocess.run(["ipconfig"], capture_output=True, text=True, check=True)
                 lines = result.stdout.splitlines()
-
+
                 for line in lines:
                     line = line.strip()
                     # Windows ipconfig output format: " IPv4 Address. . . . . . . . . . . : 192.168.1.100"
                     if "IPv4 Address" in line and ":" in line:
                         candidate_ip = line.split(":")[-1].strip()
                         # Validate IP is not loopback or link-local
-                        if (candidate_ip and
-                            not candidate_ip.startswith("127.") and
+                        if (candidate_ip and
+                            not candidate_ip.startswith("127.") and
                             not candidate_ip.startswith("169.254")):
                             ip = candidate_ip
                             break
-
+
             else:
                 logger.warning(f"Unsupported platform: {system}")
                 ip = None
-
+
         except Exception as e:
             logger.debug(f"IP detection failed on {system}: {e}")
             ip = None
-
+
         if ip:
             callback_url = f"http://{ip}:4321"
             merged_env_dict['CALLBACK_HOST_URL'] = callback_url
@@ -724,7 +757,7 @@ def auto_configure_callback_ip(merged_env_dict: dict) -> dict:
             logger.info("For external tools, consider using ngrok or similar tunneling service.")
     else:
         logger.info(f"Using existing CALLBACK_HOST_URL: {callback_url}")
-
+
     return merged_env_dict
 
 @server_app.command(name="start")
@@ -755,9 +788,16 @@ def server_start(
         "--accept-terms-and-conditions",
         help="By providing this flag you accept the terms and conditions outlined in the logs on server start."
     ),
+    with_doc_processing: bool = typer.Option(
+        False,
+        '--with-doc-processing', '-d',
+        help='Enable IBM Document Processing to extract information from your business documents. Enabling this activates the Watson Document Understanding service.'
+    ),
 ):
     confirm_accepts_license_agreement(accept_terms_and_conditions)
 
+    define_saas_wdu_runtime()
+
     if user_env_file and not Path(user_env_file).exists():
         logger.error(f"Error: The specified environment file '{user_env_file}' does not exist.")
         sys.exit(1)
@@ -789,10 +829,14 @@ def server_start(
         logger.error("Please select either langfuse or ibm telemetry for observability not both")
         sys.exit(1)
 
-    # Add LANGFUSE_ENABLED into the merged_env_dict, for tempus to pick up.
+    # Add LANGFUSE_ENABLED and DOCPROC_ENABLED into the merged_env_dict, for tempus to pick up.
     if experimental_with_langfuse:
         merged_env_dict['LANGFUSE_ENABLED'] = 'true'
-
+
+    if with_doc_processing:
+        merged_env_dict['DOCPROC_ENABLED'] = 'true'
+        define_saas_wdu_runtime("local")
+
     if experimental_with_ibm_telemetry:
         merged_env_dict['USE_IBM_TELEMETRY'] = 'true'
@@ -806,10 +850,12 @@ def server_start(
 
 
     final_env_file = write_merged_env_file(merged_env_dict)
-    run_compose_lite(final_env_file=final_env_file,
-                     experimental_with_langfuse=experimental_with_langfuse,
-                     experimental_with_ibm_telemetry=experimental_with_ibm_telemetry)
 
+    run_compose_lite(final_env_file=final_env_file,
+                     experimental_with_langfuse=experimental_with_langfuse,
+                     experimental_with_ibm_telemetry=experimental_with_ibm_telemetry,
+                     with_doc_processing=with_doc_processing)
+
     run_db_migration()
 
     logger.info("Waiting for orchestrate server to be fully initialized and ready...")
@@ -836,6 +882,8 @@ def server_start(
 
     if experimental_with_langfuse:
         logger.info(f"You can access the observability platform Langfuse at http://localhost:3010, username: orchestrate@ibm.com, password: orchestrate")
+    if with_doc_processing:
+        logger.info(f"Document processing capabilities are now available for use in Flows (both ADK and runtime). Note: This option is currently available only in the Developer edition.")
 
 @server_app.command(name="stop")
 def server_stop(
@@ -845,6 +893,7 @@ def server_stop(
         help="Path to a .env file that overrides default.env. Then environment variables override both."
     )
 ):
+
     ensure_docker_installed()
     default_env_path = get_default_env_file()
     merged_env_dict = merge_env(
@@ -901,9 +950,24 @@ def server_logs(
 def run_db_migration() -> None:
     compose_path = get_compose_file()
     compose_command = ensure_docker_compose_installed()
+    default_env_path = get_default_env_file()
+    merged_env_dict = merge_env(default_env_path, user_env_path=None)
+    merged_env_dict['WATSONX_SPACE_ID']='X'
+    merged_env_dict['WATSONX_APIKEY']='X'
+    merged_env_dict['WXAI_API_KEY'] = ''
+    merged_env_dict['ASSISTANT_EMBEDDINGS_API_KEY'] = ''
+    merged_env_dict['ASSISTANT_LLM_SPACE_ID'] = ''
+    merged_env_dict['ROUTING_LLM_SPACE_ID'] = ''
+    merged_env_dict['USE_SAAS_ML_TOOLS_RUNTIME'] = ''
+    merged_env_dict['BAM_API_KEY'] = ''
+    merged_env_dict['ASSISTANT_EMBEDDINGS_SPACE_ID'] = ''
+    merged_env_dict['ROUTING_LLM_API_KEY'] = ''
+    merged_env_dict['ASSISTANT_LLM_API_KEY'] = ''
+    final_env_file = write_merged_env_file(merged_env_dict)
 
     command = compose_command + [
         "-f", str(compose_path),
+        "--env-file", str(final_env_file),
         "exec",
         "wxo-server-db",
         "bash",
@@ -943,4 +1007,4 @@ def run_db_migration() -> None:
         sys.exit(1)
 
 if __name__ == "__main__":
-    server_app()
+    server_app()
@@ -47,7 +47,7 @@ def import_toolkit(
     ] = None,
     tools: Annotated[
         Optional[str],
-        typer.Option("--tools", "-t", help="Comma-separated list of tools to import. Or you can use `*` to use all tools"),
+        typer.Option("--tools", "-t", help="Comma-separated list of tools to import. Or you can use \"*\" to use all tools"),
     ] = None,
     app_id: Annotated[
         List[str],
@@ -41,7 +41,7 @@ from ibm_watsonx_orchestrate.client.connections import get_connections_client, g
 from ibm_watsonx_orchestrate.client.utils import instantiate_client, is_local_dev
 from ibm_watsonx_orchestrate.flow_builder.utils import import_flow_support_tools
 from ibm_watsonx_orchestrate.utils.utils import sanatize_app_id
-from ibm_watsonx_orchestrate.client.utils import is_local_dev
+from ibm_watsonx_orchestrate.utils.exceptions import BadRequest
 
 from ibm_watsonx_orchestrate import __version__
 
@@ -400,7 +400,14 @@ The [bold]flow tool[/bold] is being imported from [green]`{file}`[/green].
 
 [bold cyan]Additional information:[/bold cyan]
 
-- The [bold green]Get flow status[/bold green] tool is being imported to support flow tools. This tool can query the status of a flow tool instance. You can add it to your agent using the UI or including the following tool name in your agent definition: [green]i__get_flow_status_intrinsic_tool__[/green].
+- The [bold green]Get flow status[/bold green] tool is being imported to support flow tools. This tool can query the status of a flow tool instance. You can add it to your agent using the UI or including the following tool name in your agent definition: [green]i__get_flow_status_intrinsic_tool__[/green].
+
+[bold cyan]Experimental Features - Scheduling Flows and Agents: [/bold cyan]
+- You can now schedule any Flows to be run on a later time. Just include the [bold green]"schedulable=True"[/bold green] attribute in the @flow decorator.
+- Once enabled, you can schedule a flow by saying something like: [bold green]Can you schedule the flow <flow_name> to run everyday at 7am EST for 3 times?[/bold green]
+- To schedule an agent, see the example in [bold green]examples/flow_builder/agent_scheduler[/bold green]. Use that to import the [bold green]agent_run[/bold green] tool to your agent.
+- Use [bold green]agent_run[/bold green] tool to schedule an agent. For example: [bold green]Can you schedule the agent <agent_name> to run every weekday at 8am UK time?[/bold green]
+- In scheduling, it is important to mention timezone or UTC time (also known as Greenwich Mean Time or Coordinated Universal Time) will be used.
 
 """
 
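The import message above advertises experimental scheduling for flows. A hedged sketch of opting a flow in, assuming only that the @flow decorator accepts the schedulable keyword the message describes (import path and flow body are illustrative):

```python
# Hypothetical example based solely on the message above; the real decorator lives in
# ibm_watsonx_orchestrate.flow_builder.flows and its full signature may differ.
from ibm_watsonx_orchestrate.flow_builder.flows import flow

@flow(schedulable=True)   # opt this flow into scheduling
def nightly_cleanup(aflow):
    """Illustrative body; build the flow as usual, then ask the agent e.g.
    'Can you schedule the flow nightly_cleanup to run every day at 7am EST?'"""
    return aflow
```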
 
@@ -477,7 +484,7 @@ The [bold]flow tool[/bold] is being imported from [green]`{file}`[/green].
                 permission="read_only",
                 flow_model=model)
 
-            tools = import_flow_support_tools()
+            tools = import_flow_support_tools(model=model)
 
             tools.append(tool)
 
@@ -554,7 +561,7 @@ class ToolsController:
                 tools = []
                 logger.warning("Skill Import not implemented yet")
             case _:
-                raise ValueError("Invalid kind selected")
+                raise BadRequest("Invalid kind selected")
 
         for tool in tools:
             yield tool