ibm-watsonx-orchestrate 1.9.0.dev0__py3-none-any.whl → 1.10.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. ibm_watsonx_orchestrate/__init__.py +1 -2
  2. ibm_watsonx_orchestrate/agent_builder/agents/types.py +2 -0
  3. ibm_watsonx_orchestrate/agent_builder/connections/__init__.py +1 -1
  4. ibm_watsonx_orchestrate/agent_builder/connections/connections.py +1 -1
  5. ibm_watsonx_orchestrate/agent_builder/connections/types.py +16 -12
  6. ibm_watsonx_orchestrate/agent_builder/knowledge_bases/types.py +45 -2
  7. ibm_watsonx_orchestrate/agent_builder/toolkits/types.py +18 -15
  8. ibm_watsonx_orchestrate/agent_builder/tools/python_tool.py +1 -1
  9. ibm_watsonx_orchestrate/agent_builder/tools/types.py +1 -1
  10. ibm_watsonx_orchestrate/agent_builder/voice_configurations/__init__.py +1 -0
  11. ibm_watsonx_orchestrate/agent_builder/voice_configurations/types.py +98 -0
  12. ibm_watsonx_orchestrate/cli/commands/agents/agents_command.py +20 -0
  13. ibm_watsonx_orchestrate/cli/commands/agents/agents_controller.py +170 -1
  14. ibm_watsonx_orchestrate/cli/commands/connections/connections_command.py +7 -7
  15. ibm_watsonx_orchestrate/cli/commands/connections/connections_controller.py +36 -26
  16. ibm_watsonx_orchestrate/cli/commands/knowledge_bases/knowledge_bases_controller.py +51 -22
  17. ibm_watsonx_orchestrate/cli/commands/server/server_command.py +110 -16
  18. ibm_watsonx_orchestrate/cli/commands/toolkit/toolkit_command.py +43 -10
  19. ibm_watsonx_orchestrate/cli/commands/toolkit/toolkit_controller.py +52 -25
  20. ibm_watsonx_orchestrate/cli/commands/tools/tools_controller.py +5 -0
  21. ibm_watsonx_orchestrate/cli/commands/voice_configurations/voice_configurations_command.py +58 -0
  22. ibm_watsonx_orchestrate/cli/commands/voice_configurations/voice_configurations_controller.py +173 -0
  23. ibm_watsonx_orchestrate/cli/main.py +2 -0
  24. ibm_watsonx_orchestrate/client/agents/agent_client.py +64 -1
  25. ibm_watsonx_orchestrate/client/connections/connections_client.py +4 -3
  26. ibm_watsonx_orchestrate/client/knowledge_bases/knowledge_base_client.py +4 -4
  27. ibm_watsonx_orchestrate/client/voice_configurations/voice_configurations_client.py +75 -0
  28. ibm_watsonx_orchestrate/docker/compose-lite.yml +53 -5
  29. ibm_watsonx_orchestrate/docker/default.env +22 -14
  30. ibm_watsonx_orchestrate/flow_builder/flows/__init__.py +2 -0
  31. ibm_watsonx_orchestrate/flow_builder/flows/flow.py +115 -31
  32. ibm_watsonx_orchestrate/flow_builder/node.py +39 -15
  33. ibm_watsonx_orchestrate/flow_builder/types.py +114 -25
  34. ibm_watsonx_orchestrate/run/connections.py +2 -2
  35. {ibm_watsonx_orchestrate-1.9.0.dev0.dist-info → ibm_watsonx_orchestrate-1.10.0.dist-info}/METADATA +1 -1
  36. {ibm_watsonx_orchestrate-1.9.0.dev0.dist-info → ibm_watsonx_orchestrate-1.10.0.dist-info}/RECORD +39 -34
  37. {ibm_watsonx_orchestrate-1.9.0.dev0.dist-info → ibm_watsonx_orchestrate-1.10.0.dist-info}/WHEEL +0 -0
  38. {ibm_watsonx_orchestrate-1.9.0.dev0.dist-info → ibm_watsonx_orchestrate-1.10.0.dist-info}/entry_points.txt +0 -0
  39. {ibm_watsonx_orchestrate-1.9.0.dev0.dist-info → ibm_watsonx_orchestrate-1.10.0.dist-info}/licenses/LICENSE +0 -0
ibm_watsonx_orchestrate/cli/commands/connections/connections_command.py

@@ -160,7 +160,7 @@ def set_credentials_connection_command(
         typer.Option(
             '--username',
             '-u',
-            help='For basic auth, the username to login with'
+            help='For basic auth and oauth_auth_password_flow, the username to login with'
         )
     ] = None,
     password: Annotated[
@@ -168,7 +168,7 @@ def set_credentials_connection_command(
         typer.Option(
             '--password',
             '-p',
-            help='For basic auth, the password to login with'
+            help='For basic auth and oauth_auth_password_flow, the password to login with'
         )
     ] = None,
     token: Annotated[
@@ -191,14 +191,14 @@ def set_credentials_connection_command(
         typer.Option(
             '--client-id',
             # help='For oauth_auth_on_behalf_of_flow, oauth_auth_code_flow, oauth_auth_implicit_flow, oauth_auth_password_flow and oauth_auth_client_credentials_flow, the client_id to authenticate against the application token server'
-            help='For oauth_auth_on_behalf_of_flow and oauth_auth_client_credentials_flow, the client_id to authenticate against the application token server'
+            help='For oauth_auth_on_behalf_of_flow, oauth_auth_password_flow and oauth_auth_client_credentials_flow, the client_id to authenticate against the application token server'
         )
     ] = None,
     client_secret: Annotated[
         str,
         typer.Option(
             '--client-secret',
-            help='For oauth_auth_client_credentials_flow, the client_secret to authenticate with'
+            help='For oauth_auth_client_credentials_flow and oauth_auth_password_flow, the client_secret to authenticate with'
         )
     ] = None,
     send_via: Annotated[
@@ -213,7 +213,7 @@ def set_credentials_connection_command(
         typer.Option(
             '--token-url',
             # help='For oauth_auth_on_behalf_of_flow, oauth_auth_code_flow, oauth_auth_password_flow and oauth_auth_client_credentials_flow, the url of the application token server'
-            help='For oauth_auth_on_behalf_of_flow and oauth_auth_client_credentials_flow, the url of the application token server'
+            help='For oauth_auth_on_behalf_of_flow, oauth_auth_password_flow and oauth_auth_client_credentials_flow, the url of the application token server'
        )
     ] = None,
     auth_url: Annotated[
@@ -227,14 +227,14 @@ def set_credentials_connection_command(
         str,
         typer.Option(
             '--grant-type',
-            help='For oauth_auth_on_behalf_of_flow and oauth_auth_client_credentials_flow, the grant type used by the application token server'
+            help='For oauth_auth_on_behalf_of_flow, oauth_auth_password_flow and oauth_auth_client_credentials_flow, the grant type used by the application token server'
         )
     ] = None,
     scope: Annotated[
         str,
         typer.Option(
             '--scope',
-            help='For oauth_auth_code_flow and oauth_auth_client_credentials_flow, the optional scopes used by the application token server. Should be in the form of a space seperated string.'
+            help='For oauth_auth_code_flow, oauth_auth_password_flow and oauth_auth_client_credentials_flow, the optional scopes used by the application token server. Should be in the form of a space seperated string.'
         )
     ] = None,
     entries: Annotated[
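Taken together, these help-text updates document that the new oauth_auth_password_flow connection type reuses the same credential flags as the other OAuth flows. A minimal sketch of the keyword arguments the command hands to the validation and credential-building helpers in the next file (values are illustrative placeholders, not taken from the package):

    # Illustrative kwargs for --type oauth_auth_password_flow; the key names match the
    # filter list used by _get_credentials() below, the values are placeholders.
    kwargs = {
        "username": "alice",
        "password": "s3cret",
        "client_id": "my-client",
        "client_secret": "my-secret",
        "token_url": "https://idp.example.com/oauth/token",
        "grant_type": "password",      # optional
        "scope": "openid profile",     # optional, space separated
    }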
ibm_watsonx_orchestrate/cli/commands/connections/connections_controller.py

@@ -19,15 +19,15 @@ from ibm_watsonx_orchestrate.agent_builder.connections.types import (
     BasicAuthCredentials,
     BearerTokenAuthCredentials,
     APIKeyAuthCredentials,
-    # OAuth2AuthCodeCredentials,
+    OAuth2AuthCodeCredentials,
     OAuth2ClientCredentials,
     # OAuth2ImplicitCredentials,
-    # OAuth2PasswordCredentials,
+    OAuth2PasswordCredentials,
     OAuthOnBehalfOfCredentials,
     KeyValueConnectionCredentials,
     CREDENTIALS,
     IdentityProviderCredentials,
-    OAUTH_CONNECTION_TYPES, OAuth2AuthCodeCredentials
+    OAUTH_CONNECTION_TYPES
 
 )
 
@@ -115,7 +115,7 @@ def _format_token_headers(header_list: List) -> dict:
 
 def _validate_connection_params(type: ConnectionType, **args) -> None:
 
-    if type == ConnectionType.BASIC_AUTH and (
+    if type in {ConnectionType.BASIC_AUTH, ConnectionType.OAUTH2_PASSWORD} and (
         args.get('username') is None or args.get('password') is None
     ):
         raise typer.BadParameter(
@@ -136,7 +136,7 @@ def _validate_connection_params(type: ConnectionType, **args) -> None:
             f"Missing flags --api-key is required for type {type}"
         )
 
-    if type in {ConnectionType.OAUTH2_CLIENT_CREDS, ConnectionType.OAUTH2_AUTH_CODE} and args.get('client_secret') is None:
+    if type in {ConnectionType.OAUTH2_CLIENT_CREDS, ConnectionType.OAUTH2_AUTH_CODE, ConnectionType.OAUTH2_PASSWORD} and args.get('client_secret') is None:
         raise typer.BadParameter(
             f"Missing flags --client-secret is required for type {type}"
         )
@@ -146,14 +146,14 @@ def _validate_connection_params(type: ConnectionType, **args) -> None:
             f"Missing flags --auth-url is required for type {type}"
         )
 
-    if type in {ConnectionType.OAUTH_ON_BEHALF_OF_FLOW, ConnectionType.OAUTH2_CLIENT_CREDS, ConnectionType.OAUTH2_AUTH_CODE} and (
+    if type in {ConnectionType.OAUTH_ON_BEHALF_OF_FLOW, ConnectionType.OAUTH2_CLIENT_CREDS, ConnectionType.OAUTH2_AUTH_CODE, ConnectionType.OAUTH2_PASSWORD} and (
         args.get('client_id') is None
     ):
         raise typer.BadParameter(
             f"Missing flags --client-id is required for type {type}"
         )
 
-    if type in {ConnectionType.OAUTH_ON_BEHALF_OF_FLOW, ConnectionType.OAUTH2_CLIENT_CREDS, ConnectionType.OAUTH2_AUTH_CODE} and (
+    if type in {ConnectionType.OAUTH_ON_BEHALF_OF_FLOW, ConnectionType.OAUTH2_CLIENT_CREDS, ConnectionType.OAUTH2_AUTH_CODE, ConnectionType.OAUTH2_PASSWORD} and (
         args.get('token_url') is None
     ):
         raise typer.BadParameter(
@@ -209,13 +209,11 @@ def _get_credentials(type: ConnectionType, **kwargs):
             #     authorization_url=kwargs.get("auth_url"),
             #     client_id=kwargs.get("client_id"),
             # )
-        # case ConnectionType.OAUTH2_PASSWORD:
-        #     return OAuth2PasswordCredentials(
-        #         authorization_url=kwargs.get("auth_url"),
-        #         client_id=kwargs.get("client_id"),
-        #         client_secret=kwargs.get("client_secret"),
-        #         token_url=kwargs.get("token_url")
-        #     )
+        case ConnectionType.OAUTH2_PASSWORD:
+            keys = ["username", "password", "client_id","client_secret","token_url","grant_type", "scope"]
+            filtered_args = { key_name: kwargs[key_name] for key_name in keys if kwargs.get(key_name) }
+            return OAuth2PasswordCredentials(**filtered_args)
+
         case ConnectionType.OAUTH_ON_BEHALF_OF_FLOW:
             return OAuthOnBehalfOfCredentials(
                 client_id=kwargs.get("client_id"),
@@ -283,25 +281,24 @@ def add_configuration(config: ConnectionConfiguration) -> None:
         logger.error(response_text)
         exit(1)
 
-def add_credentials(app_id: str, environment: ConnectionEnvironment, use_app_credentials: bool, credentials: CREDENTIALS) -> None:
+def add_credentials(app_id: str, environment: ConnectionEnvironment, use_app_credentials: bool, credentials: CREDENTIALS, payload: dict = None) -> None:
     client = get_connections_client()
     try:
         existing_credentials = client.get_credentials(app_id=app_id, env=environment, use_app_credentials=use_app_credentials)
-        if use_app_credentials:
-            payload = {
-                "app_credentials": credentials.model_dump(exclude_none=True)
-            }
-        else:
-            payload = {
-                "runtime_credentials": credentials.model_dump(exclude_none=True)
-            }
+        if not payload:
+            if use_app_credentials:
+                payload = {
+                    "app_credentials": credentials.model_dump(exclude_none=True)
+                }
+            else:
+                payload = {
+                    "runtime_credentials": credentials.model_dump(exclude_none=True)
+                }
 
-        logger.info(f"Setting credentials for environment '{environment}' on connection '{app_id}'")
         if existing_credentials:
            client.update_credentials(app_id=app_id, env=environment, use_app_credentials=use_app_credentials, payload=payload)
         else:
            client.create_credentials(app_id=app_id,env=environment, use_app_credentials=use_app_credentials, payload=payload)
-        logger.info(f"Credentials successfully set for '{environment}' environment of connection '{app_id}'")
     except requests.HTTPError as e:
         response = e.response
         response_text = response.text
@@ -489,7 +486,20 @@ def set_credentials_connection(
     _validate_connection_params(type=conn_type, **kwargs)
     credentials = _get_credentials(type=conn_type, **kwargs)
 
-    add_credentials(app_id=app_id, environment=environment, use_app_credentials=use_app_credentials, credentials=credentials)
+    # Special handling for oauth2 password flow as it sends both app_creds and runtime_creds
+    logger.info(f"Setting credentials for environment '{environment}' on connection '{app_id}'")
+    if conn_type == ConnectionType.OAUTH2_PASSWORD:
+        credentials_model = credentials.model_dump(exclude_none=True)
+        runtime_cred_keys = {"username", "password"}
+        app_creds = {"app_credentials": {k: credentials_model[k] for k in credentials_model if k not in runtime_cred_keys}}
+        runtime_creds = {"runtime_credentials": {k: credentials_model[k] for k in credentials_model if k in runtime_cred_keys}}
+
+        add_credentials(app_id=app_id, environment=environment, use_app_credentials=True, credentials=credentials, payload=app_creds)
+        add_credentials(app_id=app_id, environment=environment, use_app_credentials=False, credentials=credentials, payload=runtime_creds)
+    else:
+        add_credentials(app_id=app_id, environment=environment, use_app_credentials=use_app_credentials, credentials=credentials)
+
+    logger.info(f"Credentials successfully set for '{environment}' environment of connection '{app_id}'")
 
 def set_identity_provider_connection(
     app_id: str,
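The net effect for oauth_auth_password_flow is that one set-credentials call now writes two payloads: the client settings become app_credentials while the user secrets become runtime_credentials. A minimal reproduction of that split, sketched with plain dicts in place of the pydantic model:

    # Assumed sketch of the payload split performed by set_credentials_connection() above.
    credentials_model = {
        "username": "alice", "password": "s3cret",
        "client_id": "my-client", "client_secret": "my-secret",
        "token_url": "https://idp.example.com/oauth/token",
    }
    runtime_cred_keys = {"username", "password"}
    app_creds = {"app_credentials": {k: v for k, v in credentials_model.items() if k not in runtime_cred_keys}}
    runtime_creds = {"runtime_credentials": {k: v for k, v in credentials_model.items() if k in runtime_cred_keys}}
    # app_creds     -> {'app_credentials': {'client_id': 'my-client', 'client_secret': 'my-secret', 'token_url': ...}}
    # runtime_creds -> {'runtime_credentials': {'username': 'alice', 'password': 's3cret'}}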
ibm_watsonx_orchestrate/cli/commands/knowledge_bases/knowledge_bases_controller.py

@@ -13,6 +13,7 @@ from ibm_watsonx_orchestrate.client.knowledge_bases.knowledge_base_client import
 from ibm_watsonx_orchestrate.client.base_api_client import ClientAPIException
 from ibm_watsonx_orchestrate.client.connections import get_connections_client
 from ibm_watsonx_orchestrate.client.utils import instantiate_client
+from ibm_watsonx_orchestrate.agent_builder.knowledge_bases.types import FileUpload
 
 logger = logging.getLogger(__name__)
 
@@ -43,7 +44,8 @@ def parse_file(file: str) -> List[KnowledgeBase]:
 def to_column_name(col: str):
     return " ".join([word.capitalize() if not word[0].isupper() else word for word in col.split("_")])
 
-def get_file_name(path: str):
+def get_file_name(file: str | FileUpload):
+    path = file.path if isinstance(file, FileUpload) else file
     # This name prettifying currently screws up file type detection on ingestion
     # return to_column_name(path.split("/")[-1].split(".")[0])
     return path.split("/")[-1]
@@ -55,7 +57,11 @@ def get_relative_file_path(path, dir):
         return f"{dir}{path.removeprefix('.')}"
     else:
         return f"{dir}/{path}"
-
+
+def build_file_object(file_dir: str, file: str | FileUpload):
+    if isinstance(file, FileUpload):
+        return ('files', (get_file_name(file.path), open(get_relative_file_path(file.path, file_dir), 'rb')))
+    return ('files', (get_file_name(file), open(get_relative_file_path(file, file_dir), 'rb')))
 
 class KnowledgeBaseController:
     def __init__(self):
@@ -72,6 +78,21 @@ class KnowledgeBaseController:
 
         knowledge_bases = parse_file(file=file)
 
+        if app_id:
+            connections_client = get_connections_client()
+            connection_id = None
+
+            connections = connections_client.get_draft_by_app_id(app_id=app_id)
+            if not connections:
+                logger.error(f"No connection exists with the app-id '{app_id}'")
+                exit(1)
+
+            connection_id = connections.connection_id
+
+            for kb in knowledge_bases:
+                if kb.conversational_search_tool and kb.conversational_search_tool.index_config and len(kb.conversational_search_tool.index_config) > 0:
+                    kb.conversational_search_tool.index_config[0].connection_id = connection_id
+
         existing_knowledge_bases = client.get_by_names([kb.name for kb in knowledge_bases])
 
         for kb in knowledge_bases:
@@ -86,32 +107,32 @@ class KnowledgeBaseController:
 
                 kb.validate_documents_or_index_exists()
                 if kb.documents:
-                    files = [('files', (get_file_name(file_path), open(get_relative_file_path(file_path, file_dir), 'rb'))) for file_path in kb.documents]
+                    files = [build_file_object(file_dir, file) for file in kb.documents]
+                    file_urls = { get_file_name(file): file.url for file in kb.documents if isinstance(file, FileUpload) and file.url }
 
                     kb.prioritize_built_in_index = True
                     payload = kb.model_dump(exclude_none=True);
                     payload.pop('documents');
 
-                    client.create_built_in(payload=payload, files=files)
+                    data = {
+                        'knowledge_base': json.dumps(payload),
+                        'file_urls': json.dumps(file_urls)
+                    }
+
+                    client.create_built_in(payload=data, files=files)
                 else:
                     if len(kb.conversational_search_tool.index_config) != 1:
                         raise ValueError(f"Must provide exactly one conversational_search_tool.index_config. Provided {len(kb.conversational_search_tool.index_config)}.")
 
-
-                    if app_id:
-                        connections_client = get_connections_client()
-                        connection_id = None
-                        if app_id is not None:
-                            connections = connections_client.get_draft_by_app_id(app_id=app_id)
-                            if not connections:
-                                logger.error(f"No connection exists with the app-id '{app_id}'")
-                                exit(1)
-
-                            connection_id = connections.connection_id
-                        kb.conversational_search_tool.index_config[0].connection_id = connection_id
+                    if (kb.conversational_search_tool.index_config[0].milvus or \
+                        kb.conversational_search_tool.index_config[0].elastic_search) and \
+                        not kb.conversational_search_tool.index_config[0].connection_id:
+                        raise ValueError(f"Must provide credentials (via --app-id) when using milvus or elastic_search.")
 
                     kb.prioritize_built_in_index = False
-                    client.create(payload=kb.model_dump(exclude_none=True))
+                    data = { 'knowledge_base': json.dumps(kb.model_dump(exclude_none=True)) }
+
+                    client.create(payload=data)
 
                 logger.info(f"Successfully imported knowledge base '{kb.name}'")
             except ClientAPIException as e:
@@ -144,8 +165,8 @@ class KnowledgeBaseController:
            existing_docs = [doc.get("metadata", {}).get("original_file_name", "") for doc in status.get("documents", [])]
 
            removed_docs = existing_docs[:]
-           for filepath in kb.documents:
-               filename = get_file_name(filepath)
+           for file in kb.documents:
+               filename = get_file_name(file)
 
               if filename in existing_docs:
                   logger.warning(f'Document \"{filename}\" already exists in knowledge base. Updating...')
@@ -155,17 +176,25 @@ class KnowledgeBaseController:
                   logger.warning(f'Document \"{filename}\" removed from knowledge base.')
 
 
-           files = [('files', (get_file_name(file_path), open(get_relative_file_path(file_path, file_dir), 'rb'))) for file_path in kb.documents]
+           files = [build_file_object(file_dir, file) for file in kb.documents]
+           file_urls = { get_file_name(file): file.url for file in kb.documents if isinstance(file, FileUpload) and file.url }
 
           kb.prioritize_built_in_index = True
           payload = kb.model_dump(exclude_none=True);
           payload.pop('documents');
 
-          self.get_client().update_with_documents(knowledge_base_id, payload=payload, files=files)
+          data = {
+              'knowledge_base': json.dumps(payload),
+              'file_urls': json.dumps(file_urls)
+          }
+
+          self.get_client().update_with_documents(knowledge_base_id, payload=data, files=files)
        else:
           if kb.conversational_search_tool and kb.conversational_search_tool.index_config:
               kb.prioritize_built_in_index = False
-          self.get_client().update(knowledge_base_id, kb.model_dump(exclude_none=True))
+
+          data = { 'knowledge_base': json.dumps(kb.model_dump(exclude_none=True)) }
+          self.get_client().update(knowledge_base_id, payload=data)
 
        logger.info(f"Knowledge base '{kb.name}' updated successfully")
ibm_watsonx_orchestrate/cli/commands/server/server_command.py

@@ -332,7 +332,6 @@ def write_merged_env_file(merged_env: dict, target_path: str = None) -> Path:
             file.write(f"{key}={val}\n")
     return Path(file.name)
 
-
 def get_dbtag_from_architecture(merged_env_dict: dict) -> str:
     """Detects system architecture and returns the corresponding DBTAG."""
     arch = platform.machine()
@@ -370,9 +369,14 @@ def get_persisted_user_env() -> dict | None:
     user_env = cfg.get(USER_ENV_CACHE_HEADER) if cfg.get(USER_ENV_CACHE_HEADER) else None
     return user_env
 
-def run_compose_lite(final_env_file: Path, experimental_with_langfuse=False, experimental_with_ibm_telemetry=False, with_doc_processing=False) -> None:
-
-
+def run_compose_lite(
+        final_env_file: Path,
+        experimental_with_langfuse=False,
+        experimental_with_ibm_telemetry=False,
+        with_doc_processing=False,
+        with_voice=False,
+        experimental_with_langflow=False,
+) -> None:
     compose_path = get_compose_file()
 
     compose_command = ensure_docker_compose_installed()
@@ -400,7 +404,11 @@ def run_compose_lite(final_env_file: Path, experimental_with_langfuse=False, exp
     logger.info("Database container started successfully. Now starting other services...")
 
 
-    # Step 2: Start all remaining services (except DB)
+    # Step 2: Create Langflow DB (if enabled)
+    if experimental_with_langflow:
+        create_langflow_db()
+
+    # Step 3: Start all remaining services (except DB)
     profiles = []
     if experimental_with_langfuse:
         profiles.append("langfuse")
@@ -408,6 +416,10 @@ def run_compose_lite(final_env_file: Path, experimental_with_langfuse=False, exp
         profiles.append("ibm-telemetry")
     if with_doc_processing:
         profiles.append("docproc")
+    if with_voice:
+        profiles.append("voice")
+    if experimental_with_langflow:
+        profiles.append("langflow")
 
     command = compose_command[:]
     for profile in profiles:
@@ -659,7 +671,6 @@ def run_compose_lite_down(final_env_file: Path, is_reset: bool = False) -> None:
     )
     sys.exit(1)
 
-
 def run_compose_lite_logs(final_env_file: Path, is_reset: bool = False) -> None:
     compose_path = get_compose_file()
     compose_command = ensure_docker_compose_installed()
@@ -855,6 +866,17 @@ def server_start(
         '--compose-file', '-f',
         help='Provide the path to a custom docker-compose file to use instead of the default compose file'
     ),
+    with_voice: bool = typer.Option(
+        False,
+        '--with-voice', '-v',
+        help='Enable voice controller to interact with the chat via voice channels'
+    ),
+    experimental_with_langflow: bool = typer.Option(
+        False,
+        '--experimental-with-langflow',
+        help='(Experimental) Enable Langflow UI, available at http://localhost:7861',
+        hidden=True
+    ),
 ):
     confirm_accepts_license_agreement(accept_terms_and_conditions)
 
@@ -896,6 +918,10 @@ def server_start(
     if experimental_with_ibm_telemetry:
         merged_env_dict['USE_IBM_TELEMETRY'] = 'true'
 
+    if experimental_with_langflow:
+        merged_env_dict['LANGFLOW_ENABLED'] = 'true'
+
+
     try:
         dev_edition_source = get_dev_edition_source(merged_env_dict)
         docker_login_by_dev_edition_source(merged_env_dict, dev_edition_source)
@@ -908,7 +934,9 @@ def server_start(
     run_compose_lite(final_env_file=final_env_file,
                      experimental_with_langfuse=experimental_with_langfuse,
                      experimental_with_ibm_telemetry=experimental_with_ibm_telemetry,
-                     with_doc_processing=with_doc_processing)
+                     with_doc_processing=with_doc_processing,
+                     with_voice=with_voice,
+                     experimental_with_langflow=experimental_with_langflow)
 
     run_db_migration()
 
@@ -938,6 +966,8 @@ def server_start(
         logger.info(f"You can access the observability platform Langfuse at http://localhost:3010, username: orchestrate@ibm.com, password: orchestrate")
     if with_doc_processing:
         logger.info(f"Document processing in Flows (Public Preview) has been enabled.")
+    if experimental_with_langflow:
+        logger.info("Langflow has been enabled, the Langflow UI is available at http://localhost:7861")
 
 @server_app.command(name="stop")
 def server_stop(
@@ -1018,15 +1048,11 @@ def run_db_migration() -> None:
     merged_env_dict['ROUTING_LLM_API_KEY'] = ''
     merged_env_dict['ASSISTANT_LLM_API_KEY'] = ''
     final_env_file = write_merged_env_file(merged_env_dict)
+
 
-    command = compose_command + [
-        "-f", str(compose_path),
-        "--env-file", str(final_env_file),
-        "exec",
-        "wxo-server-db",
-        "bash",
-        "-c",
-        '''
+    pg_user = merged_env_dict.get("POSTGRES_USER","postgres")
+
+    migration_command = f'''
     APPLIED_MIGRATIONS_FILE="/var/lib/postgresql/applied_migrations/applied_migrations.txt"
     touch "$APPLIED_MIGRATIONS_FILE"
 
@@ -1037,7 +1063,7 @@ def run_db_migration() -> None:
             echo "Skipping already applied migration: $filename"
         else
             echo "Applying migration: $filename"
-            if psql -U postgres -d postgres -q -f "$file" > /dev/null 2>&1; then
+            if psql -U {pg_user} -d postgres -q -f "$file" > /dev/null 2>&1; then
                 echo "$filename" >> "$APPLIED_MIGRATIONS_FILE"
             else
                 echo "Error applying $filename. Stopping migrations."
@@ -1046,6 +1072,15 @@ def run_db_migration() -> None:
         fi
     done
     '''
+
+    command = compose_command + [
+        "-f", str(compose_path),
+        "--env-file", str(final_env_file),
+        "exec",
+        "wxo-server-db",
+        "bash",
+        "-c",
+        migration_command
     ]
 
     logger.info("Running Database Migration...")
@@ -1060,6 +1095,65 @@ def run_db_migration() -> None:
         )
         sys.exit(1)
 
+def create_langflow_db() -> None:
+    compose_path = get_compose_file()
+    compose_command = ensure_docker_compose_installed()
+    default_env_path = get_default_env_file()
+    merged_env_dict = merge_env(default_env_path, user_env_path=None)
+    merged_env_dict['WATSONX_SPACE_ID']='X'
+    merged_env_dict['WATSONX_APIKEY']='X'
+    merged_env_dict['WXAI_API_KEY'] = ''
+    merged_env_dict['ASSISTANT_EMBEDDINGS_API_KEY'] = ''
+    merged_env_dict['ASSISTANT_LLM_SPACE_ID'] = ''
+    merged_env_dict['ROUTING_LLM_SPACE_ID'] = ''
+    merged_env_dict['USE_SAAS_ML_TOOLS_RUNTIME'] = ''
+    merged_env_dict['BAM_API_KEY'] = ''
+    merged_env_dict['ASSISTANT_EMBEDDINGS_SPACE_ID'] = ''
+    merged_env_dict['ROUTING_LLM_API_KEY'] = ''
+    merged_env_dict['ASSISTANT_LLM_API_KEY'] = ''
+    final_env_file = write_merged_env_file(merged_env_dict)
+
+    pg_timeout = merged_env_dict.get('POSTGRES_READY_TIMEOUT','10')
+
+    pg_user = merged_env_dict.get("POSTGRES_USER","postgres")
+
+    creation_command = f"""
+    echo 'Waiting for pg to initialize...'
+
+    timeout={pg_timeout}
+    while [[ -z `pg_isready | grep 'accepting connections'` ]] && (( timeout > 0 )); do
+        ((timeout-=1)) && sleep 1;
+    done
+
+    if psql -U {pg_user} -lqt | cut -d \\| -f 1 | grep -qw langflow; then
+        echo 'Existing Langflow DB found'
+    else
+        echo 'Creating Langflow DB'
+        createdb -U "{pg_user}" -O "{pg_user}" langflow;
+        psql -U {pg_user} -q -d postgres -c "GRANT CONNECT ON DATABASE langflow TO {pg_user}";
+    fi
+    """
+    command = compose_command + [
+        "-f", str(compose_path),
+        "--env-file", str(final_env_file),
+        "exec",
+        "wxo-server-db",
+        "bash",
+        "-c",
+        creation_command
+    ]
+
+    logger.info("Preparing Langflow resources...")
+    result = subprocess.run(command, capture_output=False)
+
+    if result.returncode == 0:
+        logger.info("Langflow resources sucessfully created")
+    else:
+        error_message = result.stderr.decode('utf-8') if result.stderr else "Error occurred."
+        logger.error(
+            f"Failed to create Langflow resources\n{error_message}"
+        )
+        sys.exit(1)
 
 def bump_file_iteration(filename: str) -> str:
     regex = re.compile(f"^(?P<name>[^\\(\\s\\.\\)]+)(\\((?P<num>\\d+)\\))?(?P<type>\\.(?:{'|'.join(_EXPORT_FILE_TYPES)}))?$")
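In practice the two new server_start options translate into docker compose profiles inside run_compose_lite. A simplified sketch of that mapping; the exact command assembly (including the --profile argument shape) is assumed, since the loop body is truncated in this diff:

    # Simplified, assumed sketch of how the new flags become compose profiles.
    with_voice = True
    experimental_with_langflow = True

    profiles = []
    if with_voice:
        profiles.append("voice")
    if experimental_with_langflow:
        profiles.append("langflow")

    command = ["docker", "compose", "-f", "compose-lite.yml", "--env-file", ".env"]
    for profile in profiles:
        command += ["--profile", profile]
    command += ["up", "-d"]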
ibm_watsonx_orchestrate/cli/commands/toolkit/toolkit_command.py

@@ -1,7 +1,7 @@
 import typer
 from typing import List
 from typing_extensions import Annotated, Optional
-from ibm_watsonx_orchestrate.agent_builder.toolkits.types import ToolkitKind, Language
+from ibm_watsonx_orchestrate.agent_builder.toolkits.types import ToolkitKind, Language, ToolkitTransportKind
 from ibm_watsonx_orchestrate.cli.commands.toolkit.toolkit_controller import ToolkitController
 import logging
 import sys
@@ -45,6 +45,14 @@ def import_toolkit(
             "The first argument will be used as the executable, the rest as its arguments."
         ),
     ] = None,
+    url: Annotated[
+        Optional[str],
+        typer.Option("--url", "-u", help="The URL of the remote MCP server", hidden=True),
+    ] = None,
+    transport: Annotated[
+        ToolkitTransportKind,
+        typer.Option("--transport", help="The communication protocol to use for the remote MCP server. Only \"sse\" or \"streamable_http\" supported", hidden=True),
+    ] = None,
     tools: Annotated[
         Optional[str],
         typer.Option("--tools", "-t", help="Comma-separated list of tools to import. Or you can use \"*\" to use all tools"),
@@ -64,18 +72,41 @@ def import_toolkit(
     else:
         tool_list = None
 
-    if not package and not package_root and not command:
-        logger.error("You must provide either '--package', '--package-root' or '--command'.")
-        sys.exit(1)
+    if not url and not transport:
+        if not package and not package_root and not command:
+            logger.error("You must provide either '--package', '--package-root' or '--command'.")
+            sys.exit(1)
 
-    if package_root and not command:
-        logger.error("Error: '--command' flag must be provided when '--package-root' is specified.")
-        sys.exit(1)
-
-    if package_root and package:
-        logger.error("Please choose either '--package-root' or '--package' but not both.")
+        if package_root and not command:
+            logger.error("Error: '--command' flag must be provided when '--package-root' is specified.")
+            sys.exit(1)
+
+        if package_root and package:
+            logger.error("Please choose either '--package-root' or '--package' but not both.")
+            sys.exit(1)
+
+    if (url and not transport) or (transport and not url):
+        logger.error("Both '--url' and '--transport' must be provided together for remote MCP.")
         sys.exit(1)
 
+    if url and transport:
+        forbidden_local_opts = []
+        if package:
+            forbidden_local_opts.append("--package")
+        if package_root:
+            forbidden_local_opts.append("--package-root")
+        if language:
+            forbidden_local_opts.append("--language")
+        if command:
+            forbidden_local_opts.append("--command")
+
+        if forbidden_local_opts:
+            logger.error(
+                f"When using '--url' and '--transport' for a remote MCP, you cannot specify: "
+                f"{', '.join(forbidden_local_opts)}"
+            )
+            sys.exit(1)
+
     if package and not package_root:
         if not command:
             if language == Language.NODE:
@@ -97,6 +128,8 @@ def import_toolkit(
         package_root=package_root,
         language=language,
         command=command,
+        url=url,
+        transport=transport
     )
     toolkit_controller.import_toolkit(tools=tool_list, app_id=app_id)
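For reference, a hypothetical remote-MCP import using the new options. Only the url and transport keyword arguments are confirmed by this diff as ToolkitController inputs; the kind and name parameters and the enum member spellings are assumptions:

    from ibm_watsonx_orchestrate.agent_builder.toolkits.types import ToolkitKind, ToolkitTransportKind
    from ibm_watsonx_orchestrate.cli.commands.toolkit.toolkit_controller import ToolkitController

    toolkit_controller = ToolkitController(
        kind=ToolkitKind.MCP,                             # assumed member name
        name="remote-weather-toolkit",                    # assumed parameter
        url="https://mcp.example.com/mcp",                # new: --url
        transport=ToolkitTransportKind.STREAMABLE_HTTP,   # new: --transport (member name assumed)
    )
    toolkit_controller.import_toolkit(tools=["*"], app_id=None)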