ibm-watsonx-orchestrate 1.11.1__py3-none-any.whl → 1.12.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ibm_watsonx_orchestrate/__init__.py +1 -1
- ibm_watsonx_orchestrate/agent_builder/agents/types.py +22 -5
- ibm_watsonx_orchestrate/agent_builder/connections/connections.py +3 -3
- ibm_watsonx_orchestrate/agent_builder/connections/types.py +14 -0
- ibm_watsonx_orchestrate/agent_builder/models/types.py +1 -0
- ibm_watsonx_orchestrate/agent_builder/toolkits/base_toolkit.py +1 -1
- ibm_watsonx_orchestrate/agent_builder/tools/__init__.py +1 -0
- ibm_watsonx_orchestrate/agent_builder/tools/base_tool.py +1 -1
- ibm_watsonx_orchestrate/agent_builder/tools/langflow_tool.py +184 -0
- ibm_watsonx_orchestrate/agent_builder/tools/openapi_tool.py +9 -3
- ibm_watsonx_orchestrate/agent_builder/tools/types.py +20 -2
- ibm_watsonx_orchestrate/cli/commands/agents/agents_controller.py +19 -6
- ibm_watsonx_orchestrate/cli/commands/connections/connections_command.py +18 -0
- ibm_watsonx_orchestrate/cli/commands/connections/connections_controller.py +114 -0
- ibm_watsonx_orchestrate/cli/commands/copilot/copilot_controller.py +2 -6
- ibm_watsonx_orchestrate/cli/commands/copilot/copilot_server_controller.py +24 -91
- ibm_watsonx_orchestrate/cli/commands/evaluations/evaluations_command.py +52 -2
- ibm_watsonx_orchestrate/cli/commands/evaluations/evaluations_controller.py +1 -1
- ibm_watsonx_orchestrate/cli/commands/models/model_provider_mapper.py +23 -4
- ibm_watsonx_orchestrate/cli/commands/models/models_controller.py +3 -3
- ibm_watsonx_orchestrate/cli/commands/partners/offering/partners_offering_command.py +56 -0
- ibm_watsonx_orchestrate/cli/commands/partners/offering/partners_offering_controller.py +475 -0
- ibm_watsonx_orchestrate/cli/commands/partners/offering/types.py +99 -0
- ibm_watsonx_orchestrate/cli/commands/partners/partners_command.py +12 -0
- ibm_watsonx_orchestrate/cli/commands/partners/partners_controller.py +0 -0
- ibm_watsonx_orchestrate/cli/commands/server/server_command.py +124 -637
- ibm_watsonx_orchestrate/cli/commands/toolkit/toolkit_command.py +2 -2
- ibm_watsonx_orchestrate/cli/commands/toolkit/toolkit_controller.py +2 -2
- ibm_watsonx_orchestrate/cli/commands/tools/tools_command.py +2 -3
- ibm_watsonx_orchestrate/cli/commands/tools/tools_controller.py +233 -44
- ibm_watsonx_orchestrate/cli/main.py +2 -0
- ibm_watsonx_orchestrate/client/connections/connections_client.py +4 -1
- ibm_watsonx_orchestrate/client/tools/tempus_client.py +3 -0
- ibm_watsonx_orchestrate/client/tools/tool_client.py +5 -2
- ibm_watsonx_orchestrate/client/utils.py +31 -1
- ibm_watsonx_orchestrate/docker/compose-lite.yml +58 -7
- ibm_watsonx_orchestrate/docker/default.env +20 -17
- ibm_watsonx_orchestrate/flow_builder/flows/decorators.py +10 -2
- ibm_watsonx_orchestrate/flow_builder/flows/flow.py +71 -9
- ibm_watsonx_orchestrate/flow_builder/node.py +14 -2
- ibm_watsonx_orchestrate/flow_builder/types.py +36 -3
- ibm_watsonx_orchestrate/langflow/__init__.py +0 -0
- ibm_watsonx_orchestrate/langflow/langflow_utils.py +195 -0
- ibm_watsonx_orchestrate/langflow/lfx_deps.py +84 -0
- ibm_watsonx_orchestrate/utils/docker_utils.py +280 -0
- ibm_watsonx_orchestrate/utils/environment.py +369 -0
- ibm_watsonx_orchestrate/utils/utils.py +7 -3
- {ibm_watsonx_orchestrate-1.11.1.dist-info → ibm_watsonx_orchestrate-1.12.0.dist-info}/METADATA +2 -2
- {ibm_watsonx_orchestrate-1.11.1.dist-info → ibm_watsonx_orchestrate-1.12.0.dist-info}/RECORD +52 -41
- {ibm_watsonx_orchestrate-1.11.1.dist-info → ibm_watsonx_orchestrate-1.12.0.dist-info}/WHEEL +0 -0
- {ibm_watsonx_orchestrate-1.11.1.dist-info → ibm_watsonx_orchestrate-1.12.0.dist-info}/entry_points.txt +0 -0
- {ibm_watsonx_orchestrate-1.11.1.dist-info → ibm_watsonx_orchestrate-1.12.0.dist-info}/licenses/LICENSE +0 -0
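The bulk of the changes below are in `ibm_watsonx_orchestrate/cli/commands/server/server_command.py` (+124/−637), where the inline docker/compose, login, and env-file helpers are deleted and the CLI delegates to the new `DockerUtils`, `DockerComposeCore`, and `DockerLoginService` classes in `utils/docker_utils.py` and to `EnvService` in `utils/environment.py`. A minimal sketch of the call pattern, assembled only from the added lines in the diff below; the env-file path and error handling here are illustrative, not the package's actual code:

```python
import os

from ibm_watsonx_orchestrate.cli.config import Config
from ibm_watsonx_orchestrate.utils.docker_utils import DockerComposeCore, DockerLoginService, DockerUtils
from ibm_watsonx_orchestrate.utils.environment import EnvService

DockerUtils.ensure_docker_installed()          # replaces the old module-level ensure_docker_installed()

cli_config = Config()
env_service = EnvService(cli_config)           # env merging, caching and callback auto-config now live here

# "my.env" is an illustrative path; server_start takes it from its --env-file option.
user_env = env_service.get_user_env(user_env_file="my.env", fallback_to_persisted_env=False)
merged_env = env_service.prepare_server_env_vars(user_env=user_env, should_drop_auth_routes=False)

DockerLoginService(env_service=env_service).login_by_dev_edition_source(merged_env)

final_env_file = env_service.write_merged_env_file(merged_env)
compose_core = DockerComposeCore(env_service)  # wraps the old subprocess docker compose calls
result = compose_core.service_up(
    service_name="wxo-server-db",
    friendly_name="WxO Server DB",
    final_env_file=final_env_file,
    compose_env=os.environ,
)
if result.returncode != 0:
    raise SystemExit(result.returncode)        # the CLI itself logs the error and calls sys.exit(1)
```

The same pattern (static `EnvService` helpers plus a `DockerComposeCore` built from an `EnvService`) recurs in `server stop`, `server reset`, `server logs`, the DB migration, and the new Langflow DB setup shown in the diff.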
--- ibm_watsonx_orchestrate/cli/commands/server/server_command.py (1.11.1)
+++ ibm_watsonx_orchestrate/cli/commands/server/server_command.py (1.12.0)
@@ -1,34 +1,26 @@
-import importlib.resources as resources
 import logging
 import os
 import platform
-import subprocess
 import sys
 import shutil
-import tempfile
 import time
 from pathlib import Path
-from urllib.parse import urlparse

 import re
 import jwt
 import requests
 import typer
-from dotenv import dotenv_values

 from ibm_watsonx_orchestrate.client.utils import instantiate_client

-from ibm_watsonx_orchestrate.cli.commands.server.types import WatsonXAIEnvConfig, ModelGatewayEnvConfig
-
 from ibm_watsonx_orchestrate.cli.commands.environment.environment_controller import _login

-from ibm_watsonx_orchestrate.cli.config import LICENSE_HEADER, \
-    ENV_ACCEPT_LICENSE
-
 from ibm_watsonx_orchestrate.cli.config import PROTECTED_ENV_NAME, clear_protected_env_credentials_token, Config, \
     AUTH_CONFIG_FILE_FOLDER, AUTH_CONFIG_FILE, AUTH_MCSP_TOKEN_OPT, AUTH_SECTION_HEADER, USER_ENV_CACHE_HEADER, LICENSE_HEADER, \
     ENV_ACCEPT_LICENSE
 from ibm_watsonx_orchestrate.client.agents.agent_client import AgentClient
+from ibm_watsonx_orchestrate.utils.docker_utils import DockerLoginService, DockerComposeCore, DockerUtils
+from ibm_watsonx_orchestrate.utils.environment import EnvService

 logger = logging.getLogger(__name__)

@@ -42,305 +34,6 @@ _EXPORT_FILE_TYPES: set[str] = {
     'env'
 }

-_ALWAYS_UNSET: set[str] = {
-    "WO_API_KEY",
-    "WO_INSTANCE",
-    "DOCKER_IAM_KEY",
-    "WO_DEVELOPER_EDITION_SOURCE",
-    "WATSONX_SPACE_ID",
-    "WATSONX_APIKEY",
-    "WO_USERNAME",
-    "WO_PASSWORD",
-}
-
-NON_SECRET_ENV_ITEMS: set[str] = {
-    "WO_DEVELOPER_EDITION_SOURCE",
-    "WO_INSTANCE",
-    "USE_SAAS_ML_TOOLS_RUNTIME",
-    "AUTHORIZATION_URL",
-    "OPENSOURCE_REGISTRY_PROXY",
-    "SAAS_WDU_RUNTIME",
-    "LATEST_ENV_FILE",
-}
-
-def define_saas_wdu_runtime(value: str = "none") -> None:
-    cfg = Config()
-    cfg.write(USER_ENV_CACHE_HEADER,"SAAS_WDU_RUNTIME",value)
-
-def set_compose_file_path_in_env(path: str = None) -> None:
-    Config().save(
-        {
-            USER_ENV_CACHE_HEADER: {
-                "DOCKER_COMPOSE_FILE_PATH" : path
-            }
-        }
-    )
-
-def get_compose_file_path_from_env() -> str:
-    return Config().read(USER_ENV_CACHE_HEADER,"DOCKER_COMPOSE_FILE_PATH")
-
-
-def ensure_docker_installed() -> None:
-    try:
-        subprocess.run(["docker", "--version"], check=True, capture_output=True)
-    except (FileNotFoundError, subprocess.CalledProcessError):
-        logger.error("Unable to find an installed docker")
-        sys.exit(1)
-
-def ensure_docker_compose_installed() -> list:
-    try:
-        subprocess.run(["docker", "compose", "version"], check=True, capture_output=True)
-        return ["docker", "compose"]
-    except (FileNotFoundError, subprocess.CalledProcessError):
-        pass
-
-    try:
-        subprocess.run(["docker-compose", "version"], check=True, capture_output=True)
-        return ["docker-compose"]
-    except (FileNotFoundError, subprocess.CalledProcessError):
-        typer.echo("Unable to find an installed docker-compose or docker compose")
-        sys.exit(1)
-
-def docker_login(api_key: str, registry_url: str, username:str = "iamapikey") -> None:
-    logger.info(f"Logging into Docker registry: {registry_url} ...")
-    result = subprocess.run(
-        ["docker", "login", "-u", username, "--password-stdin", registry_url],
-        input=api_key.encode("utf-8"),
-        capture_output=True,
-    )
-    if result.returncode != 0:
-        logger.error(f"Error logging into Docker:\n{result.stderr.decode('utf-8')}")
-        sys.exit(1)
-    logger.info("Successfully logged in to Docker.")
-
-def docker_login_by_dev_edition_source(env_dict: dict, source: str) -> None:
-    if env_dict.get('WO_DEVELOPER_EDITION_SKIP_LOGIN', None) == 'true':
-        logger.info('WO_DEVELOPER_EDITION_SKIP_LOGIN is set to true, skipping login.')
-        logger.warning('If the developer edition images are not already pulled this call will fail without first setting WO_DEVELOPER_EDITION_SKIP_LOGIN=false')
-    else:
-        if not env_dict.get("REGISTRY_URL"):
-            raise ValueError("REGISTRY_URL is not set.")
-        registry_url = env_dict["REGISTRY_URL"].split("/")[0]
-        if source == "internal":
-            iam_api_key = env_dict.get("DOCKER_IAM_KEY")
-            if not iam_api_key:
-                raise ValueError("DOCKER_IAM_KEY is required in the environment file if WO_DEVELOPER_EDITION_SOURCE is set to 'internal'.")
-            docker_login(iam_api_key, registry_url, "iamapikey")
-        elif source == "myibm":
-            wo_entitlement_key = env_dict.get("WO_ENTITLEMENT_KEY")
-            if not wo_entitlement_key:
-                raise ValueError("WO_ENTITLEMENT_KEY is required in the environment file.")
-            docker_login(wo_entitlement_key, registry_url, "cp")
-        elif source == "orchestrate":
-            wo_auth_type = env_dict.get("WO_AUTH_TYPE")
-            api_key, username = get_docker_cred_by_wo_auth_type(env_dict, wo_auth_type)
-            docker_login(api_key, registry_url, username)
-
-
-def get_compose_file() -> Path:
-    custom_compose_path = get_compose_file_path_from_env()
-    return Path(custom_compose_path) if custom_compose_path else get_default_compose_file()
-
-
-def get_default_compose_file() -> Path:
-    with resources.as_file(
-        resources.files("ibm_watsonx_orchestrate.docker").joinpath("compose-lite.yml")
-    ) as compose_file:
-        return compose_file
-
-
-def get_default_env_file() -> Path:
-    with resources.as_file(
-        resources.files("ibm_watsonx_orchestrate.docker").joinpath("default.env")
-    ) as env_file:
-        return env_file
-
-
-def read_env_file(env_path: Path|str) -> dict:
-    return dotenv_values(str(env_path))
-
-def merge_env(
-    default_env_path: Path,
-    user_env_path: Path | None
-) -> dict:
-
-    merged = dotenv_values(str(default_env_path))
-
-    if user_env_path is not None:
-        user_env = dotenv_values(str(user_env_path))
-        merged.update(user_env)
-
-    return merged
-
-def get_default_registry_env_vars_by_dev_edition_source(default_env: dict, user_env:dict, source: str) -> dict[str,str]:
-    component_registry_var_names = {key for key in default_env if key.endswith("_REGISTRY")} | {'REGISTRY_URL'}
-
-    registry_url = user_env.get("REGISTRY_URL", None)
-    if not registry_url:
-        if source == "internal":
-            registry_url = "us.icr.io/watson-orchestrate-private"
-        elif source == "myibm":
-            registry_url = "cp.icr.io/cp/wxo-lite"
-        elif source == "orchestrate":
-            # extract the hostname from the WO_INSTANCE URL, and replace the "api." prefix with "registry." to construct the registry URL per region
-            wo_url = user_env.get("WO_INSTANCE")
-
-            if not wo_url:
-                raise ValueError("WO_INSTANCE is required in the environment file if the developer edition source is set to 'orchestrate'.")
-
-            parsed = urlparse(wo_url)
-            hostname = parsed.hostname
-
-            registry_url = f"registry.{hostname[4:]}/cp/wxo-lite"
-        else:
-            raise ValueError(f"Unknown value for developer edition source: {source}. Must be one of ['internal', 'myibm', 'orchestrate'].")
-
-    result = {name: registry_url for name in component_registry_var_names}
-    return result
-
-def get_dev_edition_source(env_dict: dict | None) -> str:
-    if not env_dict:
-        return "myibm"
-
-    source = env_dict.get("WO_DEVELOPER_EDITION_SOURCE")
-
-    if source:
-        return source
-    if env_dict.get("WO_INSTANCE"):
-        return "orchestrate"
-    return "myibm"
-
-def get_docker_cred_by_wo_auth_type(env_dict: dict, auth_type: str | None) -> tuple[str, str]:
-    # Try infer the auth type if not provided
-    if not auth_type:
-        instance_url = env_dict.get("WO_INSTANCE")
-        if instance_url:
-            if ".cloud.ibm.com" in instance_url:
-                auth_type = "ibm_iam"
-            elif ".ibm.com" in instance_url:
-                auth_type = "mcsp"
-            elif "https://cpd" in instance_url:
-                auth_type = "cpd"
-
-    if auth_type in {"mcsp", "ibm_iam"}:
-        wo_api_key = env_dict.get("WO_API_KEY")
-        if not wo_api_key:
-            raise ValueError("WO_API_KEY is required in the environment file if the WO_AUTH_TYPE is set to 'mcsp' or 'ibm_iam'.")
-        instance_url = env_dict.get("WO_INSTANCE")
-        if not instance_url:
-            raise ValueError("WO_INSTANCE is required in the environment file if the WO_AUTH_TYPE is set to 'mcsp' or 'ibm_iam'.")
-        path = urlparse(instance_url).path
-        if not path or '/' not in path:
-            raise ValueError(f"Invalid WO_INSTANCE URL: '{instance_url}'. It should contain the instance (tenant) id.")
-        tenant_id = path.split('/')[-1]
-        return wo_api_key, f"wxouser-{tenant_id}"
-    elif auth_type == "cpd":
-        wo_api_key = env_dict.get("WO_API_KEY")
-        wo_password = env_dict.get("WO_PASSWORD")
-        if not wo_api_key and not wo_password:
-            raise ValueError("WO_API_KEY or WO_PASSWORD is required in the environment file if the WO_AUTH_TYPE is set to 'cpd'.")
-        wo_username = env_dict.get("WO_USERNAME")
-        if not wo_username:
-            raise ValueError("WO_USERNAME is required in the environment file if the WO_AUTH_TYPE is set to 'cpd'.")
-        return wo_api_key or wo_password, wo_username # type: ignore[return-value]
-    else:
-        raise ValueError(f"Unknown value for WO_AUTH_TYPE: '{auth_type}'. Must be one of ['mcsp', 'ibm_iam', 'cpd'].")
-
-def apply_server_env_dict_defaults(provided_env_dict: dict) -> dict:
-
-    env_dict = provided_env_dict.copy()
-
-    env_dict['DBTAG'] = get_dbtag_from_architecture(merged_env_dict=env_dict)
-
-    model_config = None
-    try:
-        use_model_proxy = env_dict.get("USE_SAAS_ML_TOOLS_RUNTIME")
-        if not use_model_proxy or use_model_proxy.lower() != 'true':
-            model_config = WatsonXAIEnvConfig.model_validate(env_dict)
-    except ValueError:
-        pass
-
-    # If no watsonx ai detials are found, try build model gateway config
-    if not model_config:
-        try:
-            model_config = ModelGatewayEnvConfig.model_validate(env_dict)
-        except ValueError as e :
-            pass
-
-    if not model_config:
-        logger.error("Missing required model access environment variables. Please set Watson Orchestrate credentials 'WO_INSTANCE' and 'WO_API_KEY'. For CPD, set 'WO_INSTANCE', 'WO_USERNAME' and either 'WO_API_KEY' or 'WO_PASSWORD'. Alternatively, you can set WatsonX AI credentials directly using 'WATSONX_SPACE_ID' and 'WATSONX_APIKEY'")
-        sys.exit(1)
-
-    env_dict.update(model_config.model_dump(exclude_none=True))
-
-    return env_dict
-
-def apply_llm_api_key_defaults(env_dict: dict) -> None:
-    llm_value = env_dict.get("WATSONX_APIKEY")
-    if llm_value:
-        env_dict.setdefault("ASSISTANT_LLM_API_KEY", llm_value)
-        env_dict.setdefault("ASSISTANT_EMBEDDINGS_API_KEY", llm_value)
-        env_dict.setdefault("ROUTING_LLM_API_KEY", llm_value)
-        env_dict.setdefault("BAM_API_KEY", llm_value)
-        env_dict.setdefault("WXAI_API_KEY", llm_value)
-    space_value = env_dict.get("WATSONX_SPACE_ID")
-    if space_value:
-        env_dict.setdefault("ASSISTANT_LLM_SPACE_ID", space_value)
-        env_dict.setdefault("ASSISTANT_EMBEDDINGS_SPACE_ID", space_value)
-        env_dict.setdefault("ROUTING_LLM_SPACE_ID", space_value)
-
-def _is_docker_container_running(container_name):
-    ensure_docker_installed()
-    command = [ "docker",
-        "ps",
-        "-f",
-        f"name={container_name}"
-    ]
-    result = subprocess.run(command, env=os.environ, capture_output=True)
-    if container_name in str(result.stdout):
-        return True
-    return False
-
-def _check_exclusive_observibility(langfuse_enabled: bool, ibm_tele_enabled: bool):
-    if langfuse_enabled and ibm_tele_enabled:
-        return False
-    if langfuse_enabled and _is_docker_container_running("docker-frontend-server-1"):
-        return False
-    if ibm_tele_enabled and _is_docker_container_running("docker-langfuse-web-1"):
-        return False
-    return True
-
-def _prepare_clean_env(env_file: Path) -> None:
-    """Remove env vars so terminal definitions don't override"""
-    keys_from_file = set(dotenv_values(str(env_file)).keys())
-    keys_to_unset = keys_from_file | _ALWAYS_UNSET
-    for key in keys_to_unset:
-        os.environ.pop(key, None)
-
-def write_merged_env_file(merged_env: dict, target_path: str = None) -> Path:
-
-    if target_path:
-        file = open(target_path,"w")
-    else:
-        file = tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".env")
-
-    with file:
-        for key, val in merged_env.items():
-            file.write(f"{key}={val}\n")
-    return Path(file.name)
-
-def get_dbtag_from_architecture(merged_env_dict: dict) -> str:
-    """Detects system architecture and returns the corresponding DBTAG."""
-    arch = platform.machine()
-
-    arm64_tag = merged_env_dict.get("ARM64DBTAG")
-    amd_tag = merged_env_dict.get("AMDDBTAG")
-
-    if arch in ["aarch64", "arm64"]:
-        return arm64_tag
-    else:
-        return amd_tag
-
 def refresh_local_credentials() -> None:
     """
     Refresh the local credentials
@@ -348,51 +41,24 @@ def refresh_local_credentials() -> None:
     clear_protected_env_credentials_token()
     _login(name=PROTECTED_ENV_NAME, apikey=None)

-def persist_user_env(env: dict, include_secrets: bool = False) -> None:
-    if include_secrets:
-        persistable_env = env
-    else:
-        persistable_env = {k:env[k] for k in NON_SECRET_ENV_ITEMS if k in env}
-
-    cfg = Config()
-    cfg.save(
-        {
-            USER_ENV_CACHE_HEADER: persistable_env
-        }
-    )
-
-def get_persisted_user_env() -> dict | None:
-    cfg = Config()
-    user_env = cfg.get(USER_ENV_CACHE_HEADER) if cfg.get(USER_ENV_CACHE_HEADER) else None
-    return user_env
-
 def run_compose_lite(
-    final_env_file: Path,
+    final_env_file: Path,
+    env_service: EnvService,
     experimental_with_langfuse=False,
     experimental_with_ibm_telemetry=False,
     with_doc_processing=False,
     with_voice=False,
-
+    with_connections_ui=False,
+    with_langflow=False,
 ) -> None:
-
-
-    compose_command = ensure_docker_compose_installed()
-    _prepare_clean_env(final_env_file)
-    db_tag = read_env_file(final_env_file).get('DBTAG', None)
+    EnvService.prepare_clean_env(final_env_file)
+    db_tag = EnvService.read_env_file(final_env_file).get('DBTAG', None)
     logger.info(f"Detected architecture: {platform.machine()}, using DBTAG: {db_tag}")

+    compose_core = DockerComposeCore(env_service)
+
     # Step 1: Start only the DB container
-
-        "-f", str(compose_path),
-        "--env-file", str(final_env_file),
-        "up",
-        "-d",
-        "--remove-orphans",
-        "wxo-server-db"
-    ]
-
-    logger.info("Starting database container...")
-    result = subprocess.run(db_command, env=os.environ, capture_output=False)
+    result = compose_core.service_up(service_name="wxo-server-db", friendly_name="WxO Server DB", final_env_file=final_env_file, compose_env=os.environ)

     if result.returncode != 0:
         logger.error(f"Error starting DB container: {result.stderr}")
@@ -402,7 +68,7 @@ def run_compose_lite(


     # Step 2: Create Langflow DB (if enabled)
-    if
+    if with_langflow:
         create_langflow_db()

     # Step 3: Start all remaining services (except DB)
@@ -415,27 +81,12 @@ def run_compose_lite(
         profiles.append("docproc")
     if with_voice:
         profiles.append("voice")
-    if
+    if with_connections_ui:
+        profiles.append("connections-ui")
+    if with_langflow:
         profiles.append("langflow")

-
-    for profile in profiles:
-        command += ["--profile", profile]
-
-    command += [
-        "-f", str(compose_path),
-        "--env-file", str(final_env_file),
-        "up",
-        "--scale",
-        "ui=0",
-        "--scale",
-        "cpe=0",
-        "-d",
-        "--remove-orphans",
-    ]
-
-    logger.info("Starting docker-compose services...")
-    result = subprocess.run(command, capture_output=False)
+    result = compose_core.services_up(profiles, final_env_file, ["--scale", "ui=0", "--scale", "cpe=0"])

     if result.returncode == 0:
         logger.info("Services started successfully.")
@@ -498,29 +149,13 @@ def wait_for_wxo_ui_health_check(timeout_seconds=45, interval_seconds=2):
     return False

 def run_compose_lite_ui(user_env_file: Path) -> bool:
-
-
-
-
-
-
-
-    if not user_env:
-        user_env = get_persisted_user_env() or {}
-
-    dev_edition_source = get_dev_edition_source(user_env)
-    default_registry_vars = get_default_registry_env_vars_by_dev_edition_source(default_env, user_env, source=dev_edition_source)
-
-    # Update the default environment with the default registry variables only if they are not already set
-    for key in default_registry_vars:
-        if key not in default_env or not default_env[key]:
-            default_env[key] = default_registry_vars[key]
-
-    # Merge the default environment with the user environment
-    merged_env_dict = {
-        **default_env,
-        **user_env,
-    }
+    DockerUtils.ensure_docker_installed()
+
+    cli_config = Config()
+    env_service = EnvService(cli_config)
+    env_service.prepare_clean_env(user_env_file)
+    user_env = env_service.get_user_env(user_env_file)
+    merged_env_dict = env_service.prepare_server_env_vars_minimal(user_env=user_env)

     _login(name=PROTECTED_ENV_NAME)
     auth_cfg = Config(AUTH_CONFIG_FILE_FOLDER, AUTH_CONFIG_FILE)
@@ -537,22 +172,22 @@ def run_compose_lite_ui(user_env_file: Path) -> bool:
         sys.exit(1)

     try:
-
+        DockerLoginService(env_service=env_service).login_by_dev_edition_source(merged_env_dict)
     except ValueError as ignored:
         # do nothing, as the docker login here is not mandatory
         pass

     # Auto-configure callback IP for async tools
-    merged_env_dict = auto_configure_callback_ip(merged_env_dict)
+    merged_env_dict = env_service.auto_configure_callback_ip(merged_env_dict)

     #These are to removed warning and not used in UI component
     if not 'WATSONX_SPACE_ID' in merged_env_dict:
         merged_env_dict['WATSONX_SPACE_ID']='X'
     if not 'WATSONX_APIKEY' in merged_env_dict:
         merged_env_dict['WATSONX_APIKEY']='X'
-    apply_llm_api_key_defaults(merged_env_dict)
+    env_service.apply_llm_api_key_defaults(merged_env_dict)

-    final_env_file = write_merged_env_file(merged_env_dict)
+    final_env_file = env_service.write_merged_env_file(merged_env_dict)

     logger.info("Waiting for orchestrate server to be fully started and ready...")

@@ -562,17 +197,9 @@ def run_compose_lite_ui(user_env_file: Path) -> bool:
         logger.error("Healthcheck failed orchestrate server. Make sure you start the server components with `orchestrate server start` before trying to start the chat UI")
         return False

-
-        "-f", str(compose_path),
-        "--env-file", str(final_env_file),
-        "up",
-        "ui",
-        "-d",
-        "--remove-orphans"
-    ]
+    compose_core = DockerComposeCore(env_service)

-
-    result = subprocess.run(command, capture_output=False)
+    result = compose_core.service_up(service_name="ui", friendly_name="UI", final_env_file=final_env_file)

     if result.returncode == 0:
         logger.info("Chat UI Service started successfully.")
@@ -593,36 +220,23 @@ def run_compose_lite_ui(user_env_file: Path) -> bool:
     return True

 def run_compose_lite_down_ui(user_env_file: Path, is_reset: bool = False) -> None:
-
-
-
-
-
-    ensure_docker_installed()
-    default_env_path = get_default_env_file()
-    merged_env_dict = merge_env(
+    EnvService.prepare_clean_env(user_env_file)
+    DockerUtils.ensure_docker_installed()
+    default_env_path = EnvService.get_default_env_file()
+    merged_env_dict = EnvService.merge_env(
         default_env_path,
         user_env_file
     )
     merged_env_dict['WATSONX_SPACE_ID']='X'
     merged_env_dict['WATSONX_APIKEY']='X'
-    apply_llm_api_key_defaults(merged_env_dict)
-    final_env_file = write_merged_env_file(merged_env_dict)
-
-
-
-
-        "down",
-        "ui"
-    ]
-
-    if is_reset:
-        command.append("--volumes")
-        logger.info("Stopping docker-compose UI service and resetting volumes...")
-    else:
-        logger.info("Stopping docker-compose UI service...")
+    EnvService.apply_llm_api_key_defaults(merged_env_dict)
+    final_env_file = EnvService.write_merged_env_file(merged_env_dict)
+
+    cli_config = Config()
+    env_service = EnvService(cli_config)
+    compose_core = DockerComposeCore(env_service)

-    result =
+    result = compose_core.service_down(service_name="ui", friendly_name="UI", final_env_file=final_env_file, is_reset=is_reset)

     if result.returncode == 0:
         logger.info("UI service stopped successfully.")
@@ -637,24 +251,13 @@ def run_compose_lite_down_ui(user_env_file: Path, is_reset: bool = False) -> None:
         sys.exit(1)

 def run_compose_lite_down(final_env_file: Path, is_reset: bool = False) -> None:
-
-
-
-
-
-        '--profile', '*',
-        "-f", str(compose_path),
-        "--env-file", str(final_env_file),
-        "down"
-    ]
-
-    if is_reset:
-        command.append("--volumes")
-        logger.info("Stopping docker-compose services and resetting volumes...")
-    else:
-        logger.info("Stopping docker-compose services...")
+    EnvService.prepare_clean_env(final_env_file)
+
+    cli_config = Config()
+    env_service = EnvService(cli_config)
+    compose_core = DockerComposeCore(env_service)

-    result =
+    result = compose_core.services_down(final_env_file=final_env_file, is_reset=is_reset)

     if result.returncode == 0:
         logger.info("Services stopped successfully.")
@@ -668,23 +271,14 @@ def run_compose_lite_down(final_env_file: Path, is_reset: bool = False) -> None:
         )
         sys.exit(1)

-def run_compose_lite_logs(final_env_file: Path
-
-    compose_command = ensure_docker_compose_installed()
-    _prepare_clean_env(final_env_file)
+def run_compose_lite_logs(final_env_file: Path) -> None:
+    EnvService.prepare_clean_env(final_env_file)

-
-
-
-        "--profile",
-        "*",
-        "logs",
-        "-f"
-    ]
+    cli_config = Config()
+    env_service = EnvService(cli_config)
+    compose_core = DockerComposeCore(env_service)

-
-
-    result = subprocess.run(command, capture_output=False)
+    result = compose_core.services_logs(final_env_file=final_env_file, should_follow=True)

     if result.returncode == 0:
         logger.info("End of docker logs")
@@ -698,13 +292,12 @@ def run_compose_lite_logs(final_env_file: Path, is_reset: bool = False) -> None:
         )
         sys.exit(1)

-def confirm_accepts_license_agreement(accepts_by_argument: bool):
-    cfg = Config()
+def confirm_accepts_license_agreement(accepts_by_argument: bool, cfg: Config):
     accepts_license = cfg.read(LICENSE_HEADER, ENV_ACCEPT_LICENSE)
     if accepts_license != True:
         logger.warning(('''
 By running the following command your machine will install IBM watsonx Orchestrate Developer Edition, which is governed by the following IBM license agreement:
-- * https://www.ibm.com/support/customer/csol/terms/?id=L-
+- * https://www.ibm.com/support/customer/csol/terms/?id=L-GLQU-5KA4PY&lc=en
 Additionally, the following prerequisite open source programs will be obtained from Docker Hub and will be installed on your machine. Each of the below programs are Separately Licensed Code, and are governed by the separate license agreements identified below, and not by the IBM license agreement:
 * redis (7.2) - https://github.com/redis/redis/blob/7.2.7/COPYING
 * minio - https://github.com/minio/minio/blob/master/LICENSE
@@ -712,6 +305,7 @@ def confirm_accepts_license_agreement(accepts_by_argument: bool):
 * etcd - https://github.com/etcd-io/etcd/blob/main/LICENSE
 * clickhouse-server - https://github.com/ClickHouse/ClickHouse/blob/master/LICENSE
 * langfuse - https://github.com/langfuse/langfuse/blob/main/LICENSE
+* langflow - https://github.com/langflow-ai/langflow/blob/main/LICENSE
 After installation, you are solely responsible for obtaining and installing updates and fixes, including security patches, for the above prerequisite open source programs. To update images the customer will run `orchestrate server reset && orchestrate server start -e .env`.
 ''').strip())
     if not accepts_by_argument:
@@ -724,107 +318,6 @@ def confirm_accepts_license_agreement(accepts_by_argument: bool):
         logger.error('The terms and conditions were not accepted, exiting.')
         exit(1)

-def auto_configure_callback_ip(merged_env_dict: dict) -> dict:
-    """
-    Automatically detect and configure CALLBACK_HOST_URL if it's empty.
-
-    Args:
-        merged_env_dict: The merged environment dictionary
-
-    Returns:
-        Updated environment dictionary with CALLBACK_HOST_URL set
-    """
-    callback_url = merged_env_dict.get('CALLBACK_HOST_URL', '').strip()
-
-    # Only auto-configure if CALLBACK_HOST_URL is empty
-    if not callback_url:
-        logger.info("Auto-detecting local IP address for async tool callbacks...")
-
-        system = platform.system()
-        ip = None
-
-        try:
-            if system in ("Linux", "Darwin"):
-                result = subprocess.run(["ifconfig"], capture_output=True, text=True, check=True)
-                lines = result.stdout.splitlines()
-
-                for line in lines:
-                    line = line.strip()
-                    # Unix ifconfig output format: "inet 192.168.1.100 netmask 0xffffff00 broadcast 192.168.1.255"
-                    if line.startswith("inet ") and "127.0.0.1" not in line:
-                        candidate_ip = line.split()[1]
-                        # Validate IP is not loopback or link-local
-                        if (candidate_ip and
-                            not candidate_ip.startswith("127.") and
-                            not candidate_ip.startswith("169.254")):
-                            ip = candidate_ip
-                            break
-
-            elif system == "Windows":
-                result = subprocess.run(["ipconfig"], capture_output=True, text=True, check=True)
-                lines = result.stdout.splitlines()
-
-                for line in lines:
-                    line = line.strip()
-                    # Windows ipconfig output format: " IPv4 Address. . . . . . . . . . . : 192.168.1.100"
-                    if "IPv4 Address" in line and ":" in line:
-                        candidate_ip = line.split(":")[-1].strip()
-                        # Validate IP is not loopback or link-local
-                        if (candidate_ip and
-                            not candidate_ip.startswith("127.") and
-                            not candidate_ip.startswith("169.254")):
-                            ip = candidate_ip
-                            break
-
-            else:
-                logger.warning(f"Unsupported platform: {system}")
-                ip = None
-
-        except Exception as e:
-            logger.debug(f"IP detection failed on {system}: {e}")
-            ip = None
-
-        if ip:
-            callback_url = f"http://{ip}:4321"
-            merged_env_dict['CALLBACK_HOST_URL'] = callback_url
-            logger.info(f"Auto-configured CALLBACK_HOST_URL to: {callback_url}")
-        else:
-            # Fallback for localhost
-            callback_url = "http://host.docker.internal:4321"
-            merged_env_dict['CALLBACK_HOST_URL'] = callback_url
-            logger.info(f"Using Docker internal URL: {callback_url}")
-            logger.info("For external tools, consider using ngrok or similar tunneling service.")
-    else:
-        logger.info(f"Using existing CALLBACK_HOST_URL: {callback_url}")
-
-    return merged_env_dict
-
-def prepare_server_env_vars(user_env: dict = {}):
-
-    default_env = read_env_file(get_default_env_file())
-    dev_edition_source = get_dev_edition_source(user_env)
-    default_registry_vars = get_default_registry_env_vars_by_dev_edition_source(default_env, user_env, source=dev_edition_source)
-
-    # Update the default environment with the default registry variables only if they are not already set
-    for key in default_registry_vars:
-        if key not in default_env or not default_env[key]:
-            default_env[key] = default_registry_vars[key]
-
-    # Merge the default environment with the user environment
-    merged_env_dict = {
-        **default_env,
-        **user_env,
-    }
-
-    merged_env_dict = apply_server_env_dict_defaults(merged_env_dict)
-
-    # Auto-configure callback IP for async tools
-    merged_env_dict = auto_configure_callback_ip(merged_env_dict)
-
-    apply_llm_api_key_defaults(merged_env_dict)
-
-    return merged_env_dict
-
 @server_app.command(name="start")
 def server_start(
     user_env_file: str = typer.Option(
@@ -868,18 +361,20 @@ def server_start(
         '--with-voice', '-v',
         help='Enable voice controller to interact with the chat via voice channels'
     ),
-
+    with_connections_ui: bool = typer.Option(
         False,
-        '--
-        help='
-
+        '--with-connections-ui', '-c',
+        help='Enables connections ui to facilitate OAuth connections and credential management via a UI'),
+    with_langflow: bool = typer.Option(
+        False,
+        '--with-langflow',
+        help='Enable Langflow UI, available at http://localhost:7861'
     ),
 ):
-
+    cli_config = Config()
+    confirm_accepts_license_agreement(accept_terms_and_conditions, cli_config)

-
-
-    ensure_docker_installed()
+    DockerUtils.ensure_docker_installed()

     if user_env_file and not Path(user_env_file).exists():
         logger.error(f"The specified environment file '{user_env_file}' does not exist.")
@@ -891,16 +386,20 @@ def server_start(
     else:
         logger.error(f"The specified docker-compose file '{custom_compose_file}' does not exist.")
         sys.exit(1)
+
+    env_service = EnvService(cli_config)
+
+    env_service.define_saas_wdu_runtime()

     #Run regardless, to allow this to set compose as 'None' when not in use
-    set_compose_file_path_in_env(custom_compose_file)
+    env_service.set_compose_file_path_in_env(custom_compose_file)

-    user_env =
-    persist_user_env(user_env, include_secrets=persist_env_secrets)
+    user_env = env_service.get_user_env(user_env_file=user_env_file, fallback_to_persisted_env=False)
+    env_service.persist_user_env(user_env, include_secrets=persist_env_secrets)

-    merged_env_dict = prepare_server_env_vars(user_env)
+    merged_env_dict = env_service.prepare_server_env_vars(user_env=user_env, should_drop_auth_routes=False)

-    if not
+    if not DockerUtils.check_exclusive_observability(experimental_with_langfuse, experimental_with_ibm_telemetry):
         logger.error("Please select either langfuse or ibm telemetry for observability not both")
         sys.exit(1)

@@ -910,30 +409,30 @@ def server_start(

     if with_doc_processing:
         merged_env_dict['DOCPROC_ENABLED'] = 'true'
-        define_saas_wdu_runtime("local")
+        env_service.define_saas_wdu_runtime("local")

     if experimental_with_ibm_telemetry:
         merged_env_dict['USE_IBM_TELEMETRY'] = 'true'

-    if
+    if with_langflow:
         merged_env_dict['LANGFLOW_ENABLED'] = 'true'


     try:
-
-        docker_login_by_dev_edition_source(merged_env_dict, dev_edition_source)
+        DockerLoginService(env_service=env_service).login_by_dev_edition_source(merged_env_dict)
     except ValueError as e:
         logger.error(f"Error: {e}")
         sys.exit(1)

-    final_env_file = write_merged_env_file(merged_env_dict)
+    final_env_file = env_service.write_merged_env_file(merged_env_dict)

     run_compose_lite(final_env_file=final_env_file,
                      experimental_with_langfuse=experimental_with_langfuse,
                      experimental_with_ibm_telemetry=experimental_with_ibm_telemetry,
                      with_doc_processing=with_doc_processing,
                      with_voice=with_voice,
-
+                     with_connections_ui=with_connections_ui,
+                     with_langflow=with_langflow, env_service=env_service)

     run_db_migration()

@@ -963,9 +462,10 @@ def server_start(
         logger.info(f"You can access the observability platform Langfuse at http://localhost:3010, username: orchestrate@ibm.com, password: orchestrate")
     if with_doc_processing:
         logger.info(f"Document processing in Flows (Public Preview) has been enabled.")
-    if
+    if with_connections_ui:
+        logger.info("Connections UI can be found at http://localhost:3412/connectors")
+    if with_langflow:
         logger.info("Langflow has been enabled, the Langflow UI is available at http://localhost:7861")
-
 @server_app.command(name="stop")
 def server_stop(
     user_env_file: str = typer.Option(
@@ -975,16 +475,16 @@ def server_stop(
     )
 ):

-    ensure_docker_installed()
-    default_env_path = get_default_env_file()
-    merged_env_dict = merge_env(
+    DockerUtils.ensure_docker_installed()
+    default_env_path = EnvService.get_default_env_file()
+    merged_env_dict = EnvService.merge_env(
         default_env_path,
         Path(user_env_file) if user_env_file else None
     )
     merged_env_dict['WATSONX_SPACE_ID']='X'
     merged_env_dict['WATSONX_APIKEY']='X'
-    apply_llm_api_key_defaults(merged_env_dict)
-    final_env_file = write_merged_env_file(merged_env_dict)
+    EnvService.apply_llm_api_key_defaults(merged_env_dict)
+    final_env_file = EnvService.write_merged_env_file(merged_env_dict)
     run_compose_lite_down(final_env_file=final_env_file)

 @server_app.command(name="reset")
@@ -996,16 +496,16 @@ def server_reset(
     )
 ):

-    ensure_docker_installed()
-    default_env_path = get_default_env_file()
-    merged_env_dict = merge_env(
+    DockerUtils.ensure_docker_installed()
+    default_env_path = EnvService.get_default_env_file()
+    merged_env_dict = EnvService.merge_env(
         default_env_path,
         Path(user_env_file) if user_env_file else None
     )
     merged_env_dict['WATSONX_SPACE_ID']='X'
     merged_env_dict['WATSONX_APIKEY']='X'
-    apply_llm_api_key_defaults(merged_env_dict)
-    final_env_file = write_merged_env_file(merged_env_dict)
+    EnvService.apply_llm_api_key_defaults(merged_env_dict)
+    final_env_file = EnvService.write_merged_env_file(merged_env_dict)
     run_compose_lite_down(final_env_file=final_env_file, is_reset=True)

 @server_app.command(name="logs")
@@ -1016,23 +516,21 @@ def server_logs(
         help="Path to a .env file that overrides default.env. Then environment variables override both."
     )
 ):
-    ensure_docker_installed()
-    default_env_path = get_default_env_file()
-    merged_env_dict = merge_env(
+    DockerUtils.ensure_docker_installed()
+    default_env_path = EnvService.get_default_env_file()
+    merged_env_dict = EnvService.merge_env(
         default_env_path,
         Path(user_env_file) if user_env_file else None
     )
     merged_env_dict['WATSONX_SPACE_ID']='X'
     merged_env_dict['WATSONX_APIKEY']='X'
-    apply_llm_api_key_defaults(merged_env_dict)
-    final_env_file = write_merged_env_file(merged_env_dict)
+    EnvService.apply_llm_api_key_defaults(merged_env_dict)
+    final_env_file = EnvService.write_merged_env_file(merged_env_dict)
     run_compose_lite_logs(final_env_file=final_env_file)

 def run_db_migration() -> None:
-
-
-    default_env_path = get_default_env_file()
-    merged_env_dict = merge_env(default_env_path, user_env_path=None)
+    default_env_path = EnvService.get_default_env_file()
+    merged_env_dict = EnvService.merge_env(default_env_path, user_env_path=None)
     merged_env_dict['WATSONX_SPACE_ID']='X'
     merged_env_dict['WATSONX_APIKEY']='X'
     merged_env_dict['WXAI_API_KEY'] = ''
@@ -1044,7 +542,7 @@ def run_db_migration() -> None:
     merged_env_dict['ASSISTANT_EMBEDDINGS_SPACE_ID'] = ''
     merged_env_dict['ROUTING_LLM_API_KEY'] = ''
     merged_env_dict['ASSISTANT_LLM_API_KEY'] = ''
-    final_env_file = write_merged_env_file(merged_env_dict)
+    final_env_file = EnvService.write_merged_env_file(merged_env_dict)


     pg_user = merged_env_dict.get("POSTGRES_USER","postgres")
@@ -1070,18 +568,13 @@ def run_db_migration() -> None:
 done
 '''

-
-
-
-        "exec",
-        "wxo-server-db",
-        "bash",
-        "-c",
-        migration_command
-    ]
+    cli_config = Config()
+    env_service = EnvService(cli_config)
+    compose_core = DockerComposeCore(env_service)

-
-
+    result = compose_core.service_container_bash_exec(service_name="wxo-server-db",
+                                                      log_message="Running Database Migration...",
+                                                      final_env_file=final_env_file, bash_command=migration_command)

     if result.returncode == 0:
         logger.info("Migration ran successfully.")
@@ -1093,10 +586,8 @@ def run_db_migration() -> None:
         sys.exit(1)

 def create_langflow_db() -> None:
-
-
-    default_env_path = get_default_env_file()
-    merged_env_dict = merge_env(default_env_path, user_env_path=None)
+    default_env_path = EnvService.get_default_env_file()
+    merged_env_dict = EnvService.merge_env(default_env_path, user_env_path=None)
     merged_env_dict['WATSONX_SPACE_ID']='X'
     merged_env_dict['WATSONX_APIKEY']='X'
     merged_env_dict['WXAI_API_KEY'] = ''
@@ -1108,7 +599,7 @@ def create_langflow_db() -> None:
     merged_env_dict['ASSISTANT_EMBEDDINGS_SPACE_ID'] = ''
     merged_env_dict['ROUTING_LLM_API_KEY'] = ''
     merged_env_dict['ASSISTANT_LLM_API_KEY'] = ''
-    final_env_file = write_merged_env_file(merged_env_dict)
+    final_env_file = EnvService.write_merged_env_file(merged_env_dict)

     pg_timeout = merged_env_dict.get('POSTGRES_READY_TIMEOUT','10')

@@ -1130,18 +621,14 @@ def create_langflow_db() -> None:
 psql -U {pg_user} -q -d postgres -c "GRANT CONNECT ON DATABASE langflow TO {pg_user}";
 fi
 """
-
-
-
-
-
-
-
-
-    ]
-
-    logger.info("Preparing Langflow resources...")
-    result = subprocess.run(command, capture_output=False)
+
+    cli_config = Config()
+    env_service = EnvService(cli_config)
+    compose_core = DockerComposeCore(env_service)
+
+    result = compose_core.service_container_bash_exec(service_name="wxo-server-db",
+                                                      log_message="Preparing Langflow resources...",
+                                                      final_env_file=final_env_file, bash_command=creation_command)

     if result.returncode == 0:
         logger.info("Langflow resources sucessfully created")
@@ -1182,22 +669,22 @@ def server_eject(
         sys.exit(1)

     logger.warning("Changes to your docker compose file are not supported")
-
-    compose_file_path = get_compose_file()

+    cli_config = Config()
+    env_service = EnvService(cli_config)
+    compose_file_path = env_service.get_compose_file()
     compose_output_file = get_next_free_file_iteration('docker-compose.yml')
     logger.info(f"Exporting docker compose file to '{compose_output_file}'")

     shutil.copyfile(compose_file_path,compose_output_file)

-
-
-    merged_env_dict = prepare_server_env_vars(user_env)
+    user_env = env_service.get_user_env(user_env_file=user_env_file, fallback_to_persisted_env=False)
+    merged_env_dict = env_service.prepare_server_env_vars(user_env=user_env, should_drop_auth_routes=False)

     env_output_file = get_next_free_file_iteration('server.env')
     logger.info(f"Exporting env file to '{env_output_file}'")

-    write_merged_env_file(merged_env=merged_env_dict,target_path=env_output_file)
+    env_service.write_merged_env_file(merged_env=merged_env_dict,target_path=env_output_file)

     logger.info(f"To make use of the exported configuration file run \"orchestrate server start -e {env_output_file} -f {compose_output_file}\"")

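The new `server start` flags introduced in this diff (`--with-connections-ui`/`-c` and `--with-langflow`) are passed straight through to `run_compose_lite`, which maps them onto the `connections-ui` and `langflow` compose profiles. A hypothetical call mirroring that flow, reusing the variables from the sketch after the file list; only the keyword names, profiles, and URLs come from the diff:

```python
# Hypothetical driver call; run_compose_lite's new signature is taken from the diff above.
run_compose_lite(
    final_env_file=final_env_file,
    env_service=env_service,
    with_connections_ui=True,  # enables the "connections-ui" profile; UI at http://localhost:3412/connectors
    with_langflow=True,        # enables the "langflow" profile; Langflow UI at http://localhost:7861
)
```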