ibm-watsonx-orchestrate 1.11.1__py3-none-any.whl → 1.12.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ibm_watsonx_orchestrate/__init__.py +1 -1
- ibm_watsonx_orchestrate/agent_builder/agents/types.py +22 -5
- ibm_watsonx_orchestrate/agent_builder/connections/connections.py +3 -3
- ibm_watsonx_orchestrate/agent_builder/connections/types.py +14 -0
- ibm_watsonx_orchestrate/agent_builder/models/types.py +1 -0
- ibm_watsonx_orchestrate/agent_builder/toolkits/base_toolkit.py +1 -1
- ibm_watsonx_orchestrate/agent_builder/tools/__init__.py +1 -0
- ibm_watsonx_orchestrate/agent_builder/tools/base_tool.py +1 -1
- ibm_watsonx_orchestrate/agent_builder/tools/langflow_tool.py +184 -0
- ibm_watsonx_orchestrate/agent_builder/tools/openapi_tool.py +9 -3
- ibm_watsonx_orchestrate/agent_builder/tools/types.py +20 -2
- ibm_watsonx_orchestrate/cli/commands/agents/agents_controller.py +19 -6
- ibm_watsonx_orchestrate/cli/commands/connections/connections_command.py +18 -0
- ibm_watsonx_orchestrate/cli/commands/connections/connections_controller.py +114 -0
- ibm_watsonx_orchestrate/cli/commands/copilot/copilot_controller.py +2 -6
- ibm_watsonx_orchestrate/cli/commands/copilot/copilot_server_controller.py +24 -91
- ibm_watsonx_orchestrate/cli/commands/evaluations/evaluations_command.py +52 -2
- ibm_watsonx_orchestrate/cli/commands/evaluations/evaluations_controller.py +1 -1
- ibm_watsonx_orchestrate/cli/commands/models/model_provider_mapper.py +23 -4
- ibm_watsonx_orchestrate/cli/commands/models/models_controller.py +3 -3
- ibm_watsonx_orchestrate/cli/commands/partners/offering/partners_offering_command.py +56 -0
- ibm_watsonx_orchestrate/cli/commands/partners/offering/partners_offering_controller.py +475 -0
- ibm_watsonx_orchestrate/cli/commands/partners/offering/types.py +99 -0
- ibm_watsonx_orchestrate/cli/commands/partners/partners_command.py +12 -0
- ibm_watsonx_orchestrate/cli/commands/partners/partners_controller.py +0 -0
- ibm_watsonx_orchestrate/cli/commands/server/server_command.py +124 -637
- ibm_watsonx_orchestrate/cli/commands/toolkit/toolkit_command.py +2 -2
- ibm_watsonx_orchestrate/cli/commands/toolkit/toolkit_controller.py +2 -2
- ibm_watsonx_orchestrate/cli/commands/tools/tools_command.py +2 -3
- ibm_watsonx_orchestrate/cli/commands/tools/tools_controller.py +233 -44
- ibm_watsonx_orchestrate/cli/main.py +2 -0
- ibm_watsonx_orchestrate/client/connections/connections_client.py +4 -1
- ibm_watsonx_orchestrate/client/tools/tempus_client.py +3 -0
- ibm_watsonx_orchestrate/client/tools/tool_client.py +5 -2
- ibm_watsonx_orchestrate/client/utils.py +31 -1
- ibm_watsonx_orchestrate/docker/compose-lite.yml +58 -7
- ibm_watsonx_orchestrate/docker/default.env +20 -17
- ibm_watsonx_orchestrate/flow_builder/flows/decorators.py +10 -2
- ibm_watsonx_orchestrate/flow_builder/flows/flow.py +71 -9
- ibm_watsonx_orchestrate/flow_builder/node.py +14 -2
- ibm_watsonx_orchestrate/flow_builder/types.py +36 -3
- ibm_watsonx_orchestrate/langflow/__init__.py +0 -0
- ibm_watsonx_orchestrate/langflow/langflow_utils.py +195 -0
- ibm_watsonx_orchestrate/langflow/lfx_deps.py +84 -0
- ibm_watsonx_orchestrate/utils/docker_utils.py +280 -0
- ibm_watsonx_orchestrate/utils/environment.py +369 -0
- ibm_watsonx_orchestrate/utils/utils.py +7 -3
- {ibm_watsonx_orchestrate-1.11.1.dist-info → ibm_watsonx_orchestrate-1.12.0.dist-info}/METADATA +2 -2
- {ibm_watsonx_orchestrate-1.11.1.dist-info → ibm_watsonx_orchestrate-1.12.0.dist-info}/RECORD +52 -41
- {ibm_watsonx_orchestrate-1.11.1.dist-info → ibm_watsonx_orchestrate-1.12.0.dist-info}/WHEEL +0 -0
- {ibm_watsonx_orchestrate-1.11.1.dist-info → ibm_watsonx_orchestrate-1.12.0.dist-info}/entry_points.txt +0 -0
- {ibm_watsonx_orchestrate-1.11.1.dist-info → ibm_watsonx_orchestrate-1.12.0.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,195 @@
|
|
1
|
+
import logging
|
2
|
+
import ast
|
3
|
+
import sys
|
4
|
+
from pathlib import Path
|
5
|
+
import importlib.util
|
6
|
+
|
7
|
+
from pydantic import BaseModel
|
8
|
+
|
9
|
+
from .lfx_deps import LFX_DEPENDENCIES
|
10
|
+
|
11
|
+
logger = logging.getLogger(__name__)
|
12
|
+
|
13
|
+
class LangflowComponent(BaseModel):
    """A single node of a Langflow flow, as summarized for the CLI user."""
    # Unique node id taken from the flow node's "data.id" field.
    id: str
    # Human-readable component name ("display_name" in the flow JSON).
    name: str
    # Password-type template fields, with values masked for safe display.
    credentials: dict
    # Imported modules not covered by the bundled lfx dependency list.
    requirements: list[str] = []
|
18
|
+
|
19
|
+
class LangflowModelSpec(BaseModel):
    """Parsed summary of a Langflow flow: its version plus per-node details."""
    # Value of the flow's "last_tested_version" field ("Unknown" when absent).
    version: str
    # One entry per node found under the flow's "data.nodes" list.
    components: list[LangflowComponent]
|
22
|
+
|
23
|
+
# Maps an import name to its PyPI distribution name when the two differ
# (e.g. `import mem0` is provided by the "mem0ai" package).
_MODULE_MAP = {
    "mem0":"mem0ai",
}
|
26
|
+
|
27
|
+
import math
|
28
|
+
from collections import Counter
|
29
|
+
|
30
|
+
def _calculate_entropy(s):
|
31
|
+
"""
|
32
|
+
Calculates the Shannon entropy of a string.
|
33
|
+
|
34
|
+
Parameters:
|
35
|
+
s (str): Input string.
|
36
|
+
|
37
|
+
Returns:
|
38
|
+
float: Shannon entropy value.
|
39
|
+
"""
|
40
|
+
if not s:
|
41
|
+
return 0.0
|
42
|
+
|
43
|
+
freq = Counter(s)
|
44
|
+
length = len(s)
|
45
|
+
|
46
|
+
entropy = -sum((count / length) * math.log2(count / length) for count in freq.values())
|
47
|
+
return entropy
|
48
|
+
|
49
|
+
def _mask_api_key(key):
|
50
|
+
"""
|
51
|
+
Masks an API key by keeping the first 5 characters visible,
|
52
|
+
masking the rest with asterisks, and truncating the result to a maximum of 25 characters.
|
53
|
+
|
54
|
+
Parameters:
|
55
|
+
key (str): The API key string.
|
56
|
+
|
57
|
+
Returns:
|
58
|
+
str: Masked and truncated API key.
|
59
|
+
"""
|
60
|
+
if not isinstance(key, str):
|
61
|
+
return key
|
62
|
+
|
63
|
+
# if this is a potential real API key -- mask it
|
64
|
+
if _calculate_entropy(key) > 4.1:
|
65
|
+
visible_part = key[:5]
|
66
|
+
masked_part = '*' * (len(key) - 5)
|
67
|
+
masked_key = visible_part + masked_part
|
68
|
+
|
69
|
+
return masked_key[:25]
|
70
|
+
elif len(key) > 25:
|
71
|
+
# if the key is longer than 25 characters, truncates it anyway
|
72
|
+
return key[:22] + '...'
|
73
|
+
|
74
|
+
return key
|
75
|
+
|
76
|
+
def _extract_imports(source_code) -> list[str]:
|
77
|
+
tree = ast.parse(source_code)
|
78
|
+
imports = set()
|
79
|
+
for node in ast.walk(tree):
|
80
|
+
if isinstance(node, ast.Import):
|
81
|
+
for alias in node.names:
|
82
|
+
# we only need the module name, not sub-module
|
83
|
+
imports.add(alias.name.split('.')[0])
|
84
|
+
elif isinstance(node, ast.ImportFrom):
|
85
|
+
if node.module:
|
86
|
+
# we only need the module name, not sub-module
|
87
|
+
imports.add(node.module.split('.')[0])
|
88
|
+
return sorted(imports)
|
89
|
+
|
90
|
+
|
91
|
+
|
92
|
+
def _is_builtin_module(module_name: str) -> bool:
|
93
|
+
underscore_module_name = f"_{module_name}"
|
94
|
+
|
95
|
+
# Check against the list of standard modules
|
96
|
+
if module_name in sys.stdlib_module_names:
|
97
|
+
return True
|
98
|
+
|
99
|
+
if underscore_module_name in sys.stdlib_module_names:
|
100
|
+
return True
|
101
|
+
|
102
|
+
# Check against the list of built-in module names
|
103
|
+
if module_name in sys.builtin_module_names:
|
104
|
+
return True
|
105
|
+
|
106
|
+
if underscore_module_name in sys.builtin_module_names:
|
107
|
+
return True
|
108
|
+
|
109
|
+
# Use importlib to find the module spec
|
110
|
+
spec = importlib.util.find_spec(module_name)
|
111
|
+
if spec is None:
|
112
|
+
return False # Module not found
|
113
|
+
|
114
|
+
# Check if the loader is a BuiltinImporter
|
115
|
+
return isinstance(spec.loader, importlib.machinery.BuiltinImporter)
|
116
|
+
|
117
|
+
|
118
|
+
def _find_missing_requirements(imported_modules, requirements_modules: list[str]) -> list[str]:
    """
    Compare imported modules with the known requirements and return missing ones.

    Parameters:
        imported_modules (list): List of module names used in the code.
        requirements_modules (list[str]): Module/distribution names already
            available (e.g. the bundled lfx dependency list).

    Returns:
        list: Modules that are imported but not covered by requirements_modules.
    """
    def normalize_module_name(name):
        module_name = name.split('.')[0].lower()
        # sometimes the module name in PyPI is different than the real name
        return _MODULE_MAP.get(module_name, module_name)

    # Normalize imported module names
    normalized_imports = [normalize_module_name(mod) for mod in imported_modules]

    # filter out all built-ins
    filtered_imports = [
        module for module in normalized_imports
        if not _is_builtin_module(module)
    ]

    # Compare case-insensitively: the requirements list mixes cases
    # (e.g. "SQLAlchemy", "PyYAML") while imports are lowercased above,
    # so a case-sensitive check would report false positives.  A set also
    # makes each membership test O(1).
    known = {req.lower() for req in requirements_modules}

    return [module for module in filtered_imports if module not in known]
|
152
|
+
|
153
|
+
|
154
|
+
|
155
|
+
def parse_langflow_model(model) -> LangflowModelSpec:
    """
    Extracts component details and Langflow version from a Langflow JSON object.

    Parameters:
        model (dict): The Langflow JSON object.

    Returns:
        LangflowModelSpec: A LangflowModelSpec object containing the extracted version and component information.
    """
    version = model.get("last_tested_version", "Unknown")
    components = []
    data = model.get('data', {})

    # get the list of available modules
    requirements_modules = LFX_DEPENDENCIES

    for node in data.get("nodes", []):
        node_data = node.get("data", {})
        node_info = node_data.get("node", {})
        template = node_info.get("template", {})
        code = template.get("code")
        credentials = {}

        missing_imports = []
        for field_name, field_info in template.items():
            # password-typed fields carry secrets -- mask before reporting
            if isinstance(field_info, dict) and field_info.get("password", False) == True:
                credentials[field_name] = _mask_api_key(field_info.get("value"))

        # A malformed flow may store "code" as a bare value; only dicts
        # carry the expected {"value": "<source>"} shape.
        if isinstance(code, dict) and code.get("value") is not None:
            imports = _extract_imports(code.get("value"))
            if len(imports) > 0:
                missing_imports = _find_missing_requirements(imports, requirements_modules)

        component_info = LangflowComponent(
            name=node_info.get("display_name", "Unknown"),
            id=node_data.get("id", "Unknown"),
            credentials=credentials,
            requirements=missing_imports,
        )

        components.append(component_info)

    return LangflowModelSpec(version=version, components=components)
|
195
|
+
|
@@ -0,0 +1,84 @@
|
|
1
|
+
# Modules/distributions available in the lfx runtime (plus a few stdlib
# names kept for the requirements comparison in langflow_utils).
# NOTE: the bogus entry "or" was removed -- it is a Python keyword and can
# never appear as an imported module name.
LFX_DEPENDENCIES = [
    "aiofile",
    "aiofiles",
    "annotated-types",
    "anyio",
    "asyncer",
    "cachetools",
    "caio",
    "certifi",
    "chardet",
    "charset-normalizer",
    "click",
    "defusedxml",
    "docstring_parser",
    "emoji",
    "fastapi",
    "h11",
    "h2",
    "hpack",
    "httpcore",
    "httpx",
    "hyperframe",
    "idna",
    "json_repair",
    "jsonpatch",
    "jsonpointer",
    "langchain",
    "langchain-core",
    "langchain-text-splitters",
    "langsmith",
    "lfx-nightly",
    "loguru",
    "markdown-it-py",
    "mdurl",
    "nanoid",
    "networkx",
    "numpy",
    "orjson",
    "packaging",
    "pandas",
    "passlib",
    "pillow",
    "platformdirs",
    "pydantic",
    "pydantic-settings",
    "pydantic_core",
    "Pygments",
    "python-dateutil",
    "python-dotenv",
    "pytz",
    "PyYAML",
    "requests",
    "requests-toolbelt",
    "rich",
    "shellingham",
    "six",
    "sniffio",
    "SQLAlchemy",
    "starlette",
    "structlog",
    "tenacity",
    "tomli",
    "typer",
    "typing-inspection",
    "typing_extensions",
    "tzdata",
    "urllib3",
    "uvicorn",
    "validators",
    "zstandard",
    "langflow",
    "langchain_openai",
    "langchain_core",
    "langchain_text_splitters",
    "collections",
    "typing",
    "datetime",
    "zoneinfo",
    "re",
    "os",
    "copy",
    "json"
]
|
@@ -0,0 +1,280 @@
|
|
1
|
+
import logging
|
2
|
+
import os
|
3
|
+
import subprocess
|
4
|
+
import sys
|
5
|
+
from enum import Enum
|
6
|
+
from pathlib import Path
|
7
|
+
from typing import MutableMapping
|
8
|
+
from urllib.parse import urlparse
|
9
|
+
|
10
|
+
import requests
|
11
|
+
import typer
|
12
|
+
|
13
|
+
from ibm_watsonx_orchestrate.cli.config import Config
|
14
|
+
from ibm_watsonx_orchestrate.utils.environment import EnvService
|
15
|
+
|
16
|
+
logger = logging.getLogger(__name__)
|
17
|
+
|
18
|
+
|
19
|
+
class DockerUtils:
    """Helpers for probing the local Docker installation and its containers."""

    @staticmethod
    def ensure_docker_installed () -> None:
        """Exit the process with status 1 when the docker CLI is unavailable."""
        try:
            subprocess.run(["docker", "--version"], check=True, capture_output=True)
        except (FileNotFoundError, subprocess.CalledProcessError):
            logger.error("Unable to find an installed docker")
            sys.exit(1)

    @staticmethod
    def check_exclusive_observability(langfuse_enabled: bool, ibm_tele_enabled: bool):
        """Return False when Langfuse and IBM telemetry would run together.

        The two observability stacks are mutually exclusive: they may not be
        requested at the same time, nor may one be enabled while the other's
        container is already up.
        """
        conflict = (
            (langfuse_enabled and ibm_tele_enabled)
            or (langfuse_enabled and DockerUtils.__is_docker_container_running("docker-frontend-server-1"))
            or (ibm_tele_enabled and DockerUtils.__is_docker_container_running("docker-langfuse-web-1"))
        )
        return not conflict

    @staticmethod
    def __is_docker_container_running(container_name):
        """Return True when `docker ps` output mentions *container_name*."""
        DockerUtils.ensure_docker_installed()
        ps_command = ["docker", "ps", "-f", f"name={container_name}"]
        ps_result = subprocess.run(ps_command, env=os.environ, capture_output=True)
        return container_name in str(ps_result.stdout)
|
51
|
+
|
52
|
+
|
53
|
+
class DockerLoginService:
    """Logs the local Docker client into the registry hosting the
    Developer Edition images, choosing credentials according to the
    configured developer-edition source."""

    def __init__(self, env_service: EnvService):
        # EnvService resolves the dev-edition source from the env files.
        self.__env_service = env_service

    def login_by_dev_edition_source(self, env_dict: dict) -> None:
        """Run `docker login` with credentials matching the resolved source.

        Sources: "internal" (IBM IAM key), "myibm" (entitlement key) or
        "orchestrate" (credentials derived from the WO auth type).  Setting
        WO_DEVELOPER_EDITION_SKIP_LOGIN=true skips the login entirely.

        Parameters:
            env_dict (dict): Merged environment values (registry URL, keys, ...).

        Raises:
            ValueError: when a value required by the resolved source is missing.
        """
        source = self.__env_service.get_dev_edition_source_core(env_dict=env_dict)

        if env_dict.get('WO_DEVELOPER_EDITION_SKIP_LOGIN', None) == 'true':
            logger.info('WO_DEVELOPER_EDITION_SKIP_LOGIN is set to true, skipping login.')
            logger.warning('If the developer edition images are not already pulled this call will fail without first setting WO_DEVELOPER_EDITION_SKIP_LOGIN=false')
        else:
            if not env_dict.get("REGISTRY_URL"):
                raise ValueError("REGISTRY_URL is not set.")
            # Only the host part of the registry URL is used for login.
            registry_url = env_dict["REGISTRY_URL"].split("/")[0]
            if source == "internal":
                iam_api_key = env_dict.get("DOCKER_IAM_KEY")
                if not iam_api_key:
                    raise ValueError(
                        "DOCKER_IAM_KEY is required in the environment file if WO_DEVELOPER_EDITION_SOURCE is set to 'internal'.")
                self.__docker_login(iam_api_key, registry_url, "iamapikey")
            elif source == "myibm":
                wo_entitlement_key = env_dict.get("WO_ENTITLEMENT_KEY")
                if not wo_entitlement_key:
                    raise ValueError("WO_ENTITLEMENT_KEY is required in the environment file.")
                self.__docker_login(wo_entitlement_key, registry_url, "cp")
            elif source == "orchestrate":
                wo_auth_type = env_dict.get("WO_AUTH_TYPE")
                api_key, username = self.__get_docker_cred_by_wo_auth_type(auth_type=wo_auth_type, env_dict=env_dict)
                self.__docker_login(api_key, registry_url, username)

    @staticmethod
    def __docker_login(api_key: str, registry_url: str, username: str = "iamapikey") -> None:
        """Invoke `docker login`, feeding the key via stdin; exit(1) on failure."""
        logger.info(f"Logging into Docker registry: {registry_url} ...")
        result = subprocess.run(
            ["docker", "login", "-u", username, "--password-stdin", registry_url],
            input=api_key.encode("utf-8"),
            capture_output=True,
        )
        if result.returncode != 0:
            logger.error(f"Error logging into Docker:\n{result.stderr.decode('utf-8')}")
            sys.exit(1)
        logger.info("Successfully logged in to Docker.")

    @staticmethod
    def __get_docker_cred_by_wo_auth_type(auth_type: str | None, env_dict: dict) -> tuple[str, str]:
        """Return (password, username) for `docker login` based on the WO auth type.

        When *auth_type* is not given it is inferred from the WO_INSTANCE URL
        (checked most-specific first: cloud.ibm.com -> ibm_iam, ibm.com -> mcsp,
        https://cpd... -> cpd).

        Raises:
            ValueError: on missing required values or an unknown auth type.
        """
        # Try to infer the auth type if not provided
        if not auth_type:
            instance_url = env_dict.get("WO_INSTANCE")
            if instance_url:
                if ".cloud.ibm.com" in instance_url:
                    auth_type = "ibm_iam"
                elif ".ibm.com" in instance_url:
                    auth_type = "mcsp"
                elif "https://cpd" in instance_url:
                    auth_type = "cpd"

        if auth_type in {"mcsp", "ibm_iam"}:
            wo_api_key = env_dict.get("WO_API_KEY")
            if not wo_api_key:
                raise ValueError(
                    "WO_API_KEY is required in the environment file if the WO_AUTH_TYPE is set to 'mcsp' or 'ibm_iam'.")
            instance_url = env_dict.get("WO_INSTANCE")
            if not instance_url:
                raise ValueError(
                    "WO_INSTANCE is required in the environment file if the WO_AUTH_TYPE is set to 'mcsp' or 'ibm_iam'.")
            path = urlparse(instance_url).path
            if not path or '/' not in path:
                raise ValueError(
                    f"Invalid WO_INSTANCE URL: '{instance_url}'. It should contain the instance (tenant) id.")
            # The tenant id is the last path segment of the instance URL.
            tenant_id = path.split('/')[-1]
            return wo_api_key, f"wxouser-{tenant_id}"
        elif auth_type == "cpd":
            wo_api_key = env_dict.get("WO_API_KEY")
            wo_password = env_dict.get("WO_PASSWORD")
            if not wo_api_key and not wo_password:
                raise ValueError(
                    "WO_API_KEY or WO_PASSWORD is required in the environment file if the WO_AUTH_TYPE is set to 'cpd'.")
            wo_username = env_dict.get("WO_USERNAME")
            if not wo_username:
                raise ValueError("WO_USERNAME is required in the environment file if the WO_AUTH_TYPE is set to 'cpd'.")
            # API key takes precedence over password when both are present.
            return wo_api_key or wo_password, wo_username  # type: ignore[return-value]
        else:
            raise ValueError(
                f"Unknown value for WO_AUTH_TYPE: '{auth_type}'. Must be one of ['mcsp', 'ibm_iam', 'cpd'].")
|
138
|
+
|
139
|
+
|
140
|
+
class DockerComposeCore:
    """Thin wrapper around `docker compose` subcommands for the dev stack."""

    def __init__(self, env_service: EnvService) -> None:
        # EnvService supplies the compose file location.
        self.__env_service = env_service

    def service_up (self, service_name: str, friendly_name: str, final_env_file: Path, compose_env: MutableMapping = None) -> subprocess.CompletedProcess[bytes]:
        """Start one compose service detached, pruning orphan containers."""
        base_command = self.__ensure_docker_compose_installed()
        compose_path = self.__env_service.get_compose_file()

        command = [
            *base_command,
            "-f", str(compose_path),
            "--env-file", str(final_env_file),
            "up", service_name, "-d", "--remove-orphans",
        ]

        # Only override the subprocess environment when one was supplied.
        run_kwargs = {"env": compose_env} if compose_env is not None else {}

        logger.info(f"Starting docker-compose {friendly_name} service...")

        return subprocess.run(command, capture_output=False, **run_kwargs)

    def services_up(self, profiles: list[str], final_env_file: Path, supplementary_compose_args: list[str]) -> subprocess.CompletedProcess[bytes]:
        """Start every requested compose profile detached, pruning orphans."""
        compose_path = self.__env_service.get_compose_file()
        command = list(self.__ensure_docker_compose_installed())

        for profile in profiles:
            command.extend(["--profile", profile])

        command.extend([
            "-f", str(compose_path),
            "--env-file", str(final_env_file),
            "up",
        ])
        # Caller-provided flags go between "up" and the detach options.
        command.extend(supplementary_compose_args)
        command.extend(["-d", "--remove-orphans"])

        logger.info("Starting docker-compose services...")
        return subprocess.run(command, capture_output=False)

    def service_down (self, service_name: str, friendly_name: str, final_env_file: Path, is_reset: bool = False) -> subprocess.CompletedProcess[bytes]:
        """Stop one compose service; drop its volumes when *is_reset* is True."""
        base_command = self.__ensure_docker_compose_installed()
        compose_path = self.__env_service.get_compose_file()

        command = [
            *base_command,
            "-f", str(compose_path),
            "--env-file", str(final_env_file),
            "down", service_name,
        ]

        if is_reset:
            command.append("--volumes")
            logger.info(f"Stopping docker-compose {friendly_name} service and resetting volumes...")
        else:
            logger.info(f"Stopping docker-compose {friendly_name} service...")

        return subprocess.run(command, capture_output=False)

    def services_down (self, final_env_file: Path, is_reset: bool = False) -> subprocess.CompletedProcess[bytes]:
        """Stop every compose profile; drop volumes when *is_reset* is True."""
        base_command = self.__ensure_docker_compose_installed()
        compose_path = self.__env_service.get_compose_file()

        command = [
            *base_command,
            "--profile", "*",
            "-f", str(compose_path),
            "--env-file", str(final_env_file),
            "down",
        ]

        if is_reset:
            command.append("--volumes")
            logger.info("Stopping docker-compose service and resetting volumes...")
        else:
            logger.info("Stopping docker-compose services...")

        return subprocess.run(command, capture_output=False)

    def services_logs (self, final_env_file: Path, should_follow: bool = True) -> subprocess.CompletedProcess[bytes]:
        """Show (and by default follow) logs for every compose profile."""
        compose_path = self.__env_service.get_compose_file()

        log_args = [
            "-f", str(compose_path),
            "--env-file", str(final_env_file),
            "--profile", "*",
            "logs",
        ]

        if should_follow is True:
            log_args.append("--follow")

        command = self.__ensure_docker_compose_installed() + log_args

        logger.info("Docker Logs...")
        return subprocess.run(command, capture_output=False)

    def service_container_bash_exec (self, service_name: str, log_message: str, final_env_file: Path, bash_command: str) -> subprocess.CompletedProcess[bytes]:
        """Run *bash_command* inside the named service's container via `exec`."""
        base_command = self.__ensure_docker_compose_installed()
        compose_path = self.__env_service.get_compose_file()

        command = [
            *base_command,
            "-f", str(compose_path),
            "--env-file", str(final_env_file),
            "exec", service_name, "bash", "-c", bash_command,
        ]

        logger.info(log_message)
        return subprocess.run(command, capture_output=False)

    @staticmethod
    def __ensure_docker_compose_installed() -> list:
        """Return the CLI prefix for compose, preferring the docker plugin;
        exit(1) when neither variant is available."""
        for candidate in (["docker", "compose"], ["docker-compose"]):
            try:
                subprocess.run(candidate + ["version"], check=True, capture_output=True)
                return candidate
            except (FileNotFoundError, subprocess.CalledProcessError):
                continue

        # NOTE: ideally, typer should be a type that's injected into the constructor but is referenced directly for
        # the purposes of reporting some info to the user.
        typer.echo("Unable to find an installed docker-compose or docker compose")
        sys.exit(1)
|