truefoundry 0.2.10__py3-none-any.whl → 0.3.0rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of truefoundry might be problematic. Click here for more details.
- truefoundry/__init__.py +1 -0
- truefoundry/autodeploy/cli.py +31 -18
- truefoundry/deploy/__init__.py +118 -1
- truefoundry/deploy/auto_gen/models.py +1675 -0
- truefoundry/deploy/builder/__init__.py +116 -0
- truefoundry/deploy/builder/builders/__init__.py +22 -0
- truefoundry/deploy/builder/builders/dockerfile.py +57 -0
- truefoundry/deploy/builder/builders/tfy_notebook_buildpack/__init__.py +44 -0
- truefoundry/deploy/builder/builders/tfy_notebook_buildpack/dockerfile_template.py +51 -0
- truefoundry/deploy/builder/builders/tfy_python_buildpack/__init__.py +44 -0
- truefoundry/deploy/builder/builders/tfy_python_buildpack/dockerfile_template.py +158 -0
- truefoundry/deploy/builder/docker_service.py +168 -0
- truefoundry/deploy/cli/cli.py +19 -26
- truefoundry/deploy/cli/commands/__init__.py +18 -0
- truefoundry/deploy/cli/commands/apply_command.py +52 -0
- truefoundry/deploy/cli/commands/build_command.py +45 -0
- truefoundry/deploy/cli/commands/build_logs_command.py +89 -0
- truefoundry/deploy/cli/commands/create_command.py +75 -0
- truefoundry/deploy/cli/commands/delete_command.py +77 -0
- truefoundry/deploy/cli/commands/deploy_command.py +99 -0
- truefoundry/deploy/cli/commands/get_command.py +216 -0
- truefoundry/deploy/cli/commands/list_command.py +171 -0
- truefoundry/deploy/cli/commands/login_command.py +33 -0
- truefoundry/deploy/cli/commands/logout_command.py +20 -0
- truefoundry/deploy/cli/commands/logs_command.py +134 -0
- truefoundry/deploy/cli/commands/patch_application_command.py +79 -0
- truefoundry/deploy/cli/commands/patch_command.py +70 -0
- truefoundry/deploy/cli/commands/redeploy_command.py +41 -0
- truefoundry/deploy/cli/commands/terminate_comand.py +44 -0
- truefoundry/deploy/cli/commands/trigger_command.py +87 -0
- truefoundry/deploy/cli/config.py +10 -0
- truefoundry/deploy/cli/console.py +5 -0
- truefoundry/deploy/cli/const.py +12 -0
- truefoundry/deploy/cli/display_util.py +118 -0
- truefoundry/deploy/cli/util.py +92 -0
- truefoundry/deploy/core/__init__.py +7 -0
- truefoundry/deploy/core/login.py +9 -0
- truefoundry/deploy/core/logout.py +5 -0
- truefoundry/deploy/function_service/__init__.py +3 -0
- truefoundry/deploy/function_service/__main__.py +27 -0
- truefoundry/deploy/function_service/app.py +92 -0
- truefoundry/deploy/function_service/build.py +45 -0
- truefoundry/deploy/function_service/remote/__init__.py +6 -0
- truefoundry/deploy/function_service/remote/context.py +3 -0
- truefoundry/deploy/function_service/remote/method.py +67 -0
- truefoundry/deploy/function_service/remote/remote.py +144 -0
- truefoundry/deploy/function_service/route.py +137 -0
- truefoundry/deploy/function_service/service.py +113 -0
- truefoundry/deploy/function_service/utils.py +53 -0
- truefoundry/deploy/io/__init__.py +0 -0
- truefoundry/deploy/io/output_callback.py +23 -0
- truefoundry/deploy/io/rich_output_callback.py +27 -0
- truefoundry/deploy/json_util.py +7 -0
- truefoundry/deploy/lib/__init__.py +0 -0
- truefoundry/deploy/lib/auth/auth_service_client.py +81 -0
- truefoundry/deploy/lib/auth/credential_file_manager.py +115 -0
- truefoundry/deploy/lib/auth/credential_provider.py +131 -0
- truefoundry/deploy/lib/auth/servicefoundry_session.py +59 -0
- truefoundry/deploy/lib/clients/__init__.py +0 -0
- truefoundry/deploy/lib/clients/servicefoundry_client.py +723 -0
- truefoundry/deploy/lib/clients/shell_client.py +13 -0
- truefoundry/deploy/lib/clients/utils.py +41 -0
- truefoundry/deploy/lib/const.py +43 -0
- truefoundry/deploy/lib/dao/__init__.py +0 -0
- truefoundry/deploy/lib/dao/application.py +246 -0
- truefoundry/deploy/lib/dao/apply.py +80 -0
- truefoundry/deploy/lib/dao/version.py +33 -0
- truefoundry/deploy/lib/dao/workspace.py +71 -0
- truefoundry/deploy/lib/exceptions.py +23 -0
- truefoundry/deploy/lib/logs_utils.py +43 -0
- truefoundry/deploy/lib/messages.py +12 -0
- truefoundry/deploy/lib/model/__init__.py +0 -0
- truefoundry/deploy/lib/model/entity.py +382 -0
- truefoundry/deploy/lib/session.py +146 -0
- truefoundry/deploy/lib/util.py +70 -0
- truefoundry/deploy/lib/win32.py +129 -0
- truefoundry/deploy/v2/__init__.py +0 -0
- truefoundry/deploy/v2/lib/__init__.py +3 -0
- truefoundry/deploy/v2/lib/deploy.py +232 -0
- truefoundry/deploy/v2/lib/deployable_patched_models.py +68 -0
- truefoundry/deploy/v2/lib/models.py +53 -0
- truefoundry/deploy/v2/lib/patched_models.py +497 -0
- truefoundry/deploy/v2/lib/source.py +267 -0
- truefoundry/langchain/__init__.py +12 -1
- truefoundry/langchain/deprecated.py +302 -0
- truefoundry/langchain/truefoundry_chat.py +130 -0
- truefoundry/langchain/truefoundry_embeddings.py +171 -0
- truefoundry/langchain/truefoundry_llm.py +106 -0
- truefoundry/langchain/utils.py +85 -0
- truefoundry/logger.py +17 -0
- truefoundry/pydantic_v1.py +5 -0
- truefoundry/python_deploy_codegen.py +132 -0
- {truefoundry-0.2.10.dist-info → truefoundry-0.3.0rc1.dist-info}/METADATA +22 -5
- truefoundry-0.3.0rc1.dist-info/RECORD +124 -0
- truefoundry/deploy/cli/deploy.py +0 -165
- truefoundry-0.2.10.dist-info/RECORD +0 -38
- /truefoundry/{deploy/cli/version.py → version.py} +0 -0
- {truefoundry-0.2.10.dist-info → truefoundry-0.3.0rc1.dist-info}/WHEEL +0 -0
- {truefoundry-0.2.10.dist-info → truefoundry-0.3.0rc1.dist-info}/entry_points.txt +0 -0
|
@@ -0,0 +1,723 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import functools
|
|
4
|
+
import json
|
|
5
|
+
import os
|
|
6
|
+
import time
|
|
7
|
+
from datetime import datetime, timezone
|
|
8
|
+
from typing import TYPE_CHECKING, Any, Dict, List, Optional
|
|
9
|
+
from urllib.parse import urljoin, urlparse
|
|
10
|
+
|
|
11
|
+
import requests
|
|
12
|
+
import socketio
|
|
13
|
+
from dateutil.tz import tzlocal
|
|
14
|
+
from packaging import version
|
|
15
|
+
from rich.status import Status
|
|
16
|
+
from tqdm import tqdm
|
|
17
|
+
from tqdm.utils import CallbackIOWrapper
|
|
18
|
+
|
|
19
|
+
from truefoundry.deploy.io.output_callback import OutputCallBack
|
|
20
|
+
from truefoundry.deploy.lib.auth.servicefoundry_session import ServiceFoundrySession
|
|
21
|
+
from truefoundry.deploy.lib.clients.utils import request_handling
|
|
22
|
+
from truefoundry.deploy.lib.const import API_SERVER_RELATIVE_PATH, VERSION_PREFIX
|
|
23
|
+
from truefoundry.deploy.lib.model.entity import (
|
|
24
|
+
Application,
|
|
25
|
+
CreateDockerRepositoryResponse,
|
|
26
|
+
Deployment,
|
|
27
|
+
DockerRegistryCredentials,
|
|
28
|
+
JobRun,
|
|
29
|
+
TenantInfo,
|
|
30
|
+
Token,
|
|
31
|
+
TriggerJobResult,
|
|
32
|
+
Workspace,
|
|
33
|
+
WorkspaceResources,
|
|
34
|
+
)
|
|
35
|
+
from truefoundry.deploy.lib.win32 import allow_interrupt
|
|
36
|
+
from truefoundry.deploy.v2.lib.models import (
|
|
37
|
+
AppDeploymentStatusResponse,
|
|
38
|
+
ApplicationFqnResponse,
|
|
39
|
+
BuildResponse,
|
|
40
|
+
DeploymentFqnResponse,
|
|
41
|
+
)
|
|
42
|
+
from truefoundry.logger import logger
|
|
43
|
+
from truefoundry.pydantic_v1 import parse_obj_as
|
|
44
|
+
from truefoundry.version import __version__
|
|
45
|
+
|
|
46
|
+
# socket.io event names used both to subscribe to, and to receive,
# server-side log streams (see ServiceFoundryServiceClient._tail_logs)
DEPLOYMENT_LOGS_SUBSCRIBE_MESSAGE = "DEPLOYMENT_LOGS"
BUILD_LOGS_SUBSCRIBE_MESSAGE = "BUILD_LOGS"
|
|
48
|
+
|
|
49
|
+
if TYPE_CHECKING:
|
|
50
|
+
from truefoundry.deploy.auto_gen.models import Application
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def _upload_packaged_code(metadata, package_file):
    """Upload a packaged code archive to a presigned URL with a progress bar.

    Args:
        metadata: Dict containing the presigned ``"url"`` to PUT to and,
            optionally, ``"headers"`` to send along with the request.
        package_file: Path of the local archive file to upload.

    Raises:
        RuntimeError: If the server does not respond with 200/201/204.
    """
    file_size = os.stat(package_file).st_size
    with open(package_file, "rb") as file_to_upload:
        with tqdm(
            total=file_size,
            unit="B",
            unit_scale=True,
            unit_divisor=1024,
            desc="Uploading package",
        ) as progress_bar:
            # Wrap the file object so every read() advances the progress bar
            wrapped_file = CallbackIOWrapper(
                progress_bar.update, file_to_upload, "read"
            )
            headers = metadata.get("headers", {})
            http_response = requests.put(
                metadata["url"], data=wrapped_file, headers=headers
            )

            if http_response.status_code not in (200, 201, 204):
                # Include the status code so server-side failures are
                # easier to triage than with the body alone.
                raise RuntimeError(
                    f"Failed to upload code: HTTP {http_response.status_code} "
                    f"{http_response.content}"
                )
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
def check_min_cli_version(fn):
    """Decorator ensuring the installed CLI meets the server's minimum version.

    The dev build ("0.0.0") skips the check entirely. For any other build the
    wrapped client method only executes when ``__version__`` is at least the
    server-advertised minimum; otherwise an exception with upgrade
    instructions is raised before the call goes out.
    """

    @functools.wraps(fn)
    def inner(*args, **kwargs):
        # "0.0.0" indicates a dev version - skip the check for local builds
        if __version__ == "0.0.0":
            logger.debug("Ignoring minimum cli version check")
        else:
            client: "ServiceFoundryServiceClient" = args[0]
            # noinspection PyProtectedMember
            required = client._min_cli_version_required
            if version.parse(__version__) < version.parse(required):
                raise Exception(
                    "You are using an outdated version of `truefoundry`.\n"
                    f"Run `pip install truefoundry>={required}` to install the supported version.",
                )
        return fn(*args, **kwargs)

    return inner
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
class ServiceFoundryServiceClient:
    """HTTP client for the TrueFoundry (ServiceFoundry) API server.

    Wraps the REST endpoints rooted at
    ``{base_url}/{API_SERVER_RELATIVE_PATH}`` plus the socket.io based log
    tailing endpoints. Most public methods are decorated with
    ``check_min_cli_version`` so an outdated CLI fails fast with an upgrade
    message instead of hitting an incompatible API.
    """

    def __init__(self, init_session: bool = True, base_url: Optional[str] = None):
        """Create a client.

        Args:
            init_session: When True, credentials are loaded via
                ``ServiceFoundrySession`` and the base url is taken from the
                session (an explicitly passed ``base_url`` is ignored with a
                warning). When False, ``base_url`` must be provided and all
                requests go out without an Authorization header.
            base_url: Server base url; only honored when ``init_session`` is
                False.

        Raises:
            Exception: If neither a session nor a base url is available.
        """
        self._session: Optional[ServiceFoundrySession] = None
        if init_session:
            if base_url:
                logger.warning("Passed base url %r will be ignored", base_url)
            self._session = ServiceFoundrySession()
            base_url = self._session.base_url
        elif not base_url:
            # Fixed message: previously read "Neither session, not base_url"
            raise Exception("Neither session, nor base_url provided")

        self._base_url = base_url.strip("/")
        self._api_server_url = f"{self._base_url}/{API_SERVER_RELATIVE_PATH}"

    @property
    def base_url(self) -> str:
        """Base URL of the server this client talks to."""
        return self._base_url

    def get_tenant_info(self) -> TenantInfo:
        """Resolve tenant information for the host this client points at."""
        res = requests.get(
            url=f"{self._api_server_url}/v1/tenant-id",
            params={"hostName": urlparse(self._api_server_url).netloc},
        )
        res = request_handling(res)
        return TenantInfo.parse_obj(res)

    @functools.cached_property
    def _min_cli_version_required(self) -> str:
        """Minimum CLI version advertised by the server (fetched once, cached)."""
        url = f"{self._api_server_url}/v1/min-cli-version"
        res = requests.get(url)
        res = request_handling(res)
        return res["truefoundryCliMinVersion"]

    def _get_header(self):
        """Return auth headers, or an empty dict when unauthenticated."""
        if not self._session:
            return {}
        return {"Authorization": f"Bearer {self._session.access_token}"}

    @check_min_cli_version
    def get_id_from_fqn(self, fqn_type: str, fqn: str):
        """Resolve an entity's id from its fully qualified name (FQN)."""
        url = f"{self._api_server_url}/{VERSION_PREFIX}/fqn/{fqn_type}"
        res = requests.get(url, headers=self._get_header(), params={"fqn": fqn})
        return request_handling(res)

    @check_min_cli_version
    def list_workspace(self):
        """List all workspaces visible to the caller (raw JSON response)."""
        url = f"{self._api_server_url}/{VERSION_PREFIX}/workspace"
        res = requests.get(url, headers=self._get_header())
        return request_handling(res)

    @check_min_cli_version
    def list_workspaces(
        self,
        cluster_id: Optional[str] = None,
        workspace_name: Optional[str] = None,
        workspace_fqn: Optional[str] = None,
    ) -> List[Workspace]:
        """List workspaces, optionally filtered by cluster, name or FQN."""
        url = f"{self._api_server_url}/{VERSION_PREFIX}/workspace"
        params = {}
        if cluster_id:
            params["clusterId"] = cluster_id
        if workspace_name:
            params["workspaceName"] = workspace_name
        if workspace_fqn:
            params["workspaceFqn"] = workspace_fqn
        res = requests.get(url, params=params, headers=self._get_header())
        response = request_handling(res)
        return parse_obj_as(List[Workspace], response)

    @check_min_cli_version
    def create_workspace(
        self,
        workspace_name: str,
        cluster_name: str,
        resources: WorkspaceResources,
    ) -> Workspace:
        """Create a workspace on the given cluster with the given resources."""
        url = f"{self._api_server_url}/{VERSION_PREFIX}/workspace"
        res = requests.post(
            url,
            json={
                "manifest": {
                    "cluster": cluster_name,
                    "name": workspace_name,
                    "resources": resources.dict(exclude_none=True),
                }
            },
            headers=self._get_header(),
        )
        res = request_handling(res)
        return Workspace.parse_obj(res)

    @check_min_cli_version
    def remove_workspace(self, workspace_id, force=False) -> Workspace:
        """Delete a workspace; ``force=True`` removes it despite contents."""
        url = f"{self._api_server_url}/{VERSION_PREFIX}/workspace/{workspace_id}"
        force = json.dumps(
            force
        )  # this dumb conversion is required because `params` just casts as str
        res = requests.delete(url, headers=self._get_header(), params={"force": force})
        response = request_handling(res)
        return Workspace.parse_obj(response["workspace"])

    @check_min_cli_version
    def get_workspace_by_name(self, workspace_name, cluster_id):
        """Fetch workspaces matching a name within a cluster (raw JSON)."""
        url = f"{self._api_server_url}/{VERSION_PREFIX}/workspace"
        res = requests.get(
            url,
            headers=self._get_header(),
            params={"name": workspace_name, "clusterId": cluster_id},
        )
        return request_handling(res)

    @check_min_cli_version
    def get_workspace(self, workspace_id):
        """Fetch a single workspace by id (raw JSON)."""
        url = f"{self._api_server_url}/{VERSION_PREFIX}/workspace/{workspace_id}"
        res = requests.get(url, headers=self._get_header())
        return request_handling(res)

    @check_min_cli_version
    def get_workspace_by_fqn(self, workspace_fqn: str) -> List[Workspace]:
        """Fetch workspaces matching an FQN (server returns a list)."""
        url = f"{self._api_server_url}/{VERSION_PREFIX}/workspace"
        res = requests.get(
            url,
            headers=self._get_header(),
            params={"fqn": workspace_fqn},
        )
        response = request_handling(res)
        return parse_obj_as(List[Workspace], response)

    @check_min_cli_version
    def list_deployments(self, workspace_id: Optional[str] = None):
        """List deployments, optionally scoped to a workspace (raw JSON).

        Note: annotation fixed from ``str = None`` to ``Optional[str]``.
        """
        url = f"{self._api_server_url}/{VERSION_PREFIX}/deployment"
        params = {}
        if workspace_id:
            params["workspaceId"] = workspace_id
        res = requests.get(url=url, params=params, headers=self._get_header())
        return request_handling(res)

    @check_min_cli_version
    def list_cluster(self):
        """List all clusters visible to the caller (raw JSON)."""
        url = f"{self._api_server_url}/{VERSION_PREFIX}/cluster"
        res = requests.get(url, headers=self._get_header())
        return request_handling(res)

    @check_min_cli_version
    def get_cluster(self, cluster_id):
        """Fetch a single cluster by id (raw JSON)."""
        url = f"{self._api_server_url}/{VERSION_PREFIX}/cluster/{cluster_id}"
        res = requests.get(url, headers=self._get_header())
        return request_handling(res)

    @check_min_cli_version
    def get_presigned_url(self, space_name, service_name, env):
        """Request a presigned code-upload URL for a service in a workspace."""
        url = f"{self._api_server_url}/{VERSION_PREFIX}/deployment/code-upload-url"
        res = requests.post(
            url,
            json={
                "workspaceFqn": space_name,
                "serviceName": service_name,
                "stage": env,
            },
            headers=self._get_header(),
        )
        return request_handling(res)

    @check_min_cli_version
    def upload_code_package(
        self, workspace_fqn: str, component_name: str, package_local_path: str
    ) -> str:
        """Upload a local code package and return its remote URI."""
        http_response = self.get_presigned_url(
            space_name=workspace_fqn, service_name=component_name, env="default"
        )
        _upload_packaged_code(metadata=http_response, package_file=package_local_path)

        return http_response["uri"]

    @check_min_cli_version
    def deploy_application(
        self, workspace_id: str, application: Application
    ) -> Deployment:
        """Submit an application manifest for deployment into a workspace."""
        data = {
            "workspaceId": workspace_id,
            "name": application.name,
            "manifest": application.dict(exclude_none=True),
        }
        logger.debug(json.dumps(data))
        url = f"{self._api_server_url}/{VERSION_PREFIX}/deployment"
        deploy_response = requests.post(url, json=data, headers=self._get_header())
        response = request_handling(deploy_response)
        return Deployment.parse_obj(response["deployment"])

    def _get_log_print_line(self, log: dict):
        """Format a raw log entry as ``[local-iso-timestamp] message``.

        ``log["time"]`` is interpreted as epoch nanoseconds (divided by 1e6
        to milliseconds, then by 1000 to seconds for ``fromtimestamp``).
        """
        timestamp = int(log["time"]) / 1e6

        time_obj = datetime.fromtimestamp(timestamp / 1000.0, tz=timezone.utc)
        # NOTE: dropped a dead `time_obj.replace(tzinfo=timezone.utc)` call -
        # datetime.replace returns a new object and the result was discarded,
        # and fromtimestamp(tz=utc) already yields an aware datetime.
        local_time = time_obj.astimezone(tzlocal())
        local_time_str = local_time.isoformat()
        return f'[{local_time_str}] {log["log"].strip()}'

    def _tail_logs(
        self,
        tail_logs_url: str,
        query_dict: dict,
        # NOTE: Rather making this printer callback an argument,
        # we should have global printer callback
        # which will be initialized based on the running env (cli, lib, notebook)
        subscribe_message: str,
        socketio_path: str = "socket.io",
        callback=None,
        wait=True,
    ):
        """Stream logs over socket.io, printing each line via ``callback``.

        Emits ``subscribe_message`` with ``query_dict`` after connecting; each
        received event of the same name is parsed as JSON and printed. When
        ``wait`` is True this blocks until the socket disconnects; Ctrl-C is
        translated into a clean disconnect via ``allow_interrupt``.
        """
        callback = callback or OutputCallBack()
        sio = socketio.Client(request_timeout=60)
        callback.print_line("Waiting for the task to start...")

        @sio.on(subscribe_message)
        def logs(data):
            try:
                _log = json.loads(data)
                callback.print_line(self._get_log_print_line(_log["body"]))
            except Exception:
                # A malformed log line should not kill the stream
                logger.exception(f"Error while parsing log line, {data!r}")

        def sio_disconnect_no_exception():
            try:
                sio.disconnect()
            except Exception:
                logger.exception("Error while disconnecting from socket connection")

        with allow_interrupt(sio_disconnect_no_exception):
            sio.connect(
                tail_logs_url,
                transports="websocket",
                headers=self._get_header(),
                socketio_path=socketio_path,
            )
            # TODO: We should have have a timeout here. `emit` does
            # not support timeout. Explore `sio.call`.
            sio.emit(
                subscribe_message,
                json.dumps(query_dict),
            )
            if wait:
                sio.wait()

    @check_min_cli_version
    def get_deployment(self, application_id: str, deployment_id: str) -> Deployment:
        """Fetch a single deployment of an application."""
        url = f"{self._api_server_url}/{VERSION_PREFIX}/app/{application_id}/deployments/{deployment_id}"
        res = requests.get(url, headers=self._get_header())
        res = request_handling(res)
        return Deployment.parse_obj(res)

    @check_min_cli_version
    def get_deployment_statuses(
        self, application_id: str, deployment_id: str
    ) -> List[AppDeploymentStatusResponse]:
        """Fetch the status history of a deployment."""
        url = f"{self._api_server_url}/{VERSION_PREFIX}/app/{application_id}/deployments/{deployment_id}/statuses"
        res = requests.get(url, headers=self._get_header())
        res = request_handling(res)
        return parse_obj_as(List[AppDeploymentStatusResponse], res)

    @check_min_cli_version
    def get_deployment_build_response(
        self, application_id: str, deployment_id: str
    ) -> List[BuildResponse]:
        """Fetch the build records associated with a deployment."""
        url = f"{self._api_server_url}/{VERSION_PREFIX}/app/{application_id}/deployments/{deployment_id}/builds"
        res = requests.get(url, headers=self._get_header())
        res = request_handling(res)
        return parse_obj_as(List[BuildResponse], res)

    def _get_deployment_logs(
        self,
        workspace_id: str,
        application_id: str,
        deployment_id: str,
        job_run_name: Optional[str] = None,
        start_ts_nano: Optional[int] = None,
        end_ts_nano: Optional[int] = None,
        limit: Optional[int] = None,
        num_logs_to_ignore: Optional[int] = None,
    ) -> List:
        """Fetch one page of deployment logs from the logs API.

        Timestamps are epoch nanoseconds. ``num_logs_to_ignore`` skips
        entries that share the exact ``start_ts_nano`` timestamp and were
        already shown (see ``poll_logs_for_deployment``).
        """
        get_logs_query = {"applicationId": application_id}
        if deployment_id:
            get_logs_query["deploymentId"] = deployment_id
        data = {"getLogsQuery": json.dumps(get_logs_query)}
        if start_ts_nano:
            data["startTs"] = str(start_ts_nano)
        if end_ts_nano:
            data["endTs"] = str(end_ts_nano)
        if limit:
            data["limit"] = str(limit)
        if num_logs_to_ignore:
            # Consistency fix: stringify like the other params above
            # (requests would cast it anyway, so behavior is unchanged).
            data["numLogsToIgnore"] = str(num_logs_to_ignore)
        if job_run_name:
            data["jobRunName"] = job_run_name

        url = f"{self._api_server_url}/{VERSION_PREFIX}/logs/{workspace_id}"
        res = requests.get(url=url, params=data, headers=self._get_header())
        res = request_handling(res)
        return list(res["logs"])

    @check_min_cli_version
    def tail_build_logs(
        self,
        build_response: BuildResponse,
        callback=None,
        wait: bool = True,
    ):
        """Tail the live build logs for a build via socket.io."""
        callback = callback or OutputCallBack()
        # tailLogsUrl is a JSON blob carrying the socket endpoint and path
        tail_logs_obj = json.loads(build_response.tailLogsUrl)
        self._tail_logs(
            tail_logs_url=urljoin(
                tail_logs_obj["uri"], f"/?type={BUILD_LOGS_SUBSCRIBE_MESSAGE}"
            ),
            socketio_path=tail_logs_obj["path"],
            query_dict={
                "pipelineRunName": build_response.name,
                "startTs": build_response.logsStartTs,
            },
            callback=callback,
            wait=wait,
            subscribe_message=BUILD_LOGS_SUBSCRIBE_MESSAGE,
        )

    @check_min_cli_version
    def tail_logs_for_deployment(
        self,
        workspace_id: str,
        application_id: str,
        deployment_id: str,
        start_ts: int,
        limit: int,
        callback=None,
        wait: bool = True,
    ):
        """Tail live deployment logs via socket.io.

        ``start_ts`` is epoch milliseconds and is converted to nanoseconds
        for the server.
        """
        callback = callback or OutputCallBack()
        self._tail_logs(
            tail_logs_url=urljoin(
                self._api_server_url, f"/?type={DEPLOYMENT_LOGS_SUBSCRIBE_MESSAGE}"
            ),
            query_dict={
                "workspaceId": workspace_id,
                "startTs": str(int(start_ts * 1e6)),
                "limit": limit,
                "getLogsQuery": {
                    "applicationId": application_id,
                    "deploymentId": deployment_id,
                },
            },
            callback=callback,
            wait=wait,
            subscribe_message=DEPLOYMENT_LOGS_SUBSCRIBE_MESSAGE,
        )

    @check_min_cli_version
    def poll_logs_for_deployment(
        self,
        workspace_id: str,
        application_id: str,
        deployment_id: str,
        job_run_name: Optional[str],
        start_ts: int,
        limit: int,
        poll_interval_seconds: int,
        callback=None,
    ):
        """Poll deployment logs forever, printing new lines as they arrive.

        ``start_ts`` is epoch milliseconds. Because the API's time resolution
        allows multiple entries with the same timestamp, each iteration counts
        the trailing entries sharing the last timestamp and asks the next page
        to skip exactly that many (``num_logs_to_ignore``) to avoid
        duplicates. Loops until interrupted.
        """
        callback = callback or OutputCallBack()
        start_ts_nano = int(start_ts * 1e6)

        with Status(status="Polling for logs") as spinner:
            num_logs_to_ignore = 0

            while True:
                logs_list = self._get_deployment_logs(
                    workspace_id=workspace_id,
                    application_id=application_id,
                    deployment_id=deployment_id,
                    job_run_name=job_run_name,
                    start_ts_nano=start_ts_nano,
                    limit=limit,
                    num_logs_to_ignore=num_logs_to_ignore,
                )

                if len(logs_list) == 0:
                    logger.warning("Did not receive any logs")
                    time.sleep(poll_interval_seconds)
                    continue

                for log in logs_list:
                    callback.print_line(self._get_log_print_line(log))

                # Count how many trailing entries share the final timestamp;
                # the next request starts at that timestamp and skips them.
                last_log_time = logs_list[-1]["time"]
                num_logs_to_ignore = 0
                for log in reversed(logs_list):
                    if log["time"] != last_log_time:
                        break
                    num_logs_to_ignore += 1

                start_ts_nano = int(last_log_time)
                spinner.update(status=f"Waiting for {poll_interval_seconds} secs.")
                time.sleep(poll_interval_seconds)

    @check_min_cli_version
    def fetch_deployment_logs(
        self,
        workspace_id: str,
        application_id: str,
        deployment_id: str,
        job_run_name: Optional[str],
        start_ts: Optional[int],
        end_ts: Optional[int],
        limit: Optional[int],
        callback=None,
    ):
        """Fetch a bounded window of deployment logs and print them.

        ``start_ts``/``end_ts`` are epoch milliseconds; ``None`` leaves that
        side of the window unbounded. (Previously a ``None`` value raised
        ``TypeError`` because it was multiplied unconditionally, despite the
        ``Optional[int]`` annotations.)
        """
        callback = callback or OutputCallBack()
        logs_list = self._get_deployment_logs(
            workspace_id=workspace_id,
            application_id=application_id,
            deployment_id=deployment_id,
            job_run_name=job_run_name,
            start_ts_nano=int(start_ts * 1e6) if start_ts is not None else None,
            end_ts_nano=int(end_ts * 1e6) if end_ts is not None else None,
            limit=limit,
        )
        for log in logs_list:
            callback.print_line(self._get_log_print_line(log))

    @check_min_cli_version
    def fetch_build_logs(
        self,
        build_response: BuildResponse,
        callback=None,
    ) -> None:
        """Fetch completed build logs via the build's getLogsUrl and print them."""
        callback = callback or OutputCallBack()
        url = build_response.getLogsUrl
        res = requests.get(url=url, headers=self._get_header())
        logs_list = request_handling(res)
        for log in logs_list["logs"]:
            # TODO: Have to establish a log line format that includes timestamp, level, message
            callback.print_line(self._get_log_print_line(log))

    @check_min_cli_version
    def get_deployment_info_by_fqn(self, deployment_fqn: str) -> DeploymentFqnResponse:
        """Resolve deployment identifiers from a deployment FQN."""
        url = f"{self._api_server_url}/{VERSION_PREFIX}/fqn/deployment"
        res = requests.get(
            url, headers=self._get_header(), params={"fqn": deployment_fqn}
        )
        res = request_handling(res)
        return DeploymentFqnResponse.parse_obj(res)

    @check_min_cli_version
    def get_application_info_by_fqn(
        self, application_fqn: str
    ) -> ApplicationFqnResponse:
        """Resolve application identifiers from an application FQN."""
        url = f"{self._api_server_url}/{VERSION_PREFIX}/fqn/app"
        res = requests.get(
            url, headers=self._get_header(), params={"fqn": application_fqn}
        )
        res = request_handling(res)
        return ApplicationFqnResponse.parse_obj(res)

    @check_min_cli_version
    def remove_application(self, application_id: str):
        """Delete an application by id; returns the raw server response."""
        url = f"{self._api_server_url}/{VERSION_PREFIX}/app/{application_id}"
        res = requests.delete(url, headers=self._get_header())
        response = request_handling(res)
        # TODO: Add pydantic here.
        return response

    @check_min_cli_version
    def get_application_info(self, application_id: str) -> Application:
        """Fetch a single application by id."""
        url = f"{self._api_server_url}/{VERSION_PREFIX}/app/{application_id}"
        res = requests.get(url, headers=self._get_header())
        response = request_handling(res)
        return Application.parse_obj(response)

    def list_job_runs(
        self,
        application_id: str,
        limit: Optional[int] = None,
        offset: Optional[int] = None,
        search_prefix: Optional[str] = None,
    ) -> List[JobRun]:
        """List runs of a job application with optional paging/prefix filter."""
        url = f"{self._api_server_url}/{VERSION_PREFIX}/jobs/{application_id}/runs"
        params = {}
        if limit:
            params["limit"] = limit
        if offset:
            params["offset"] = offset
        if search_prefix:
            params["searchPrefix"] = search_prefix
        res = requests.get(url, headers=self._get_header(), params=params)
        res = request_handling(res)
        return parse_obj_as(List[JobRun], res["data"])

    def get_job_run(
        self,
        application_id: str,
        job_run_name: str,
    ):
        """Fetch a single job run by name."""
        url = f"{self._api_server_url}/{VERSION_PREFIX}/jobs/{application_id}/runs/{job_run_name}"
        res = requests.get(url, headers=self._get_header())
        res = request_handling(res)
        return parse_obj_as(JobRun, res)

    def trigger_job(
        self,
        deployment_id: str,
        component_name: str,
        command: Optional[str] = None,
        params: Optional[Dict[str, str]] = None,
    ) -> TriggerJobResult:
        """Trigger a job component, optionally overriding command or params."""
        url = f"{self._api_server_url}/{VERSION_PREFIX}/jobs/trigger"
        body = {
            "deploymentId": deployment_id,
            "componentName": component_name,
            "input": {},
        }
        if command:
            body["input"]["command"] = command
        if params:
            body["input"]["params"] = params
        res = requests.post(url, json=body, headers=self._get_header())
        response = request_handling(res)
        return TriggerJobResult.parse_obj(response)

    @check_min_cli_version
    def get_docker_registry_creds(
        self, docker_registry_fqn: str, cluster_id: str
    ) -> DockerRegistryCredentials:
        """Fetch pull/push credentials for a docker registry on a cluster."""
        url = f"{self._api_server_url}/{VERSION_PREFIX}/docker-registry/creds"
        res = requests.get(
            url,
            headers=self._get_header(),
            params={
                "fqn": docker_registry_fqn,
                "clusterId": cluster_id,
            },
        )
        response = request_handling(res)
        return DockerRegistryCredentials.parse_obj(response)

    @check_min_cli_version
    def create_repo_in_registry(
        self, docker_registry_fqn: str, workspace_fqn: str, application_name: str
    ) -> CreateDockerRepositoryResponse:
        """Create an image repository for an application in a registry."""
        url = f"{self._api_server_url}/{VERSION_PREFIX}/docker-registry/create-repo"
        # NOTE(review): this POST sends a form-encoded body (`data=`) while
        # every other endpoint here uses `json=` - confirm the server expects
        # form data before changing it.
        res = requests.post(
            url,
            headers=self._get_header(),
            data={
                "fqn": docker_registry_fqn,
                "workspaceFqn": workspace_fqn,
                "applicationName": application_name,
            },
        )
        response = request_handling(res)
        return CreateDockerRepositoryResponse.parse_obj(response)

    @check_min_cli_version
    def list_applications(
        self,
        application_id: Optional[str] = None,
        workspace_id: Optional[str] = None,
        application_name: Optional[str] = None,
    ) -> List[Application]:
        """List applications, optionally filtered by id, workspace or name."""
        url = f"{self._api_server_url}/{VERSION_PREFIX}/app"
        params = {}
        if application_id:
            params["applicationId"] = application_id
        if workspace_id:
            params["workspaceId"] = workspace_id
        if application_name:
            params["applicationName"] = application_name
        res = requests.get(url, params=params, headers=self._get_header())
        response = request_handling(res)
        return parse_obj_as(List[Application], response)

    @check_min_cli_version
    def list_versions(
        self,
        application_id: str,
        deployment_version: Optional[int] = None,
        deployment_id: Optional[str] = None,
    ) -> List[Deployment]:
        """List deployments (versions) of an application, with optional filters."""
        url = (
            f"{self._api_server_url}/{VERSION_PREFIX}/app/{application_id}/deployments"
        )
        params = {}
        if deployment_version:
            params["version"] = deployment_version
        if deployment_id:
            params["deploymentId"] = deployment_id
        res = requests.get(url, params=params, headers=self._get_header())
        response = request_handling(res)
        return parse_obj_as(List[Deployment], response)

    @check_min_cli_version
    def get_token_from_api_key(self, api_key: str) -> Token:
        """Exchange an API key for an access token.

        NOTE(review): the key is sent as a GET query parameter, which may end
        up in server/proxy logs - confirm whether the API offers a POST form.
        """
        url = f"{self._api_server_url}/{VERSION_PREFIX}/oauth/api-key/token"
        data = {"apiKey": api_key}
        res = requests.get(url, params=data)
        res = request_handling(res)
        return Token.parse_obj(res)

    @check_min_cli_version
    def apply(self, manifest: Dict[str, Any]) -> Any:
        """Apply a manifest (declarative upsert) and return the server response.

        Note: return annotation fixed from ``-> None`` - the method has
        always returned the handled response.
        """
        url = f"{self._api_server_url}/{VERSION_PREFIX}/apply"
        body = {"manifest": manifest}
        res = requests.put(url, headers=self._get_header(), json=body)
        res = request_handling(res)
        return res

    def terminate_job_run(
        self,
        deployment_id: str,
        job_run_name: str,
        callback=None,  # retained for backward compatibility; not used
    ):
        """Request termination of a running job run.

        The identifiers are sent both as query parameters and in the JSON
        body, mirroring what the server historically accepted. (Removed a
        dead ``OutputCallBack`` construction whose result was never used.)
        """
        url = (
            f"{self._api_server_url}/{VERSION_PREFIX}/jobs/terminate"
            f"?deploymentId={deployment_id}&jobRunName={job_run_name}"
        )
        body = {
            "deploymentId": deployment_id,
            "jobRunName": job_run_name,
        }
        res = requests.post(url, json=body, headers=self._get_header())
        res = request_handling(res)

        return res
|