thestage 0.5.38__py3-none-any.whl → 0.5.40__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37) hide show
  1. thestage/.env +3 -4
  2. thestage/__init__.py +1 -1
  3. thestage/controllers/config_controller.py +3 -4
  4. thestage/controllers/container_controller.py +12 -16
  5. thestage/controllers/project_controller.py +10 -3
  6. thestage/controllers/utils_controller.py +2 -3
  7. thestage/entities/file_item.py +27 -0
  8. thestage/exceptions/file_system_exception.py +6 -0
  9. thestage/helpers/error_handler.py +2 -2
  10. thestage/helpers/logger/app_logger.py +3 -4
  11. thestage/services/abstract_service.py +1 -2
  12. thestage/services/app_config_service.py +2 -3
  13. thestage/services/clients/.DS_Store +0 -0
  14. thestage/services/clients/git/git_client.py +3 -3
  15. thestage/services/clients/thestage_api/api_client.py +3 -61
  16. thestage/services/clients/thestage_api/core/api_client_abstract.py +91 -0
  17. thestage/services/clients/thestage_api/core/api_client_core.py +25 -0
  18. thestage/services/clients/thestage_api/core/http_client_exception.py +12 -0
  19. thestage/services/clients/thestage_api/dtos/logging_controller/log_polling_request.py +1 -1
  20. thestage/services/clients/thestage_api/dtos/project_response.py +0 -2
  21. thestage/services/clients/thestage_api/dtos/sftp_path_helper.py +3 -2
  22. thestage/services/config_provider/config_provider.py +98 -44
  23. thestage/services/connect/connect_service.py +1 -1
  24. thestage/services/container/container_service.py +2 -8
  25. thestage/services/core_files/config_entity.py +25 -0
  26. thestage/services/filesystem_service.py +115 -0
  27. thestage/services/instance/instance_service.py +1 -2
  28. thestage/services/logging/logging_service.py +76 -95
  29. thestage/services/project/project_service.py +9 -7
  30. thestage/services/remote_server_service.py +3 -3
  31. thestage/services/service_factory.py +1 -2
  32. thestage/services/validation_service.py +26 -10
  33. {thestage-0.5.38.dist-info → thestage-0.5.40.dist-info}/METADATA +1 -2
  34. {thestage-0.5.38.dist-info → thestage-0.5.40.dist-info}/RECORD +37 -29
  35. {thestage-0.5.38.dist-info → thestage-0.5.40.dist-info}/WHEEL +1 -1
  36. {thestage-0.5.38.dist-info → thestage-0.5.40.dist-info}/LICENSE.txt +0 -0
  37. {thestage-0.5.38.dist-info → thestage-0.5.40.dist-info}/entry_points.txt +0 -0
@@ -1,30 +1,56 @@
1
+ import typer
2
+
3
+ import json
1
4
  import os
5
+ from json import JSONDecodeError
2
6
  from pathlib import Path
3
- from typing import Optional, Dict
7
+ from typing import Optional, Dict, Any
4
8
 
5
- import typer
6
- from thestage_core.entities.config_entity import ConfigEntity
7
- from thestage_core.services.config_provider.config_provider import ConfigProviderCore
8
-
9
- from thestage.config import THESTAGE_AUTH_TOKEN, THESTAGE_API_URL
9
+ from thestage.exceptions.file_system_exception import FileSystemException
10
+ from thestage.services.core_files.config_entity import ConfigEntity
10
11
  from thestage.helpers.ssh_util import parse_private_key
11
12
  from thestage.services.connect.dto.remote_server_config import RemoteServerConfig
13
+ from thestage.services.filesystem_service import FileSystemServiceCore
12
14
  from thestage.services.project.dto.project_config import ProjectConfig
15
+ from thestage.config import THESTAGE_CONFIG_DIR, THESTAGE_CONFIG_FILE, THESTAGE_AUTH_TOKEN, THESTAGE_API_URL
16
+
17
+ class ConfigProvider():
18
+ # path to current physical subject we work with (project / etc) - might be passed explicitly
19
+ _local_path: Optional[Path] = None
20
+ _local_config_path: Optional[Path] = None
21
+ _global_config_path: Optional[Path] = None
22
+ _global_config_file: Optional[Path] = None
23
+ _file_system_service = FileSystemServiceCore
13
24
 
14
25
 
15
- class ConfigProvider(ConfigProviderCore):
16
26
  def __init__(
17
27
  self,
18
- local_path: str,
28
+ local_path: Optional[str] = None,
19
29
  ):
20
- super(ConfigProvider, self).__init__(
21
- local_path=local_path,
30
+ self._file_system_service = FileSystemServiceCore()
31
+ if local_path:
32
+ self._local_path = self._file_system_service.get_path(directory=local_path, auto_create=False)
33
+
34
+ config_folder_name = self._file_system_service.get_path(f"{THESTAGE_CONFIG_DIR}", False)
35
+ self._local_config_path = self._local_path.joinpath(config_folder_name)
36
+
37
+ home_dir = self._file_system_service.get_home_path()
38
+ self._global_config_path = home_dir.joinpath(config_folder_name)
39
+
40
+ if self._global_config_path:
41
+ if not self._global_config_path.exists():
42
+ self._file_system_service.create_if_not_exists_dir(self._global_config_path)
43
+
44
+ self._global_config_file = self._global_config_path.joinpath(
45
+ self._file_system_service.get_path(f"{THESTAGE_CONFIG_FILE}", False)
22
46
  )
47
+ if not self._global_config_file.exists():
48
+ self._file_system_service.create_if_not_exists_file(self._global_config_file)
49
+
23
50
 
24
- def get_full_config(self, check_daemon: bool = False,) -> ConfigEntity:
51
+ def get_full_config(self) -> ConfigEntity:
25
52
  config_values = {}
26
53
 
27
- # read env data
28
54
  config_from_env = {}
29
55
  config_from_env['main'] = {}
30
56
  if THESTAGE_AUTH_TOKEN:
@@ -35,8 +61,7 @@ class ConfigProvider(ConfigProviderCore):
35
61
  if config_from_env:
36
62
  self.__update_config_values_dict(values_to_update=config_values, new_values=config_from_env)
37
63
 
38
- # read global config data
39
- config_from_file = self._read_config_file(self._global_config_file)
64
+ config_from_file = self.__read_config_file(self._global_config_file)
40
65
  if config_from_file:
41
66
  self.__update_config_values_dict(values_to_update=config_values, new_values=config_from_file)
42
67
 
@@ -47,27 +72,25 @@ class ConfigProvider(ConfigProviderCore):
47
72
  if self._global_config_path and not config.runtime.config_global_path:
48
73
  config.runtime.config_global_path = str(self._global_config_path)
49
74
 
50
- if check_daemon:
51
- self.check_for_daemon(config=config)
52
-
53
75
  return config
54
76
 
77
+
55
78
  def save_project_config(self, project_config: ProjectConfig):
56
- self.__create_empty_project_config_if_missing()
57
- # self.read_project_config()
58
- project_config_path = self.__get_project_config_path(with_file=True)
59
- self._save_config_file(data=project_config.model_dump(), file_path=project_config_path)
79
+ project_data_dirpath = self.__get_project_config_path()
80
+ if not project_data_dirpath.exists():
81
+ self._file_system_service.create_if_not_exists_dir(project_data_dirpath)
82
+
83
+ project_data_filepath = self.__get_project_config_path(with_file=True)
84
+ if not project_data_filepath.exists():
85
+ self._file_system_service.create_if_not_exists_file(project_data_filepath)
60
86
 
61
- def save_remote_server_config(self, remote_server_config: RemoteServerConfig):
62
- self.__create_empty_project_config_if_missing()
63
- # self.read_project_config()
64
87
  project_config_path = self.__get_project_config_path(with_file=True)
65
- self._save_config_file(data=remote_server_config.model_dump(), file_path=project_config_path)
88
+ self.__save_config_file(data=project_config.model_dump(), file_path=project_config_path)
66
89
 
67
90
 
68
91
  def save_project_deploy_ssh_key(self, deploy_ssh_key: str, project_slug: str, project_id: int) -> str:
69
92
  deploy_key_dirpath = self._global_config_path.joinpath('project_deploy_keys')
70
- self._file_system_service.create_if_not_exists(deploy_key_dirpath)
93
+ self._file_system_service.create_if_not_exists_dir(deploy_key_dirpath)
71
94
 
72
95
  deploy_key_filepath = deploy_key_dirpath.joinpath(f'project_deploy_key_{project_id}_{project_slug}')
73
96
  self._file_system_service.create_if_not_exists_file(deploy_key_filepath)
@@ -79,30 +102,20 @@ class ConfigProvider(ConfigProviderCore):
79
102
 
80
103
  return str(deploy_key_filepath)
81
104
 
105
+
82
106
  def read_project_config(self) -> Optional[ProjectConfig]:
83
107
  project_data_dirpath = self.__get_project_config_path()
84
108
  if not project_data_dirpath.exists():
85
109
  return None
86
- # self._file_system_service.create_if_not_exists(project_data_dirpath)
87
110
 
88
111
  project_data_filepath = self.__get_project_config_path(with_file=True)
89
112
  if not project_data_filepath.exists():
90
113
  return None
91
114
 
92
- config_data = self._read_config_file(project_data_filepath) if project_data_filepath and project_data_filepath.exists() else {}
115
+ config_data = self.__read_config_file(project_data_filepath) if project_data_filepath and project_data_filepath.exists() else {}
93
116
  return ProjectConfig.model_validate(config_data)
94
117
 
95
118
 
96
- def __create_empty_project_config_if_missing(self):
97
- project_data_dirpath = self.__get_project_config_path()
98
- if not project_data_dirpath.exists():
99
- self._file_system_service.create_if_not_exists(project_data_dirpath)
100
-
101
- project_data_filepath = self.__get_project_config_path(with_file=True)
102
- if not project_data_filepath.exists():
103
- self._file_system_service.create_if_not_exists_file(project_data_filepath)
104
-
105
-
106
119
  def __get_project_config_path(self, with_file: bool = False) -> Path:
107
120
  if with_file:
108
121
  return self._local_config_path.joinpath('project.json')
@@ -124,7 +137,7 @@ class ConfigProvider(ConfigProviderCore):
124
137
  typer.echo("Could not identify provided private key (expected RSA, ECDSA, ED25519)")
125
138
  raise typer.Exit(1)
126
139
 
127
- config = self.read_remote_server_config()
140
+ config = self.__read_remote_server_config()
128
141
  remote_server_config_filepath = self.__get_remote_server_config_path()
129
142
  if config:
130
143
  if not config.ip_address_to_ssh_key_map:
@@ -142,21 +155,22 @@ class ConfigProvider(ConfigProviderCore):
142
155
  self._file_system_service.create_if_not_exists_file(remote_server_config_filepath)
143
156
  config = RemoteServerConfig(ip_address_to_ssh_key_map={ip_address: str(ssh_key_path)})
144
157
 
145
- self._save_config_file(data=config.model_dump(), file_path=remote_server_config_filepath)
158
+ self.__save_config_file(data=config.model_dump(), file_path=remote_server_config_filepath)
146
159
 
147
160
  return str(remote_server_config_filepath)
148
161
 
149
162
 
150
- def read_remote_server_config(self) -> Optional[RemoteServerConfig]:
163
+ def __read_remote_server_config(self) -> Optional[RemoteServerConfig]:
151
164
  config_filepath = self.__get_remote_server_config_path()
152
165
  if not config_filepath.is_file():
153
166
  return None
154
167
 
155
- config_data = self._read_config_file(config_filepath) if config_filepath and config_filepath.exists() else {}
168
+ config_data = self.__read_config_file(config_filepath) if config_filepath and config_filepath.exists() else {}
156
169
  return RemoteServerConfig.model_validate(config_data)
157
170
 
171
+
158
172
  def get_valid_private_key_path_by_ip_address(self, ip_address: str) -> Optional[str]:
159
- remote_server_config = self.read_remote_server_config()
173
+ remote_server_config = self.__read_remote_server_config()
160
174
  if remote_server_config and remote_server_config.ip_address_to_ssh_key_map:
161
175
  private_key_path = remote_server_config.ip_address_to_ssh_key_map.get(ip_address)
162
176
  if private_key_path and Path(private_key_path).is_file():
@@ -165,10 +179,50 @@ class ConfigProvider(ConfigProviderCore):
165
179
  self.update_remote_server_config_entry(ip_address, None)
166
180
  return None
167
181
 
182
+
168
183
  @staticmethod
169
184
  def __update_config_values_dict(values_to_update: Dict, new_values: Dict):
170
185
  if 'main' in new_values:
171
186
  if 'main' in values_to_update:
172
187
  values_to_update['main'].update(new_values['main'])
173
188
  else:
174
- values_to_update['main'] = new_values['main']
189
+ values_to_update['main'] = new_values['main']
190
+
191
+
192
+ def __read_config_file(self, path: Path) -> Dict[str, Any]:
193
+ result = {}
194
+ try:
195
+ if path and path.exists():
196
+ with path.open("r") as file:
197
+ try:
198
+ if os.stat(path).st_size != 0:
199
+ result = json.load(file)
200
+ except JSONDecodeError:
201
+ pass
202
+ except OSError:
203
+ raise FileSystemException(f"Could not open config file: {path}")
204
+ return result
205
+
206
+
207
+ @staticmethod
208
+ def __save_config_file(data: Dict, file_path: Path):
209
+ with open(file_path, 'w') as configfile:
210
+ json.dump(data, configfile, indent=1)
211
+
212
+
213
+ def save_config(self, config: ConfigEntity):
214
+ data: Dict[str, Any] = self.__read_config_file(self._global_config_file)
215
+ data.update(config.model_dump(exclude_none=True, by_alias=True, exclude={'runtime', 'RUNTIME', 'daemon', 'DAEMON'}))
216
+ self.__save_config_file(data=data, file_path=self._global_config_file)
217
+
218
+
219
+ def clear_config(self, ):
220
+ if self._global_config_path and self._global_config_path.exists():
221
+ self._file_system_service.remove_folder(str(self._global_config_path))
222
+
223
+ os.unsetenv('THESTAGE_CONFIG_DIR')
224
+ os.unsetenv('THESTAGE_CONFIG_FILE')
225
+ os.unsetenv('THESTAGE_CLI_ENV')
226
+ os.unsetenv('THESTAGE_API_URL')
227
+ os.unsetenv('THESTAGE_LOG_FILE')
228
+ os.unsetenv('THESTAGE_AUTH_TOKEN')
@@ -1,8 +1,8 @@
1
1
  from typing import Optional
2
2
  import typer
3
- from thestage_core.exceptions.http_error_exception import HttpClientException
4
3
 
5
4
  from thestage.i18n.translation import __
5
+ from thestage.services.clients.thestage_api.core.http_client_exception import HttpClientException
6
6
  from thestage.services.clients.thestage_api.dtos.enums.container_status import DockerContainerStatus
7
7
  from thestage.services.clients.thestage_api.dtos.enums.selfhosted_status import SelfhostedBusinessStatus
8
8
  from thestage.services.clients.thestage_api.dtos.enums.instance_rented_status import InstanceRentedBusinessStatus
@@ -1,22 +1,16 @@
1
- import uuid
2
1
  from pathlib import Path
3
2
  from typing import List, Tuple, Optional, Dict
4
3
 
5
4
  import typer
6
- from thestage_core.entities.config_entity import ConfigEntity
7
- from thestage_core.entities.file_item import FileItemEntity
8
- from thestage_core.services.filesystem_service import FileSystemServiceCore
9
-
5
+ from thestage.services.core_files.config_entity import ConfigEntity
10
6
  from thestage.entities.container import DockerContainerEntity
11
- from thestage.helpers.logger.app_logger import app_logger
12
7
  from thestage.services.clients.thestage_api.dtos.container_param_request import DockerContainerActionRequestDto
13
8
  from thestage.services.clients.thestage_api.dtos.enums.container_pending_action import DockerContainerAction
14
9
  from thestage.services.clients.thestage_api.dtos.enums.container_status import DockerContainerStatus
15
10
  from thestage.entities.enums.shell_type import ShellType
16
11
  from thestage.services.clients.thestage_api.dtos.paginated_entity_list import PaginatedEntityList
17
- from thestage.services.clients.thestage_api.dtos.project_response import ProjectDto
18
- from thestage.services.clients.thestage_api.dtos.sftp_path_helper import SftpFileItemEntity
19
12
  from thestage.services.container.mapper.container_mapper import ContainerMapper
13
+ from thestage.services.filesystem_service import FileSystemServiceCore
20
14
  from thestage.services.remote_server_service import RemoteServerService
21
15
  from thestage.i18n.translation import __
22
16
  from thestage.services.abstract_service import AbstractService
@@ -0,0 +1,25 @@
1
+ from typing import Optional
2
+
3
+ from pydantic import BaseModel, Field
4
+
5
# Sections persisted to the config file.
class MainConfigEntity(BaseModel):
    """Primary CLI settings (auth token and API endpoint)."""
    thestage_auth_token: Optional[str] = Field(None, alias='thestage_auth_token')
    thestage_api_url: Optional[str] = Field(None, alias='thestage_api_url')


class DaemonConfigEntity(BaseModel):
    """Daemon connection settings."""
    daemon_token: Optional[str] = Field(None, alias='daemon_token')
    backend_api_url: Optional[str] = Field(None, alias='backend_api_url')


# Section resolved per invocation; never written to the config file.
class RuntimeConfigEntity(BaseModel):
    """Runtime-only values (working dir, resolved global config path)."""
    working_directory: Optional[str] = Field(None, alias='working_directory')
    config_global_path: Optional[str] = Field(None, alias='config_global_path')


class ConfigEntity(BaseModel):
    """Aggregate configuration: persisted sections plus runtime-only state."""
    main: MainConfigEntity = Field(default_factory=MainConfigEntity, alias='main')
    runtime: RuntimeConfigEntity = Field(default_factory=RuntimeConfigEntity, alias="runtime")  # TODO merge with main
    daemon: DaemonConfigEntity = Field(default_factory=DaemonConfigEntity, alias="daemon")  # TODO this should not be in core package
    start_on_daemon: bool = Field(False, alias='start_on_daemon')  # TODO this should not be in core package
@@ -0,0 +1,115 @@
1
+ import os
2
+ import shutil
3
+ from pathlib import Path
4
+ from typing import Optional, List
5
+
6
+ from thestage.entities.file_item import FileItemEntity
7
+ from thestage.exceptions.file_system_exception import FileSystemException
8
+
9
+
10
+ class FileSystemServiceCore:
11
+
12
+ def get_ssh_path(self) -> Optional[Path]:
13
+ home_path = self.get_home_path()
14
+ ssh_path = home_path.joinpath('.ssh')
15
+ if not ssh_path.exists():
16
+ raise FileSystemException(f"Path does not exist: {ssh_path}")
17
+ return ssh_path
18
+
19
+ def get_home_path(self) -> Optional[Path]:
20
+ try:
21
+ return Path.home()
22
+ except RuntimeError | OSError as ex1:
23
+ raise FileSystemException("Error getting user home path") from ex1
24
+
25
+ def create_if_not_exists_dir(self, path: Path) -> Path:
26
+ if not path.exists():
27
+ try:
28
+ path.mkdir(exist_ok=True, parents=True)
29
+ except FileNotFoundError as ex1:
30
+ raise FileSystemException(message=f"FileNotFoundError (dir): {path}") from ex1
31
+ except OSError as ex2:
32
+ raise FileSystemException(message=f"Could not create directory: {path}") from ex2
33
+ return path
34
+
35
+ def create_if_not_exists_file(self, path: Path) -> Path:
36
+ if not path.exists():
37
+ try:
38
+ path.touch(exist_ok=True)
39
+ except FileNotFoundError as ex1:
40
+ raise FileSystemException(message=f"FileNotFoundError (file): {path}") from ex1
41
+ except OSError as ex2:
42
+ raise FileSystemException(message=f"Could not create file: {path}") from ex2
43
+ return path
44
+
45
+ def get_path(self, directory: str, auto_create: bool = True) -> Path:
46
+ path = Path(directory)
47
+ if auto_create:
48
+ self.create_if_not_exists_dir(path)
49
+ return path
50
+
51
+ def is_folder_empty(self, folder: str, auto_create: bool) -> bool:
52
+ path = self.get_path(folder, auto_create)
53
+ if not path.exists():
54
+ return True
55
+ if not path.is_dir():
56
+ raise FileSystemException(message=f"Expected directory but found a file: {path}")
57
+ objects = os.listdir(path)
58
+ if len(objects) == 0:
59
+ return True
60
+ else:
61
+ return False
62
+
63
+ def is_folder_exists(self, folder: str, auto_create: bool = True) -> bool:
64
+ path = self.get_path(folder, auto_create=auto_create)
65
+ if path.exists():
66
+ return True
67
+ else:
68
+ return False
69
+
70
+ def find_line_in_text_file(self, file: str, find: str) -> bool:
71
+ path = self.get_path(file, auto_create=False)
72
+ if path and path.exists():
73
+ with open(path, 'r') as file:
74
+ for line in file.readlines():
75
+ if (find + "\n") == line:
76
+ return True
77
+ return False
78
+
79
+ def add_line_to_text_file(self, file: str, new_line: str):
80
+ path = self.get_path(file, auto_create=False)
81
+ if path and path.exists():
82
+ with open(path, 'a') as file:
83
+ file.write(new_line)
84
+ file.write('\n')
85
+
86
+ def check_if_path_exist(self, file: str) -> bool:
87
+ path = self.get_path(file, auto_create=False)
88
+ if path.exists():
89
+ return True
90
+ else:
91
+ return False
92
+
93
+ def get_path_items(self, folder: str) -> List[FileItemEntity]:
94
+ path = self.get_path(folder, auto_create=False)
95
+ path_items = []
96
+ if not path.exists():
97
+ return path_items
98
+
99
+ parent = FileItemEntity.build_from_path(path=path)
100
+ path_items.append(parent)
101
+ if path.is_dir():
102
+ objects = os.listdir(path)
103
+ if objects:
104
+ for item in objects:
105
+ elem = path.joinpath(item)
106
+ if elem.is_dir():
107
+ parent.children.extend(self.get_path_items(folder=str(elem)))
108
+ else:
109
+ parent.children.append(FileItemEntity.build_from_path(path=elem))
110
+ return path_items
111
+
112
+ def remove_folder(self, path: str):
113
+ real_path = self.get_path(directory=path, auto_create=False)
114
+ if real_path and real_path.exists():
115
+ shutil.rmtree(real_path)
@@ -2,8 +2,7 @@ from pathlib import Path
2
2
  from typing import List, Optional, Dict
3
3
 
4
4
  import typer
5
- from thestage_core.entities.config_entity import ConfigEntity
6
-
5
+ from thestage.services.core_files.config_entity import ConfigEntity
7
6
  from thestage.i18n.translation import __
8
7
  from thestage.services.clients.thestage_api.dtos.enums.selfhosted_status import SelfhostedBusinessStatus
9
8
  from thestage.services.clients.thestage_api.dtos.enums.instance_rented_status import InstanceRentedBusinessStatus
@@ -8,8 +8,9 @@ import aioconsole
8
8
  import typer
9
9
  from httpx import ReadTimeout, ConnectError, ConnectTimeout
10
10
  from requests.exceptions import ChunkedEncodingError
11
- from thestage_core.entities.config_entity import ConfigEntity
12
11
 
12
+ from thestage.services.clients.thestage_api.dtos.enums.container_status import DockerContainerStatus
13
+ from thestage.services.core_files.config_entity import ConfigEntity
13
14
  from thestage.services.clients.thestage_api.dtos.enums.inference_simulator_status import InferenceSimulatorStatus
14
15
  from thestage.services.clients.thestage_api.dtos.enums.task_status import TaskStatus
15
16
  from thestage.services.clients.thestage_api.dtos.inference_controller.get_inference_simulator_response import \
@@ -69,25 +70,23 @@ class LoggingService(AbstractService):
69
70
 
70
71
 
71
72
  @error_handler()
72
- def stream_container_logs(self, config: ConfigEntity, container: DockerContainerDto):
73
- typer.echo(__(
74
- f"Log stream for docker container '%container_slug%' started",
75
- {
76
- 'container_slug': container.slug,
77
- }
78
- ))
79
- typer.echo(__("Press CTRL+C to stop"))
80
- try:
81
- for log_json in self.__thestage_api_client.get_container_log_stream(
82
- token=config.main.thestage_auth_token,
83
- container_id=container.id
84
- ):
85
- self.__print_log_line(log_json)
86
- except ChunkedEncodingError as e1: # handling server timeout
87
- typer.echo(__('Log stream disconnected (1)'))
73
+ def print_last_container_logs(self, config: ConfigEntity, container_uid: str, logs_number: Optional[int]):
74
+ container: Optional[DockerContainerDto] = self.__thestage_api_client.get_container(
75
+ token=config.main.thestage_auth_token,
76
+ container_slug=container_uid,
77
+ )
78
+
79
+ if not container:
80
+ typer.echo("Container was not found")
88
81
  raise typer.Exit(1)
89
82
 
90
- typer.echo(__('Log stream disconnected'))
83
+ logs = self.__thestage_api_client.query_user_logs(
84
+ token=config.main.thestage_auth_token,
85
+ container_id=container.id,
86
+ limit=logs_number
87
+ )
88
+ for log_message in reversed(logs.queryResult):
89
+ self.__print_log_line_object(log_message)
91
90
 
92
91
 
93
92
  @error_handler()
@@ -134,7 +133,7 @@ class LoggingService(AbstractService):
134
133
 
135
134
  typer.echo(__("CTRL+C to cancel the task. CTRL+D to disconnect from log stream."))
136
135
 
137
- print_logs_task = asyncio.create_task(self.print_task_or_inference_simulator_logs(config.main.thestage_auth_token, task.id))
136
+ print_logs_task = asyncio.create_task(self.print_realtime_logs(token=config.main.thestage_auth_token, task_id=task.id))
138
137
  input_task = asyncio.create_task(self.read_log_stream_input())
139
138
 
140
139
  def sigint_handler():
@@ -200,7 +199,7 @@ class LoggingService(AbstractService):
200
199
  typer.echo(__("CTRL+D to disconnect from log stream."))
201
200
 
202
201
  print_task_or_inference_simulator_logs = asyncio.create_task(
203
- self.print_task_or_inference_simulator_logs(config.main.thestage_auth_token, inference_simulator_id=inference_simulator.id)
202
+ self.print_realtime_logs(token=config.main.thestage_auth_token, inference_simulator_id=inference_simulator.id)
204
203
  )
205
204
  input_task = asyncio.create_task(self.read_log_stream_input())
206
205
 
@@ -212,6 +211,49 @@ class LoggingService(AbstractService):
212
211
  typer.echo(__(f"Disconnected from log stream. You can try to reconnect with 'thestage project inference-simulator logs {slug}'."))
213
212
 
214
213
 
214
+ @error_handler()
215
+ def stream_container_logs_with_controls(self, config: ConfigEntity, container_uid: str):
216
+ asyncio.run(
217
+ self.__stream_container_logs_with_controls_async(
218
+ config=config,
219
+ container_uid=container_uid
220
+ )
221
+ )
222
+
223
+
224
+ @error_handler()
225
+ async def __stream_container_logs_with_controls_async(self, config: ConfigEntity, container_uid: str):
226
+ container: Optional[DockerContainerDto] = self.__thestage_api_client.get_container(
227
+ token=config.main.thestage_auth_token,
228
+ container_slug=container_uid,
229
+ )
230
+
231
+ if container:
232
+ if container.frontend_status.status_key not in [DockerContainerStatus.RUNNING]:
233
+ typer.echo(f"Container status: '{container.frontend_status.status_translation}'")
234
+ else:
235
+ typer.echo("Container was not found")
236
+ raise typer.Exit(1)
237
+
238
+ typer.echo(f"Log stream for Docker container started")
239
+ typer.echo("CTRL+D to disconnect from log stream.")
240
+
241
+ print_logs_task = asyncio.create_task(self.print_realtime_logs(token=config.main.thestage_auth_token, docker_container_id=container.id))
242
+ input_task = asyncio.create_task(self.read_log_stream_input())
243
+
244
+ def sigint_handler():
245
+ input_task.cancel()
246
+
247
+ loop = asyncio.get_event_loop()
248
+ for signal_item in [SIGINT]: # SIGINT == CTRL+C
249
+ loop.add_signal_handler(signal_item, sigint_handler)
250
+
251
+ done, pending = await asyncio.wait([print_logs_task, input_task], return_when=asyncio.FIRST_COMPLETED)
252
+
253
+ if input_task in done:
254
+ print_logs_task.cancel()
255
+
256
+
215
257
  async def read_log_stream_input(self):
216
258
  try:
217
259
  while True:
@@ -223,7 +265,13 @@ class LoggingService(AbstractService):
223
265
  pass
224
266
 
225
267
 
226
- async def print_task_or_inference_simulator_logs(self, token: str, task_id: Optional[int] = None, inference_simulator_id: Optional[int] = None):
268
+ async def print_realtime_logs(
269
+ self,
270
+ token: str,
271
+ task_id: Optional[int] = None,
272
+ inference_simulator_id: Optional[int] = None,
273
+ docker_container_id: Optional[int] = None,
274
+ ):
227
275
  polling_interval_seconds: float = 4 # also adjust polling api method timeout if changed
228
276
  between_logs_sleeping_coef: float = 1 # we emulate delay between logs, but if for any reason code runs for too long - delays will be controlled with this coef
229
277
  last_iteration_log_timestamp: Optional[str] = None # pointer to next iteration polling start (obtained from each response)
@@ -249,7 +297,7 @@ class LoggingService(AbstractService):
249
297
  token=token,
250
298
  task_id=task_id,
251
299
  inference_simulator_id=inference_simulator_id,
252
- docker_container_id=None,
300
+ docker_container_id=docker_container_id,
253
301
  last_log_timestamp=last_iteration_log_timestamp,
254
302
  last_log_id=last_log_id
255
303
  )
@@ -283,82 +331,15 @@ class LoggingService(AbstractService):
283
331
  errors_started_at = datetime.utcnow()
284
332
 
285
333
  if consecutive_error_count > 7:
334
+ seconds_with_error = (datetime.utcnow() - errors_started_at).total_seconds()
286
335
  if inference_simulator_id:
287
- seconds_with_error = (datetime.utcnow() - errors_started_at).total_seconds()
288
336
  print_nonblocking(f"Log stream: disconnected from server (connectivity issues for {seconds_with_error} seconds). Try 'thestage inference-simulator logs <inference-simulator-UID>' to reconnect.", writer)
289
- break
337
+ elif task_id:
338
+ print_nonblocking(f"Log stream: disconnected from server (connectivity issues for {seconds_with_error} seconds). Try 'thestage project task logs {task_id}' to reconnect.", writer)
339
+ elif docker_container_id:
340
+ print_nonblocking(f"Log stream: disconnected from server (connectivity issues for {seconds_with_error} seconds). Try 'thestage container logs <docker-container-UID>' to reconnect.", writer)
290
341
  else:
291
- seconds_with_error = (datetime.utcnow() - errors_started_at).total_seconds()
292
- print_nonblocking(f"Log stream: disconnected from server (connectivity issues for {seconds_with_error} seconds). Try 'thestage project inference-simulator logs {task_id}' to reconnect.", writer)
293
- break
294
-
295
- # depending on iteration duration - sleep for the remaining time and adjust log sleep coefficient if needed
296
- iteration_duration = (datetime.utcnow() - iteration_started_at).total_seconds()
297
- if iteration_duration > polling_interval_seconds:
298
- between_logs_sleeping_coef *= 0.85
299
- else:
300
- await asyncio.sleep(polling_interval_seconds - iteration_duration)
301
- if between_logs_sleeping_coef < 1:
302
- between_logs_sleeping_coef = min(1.0, between_logs_sleeping_coef * 1.15)
303
-
304
-
305
- async def print_inference_simulator_logs(self, token: str, inference_simulator_id: int):
306
- polling_interval_seconds: float = 4 # also adjust polling api method timeout if changed
307
- between_logs_sleeping_coef: float = 1 # we emulate delay between logs, but if for any reason code runs for too long - delays will be controlled with this coef
308
- last_iteration_log_timestamp: Optional[str] = None # pointer to next iteration polling start (obtained from each response)
309
- last_log_id: Optional[str] = None # pointer to next iteration polling start - to exclude the log id from result (obtained from each response)
310
- consecutive_error_count: int = 0 # connectivity errors count - stream will disconnect if too many errors in a row
311
- iteration_started_at: datetime # used to control iteration duration - polling should be done at around exact rate
312
- errors_started_at: Optional[datetime] = None # time since errors started to stream disconnect
313
-
314
- is_no_more_logs = False
315
- while not is_no_more_logs:
316
- log_wait_remaining_limit: float = polling_interval_seconds # hard limit just in case
317
- iteration_started_at = datetime.utcnow()
318
- last_printed_log_timestamp: Optional[datetime] = None
319
- reader, writer = await aioconsole.get_standard_streams()
320
-
321
- # this shows (somewhat accurate) time difference between logs here and in real time. should not grow.
322
- # if last_iteration_log_timestamp:
323
- # last_log_timestamp_parsed = datetime.strptime(last_iteration_log_timestamp, '%Y-%m-%dT%H:%M:%S.%f')
324
- # stream_to_logs_diff = datetime.utcnow() - last_log_timestamp_parsed
325
- # print_nonblocking(f'TDIFF {stream_to_logs_diff.total_seconds()}', writer)
326
- try:
327
- logs_response = await self.__thestage_api_client.poll_logs_httpx(
328
- token=token,
329
- inference_simulator_id=inference_simulator_id,
330
- docker_container_id=None,
331
- last_log_timestamp=last_iteration_log_timestamp,
332
- last_log_id=last_log_id
333
- )
334
-
335
- if consecutive_error_count > 0:
336
- consecutive_error_count = 0
337
- errors_started_at = None
338
- log_wait_remaining_limit = 0 # no log delays after reconnect
339
-
340
- last_iteration_log_timestamp = logs_response.lastLogTimestamp
341
- last_log_id = logs_response.lastLogId
342
-
343
- for log_item in logs_response.logs:
344
- current_log_timestamp = datetime.strptime(log_item.timestamp[:26], '%Y-%m-%dT%H:%M:%S.%f') # python does not like nanoseconds
345
- if last_printed_log_timestamp is not None and log_wait_remaining_limit > 0:
346
- logs_sleeptime = (current_log_timestamp - last_printed_log_timestamp).total_seconds() * between_logs_sleeping_coef
347
- await asyncio.sleep(logs_sleeptime)
348
- log_wait_remaining_limit -= logs_sleeptime
349
- self.__print_log_line_object_nonblocking(log_item, writer)
350
- last_printed_log_timestamp = current_log_timestamp
351
-
352
- except (ReadTimeout, ConnectError, ConnectTimeout) as e:
353
- consecutive_error_count += 1
354
- if consecutive_error_count == 1:
355
- print_nonblocking("Network issues, attempting to re-establish connection...", writer, BytePrintStyle.ORANGE)
356
- if not errors_started_at:
357
- errors_started_at = datetime.utcnow()
358
-
359
- if consecutive_error_count > 7:
360
- seconds_with_error = (datetime.utcnow() - errors_started_at).total_seconds()
361
- print_nonblocking(f"Log stream: disconnected from server (connectivity issues for {seconds_with_error} seconds).", writer)
342
+ print_nonblocking(f"Log stream: disconnected from server (connectivity issues for {seconds_with_error} seconds)", writer)
362
343
  break
363
344
 
364
345
  # depending on iteration duration - sleep for the remaining time and adjust log sleep coefficient if needed