ob-metaflow-extensions 1.1.123rc2__py2.py3-none-any.whl → 1.1.124__py2.py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release: this version of ob-metaflow-extensions might be problematic.

metaflow_extensions/outerbounds/plugins/__init__.py
@@ -319,7 +319,6 @@ STEP_DECORATORS_DESC = [
      ("snowpark", ".snowpark.snowpark_decorator.SnowparkDecorator"),
      ("tensorboard", ".tensorboard.TensorboardDecorator"),
      ("gpu_profile", ".profilers.gpu_profile_decorator.GPUProfileDecorator"),
-     ("app_deploy", ".apps.deploy_decorator.WorkstationAppDeployDecorator"),
  ]
  FLOW_DECORATORS_DESC = [("nim", ".nim.NimDecorator")]
  TOGGLE_STEP_DECORATOR = [
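This hunk unregisters the app_deploy step decorator (WorkstationAppDeployDecorator); its implementation is deleted further down in this diff. For orientation, here is a minimal sketch (not taken from the package) of the kind of flow that relied on it, based on the constraints visible in the deleted deploy_decorator.py: the port must fall between 6000 and 6002, an environment decorator (@pypi/@conda or @pypi_base/@conda_base) is required, and it cannot be combined with @kubernetes. The flow name, package pin, and file names are hypothetical, and the top-level import assumes the decorator was exported through the metaflow namespace the way registered step decorators usually are.

from metaflow import FlowSpec, step, pypi
from metaflow import app_deploy  # assumed export; no longer available as of 1.1.124


class DashboardDeployFlow(FlowSpec):

    @pypi(packages={"flask": "3.0.0"})  # hypothetical pin; some @pypi/@conda environment is mandatory
    @app_deploy(app_port=6000, app_name="dashboard")  # lowercase alphanumeric name, under 20 characters
    @step
    def start(self):
        # The decorator injects self.deploy_dir; files written here are staged for the
        # workstation app daemon. By default it also expects a sibling "dashboard/"
        # package with a __main__.py (or set self.entrypoint to skip the copy step).
        with open(f"{self.deploy_dir}/settings.json", "w") as f:
            f.write("{}")
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DashboardDeployFlow()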
ob_metaflow_extensions-1.1.123rc2.dist-info/METADATA → ob_metaflow_extensions-1.1.124.dist-info/METADATA
@@ -1,13 +1,13 @@
  Metadata-Version: 2.1
  Name: ob-metaflow-extensions
- Version: 1.1.123rc2
+ Version: 1.1.124
  Summary: Outerbounds Platform Extensions for Metaflow
  Author: Outerbounds, Inc.
  License: Commercial
  Description-Content-Type: text/markdown
  Requires-Dist: boto3
  Requires-Dist: kubernetes
- Requires-Dist: ob-metaflow (==2.13.8.1)
+ Requires-Dist: ob-metaflow (==2.13.9.1)

  # Outerbounds platform package

ob_metaflow_extensions-1.1.123rc2.dist-info/RECORD → ob_metaflow_extensions-1.1.124.dist-info/RECORD
@@ -1,13 +1,9 @@
  metaflow_extensions/outerbounds/__init__.py,sha256=TRGvIUMjkfneWtYUFSWoubu_Kf2ekAL4WLbV3IxOj9k,499
  metaflow_extensions/outerbounds/remote_config.py,sha256=Zpfpjgz68_ZgxlXezjzlsDLo4840rkWuZgwDB_5H57U,4059
  metaflow_extensions/outerbounds/config/__init__.py,sha256=JsQGRuGFz28fQWjUvxUgR8EKBLGRdLUIk_buPLJplJY,1225
- metaflow_extensions/outerbounds/plugins/__init__.py,sha256=0M_MbFmDcHRYs9mJ7SFDT1hmXkVEhzEEWRgUU7mEt2w,12844
+ metaflow_extensions/outerbounds/plugins/__init__.py,sha256=QDcoAAiLEWG1-_mZhpswwtgtCZJvyGEsZOhNau7S9L4,12768
  metaflow_extensions/outerbounds/plugins/auth_server.py,sha256=_Q9_2EL0Xy77bCRphkwT1aSu8gQXRDOH-Z-RxTUO8N4,2202
  metaflow_extensions/outerbounds/plugins/perimeters.py,sha256=QXh3SFP7GQbS-RAIxUOPbhPzQ7KDFVxZkTdKqFKgXjI,2697
- metaflow_extensions/outerbounds/plugins/apps/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- metaflow_extensions/outerbounds/plugins/apps/app_utils.py,sha256=JrVKlbRx8-nSmI4cRrB7F8BQGDHleABIYZudK4P-XFE,7905
- metaflow_extensions/outerbounds/plugins/apps/deploy_decorator.py,sha256=oHCkcXHYIoCi9LujhBsJsktZM44Zkf4d_g4RHLsiW18,5858
- metaflow_extensions/outerbounds/plugins/apps/supervisord_utils.py,sha256=QDM7s-iVKnnmE7fM8K-nFoLojQvL_cT8hUj1LF1JOBs,8372
  metaflow_extensions/outerbounds/plugins/fast_bakery/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  metaflow_extensions/outerbounds/plugins/fast_bakery/docker_environment.py,sha256=i6F3FXwvEhkmUCTHDJ4VmSoL6vKyQhC_YRCtY6F4EkA,14209
  metaflow_extensions/outerbounds/plugins/fast_bakery/fast_bakery.py,sha256=cj63FrdioggipQFP8GwgxU3FYe6IyzjGSUGYxLQZ4nQ,5189
@@ -48,7 +44,7 @@ metaflow_extensions/outerbounds/toplevel/plugins/azure/__init__.py,sha256=WUuhz2
  metaflow_extensions/outerbounds/toplevel/plugins/gcp/__init__.py,sha256=BbZiaH3uILlEZ6ntBLKeNyqn3If8nIXZFq_Apd7Dhco,70
  metaflow_extensions/outerbounds/toplevel/plugins/kubernetes/__init__.py,sha256=5zG8gShSj8m7rgF4xgWBZFuY3GDP5n1T0ktjRpGJLHA,69
  metaflow_extensions/outerbounds/toplevel/plugins/snowflake/__init__.py,sha256=LptpH-ziXHrednMYUjIaosS1SXD3sOtF_9_eRqd8SJw,50
- ob_metaflow_extensions-1.1.123rc2.dist-info/METADATA,sha256=JYsmhOhSHRHULTOGp9b7wA3UE-KKMg6b4x-9BMcfHGc,523
- ob_metaflow_extensions-1.1.123rc2.dist-info/WHEEL,sha256=bb2Ot9scclHKMOLDEHY6B2sicWOgugjFKaJsT7vwMQo,110
- ob_metaflow_extensions-1.1.123rc2.dist-info/top_level.txt,sha256=NwG0ukwjygtanDETyp_BUdtYtqIA_lOjzFFh1TsnxvI,20
- ob_metaflow_extensions-1.1.123rc2.dist-info/RECORD,,
+ ob_metaflow_extensions-1.1.124.dist-info/METADATA,sha256=_3jU-slK-7GJKkydQ1poaPiU9iomr6p0VU-HMK70gX4,520
+ ob_metaflow_extensions-1.1.124.dist-info/WHEEL,sha256=bb2Ot9scclHKMOLDEHY6B2sicWOgugjFKaJsT7vwMQo,110
+ ob_metaflow_extensions-1.1.124.dist-info/top_level.txt,sha256=NwG0ukwjygtanDETyp_BUdtYtqIA_lOjzFFh1TsnxvI,20
+ ob_metaflow_extensions-1.1.124.dist-info/RECORD,,
metaflow_extensions/outerbounds/plugins/apps/app_utils.py (deleted)
@@ -1,170 +0,0 @@
- from metaflow.exception import MetaflowException
- import os
- from metaflow.metaflow_config_funcs import init_config
- import requests
- import time
- import random
-
- # IMPORTANT: Currently contents of this file are mostly duplicated from the outerbounds package.
- # This is purely due to the time rush of having to deliver this feature. As a fast forward, we
- # will reorganize things in a way that the amount of duplication in minimum.
-
-
- APP_READY_POLL_TIMEOUT_SECONDS = 300
- # Even after our backend validates that the app routes are ready, it takes a few seconds for
- # the app to be accessible via the browser. Till we hunt down this delay, add an extra buffer.
- APP_READY_EXTRA_BUFFER_SECONDS = 30
-
-
- def start_app(port=-1, name=""):
-     if len(name) == 0 or len(name) >= 20:
-         raise MetaflowException("App name should not be more than 20 characters long.")
-     elif not name.isalnum() or not name.islower():
-         raise MetaflowException(
-             "App name can only contain lowercase alphanumeric characters."
-         )
-
-     if "WORKSTATION_ID" not in os.environ:
-         raise MetaflowException(
-             "All outerbounds app commands can only be run from a workstation."
-         )
-
-     workstation_id = os.environ["WORKSTATION_ID"]
-
-     try:
-         try:
-             conf = init_config()
-             metaflow_token = conf["METAFLOW_SERVICE_AUTH_KEY"]
-             api_url = conf["OBP_API_SERVER"]
-
-             workstations_response = requests.get(
-                 f"https://{api_url}/v1/workstations",
-                 headers={"x-api-key": metaflow_token},
-             )
-             workstations_response.raise_for_status()
-         except:
-             raise MetaflowException("Failed to list workstations!")
-
-         workstations_json = workstations_response.json()["workstations"]
-         for workstation in workstations_json:
-             if workstation["instance_id"] == os.environ["WORKSTATION_ID"]:
-                 if "named_ports" in workstation["spec"]:
-                     try:
-                         ensure_app_start_request_is_valid(
-                             workstation["spec"]["named_ports"], port, name
-                         )
-                     except ValueError as e:
-                         raise MetaflowException(str(e))
-
-                     for named_port in workstation["spec"]["named_ports"]:
-                         if int(named_port["port"]) == port:
-                             if named_port["enabled"] and named_port["name"] == name:
-                                 print(f"App {name} started on port {port}!")
-                                 print(
-                                     f"Browser URL: https://{api_url.replace('api', 'ui')}/apps/{os.environ['WORKSTATION_ID']}/{name}/"
-                                 )
-                                 print(
-                                     f"API URL: https://{api_url}/apps/{os.environ['WORKSTATION_ID']}/{name}/"
-                                 )
-                                 return
-                             else:
-                                 try:
-                                     response = requests.put(
-                                         f"https://{api_url}/v1/workstations/update/{workstation_id}/namedports",
-                                         headers={"x-api-key": metaflow_token},
-                                         json={
-                                             "port": port,
-                                             "name": name,
-                                             "enabled": True,
-                                         },
-                                     )
-
-                                     response.raise_for_status()
-                                     poll_success = wait_for_app_port_to_be_accessible(
-                                         api_url,
-                                         metaflow_token,
-                                         workstation_id,
-                                         name,
-                                         APP_READY_POLL_TIMEOUT_SECONDS,
-                                     )
-                                     if poll_success:
-                                         print(f"App {name} started on port {port}!")
-                                         print(
-                                             f"Browser URL: https://{api_url.replace('api', 'ui')}/apps/{os.environ['WORKSTATION_ID']}/{name}/"
-                                         )
-                                         print(
-                                             f"API URL: https://{api_url}/apps/{os.environ['WORKSTATION_ID']}/{name}/"
-                                         )
-                                     else:
-                                         raise MetaflowException(
-                                             f"The app could not be deployed in {APP_READY_POLL_TIMEOUT_SECONDS / 60} minutes. Please try again later."
-                                         )
-                                 except Exception:
-                                     raise MetaflowException(
-                                         f"Failed to start app {name} on port {port}!"
-                                     )
-     except Exception as e:
-         raise MetaflowException(f"Failed to start app {name} on port {port}!")
-
-
- def ensure_app_start_request_is_valid(existing_named_ports, port: int, name: str):
-     existing_apps_by_port = {np["port"]: np for np in existing_named_ports}
-
-     if port not in existing_apps_by_port:
-         raise MetaflowException(f"Port {port} not found on workstation")
-
-     for existing_named_port in existing_named_ports:
-         if (
-             name == existing_named_port["name"]
-             and existing_named_port["port"] != port
-             and existing_named_port["enabled"]
-         ):
-             raise MetaflowException(
-                 f"App with name '{name}' is already deployed on port {existing_named_port['port']}"
-             )
-
-
- def wait_for_app_port_to_be_accessible(
-     api_url, metaflow_token, workstation_id, app_name, poll_timeout_seconds
- ) -> bool:
-     num_retries_per_request = 3
-     start_time = time.time()
-     retry_delay = 1.0
-     poll_interval = 10
-     wait_message = f"App {app_name} is currently being deployed..."
-     while time.time() - start_time < poll_timeout_seconds:
-         for _ in range(num_retries_per_request):
-             try:
-                 workstations_response = requests.get(
-                     f"https://{api_url}/v1/workstations",
-                     headers={"x-api-key": metaflow_token},
-                 )
-                 workstations_response.raise_for_status()
-                 if is_app_ready(workstations_response.json(), workstation_id, app_name):
-                     print(wait_message)
-                     time.sleep(APP_READY_EXTRA_BUFFER_SECONDS)
-                     return True
-                 else:
-                     print(wait_message)
-                     time.sleep(poll_interval)
-             except (
-                 requests.exceptions.ConnectionError,
-                 requests.exceptions.ReadTimeout,
-             ):
-                 time.sleep(retry_delay)
-                 retry_delay *= 2  # Double the delay for the next attempt
-                 retry_delay += random.uniform(0, 1)  # Add jitter
-                 retry_delay = min(retry_delay, 10)
-     return False
-
-
- def is_app_ready(response_json: dict, workstation_id: str, app_name: str) -> bool:
-     """Checks if the app is ready in the given workstation's response."""
-     workstations = response_json.get("workstations", [])
-     for workstation in workstations:
-         if workstation.get("instance_id") == workstation_id:
-             hosted_apps = workstation.get("status", {}).get("hosted_apps", [])
-             for hosted_app in hosted_apps:
-                 if hosted_app.get("name") == app_name:
-                     return bool(hosted_app.get("ready"))
-     return False
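The polling helper above (wait_for_app_port_to_be_accessible) retries transient HTTP failures with capped exponential backoff plus jitter while it waits for the app to report ready. For clarity, here is a standalone sketch of that retry pattern, detached from the workstation API; the helper name and parameters are illustrative rather than part of the package, and it catches any exception instead of only connection and read-timeout errors.

import random
import time


def call_with_backoff(fn, attempts=3, base_delay=1.0, max_delay=10.0):
    # Retry fn() on failure, doubling the delay each attempt and adding jitter,
    # mirroring the inner retry loop of wait_for_app_port_to_be_accessible above.
    delay = base_delay
    for attempt in range(attempts):
        try:
            return fn()
        except Exception:
            if attempt == attempts - 1:
                raise
            time.sleep(delay)
            delay = min(delay * 2 + random.uniform(0, 1), max_delay)


# Example (hypothetical): call_with_backoff(lambda: fetch_workstations(api_url, token))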
metaflow_extensions/outerbounds/plugins/apps/deploy_decorator.py (deleted)
@@ -1,155 +0,0 @@
- from metaflow.exception import MetaflowException
- from metaflow.decorators import StepDecorator
- from metaflow import current
- from .app_utils import start_app
- from .supervisord_utils import SupervisorClient, SupervisorClientException
- import os
- import random
- import string
- import tempfile
- import sys
-
- DEFAULT_WAIT_TIME_SECONDS_FOR_PROCESS_TO_START = 10
- BASE_DIR_FOR_APP_ASSETS = "/home/ob-workspace/.appdaemon/apps/"
-
-
- class WorkstationAppDeployDecorator(StepDecorator):
-     """
-     Specifies that this step is used to deploy an instance of the app.
-     Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
-
-     Parameters
-     ----------
-     app_port : int
-         Number of GPUs to use.
-     app_name : str
-         Name of the app to deploy.
-     """
-
-     name = "app_deploy"
-     defaults = {"app_port": 8080, "app_name": "app"}
-
-     def step_init(self, flow, graph, step, decos, environment, flow_datastore, logger):
-         if any([deco.name == "kubernetes" for deco in decos]):
-             raise MetaflowException(
-                 "Step *{step}* is marked for execution both on Kubernetes and "
-                 "Nvidia. Please use one or the other.".format(step=step)
-             )
-
-         # We always need to have some environment defined through the flow to deploy and app.
-         # Which means either step decorators like @pypi / @conda must be defined.
-         # or flow level decorators like @conda_base / @pypi_base.
-         if not any([deco.name == "pypi" or deco.name == "conda" for deco in decos]):
-             flow_decorators = flow._flow_decorators.keys()
-             if (
-                 "conda_base" not in flow_decorators
-                 and "pypi_base" not in flow_decorators
-             ):
-                 raise MetaflowException(
-                     "@app_deploy requires either step decorators like @pypi / @conda or flow level decorators like @conda_base / @pypi_base to be defined."
-                 )
-
-         app_port = self.attributes["app_port"]
-         app_name = self.attributes["app_name"]
-
-         # Currently this decorator is expected to only execute on workstation.
-         if app_port is None or app_port < 6000 or app_port > 6002:
-             raise MetaflowException(
-                 "AppDeployDecorator requires app_port to be between 6000 and 6002."
-             )
-
-         if app_name is None:
-             raise MetaflowException("AppDeployDecorator requires app_name to be set.")
-
-     def task_pre_step(
-         self,
-         step_name,
-         task_datastore,
-         metadata,
-         run_id,
-         task_id,
-         flow,
-         graph,
-         retry_count,
-         max_user_code_retries,
-         ubf_context,
-         inputs,
-     ):
-         os.makedirs(BASE_DIR_FOR_APP_ASSETS, exist_ok=True)
-         # First we want to create a directory where the user's app directory and artifacts can be stored.
-         with tempfile.TemporaryDirectory(
-             prefix=BASE_DIR_FOR_APP_ASSETS, delete=False
-         ) as temp_dir:
-             launch_temp_dir = temp_dir
-
-         # Expose this to the user, so that they can use it write their artifacts.
-         setattr(flow, "deploy_dir", launch_temp_dir)
-
-         # Make sure to record deploy_dir so that the user cannot accidentally override it.
-         self._deploy_dir = launch_temp_dir
-
-     def task_post_step(
-         self, step_name, flow, graph, retry_count, max_user_code_retries
-     ):
-         deploy_dir = self._deploy_dir
-
-         # By default we assume that the user has a __main__.py file in their app directory.
-         # They can always override this behavior.
-         user_provided_entrypoint = getattr(flow, "entrypoint", None)
-
-         if user_provided_entrypoint is not None and not isinstance(
-             user_provided_entrypoint, str
-         ):
-             raise MetaflowException(
-                 f"@app_deploy requires entrypoint to be set to a string. The current value of entrypoint {user_provided_entrypoint} is not valid."
-             )
-
-         flow_directory = os.path.dirname(os.path.abspath(sys.argv[0]))
-
-         # By default, we assume that the layout of the flow directory is:
-         # flow_dir/
-         #   - deployer_flow.py
-         #   - my_custom_app/
-         #       - __main__.py
-         #       - other_files
-         #       - other_dirs/
-         # This can be overridden by the user by setting the app_dir attribute.
-         # None of this matters if the user provides a custom entrypoint, since in that case we don't copy
-         # anything anywhere.
-         app_location = getattr(
-             flow, "app_dir", os.path.join(flow_directory, self.attributes["app_name"])
-         )
-
-         if user_provided_entrypoint is None and not os.path.exists(app_location):
-             raise MetaflowException(f"App directory {app_location} does not exist.")
-
-         wait_time_for_app_start = getattr(
-             flow,
-             "wait_time_for_app_start",
-             DEFAULT_WAIT_TIME_SECONDS_FOR_PROCESS_TO_START,
-         )
-
-         try:
-             supervisor_client = SupervisorClient(
-                 wait_time_seconds_for_app_start=wait_time_for_app_start
-             )
-
-             # First, let's deploy the app.
-             start_app(
-                 port=self.attributes["app_port"], name=self.attributes["app_name"]
-             )
-
-             # Now, let's add the app to supervisor.
-             supervisor_client.start_process_with_supervisord(
-                 self.attributes["app_name"],
-                 self.attributes["app_port"],
-                 user_provided_entrypoint,
-                 deploy_dir,
-                 app_location,
-             )
-         except SupervisorClientException as e:
-             raise MetaflowException(str(e))
-         except Exception as e:
-             raise MetaflowException(
-                 f"Failed to start {self.attributes['app_name']}! Cause: {str(e)}"
-             ) from e
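The comments in task_post_step above spell out the default conventions: the app package is expected at <flow_dir>/<app_name> (overridable via self.app_dir), and when no self.entrypoint is given the staged app is later launched with a "-m <app_port>" module entrypoint. A small standalone sketch of how those defaults are derived, with hypothetical names and values:

import os
import sys

# Illustrative only: mirrors the defaults encoded in the deleted decorator and in
# supervisord_utils.py below; "dashboard" and 6001 are made-up example values.
app_name = "dashboard"
app_port = 6001  # step_init above restricts this to the 6000-6002 range
flow_directory = os.path.dirname(os.path.abspath(sys.argv[0]))

default_app_location = os.path.join(flow_directory, app_name)  # expects dashboard/__main__.py
default_entrypoint = f"-m {app_port}"  # used only when the flow does not set self.entrypoint

print(default_app_location, default_entrypoint)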
metaflow_extensions/outerbounds/plugins/apps/supervisord_utils.py (deleted)
@@ -1,239 +0,0 @@
- import argparse
- import os
- import configparser
- import tempfile
- import sys
- import subprocess
- from pathlib import Path
- import shutil
- from enum import Enum
- import time
-
-
- class SupervisorClientException(Exception):
-     pass
-
-
- class SupervisorClient:
-     class SupervisodProcessCodes(Enum):
-         STOPPED = 0
-         STARTING = 10
-         RUNNING = 20
-         BACKOFF = 30
-         STOPPING = 40
-         EXITED = 100
-         FATAL = 200
-         UNKNOWN = 1000
-
-     def __init__(self, wait_time_seconds_for_app_start: int):
-         self.supervisor_conf_loc = os.environ.get("SUPERVISOR_CONF_PATH")
-
-         self.wait_time_seconds_for_app_start = wait_time_seconds_for_app_start
-         if self.supervisor_conf_loc is None or not os.path.exists(
-             self.supervisor_conf_loc
-         ):
-             raise SupervisorClientException(
-                 "This workstation does not support deploying apps! Please reach out to Outerbounds for support."
-             )
-
-         self.metaflow_envs_persistent_path = os.environ.get(
-             "SUPERVISOR_PYTHON_ENVS_PATH"
-         )
-         if self.metaflow_envs_persistent_path is None:
-             raise SupervisorClientException(
-                 "This workstation does not support deploying apps! Please reach out to Outerbounds for support."
-             )
-
-         # Check if supervisorctl is installed
-         if not shutil.which("supervisorctl"):
-             raise SupervisorClientException(
-                 "This workstation does not support deploying apps! Please reach out to Outerbounds for support."
-             )
-
-     def _stop_existing_app_at_port(self, app_port):
-         supervisor_config = configparser.ConfigParser()
-         supervisor_config.read(self.supervisor_conf_loc)
-
-         for program in supervisor_config.sections():
-             if "obp_app_port" in supervisor_config[program]:
-                 if supervisor_config[program]["obp_app_port"].strip() == str(app_port):
-                     res = subprocess.run(
-                         ["supervisorctl", "stop", program],
-                         stdout=subprocess.DEVNULL,
-                         stderr=subprocess.DEVNULL,
-                     )
-
-                     del supervisor_config[program]
-
-         with tempfile.NamedTemporaryFile(
-             "w", dir=os.path.dirname(self.supervisor_conf_loc), delete=False
-         ) as f:
-             supervisor_config.write(f)
-             tmp_file = f.name
-
-         os.rename(tmp_file, self.supervisor_conf_loc)
-
-     def start_process_with_supervisord(
-         self,
-         app_name,
-         app_port,
-         user_provided_entrypoint,
-         deploy_dir=None,
-         app_dir=None,
-     ):
-         """
-         Add a new program entry to supervisor configuration.
-
-         Args:
-             app_name: The name of the app to start.
-             entrypoint: The entrypoint to start the app with.
-             directory: The directory to run the app in.
-             deploy_dir: The directory to copy the app to and deploy from.
-             app_dir: The directory to copy the app from.
-         """
-
-         entrypoint = user_provided_entrypoint
-         deploy_dir_for_port = "/home/ob-workspace/.appdaemon/apps/6000"
-         launch_directory = (
-             "/home/ob-workspace/.appdaemon/apps"
-             if entrypoint is None
-             else "/home/ob-workspace/.appdaemon"
-         )
-
-         # Step 1: Stop any existing apps that are running on the same port.
-         self._stop_existing_app_at_port(app_port)
-
-         if user_provided_entrypoint is None:
-             # Step 2: Copy the app_dir to the deploy_dir.
-             recursive_copy(app_dir, deploy_dir)
-
-             # Step 3: Copy the entire deploy_dir to the port specific directory.
-             if os.path.exists(deploy_dir_for_port):
-                 shutil.rmtree(deploy_dir_for_port)
-
-             os.makedirs(deploy_dir_for_port)
-             recursive_copy(deploy_dir, deploy_dir_for_port)
-
-             # Apply default value
-             entrypoint = f"-m {str(app_port)}"
-
-         shutil.rmtree(deploy_dir)
-
-         persistent_path_for_executable = (
-             self.persist_metaflow_generated_python_environment()
-         )
-
-         command = f"{persistent_path_for_executable} {entrypoint}"
-
-         entry = {
-             "command": command,
-             "directory": launch_directory,
-             "autostart": "true",
-             "autorestart": "true",
-             "obp_app_port": app_port,  # Record the app port for internal reference. This is not used by supervisor.
-         }
-
-         supervisor_config = configparser.ConfigParser()
-         supervisor_config.read(self.supervisor_conf_loc)
-
-         supervisor_config[f"program:{app_name}"] = entry
-
-         with tempfile.NamedTemporaryFile(
-             "w", dir=os.path.dirname(self.supervisor_conf_loc), delete=False
-         ) as f:
-             supervisor_config.write(f)
-             tmp_file = f.name
-
-         os.rename(tmp_file, self.supervisor_conf_loc)
-
-         # Execute supervisorctl reload
-         # Capture the exit code
-         exit_code = subprocess.run(
-             ["supervisorctl", "reload"],
-             stdout=subprocess.DEVNULL,
-             stderr=subprocess.DEVNULL,
-         ).returncode
-         if exit_code != 0:
-             print("Failed to reload supervisor configuration!", file=sys.stderr)
-             return
-
-         print(
-             f"Waiting for {self.wait_time_seconds_for_app_start} seconds for {app_name} to start..."
-         )
-         time.sleep(self.wait_time_seconds_for_app_start)
-         status = self._get_launched_prcoess_status(app_name)
-
-         if status not in [
-             self.SupervisodProcessCodes.RUNNING,
-             self.SupervisodProcessCodes.STARTING,
-         ]:
-             raise SupervisorClientException(
-                 f"Failed to start {app_name}! Try running {command} manually to debug."
-             )
-
-     def _get_launched_prcoess_status(self, app_name):
-         status_cmd_output = subprocess.run(
-             ["supervisorctl", "status", app_name],
-             stdout=subprocess.PIPE,
-             stderr=subprocess.PIPE,
-         ).stdout.decode("utf-8")
-
-         status_cmd_output_parts = [
-             x.strip() for x in status_cmd_output.split(" ") if x.strip()
-         ]
-
-         status_str = status_cmd_output_parts[1]
-
-         if status_str == "RUNNING":
-             return self.SupervisodProcessCodes.RUNNING
-         elif status_str == "STOPPED":
-             return self.SupervisodProcessCodes.STOPPED
-         elif status_str == "STARTING":
-             return self.SupervisodProcessCodes.STARTING
-         elif status_str == "BACKOFF":
-             return self.SupervisodProcessCodes.BACKOFF
-         elif status_str == "STOPPING":
-             return self.SupervisodProcessCodes.STOPPING
-         elif status_str == "EXITED":
-             return self.SupervisodProcessCodes.EXITED
-         elif status_str == "FATAL":
-             return self.SupervisodProcessCodes.FATAL
-         else:
-             return self.SupervisodProcessCodes.UNKNOWN
-
-     # By default, an environment generated by metaflow will end up in a path like: /root/micromamba/envs/metaflow/linux-64/02699a4d2d50cfc/bin/python
-     # However, on a workstation these environments are not persisted, so we need to copy them over to /home/ob-workspace
-     def persist_metaflow_generated_python_environment(self):
-         current_executable = sys.executable
-         environment_path = Path(current_executable).parent.parent
-
-         persistent_path_for_this_environment = os.path.join(
-             self.metaflow_envs_persistent_path,
-             environment_path.parent.name,
-             environment_path.name,
-         )
-
-         final_executable_path = os.path.join(
-             persistent_path_for_this_environment,
-             Path(current_executable).parent.name,
-             Path(current_executable).name,
-         )
-
-         if os.path.exists(final_executable_path):
-             return final_executable_path
-
-         os.makedirs(persistent_path_for_this_environment, exist_ok=True)
-
-         recursive_copy(environment_path, persistent_path_for_this_environment)
-
-         return final_executable_path
-
-
- def recursive_copy(src, dst):
-     for item in os.listdir(src):
-         s = os.path.join(src, item)
-         d = os.path.join(dst, item)
-         if os.path.isdir(s):
-             shutil.copytree(s, d, dirs_exist_ok=True)
-         else:
-             shutil.copy2(s, d)
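start_process_with_supervisord above registers the app by appending a [program:<app_name>] section to the supervisord config and reloading it. Below is a minimal sketch of the kind of section it writes, reconstructed from the entry dict in the deleted code; the app name, port, and interpreter path are hypothetical (the real command points at the Python environment persisted by persist_metaflow_generated_python_environment), and obp_app_port is Outerbounds bookkeeping that supervisord itself ignores.

import configparser
import sys

config = configparser.ConfigParser()
config["program:dashboard"] = {
    # persisted interpreter + default "-m <port>" entrypoint (sys.executable is a stand-in here)
    "command": f"{sys.executable} -m 6001",
    "directory": "/home/ob-workspace/.appdaemon/apps",
    "autostart": "true",
    "autorestart": "true",
    "obp_app_port": "6001",
}
config.write(sys.stdout)  # prints the [program:dashboard] section supervisord would pick up after reload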