ob-metaflow-extensions 1.1.170__py2.py3-none-any.whl → 1.4.35__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- metaflow_extensions/outerbounds/plugins/__init__.py +6 -2
- metaflow_extensions/outerbounds/plugins/apps/app_cli.py +0 -0
- metaflow_extensions/outerbounds/plugins/apps/app_deploy_decorator.py +146 -0
- metaflow_extensions/outerbounds/plugins/apps/core/__init__.py +10 -0
- metaflow_extensions/outerbounds/plugins/apps/core/_state_machine.py +506 -0
- metaflow_extensions/outerbounds/plugins/apps/core/_vendor/__init__.py +0 -0
- metaflow_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.py +4 -0
- metaflow_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.py +478 -0
- metaflow_extensions/outerbounds/plugins/apps/core/app_cli.py +1200 -0
- metaflow_extensions/outerbounds/plugins/apps/core/app_config.py +146 -0
- metaflow_extensions/outerbounds/plugins/apps/core/artifacts.py +0 -0
- metaflow_extensions/outerbounds/plugins/apps/core/capsule.py +958 -0
- metaflow_extensions/outerbounds/plugins/apps/core/click_importer.py +24 -0
- metaflow_extensions/outerbounds/plugins/apps/core/code_package/__init__.py +3 -0
- metaflow_extensions/outerbounds/plugins/apps/core/code_package/code_packager.py +618 -0
- metaflow_extensions/outerbounds/plugins/apps/core/code_package/examples.py +125 -0
- metaflow_extensions/outerbounds/plugins/apps/core/config/__init__.py +12 -0
- metaflow_extensions/outerbounds/plugins/apps/core/config/cli_generator.py +161 -0
- metaflow_extensions/outerbounds/plugins/apps/core/config/config_utils.py +868 -0
- metaflow_extensions/outerbounds/plugins/apps/core/config/schema_export.py +288 -0
- metaflow_extensions/outerbounds/plugins/apps/core/config/typed_configs.py +139 -0
- metaflow_extensions/outerbounds/plugins/apps/core/config/typed_init_generator.py +398 -0
- metaflow_extensions/outerbounds/plugins/apps/core/config/unified_config.py +1088 -0
- metaflow_extensions/outerbounds/plugins/apps/core/config_schema.yaml +337 -0
- metaflow_extensions/outerbounds/plugins/apps/core/dependencies.py +115 -0
- metaflow_extensions/outerbounds/plugins/apps/core/deployer.py +303 -0
- metaflow_extensions/outerbounds/plugins/apps/core/experimental/__init__.py +89 -0
- metaflow_extensions/outerbounds/plugins/apps/core/perimeters.py +87 -0
- metaflow_extensions/outerbounds/plugins/apps/core/secrets.py +164 -0
- metaflow_extensions/outerbounds/plugins/apps/core/utils.py +233 -0
- metaflow_extensions/outerbounds/plugins/apps/core/validations.py +17 -0
- metaflow_extensions/outerbounds/plugins/aws/assume_role_decorator.py +25 -12
- metaflow_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.py +9 -77
- metaflow_extensions/outerbounds/plugins/checkpoint_datastores/external_chckpt.py +85 -0
- metaflow_extensions/outerbounds/plugins/checkpoint_datastores/nebius.py +7 -78
- metaflow_extensions/outerbounds/plugins/fast_bakery/baker.py +110 -0
- metaflow_extensions/outerbounds/plugins/fast_bakery/docker_environment.py +6 -2
- metaflow_extensions/outerbounds/plugins/fast_bakery/fast_bakery.py +1 -0
- metaflow_extensions/outerbounds/plugins/nvct/nvct_decorator.py +8 -8
- metaflow_extensions/outerbounds/plugins/optuna/__init__.py +48 -0
- metaflow_extensions/outerbounds/plugins/profilers/simple_card_decorator.py +96 -0
- metaflow_extensions/outerbounds/plugins/s3_proxy/__init__.py +7 -0
- metaflow_extensions/outerbounds/plugins/s3_proxy/binary_caller.py +132 -0
- metaflow_extensions/outerbounds/plugins/s3_proxy/constants.py +11 -0
- metaflow_extensions/outerbounds/plugins/s3_proxy/exceptions.py +13 -0
- metaflow_extensions/outerbounds/plugins/s3_proxy/proxy_bootstrap.py +59 -0
- metaflow_extensions/outerbounds/plugins/s3_proxy/s3_proxy_api.py +93 -0
- metaflow_extensions/outerbounds/plugins/s3_proxy/s3_proxy_decorator.py +250 -0
- metaflow_extensions/outerbounds/plugins/s3_proxy/s3_proxy_manager.py +225 -0
- metaflow_extensions/outerbounds/plugins/snowpark/snowpark_client.py +6 -3
- metaflow_extensions/outerbounds/plugins/snowpark/snowpark_decorator.py +13 -7
- metaflow_extensions/outerbounds/plugins/snowpark/snowpark_job.py +8 -2
- metaflow_extensions/outerbounds/plugins/torchtune/__init__.py +4 -0
- metaflow_extensions/outerbounds/plugins/vllm/__init__.py +173 -95
- metaflow_extensions/outerbounds/plugins/vllm/status_card.py +9 -9
- metaflow_extensions/outerbounds/plugins/vllm/vllm_manager.py +159 -9
- metaflow_extensions/outerbounds/remote_config.py +8 -3
- metaflow_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.py +63 -1
- metaflow_extensions/outerbounds/toplevel/ob_internal.py +3 -0
- metaflow_extensions/outerbounds/toplevel/plugins/optuna/__init__.py +1 -0
- metaflow_extensions/outerbounds/toplevel/s3_proxy.py +88 -0
- {ob_metaflow_extensions-1.1.170.dist-info → ob_metaflow_extensions-1.4.35.dist-info}/METADATA +2 -2
- {ob_metaflow_extensions-1.1.170.dist-info → ob_metaflow_extensions-1.4.35.dist-info}/RECORD +65 -21
- {ob_metaflow_extensions-1.1.170.dist-info → ob_metaflow_extensions-1.4.35.dist-info}/WHEEL +0 -0
- {ob_metaflow_extensions-1.1.170.dist-info → ob_metaflow_extensions-1.4.35.dist-info}/top_level.txt +0 -0
metaflow_extensions/outerbounds/plugins/s3_proxy/s3_proxy_decorator.py
@@ -0,0 +1,250 @@
+import os
+import functools
+import json
+import signal
+import time
+from typing import Dict, List, Optional, Tuple
+
+from metaflow import current
+from metaflow.decorators import StepDecorator
+from .exceptions import S3ProxyException
+from .constants import S3_PROXY_WRITE_MODES
+from collections import namedtuple
+
+S3ProxyBinaryConfig = namedtuple(
+    "S3ProxyBinaryConfig", ["integration_name", "write_mode", "debug"]
+)
+
+
+def monkey_patch_environment(
+    environment, step_name_and_deco_attrs: Dict[str, S3ProxyBinaryConfig]
+):
+    wrapping_func = environment.bootstrap_commands
+
+    @functools.wraps(wrapping_func)
+    def wrapper(step_name, ds_type, *args):
+        base_bootstrap_cmd = wrapping_func(step_name, ds_type, *args)
+        additional_cmd = []
+
+        if step_name in step_name_and_deco_attrs:
+            integration_name = step_name_and_deco_attrs[step_name].integration_name
+            write_mode = step_name_and_deco_attrs[step_name].write_mode
+            debug = step_name_and_deco_attrs[step_name].debug
+            additional_cmd = [
+                "echo 'Setting up the S3 proxy.'",
+                f"python -m metaflow_extensions.outerbounds.plugins.s3_proxy.proxy_bootstrap bootstrap --integration-name {integration_name} --write-mode {write_mode} --debug {debug} --uc-proxy-cfg-write-path ./.uc_proxy_cfg_file --proxy-status-write-path ./.proxy_status_file",
+                "export METAFLOW_S3_PROXY_USER_CODE_CONFIG=$(cat ./.uc_proxy_cfg_file)",
+                "export METAFLOW_S3_PROXY_STATUS=$(cat ./.proxy_status_file)",
+                "export METAFLOW_S3_PROXY_SETUP_SUCCESS=True",
+                "flush_mflogs",
+            ]
+        return base_bootstrap_cmd + additional_cmd
+
+    environment.bootstrap_commands = wrapper
+
+
+class S3ProxyDecorator(StepDecorator):
+    """
+    Set up an S3 proxy that caches objects in an external, S3-compatible bucket
+    for S3 read and write requests.
+
+    This decorator requires an integration in the Outerbounds platform that
+    points to an external bucket. It affects S3 operations performed via
+    Metaflow's `get_aws_client` and `S3` within a `@step`.
+
+    Read operations
+    ---------------
+    All read operations pass through the proxy. If an object does not already
+    exist in the external bucket, it is cached there. For example, if code reads
+    from buckets `FOO` and `BAR` using the `S3` interface, objects from both
+    buckets are cached in the external bucket.
+
+    During task execution, all S3-related read requests are routed through the
+    proxy:
+    - If the object is present in the external object store, the proxy
+      streams it directly from there without accessing the requested origin
+      bucket.
+    - If the object is not present in the external storage, the proxy
+      fetches it from the requested bucket, caches it in the external
+      storage, and streams the response from the origin bucket.
+
+    Warning
+    -------
+    All READ operations (e.g., GetObject, HeadObject) pass through the external
+    bucket regardless of the bucket specified in user code. Even
+    `S3(run=self)` and `S3(s3root="mybucketfoo")` requests go through the
+    external bucket cache.
+
+    Write operations
+    ----------------
+    Write behavior is controlled by the `write_mode` parameter, which determines
+    whether writes also persist objects in the cache.
+
+    `write_mode` values:
+    - `origin-and-cache`: objects are written both to the cache and to their
+      intended origin bucket.
+    - `origin`: objects are written only to their intended origin bucket.
+
+    Parameters
+    ----------
+    integration_name : str, optional
+        [Outerbounds integration name](https://docs.outerbounds.com/outerbounds/configuring-secrets/#integrations-view)
+        that holds the configuration for the external, S3-compatible object
+        storage bucket. If not specified, the only available S3 proxy
+        integration in the namespace is used (fails if multiple exist).
+    write_mode : str, optional
+        Controls whether writes also go to the external bucket.
+        - `origin` (default)
+        - `origin-and-cache`
+    debug : bool, optional
+        Enables debug logging for proxy operations.
+    """
+
+    name = "s3_proxy"
+    defaults = {
+        "integration_name": None,
+        "write_mode": None,
+        "debug": False,
+    }
+
+    _environment_patched = False
+
+    _proxy_status = None
+
+    @classmethod
+    def patch_environment(cls, flow, environment):
+        """
+        Patch the environment bootstrap command so that we can launch the
+        S3 proxy before the step code executes. We also want to run the
+        proxy bootstrap only for the steps that have the decorator set,
+        which is why we pass down the names of all the steps whose
+        bootstrap commands need to change.
+        """
+        if cls._environment_patched:
+            return
+
+        steps_with_s3_proxy = [
+            step
+            for step in flow
+            if any(deco.name == "s3_proxy" for deco in step.decorators)
+        ]
+        if len(steps_with_s3_proxy) == 0:  # no step uses the decorator; nothing to patch
+            return
+
+        step_names_and_deco_attrs = {}
+        for s in steps_with_s3_proxy:
+            _decos = [x for x in s.decorators if x.name == "s3_proxy"]
+            deco = _decos[0]
+            step_names_and_deco_attrs[s.name] = S3ProxyBinaryConfig(
+                integration_name=deco.attributes["integration_name"],
+                write_mode=deco.attributes["write_mode"],
+                debug=deco.attributes["debug"],
+            )
+
+        monkey_patch_environment(environment, step_names_and_deco_attrs)
+        cls._environment_patched = True
+
+    def step_init(self, flow, graph, step, decos, environment, flow_datastore, logger):
+        write_mode = self.attributes["write_mode"]
+        if write_mode and write_mode not in S3_PROXY_WRITE_MODES:
+            raise S3ProxyException(
+                f"unexpected write_mode specified: {write_mode}. Allowed values are: {','.join(S3_PROXY_WRITE_MODES)}."
+            )
+
+        self.patch_environment(flow, environment)
+        if (
+            os.environ.get("METAFLOW_S3_PROXY_USER_CODE_CONFIG")
+            and os.environ.get("METAFLOW_S3_PROXY_STATUS")
+            and self.attributes["debug"]
+        ):
+            print("[@s3_proxy] S3 Proxy detected. Debug mode is enabled.")
+
+        if os.environ.get("METAFLOW_S3_PROXY_STATUS"):
+            proxy_status = json.loads(os.environ.get("METAFLOW_S3_PROXY_STATUS"))
+            self._proxy_status = proxy_status
+
+    def task_pre_step(
+        self,
+        step_name,
+        task_datastore,
+        metadata,
+        run_id,
+        task_id,
+        flow,
+        graph,
+        retry_count,
+        max_user_code_retries,
+        ubf_context,
+        inputs,
+    ):
+        """Setup S3 proxy before step execution"""
+        pass
+
+    def task_finished(
+        self, step_name, flow, graph, is_task_ok, retry_count, max_retries
+    ):
+        if not self._proxy_status:
+            return
+
+        status = self._proxy_status
+        proxy_pid = status.get("proxy_pid")
+        config_path = status.get("config_path")
+        binary_path = status.get("binary_path")
+
+        # 1) Stop processes: try to terminate the process group for clean child shutdown
+        if proxy_pid:
+            try:
+                pgid = os.getpgid(proxy_pid)
+                os.killpg(pgid, signal.SIGTERM)
+                time.sleep(1)
+            except Exception:
+                # Fall back to killing the pid directly if pgid is unavailable
+                try:
+                    os.kill(proxy_pid, signal.SIGTERM)
+                except Exception:
+                    pass
+
+        # 2) Clear files based on status
+        for path in (config_path, binary_path):
+            try:
+                if path and os.path.exists(path):
+                    os.remove(path)
+            except Exception:
+                pass
+
+
+class NebiusS3ProxyDecorator(S3ProxyDecorator):
+
+    __doc__ = (
+        """
+`@nebius_s3_proxy` is a Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
+It exists to make it easier for users to know that this decorator should only be used with
+a Neo Cloud like Nebius. The underlying mechanics of the decorator are the same as `@s3_proxy`:\n
+    """
+        + S3ProxyDecorator.__doc__
+    )
+
+    name = "nebius_s3_proxy"
+    defaults = {
+        "integration_name": None,
+        "write_mode": None,
+        "debug": False,
+    }
+
+
+class CoreWeaveS3ProxyDecorator(S3ProxyDecorator):
+    __doc__ = (
+        """
+`@coreweave_s3_proxy` is a CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
+It exists to make it easier for users to know that this decorator should only be used with
+a Neo Cloud like CoreWeave. The underlying mechanics of the decorator are the same as `@s3_proxy`:\n
+    """
+        + S3ProxyDecorator.__doc__
+    )
+
+    name = "coreweave_s3_proxy"
+    defaults = {
+        "integration_name": None,
+        "write_mode": None,
+        "debug": False,
+    }
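For orientation, here is a minimal usage sketch of the decorator documented above. The flow, step, bucket, and integration names are hypothetical, and it assumes the decorator is exposed through Metaflow's top-level namespace, as registered step decorators typically are:

```python
# A minimal sketch, assuming an Outerbounds deployment with an S3 proxy
# integration named "my-proxy-integration" (hypothetical). Reads through the
# `S3` client are served from the external cache; with this write_mode,
# writes land in both the cache and the origin bucket.
from metaflow import FlowSpec, S3, s3_proxy, step


class CachedS3Flow(FlowSpec):
    @s3_proxy(integration_name="my-proxy-integration", write_mode="origin-and-cache")
    @step
    def start(self):
        # hypothetical bucket/key; the read is routed through the proxy
        with S3(s3root="s3://my-origin-bucket/data") as s3:
            print(len(s3.get("inputs/train.csv").blob))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CachedS3Flow()
```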
metaflow_extensions/outerbounds/plugins/s3_proxy/s3_proxy_manager.py
@@ -0,0 +1,225 @@
+import os
+import json
+import gzip
+import sys
+import time
+import threading
+import subprocess
+from pathlib import Path
+from typing import Optional, Tuple
+
+import requests
+
+from .constants import (
+    S3_PROXY_BINARY_URLS,
+    DEFAULT_PROXY_PORT,
+    DEFAULT_PROXY_HOST,
+)
+from metaflow.metaflow_config import AWS_SECRETS_MANAGER_DEFAULT_REGION
+from .s3_proxy_api import S3ProxyApiClient
+from .exceptions import S3ProxyException
+
+
+class S3ProxyManager:
+    def __init__(
+        self,
+        integration_name: Optional[str] = None,
+        write_mode: Optional[str] = None,
+        debug: bool = False,
+    ):
+        self.integration_name = integration_name
+        self.write_mode = write_mode
+        self.debug = debug
+        self.process = None
+        self.binary_path = None
+        self.config_path = None
+        self.api_client = S3ProxyApiClient()
+        self.proxy_config = None
+
+    def setup_proxy(self) -> Tuple[dict, int, str, str]:
+        try:
+            if self._is_running_in_kubernetes():
+                config_data = self.api_client.fetch_s3_proxy_config(
+                    self.integration_name
+                )
+                self.binary_path = self._download_binary()
+                self.config_path = self._write_config_file(config_data)
+                # The binary is now invoked before the Metaflow step code even
+                # starts executing, which implies a few important things:
+                # 1. We start the actual proxy process via another Python file
+                #    that safely ships logs to mflog.
+                # 2. We pass back the right values to the Metaflow step process via env vars.
+                # 3. Metaflow step code relies on env vars to decide if clients need the S3 proxy in them.
+                self.process = self._start_proxy_process()
+
+                user_code_proxy_config = self._setup_proxy_config(config_data)
+
+                return_tuple = (
+                    user_code_proxy_config,  # the config that will be used within the Metaflow `step` code.
+                    self.process.pid,  # the pid of the process that jump-starts, monitors, and ships logs to MFLOG for the proxy process.
+                    self.config_path,  # the path to the config derived from the integration; it contains the actual bucket path and name where external objects are stored.
+                    self.binary_path,  # the path to the binary for the proxy.
+                )
+                # We return a tuple because these values need to be passed down to the Metaflow step process where
+                # it will handle their removal gracefully after the step is finished.
+                return return_tuple
+
+            print(
+                "[@s3_proxy] skipping s3-proxy set up because metaflow has not detected a Kubernetes environment"
+            )
+            raise S3ProxyException(
+                "S3 proxy setup failed because metaflow has not detected a Kubernetes environment"
+            )
+        except Exception as e:
+            if self.debug:
+                print(f"[@s3_proxy] Setup failed: {e}")
+            self.cleanup()
+            raise
+
+    def _is_running_in_kubernetes(self) -> bool:
+        """Check if running inside a Kubernetes pod by checking for the Kubernetes service account token."""
+        return (
+            os.path.exists("/var/run/secrets/kubernetes.io/serviceaccount/token")
+            and os.environ.get("KUBERNETES_SERVICE_HOST") is not None
+        )
+
+    def _download_binary(self) -> str:
+        binary_path = Path("/tmp/s3-proxy")
+        if binary_path.exists():
+            if self.debug:
+                print("[@s3_proxy] Binary already exists, skipping download")
+            return str(binary_path.absolute())
+
+        try:
+            if self.debug:
+                print("[@s3_proxy] Downloading binary...")
+
+            from platform import machine
+
+            arch = machine()
+            if arch not in S3_PROXY_BINARY_URLS:
+                raise S3ProxyException(
+                    f"unsupported platform architecture: {arch}. Please reach out to your Outerbounds support team for more help."
+                )
+
+            response = requests.get(S3_PROXY_BINARY_URLS[arch], stream=True, timeout=60)
+            response.raise_for_status()
+
+            with open(binary_path, "wb") as f:
+                with gzip.GzipFile(fileobj=response.raw) as gz:
+                    f.write(gz.read())
+
+            binary_path.chmod(0o755)
+
+            if self.debug:
+                print("[@s3_proxy] Binary downloaded successfully")
+
+            return str(binary_path.absolute())
+
+        except Exception as e:
+            if self.debug:
+                print(f"[@s3_proxy] Binary download failed: {e}")
+            raise S3ProxyException(f"Failed to download S3 proxy binary: {e}")
+
+    def _write_config_file(self, config_data) -> str:
+        config_path = Path("/tmp/s3-proxy-config.json")
+
+        proxy_config = {
+            "bucketName": config_data.bucket_name,
+            "endpointUrl": config_data.endpoint_url,
+            "accessKeyId": config_data.access_key_id,
+            "accessKeySecret": config_data.secret_access_key,
+            "region": config_data.region,
+        }
+
+        config_path.write_text(json.dumps(proxy_config, indent=2))
+
+        if self.debug:
+            print(f"[@s3_proxy] Config written to {config_path}")
+
+        return str(config_path.absolute())
+
+    def _start_proxy_process(self) -> subprocess.Popen:
+        # This command jump-starts a process that will then call the proxy binary.
+        # We do it this way because all of this needs to run before even the
+        # `step` command is called, so we need a Python process that ships the
+        # logs of the proxy process to MFLOG instead of relying on print
+        # statements. The process must run independently since the S3ProxyManager
+        # is called during proxy bootstrap, which exits after jump-starting the proxy process.
+        cmd = [self.binary_path, "--bucket-config", self.config_path, "serve"]
+        _env = os.environ.copy()
+        _env["S3_PROXY_BINARY_COMMAND"] = " ".join(cmd)
+        if self.debug:
+            _env["S3_PROXY_BINARY_DEBUG"] = "True"
+        _cmd = [
+            sys.executable,
+            "-m",
+            "metaflow_extensions.outerbounds.plugins.s3_proxy.binary_caller",
+        ]
+        devnull = subprocess.DEVNULL
+        process = subprocess.Popen(
+            _cmd,
+            stdout=devnull,
+            stderr=devnull,
+            text=True,
+            start_new_session=True,
+            env=_env,
+        )
+        time.sleep(3)
+
+        if process.poll() is None:
+            if self.debug:
+                print(f"[@s3_proxy] Proxy started successfully (pid: {process.pid})")
+
+            return process
+        else:
+            returncode = process.poll()  # stdout/stderr go to DEVNULL, so only the exit code is available here
+            if self.debug:
+                print(f"[@s3_proxy] Proxy failed to start (exit code: {returncode})")
+            raise S3ProxyException(f"S3 proxy failed to start (exit code: {returncode})")
+
+    def _setup_proxy_config(self, config_data):
+        from metaflow.metaflow_config import AWS_SECRETS_MANAGER_DEFAULT_REGION
+
+        region = os.environ.get(
+            "METAFLOW_AWS_SECRETS_MANAGER_DEFAULT_REGION",
+            AWS_SECRETS_MANAGER_DEFAULT_REGION,
+        )
+
+        proxy_config = {
+            "endpoint_url": f"http://{DEFAULT_PROXY_HOST}:{DEFAULT_PROXY_PORT}",
+            "region": region,
+            "bucket_name": config_data.bucket_name,
+            "active": True,
+        }
+
+        if self.write_mode:
+            proxy_config["write_mode"] = self.write_mode
+
+        self.proxy_config = proxy_config
+        return proxy_config
+
+    def cleanup(self):
+        try:
+            from metaflow_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import (
+                clear_s3_proxy_config,
+            )
+
+            clear_s3_proxy_config()
+
+            if self.process and self.process.poll() is None:
+                self.process.terminate()
+                self.process.wait(timeout=5)
+                if self.debug:
+                    print("[@s3_proxy] Proxy process stopped")
+
+            from os import remove
+
+            remove(self.config_path)
+            remove(self.binary_path)
+
+        except Exception as e:
+            if self.debug:
+                print(f"[@s3_proxy] Cleanup error: {e}")
+        finally:
+            self.proxy_config = None
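To tie the two files together: the bootstrap commands added in `s3_proxy_decorator.py` export the values produced by `setup_proxy` as environment variables, which the step process then parses. A sketch of that consumption side, with payload shapes inferred from `_setup_proxy_config` and the `setup_proxy` return tuple (the example values are illustrative):

```python
# Sketch: recovering the proxy settings exported by the bootstrap commands.
# Key names follow _setup_proxy_config() and task_finished() above; the
# example values are illustrative.
import json
import os

user_cfg = os.environ.get("METAFLOW_S3_PROXY_USER_CODE_CONFIG")
status = os.environ.get("METAFLOW_S3_PROXY_STATUS")

if user_cfg and status:
    proxy_cfg = json.loads(user_cfg)   # {"endpoint_url": "http://...", "region": ..., "bucket_name": ..., "active": True}
    proxy_status = json.loads(status)  # {"proxy_pid": ..., "config_path": ..., "binary_path": ...}
    # S3 clients are pointed at proxy_cfg["endpoint_url"] instead of the real
    # endpoint; task_finished() later uses proxy_status to terminate the proxy
    # process group and delete the config and binary files.
    print(proxy_cfg["endpoint_url"], proxy_status["proxy_pid"])
```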
metaflow_extensions/outerbounds/plugins/snowpark/snowpark_client.py
@@ -27,9 +27,12 @@ class SnowparkClient(object):
         except (NameError, ImportError, ModuleNotFoundError):
             raise SnowflakeException(
                 "Could not import module 'snowflake'.\n\nInstall Snowflake "
-                "Python
-                "
-                "
+                "Python packages first:\n"
+                " snowflake==1.8.0\n"
+                " snowflake-connector-python==3.18.0\n"
+                " snowflake-snowpark-python==1.40.0\n\n"
+                "You can install them by executing:\n"
+                "%s -m pip install snowflake==1.8.0 snowflake-connector-python==3.18.0 snowflake-snowpark-python==1.40.0\n"
                 "or equivalent through your favorite Python package manager."
                 % sys.executable
             )
metaflow_extensions/outerbounds/plugins/snowpark/snowpark_decorator.py
@@ -42,10 +42,13 @@ class Snowflake(object):
             return session
         except (NameError, ImportError, ModuleNotFoundError):
             raise SnowflakeException(
-                "Could not import module 'snowflake'.\n\
-                "
-                "
-                "
+                "Could not import module 'snowflake'.\n\n"
+                "Install required Snowflake packages using the @pypi decorator:\n"
+                "@pypi(packages={\n"
+                " 'snowflake': '1.8.0',\n"
+                " 'snowflake-connector-python': '3.18.0',\n"
+                " 'snowflake-snowpark-python': '1.40.0'\n"
+                "})\n"
             )


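The rewritten message doubles as a usage hint; here is a sketch of the suggested remedy applied to a step (flow and step names are illustrative):

```python
# Sketch of the fix the new error message suggests: pin the Snowflake
# packages on the step with @pypi so the import succeeds at runtime.
from metaflow import FlowSpec, pypi, step


class SnowparkFlow(FlowSpec):
    @pypi(packages={
        "snowflake": "1.8.0",
        "snowflake-connector-python": "3.18.0",
        "snowflake-snowpark-python": "1.40.0",
    })
    @step
    def start(self):
        import snowflake.snowpark  # resolvable once @pypi installs the pins
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SnowparkFlow()
```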
@@ -143,9 +146,12 @@ class SnowparkDecorator(StepDecorator):
         except (NameError, ImportError, ModuleNotFoundError):
             raise SnowflakeException(
                 "Could not import module 'snowflake'.\n\nInstall Snowflake "
-                "Python
-                "
-                "
+                "Python packages first:\n"
+                " snowflake==1.8.0\n"
+                " snowflake-connector-python==3.18.0\n"
+                " snowflake-snowpark-python==1.40.0\n\n"
+                "You can install them by executing:\n"
+                "%s -m pip install snowflake==1.8.0 snowflake-connector-python==3.18.0 snowflake-snowpark-python==1.40.0\n"
                 "or equivalent through your favorite Python package manager."
                 % sys.executable
             )
metaflow_extensions/outerbounds/plugins/snowpark/snowpark_job.py
@@ -199,11 +199,17 @@ class RunningJob(object):

     @property
     def status(self):
-
+        status_list = self.status_obj()
+        if not status_list:
+            return "UNKNOWN"
+        return status_list[0].get("status", "UNKNOWN")

     @property
     def message(self):
-
+        status_list = self.status_obj()
+        if not status_list:
+            return None
+        return status_list[0].get("message")

     @property
     def is_waiting(self):
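The guards matter because `status_obj()` can evidently return an empty list, for example before the job has materialized; a small behavior sketch, with payload shapes inferred from the accessors above:

```python
# Behavior sketch for the guarded `status` property, assuming status_obj()
# yields a list of dicts as the accessors above imply. Payloads are illustrative.
class FakeJob:
    def __init__(self, payload):
        self._payload = payload

    def status_obj(self):
        return self._payload

    @property
    def status(self):
        status_list = self.status_obj()
        if not status_list:
            return "UNKNOWN"
        return status_list[0].get("status", "UNKNOWN")


print(FakeJob([]).status)                       # UNKNOWN: an empty list no longer blows up
print(FakeJob([{"status": "RUNNING"}]).status)  # RUNNING
print(FakeJob([{"message": "queued"}]).status)  # UNKNOWN: a missing key is also handled
```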
metaflow_extensions/outerbounds/plugins/torchtune/__init__.py
@@ -13,12 +13,14 @@ class TorchTune:
     def __init__(
         self,
         use_multi_node_config: bool = False,
+        config_overrides: Optional[Dict] = None,
     ):
         """
         Initialize the Tune launcher.

         :param use_multi_node_config: If True, attempt to build a distributed configuration
             from current.torch.torchrun_args.
+        :param config_overrides: Optional dictionary of config overrides for tune run.
         """
         self.multi_node_config = {}
         if use_multi_node_config:
@@ -37,6 +39,8 @@ class TorchTune:
                 "num_processes": current.torch.torchrun_args["nproc_per_node"]
                 * current.torch.torchrun_args["nnodes"],
             }
+            if config_overrides:
+                self.multi_node_config.update(config_overrides)
             print(
                 f"[Metaflow Tune] Discovered multi-node config for torchrun: {self.multi_node_config}"
             )
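Finally, a sketch of the new `config_overrides` parameter in use. It only takes effect in the multi-node branch, where it is merged over the discovered values via `dict.update()`; the call context (a step where `current.torch.torchrun_args` is populated) is an assumption:

```python
# Sketch: layering a user override onto the auto-discovered torchrun config.
# Must run inside a step where current.torch.torchrun_args is populated;
# the override key shown ("num_processes") comes from the discovered config.
from metaflow_extensions.outerbounds.plugins.torchtune import TorchTune

tune = TorchTune(
    use_multi_node_config=True,
    config_overrides={"num_processes": 4},  # wins over the discovered value
)
```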