ob-metaflow-extensions 1.1.130__py2.py3-none-any.whl → 1.5.1__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ob-metaflow-extensions might be problematic. Click here for more details.
- metaflow_extensions/outerbounds/__init__.py +1 -1
- metaflow_extensions/outerbounds/plugins/__init__.py +34 -4
- metaflow_extensions/outerbounds/plugins/apps/__init__.py +0 -0
- metaflow_extensions/outerbounds/plugins/apps/app_cli.py +0 -0
- metaflow_extensions/outerbounds/plugins/apps/app_utils.py +187 -0
- metaflow_extensions/outerbounds/plugins/apps/consts.py +3 -0
- metaflow_extensions/outerbounds/plugins/apps/core/__init__.py +15 -0
- metaflow_extensions/outerbounds/plugins/apps/core/_state_machine.py +506 -0
- metaflow_extensions/outerbounds/plugins/apps/core/_vendor/__init__.py +0 -0
- metaflow_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.py +4 -0
- metaflow_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.py +478 -0
- metaflow_extensions/outerbounds/plugins/apps/core/app_config.py +128 -0
- metaflow_extensions/outerbounds/plugins/apps/core/app_deploy_decorator.py +330 -0
- metaflow_extensions/outerbounds/plugins/apps/core/artifacts.py +0 -0
- metaflow_extensions/outerbounds/plugins/apps/core/capsule.py +958 -0
- metaflow_extensions/outerbounds/plugins/apps/core/click_importer.py +24 -0
- metaflow_extensions/outerbounds/plugins/apps/core/code_package/__init__.py +3 -0
- metaflow_extensions/outerbounds/plugins/apps/core/code_package/code_packager.py +618 -0
- metaflow_extensions/outerbounds/plugins/apps/core/code_package/examples.py +125 -0
- metaflow_extensions/outerbounds/plugins/apps/core/config/__init__.py +15 -0
- metaflow_extensions/outerbounds/plugins/apps/core/config/cli_generator.py +165 -0
- metaflow_extensions/outerbounds/plugins/apps/core/config/config_utils.py +966 -0
- metaflow_extensions/outerbounds/plugins/apps/core/config/schema_export.py +299 -0
- metaflow_extensions/outerbounds/plugins/apps/core/config/typed_configs.py +233 -0
- metaflow_extensions/outerbounds/plugins/apps/core/config/typed_init_generator.py +537 -0
- metaflow_extensions/outerbounds/plugins/apps/core/config/unified_config.py +1125 -0
- metaflow_extensions/outerbounds/plugins/apps/core/config_schema.yaml +337 -0
- metaflow_extensions/outerbounds/plugins/apps/core/dependencies.py +115 -0
- metaflow_extensions/outerbounds/plugins/apps/core/deployer.py +959 -0
- metaflow_extensions/outerbounds/plugins/apps/core/experimental/__init__.py +89 -0
- metaflow_extensions/outerbounds/plugins/apps/core/perimeters.py +87 -0
- metaflow_extensions/outerbounds/plugins/apps/core/secrets.py +164 -0
- metaflow_extensions/outerbounds/plugins/apps/core/utils.py +233 -0
- metaflow_extensions/outerbounds/plugins/apps/core/validations.py +17 -0
- metaflow_extensions/outerbounds/plugins/apps/deploy_decorator.py +201 -0
- metaflow_extensions/outerbounds/plugins/apps/supervisord_utils.py +243 -0
- metaflow_extensions/outerbounds/plugins/aws/__init__.py +4 -0
- metaflow_extensions/outerbounds/plugins/aws/assume_role.py +3 -0
- metaflow_extensions/outerbounds/plugins/aws/assume_role_decorator.py +118 -0
- metaflow_extensions/outerbounds/plugins/card_utilities/injector.py +1 -1
- metaflow_extensions/outerbounds/plugins/checkpoint_datastores/__init__.py +2 -0
- metaflow_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.py +71 -0
- metaflow_extensions/outerbounds/plugins/checkpoint_datastores/external_chckpt.py +85 -0
- metaflow_extensions/outerbounds/plugins/checkpoint_datastores/nebius.py +73 -0
- metaflow_extensions/outerbounds/plugins/fast_bakery/baker.py +110 -0
- metaflow_extensions/outerbounds/plugins/fast_bakery/docker_environment.py +43 -9
- metaflow_extensions/outerbounds/plugins/fast_bakery/fast_bakery.py +12 -0
- metaflow_extensions/outerbounds/plugins/kubernetes/kubernetes_client.py +18 -44
- metaflow_extensions/outerbounds/plugins/kubernetes/pod_killer.py +374 -0
- metaflow_extensions/outerbounds/plugins/nim/card.py +2 -16
- metaflow_extensions/outerbounds/plugins/nim/{__init__.py → nim_decorator.py} +13 -49
- metaflow_extensions/outerbounds/plugins/nim/nim_manager.py +294 -233
- metaflow_extensions/outerbounds/plugins/nim/utils.py +36 -0
- metaflow_extensions/outerbounds/plugins/nvcf/constants.py +2 -2
- metaflow_extensions/outerbounds/plugins/nvcf/nvcf.py +100 -19
- metaflow_extensions/outerbounds/plugins/nvcf/nvcf_decorator.py +6 -1
- metaflow_extensions/outerbounds/plugins/nvct/__init__.py +0 -0
- metaflow_extensions/outerbounds/plugins/nvct/exceptions.py +71 -0
- metaflow_extensions/outerbounds/plugins/nvct/nvct.py +131 -0
- metaflow_extensions/outerbounds/plugins/nvct/nvct_cli.py +289 -0
- metaflow_extensions/outerbounds/plugins/nvct/nvct_decorator.py +286 -0
- metaflow_extensions/outerbounds/plugins/nvct/nvct_runner.py +218 -0
- metaflow_extensions/outerbounds/plugins/nvct/utils.py +29 -0
- metaflow_extensions/outerbounds/plugins/ollama/__init__.py +225 -0
- metaflow_extensions/outerbounds/plugins/ollama/constants.py +1 -0
- metaflow_extensions/outerbounds/plugins/ollama/exceptions.py +22 -0
- metaflow_extensions/outerbounds/plugins/ollama/ollama.py +1924 -0
- metaflow_extensions/outerbounds/plugins/ollama/status_card.py +292 -0
- metaflow_extensions/outerbounds/plugins/optuna/__init__.py +48 -0
- metaflow_extensions/outerbounds/plugins/profilers/simple_card_decorator.py +96 -0
- metaflow_extensions/outerbounds/plugins/s3_proxy/__init__.py +7 -0
- metaflow_extensions/outerbounds/plugins/s3_proxy/binary_caller.py +132 -0
- metaflow_extensions/outerbounds/plugins/s3_proxy/constants.py +11 -0
- metaflow_extensions/outerbounds/plugins/s3_proxy/exceptions.py +13 -0
- metaflow_extensions/outerbounds/plugins/s3_proxy/proxy_bootstrap.py +59 -0
- metaflow_extensions/outerbounds/plugins/s3_proxy/s3_proxy_api.py +93 -0
- metaflow_extensions/outerbounds/plugins/s3_proxy/s3_proxy_decorator.py +250 -0
- metaflow_extensions/outerbounds/plugins/s3_proxy/s3_proxy_manager.py +225 -0
- metaflow_extensions/outerbounds/plugins/secrets/secrets.py +38 -2
- metaflow_extensions/outerbounds/plugins/snowflake/snowflake.py +81 -11
- metaflow_extensions/outerbounds/plugins/snowpark/snowpark.py +18 -8
- metaflow_extensions/outerbounds/plugins/snowpark/snowpark_cli.py +6 -0
- metaflow_extensions/outerbounds/plugins/snowpark/snowpark_client.py +45 -18
- metaflow_extensions/outerbounds/plugins/snowpark/snowpark_decorator.py +18 -9
- metaflow_extensions/outerbounds/plugins/snowpark/snowpark_job.py +10 -4
- metaflow_extensions/outerbounds/plugins/torchtune/__init__.py +163 -0
- metaflow_extensions/outerbounds/plugins/vllm/__init__.py +255 -0
- metaflow_extensions/outerbounds/plugins/vllm/constants.py +1 -0
- metaflow_extensions/outerbounds/plugins/vllm/exceptions.py +1 -0
- metaflow_extensions/outerbounds/plugins/vllm/status_card.py +352 -0
- metaflow_extensions/outerbounds/plugins/vllm/vllm_manager.py +621 -0
- metaflow_extensions/outerbounds/remote_config.py +46 -9
- metaflow_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.py +94 -2
- metaflow_extensions/outerbounds/toplevel/ob_internal.py +4 -0
- metaflow_extensions/outerbounds/toplevel/plugins/ollama/__init__.py +1 -0
- metaflow_extensions/outerbounds/toplevel/plugins/optuna/__init__.py +1 -0
- metaflow_extensions/outerbounds/toplevel/plugins/torchtune/__init__.py +1 -0
- metaflow_extensions/outerbounds/toplevel/plugins/vllm/__init__.py +1 -0
- metaflow_extensions/outerbounds/toplevel/s3_proxy.py +88 -0
- {ob_metaflow_extensions-1.1.130.dist-info → ob_metaflow_extensions-1.5.1.dist-info}/METADATA +2 -2
- ob_metaflow_extensions-1.5.1.dist-info/RECORD +133 -0
- metaflow_extensions/outerbounds/plugins/nim/utilities.py +0 -5
- ob_metaflow_extensions-1.1.130.dist-info/RECORD +0 -56
- {ob_metaflow_extensions-1.1.130.dist-info → ob_metaflow_extensions-1.5.1.dist-info}/WHEEL +0 -0
- {ob_metaflow_extensions-1.1.130.dist-info → ob_metaflow_extensions-1.5.1.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import time
|
|
3
|
+
from typing import Dict, Optional
|
|
4
|
+
|
|
5
|
+
from .exceptions import S3ProxyConfigException, S3ProxyApiException
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class S3ProxyConfigResponse:
    """Typed view over the JSON payload returned by the s3proxy integration
    endpoint. Any field missing from the payload is exposed as ``None``."""

    # Keys expected in the API response; each becomes an instance attribute.
    _FIELDS = (
        "bucket_name",
        "endpoint_url",
        "access_key_id",
        "secret_access_key",
        "region",
    )

    def __init__(self, data: Dict):
        for field in self._FIELDS:
            setattr(self, field, data.get(field))
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class S3ProxyApiClient:
    """Client for the Outerbounds integrations API endpoint that returns the
    S3 proxy configuration (external bucket credentials and endpoint URL)."""

    def __init__(self):
        # Resolve perimeter + integrations URL eagerly so a misconfigured
        # environment fails fast at construction time.
        self.perimeter, self.integrations_url = self._get_api_configs()

    def _get_api_configs(self):
        """Return ``(perimeter, integrations_url)`` from the local Outerbounds
        config, falling back to environment variables.

        Raises
        ------
        S3ProxyConfigException
            If either value cannot be resolved.
        """
        from metaflow_extensions.outerbounds.remote_config import init_config
        from os import environ

        conf = init_config()
        perimeter = conf.get("OBP_PERIMETER") or environ.get("OBP_PERIMETER", "")
        integrations_url = conf.get("OBP_INTEGRATIONS_URL") or environ.get(
            "OBP_INTEGRATIONS_URL", ""
        )

        if not perimeter:
            raise S3ProxyConfigException(
                "No perimeter set. Please run `outerbounds configure` command."
            )

        if not integrations_url:
            raise S3ProxyConfigException(
                "No integrations URL set. Please contact your Outerbounds support team."
            )

        return perimeter, integrations_url

    def fetch_s3_proxy_config(
        self, integration_name: Optional[str] = None
    ) -> S3ProxyConfigResponse:
        """Fetch the S3 proxy configuration for this perimeter.

        Parameters
        ----------
        integration_name : str, optional
            Specific integration to fetch. When omitted, the server resolves
            the perimeter's only S3 proxy integration (and fails server-side
            if several exist).
        """
        url = f"{self.integrations_url}/s3proxy"

        payload = {"perimeter_name": self.perimeter}
        if integration_name:
            payload["integration_name"] = integration_name

        headers = {"Content-Type": "application/json"}

        # Attach Metaflow's service auth headers when available; if the import
        # is missing we deliberately proceed without them.
        try:
            from metaflow.metaflow_config import SERVICE_HEADERS

            headers.update(SERVICE_HEADERS or {})
        except ImportError:
            pass

        response = self._make_request(url, headers, payload)
        return S3ProxyConfigResponse(response)

    def _make_request(self, url: str, headers: Dict, payload: Dict) -> Dict:
        """GET ``url`` and return the decoded JSON body.

        409 responses are treated as transient and retried up to 3 times with
        a short linear backoff; connection-level retries are delegated to
        ``_api_server_get``.

        Raises
        ------
        S3ProxyApiException
            For any final non-200 response (using the server-provided
            ``message`` when the error body is valid JSON).
        """
        from metaflow_extensions.outerbounds.plugins.secrets.secrets import (
            _api_server_get,
        )

        retryable_status_codes = [409]
        json_payload = json.dumps(payload)

        for attempt in range(3):
            response = _api_server_get(
                url, data=json_payload, headers=headers, conn_error_retries=5
            )

            if response.status_code not in retryable_status_codes:
                break

            if attempt < 2:
                time.sleep(0.5 * (attempt + 1))

        if response.status_code != 200:
            error_msg = f"API request failed with status {response.status_code}"
            # Prefer the server-provided message when the body is valid JSON.
            # Fixed: this was a bare `except:`, which would also swallow
            # KeyboardInterrupt/SystemExit raised while parsing the body.
            try:
                error_data = response.json()
                if "message" in error_data:
                    error_msg = error_data["message"]
            except Exception:
                pass
            raise S3ProxyApiException(error_msg)

        return response.json()
|
|
@@ -0,0 +1,250 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import functools
|
|
3
|
+
import json
|
|
4
|
+
import signal
|
|
5
|
+
import time
|
|
6
|
+
from typing import Dict, List, Optional, Tuple
|
|
7
|
+
|
|
8
|
+
from metaflow import current
|
|
9
|
+
from metaflow.decorators import StepDecorator
|
|
10
|
+
from .exceptions import S3ProxyException
|
|
11
|
+
from .constants import S3_PROXY_WRITE_MODES
|
|
12
|
+
from collections import namedtuple
|
|
13
|
+
|
|
14
|
+
# Per-step settings forwarded to the proxy bootstrap command.
S3ProxyBinaryConfig = namedtuple(
    "S3ProxyBinaryConfig", ["integration_name", "write_mode", "debug"]
)


def monkey_patch_environment(
    environment, step_name_and_deco_attrs: Dict[str, S3ProxyBinaryConfig]
):
    """Wrap ``environment.bootstrap_commands`` so that every step listed in
    *step_name_and_deco_attrs* also launches the S3 proxy during bootstrap.

    Steps not present in the mapping keep their original bootstrap commands
    unchanged.
    """
    original_fn = environment.bootstrap_commands

    @functools.wraps(original_fn)
    def wrapper(step_name, ds_type, *args):
        base_cmds = original_fn(step_name, ds_type, *args)
        cfg = step_name_and_deco_attrs.get(step_name)
        if cfg is None:
            return base_cmds
        # The bootstrap module writes the user-code proxy config and the proxy
        # status to files, which are then surfaced to the step process via
        # environment variables.
        return base_cmds + [
            "echo 'Setting up the S3 proxy.'",
            f"python -m metaflow_extensions.outerbounds.plugins.s3_proxy.proxy_bootstrap bootstrap --integration-name {cfg.integration_name} --write-mode {cfg.write_mode} --debug {cfg.debug} --uc-proxy-cfg-write-path ./.uc_proxy_cfg_file --proxy-status-write-path ./.proxy_status_file",
            "export METAFLOW_S3_PROXY_USER_CODE_CONFIG=$(cat ./.uc_proxy_cfg_file)",
            "export METAFLOW_S3_PROXY_STATUS=$(cat ./.proxy_status_file)",
            "export METAFLOW_S3_PROXY_SETUP_SUCCESS=True",
            "flush_mflogs",
        ]

    environment.bootstrap_commands = wrapper
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
class S3ProxyDecorator(StepDecorator):
    """
    Set up an S3 proxy that caches objects in an external, S3‑compatible bucket
    for S3 read and write requests.

    This decorator requires an integration in the Outerbounds platform that
    points to an external bucket. It affects S3 operations performed via
    Metaflow's `get_aws_client` and `S3` within a `@step`.

    Read operations
    ---------------
    All read operations pass through the proxy. If an object does not already
    exist in the external bucket, it is cached there. For example, if code reads
    from buckets `FOO` and `BAR` using the `S3` interface, objects from both
    buckets are cached in the external bucket.

    During task execution, all S3‑related read requests are routed through the
    proxy:
    - If the object is present in the external object store, the proxy
      streams it directly from there without accessing the requested origin
      bucket.
    - If the object is not present in the external storage, the proxy
      fetches it from the requested bucket, caches it in the external
      storage, and streams the response from the origin bucket.

    Warning
    -------
    All READ operations (e.g., GetObject, HeadObject) pass through the external
    bucket regardless of the bucket specified in user code. Even
    `S3(run=self)` and `S3(s3root="mybucketfoo")` requests go through the
    external bucket cache.

    Write operations
    ----------------
    Write behavior is controlled by the `write_mode` parameter, which determines
    whether writes also persist objects in the cache.

    `write_mode` values:
    - `origin-and-cache`: objects are written both to the cache and to their
      intended origin bucket.
    - `origin`: objects are written only to their intended origin bucket.

    Parameters
    ----------
    integration_name : str, optional
        [Outerbounds integration name](https://docs.outerbounds.com/outerbounds/configuring-secrets/#integrations-view)
        that holds the configuration for the external, S3‑compatible object
        storage bucket. If not specified, the only available S3 proxy
        integration in the namespace is used (fails if multiple exist).
    write_mode : str, optional
        Controls whether writes also go to the external bucket.
        - `origin` (default)
        - `origin-and-cache`
    debug : bool, optional
        Enables debug logging for proxy operations.
    """

    name = "s3_proxy"
    defaults = {
        "integration_name": None,
        "write_mode": None,
        "debug": False,
    }

    # Patch-once guard: the environment's bootstrap commands must only be
    # wrapped a single time per process, even if many steps carry this
    # decorator. NOTE(review): the flag is set via `cls`, so each subclass
    # (nebius/coreweave) tracks it independently — confirm that is intended
    # when multiple proxy decorator flavors appear in one flow.
    _environment_patched = False

    # Parsed JSON from METAFLOW_S3_PROXY_STATUS (proxy pid and temp-file
    # paths). Set per-instance in `step_init` when the bootstrap has exported
    # that variable; consumed by `task_finished` for teardown.
    _proxy_status = None

    @classmethod
    def patch_environment(cls, flow, environment):
        """
        Patch the environment's bootstrap commands so that the s3 proxy is
        launched before the step code executes. The proxy bootstrap must run
        only for the steps that carry this decorator, which is why the full
        map of decorated step names is passed down to the patcher.
        """
        if cls._environment_patched:
            return

        # NOTE(review): this filter matches the literal name "s3_proxy" only,
        # so steps decorated with the nebius/coreweave subclasses are not
        # picked up by this pass — confirm whether that is intentional.
        steps_with_s3_proxy = [
            step
            for step in flow
            if any(deco.name == "s3_proxy" for deco in step.decorators)
        ]
        if len(steps_with_s3_proxy) == 0:  # nothing to patch; bail out early
            return

        step_names_and_deco_attrs = {}
        for s in steps_with_s3_proxy:
            # Take the first matching decorator; duplicates, if any, are ignored.
            _decos = [x for x in s.decorators if x.name == "s3_proxy"]
            deco = _decos[0]
            step_names_and_deco_attrs[s.name] = S3ProxyBinaryConfig(
                integration_name=deco.attributes["integration_name"],
                write_mode=deco.attributes["write_mode"],
                debug=deco.attributes["debug"],
            )

        monkey_patch_environment(environment, step_names_and_deco_attrs)
        cls._environment_patched = True

    def step_init(self, flow, graph, step, decos, environment, flow_datastore, logger):
        """Validate attributes, patch the environment, and — when running
        inside a task whose bootstrap already launched the proxy — record the
        proxy status for later teardown."""
        write_mode = self.attributes["write_mode"]
        if write_mode and write_mode not in S3_PROXY_WRITE_MODES:
            raise S3ProxyException(
                f"unexpected write_mode specified: {write_mode}. Allowed values are: {','.join(S3_PROXY_WRITE_MODES)}."
            )

        self.patch_environment(flow, environment)
        # These env vars are only present once the patched bootstrap commands
        # have run (i.e. inside the remote task), not at deploy/launch time.
        if (
            os.environ.get("METAFLOW_S3_PROXY_USER_CODE_CONFIG")
            and os.environ.get("METAFLOW_S3_PROXY_STATUS")
            and self.attributes["debug"]
        ):
            print("[@s3_proxy] S3 Proxy detected. Debug mode is enabled.")

        if os.environ.get("METAFLOW_S3_PROXY_STATUS"):
            # Status carries proxy_pid / config_path / binary_path used by
            # `task_finished` to shut the proxy down and remove its files.
            proxy_status = json.loads(os.environ.get("METAFLOW_S3_PROXY_STATUS"))
            self._proxy_status = proxy_status

    def task_pre_step(
        self,
        step_name,
        task_datastore,
        metadata,
        run_id,
        task_id,
        flow,
        graph,
        retry_count,
        max_user_code_retries,
        ubf_context,
        inputs,
    ):
        """Setup S3 proxy before step execution"""
        # Intentionally a no-op: the proxy is started by the patched bootstrap
        # commands before the step process begins, not here.
        pass

    def task_finished(
        self, step_name, flow, graph, is_task_ok, retry_count, max_retries
    ):
        """Best-effort teardown of the proxy started during bootstrap: stop
        the process (group) and remove its config/binary files. All failures
        are swallowed so teardown never masks the task's own outcome."""
        if not self._proxy_status:
            return

        status = self._proxy_status
        proxy_pid = status.get("proxy_pid")
        config_path = status.get("config_path")
        binary_path = status.get("binary_path")

        # 1) Stop processes: try to terminate the process group for clean child shutdown
        if proxy_pid:
            try:
                pgid = os.getpgid(proxy_pid)
                os.killpg(pgid, signal.SIGTERM)
                time.sleep(1)
            except Exception:
                # Fall back to killing the pid directly if pgid is unavailable
                try:
                    os.kill(proxy_pid, signal.SIGTERM)
                except Exception:
                    pass

        # 2) Clear files based on status
        for path in (config_path, binary_path):
            try:
                if path and os.path.exists(path):
                    os.remove(path)
            except Exception:
                pass
|
|
214
|
+
|
|
215
|
+
|
|
216
|
+
class NebiusS3ProxyDecorator(S3ProxyDecorator):
    # Nebius-flavored alias of @s3_proxy. Behavior is inherited unchanged from
    # S3ProxyDecorator; only the decorator name (and the docstring preamble)
    # differ so users recognize it as a Neo Cloud (Nebius) decorator.

    name = "nebius_s3_proxy"

    # Re-declared so this subclass advertises the same attribute set as
    # @s3_proxy to the decorator machinery.
    defaults = dict(integration_name=None, write_mode=None, debug=False)

    __doc__ = (
        """
`@nebius_s3_proxy` is a Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
It exists to make it easier for users to know that this decorator should only be used with
a Neo Cloud like Nebius. The underlying mechanics of the decorator is the same as the `@s3_proxy`:\n
"""
        + S3ProxyDecorator.__doc__
    )
|
|
233
|
+
|
|
234
|
+
|
|
235
|
+
class CoreWeaveS3ProxyDecorator(S3ProxyDecorator):
    # CoreWeave-flavored alias of @s3_proxy. Behavior is inherited unchanged
    # from S3ProxyDecorator; only the decorator name (and the docstring
    # preamble) differ so users recognize it as a Neo Cloud (CoreWeave)
    # decorator.

    name = "coreweave_s3_proxy"

    # Re-declared so this subclass advertises the same attribute set as
    # @s3_proxy to the decorator machinery.
    defaults = dict(integration_name=None, write_mode=None, debug=False)

    __doc__ = (
        """
`@coreweave_s3_proxy` is a CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
It exists to make it easier for users to know that this decorator should only be used with
a Neo Cloud like CoreWeave. The underlying mechanics of the decorator is the same as the `@s3_proxy`:\n
"""
        + S3ProxyDecorator.__doc__
    )
|
|
@@ -0,0 +1,225 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import json
|
|
3
|
+
import gzip
|
|
4
|
+
import sys
|
|
5
|
+
import time
|
|
6
|
+
import threading
|
|
7
|
+
import subprocess
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
from typing import Optional, Tuple
|
|
10
|
+
|
|
11
|
+
import requests
|
|
12
|
+
|
|
13
|
+
from .constants import (
|
|
14
|
+
S3_PROXY_BINARY_URLS,
|
|
15
|
+
DEFAULT_PROXY_PORT,
|
|
16
|
+
DEFAULT_PROXY_HOST,
|
|
17
|
+
)
|
|
18
|
+
from metaflow.metaflow_config import AWS_SECRETS_MANAGER_DEFAULT_REGION
|
|
19
|
+
from .s3_proxy_api import S3ProxyApiClient
|
|
20
|
+
from .exceptions import S3ProxyException
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class S3ProxyManager:
    """Orchestrates the lifecycle of the local S3 proxy inside a task pod:
    fetches the integration config from the API, downloads the proxy binary,
    writes its bucket configuration, launches the proxy via a supervising
    Python process, and derives the in-step client configuration."""

    def __init__(
        self,
        integration_name: Optional[str] = None,
        write_mode: Optional[str] = None,
        debug: bool = False,
    ):
        self.integration_name = integration_name
        self.write_mode = write_mode
        self.debug = debug
        # Populated step-by-step inside `setup_proxy`; all remain None until then.
        self.process = None
        self.binary_path = None
        self.config_path = None
        self.api_client = S3ProxyApiClient()
        self.proxy_config = None

    def setup_proxy(self) -> Tuple[dict, int, str, str]:
        """Fetch config, download the binary, write its config file, and start
        the proxy. Only works inside Kubernetes; raises otherwise. On any
        failure, best-effort cleanup runs and the original error is re-raised.

        Returns
        -------
        tuple
            ``(user_code_proxy_config, supervisor_pid, config_path, binary_path)``.
        """
        try:
            if self._is_running_in_kubernetes():
                config_data = self.api_client.fetch_s3_proxy_config(
                    self.integration_name
                )
                self.binary_path = self._download_binary()
                self.config_path = self._write_config_file(config_data)
                # In the new world where the binary is being called
                # before even the metaflow code execution starts,
                # this implies a few important things:
                # 1. We start the actual proxy process via another python file that safely ships logs to mflog.
                # 2. We pass back the right values to the metaflow step process via env vars.
                # 3. Metaflow step code relies on env vars to decide if clients need to have the s3 proxy in them.
                self.process = self._start_proxy_process()

                user_code_proxy_config = self._setup_proxy_config(config_data)

                return_tuple = (
                    user_code_proxy_config,  # this is the config that will be used within the metaflow `step` code.
                    self.process.pid,  # This is the pid of the process that will jumpstart, monitor and ship logs to MFLOG for the proxy process
                    self.config_path,  # This is the path to the config that is derived from the integration. It contains the actual bucket path and name where external objects are stored.
                    self.binary_path,  # This is the path to the binary for the proxy.
                )
                # We return a tuple because these values need to be passed down to the metaflow step process where
                # it will handle their removal gracefully after the step is finished.
                return return_tuple

            print(
                "[@s3_proxy] skipping s3-proxy set up because metaflow has not detected a Kubernetes environment"
            )
            raise S3ProxyException(
                "S3 proxy setup failed because metaflow has not detected a Kubernetes environment"
            )
        except Exception as e:
            if self.debug:
                print(f"[@s3_proxy] Setup failed: {e}")
            self.cleanup()
            raise

    def _is_running_in_kubernetes(self) -> bool:
        """Check if running inside a Kubernetes pod by checking for Kubernetes service account token."""
        return (
            os.path.exists("/var/run/secrets/kubernetes.io/serviceaccount/token")
            and os.environ.get("KUBERNETES_SERVICE_HOST") is not None
        )

    def _download_binary(self) -> str:
        """Download and unpack the gzip-compressed proxy binary for this
        architecture to /tmp/s3-proxy (reused if it already exists).

        Raises
        ------
        S3ProxyException
            On unsupported architecture or any download/unpack failure.
        """
        binary_path = Path("/tmp/s3-proxy")
        if binary_path.exists():
            if self.debug:
                print("[@s3_proxy] Binary already exists, skipping download")
            return str(binary_path.absolute())

        try:
            if self.debug:
                print("[@s3_proxy] Downloading binary...")

            from platform import machine

            arch = machine()
            if arch not in S3_PROXY_BINARY_URLS:
                raise S3ProxyException(
                    f"unsupported platform architecture: {arch}. Please reach out to your Outerbounds Support team for more help."
                )

            # stream=True + response.raw gives the undecoded byte stream,
            # which is then gunzipped locally into the binary file.
            response = requests.get(S3_PROXY_BINARY_URLS[arch], stream=True, timeout=60)
            response.raise_for_status()

            with open(binary_path, "wb") as f:
                with gzip.GzipFile(fileobj=response.raw) as gz:
                    f.write(gz.read())

            # Make the binary executable.
            binary_path.chmod(0o755)

            if self.debug:
                print("[@s3_proxy] Binary downloaded successfully")

            return str(binary_path.absolute())

        except Exception as e:
            if self.debug:
                print(f"[@s3_proxy] Binary download failed: {e}")
            raise S3ProxyException(f"Failed to download S3 proxy binary: {e}")

    def _write_config_file(self, config_data) -> str:
        """Write the proxy's bucket configuration (camelCase keys, as the
        binary presumably expects — TODO confirm) to a JSON file and return
        its absolute path. Note: the file contains credentials in plain text."""
        config_path = Path("/tmp/s3-proxy-config.json")

        proxy_config = {
            "bucketName": config_data.bucket_name,
            "endpointUrl": config_data.endpoint_url,
            "accessKeyId": config_data.access_key_id,
            "accessKeySecret": config_data.secret_access_key,
            "region": config_data.region,
        }

        config_path.write_text(json.dumps(proxy_config, indent=2))

        if self.debug:
            print(f"[@s3_proxy] Config written to {config_path}")

        return str(config_path.absolute())

    def _start_proxy_process(self) -> subprocess.Popen:
        # This command will jump start a process that will then call the proxy binary.
        # The reason we do something like this is because we need to run all of this before
        # even the `step` command is called. So we need a python process that will ship the logs
        # of the proxy process to MFLOG instead of setting print statements. We need this process
        # to run independently since the S3ProxyManager gets called in the proxy bootstrap which will
        # exit after jump starting the proxy process.
        cmd = [self.binary_path, "--bucket-config", self.config_path, "serve"]
        _env = os.environ.copy()
        # The binary_caller module is expected to read these env vars to know
        # what command to run — TODO confirm against binary_caller.py.
        _env["S3_PROXY_BINARY_COMMAND"] = " ".join(cmd)
        if self.debug:
            _env["S3_PROXY_BINARY_DEBUG"] = "True"
        _cmd = [
            sys.executable,
            "-m",
            "metaflow_extensions.outerbounds.plugins.s3_proxy.binary_caller",
        ]
        devnull = subprocess.DEVNULL
        # start_new_session=True detaches the supervisor into its own process
        # group so it survives this bootstrap process exiting, and so teardown
        # can signal the whole group.
        process = subprocess.Popen(
            _cmd,
            stdout=devnull,
            stderr=devnull,
            text=True,
            start_new_session=True,
            env=_env,
        )
        # Crude readiness check: give the supervisor a moment, then see if it
        # is still alive.
        time.sleep(3)

        if process.poll() is None:
            if self.debug:
                print(f"[@s3_proxy] Proxy started successfully (pid: {process.pid})")

            return process
        else:
            # NOTE(review): stdout/stderr were redirected to DEVNULL above, so
            # communicate() returns (None, None) and the messages below will
            # report "None" rather than the proxy's actual output.
            stdout_data, stderr_data = process.communicate()
            if self.debug:
                print(f"[@s3_proxy] Proxy failed to start - output: {stdout_data}")
            raise S3ProxyException(f"S3 proxy failed to start: {stdout_data}")

    def _setup_proxy_config(self, config_data):
        """Build the config handed to user step code: the local proxy endpoint
        plus region/bucket info (and write_mode when set)."""
        # Local re-import shadows the module-level import of the same name;
        # kept as-is (harmless, and keeps this method self-contained).
        from metaflow.metaflow_config import AWS_SECRETS_MANAGER_DEFAULT_REGION

        region = os.environ.get(
            "METAFLOW_AWS_SECRETS_MANAGER_DEFAULT_REGION",
            AWS_SECRETS_MANAGER_DEFAULT_REGION,
        )

        proxy_config = {
            "endpoint_url": f"http://{DEFAULT_PROXY_HOST}:{DEFAULT_PROXY_PORT}",
            "region": region,
            "bucket_name": config_data.bucket_name,
            "active": True,
        }

        if self.write_mode:
            proxy_config["write_mode"] = self.write_mode

        self.proxy_config = proxy_config
        return proxy_config

    def cleanup(self):
        """Best-effort teardown: clear the global proxy config, stop the
        supervisor process if still running, and delete the config/binary
        files. All errors are swallowed (logged only in debug mode)."""
        try:
            from metaflow_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import (
                clear_s3_proxy_config,
            )

            clear_s3_proxy_config()

            if self.process and self.process.poll() is None:
                self.process.terminate()
                self.process.wait(timeout=5)
                if self.debug:
                    print("[@s3_proxy] Proxy process stopped")

            from os import remove

            # NOTE(review): if setup failed early these paths may still be
            # None; the resulting TypeError is swallowed by the except below.
            remove(self.config_path)
            remove(self.binary_path)

        except Exception as e:
            if self.debug:
                print(f"[@s3_proxy] Cleanup error: {e}")
        finally:
            self.proxy_config = None
|
|
@@ -4,13 +4,48 @@ from typing import Dict
|
|
|
4
4
|
import base64
|
|
5
5
|
import json
|
|
6
6
|
import requests
|
|
7
|
+
import random
|
|
7
8
|
import time
|
|
9
|
+
import sys
|
|
8
10
|
|
|
9
11
|
|
|
10
12
|
class OuterboundsSecretsException(Exception):
|
|
11
13
|
pass
|
|
12
14
|
|
|
13
15
|
|
|
16
|
+
def _api_server_get(*args, conn_error_retries=2, **kwargs):
    """GET wrapper around ``requests.get`` that retries connection-level failures.

    There are two categories of errors to handle when dealing with any API server:
    1. HTTP errors — errors returned by the API server itself.
        - How to handle retries for this case is application specific, so it is
          left to the caller.
    2. Errors when the API server may not be reachable (DNS resolution / network issues).
        - In this scenario, something external to the API server is going wrong.
        - Failing prematurely in this case might not be the best course of action,
          since critical user jobs might crash on intermittent issues.
        - So in this case we simply retry the request.

    This function handles the second case: it retries ``requests.get`` on
    ``ConnectionError`` with exponential backoff (2, 4, 8, ... seconds plus a
    small fixed jitter). With ``conn_error_retries=5`` the attempts are spaced
    2, 4, 8 and 16 seconds apart (~30s cumulative) before the final failure is
    re-raised — generally long enough that a still-failing connection indicates
    something genuinely broken rather than an intermittent blip.

    Raises
    ------
    requests.exceptions.ConnectionError
        When every attempt fails at the connection level.
    """
    _num_retries = 0
    noise = random.uniform(-0.5, 0.5)
    while _num_retries < conn_error_retries:
        try:
            return requests.get(*args, **kwargs)
        except requests.exceptions.ConnectionError:
            # Strict `<` so the final attempt's failure falls through to the
            # re-raise below. (Previously `<=`, which was always true inside
            # the loop: the raise branch was unreachable and exhausting the
            # retries silently returned None after one extra sleep.)
            if _num_retries < conn_error_retries - 1:
                # Exponential backoff with 2^(_num_retries+1) seconds
                time.sleep((2 ** (_num_retries + 1)) + noise)
                _num_retries += 1
            else:
                print(
                    "[@secrets] Failed to connect to the API server. ",
                    file=sys.stderr,
                )
                raise
|
|
47
|
+
|
|
48
|
+
|
|
14
49
|
class OuterboundsSecretsApiResponse:
|
|
15
50
|
def __init__(self, response):
|
|
16
51
|
self.response = response
|
|
@@ -133,7 +168,9 @@ class OuterboundsSecretsProvider(SecretsProvider):
|
|
|
133
168
|
retryable_status_codes = [409]
|
|
134
169
|
json_payload = json.dumps(payload)
|
|
135
170
|
for attempt in range(2): # 0 = initial attempt, 1-2 = retries
|
|
136
|
-
response =
|
|
171
|
+
response = _api_server_get(
|
|
172
|
+
url, data=json_payload, headers=request_headers, conn_error_retries=5
|
|
173
|
+
)
|
|
137
174
|
if response.status_code not in retryable_status_codes:
|
|
138
175
|
break
|
|
139
176
|
|
|
@@ -141,7 +178,6 @@ class OuterboundsSecretsProvider(SecretsProvider):
|
|
|
141
178
|
sleep_time = 0.5 * (attempt + 1)
|
|
142
179
|
time.sleep(sleep_time)
|
|
143
180
|
|
|
144
|
-
response = requests.get(url, data=json_payload, headers=request_headers)
|
|
145
181
|
self._handle_error_response(response)
|
|
146
182
|
return OuterboundsSecretsApiResponse(response.json())
|
|
147
183
|
|