outerbounds 0.3.180rc5__py3-none-any.whl → 0.3.182rc0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -119,7 +119,7 @@ class _capsuleDeployerStateMachine:
119
119
  return self.to_dot()
120
120
 
121
121
  def to_diagraph(self):
122
- from graphviz import Digraph
122
+ from graphviz import Digraph # type: ignore
123
123
 
124
124
  # Create a new Digraph
125
125
  dot = Digraph(name="StateMachine", format="png")
@@ -164,13 +164,23 @@ class CurrentWorkerInfo(TypedDict):
164
164
 
165
165
  class DEPLOYMENT_READY_CONDITIONS:
166
166
  """
167
- Deployment ready conditions define what is considered a successful completion of a deployment.
167
+ Deployment ready conditions define what is considered a successful completion of the current deployment instance.
168
168
  This allows users or platform designers to configure the criteria for deployment readiness.
169
169
 
170
- Reasons for different deployment modes include:
171
- 1) [at_least_one_running] Some endpoints may be deployed ephemerally and are considered ready when at least one instance is running; additional instances are for load management.
172
- 2) [all_running] Operators may require that all replicas are available, running, and only the current deployment version is serving traffic.
173
- 3) [fully_finished] Operators may only care that the minimum number of replicas for the current rollout are present in the cluster.
170
+ Why do we need deployment readiness conditions?
171
+ - Deployments might be taking place from a CI/CD esq environment, In these setups, the downstream build triggers might be depending on a specific criteria for deployment completion. Having readiness conditions allows the CI/CD systems to get a signal of when the deployment is ready.
172
+ - Users might be calling the deployment API under different conditions:
173
+ - Some users might want a cluster of workers ready before serving traffic while others might want just one worker ready to start serving traffic.
174
+
175
+ Some readiness conditions include:
176
+ 1) [at_least_one_running] Atleast min(min_replicas, 1) workers of the current deployment instance's version have started running.
177
+ - Usecase: Some endpoints may be deployed ephemerally and are considered ready when at least one instance is running; additional instances are for load management.
178
+ 2) [all_running] Atleast min_replicas number of workers are running for the deployment to be considered ready.
179
+ - Usecase: Operators may require that all replicas are available before traffic is routed. Needed when inference endpoints maybe under some SLA or require a larger load
180
+ 3) [fully_finished] Atleast min_replicas number of workers are running for the deployment and there are no pending or crashlooping workers from previous versions lying around.
181
+ - Usecase: Ensuring endpoint is fully available and no other versions are running.
182
+ 4) [async] The deployment will be assumed ready when the `upgradeInProgress` flag changes from `True` to `False`.
183
+ - Usecase: Operators may only care that the URL is minted for the deployment and the deployment might also be setup with 0 min workers.
174
184
  """
175
185
 
176
186
  # `ATLEAST_ONE_RUNNING` implies that atleast one worker of the current deployment instance's version has started running.
@@ -183,6 +193,9 @@ class DEPLOYMENT_READY_CONDITIONS:
183
193
  # `FULLY_FINISHED` implies that the deployment has the minimum number of replicas and all the workers are related to the current deployment instance's version.
184
194
  FULLY_FINISHED = "fully_finished"
185
195
 
196
+ # `ASYNC` implies that the deployment will be assumed ready after the URL is minted and the worker statuses are not checked.
197
+ ASYNC = "async"
198
+
186
199
  @classmethod
187
200
  def docstring(cls):
188
201
  return cls.__doc__
@@ -304,9 +317,11 @@ def _capsule_worker_semantic_status(
304
317
  for w in _workers:
305
318
  if w.get("phase") != phase:
306
319
  continue
307
- if w.get("version") not in xx:
308
- xx[w.get("version")] = []
309
- xx[w.get("version")].append(w)
320
+ worker_version = w.get("version")
321
+ if worker_version is not None:
322
+ if worker_version not in xx:
323
+ xx[worker_version] = []
324
+ xx[worker_version].append(w)
310
325
  return xx
311
326
 
312
327
  pending_workers = _make_version_dict(workers, "Pending")
@@ -20,7 +20,7 @@ from typing import Dict, List, Any, Optional, Union
20
20
  # end up creating issues with click in general. So we need to figure a
21
21
  # way to figure the right import of click dynamically. a neat way to handle that would be
22
22
  # to have a function that can import the correct click based on the context in which stuff is being loaded.
23
- from metaflow._vendor import click
23
+ from .click_importer import click
24
24
  from outerbounds._vendor import yaml
25
25
  from outerbounds.utils import metaflowconfig
26
26
  from .app_config import (
@@ -33,9 +33,6 @@ from .app_config import (
33
33
  from .perimeters import PerimeterExtractor
34
34
  from .cli_to_config import build_config_from_options
35
35
  from .utils import (
36
- CommaSeparatedListType,
37
- KVPairType,
38
- KVDictType,
39
36
  MultiStepSpinner,
40
37
  )
41
38
  from . import experimental
@@ -56,6 +53,171 @@ import uuid
56
53
  from datetime import datetime
57
54
 
58
55
 
56
+ class KeyValueDictPair(click.ParamType):
57
+ name = "KV-DICT-PAIR" # type: ignore
58
+
59
+ def convert(self, value, param, ctx):
60
+ # Parse a string of the form KEY=VALUE into a dict {KEY: VALUE}
61
+ if len(value.split("=", 1)) != 2:
62
+ self.fail(
63
+ f"Invalid format for {value}. Expected format: KEY=VALUE", param, ctx
64
+ )
65
+
66
+ key, _value = value.split("=", 1)
67
+ try:
68
+ return {"key": key, "value": json.loads(_value)}
69
+ except json.JSONDecodeError:
70
+ return {"key": key, "value": _value}
71
+ except Exception as e:
72
+ self.fail(f"Invalid value for {value}. Error: {e}", param, ctx)
73
+
74
+ def __str__(self):
75
+ return repr(self)
76
+
77
+ def __repr__(self):
78
+ return "KV-PAIR"
79
+
80
+
81
+ class KeyValuePair(click.ParamType):
82
+ name = "KV-PAIR" # type: ignore
83
+
84
+ def convert(self, value, param, ctx):
85
+ # Parse a string of the form KEY=VALUE into a dict {KEY: VALUE}
86
+ if len(value.split("=", 1)) != 2:
87
+ self.fail(
88
+ f"Invalid format for {value}. Expected format: KEY=VALUE", param, ctx
89
+ )
90
+
91
+ key, _value = value.split("=", 1)
92
+ try:
93
+ return {key: json.loads(_value)}
94
+ except json.JSONDecodeError:
95
+ return {key: _value}
96
+ except Exception as e:
97
+ self.fail(f"Invalid value for {value}. Error: {e}", param, ctx)
98
+
99
+ def __str__(self):
100
+ return repr(self)
101
+
102
+ def __repr__(self):
103
+ return "KV-PAIR"
104
+
105
+
106
+ class MountMetaflowArtifact(click.ParamType):
107
+ name = "MOUNT-METAFLOW-ARTIFACT" # type: ignore
108
+
109
+ def convert(self, value, param, ctx):
110
+ """
111
+ Convert a string like "flow=MyFlow,artifact=my_model,path=/tmp/abc" or
112
+ "pathspec=MyFlow/123/foo/345/my_model,path=/tmp/abc" to a dict.
113
+ """
114
+ artifact_dict = {}
115
+ parts = value.split(",")
116
+
117
+ for part in parts:
118
+ if "=" not in part:
119
+ self.fail(
120
+ f"Invalid format in part '{part}'. Expected 'key=value'", param, ctx
121
+ )
122
+
123
+ key, val = part.split("=", 1)
124
+ artifact_dict[key.strip()] = val.strip()
125
+
126
+ # Validate required fields
127
+ if "pathspec" in artifact_dict:
128
+ if "path" not in artifact_dict:
129
+ self.fail(
130
+ "When using 'pathspec', you must also specify 'path'", param, ctx
131
+ )
132
+
133
+ # Return as pathspec format
134
+ return {
135
+ "pathspec": artifact_dict["pathspec"],
136
+ "path": artifact_dict["path"],
137
+ }
138
+ elif (
139
+ "flow" in artifact_dict
140
+ and "artifact" in artifact_dict
141
+ and "path" in artifact_dict
142
+ ):
143
+ # Return as flow/artifact format
144
+ result = {
145
+ "flow": artifact_dict["flow"],
146
+ "artifact": artifact_dict["artifact"],
147
+ "path": artifact_dict["path"],
148
+ }
149
+
150
+ # Add optional namespace if provided
151
+ if "namespace" in artifact_dict:
152
+ result["namespace"] = artifact_dict["namespace"]
153
+
154
+ return result
155
+ else:
156
+ self.fail(
157
+ "Invalid format. Must be either 'flow=X,artifact=Y,path=Z' or 'pathspec=X,path=Z'",
158
+ param,
159
+ ctx,
160
+ )
161
+
162
+ def __str__(self):
163
+ return repr(self)
164
+
165
+ def __repr__(self):
166
+ return "MOUNT-METAFLOW-ARTIFACT"
167
+
168
+
169
+ class MountSecret(click.ParamType):
170
+ name = "MOUNT-SECRET" # type: ignore
171
+
172
+ def convert(self, value, param, ctx):
173
+ """
174
+ Convert a string like "id=my_secret,path=/tmp/secret" to a dict.
175
+ """
176
+ secret_dict = {}
177
+ parts = value.split(",")
178
+
179
+ for part in parts:
180
+ if "=" not in part:
181
+ self.fail(
182
+ f"Invalid format in part '{part}'. Expected 'key=value'", param, ctx
183
+ )
184
+
185
+ key, val = part.split("=", 1)
186
+ secret_dict[key.strip()] = val.strip()
187
+
188
+ # Validate required fields
189
+ if "id" in secret_dict and "path" in secret_dict:
190
+ return {"id": secret_dict["id"], "path": secret_dict["path"]}
191
+ else:
192
+ self.fail("Invalid format. Must be 'key=X,path=Y'", param, ctx)
193
+
194
+ def __str__(self):
195
+ return repr(self)
196
+
197
+ def __repr__(self):
198
+ return "MOUNT-SECRET"
199
+
200
+
201
+ class CommaSeparatedList(click.ParamType):
202
+ name = "COMMA-SEPARATED-LIST" # type: ignore
203
+
204
+ def convert(self, value, param, ctx):
205
+ return value.split(",")
206
+
207
+ def __str__(self):
208
+ return repr(self)
209
+
210
+ def __repr__(self):
211
+ return "COMMA-SEPARATED-LIST"
212
+
213
+
214
+ KVPairType = KeyValuePair()
215
+ MetaflowArtifactType = MountMetaflowArtifact()
216
+ SecretMountType = MountSecret()
217
+ CommaSeparatedListType = CommaSeparatedList()
218
+ KVDictType = KeyValueDictPair()
219
+
220
+
59
221
  class ColorTheme:
60
222
  TIMESTAMP = "magenta"
61
223
  LOADING_COLOR = "cyan"
@@ -40,7 +40,7 @@ class AppConfig:
40
40
  """Initialize configuration from a dictionary."""
41
41
  self.config = config_dict or {}
42
42
  self.schema = self._load_schema()
43
- self._final_state = {}
43
+ self._final_state: Dict[str, Any] = {}
44
44
 
45
45
  def set_state(self, key, value):
46
46
  self._final_state[key] = value
@@ -4,7 +4,7 @@ import pathlib
4
4
  import requests
5
5
  import time
6
6
  import shlex
7
- from typing import Optional, List, Dict, Any, Tuple
7
+ from typing import Optional, List, Dict, Any, Tuple, Union
8
8
  from .utils import TODOException, safe_requests_wrapper, MaximumRetriesExceeded
9
9
  from .app_config import AppConfig, CAPSULE_DEBUG, AuthType
10
10
  from . import experimental
@@ -53,7 +53,7 @@ class CapsuleStateMachine:
53
53
 
54
54
  def __init__(self, capsule_id: str, current_deployment_instance_version: str):
55
55
  self._capsule_id = capsule_id
56
- self._status_trail = []
56
+ self._status_trail: List[Dict[str, Any]] = []
57
57
  self._current_deployment_instance_version = current_deployment_instance_version
58
58
 
59
59
  def get_status_trail(self):
@@ -127,7 +127,7 @@ class CapsuleWorkersStateMachine:
127
127
  self._end_state_capsule_version = end_state_capsule_version
128
128
  self._deployment_mode = deployment_mode
129
129
  self._minimum_replicas = minimum_replicas
130
- self._status_trail = []
130
+ self._status_trail: List[Dict[str, Union[float, List[WorkerStatus]]]] = []
131
131
 
132
132
  def get_status_trail(self):
133
133
  return self._status_trail
@@ -187,7 +187,7 @@ class CapsuleWorkersStateMachine:
187
187
 
188
188
  @property
189
189
  def current_status(self) -> List[WorkerStatus]:
190
- return self._status_trail[-1].get("status")
190
+ return self._status_trail[-1].get("status") # type: ignore
191
191
 
192
192
  def current_version_deployment_status(self) -> CapsuleWorkerSemanticStatus:
193
193
  return _capsule_worker_semantic_status(
@@ -663,7 +663,7 @@ class CapsuleDeployer:
663
663
  """
664
664
  if capsule_response.get("version", None) != current_deployment_instance_version:
665
665
  raise CapsuleDeploymentException(
666
- self.identifier,
666
+ self.identifier, # type: ignore
667
667
  f"A capsule upgrade was triggered outside current deployment instance. Current deployment version was discarded. Current deployment version: {current_deployment_instance_version} and new version: {capsule_response.get('version', None)}",
668
668
  )
669
669
 
@@ -0,0 +1,24 @@
1
+ """
2
+ The purpose of this file is a little bit of cleverness to allow us to use the CLI in this package across
3
+ metaflow and outerbounds projects.
4
+
5
+ The issue is that since outerbounds and metaflow both vendor click, we can't use object from one import path
6
+ and expect them to work with objects created from the other import path.
7
+
8
+ Meaning `outerbounds._vendor.click.Group` and `metaflow._vendor.click.Group` are different classes.
9
+ So we need to ensure that based on when the import is taking place, we import the correct class.
10
+
11
+ Overall, this ONLY affects constructs in click we are using to construct related to the cli decorators but
12
+ it doesn't affect any capabilities in click for logging.
13
+ """
14
+ import os
15
+
16
+ # Import Hacks
17
+ if os.environ.get("APPS_CLI_LOADING_IN_METAFLOW", None):
18
+ from metaflow._vendor import click as metaflow_click
19
+
20
+ click = metaflow_click # type: ignore
21
+ else:
22
+ from outerbounds._vendor import click as outerbounds_click
23
+
24
+ click = outerbounds_click # type: ignore
@@ -43,7 +43,7 @@ def bake_deployment_image(
43
43
  # 1. When the user has specified something like `pypi`/`conda`
44
44
  # 2, When the user has specified something like `from_requirements`/ `from_pyproject`
45
45
  # TODO: add parsers for the pyproject/requirements stuff.
46
- from metaflow.ob_internal import bake_image
46
+ from metaflow.ob_internal import bake_image # type: ignore
47
47
  from metaflow.plugins.pypi.parsers import (
48
48
  requirements_txt_parser,
49
49
  pyproject_toml_parser,
@@ -83,7 +83,7 @@ def bake_deployment_image(
83
83
  python_packages_exist = len(pypi_packages) > 0 or len(conda_packages) > 0
84
84
  if (not python_packages_exist) or app_config.get_state("skip_dependencies", False):
85
85
  # Inform the user that no dependencies are being used.
86
- if app_config.get_state("skip_dependencies", False):
86
+ if app_config.get_state("skip_dependencies", False) and logger:
87
87
  logger(
88
88
  "⏭️ Skipping baking dependencies into the image based on the --no-deps flag."
89
89
  )
@@ -1,5 +1,5 @@
1
1
  from functools import wraps
2
- from metaflow._vendor import click
2
+ from ..click_importer import click
3
3
  import os
4
4
  from typing import TYPE_CHECKING
5
5
 
@@ -78,7 +78,14 @@ def capsule_input_overrides(app_config: "AppConfig", capsule_input: dict):
78
78
 
79
79
 
80
80
  def _code_info(app_config: "AppConfig"):
81
- from metaflow.metaflow_git import get_repository_info, _call_git
81
+ from metaflow.metaflow_git import get_repository_info
82
+
83
+ try:
84
+ from metaflow.metaflow_git import _call_git # type: ignore
85
+ except ImportError:
86
+ # Fallback if _call_git is not available
87
+ def _call_git(args, path=None):
88
+ return "", 1, True
82
89
 
83
90
  repo_info = get_repository_info(app_config.get_state("packaging_directory", None))
84
91
  if len(repo_info) == 0:
@@ -88,16 +95,16 @@ def _code_info(app_config: "AppConfig"):
88
95
  ["log", "-1", "--pretty=%B"],
89
96
  path=app_config.get_state("packaging_directory", None),
90
97
  )
91
- _url = (
92
- repo_info["repo_url"]
93
- if not repo_info["repo_url"].endswith(".git")
94
- else repo_info["repo_url"].rstrip(".git")
95
- )
98
+ repo_url = repo_info["repo_url"]
99
+ if isinstance(repo_url, str):
100
+ _url = repo_url if not repo_url.endswith(".git") else repo_url.rstrip(".git")
101
+ else:
102
+ _url = str(repo_url)
96
103
  _code_info = {
97
104
  "commitId": repo_info["commit_sha"],
98
- "commitLink": os.path.join(_url, "commit", repo_info["commit_sha"]),
105
+ "commitLink": os.path.join(_url, "commit", str(repo_info["commit_sha"])),
99
106
  }
100
- if not failed and returncode == 0:
107
+ if not failed and returncode == 0 and isinstance(git_log_info, str):
101
108
  _code_info["commitMessage"] = git_log_info.strip()
102
109
 
103
110
  return _code_info
@@ -1,11 +1,13 @@
1
1
  import os
2
2
  import json
3
- from typing import Tuple
3
+ from typing import Tuple, Union
4
4
 
5
5
 
6
6
  class PerimeterExtractor:
7
7
  @classmethod
8
- def for_ob_cli(cls, config_dir: str, profile: str) -> Tuple[str, str]:
8
+ def for_ob_cli(
9
+ cls, config_dir: str, profile: str
10
+ ) -> Union[Tuple[str, str], Tuple[None, None]]:
9
11
  """
10
12
  This function will be called when we are trying to extract the perimeter
11
13
  via the ob cli's execution. We will rely on the following logic:
@@ -35,17 +37,9 @@ class PerimeterExtractor:
35
37
  config_dir, profile, "OBP_API_SERVER"
36
38
  )
37
39
 
38
- return perimeter, api_server
39
-
40
- pass
40
+ return perimeter, api_server # type: ignore
41
41
 
42
42
  @classmethod
43
- def during_metaflow_execution(cls):
44
- pass # todo: implement this
45
-
46
-
47
- def general_purpose_perimeter_extractor(config_dir: str, profile: str) -> str:
48
- """
49
- This function will rely on a mix of configuration variables and utilities with ob package to extract the perimeter
50
- from the current environment.
51
- """
43
+ def during_metaflow_execution(cls) -> str:
44
+ # TODO: implement this
45
+ return ""
@@ -98,7 +98,7 @@ class SecretRetriever:
98
98
  raise OuterboundsSecretsException(f"Error decoding secret: {e}")
99
99
 
100
100
  def _get_secret_configs(self):
101
- from metaflow_extensions.outerbounds.remote_config import init_config
101
+ from metaflow_extensions.outerbounds.remote_config import init_config # type: ignore
102
102
  from os import environ
103
103
 
104
104
  conf = init_config()
outerbounds/apps/utils.py CHANGED
@@ -3,6 +3,11 @@ import time
3
3
  import sys
4
4
  import json
5
5
  import requests
6
+ from typing import Optional
7
+
8
+ # This click import is not used to construct any ob
9
+ # package cli. Its used only for printing stuff.
10
+ # So we can use the static metaflow._vendor import path
6
11
  from metaflow._vendor import click
7
12
  from .app_config import CAPSULE_DEBUG
8
13
  import sys
@@ -10,6 +15,7 @@ import threading
10
15
  import time
11
16
  import logging
12
17
  import itertools
18
+ from typing import Union, Callable, Any, List
13
19
 
14
20
  from outerbounds._vendor.spinner import (
15
21
  Spinners,
@@ -36,13 +42,13 @@ class MultiStepSpinner:
36
42
  self,
37
43
  spinner: Spinners = Spinners.dots,
38
44
  text: str = "",
39
- color: str = None,
45
+ color: Optional[str] = None,
40
46
  align: str = "right",
41
47
  file=sys.stdout,
42
48
  ):
43
49
  cfg = spinner.value
44
50
  self.frames = cfg["frames"]
45
- self.interval = cfg["interval"] / 1000.0
51
+ self.interval = float(cfg["interval"]) / 1000.0 # type: ignore
46
52
  self.text = text
47
53
  self.color = color
48
54
  if align not in ("left", "right"):
@@ -50,7 +56,7 @@ class MultiStepSpinner:
50
56
  self.align = align
51
57
  self._write_file = file
52
58
  # precompute clear length: max frame width + space + text length
53
- max_frame = max(self.frames, key=len)
59
+ max_frame = max(self.frames, key=lambda x: len(x)) # type: ignore
54
60
  self.clear_len = len(self.main_text) + len(max_frame) + 1
55
61
 
56
62
  self._stop_evt = threading.Event()
@@ -150,171 +156,6 @@ class MaximumRetriesExceeded(Exception):
150
156
  return f"Maximum retries exceeded for {self.url}[{self.method}] {self.status_code} {self.text}"
151
157
 
152
158
 
153
- class KeyValueDictPair(click.ParamType):
154
- name = "KV-DICT-PAIR"
155
-
156
- def convert(self, value, param, ctx):
157
- # Parse a string of the form KEY=VALUE into a dict {KEY: VALUE}
158
- if len(value.split("=", 1)) != 2:
159
- self.fail(
160
- f"Invalid format for {value}. Expected format: KEY=VALUE", param, ctx
161
- )
162
-
163
- key, _value = value.split("=", 1)
164
- try:
165
- return {"key": key, "value": json.loads(_value)}
166
- except json.JSONDecodeError:
167
- return {"key": key, "value": _value}
168
- except Exception as e:
169
- self.fail(f"Invalid value for {value}. Error: {e}", param, ctx)
170
-
171
- def __str__(self):
172
- return repr(self)
173
-
174
- def __repr__(self):
175
- return "KV-PAIR"
176
-
177
-
178
- class KeyValuePair(click.ParamType):
179
- name = "KV-PAIR"
180
-
181
- def convert(self, value, param, ctx):
182
- # Parse a string of the form KEY=VALUE into a dict {KEY: VALUE}
183
- if len(value.split("=", 1)) != 2:
184
- self.fail(
185
- f"Invalid format for {value}. Expected format: KEY=VALUE", param, ctx
186
- )
187
-
188
- key, _value = value.split("=", 1)
189
- try:
190
- return {key: json.loads(_value)}
191
- except json.JSONDecodeError:
192
- return {key: _value}
193
- except Exception as e:
194
- self.fail(f"Invalid value for {value}. Error: {e}", param, ctx)
195
-
196
- def __str__(self):
197
- return repr(self)
198
-
199
- def __repr__(self):
200
- return "KV-PAIR"
201
-
202
-
203
- class MountMetaflowArtifact(click.ParamType):
204
- name = "MOUNT-METAFLOW-ARTIFACT"
205
-
206
- def convert(self, value, param, ctx):
207
- """
208
- Convert a string like "flow=MyFlow,artifact=my_model,path=/tmp/abc" or
209
- "pathspec=MyFlow/123/foo/345/my_model,path=/tmp/abc" to a dict.
210
- """
211
- artifact_dict = {}
212
- parts = value.split(",")
213
-
214
- for part in parts:
215
- if "=" not in part:
216
- self.fail(
217
- f"Invalid format in part '{part}'. Expected 'key=value'", param, ctx
218
- )
219
-
220
- key, val = part.split("=", 1)
221
- artifact_dict[key.strip()] = val.strip()
222
-
223
- # Validate required fields
224
- if "pathspec" in artifact_dict:
225
- if "path" not in artifact_dict:
226
- self.fail(
227
- "When using 'pathspec', you must also specify 'path'", param, ctx
228
- )
229
-
230
- # Return as pathspec format
231
- return {
232
- "pathspec": artifact_dict["pathspec"],
233
- "path": artifact_dict["path"],
234
- }
235
- elif (
236
- "flow" in artifact_dict
237
- and "artifact" in artifact_dict
238
- and "path" in artifact_dict
239
- ):
240
- # Return as flow/artifact format
241
- result = {
242
- "flow": artifact_dict["flow"],
243
- "artifact": artifact_dict["artifact"],
244
- "path": artifact_dict["path"],
245
- }
246
-
247
- # Add optional namespace if provided
248
- if "namespace" in artifact_dict:
249
- result["namespace"] = artifact_dict["namespace"]
250
-
251
- return result
252
- else:
253
- self.fail(
254
- "Invalid format. Must be either 'flow=X,artifact=Y,path=Z' or 'pathspec=X,path=Z'",
255
- param,
256
- ctx,
257
- )
258
-
259
- def __str__(self):
260
- return repr(self)
261
-
262
- def __repr__(self):
263
- return "MOUNT-METAFLOW-ARTIFACT"
264
-
265
-
266
- class MountSecret(click.ParamType):
267
- name = "MOUNT-SECRET"
268
-
269
- def convert(self, value, param, ctx):
270
- """
271
- Convert a string like "id=my_secret,path=/tmp/secret" to a dict.
272
- """
273
- secret_dict = {}
274
- parts = value.split(",")
275
-
276
- for part in parts:
277
- if "=" not in part:
278
- self.fail(
279
- f"Invalid format in part '{part}'. Expected 'key=value'", param, ctx
280
- )
281
-
282
- key, val = part.split("=", 1)
283
- secret_dict[key.strip()] = val.strip()
284
-
285
- # Validate required fields
286
- if "id" in secret_dict and "path" in secret_dict:
287
- return {"id": secret_dict["id"], "path": secret_dict["path"]}
288
- else:
289
- self.fail("Invalid format. Must be 'key=X,path=Y'", param, ctx)
290
-
291
- def __str__(self):
292
- return repr(self)
293
-
294
- def __repr__(self):
295
- return "MOUNT-SECRET"
296
-
297
-
298
- class CommaSeparatedList(click.ParamType):
299
- name = "COMMA-SEPARATED-LIST"
300
-
301
- def convert(self, value, param, ctx):
302
- return value.split(",")
303
-
304
- def __str__(self):
305
- return repr(self)
306
-
307
- def __repr__(self):
308
- return "COMMA-SEPARATED-LIST"
309
-
310
-
311
- KVPairType = KeyValuePair()
312
- MetaflowArtifactType = MountMetaflowArtifact()
313
- SecretMountType = MountSecret()
314
- CommaSeparatedListType = CommaSeparatedList()
315
- KVDictType = KeyValueDictPair()
316
-
317
-
318
159
  class TODOException(Exception):
319
160
  pass
320
161
 
@@ -7,6 +7,7 @@ from . import (
7
7
  tutorials_cli,
8
8
  fast_bakery_cli,
9
9
  secrets_cli,
10
+ kubernetes_cli,
10
11
  )
11
12
 
12
13
 
@@ -20,6 +21,7 @@ from . import (
20
21
  tutorials_cli.cli,
21
22
  fast_bakery_cli.cli,
22
23
  secrets_cli.cli,
24
+ kubernetes_cli.cli,
23
25
  ],
24
26
  )
25
27
  def cli(**kwargs):
@@ -0,0 +1,479 @@
1
+ import json
2
+ import base64
3
+ import boto3
4
+ import requests
5
+ import os
6
+ import time
7
+ from datetime import datetime
8
+ from functools import partial
9
+
10
+ from os import path, environ
11
+ from sys import exit
12
+ from outerbounds._vendor import click
13
+
14
+ from ..utils import metaflowconfig
15
+
16
+
17
+ def _logger(
18
+ body="", system_msg=False, head="", bad=False, timestamp=True, nl=True, color=None
19
+ ):
20
+ if timestamp:
21
+ if timestamp is True:
22
+ dt = datetime.now()
23
+ else:
24
+ dt = timestamp
25
+ tstamp = dt.strftime("%Y-%m-%d %H:%M:%S.%f")[:-3]
26
+ click.secho(tstamp + " ", fg=ColorTheme.TIMESTAMP, nl=False)
27
+ if head:
28
+ click.secho(head, fg=ColorTheme.INFO_COLOR, nl=False)
29
+ click.secho(
30
+ body,
31
+ bold=system_msg,
32
+ fg=ColorTheme.BAD_COLOR if bad else color if color is not None else None,
33
+ nl=nl,
34
+ )
35
+
36
+
37
+ class ColorTheme:
38
+ TIMESTAMP = "magenta"
39
+ LOADING_COLOR = "cyan"
40
+ BAD_COLOR = "red"
41
+ INFO_COLOR = "green"
42
+
43
+ TL_HEADER_COLOR = "magenta"
44
+ ROW_COLOR = "bright_white"
45
+
46
+ INFO_KEY_COLOR = "green"
47
+ INFO_VALUE_COLOR = "bright_white"
48
+
49
+
50
+ def print_table(data, headers):
51
+ """Print data in a formatted table."""
52
+
53
+ if not data:
54
+ return
55
+
56
+ # Calculate column widths
57
+ col_widths = [len(h) for h in headers]
58
+
59
+ # Calculate actual widths based on data
60
+ for row in data:
61
+ for i, cell in enumerate(row):
62
+ col_widths[i] = max(col_widths[i], len(str(cell)))
63
+
64
+ # Print header
65
+ header_row = " | ".join(
66
+ [headers[i].ljust(col_widths[i]) for i in range(len(headers))]
67
+ )
68
+ click.secho("-" * len(header_row), fg=ColorTheme.TL_HEADER_COLOR)
69
+ click.secho(header_row, fg=ColorTheme.TL_HEADER_COLOR, bold=True)
70
+ click.secho("-" * len(header_row), fg=ColorTheme.TL_HEADER_COLOR)
71
+
72
+ # Print data rows
73
+ for row in data:
74
+ formatted_row = " | ".join(
75
+ [str(row[i]).ljust(col_widths[i]) for i in range(len(row))]
76
+ )
77
+ click.secho(formatted_row, fg=ColorTheme.ROW_COLOR, bold=True)
78
+ click.secho("-" * len(header_row), fg=ColorTheme.TL_HEADER_COLOR)
79
+
80
+
81
+ def _get_kubernetes_client():
82
+ """Get kubernetes client from metaflow configuration."""
83
+ from metaflow.plugins.kubernetes.kubernetes_client import KubernetesClient
84
+
85
+ return KubernetesClient()
86
+
87
+
88
+ def _get_current_user():
89
+ """Get current user from environment or metaflow config."""
90
+ # Try to get user from metaflow config first
91
+ try:
92
+ from metaflow.util import get_username
93
+
94
+ user = get_username()
95
+ if user:
96
+ return user
97
+ except:
98
+ pass
99
+
100
+ # Fallback to environment variables
101
+ raise click.ClickException("Failed to get current user")
102
+
103
+
104
+ def _format_jobs_and_jobsets_table(
105
+ jobs_with_outcomes, jobsets_with_outcomes, filter_unchanged=True
106
+ ):
107
+ """Format jobs and jobsets into a table for display."""
108
+ headers = [
109
+ "Type",
110
+ "Name",
111
+ "Namespace",
112
+ "Status",
113
+ "Outcome",
114
+ "Created",
115
+ "Flow",
116
+ "Run ID",
117
+ "User",
118
+ ]
119
+ table_data = []
120
+
121
+ # Add jobs to table
122
+ for job, outcome in jobs_with_outcomes:
123
+ # Filter out unchanged resources if requested
124
+ if filter_unchanged and outcome == "leave_unchanged":
125
+ continue
126
+
127
+ annotations = job.metadata.annotations or {}
128
+
129
+ # Format creation timestamp
130
+ created_time = "N/A"
131
+ if job.metadata.creation_timestamp:
132
+ created_time = job.metadata.creation_timestamp.strftime("%Y-%m-%d %H:%M:%S")
133
+
134
+ table_data.append(
135
+ [
136
+ "Job",
137
+ job.metadata.name,
138
+ job.metadata.namespace,
139
+ str(job.status.active or 0) + " active"
140
+ if job.status.active
141
+ else "inactive",
142
+ outcome,
143
+ created_time,
144
+ annotations.get("metaflow/flow_name", "N/A"),
145
+ annotations.get("metaflow/run_id", "N/A"),
146
+ annotations.get("metaflow/user", "N/A"),
147
+ ]
148
+ )
149
+
150
+ # Add jobsets to table
151
+ for jobset, outcome in jobsets_with_outcomes:
152
+ # Filter out unchanged resources if requested
153
+ if filter_unchanged and outcome == "leave_unchanged":
154
+ continue
155
+
156
+ metadata = jobset.get("metadata", {})
157
+ annotations = metadata.get("annotations", {})
158
+ status = jobset.get("status", {})
159
+
160
+ # Format creation timestamp
161
+ created_time = "N/A"
162
+ creation_timestamp = metadata.get("creationTimestamp")
163
+ if creation_timestamp:
164
+ try:
165
+ from datetime import datetime
166
+
167
+ # Parse ISO timestamp
168
+ dt = datetime.fromisoformat(creation_timestamp.replace("Z", "+00:00"))
169
+ created_time = dt.strftime("%Y-%m-%d %H:%M:%S")
170
+ except:
171
+ created_time = (
172
+ creation_timestamp[:19]
173
+ if len(creation_timestamp) >= 19
174
+ else creation_timestamp
175
+ )
176
+
177
+ table_data.append(
178
+ [
179
+ "JobSet",
180
+ metadata.get("name", "N/A"),
181
+ metadata.get("namespace", "N/A"),
182
+ "terminal" if status.get("terminalState") else "running",
183
+ outcome,
184
+ created_time,
185
+ annotations.get("metaflow/flow_name", "N/A"),
186
+ annotations.get("metaflow/run_id", "N/A"),
187
+ annotations.get("metaflow/user", "N/A"),
188
+ ]
189
+ )
190
+
191
+ return headers, table_data
192
+
193
+
194
+ @click.group()
195
+ def cli(**kwargs):
196
+ pass
197
+
198
+
199
+ @click.group(help="Commands for interacting with Kubernetes.")
200
+ def kubernetes(**kwargs):
201
+ pass
202
+
203
+
204
@kubernetes.command(help="Kill pods/jobs/jobsets for a specific flow.")
@click.option("--flow-name", required=True, help="Flow name to kill pods for")
@click.option("--run-id", help="Specific run ID to kill pods for")
@click.option("--my-runs", is_flag=True, help="Only kill runs by current user")
@click.option(
    "--dry-run", is_flag=True, help="Show what would be killed without actually killing"
)
@click.option("--auto-approve", is_flag=True, help="Skip confirmation prompt")
@click.option(
    "--clear-everything",
    is_flag=True,
    help="Force delete ALL matching resources regardless of their status (including terminal/completed ones)",
)
def kill(flow_name, run_id, my_runs, dry_run, auto_approve, clear_everything):
    """Kill pods/jobs/jobsets for a specific flow.

    Workflow:
      1. Discover jobs/jobsets matching ``flow_name`` (optionally narrowed by
         ``run_id`` and, with ``--my-runs``, the current user).
      2. Show a table of what will be affected plus a summary.
      3. Unless ``--dry-run``/``--auto-approve``, prompt for confirmation.
      4. Delegate the actual deletion to ``PodKiller`` with a progress bar.

    In normal mode resources already in a terminal state are skipped; with
    ``--clear-everything`` ALL matching resources are force-deleted.

    Exits with status 1 on discovery errors or when the user declines the
    confirmation prompt.
    """
    import warnings

    # Project-internal helper that encapsulates the Kubernetes delete logic.
    from metaflow.ob_internal import PodKiller  # type: ignore

    warnings.filterwarnings("ignore")

    logger = partial(_logger, timestamp=True)

    # Get kubernetes client
    kubernetes_client = _get_kubernetes_client()

    # Determine user filter (only applied when --my-runs was passed).
    user = None
    if my_runs:
        user = _get_current_user()
        logger(f"🔍 Filtering for runs by user: {user}", color=ColorTheme.INFO_COLOR)

    # First PodKiller instance is used only for discovery (no progress bar).
    pod_killer = PodKiller(
        kubernetes_client=kubernetes_client.get(),
        echo_func=lambda x: None,
        namespace=kubernetes_client._namespace,
    )

    # Find matching jobs and jobsets
    logger(
        f"🔍 Searching for jobs and jobsets matching flow: {flow_name}",
        color=ColorTheme.INFO_COLOR,
    )
    if run_id:
        logger(f"🔍 Filtering by run ID: {run_id}", color=ColorTheme.INFO_COLOR)

    try:
        (
            jobs_with_outcomes,
            jobsets_with_outcomes,
        ) = pod_killer.extract_matching_jobs_and_jobsets(
            flow_name=flow_name, run_id=run_id, user=user
        )
    except Exception as e:
        logger(f"Error finding matching resources: {e}", bad=True, system_msg=True)
        # raise SystemExit instead of the site-provided `exit` helper, which
        # is not guaranteed to exist in all interpreter setups.
        raise SystemExit(1)

    # Check if anything was found
    total_resources_found = len(jobs_with_outcomes) + len(jobsets_with_outcomes)
    if total_resources_found == 0:
        logger("✅ No matching jobs or jobsets found.", color=ColorTheme.INFO_COLOR)
        return

    # Calculate resources that will be processed
    if clear_everything:
        # Process ALL resources regardless of status
        jobs_to_process = len(jobs_with_outcomes)
        jobsets_to_process = len(jobsets_with_outcomes)
        total_to_process = jobs_to_process + jobsets_to_process
        filter_table = False

        # Show warning for clear-everything mode
        logger(
            "⚠️ CLEAR EVERYTHING MODE: All matching resources will be force deleted regardless of status!",
            color=ColorTheme.BAD_COLOR,
            system_msg=True,
        )
    else:
        # Normal mode: only process resources not in terminal state
        # (an outcome of "leave_unchanged" means the resource is already done).
        jobs_to_process = len(
            [j for j, outcome in jobs_with_outcomes if outcome != "leave_unchanged"]
        )
        jobsets_to_process = len(
            [j for j, outcome in jobsets_with_outcomes if outcome != "leave_unchanged"]
        )
        total_to_process = jobs_to_process + jobsets_to_process
        filter_table = True

    # Display what will be affected
    headers, table_data = _format_jobs_and_jobsets_table(
        jobs_with_outcomes, jobsets_with_outcomes, filter_unchanged=filter_table
    )

    if total_to_process == 0:
        logger(
            "✅ All matching resources are already in terminal state. Nothing to do.",
            color=ColorTheme.INFO_COLOR,
        )
        return

    if dry_run:
        logger(
            "=== DRY RUN - The following resources would be affected ===",
            color=ColorTheme.INFO_COLOR,
            system_msg=True,
            timestamp=False,
        )
    else:
        logger(
            "=== The following resources will be killed/deleted ===",
            color=ColorTheme.BAD_COLOR,
            system_msg=True,
            timestamp=False,
        )

    print_table(table_data, headers)

    # Show summary
    logger(
        "📊 Summary:",
    )
    logger(
        f" • Total resources found: {total_resources_found}",
    )
    logger(
        f" • Jobs to process: {jobs_to_process}",
    )
    logger(
        f" • JobSets to process: {jobsets_to_process}",
    )
    logger(
        f" • Resources to process: {total_to_process}",
    )

    if clear_everything:
        logger(
            " • Mode: CLEAR EVERYTHING (forcing deletion of ALL resources)",
            color=ColorTheme.BAD_COLOR,
        )
    else:
        # Show how many are being skipped in normal mode
        skipped_resources = total_resources_found - total_to_process
        if skipped_resources > 0:
            logger(
                f" • Resources already in terminal state (skipped): {skipped_resources}",
                color=ColorTheme.INFO_COLOR,
            )

    if dry_run:
        logger(
            "🔍 Dry run completed. No resources were actually killed.",
            color=ColorTheme.INFO_COLOR,
            system_msg=True,
        )
        return

    # Confirm before proceeding (unless auto-approve is set)
    if not auto_approve:
        confirm = click.prompt(
            click.style(
                f"⚠️ Are you sure you want to kill/delete {total_to_process} resources?",
                fg=ColorTheme.BAD_COLOR,
                bold=True,
            ),
            default="no",
            type=click.Choice(["yes", "no"]),
        )
        if confirm == "no":
            logger("❌ Operation cancelled.", color=ColorTheme.BAD_COLOR)
            raise SystemExit(1)

    # Execute the kills/deletions
    logger(
        f"🚀 Processing {total_to_process} resources...",
        color=ColorTheme.INFO_COLOR,
        system_msg=True,
    )

    try:
        progress_label = (
            f"⚰️ Coffin: Deleting jobs and jobsets matching flow: {flow_name}"
        )
        if clear_everything:
            progress_label = (
                f"🔥 CLEAR ALL: Force deleting ALL resources for flow: {flow_name}"
            )

        __progress_bar = click.progressbar(
            length=total_to_process,
            label=click.style(
                progress_label,
                fg=ColorTheme.BAD_COLOR if clear_everything else ColorTheme.INFO_COLOR,
                bold=True,
            ),
            fill_char=click.style(
                "█",
                fg=ColorTheme.BAD_COLOR if clear_everything else ColorTheme.INFO_COLOR,
                bold=True,
            ),
            empty_char=click.style(
                "░",
                fg=ColorTheme.BAD_COLOR if clear_everything else ColorTheme.INFO_COLOR,
                bold=True,
            ),
            item_show_func=lambda x: click.style(
                x,
                fg=ColorTheme.BAD_COLOR,
                bold=True,
            ),
        )

        # Second PodKiller instance: same client/namespace, but wired to the
        # progress bar for the destructive phase.
        pod_killer = PodKiller(
            kubernetes_client=kubernetes_client.get(),
            echo_func=lambda x: None,
            namespace=kubernetes_client._namespace,
            progress_bar=__progress_bar,
        )

        if clear_everything:
            # Force delete everything mode
            (
                results,
                jobs_processed,
                jobsets_processed,
            ) = pod_killer.process_matching_jobs_and_jobsets_force_all(
                flow_name=flow_name, run_id=run_id, user=user
            )
        else:
            # Normal mode
            (
                results,
                jobs_processed,
                jobsets_processed,
            ) = pod_killer.process_matching_jobs_and_jobsets(
                flow_name=flow_name, run_id=run_id, user=user
            )

        # Report results. `results` is presumably a list of per-resource
        # True/False/other markers — only strict booleans are counted.
        successful_operations = sum(1 for r in results if r is True)
        failed_operations = sum(1 for r in results if r is False)

        logger(
            "📊 Operation completed:",
        )
        logger(
            f" • Jobs processed: {jobs_processed}",
        )
        logger(
            f" • JobSets processed: {jobsets_processed}",
        )
        logger(
            f" • Successful operations: {successful_operations}",
        )

        if failed_operations > 0:
            logger(
                f" • Failed operations: {failed_operations}",
                color=ColorTheme.BAD_COLOR,
            )
            logger(
                "⚠️ Some operations failed. Check the logs above for details.",
                color=ColorTheme.BAD_COLOR,
                system_msg=True,
            )
        else:
            logger(
                "✅ All operations completed successfully!",
                color=ColorTheme.INFO_COLOR,
                system_msg=True,
            )

    except Exception as e:
        logger(f"Error during kill operation: {e}", bad=True, system_msg=True)
        # Bare `raise` preserves the original traceback (``raise e`` would
        # re-anchor it here).
        raise
477
+
478
+
479
+ cli.add_command(kubernetes, name="kubernetes")
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: outerbounds
3
- Version: 0.3.180rc5
3
+ Version: 0.3.182rc0
4
4
  Summary: More Data Science, Less Administration
5
5
  License: Proprietary
6
6
  Keywords: data science,machine learning,MLOps
@@ -29,8 +29,8 @@ Requires-Dist: google-cloud-secret-manager (>=2.20.0,<3.0.0) ; extra == "gcp"
29
29
  Requires-Dist: google-cloud-storage (>=2.14.0,<3.0.0) ; extra == "gcp"
30
30
  Requires-Dist: metaflow-checkpoint (==0.2.1)
31
31
  Requires-Dist: ob-metaflow (==2.15.17.1)
32
- Requires-Dist: ob-metaflow-extensions (==1.1.168rc5)
33
- Requires-Dist: ob-metaflow-stubs (==6.0.3.180rc5)
32
+ Requires-Dist: ob-metaflow-extensions (==1.1.170rc0)
33
+ Requires-Dist: ob-metaflow-stubs (==6.0.3.182rc0)
34
34
  Requires-Dist: opentelemetry-distro (>=0.41b0) ; extra == "otel"
35
35
  Requires-Dist: opentelemetry-exporter-otlp-proto-http (>=1.20.0) ; extra == "otel"
36
36
  Requires-Dist: opentelemetry-instrumentation-requests (>=0.41b0) ; extra == "otel"
@@ -43,28 +43,30 @@ outerbounds/_vendor/yaml/scanner.py,sha256=ZcI8IngR56PaQ0m27WU2vxCqmDCuRjz-hr7pi
43
43
  outerbounds/_vendor/yaml/serializer.py,sha256=8wFZRy9SsQSktF_f9OOroroqsh4qVUe53ry07P9UgCc,4368
44
44
  outerbounds/_vendor/yaml/tokens.py,sha256=JBSu38wihGr4l73JwbfMA7Ks1-X84g8-NskTz7KwPmA,2578
45
45
  outerbounds/apps/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
46
- outerbounds/apps/_state_machine.py,sha256=3hQF5O2zJdtQWdy9e5w393O85u6UjGApqTMlRU3UhFk,12964
47
- outerbounds/apps/app_cli.py,sha256=j_0vEQlsKGgvlPidog4bSZrY-_D1ne3JmHVafijL-iI,45354
48
- outerbounds/apps/app_config.py,sha256=KBmW9grhiuG9XZG-R0GZkM-024cjj6ztGzOX_2wZW34,11291
46
+ outerbounds/apps/_state_machine.py,sha256=ixgL--jne3q71gQNnUeK-UdLP-Oc2kSGSsIW1fiHZPY,14469
47
+ outerbounds/apps/app_cli.py,sha256=vTbIN43A9A_OHTM0I6cb28g3GtY4_jk1wmUi5wG09w0,50174
48
+ outerbounds/apps/app_config.py,sha256=UHVK8JLIuW-OcGg5WxDm4QHeImPGtohD4KpJryZntC4,11307
49
49
  outerbounds/apps/artifacts.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
50
- outerbounds/apps/capsule.py,sha256=YoOkUYGPrbXPaSUwS3teui1XPJDZCIxn-9tPpm1X0GM,30927
50
+ outerbounds/apps/capsule.py,sha256=IYaD5X-IpSD9IsLpIJUaJ2B31up7PY7GhMMizkwLV7I,31039
51
51
  outerbounds/apps/cli_to_config.py,sha256=Thc5jXRxoU6Pr8kAVVOX-5Es5ha6y6Vh_GBzL__oI7Q,3299
52
+ outerbounds/apps/click_importer.py,sha256=nnkPOR6TKrtIpc3a5Fna1zVJoQqDZvUXlNA9CdiNKFc,995
52
53
  outerbounds/apps/code_package/__init__.py,sha256=8McF7pgx8ghvjRnazp2Qktlxi9yYwNiwESSQrk-2oW8,68
53
54
  outerbounds/apps/code_package/code_packager.py,sha256=RWvM5BKjgLhu7icsO_n5SSYC57dwyST0dWpoWF88ovU,22881
54
55
  outerbounds/apps/code_package/examples.py,sha256=aF8qKIJxCVv_ugcShQjqUsXKKKMsm1oMkQIl8w3QKuw,4016
55
56
  outerbounds/apps/config_schema.yaml,sha256=j_mysTAPkIMSocItTg3aduMDfBs2teIhAErvpF0Elus,8826
56
- outerbounds/apps/dependencies.py,sha256=UucyQYZ5VjUPBb3XzAARa4fuiLHXuV7iMZ8OZ_nAuE8,3949
57
+ outerbounds/apps/dependencies.py,sha256=03pZY-JRN-dYN-iyZ73zoEIEKmrOvbY4qld7RlRXYuw,3976
57
58
  outerbounds/apps/deployer.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
58
- outerbounds/apps/experimental/__init__.py,sha256=ExPFIJSF8FcE1pKVyiNQnX8aBi3Rz8YFQ2_s5NVeU7I,3056
59
- outerbounds/apps/perimeters.py,sha256=DKWKMbkSx8WObf4yA74UolleT417B9c_6POioztMHxY,1766
60
- outerbounds/apps/secrets.py,sha256=sBMJzwjsARhtcuxgn18i66lagllpYOlh2oXR_hVAb1Y,6110
61
- outerbounds/apps/utils.py,sha256=NbEpSrzk-l95EBFoWuxil_0xjpIuFt0jjIXD2r1w71M,12118
59
+ outerbounds/apps/experimental/__init__.py,sha256=RUZBAyqFnX3pRQxTjNmS1-qpgQcc9xQGQD2yJh4MA_M,3349
60
+ outerbounds/apps/perimeters.py,sha256=1J1_-5legFPskv3HTRwQMpzTytE3TO8KRT2IvVOrWcQ,1584
61
+ outerbounds/apps/secrets.py,sha256=aWzcAayQEJghQgFP_qp9w6jyvan_hoL4_ceqZ0ZjLd4,6126
62
+ outerbounds/apps/utils.py,sha256=6REvD9PtJcLYzrxX5lZ5Dzzm-Sy2l-I1oSzQN9viYRs,7611
62
63
  outerbounds/apps/validations.py,sha256=kR2eXckx0XJ4kUOOLkMRepbTh0INtL1Z8aV4-fZpfc8,678
63
64
  outerbounds/cli_main.py,sha256=e9UMnPysmc7gbrimq2I4KfltggyU7pw59Cn9aEguVcU,74
64
65
  outerbounds/command_groups/__init__.py,sha256=QPWtj5wDRTINDxVUL7XPqG3HoxHNvYOg08EnuSZB2Hc,21
65
66
  outerbounds/command_groups/apps_cli.py,sha256=ecXyLhGxjbct62iqviP9qBX8s4d-XG56ICpTM2h2etk,20821
66
- outerbounds/command_groups/cli.py,sha256=de4_QY1UeoKX6y-IXIbmklAi6bz0DsdBSmAoCg6lq1o,482
67
+ outerbounds/command_groups/cli.py,sha256=FTeeDrvyBb-qcs2xklTiCyVTN5I0tBPyBReqDIE4oWU,530
67
68
  outerbounds/command_groups/fast_bakery_cli.py,sha256=5kja7v6C651XAY6dsP_IkBPJQgfU4hA4S9yTOiVPhW0,6213
69
+ outerbounds/command_groups/kubernetes_cli.py,sha256=2bxPKUp5g_gdwVo4lT-IeWvHxz6Jmj1KxG70nXNgX_M,14758
68
70
  outerbounds/command_groups/local_setup_cli.py,sha256=tuuqJRXQ_guEwOuQSIf9wkUU0yg8yAs31myGViAK15s,36364
69
71
  outerbounds/command_groups/perimeters_cli.py,sha256=iF_Uw7ROiSctf6FgoJEy30iDBLVE1j9FKuR3shgJRmc,19050
70
72
  outerbounds/command_groups/secrets_cli.py,sha256=Vgn_aiTo76a0s5hCJhNWEOrCVhyYeivD08ooQxz0y7c,2952
@@ -76,7 +78,7 @@ outerbounds/utils/metaflowconfig.py,sha256=l2vJbgPkLISU-XPGZFaC8ZKmYFyJemlD6bwB-
76
78
  outerbounds/utils/schema.py,sha256=lMUr9kNgn9wy-sO_t_Tlxmbt63yLeN4b0xQXbDUDj4A,2331
77
79
  outerbounds/utils/utils.py,sha256=4Z8cszNob_8kDYCLNTrP-wWads_S_MdL3Uj3ju4mEsk,501
78
80
  outerbounds/vendor.py,sha256=gRLRJNXtZBeUpPEog0LOeIsl6GosaFFbCxUvR4bW6IQ,5093
79
- outerbounds-0.3.180rc5.dist-info/METADATA,sha256=bhULcwevS8zjmFWtUdm_NqRqF1vYNqx1z-NGfBmKpb8,1846
80
- outerbounds-0.3.180rc5.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
81
- outerbounds-0.3.180rc5.dist-info/entry_points.txt,sha256=AP6rZg7y5SK9e9a9iVq0Fi9Q2KPjPZSwtZ6R98rLw-8,56
82
- outerbounds-0.3.180rc5.dist-info/RECORD,,
81
+ outerbounds-0.3.182rc0.dist-info/METADATA,sha256=Y51QdSpzQvPHckh4lLqNcic0u-gM1sIi4rX2K4RmUPA,1846
82
+ outerbounds-0.3.182rc0.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
83
+ outerbounds-0.3.182rc0.dist-info/entry_points.txt,sha256=AP6rZg7y5SK9e9a9iVq0Fi9Q2KPjPZSwtZ6R98rLw-8,56
84
+ outerbounds-0.3.182rc0.dist-info/RECORD,,