ob-metaflow-extensions 1.1.175rc3__py2.py3-none-any.whl → 1.2.0__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of ob-metaflow-extensions might be problematic.

@@ -324,7 +324,6 @@ CLIS_DESC = [
  ("nvct", ".nvct.nvct_cli.cli"),
  ("fast-bakery", ".fast_bakery.fast_bakery_cli.cli"),
  ("snowpark", ".snowpark.snowpark_cli.cli"),
- ("app", ".apps.app_cli.cli"),
  ]
  STEP_DECORATORS_DESC = [
  ("nvidia", ".nvcf.nvcf_decorator.NvcfDecorator"),
@@ -1,3 +0,0 @@
- from .core import app_cli as ob_apps_cli
-
- cli = ob_apps_cli.cli
@@ -1,4 +1,24 @@
- from typing import List, Tuple, Dict, Union
+ import sys
+ from typing import TYPE_CHECKING, Dict, List, Tuple, Union
+
+
+ # on 3.8+ use the stdlib TypedDict;
+ # in TYPE_CHECKING blocks mypy/pyright still pick it up on older Pythons
+ if sys.version_info >= (3, 8):
+ from typing import TypedDict
+ else:
+ if TYPE_CHECKING:
+ # for the benefit of type-checkers
+ from typing import TypedDict # noqa: F401
+ # runtime no-op TypedDict shim
+ class _TypedDictMeta(type):
+ def __new__(cls, name, bases, namespace, total=True):
+ # ignore total at runtime
+ return super().__new__(cls, name, bases, namespace)
+
+ class TypedDict(dict, metaclass=_TypedDictMeta):
+ # Runtime stand-in for typing.TypedDict on <3.8.
+ pass


  class _dagNode:
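
The shim above keeps TypedDict-based declarations importable on Python 3.7 while type checkers still see the real typing.TypedDict. A rough illustration of what it enables (the sample values are hypothetical, not package code):

    class AccessInfo(TypedDict):
        outOfClusterURL: str
        inClusterURL: str

    # On Python < 3.8 this is effectively a plain dict subclass at runtime;
    # on 3.8+ it is a genuine typing.TypedDict.
    info: AccessInfo = {"outOfClusterURL": "https://example.test", "inClusterURL": "http://svc.local"}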
@@ -134,9 +154,6 @@ class _capsuleDeployerStateMachine:
  dot.render("state_machine", view=False)


- from typing import TypedDict
-
-
  class AccessInfo(TypedDict):
  outOfClusterURL: str
  inClusterURL: str
@@ -158,9 +175,6 @@ class WorkerStatus(TypedDict):
  version: str


- from typing import Dict, List, TypedDict
-
-
  class WorkerInfoDict(TypedDict):
  # TODO : Check if we need to account for the `Terminating` state
  pending: Dict[str, List[WorkerStatus]]
@@ -191,7 +205,7 @@ class DEPLOYMENT_READY_CONDITIONS:
  2) [all_running] Atleast min_replicas number of workers are running for the deployment to be considered ready.
  - Usecase: Operators may require that all replicas are available before traffic is routed. Needed when inference endpoints maybe under some SLA or require a larger load
  3) [fully_finished] Atleast min_replicas number of workers are running for the deployment and there are no pending or crashlooping workers from previous versions lying around.
- - Usecase: Ensuring endpoint is fully available and no other versions are running.
+ - Usecase: Ensuring the endpoint is fully available and no other versions are running, or the endpoint has been fully scaled down.
  4) [async] The deployment will be assumed ready as soon as the server responds with a 200.
  - Usecase: Operators may only care that the URL is minted for the deployment or the deployment eventually scales down to 0.
  """
@@ -203,7 +217,7 @@ class DEPLOYMENT_READY_CONDITIONS:
  # It doesn't imply that all the workers relating to other deployments have been torn down.
  ALL_RUNNING = "all_running"

- # `FULLY_FINISHED` implies that the deployment has the minimum number of replicas and all the workers are related to the current deployment instance's version.
+ # `FULLY_FINISHED` implies at least min_replicas workers are running for the deployment and there are no pending or crashlooping workers from previous versions lying around.
  FULLY_FINISHED = "fully_finished"

  # `ASYNC` implies that the deployment will be assumed ready after the URL is minted and the worker statuses are not checked.
@@ -273,10 +287,13 @@ class DEPLOYMENT_READY_CONDITIONS:
  and not capsule_status["updateInProgress"]
  )
  elif readiness_condition == cls.FULLY_FINISHED:
- _readiness_condition_satisfied = (
- worker_semantic_status["status"]["fully_finished"]
- and not capsule_status["updateInProgress"]
- )
+ # We don't wait for updateInProgress in this condition since
+ # updateInProgress can switch to false when users scale all replicas down to 0.
+ # So for this condition to be satisfied we rely only on the worker semantic status,
+ # i.e. the thing actually tracking what is running and what is not.
+ _readiness_condition_satisfied = worker_semantic_status["status"][
+ "fully_finished"
+ ]
  elif readiness_condition == cls.ASYNC:
  # The async readiness condition is satisfied immediately after the server responds
  # with the URL.
@@ -402,6 +419,11 @@ def _capsule_worker_status_diff(
  def _capsule_worker_semantic_status(
  workers: List[WorkerStatus], version: str, min_replicas: int
  ) -> CapsuleWorkerSemanticStatus:
+ def _filter_workers_by_phase(
+ workers: List[WorkerStatus], phase: str
+ ) -> List[WorkerStatus]:
+ return [w for w in workers if w.get("phase") == phase]
+
  def _make_version_dict(
  _workers: List[WorkerStatus], phase: str
  ) -> Dict[str, List[WorkerStatus]]:
@@ -447,8 +469,12 @@ def _capsule_worker_semantic_status(
  "all_running": count_for_version(running_workers) >= min_replicas,
  "fully_finished": (
  count_for_version(running_workers) >= min_replicas
- and len(pending_workers) == 0
- and len(crashlooping_workers) == 0
+ # count the workers of different versions that are running
+ # and ensure that only the current version's workers are running.
+ and count_for_version(running_workers)
+ == len(_filter_workers_by_phase(workers, "Running"))
+ and len(_filter_workers_by_phase(workers, "Pending")) == 0
+ and len(_filter_workers_by_phase(workers, "CrashLoopBackOff")) == 0
  ),
  "current_info": {
  "pending": count_for_version(pending_workers),
@@ -744,14 +744,6 @@ def list(ctx, project, branch, name, tags, format, auth_type):
  print_table(table_data, headers)


- @app.command()
- @auto_cli_options()
- @click.pass_context
- @click.argument("command", nargs=-1, type=click.UNPROCESSED, required=False)
- def deploy2(ctx, **kwargs):
- pass
-
-
  @app.command(help="Delete an app/apps from the Outerbounds Platform.")
  @click.option("--name", type=str, help="Filter app to delete by name")
  @click.option("--id", "cap_id", type=str, help="Filter app to delete by id")
@@ -206,7 +206,7 @@ class CapsuleWorkersStateMachine:

  class CapsuleInput:
  @classmethod
- def construct_exec_command(cls, commands: list[str]):
+ def construct_exec_command(cls, commands: List[str]):
  commands = ["set -eEuo pipefail"] + commands
  command_string = "\n".join(commands)
  # First constuct a base64 encoded string of the quoted command
@@ -254,7 +254,7 @@ class CapsuleInput:
  replicas.get("min"),
  replicas.get("max"),
  )
- if fixed:
+ if fixed is not None:
  _min, _max = fixed, fixed
  gpu_resource = app_config.get_state("resources").get("gpu")
  resources = {}
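
This `if fixed:` to `if fixed is not None:` change (and the matching one in _get_min_replicas in the next hunk) matters when the fixed replica count is 0: zero is falsy, so the old check silently ignored an explicit scale-to-zero value. A small illustration (hypothetical values, not package code):

    def min_replicas(fixed, fallback_min):
        old = fixed if fixed else fallback_min               # old behaviour: 0 falls through
        new = fixed if fixed is not None else fallback_min   # new behaviour: 0 is honoured
        return old, new

    print(min_replicas(0, 1))  # (1, 0) -- only the new check lets the deployment scale to zero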
@@ -711,7 +711,7 @@ class CapsuleDeployer:
  def _get_min_replicas(self):
  replicas = self._app_config.get_state("replicas", {})
  fixed, _min, _ = replicas.get("fixed"), replicas.get("min"), replicas.get("max")
- if fixed:
+ if fixed is not None:
  return fixed
  return _min

@@ -740,9 +740,9 @@ class CapsuleDeployer:
  # We first need to check if someone has not upgraded the capsule under the hood and
  # the current deployment instance is invalid.
  self._backend_version_mismatch_check(
- capsule_response, self.current_deployment_instance_version
+ capsule_response, self.current_deployment_instance_version # type: ignore
  )
- state_machine.add_status(capsule_response.get("status", {}))
+ state_machine.add_status(capsule_response.get("status", {})) # type: ignore
  workers_state_machine.add_status(workers_response)
  state_machine.report_current_status(logger)

@@ -267,10 +267,13 @@ class ConfigField:
  self._qual_name_stack = []

  # This function allows config fields to be made aware of the
- # owner instance's names. Its via in the ConfigMeta classes'
- # _set_owner_instance function. But the _set_owner_instance gets
- # called within the ConfigField's __set__ function
- # (when the actual instance of the value is being set)
+ # owner instance's names. It's called from the `commit_owner_names_across_tree`
+ # decorator, once the config instance is completely ready and
+ # will not have any further runtime instance modifications done to it.
+ # The core intent is to ensure that the full config lineage tree is captured and
+ # we have a full trace of where the config is coming from so that we can showcase it
+ # to users when they make configuration errors. It also allows us to reference those
+ # config values in the error messages across different types of errors.
  def _set_owner_name(self, owner_name: str):
  self._qual_name_stack.append(owner_name)

@@ -288,7 +291,6 @@ class ConfigField:
  return instance.__dict__.get(self.name)

  def __set__(self, instance, value):
-
  if self.parsing_fn:
  value = self.parsing_fn(value)

@@ -299,12 +301,6 @@ class ConfigField:
  f"Value {value} is not of type {self.field_type} for the field {self.name}"
  )

- # We set the owner instance in the ConfigMeta based classes so they
- # propagate it down to the ConfigField based classes.
- if ConfigMeta.is_instance(value):
- for x in self._qual_name_stack + [self.name]:
- value._set_owner_instance(x)
-
  instance.__dict__[self.name] = value

  def __str__(self) -> str:
@@ -316,6 +312,55 @@ class ConfigField:
  return f"<ConfigField name='{self.name}' type={type_name} default={self.default!r}>"


+ # Add this decorator function before the ConfigMeta class
+ # One of the core utilities of the ConfigMeta class
+ # is that we can track the tree of elements in the Config
+ # class, which allows us to make those visible at runtime when
+ # the user has some configuration error. It also allows us to
+ # figure out what exactly the user is trying to configure and where
+ # that configuration is coming from.
+ # Hence this decorator is set on whatever ConfigMeta-based function needs it,
+ # so that when it gets called, the full call tree is properly set.
+ def commit_owner_names_across_tree(func):
+ """
+ Decorator that commits owner names across the configuration tree before executing the decorated function.
+
+ This decorator ensures that all ConfigField instances in the configuration tree are aware of their
+ fully qualified names by traversing the tree and calling _set_owner_name on each field.
+ """
+
+ def wrapper(self, *args, **kwargs):
+ def _commit_owner_names_recursive(instance, field_name_stack=None):
+ if field_name_stack is None:
+ field_name_stack = []
+
+ if not ConfigMeta.is_instance(instance):
+ return
+
+ fields = instance._fields # type: ignore
+ # fields is a dictionary of field_name: ConfigField
+ for field_name, field_info in fields.items():
+ if ConfigMeta.is_instance(field_info.field_type):
+ # extract the actual instance of the ConfigMeta class
+ _instance = instance.__dict__[field_name]
+ # The instance should hold the _commit_owner_names_across_tree
+ _commit_owner_names_recursive(
+ _instance, field_name_stack + [field_name]
+ )
+ else:
+ if len(field_name_stack) > 0:
+ for x in field_name_stack:
+ field_info._set_owner_name(x) # type: ignore
+
+ # Commit owner names before executing the original function
+ _commit_owner_names_recursive(self)
+
+ # Execute the original function
+ return func(self, *args, **kwargs)
+
+ return wrapper
+
+
  class ConfigMeta(type):
  """Metaclass implementing the configuration system's class transformation layer.

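
The point of committing owner names right before commit() runs is that every leaf field then knows the chain of owner names above it, so validation errors can name a fully qualified path rather than a bare field name. A rough sketch of the same traversal over plain dicts (field names are illustrative; the real decorator walks _fields on ConfigMeta instances):

    def qualified_names(tree, prefix=()):
        # walk nested mappings and yield "outer.inner.leaf" paths for every leaf value
        for name, value in tree.items():
            if isinstance(value, dict):
                yield from qualified_names(value, prefix + (name,))
            else:
                yield ".".join(prefix + (name,)), value

    config = {"resources": {"memory": "512Mi", "gpu": None}, "port": 8000}
    print(dict(qualified_names(config)))
    # {'resources.memory': '512Mi', 'resources.gpu': None, 'port': 8000}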
@@ -395,10 +440,6 @@ class ConfigMeta(type):
  if isinstance(value, ConfigField):
  fields[key] = value

- def _set_owner_to_instance(self, instance_name: str):
- for field_name, field_info in fields.items(): # field_info is a ConfigField
- field_info._set_owner_name(instance_name)
-
  # Store fields metadata on the class
  namespace["_fields"] = fields

@@ -407,7 +448,6 @@ class ConfigMeta(type):
  return fields[field_name]

  namespace["_get_field"] = get_field
- namespace["_set_owner_instance"] = _set_owner_to_instance

  # Auto-generate __init__ method;
  # Override it for all classes.
@@ -2,11 +2,38 @@
  Auto-generated typed classes for ConfigMeta classes.

  This module provides IDE-friendly typed interfaces for all configuration classes.
+ The reason we auto-generate this file is that we want to provide a bridge between the ConfigMeta classes and the typed programmatic interface.
+ The CoreConfig class is set up in a way that if any additional params are missed out from being auto-generated then it will not affect the core functionality of the programmatic API.
+ The new parameters will just not show up in IDE autocompletions.
+ It is fine if this file is not regularly updated by running the script in the .pre-commit-config.app-changes.yaml
+ but it is recommended that this file not be deleted or manually edited.
+
  """

- from typing import Optional, List, Dict, Any, TypedDict
+ from typing import Optional, List, Dict, Any
  from .unified_config import CoreConfig

+ import sys
+ from typing import TYPE_CHECKING
+
+ # on 3.8+ use the stdlib TypedDict;
+ # in TYPE_CHECKING blocks mypy/pyright still pick it up on older Pythons
+ if sys.version_info >= (3, 8):
+ from typing import TypedDict
+ else:
+ if TYPE_CHECKING:
+ # for the benefit of type-checkers
+ from typing import TypedDict # noqa: F401
+ # runtime no-op TypedDict shim
+ class _TypedDictMeta(type):
+ def __new__(cls, name, bases, namespace, total=True):
+ # ignore total at runtime
+ return super().__new__(cls, name, bases, namespace)
+
+ class TypedDict(dict, metaclass=_TypedDictMeta):
+ # Runtime stand-in for typing.TypedDict on <3.8.
+ pass
+

  class ResourceConfigDict(TypedDict, total=False):
  cpu: Optional[str]
@@ -5,7 +5,7 @@ This module provides a mechanism to dynamically generate explicit typed classes
  from ConfigMeta classes that IDEs can understand and provide autocomplete for.
  """

- from typing import Any, Dict, List, Optional, Union, Type
+ from typing import Any, Dict, List, Optional, Union, Type, Set

  from .config_utils import ConfigMeta

@@ -13,6 +13,68 @@ import os

  current_dir = os.path.dirname(__file__)

+ TYPED_DICT_IMPORT = """
+ import sys
+ from typing import TYPE_CHECKING
+
+ # on 3.8+ use the stdlib TypedDict;
+ # in TYPE_CHECKING blocks mypy/pyright still pick it up on older Pythons
+ if sys.version_info >= (3, 8):
+ from typing import TypedDict
+ else:
+ if TYPE_CHECKING:
+ # for the benefit of type-checkers
+ from typing import TypedDict # noqa: F401
+ # runtime no-op TypedDict shim
+ class _TypedDictMeta(type):
+ def __new__(cls, name, bases, namespace, total=True):
+ # ignore total at runtime
+ return super().__new__(cls, name, bases, namespace)
+
+ class TypedDict(dict, metaclass=_TypedDictMeta):
+ # Runtime stand-in for typing.TypedDict on <3.8.
+ pass
+ """
+
+
+ def collect_nested_configs_recursive(
+ config_class: Type, visited: Optional[Set[str]] = None
+ ) -> Dict[str, Type]:
+ """
+ Recursively collect all nested ConfigMeta classes from a config class.
+
+ Args:
+ config_class: A class that inherits from ConfigMeta
+ visited: Set of already visited class names to avoid infinite recursion
+
+ Returns:
+ Dictionary mapping class names to ConfigMeta classes
+ """
+ if visited is None:
+ visited = set()
+
+ nested_configs = {}
+
+ # Avoid infinite recursion by tracking visited classes
+ if config_class.__name__ in visited:
+ return nested_configs
+
+ visited.add(config_class.__name__)
+
+ # First pass: collect immediate nested configs
+ for field_name, field_info in config_class._fields.items():
+ if ConfigMeta.is_instance(field_info.field_type):
+ nested_class = field_info.field_type
+ nested_configs[nested_class.__name__] = nested_class
+
+ # Recursively collect nested configs from this nested class
+ deeper_nested = collect_nested_configs_recursive(
+ nested_class, visited.copy()
+ )
+ nested_configs.update(deeper_nested)
+
+ return nested_configs
+

  def generate_typed_class_code(config_class: Type) -> str:
  """
@@ -29,22 +91,19 @@ def generate_typed_class_code(config_class: Type) -> str:

  class_name = f"Typed{config_class.__name__}"

- # Generate TypedDict for nested configs
+ # Generate TypedDict for nested configs - now recursive
  nested_typeddict_code = []

- # First pass: collect all nested configs
- nested_configs = {}
- for field_name, field_info in config_class._fields.items():
- if ConfigMeta.is_instance(field_info.field_type):
- nested_configs[field_info.field_type.__name__] = field_info.field_type
+ # Recursively collect all nested configs
+ nested_configs = collect_nested_configs_recursive(config_class)

- # Generate TypedDict classes for nested configs
+ # Generate TypedDict classes for all nested configs
  for nested_name, nested_class in nested_configs.items():
  dict_name = f"{nested_name}Dict"
  fields = []

  for field_name, field_info in nested_class._fields.items():
- field_type = _get_type_string(field_info.field_type)
+ field_type = _get_type_string(field_info.field_type, quote_config_meta=True)
  if not field_info.required:
  field_type = f"Optional[{field_type}]"
  fields.append(f" {field_name}: {field_type}")
@@ -119,8 +178,13 @@ def generate_typed_class_code(config_class: Type) -> str:
  return (newline + newline).join(full_code)


- def _get_type_string(field_type: Type) -> str:
- """Convert a type to its string representation for code generation."""
+ def _get_type_string(field_type: Type, quote_config_meta: bool = False) -> str:
+ """Convert a type to its string representation for code generation.
+
+ Args:
+ field_type: The type to convert
+ quote_config_meta: Whether to quote ConfigMeta type references for forward declarations
+ """
  if field_type == str:
  return "str"
  elif field_type == int:
@@ -129,6 +193,10 @@ def _get_type_string(field_type: Type) -> str:
  return "float"
  elif field_type == bool:
  return "bool"
+ elif ConfigMeta.is_instance(field_type):
+ # Handle ConfigMeta classes by referencing their Dict type
+ dict_type = f"{field_type.__name__}Dict"
+ return f'"{dict_type}"' if quote_config_meta else dict_type
  elif hasattr(field_type, "__origin__"):
  # Handle generic types like List[str], Dict[str, str], etc.
  origin = field_type.__origin__
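
The quote_config_meta flag exists so that a generated TypedDict can reference another generated <Name>Dict before that class is defined in the output file; quoting turns the reference into a forward declaration. A standalone sketch of the quoting rule (not the module itself):

    def render_optional_ref(nested_class_name: str, quote: bool) -> str:
        dict_type = f"{nested_class_name}Dict"
        ref = f'"{dict_type}"' if quote else dict_type
        return f"Optional[{ref}]"

    print(render_optional_ref("ResourceConfig", quote=True))   # Optional["ResourceConfigDict"]
    print(render_optional_ref("ResourceConfig", quote=False))  # Optional[ResourceConfigDict]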
@@ -136,18 +204,18 @@ def _get_type_string(field_type: Type) -> str:

  if origin == list:
  if args:
- return f"List[{_get_type_string(args[0])}]"
+ return f"List[{_get_type_string(args[0], quote_config_meta)}]"
  return "List[Any]"
  elif origin == dict:
  if len(args) == 2:
- return f"Dict[{_get_type_string(args[0])}, {_get_type_string(args[1])}]"
+ return f"Dict[{_get_type_string(args[0], quote_config_meta)}, {_get_type_string(args[1], quote_config_meta)}]"
  return "Dict[str, Any]"
  elif origin == Union:
  # Handle Optional types
  if len(args) == 2 and type(None) in args:
  non_none_type = args[0] if args[1] is type(None) else args[1]
- return f"Optional[{_get_type_string(non_none_type)}]"
- return f"Union[{', '.join(_get_type_string(arg) for arg in args)}]"
+ return f"Optional[{_get_type_string(non_none_type, quote_config_meta)}]"
+ return f"Union[{', '.join(_get_type_string(arg, quote_config_meta) for arg in args)}]"

  # Default case - use the type name
  return getattr(field_type, "__name__", str(field_type))
@@ -167,9 +235,10 @@ def generate_typed_classes_module(
  Complete Python module code
  """
  imports = [
- "from typing import Optional, List, Dict, Any, TypedDict",
+ "from typing import Optional, List, Dict, Any",
  "from .unified_config import "
  + ", ".join(cls.__name__ for cls in config_classes),
+ TYPED_DICT_IMPORT,
  ]

  class_codes = []
@@ -186,6 +255,18 @@ def generate_typed_classes_module(
  + newline
  + "This module provides IDE-friendly typed interfaces for all configuration classes."
  + newline
+ + "The reason we auto-generate this file is that we want to provide a bridge between the ConfigMeta classes and the typed programmatic interface."
+ + newline
+ + "The CoreConfig class is set up in a way that if any additional params are missed out from being auto-generated "
+ + "then it will not affect the core functionality of the programmatic API."
+ + newline
+ + "The new parameters will just not show up in IDE autocompletions."
+ + newline
+ + "It is fine if this file is not regularly updated by running the script in the .pre-commit-config.app-changes.yaml"
+ + newline
+ + "but it is recommended that this file not be deleted or manually edited."
+ + newline
+ + newline
  + '"""'
  + newline
  + newline
@@ -215,7 +296,7 @@ def create_typed_init_class_dynamic(config_class: Type) -> Type:
  def create_init_method():
  # Build the signature dynamically
  sig_params = []
- annotations = {"return": None}
+ annotations: Dict[str, Any] = {"return": type(None)}

  for field_name, field_info in config_class._fields.items():
  field_type = field_info.field_type
@@ -259,7 +340,7 @@ def create_typed_init_class_dynamic(config_class: Type) -> Type:
  __init__.__annotations__ = annotations
  return __init__

- def create_config(self) -> config_class:
+ def create_config(self):
  """Create and return the ConfigMeta class instance."""
  return config_class.from_dict(self._kwargs)

@@ -290,7 +371,7 @@ def create_typed_init_class_dynamic(config_class: Type) -> Type:


  # Auto-generate and write typed classes to a file
- def generate_typed_classes_file(output_file: str = None):
+ def generate_typed_classes_file(output_file: Optional[str] = None):
  """
  Generate typed classes and write them to a file for IDE support.

@@ -29,6 +29,7 @@ from .config_utils import (
  validate_config_meta,
  validate_required_fields,
  ConfigValidationFailedException,
+ commit_owner_names_across_tree,
  )


@@ -49,26 +50,26 @@ class UnitParser:
  "default_unit": "Mi",
  "requires_unit": True, # if a Unit free value is provided then we will add the default unit to it.
  # Regex to match values with units (e.g., "512Mi", "4Gi", "1024Ki")
- "unit_regex": r"^\d+(\.\d+)?(Ki|Mi|Gi|Ti|Pi|Ei)$",
+ "correct_unit_regex": r"^\d+(\.\d+)?(Ki|Mi|Gi|Ti|Pi|Ei)$",
  },
  "cpu": {
  "default_unit": None,
  "requires_unit": False, # if a Unit free value is provided then we will not add the default unit to it.
  # Accepts values like 400m, 4, 0.4, 1000n, etc.
  # Regex to match values with units (e.g., "400m", "1000n", "2", "0.5")
- "unit_regex": r"^(\d+(\.\d+)?(m|n)?|\d+(\.\d+)?)$",
+ "correct_unit_regex": r"^(\d+(\.\d+)?(m|n)?|\d+(\.\d+)?)$",
  },
  "disk": {
  "default_unit": "Mi",
  "requires_unit": True, # if a Unit free value is provided then we will add the default unit to it.
  # Regex to match values with units (e.g., "100Mi", "1Gi", "500Ki")
- "unit_regex": r"^\d+(\.\d+)?(Ki|Mi|Gi|Ti|Pi|Ei)$",
+ "correct_unit_regex": r"^\d+(\.\d+)?(Ki|Mi|Gi|Ti|Pi|Ei)$",
  },
  "gpu": {
  "default_unit": None,
  "requires_unit": False,
  # Regex to match values with units (usually just integer count, e.g., "1", "2")
- "unit_regex": r"^\d+$",
+ "correct_unit_regex": r"^\d+$",
  },
  }

@@ -76,10 +77,9 @@ class UnitParser:
  self.metric_name = metric_name

  def validate(self, value: str):
- if self.metrics[self.metric_name]["requires_unit"]:
- if not re.match(self.metrics[self.metric_name]["unit_regex"], value):
- return False
- return True
+ if re.match(self.metrics[self.metric_name]["correct_unit_regex"], value):
+ return True
+ return False

  def process(self, value: str):
  value = str(value)
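
With the rename to correct_unit_regex, validate() is now a direct regex match for every metric rather than a check that only applied when requires_unit was set. For memory, for instance, only values carrying a binary-SI suffix match the pattern (quick check using the same regex as above):

    import re

    MEMORY_PATTERN = r"^\d+(\.\d+)?(Ki|Mi|Gi|Ti|Pi|Ei)$"  # pattern copied from the hunk above

    for value in ("512Mi", "4Gi", "512"):
        print(value, bool(re.match(MEMORY_PATTERN, value)))
    # 512Mi True / 4Gi True / 512 False (the regex itself does not accept unit-less values)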
@@ -92,12 +92,9 @@ class UnitParser:
  )
  return value

- if re.match(self.metrics[self.metric_name]["unit_regex"], value):
- return value
-
  return value

- def parse(self, value: str):
+ def parse(self, value: Union[str, None]):
  if value is None:
  return None
  return self.process(value)
@@ -117,7 +114,7 @@ class UnitParser:
  field_name=metric_name,
  field_info=field_info,
  current_value=value,
- message=f"Invalid value for `{metric_name}`. Must be of the format {parser.metrics[metric_name]['unit_regex']}.",
+ message=f"Invalid value for `{metric_name}`. Must be of the format {parser.metrics[metric_name]['correct_unit_regex']}.",
  )
  return validation

@@ -138,7 +135,7 @@ class BasicValidations:
  field_name=self.field_name,
  field_info=self._get_field(),
  current_value=current_value,
- message=f"Configuration field {self.field_name} has invalid value {current_value}. Value must be one of: {' '.join(enums)}",
+ message=f"Configuration field {self.field_name} has invalid value {current_value}. Value must be one of: {'/'.join(enums)}",
  )
  return True

@@ -345,12 +342,14 @@ class ReplicaConfig(metaclass=ConfigMeta):
  replica_config.fixed is None,
  ]
  ):
+ # if nothing is set then default to a single fixed replica
  replica_config.fixed = 1
+ elif replica_config.min is not None and replica_config.max is None:
+ replica_config.max = replica_config.min
  return

  @staticmethod
  def validate(replica_config: "ReplicaConfig"):
- # TODO: Have a better validation story.
  both_min_max_set = (
  replica_config.min is not None and replica_config.max is not None
  )
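
The added branch fills in max when only min is provided, alongside the existing default of a single fixed replica when nothing is set. A condensed sketch of the resulting defaulting rules (illustrative, not the class itself):

    def apply_replica_defaults(fixed=None, min_=None, max_=None):
        if fixed is None and min_ is None and max_ is None:
            fixed = 1          # nothing set -> one fixed replica
        elif min_ is not None and max_ is None:
            max_ = min_        # only min set -> max mirrors min
        return {"fixed": fixed, "min": min_, "max": max_}

    print(apply_replica_defaults())        # {'fixed': 1, 'min': None, 'max': None}
    print(apply_replica_defaults(min_=2))  # {'fixed': None, 'min': 2, 'max': 2}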
@@ -932,6 +931,7 @@ How to read this schema:
  def validate(self):
  validate_config_meta(self)

+ @commit_owner_names_across_tree
  def commit(self):
  self.validate()
  validate_required_fields(self)
@@ -1,13 +1,13 @@
  Metadata-Version: 2.1
  Name: ob-metaflow-extensions
- Version: 1.1.175rc3
+ Version: 1.2.0
  Summary: Outerbounds Platform Extensions for Metaflow
  Author: Outerbounds, Inc.
  License: Commercial
  Description-Content-Type: text/markdown
  Requires-Dist: boto3
  Requires-Dist: kubernetes
- Requires-Dist: ob-metaflow (==2.15.18.1)
+ Requires-Dist: ob-metaflow (==2.15.21.1)

  # Outerbounds platform package

@@ -1,22 +1,22 @@
  metaflow_extensions/outerbounds/__init__.py,sha256=Gb8u06s9ClQsA_vzxmkCzuMnigPy7kKcDnLfb7eB-64,514
  metaflow_extensions/outerbounds/remote_config.py,sha256=pEFJuKDYs98eoB_-ryPjVi9b_c4gpHMdBHE14ltoxIU,4672
  metaflow_extensions/outerbounds/config/__init__.py,sha256=JsQGRuGFz28fQWjUvxUgR8EKBLGRdLUIk_buPLJplJY,1225
- metaflow_extensions/outerbounds/plugins/__init__.py,sha256=8DmLGwlr6UZWe39rbJdhO5ANemDp2ZwxY6MlqVPCppc,13740
+ metaflow_extensions/outerbounds/plugins/__init__.py,sha256=SgqRTG7FFk-jrPBNPYQ-D0W26Yr_dgg7ZL-YJInTQGw,13706
  metaflow_extensions/outerbounds/plugins/auth_server.py,sha256=_Q9_2EL0Xy77bCRphkwT1aSu8gQXRDOH-Z-RxTUO8N4,2202
  metaflow_extensions/outerbounds/plugins/perimeters.py,sha256=QXh3SFP7GQbS-RAIxUOPbhPzQ7KDFVxZkTdKqFKgXjI,2697
  metaflow_extensions/outerbounds/plugins/apps/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- metaflow_extensions/outerbounds/plugins/apps/app_cli.py,sha256=aVgKe2CBcOgXQL2TYgO-sdIFlTntvbjHZFphR1w1CAM,64
+ metaflow_extensions/outerbounds/plugins/apps/app_cli.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  metaflow_extensions/outerbounds/plugins/apps/app_deploy_decorator.py,sha256=aN2B7nf0IwnOg_9kpqVSkGyZPhr4D9YI1WuCSlNP92s,3498
  metaflow_extensions/outerbounds/plugins/apps/app_utils.py,sha256=sw9whU17lAzlD2K2kEDNjlk1Ib-2xE2UNhJkmzD8Qv8,8543
  metaflow_extensions/outerbounds/plugins/apps/consts.py,sha256=iHsyqbUg9k-rgswCs1Jxf5QZIxR1V-peCDRjgr9kdBM,177
  metaflow_extensions/outerbounds/plugins/apps/deploy_decorator.py,sha256=VkmiMdNYHhNdt-Qm9AVv7aE2LWFsIFEc16YcOYjwF6Q,8568
  metaflow_extensions/outerbounds/plugins/apps/supervisord_utils.py,sha256=GQoN2gyPClcpR9cLldJmbCfqXnoAHxp8xUnY7vzaYtY,9026
  metaflow_extensions/outerbounds/plugins/apps/core/__init__.py,sha256=c6uCgKlgEkTmM9BVdAO-m3vZvUpK2KW_AZZ2236now4,237
- metaflow_extensions/outerbounds/plugins/apps/core/_state_machine.py,sha256=8RvEqRI9WaUiFu2LBKIqWRbbe9bSM2EptTR56wPit-0,18434
- metaflow_extensions/outerbounds/plugins/apps/core/app_cli.py,sha256=HR4nQXyxHSe6NbUF0mutsOQR_rIT-_U6C-mdSQ1_8Os,42326
+ metaflow_extensions/outerbounds/plugins/apps/core/_state_machine.py,sha256=b0WI7jVIReWo52AtWXFlaoET2u3nOVH9oITnVlWFIBk,19881
+ metaflow_extensions/outerbounds/plugins/apps/core/app_cli.py,sha256=9YyvOQzPNlpxA2K9AZ4jYpfDWpLSp66u_NotGGE5DHg,42155
  metaflow_extensions/outerbounds/plugins/apps/core/app_config.py,sha256=PHt-HdNfTHIuhY-eB5vkRMp1RKQNWJ4DKdgZWyYgUuc,4167
  metaflow_extensions/outerbounds/plugins/apps/core/artifacts.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- metaflow_extensions/outerbounds/plugins/apps/core/capsule.py,sha256=kOyHX8mzt44N4x4MKXCcBTZxS9ZcmRcUgbVuTgAMPpA,33879
+ metaflow_extensions/outerbounds/plugins/apps/core/capsule.py,sha256=fRcJ_GgC4FxGscRS4M4mcMNpjoHA6Zdc1LIgdfDkJkI,33935
  metaflow_extensions/outerbounds/plugins/apps/core/click_importer.py,sha256=kgoPQmK_-8PSSTc3QMSaynCLQ5VWTkKFOC69FPURyXA,998
  metaflow_extensions/outerbounds/plugins/apps/core/config_schema.yaml,sha256=iTThO2vNQrFWe9nYfjiOcMf6FOQ6vU_1ZhXhUAr0L24,8142
  metaflow_extensions/outerbounds/plugins/apps/core/dependencies.py,sha256=HDPj7rDARcsKeex5GwH0IP8rOXMH6YdOufgXDknP1S8,4006
@@ -33,11 +33,11 @@ metaflow_extensions/outerbounds/plugins/apps/core/code_package/code_packager.py,
  metaflow_extensions/outerbounds/plugins/apps/core/code_package/examples.py,sha256=aF8qKIJxCVv_ugcShQjqUsXKKKMsm1oMkQIl8w3QKuw,4016
  metaflow_extensions/outerbounds/plugins/apps/core/config/__init__.py,sha256=fSFBjC5ujTpBlNJGyVsaoWl5VZ_8mXbEIPiFvzTrgKA,382
  metaflow_extensions/outerbounds/plugins/apps/core/config/cli_generator.py,sha256=0R0-wy7RxAMR9doVRvuluRYxAYgyjZXlTIkOeYGyz7M,5350
- metaflow_extensions/outerbounds/plugins/apps/core/config/config_utils.py,sha256=zBZSe-1CsHj5MxrKNuAGLy11yXD4qF_IJJkdORfCW6A,32089
+ metaflow_extensions/outerbounds/plugins/apps/core/config/config_utils.py,sha256=bozzUR8rbfOnb5M532RZxB5QNvVgEC1gnVjfCvQ82Yk,34053
  metaflow_extensions/outerbounds/plugins/apps/core/config/schema_export.py,sha256=tigPtb0we-urwbmctG1GbaQ9NKRKZn4KBbJKmaEntCg,9501
- metaflow_extensions/outerbounds/plugins/apps/core/config/typed_configs.py,sha256=RufXI8BQn6G5kixY6P27VYJ0d36ZG-JNFW15bA1bnk4,3179
- metaflow_extensions/outerbounds/plugins/apps/core/config/typed_init_generator.py,sha256=1iszHsXjxAWeGI4XhwoTT8lVtzQwstF3ZOiVYBykO6w,10256
- metaflow_extensions/outerbounds/plugins/apps/core/config/unified_config.py,sha256=58PsC1ZK3p5wNvavVRzb4Qq764wOUjPuSvXDsrrsSw4,35474
+ metaflow_extensions/outerbounds/plugins/apps/core/config/typed_configs.py,sha256=bAC2lV1xWtcw0r2LPlqDrggeXPLOyrtZha2KDpm_Vx0,4454
+ metaflow_extensions/outerbounds/plugins/apps/core/config/typed_init_generator.py,sha256=KiJ1eiwtBR5eWdBzWqvO6KlqJ2qzjJvl3w4c1uJ3g0Y,13419
+ metaflow_extensions/outerbounds/plugins/apps/core/config/unified_config.py,sha256=HzLFqMHuo-2o3KR4hJlTtMkl978yvCnGu2lm_PnhEp0,35548
  metaflow_extensions/outerbounds/plugins/apps/core/experimental/__init__.py,sha256=rd4qGTkHndKYfJmoAKZWiY0KK4j5BK6RBrtle-it1Mg,2746
  metaflow_extensions/outerbounds/plugins/aws/__init__.py,sha256=VBGdjNKeFLXGZuqh4jVk8cFtO1AWof73a6k_cnbAOYA,145
  metaflow_extensions/outerbounds/plugins/aws/assume_role.py,sha256=mBewNlnSYsR2rFXFkX-DUH6ku01h2yOcMcLHoCL7eyI,161
@@ -115,7 +115,7 @@ metaflow_extensions/outerbounds/toplevel/plugins/ollama/__init__.py,sha256=GRSz2
  metaflow_extensions/outerbounds/toplevel/plugins/snowflake/__init__.py,sha256=LptpH-ziXHrednMYUjIaosS1SXD3sOtF_9_eRqd8SJw,50
  metaflow_extensions/outerbounds/toplevel/plugins/torchtune/__init__.py,sha256=uTVkdSk3xZ7hEKYfdlyVteWj5KeDwaM1hU9WT-_YKfI,50
  metaflow_extensions/outerbounds/toplevel/plugins/vllm/__init__.py,sha256=ekcgD3KVydf-a0xMI60P4uy6ePkSEoFHiGnDq1JM940,45
- ob_metaflow_extensions-1.1.175rc3.dist-info/METADATA,sha256=wnDlhzELSLX2xxzL6U87sqIbb1tWYehFruJ783o-8Gg,524
- ob_metaflow_extensions-1.1.175rc3.dist-info/WHEEL,sha256=bb2Ot9scclHKMOLDEHY6B2sicWOgugjFKaJsT7vwMQo,110
- ob_metaflow_extensions-1.1.175rc3.dist-info/top_level.txt,sha256=NwG0ukwjygtanDETyp_BUdtYtqIA_lOjzFFh1TsnxvI,20
- ob_metaflow_extensions-1.1.175rc3.dist-info/RECORD,,
+ ob_metaflow_extensions-1.2.0.dist-info/METADATA,sha256=ZlwS67W8SEHHEGzsdvxWg6TODhFy-N-o9GpflqBkMR4,519
+ ob_metaflow_extensions-1.2.0.dist-info/WHEEL,sha256=bb2Ot9scclHKMOLDEHY6B2sicWOgugjFKaJsT7vwMQo,110
+ ob_metaflow_extensions-1.2.0.dist-info/top_level.txt,sha256=NwG0ukwjygtanDETyp_BUdtYtqIA_lOjzFFh1TsnxvI,20
+ ob_metaflow_extensions-1.2.0.dist-info/RECORD,,