ob-metaflow 2.16.6.4rc0__py2.py3-none-any.whl → 2.16.6.5rc2__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of ob-metaflow might be problematic; see the package registry listing for more details.

Files changed (30)
  1. metaflow/_vendor/imghdr/__init__.py +0 -5
  2. metaflow/client/core.py +1 -6
  3. metaflow/extension_support/__init__.py +3 -4
  4. metaflow/metaflow_environment.py +6 -14
  5. metaflow/package/__init__.py +9 -18
  6. metaflow/packaging_sys/__init__.py +43 -53
  7. metaflow/packaging_sys/backend.py +6 -21
  8. metaflow/packaging_sys/tar_backend.py +3 -16
  9. metaflow/packaging_sys/v1.py +21 -21
  10. metaflow/plugins/argo/argo_client.py +31 -14
  11. metaflow/plugins/argo/argo_workflows.py +66 -17
  12. metaflow/plugins/argo/argo_workflows_cli.py +1 -2
  13. metaflow/plugins/argo/argo_workflows_deployer_objects.py +32 -0
  14. metaflow/plugins/aws/step_functions/step_functions_deployer_objects.py +14 -0
  15. metaflow/plugins/kubernetes/kubernetes_decorator.py +1 -1
  16. metaflow/plugins/pypi/conda_decorator.py +2 -4
  17. metaflow/runner/click_api.py +7 -14
  18. metaflow/runner/deployer.py +83 -7
  19. metaflow/runner/subprocess_manager.py +12 -20
  20. metaflow/user_decorators/mutable_flow.py +3 -1
  21. metaflow/version.py +1 -1
  22. {ob_metaflow-2.16.6.4rc0.data → ob_metaflow-2.16.6.5rc2.data}/data/share/metaflow/devtools/Tiltfile +43 -2
  23. {ob_metaflow-2.16.6.4rc0.data → ob_metaflow-2.16.6.5rc2.data}/data/share/metaflow/devtools/pick_services.sh +1 -0
  24. {ob_metaflow-2.16.6.4rc0.dist-info → ob_metaflow-2.16.6.5rc2.dist-info}/METADATA +2 -2
  25. {ob_metaflow-2.16.6.4rc0.dist-info → ob_metaflow-2.16.6.5rc2.dist-info}/RECORD +30 -30
  26. {ob_metaflow-2.16.6.4rc0.data → ob_metaflow-2.16.6.5rc2.data}/data/share/metaflow/devtools/Makefile +0 -0
  27. {ob_metaflow-2.16.6.4rc0.dist-info → ob_metaflow-2.16.6.5rc2.dist-info}/WHEEL +0 -0
  28. {ob_metaflow-2.16.6.4rc0.dist-info → ob_metaflow-2.16.6.5rc2.dist-info}/entry_points.txt +0 -0
  29. {ob_metaflow-2.16.6.4rc0.dist-info → ob_metaflow-2.16.6.5rc2.dist-info}/licenses/LICENSE +0 -0
  30. {ob_metaflow-2.16.6.4rc0.dist-info → ob_metaflow-2.16.6.5rc2.dist-info}/top_level.txt +0 -0
@@ -216,23 +216,14 @@ class ArgoWorkflows(object):
216
216
  return name.replace(".", "-")
217
217
 
218
218
  @staticmethod
219
- def list_templates(flow_name, all=False):
219
+ def list_templates(flow_name, all=False, page_size=100):
220
220
  client = ArgoClient(namespace=KUBERNETES_NAMESPACE)
221
221
 
222
- templates = client.get_workflow_templates()
223
- if templates is None:
224
- return []
225
-
226
- template_names = [
227
- template["metadata"]["name"]
228
- for template in templates
229
- if all
230
- or flow_name
231
- == template["metadata"]
232
- .get("annotations", {})
233
- .get("metaflow/flow_name", None)
234
- ]
235
- return template_names
222
+ for template in client.get_workflow_templates(page_size=page_size):
223
+ if all or flow_name == template["metadata"].get("annotations", {}).get(
224
+ "metaflow/flow_name", None
225
+ ):
226
+ yield template["metadata"]["name"]
236
227
 
237
228
  @staticmethod
238
229
  def delete(name):
@@ -3286,8 +3277,8 @@ class ArgoWorkflows(object):
3286
3277
  Trigger().template(
3287
3278
  TriggerTemplate(self.name)
3288
3279
  # Trigger a deployed workflow template
3289
- .argo_workflow_trigger(
3290
- ArgoWorkflowTrigger()
3280
+ .k8s_trigger(
3281
+ StandardK8STrigger()
3291
3282
  .source(
3292
3283
  {
3293
3284
  "resource": {
@@ -4265,6 +4256,10 @@ class TriggerTemplate(object):
4265
4256
  self.payload = tree()
4266
4257
  self.payload["name"] = name
4267
4258
 
4259
+ def k8s_trigger(self, k8s_trigger):
4260
+ self.payload["k8s"] = k8s_trigger.to_json()
4261
+ return self
4262
+
4268
4263
  def argo_workflow_trigger(self, argo_workflow_trigger):
4269
4264
  self.payload["argoWorkflow"] = argo_workflow_trigger.to_json()
4270
4265
  return self
@@ -4339,3 +4334,57 @@ class TriggerParameter(object):
4339
4334
 
4340
4335
  def __str__(self):
4341
4336
  return json.dumps(self.payload, indent=4)
4337
+
4338
+
4339
+ class StandardK8STrigger(object):
4340
+ # https://pkg.go.dev/github.com/argoproj/argo-events/pkg/apis/sensor/v1alpha1#StandardK8STrigger
4341
+
4342
+ def __init__(self):
4343
+ tree = lambda: defaultdict(tree)
4344
+ self.payload = tree()
4345
+ self.payload["operation"] = "create"
4346
+
4347
+ def operation(self, operation):
4348
+ self.payload["operation"] = operation
4349
+ return self
4350
+
4351
+ def group(self, group):
4352
+ self.payload["group"] = group
4353
+ return self
4354
+
4355
+ def version(self, version):
4356
+ self.payload["version"] = version
4357
+ return self
4358
+
4359
+ def resource(self, resource):
4360
+ self.payload["resource"] = resource
4361
+ return self
4362
+
4363
+ def namespace(self, namespace):
4364
+ self.payload["namespace"] = namespace
4365
+ return self
4366
+
4367
+ def source(self, source):
4368
+ self.payload["source"] = source
4369
+ return self
4370
+
4371
+ def parameters(self, trigger_parameters):
4372
+ if "parameters" not in self.payload:
4373
+ self.payload["parameters"] = []
4374
+ for trigger_parameter in trigger_parameters:
4375
+ self.payload["parameters"].append(trigger_parameter.to_json())
4376
+ return self
4377
+
4378
+ def live_object(self, live_object=True):
4379
+ self.payload["liveObject"] = live_object
4380
+ return self
4381
+
4382
+ def patch_strategy(self, patch_strategy):
4383
+ self.payload["patchStrategy"] = patch_strategy
4384
+ return self
4385
+
4386
+ def to_json(self):
4387
+ return self.payload
4388
+
4389
+ def __str__(self):
4390
+ return json.dumps(self.payload, indent=4)
@@ -1011,8 +1011,7 @@ def terminate(obj, run_id, authorize=None):
1011
1011
  )
1012
1012
  @click.pass_obj
1013
1013
  def list_workflow_templates(obj, all=None):
1014
- templates = ArgoWorkflows.list_templates(obj.flow.name, all)
1015
- for template_name in templates:
1014
+ for template_name in ArgoWorkflows.list_templates(obj.flow.name, all):
1016
1015
  obj.echo_always(template_name)
1017
1016
 
1018
1017
 
@@ -203,6 +203,38 @@ class ArgoWorkflowsDeployedFlow(DeployedFlow):
203
203
 
204
204
  TYPE: ClassVar[Optional[str]] = "argo-workflows"
205
205
 
206
+ @classmethod
207
+ def list_deployed_flows(cls, flow_name: Optional[str] = None):
208
+ """
209
+ List all deployed Argo Workflow templates.
210
+
211
+ Parameters
212
+ ----------
213
+ flow_name : str, optional, default None
214
+ If specified, only list deployed flows for this specific flow name.
215
+ If None, list all deployed flows.
216
+
217
+ Yields
218
+ ------
219
+ ArgoWorkflowsDeployedFlow
220
+ `ArgoWorkflowsDeployedFlow` objects representing deployed
221
+ workflow templates on Argo Workflows.
222
+ """
223
+ from metaflow.plugins.argo.argo_workflows import ArgoWorkflows
224
+
225
+ # When flow_name is None, use all=True to get all templates
226
+ # When flow_name is specified, use all=False to filter by flow_name
227
+ all_templates = flow_name is None
228
+ for template_name in ArgoWorkflows.list_templates(
229
+ flow_name=flow_name, all=all_templates
230
+ ):
231
+ try:
232
+ deployed_flow = cls.from_deployment(template_name)
233
+ yield deployed_flow
234
+ except Exception:
235
+ # Skip templates that can't be converted to DeployedFlow objects
236
+ continue
237
+
206
238
  @classmethod
207
239
  def from_deployment(cls, identifier: str, metadata: Optional[str] = None):
208
240
  """
@@ -56,6 +56,20 @@ class StepFunctionsDeployedFlow(DeployedFlow):
56
56
 
57
57
  TYPE: ClassVar[Optional[str]] = "step-functions"
58
58
 
59
+ @classmethod
60
+ def list_deployed_flows(cls, flow_name: Optional[str] = None):
61
+ """
62
+ This method is not currently implemented for Step Functions.
63
+
64
+ Raises
65
+ ------
66
+ NotImplementedError
67
+ This method is not implemented for Step Functions.
68
+ """
69
+ raise NotImplementedError(
70
+ "list_deployed_flows is not implemented for StepFunctions"
71
+ )
72
+
59
73
  @classmethod
60
74
  def from_deployment(cls, identifier: str, metadata: Optional[str] = None):
61
75
  """
@@ -98,7 +98,7 @@ class KubernetesDecorator(StepDecorator):
98
98
  the scheduled node should not have GPUs.
99
99
  gpu_vendor : str, default KUBERNETES_GPU_VENDOR
100
100
  The vendor of the GPUs to be used for this step.
101
- tolerations : List[str], default []
101
+ tolerations : List[Dict[str,str]], default []
102
102
  The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
103
103
  Kubernetes tolerations to use when launching pod in Kubernetes.
104
104
  labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
@@ -243,11 +243,9 @@ class CondaStepDecorator(StepDecorator):
243
243
  # Ensure local installation of Metaflow is visible to user code
244
244
  python_path = self.__class__._metaflow_home.name
245
245
  addl_env_vars = {}
246
- if self.__class__._addl_env_vars:
246
+ if self.__class__._addl_env_vars is not None:
247
247
  for key, value in self.__class__._addl_env_vars.items():
248
- if key.endswith(":"):
249
- addl_env_vars[key[:-1]] = value
250
- elif key == "PYTHONPATH":
248
+ if key == "PYTHONPATH":
251
249
  addl_env_vars[key] = os.pathsep.join([value, python_path])
252
250
  else:
253
251
  addl_env_vars[key] = value
@@ -43,7 +43,6 @@ from metaflow._vendor.click.types import (
43
43
  )
44
44
  from metaflow.decorators import add_decorator_options
45
45
  from metaflow.exception import MetaflowException
46
- from metaflow.flowspec import _FlowState
47
46
  from metaflow.includefile import FilePathClass
48
47
  from metaflow.metaflow_config import CLICK_API_PROCESS_CONFIG
49
48
  from metaflow.parameters import JSONTypeClass, flow_context
@@ -172,6 +171,7 @@ def _lazy_load_command(
172
171
  _self,
173
172
  name: str,
174
173
  ):
174
+
175
175
  # Context is not used in get_command so we can pass None. Since we pin click,
176
176
  # this won't change from under us.
177
177
 
@@ -516,11 +516,6 @@ class MetaflowAPI(object):
516
516
  # Note that if CLICK_API_PROCESS_CONFIG is False, we still do this because
517
517
  # it will init all parameters (config_options will be None)
518
518
  # We ignore any errors if we don't check the configs in the click API.
519
-
520
- # Init all values in the flow mutators and then process them
521
- for decorator in self._flow_cls._flow_state.get(_FlowState.FLOW_MUTATORS, []):
522
- decorator.external_init()
523
-
524
519
  new_cls = self._flow_cls._process_config_decorators(
525
520
  config_options, process_configs=CLICK_API_PROCESS_CONFIG
526
521
  )
@@ -546,16 +541,14 @@ def extract_all_params(cmd_obj: Union[click.Command, click.Group]):
546
541
 
547
542
  for each_param in cmd_obj.params:
548
543
  if isinstance(each_param, click.Argument):
549
- (
550
- arg_params_sigs[each_param.name],
551
- annotations[each_param.name],
552
- ) = get_inspect_param_obj(each_param, inspect.Parameter.POSITIONAL_ONLY)
544
+ arg_params_sigs[each_param.name], annotations[each_param.name] = (
545
+ get_inspect_param_obj(each_param, inspect.Parameter.POSITIONAL_ONLY)
546
+ )
553
547
  arg_parameters[each_param.name] = each_param
554
548
  elif isinstance(each_param, click.Option):
555
- (
556
- opt_params_sigs[each_param.name],
557
- annotations[each_param.name],
558
- ) = get_inspect_param_obj(each_param, inspect.Parameter.KEYWORD_ONLY)
549
+ opt_params_sigs[each_param.name], annotations[each_param.name] = (
550
+ get_inspect_param_obj(each_param, inspect.Parameter.KEYWORD_ONLY)
551
+ )
559
552
  opt_parameters[each_param.name] = each_param
560
553
 
561
554
  defaults[each_param.name] = each_param.default
@@ -13,7 +13,9 @@ def generate_fake_flow_file_contents(
13
13
  ):
14
14
  params_code = ""
15
15
  for _, param_details in param_info.items():
16
- param_python_var_name = param_details["python_var_name"]
16
+ param_python_var_name = param_details.get(
17
+ "python_var_name", param_details["name"]
18
+ )
17
19
  param_name = param_details["name"]
18
20
  param_type = param_details["type"]
19
21
  param_help = param_details["description"]
@@ -229,7 +231,7 @@ class DeployedFlowMeta(type):
229
231
  }
230
232
  )
231
233
 
232
- def _default_injected_method():
234
+ def _from_deployment_injected_method():
233
235
  def f(
234
236
  cls,
235
237
  identifier: str,
@@ -271,7 +273,7 @@ class DeployedFlowMeta(type):
271
273
  f.__name__ = "from_deployment"
272
274
  return f
273
275
 
274
- def _per_type_injected_method(method_name, impl):
276
+ def _per_type_from_deployment_injected_method(method_name, impl):
275
277
  def f(
276
278
  cls,
277
279
  identifier: str,
@@ -286,14 +288,88 @@ class DeployedFlowMeta(type):
286
288
  f.__name__ = method_name
287
289
  return f
288
290
 
289
- setattr(cls, "from_deployment", classmethod(_default_injected_method()))
291
+ def _list_deployed_flows_injected_method():
292
+ def f(
293
+ cls,
294
+ flow_name: Optional[str] = None,
295
+ impl: str = DEFAULT_FROM_DEPLOYMENT_IMPL.replace("-", "_"),
296
+ ):
297
+ """
298
+ List all deployed flows for the specified implementation.
299
+
300
+ Parameters
301
+ ----------
302
+ flow_name : str, optional, default None
303
+ If specified, only list deployed flows for this specific flow name.
304
+ If None, list all deployed flows.
305
+ impl : str, optional, default given by METAFLOW_DEFAULT_FROM_DEPLOYMENT_IMPL
306
+ The default implementation to use if not specified
307
+
308
+ Yields
309
+ ------
310
+ DeployedFlow
311
+ `DeployedFlow` objects representing deployed flows.
312
+ """
313
+ if impl in allowed_providers:
314
+ return (
315
+ allowed_providers[impl]
316
+ .deployed_flow_type()
317
+ .list_deployed_flows(flow_name)
318
+ )
319
+ else:
320
+ raise ValueError(
321
+ f"No deployer '{impl}' exists; valid deployers are: "
322
+ f"{list(allowed_providers.keys())}"
323
+ )
324
+
325
+ f.__name__ = "list_deployed_flows"
326
+ return f
327
+
328
+ def _per_type_list_deployed_flows_injected_method(method_name, impl):
329
+ def f(
330
+ cls,
331
+ flow_name: Optional[str] = None,
332
+ ):
333
+ return (
334
+ allowed_providers[impl]
335
+ .deployed_flow_type()
336
+ .list_deployed_flows(flow_name)
337
+ )
338
+
339
+ f.__name__ = method_name
340
+ return f
341
+
342
+ setattr(
343
+ cls, "from_deployment", classmethod(_from_deployment_injected_method())
344
+ )
345
+ setattr(
346
+ cls,
347
+ "list_deployed_flows",
348
+ classmethod(_list_deployed_flows_injected_method()),
349
+ )
290
350
 
291
351
  for impl in allowed_providers:
292
- method_name = f"from_{impl}"
352
+ from_deployment_method_name = f"from_{impl}"
353
+ list_deployed_flows_method_name = f"list_{impl}"
354
+
293
355
  setattr(
294
356
  cls,
295
- method_name,
296
- classmethod(_per_type_injected_method(method_name, impl)),
357
+ from_deployment_method_name,
358
+ classmethod(
359
+ _per_type_from_deployment_injected_method(
360
+ from_deployment_method_name, impl
361
+ )
362
+ ),
363
+ )
364
+
365
+ setattr(
366
+ cls,
367
+ list_deployed_flows_method_name,
368
+ classmethod(
369
+ _per_type_list_deployed_flows_injected_method(
370
+ list_deployed_flows_method_name, impl
371
+ )
372
+ ),
297
373
  )
298
374
 
299
375
  return cls
@@ -152,20 +152,12 @@ class SubprocessManager(object):
152
152
  int
153
153
  The process ID of the subprocess.
154
154
  """
155
- env = env or {}
156
- installed_root = os.environ.get("METAFLOW_EXTRACTED_ROOT", get_metaflow_root())
157
-
158
- for k, v in MetaflowCodeContent.get_env_vars_for_packaged_metaflow(
159
- installed_root
160
- ).items():
161
- if k.endswith(":"):
162
- # Override
163
- env[k[:-1]] = v
164
- elif k in env:
165
- env[k] = "%s:%s" % (v, env[k])
166
- else:
167
- env[k] = v
168
-
155
+ updated_env = MetaflowCodeContent.get_env_vars_for_packaged_metaflow(
156
+ get_metaflow_root()
157
+ )
158
+ if updated_env:
159
+ env = env or {}
160
+ env.update(updated_env)
169
161
  command_obj = CommandManager(command, env, cwd)
170
162
  pid = command_obj.run(show_output=show_output)
171
163
  self.commands[pid] = command_obj
@@ -196,12 +188,12 @@ class SubprocessManager(object):
196
188
  int
197
189
  The process ID of the subprocess.
198
190
  """
199
- env = env or {}
200
- if "PYTHONPATH" in env:
201
- env["PYTHONPATH"] = "%s:%s" % (get_metaflow_root(), env["PYTHONPATH"])
202
- else:
203
- env["PYTHONPATH"] = get_metaflow_root()
204
-
191
+ updated_env = MetaflowCodeContent.get_env_vars_for_packaged_metaflow(
192
+ get_metaflow_root()
193
+ )
194
+ if updated_env:
195
+ env = env or {}
196
+ env.update(updated_env)
205
197
  command_obj = CommandManager(command, env, cwd)
206
198
  pid = await command_obj.async_run()
207
199
  self.commands[pid] = command_obj
@@ -347,8 +347,10 @@ class MutableFlow:
347
347
  "Mutable flow adding flow decorator '%s'" % deco_type
348
348
  )
349
349
 
350
+ # self._flow_cls._flow_decorators is a dictionary of form :
351
+ # <deco_name> : [deco_instance, deco_instance, ...]
350
352
  existing_deco = [
351
- d for d in self._flow_cls._flow_decorators if d.name == flow_deco.name
353
+ d for d in self._flow_cls._flow_decorators if d == flow_deco.name
352
354
  ]
353
355
 
354
356
  if flow_deco.allow_multiple or not existing_deco:
metaflow/version.py CHANGED
@@ -1 +1 @@
1
- metaflow_version = "2.16.6.4rc0"
1
+ metaflow_version = "2.16.6.5rc2"
@@ -14,6 +14,17 @@
14
14
  version_settings(constraint='>=0.22.2')
15
15
  allow_k8s_contexts('minikube')
16
16
 
17
+ # Version configuration for components
18
+ JOBSET_VERSION = os.getenv("JOBSET_VERSION", "v0.6.0")
19
+
20
+ # Argo Workflows versions
21
+ ARGO_WORKFLOWS_HELM_CHART_VERSION = os.getenv("ARGO_WORKFLOWS_HELM_CHART_VERSION", "0.45.2") # Helm chart version
22
+ ARGO_WORKFLOWS_IMAGE_TAG = os.getenv("ARGO_WORKFLOWS_IMAGE_TAG", "v3.6.0") # Argo Workflows application version
23
+
24
+ # Argo Events versions
25
+ ARGO_EVENTS_HELM_CHART_VERSION = os.getenv("ARGO_EVENTS_HELM_CHART_VERSION", "2.4.8") # Helm chart version
26
+ ARGO_EVENTS_IMAGE_TAG = os.getenv("ARGO_EVENTS_IMAGE_TAG", "v1.9.2") # Argo Events application version
27
+
17
28
  components = {
18
29
  "metadata-service": ["postgresql"],
19
30
  "ui": ["postgresql", "minio"],
@@ -21,9 +32,10 @@ components = {
21
32
  "postgresql": [],
22
33
  "argo-workflows": [],
23
34
  "argo-events": ["argo-workflows"],
35
+ "jobset": [],
24
36
  }
25
37
 
26
- services_env = os.getenv("SERVICES", "").strip().lower()
38
+ services_env = os.getenv("SERVICES", "all").strip().lower()
27
39
 
28
40
  if services_env:
29
41
  if services_env == "all":
@@ -205,6 +217,7 @@ if "postgresql" in enabled_components:
205
217
  if "argo-workflows" in enabled_components:
206
218
  helm_remote(
207
219
  'argo-workflows',
220
+ version=ARGO_WORKFLOWS_HELM_CHART_VERSION,
208
221
  repo_name='argo',
209
222
  repo_url='https://argoproj.github.io/argo-helm',
210
223
  set=[
@@ -220,7 +233,9 @@ if "argo-workflows" in enabled_components:
220
233
  'controller.resources.requests.memory=128Mi',
221
234
  'controller.resources.requests.cpu=50m',
222
235
  'controller.resources.limits.memory=256Mi',
223
- 'controller.resources.limits.cpu=100m'
236
+ 'controller.resources.limits.cpu=100m',
237
+ # Image version overrides
238
+ 'images.tag=%s' % ARGO_WORKFLOWS_IMAGE_TAG,
224
239
  ]
225
240
  )
226
241
 
@@ -307,6 +322,7 @@ if "argo-workflows" in enabled_components:
307
322
  if "argo-events" in enabled_components:
308
323
  helm_remote(
309
324
  'argo-events',
325
+ version=ARGO_EVENTS_HELM_CHART_VERSION,
310
326
  repo_name='argo',
311
327
  repo_url='https://argoproj.github.io/argo-helm',
312
328
  set=[
@@ -334,6 +350,8 @@ if "argo-events" in enabled_components:
334
350
  'configs.jetstream.versions[1].natsImage=nats:2.9.15',
335
351
  'configs.jetstream.versions[1].startCommand=/nats-server',
336
352
  'configs.jetstream.versions[1].version=2.9.15',
353
+ # Image version overrides
354
+ 'global.image.tag=%s' % ARGO_EVENTS_IMAGE_TAG,
337
355
  ]
338
356
  )
339
357
 
@@ -541,6 +559,29 @@ if "argo-events" in enabled_components:
541
559
  config_resources.append('argo-events-controller-manager')
542
560
  config_resources.append('argo-events-webhook-eventsource-svc')
543
561
 
562
+ #################################################
563
+ # JOBSET
564
+ #################################################
565
+ if "jobset" in enabled_components:
566
+ # Apply JobSet manifests directly from GitHub releases
567
+ jobset_manifest_url = "https://github.com/kubernetes-sigs/jobset/releases/download/%s/manifests.yaml" % JOBSET_VERSION
568
+
569
+ cmd = "curl -sSL %s" % (jobset_manifest_url)
570
+ k8s_yaml(
571
+ local(
572
+ cmd,
573
+ )
574
+ )
575
+
576
+ k8s_resource(
577
+ 'jobset-controller-manager',
578
+ labels=['jobset'],
579
+ )
580
+
581
+ metaflow_config["METAFLOW_KUBERNETES_JOBSET_ENABLED"] = "true"
582
+
583
+ config_resources.append('jobset-controller-manager')
584
+
544
585
  #################################################
545
586
  # METADATA SERVICE
546
587
  #################################################
@@ -18,6 +18,7 @@ SERVICE_OPTIONS=(
18
18
  "ui"
19
19
  "argo-workflows"
20
20
  "argo-events"
21
+ "jobset"
21
22
  )
22
23
 
23
24
  gum style "$LOGO" \
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: ob-metaflow
3
- Version: 2.16.6.4rc0
3
+ Version: 2.16.6.5rc2
4
4
  Summary: Metaflow: More AI and ML, Less Engineering
5
5
  Author: Netflix, Outerbounds & the Metaflow Community
6
6
  Author-email: help@outerbounds.co
@@ -12,7 +12,7 @@ Requires-Dist: boto3
12
12
  Requires-Dist: pylint
13
13
  Requires-Dist: kubernetes
14
14
  Provides-Extra: stubs
15
- Requires-Dist: metaflow-stubs==2.16.6.4rc0; extra == "stubs"
15
+ Requires-Dist: metaflow-stubs==2.16.6.5rc2; extra == "stubs"
16
16
  Dynamic: author
17
17
  Dynamic: author-email
18
18
  Dynamic: description