ob-metaflow 2.16.8.2rc0__py2.py3-none-any.whl → 2.16.8.2rc2__py2.py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registries. It is provided for informational purposes only.

Potentially problematic release: this version of ob-metaflow might be problematic.

Files changed (61)
  1. metaflow/_vendor/click/core.py +4 -3
  2. metaflow/_vendor/imghdr/__init__.py +1 -7
  3. metaflow/cli.py +2 -11
  4. metaflow/cli_components/run_cmds.py +15 -0
  5. metaflow/client/core.py +1 -6
  6. metaflow/extension_support/__init__.py +3 -4
  7. metaflow/flowspec.py +113 -1
  8. metaflow/graph.py +134 -10
  9. metaflow/lint.py +70 -3
  10. metaflow/metaflow_environment.py +6 -14
  11. metaflow/package/__init__.py +9 -18
  12. metaflow/packaging_sys/__init__.py +43 -53
  13. metaflow/packaging_sys/backend.py +6 -21
  14. metaflow/packaging_sys/tar_backend.py +3 -16
  15. metaflow/packaging_sys/v1.py +21 -21
  16. metaflow/plugins/argo/argo_client.py +14 -31
  17. metaflow/plugins/argo/argo_workflows.py +22 -66
  18. metaflow/plugins/argo/argo_workflows_cli.py +2 -1
  19. metaflow/plugins/argo/argo_workflows_deployer_objects.py +0 -69
  20. metaflow/plugins/aws/step_functions/step_functions.py +6 -0
  21. metaflow/plugins/aws/step_functions/step_functions_deployer_objects.py +0 -30
  22. metaflow/plugins/cards/card_modules/basic.py +14 -2
  23. metaflow/plugins/cards/card_modules/convert_to_native_type.py +1 -7
  24. metaflow/plugins/kubernetes/kubernetes_decorator.py +1 -1
  25. metaflow/plugins/kubernetes/kubernetes_jobsets.py +28 -26
  26. metaflow/plugins/pypi/conda_decorator.py +2 -4
  27. metaflow/runner/click_api.py +7 -14
  28. metaflow/runner/deployer.py +7 -160
  29. metaflow/runner/subprocess_manager.py +12 -20
  30. metaflow/runtime.py +102 -27
  31. metaflow/task.py +46 -25
  32. metaflow/user_decorators/mutable_flow.py +1 -3
  33. metaflow/util.py +29 -0
  34. metaflow/vendor.py +6 -23
  35. metaflow/version.py +1 -1
  36. {ob_metaflow-2.16.8.2rc0.dist-info → ob_metaflow-2.16.8.2rc2.dist-info}/METADATA +2 -2
  37. {ob_metaflow-2.16.8.2rc0.dist-info → ob_metaflow-2.16.8.2rc2.dist-info}/RECORD +44 -61
  38. metaflow/_vendor/yaml/__init__.py +0 -427
  39. metaflow/_vendor/yaml/composer.py +0 -139
  40. metaflow/_vendor/yaml/constructor.py +0 -748
  41. metaflow/_vendor/yaml/cyaml.py +0 -101
  42. metaflow/_vendor/yaml/dumper.py +0 -62
  43. metaflow/_vendor/yaml/emitter.py +0 -1137
  44. metaflow/_vendor/yaml/error.py +0 -75
  45. metaflow/_vendor/yaml/events.py +0 -86
  46. metaflow/_vendor/yaml/loader.py +0 -63
  47. metaflow/_vendor/yaml/nodes.py +0 -49
  48. metaflow/_vendor/yaml/parser.py +0 -589
  49. metaflow/_vendor/yaml/reader.py +0 -185
  50. metaflow/_vendor/yaml/representer.py +0 -389
  51. metaflow/_vendor/yaml/resolver.py +0 -227
  52. metaflow/_vendor/yaml/scanner.py +0 -1435
  53. metaflow/_vendor/yaml/serializer.py +0 -111
  54. metaflow/_vendor/yaml/tokens.py +0 -104
  55. {ob_metaflow-2.16.8.2rc0.data → ob_metaflow-2.16.8.2rc2.data}/data/share/metaflow/devtools/Makefile +0 -0
  56. {ob_metaflow-2.16.8.2rc0.data → ob_metaflow-2.16.8.2rc2.data}/data/share/metaflow/devtools/Tiltfile +0 -0
  57. {ob_metaflow-2.16.8.2rc0.data → ob_metaflow-2.16.8.2rc2.data}/data/share/metaflow/devtools/pick_services.sh +0 -0
  58. {ob_metaflow-2.16.8.2rc0.dist-info → ob_metaflow-2.16.8.2rc2.dist-info}/WHEEL +0 -0
  59. {ob_metaflow-2.16.8.2rc0.dist-info → ob_metaflow-2.16.8.2rc2.dist-info}/entry_points.txt +0 -0
  60. {ob_metaflow-2.16.8.2rc0.dist-info → ob_metaflow-2.16.8.2rc2.dist-info}/licenses/LICENSE +0 -0
  61. {ob_metaflow-2.16.8.2rc0.dist-info → ob_metaflow-2.16.8.2rc2.dist-info}/top_level.txt +0 -0
metaflow/plugins/argo/argo_workflows_deployer_objects.py
@@ -203,38 +203,6 @@ class ArgoWorkflowsDeployedFlow(DeployedFlow):
 
     TYPE: ClassVar[Optional[str]] = "argo-workflows"
 
-    @classmethod
-    def list_deployed_flows(cls, flow_name: Optional[str] = None):
-        """
-        List all deployed Argo Workflow templates.
-
-        Parameters
-        ----------
-        flow_name : str, optional, default None
-            If specified, only list deployed flows for this specific flow name.
-            If None, list all deployed flows.
-
-        Yields
-        ------
-        ArgoWorkflowsDeployedFlow
-            `ArgoWorkflowsDeployedFlow` objects representing deployed
-            workflow templates on Argo Workflows.
-        """
-        from metaflow.plugins.argo.argo_workflows import ArgoWorkflows
-
-        # When flow_name is None, use all=True to get all templates
-        # When flow_name is specified, use all=False to filter by flow_name
-        all_templates = flow_name is None
-        for template_name in ArgoWorkflows.list_templates(
-            flow_name=flow_name, all=all_templates
-        ):
-            try:
-                deployed_flow = cls.from_deployment(template_name)
-                yield deployed_flow
-            except Exception:
-                # Skip templates that can't be converted to DeployedFlow objects
-                continue
-
     @classmethod
     def from_deployment(cls, identifier: str, metadata: Optional[str] = None):
         """
@@ -310,43 +278,6 @@ class ArgoWorkflowsDeployedFlow(DeployedFlow):
 
         return cls(deployer=d)
 
-    @classmethod
-    def get_triggered_run(
-        cls, identifier: str, run_id: str, metadata: Optional[str] = None
-    ):
-        """
-        Retrieves a `ArgoWorkflowsTriggeredRun` object from an identifier, a run id and
-        optional metadata.
-
-        Parameters
-        ----------
-        identifier : str
-            Deployer specific identifier for the workflow to retrieve
-        run_id : str
-            Run ID for the which to fetch the triggered run object
-        metadata : str, optional, default None
-            Optional deployer specific metadata.
-
-        Returns
-        -------
-        ArgoWorkflowsTriggeredRun
-            A `ArgoWorkflowsTriggeredRun` object representing the
-            triggered run on argo workflows.
-        """
-        deployed_flow_obj = cls.from_deployment(identifier, metadata)
-        return ArgoWorkflowsTriggeredRun(
-            deployer=deployed_flow_obj.deployer,
-            content=json.dumps(
-                {
-                    "metadata": deployed_flow_obj.deployer.metadata,
-                    "pathspec": "/".join(
-                        (deployed_flow_obj.deployer.flow_name, run_id)
-                    ),
-                    "name": run_id,
-                }
-            ),
-        )
-
     @property
     def production_token(self) -> Optional[str]:
         """
metaflow/plugins/aws/step_functions/step_functions.py
@@ -317,6 +317,12 @@ class StepFunctions(object):
                 "to AWS Step Functions is not supported currently."
             )
 
+        if node.type == "split-switch":
+            raise StepFunctionsException(
+                "Deploying flows with switch statement "
+                "to AWS Step Functions is not supported currently."
+            )
+
         # Assign an AWS Batch job to the AWS Step Functions state
         # and pass the intermediate state by exposing `JobId` and
         # `Parameters` to the child job(s) as outputs. `Index` and
metaflow/plugins/aws/step_functions/step_functions_deployer_objects.py
@@ -56,20 +56,6 @@ class StepFunctionsDeployedFlow(DeployedFlow):
 
     TYPE: ClassVar[Optional[str]] = "step-functions"
 
-    @classmethod
-    def list_deployed_flows(cls, flow_name: Optional[str] = None):
-        """
-        This method is not currently implemented for Step Functions.
-
-        Raises
-        ------
-        NotImplementedError
-            This method is not implemented for Step Functions.
-        """
-        raise NotImplementedError(
-            "list_deployed_flows is not implemented for StepFunctions"
-        )
-
     @classmethod
     def from_deployment(cls, identifier: str, metadata: Optional[str] = None):
         """
@@ -84,22 +70,6 @@ class StepFunctionsDeployedFlow(DeployedFlow):
             "from_deployment is not implemented for StepFunctions"
         )
 
-    @classmethod
-    def get_triggered_run(
-        cls, identifier: str, run_id: str, metadata: Optional[str] = None
-    ):
-        """
-        This method is not currently implemented for Step Functions.
-
-        Raises
-        ------
-        NotImplementedError
-            This method is not implemented for Step Functions.
-        """
-        raise NotImplementedError(
-            "get_triggered_run is not implemented for StepFunctions"
-        )
-
     @property
     def production_token(self: DeployedFlow) -> Optional[str]:
         """
metaflow/plugins/cards/card_modules/basic.py
@@ -20,12 +20,15 @@ def transform_flow_graph(step_info):
             return "split"
         elif node_type == "split-parallel" or node_type == "split-foreach":
            return "foreach"
+        elif node_type == "split-switch":
+            return "switch"
        return "unknown"  # Should never happen
 
    graph_dict = {}
    for stepname in step_info:
-        graph_dict[stepname] = {
-            "type": node_to_type(step_info[stepname]["type"]),
+        node_type = node_to_type(step_info[stepname]["type"])
+        node_info = {
+            "type": node_type,
             "box_next": step_info[stepname]["type"] not in ("linear", "join"),
             "box_ends": (
                 None
@@ -35,6 +38,15 @@ def transform_flow_graph(step_info):
             "next": step_info[stepname]["next"],
             "doc": step_info[stepname]["doc"],
         }
+
+        if node_type == "switch":
+            if "condition" in step_info[stepname]:
+                node_info["condition"] = step_info[stepname]["condition"]
+            if "switch_cases" in step_info[stepname]:
+                node_info["switch_cases"] = step_info[stepname]["switch_cases"]
+
+        graph_dict[stepname] = node_info
+
     return graph_dict
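With switch support added, a `split-switch` step now yields a card-graph node shaped roughly like the sketch below. Only the keys are taken from the code above; the branch names and the layout of `switch_cases` are hypothetical.

    node_info = {
        "type": "switch",
        "box_next": True,   # "split-switch" is neither "linear" nor "join"
        "box_ends": None,
        "next": ["train_small", "train_large"],  # hypothetical branch steps
        "doc": "Route to a branch based on a condition.",
        "condition": "model_size",               # copied only when present
        "switch_cases": {"small": "train_small", "large": "train_large"},
    }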
metaflow/plugins/cards/card_modules/convert_to_native_type.py
@@ -146,13 +146,7 @@ class TaskToDict:
         # Python 3.13 removes the standard ``imghdr`` module. Metaflow
         # vendors a copy so we can keep using ``what`` to detect image
         # formats irrespective of the Python version.
-        import warnings
-
-        with warnings.catch_warnings():
-            warnings.filterwarnings(
-                "ignore", category=DeprecationWarning, module="imghdr"
-            )
-            from metaflow._vendor import imghdr
+        from metaflow._vendor import imghdr
 
         resp = imghdr.what(None, h=data_object)
         # Only accept types supported on the web
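The vendored copy apparently no longer needs the `DeprecationWarning` suppression. A minimal check of the `what` helper, assuming it keeps the pre-3.13 stdlib behavior of sniffing magic bytes from an in-memory buffer:

    from metaflow._vendor import imghdr

    png_magic = b"\x89PNG\r\n\x1a\n" + b"\x00" * 16  # PNG signature plus padding
    assert imghdr.what(None, h=png_magic) == "png"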
metaflow/plugins/kubernetes/kubernetes_decorator.py
@@ -98,7 +98,7 @@ class KubernetesDecorator(StepDecorator):
        the scheduled node should not have GPUs.
    gpu_vendor : str, default KUBERNETES_GPU_VENDOR
        The vendor of the GPUs to be used for this step.
-    tolerations : List[Dict[str,str]], default []
+    tolerations : List[str], default []
        The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
        Kubernetes tolerations to use when launching pod in Kubernetes.
    labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
metaflow/plugins/kubernetes/kubernetes_jobsets.py
@@ -6,7 +6,6 @@ from collections import namedtuple
 from metaflow.exception import MetaflowException
 from metaflow.metaflow_config import KUBERNETES_JOBSET_GROUP, KUBERNETES_JOBSET_VERSION
 from metaflow.tracing import inject_tracing_vars
-from metaflow._vendor import yaml
 
 from .kube_utils import qos_requests_and_limits
 
@@ -1026,32 +1025,34 @@ class KubernetesArgoJobSet(object):
 
     def dump(self):
         client = self._kubernetes_sdk
-        js_dict = client.ApiClient().sanitize_for_serialization(
-            dict(
-                apiVersion=self._group + "/" + self._version,
-                kind="JobSet",
-                metadata=client.api_client.ApiClient().sanitize_for_serialization(
-                    client.V1ObjectMeta(
-                        name=self.name,
-                        labels=self._labels,
-                        annotations=self._annotations,
-                    )
-                ),
-                spec=dict(
-                    replicatedJobs=[self.control.dump(), self.worker.dump()],
-                    suspend=False,
-                    startupPolicy=None,
-                    successPolicy=None,
-                    # The Failure Policy helps setting the number of retries for the jobset.
-                    # but we don't rely on it and instead rely on either the local scheduler
-                    # or the Argo Workflows to handle retries.
-                    failurePolicy=None,
-                    network=None,
-                ),
-                status=None,
+
+        data = json.dumps(
+            client.ApiClient().sanitize_for_serialization(
+                dict(
+                    apiVersion=self._group + "/" + self._version,
+                    kind="JobSet",
+                    metadata=client.api_client.ApiClient().sanitize_for_serialization(
+                        client.V1ObjectMeta(
+                            name=self.name,
+                            labels=self._labels,
+                            annotations=self._annotations,
+                        )
+                    ),
+                    spec=dict(
+                        replicatedJobs=[self.control.dump(), self.worker.dump()],
+                        suspend=False,
+                        startupPolicy=None,
+                        successPolicy=None,
+                        # The Failure Policy helps setting the number of retries for the jobset.
+                        # but we don't rely on it and instead rely on either the local scheduler
+                        # or the Argo Workflows to handle retries.
+                        failurePolicy=None,
+                        network=None,
+                    ),
+                    status=None,
+                )
             )
         )
-        data = yaml.dump(js_dict, default_flow_style=False, indent=2)
         # The values we populate in the Jobset manifest (for Argo Workflows) piggybacks on the Argo Workflow's templating engine.
         # Even though Argo Workflows's templating helps us constructing all the necessary IDs and populating the fields
         # required by Metaflow, we run into one glitch. When we construct JSON/YAML serializable objects,
@@ -1066,6 +1067,7 @@ class KubernetesArgoJobSet(object):
         # Since the value of `num_parallel` can be dynamic and can change from run to run, we need to ensure that the
         # value can be passed-down dynamically and is **explicitly set as a integer** in the Jobset Manifest submitted as a
         # part of the Argo Workflow
-        quoted_substring = "'{{=asInt(inputs.parameters.workerCount)}}'"
+
+        quoted_substring = '"{{=asInt(inputs.parameters.workerCount)}}"'
         unquoted_substring = "{{=asInt(inputs.parameters.workerCount)}}"
         return data.replace(quoted_substring, unquoted_substring)
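The change of `quoted_substring` follows directly from swapping `yaml.dump` for `json.dumps` in `dump()` above: JSON wraps the templated value in double quotes where YAML emitted single quotes. Stripping the quotes lets Argo substitute an actual integer. A minimal illustration, not package code; the `parallelism` key is hypothetical:

    import json

    manifest = json.dumps({"parallelism": "{{=asInt(inputs.parameters.workerCount)}}"})
    # {"parallelism": "{{=asInt(inputs.parameters.workerCount)}}"}
    manifest = manifest.replace(
        '"{{=asInt(inputs.parameters.workerCount)}}"',
        "{{=asInt(inputs.parameters.workerCount)}}",
    )
    # {"parallelism": {{=asInt(inputs.parameters.workerCount)}}}  -> integer after templating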
metaflow/plugins/pypi/conda_decorator.py
@@ -243,11 +243,9 @@ class CondaStepDecorator(StepDecorator):
         # Ensure local installation of Metaflow is visible to user code
         python_path = self.__class__._metaflow_home.name
         addl_env_vars = {}
-        if self.__class__._addl_env_vars:
+        if self.__class__._addl_env_vars is not None:
             for key, value in self.__class__._addl_env_vars.items():
-                if key.endswith(":"):
-                    addl_env_vars[key[:-1]] = value
-                elif key == "PYTHONPATH":
+                if key == "PYTHONPATH":
                     addl_env_vars[key] = os.pathsep.join([value, python_path])
                 else:
                     addl_env_vars[key] = value
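Note that the `:`-suffixed override convention is gone; the surviving `PYTHONPATH` branch simply joins the packaged value in front of Metaflow's temporary home with the platform separator. Illustrative values:

    import os

    value = "/opt/pkg/site-packages"   # hypothetical packaged PYTHONPATH entry
    python_path = "/tmp/mf_home"       # stands in for _metaflow_home.name
    print(os.pathsep.join([value, python_path]))
    # /opt/pkg/site-packages:/tmp/mf_home   (on POSIX)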
metaflow/runner/click_api.py
@@ -43,7 +43,6 @@ from metaflow._vendor.click.types import (
 )
 from metaflow.decorators import add_decorator_options
 from metaflow.exception import MetaflowException
-from metaflow.flowspec import _FlowState
 from metaflow.includefile import FilePathClass
 from metaflow.metaflow_config import CLICK_API_PROCESS_CONFIG
 from metaflow.parameters import JSONTypeClass, flow_context
@@ -172,6 +171,7 @@ def _lazy_load_command(
     _self,
     name: str,
 ):
+
     # Context is not used in get_command so we can pass None. Since we pin click,
     # this won't change from under us.
 
@@ -516,11 +516,6 @@ class MetaflowAPI(object):
         # Note that if CLICK_API_PROCESS_CONFIG is False, we still do this because
         # it will init all parameters (config_options will be None)
         # We ignore any errors if we don't check the configs in the click API.
-
-        # Init all values in the flow mutators and then process them
-        for decorator in self._flow_cls._flow_state.get(_FlowState.FLOW_MUTATORS, []):
-            decorator.external_init()
-
         new_cls = self._flow_cls._process_config_decorators(
             config_options, process_configs=CLICK_API_PROCESS_CONFIG
         )
@@ -546,16 +541,14 @@ def extract_all_params(cmd_obj: Union[click.Command, click.Group]):
 
     for each_param in cmd_obj.params:
         if isinstance(each_param, click.Argument):
-            (
-                arg_params_sigs[each_param.name],
-                annotations[each_param.name],
-            ) = get_inspect_param_obj(each_param, inspect.Parameter.POSITIONAL_ONLY)
+            arg_params_sigs[each_param.name], annotations[each_param.name] = (
+                get_inspect_param_obj(each_param, inspect.Parameter.POSITIONAL_ONLY)
+            )
             arg_parameters[each_param.name] = each_param
         elif isinstance(each_param, click.Option):
-            (
-                opt_params_sigs[each_param.name],
-                annotations[each_param.name],
-            ) = get_inspect_param_obj(each_param, inspect.Parameter.KEYWORD_ONLY)
+            opt_params_sigs[each_param.name], annotations[each_param.name] = (
+                get_inspect_param_obj(each_param, inspect.Parameter.KEYWORD_ONLY)
+            )
             opt_parameters[each_param.name] = each_param
 
         defaults[each_param.name] = each_param.default
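The tuple-unpacking rewrite above is formatting-only. For context, `get_inspect_param_obj` pairs each click parameter with an `inspect.Parameter` so `extract_all_params` can expose a real Python signature. A hedged sketch of the idea, not the helper's actual body:

    import inspect
    from metaflow._vendor import click

    opt = click.Option(["--max-workers"], type=int, default=16)  # hypothetical option
    sig = inspect.Parameter(
        opt.name,                         # "max_workers"
        inspect.Parameter.KEYWORD_ONLY,
        default=opt.default,
        annotation=int,
    )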
metaflow/runner/deployer.py
@@ -13,9 +13,7 @@ def generate_fake_flow_file_contents(
 ):
     params_code = ""
     for _, param_details in param_info.items():
-        param_python_var_name = param_details.get(
-            "python_var_name", param_details["name"]
-        )
+        param_python_var_name = param_details["python_var_name"]
         param_name = param_details["name"]
         param_type = param_details["type"]
         param_help = param_details["description"]
@@ -231,68 +229,7 @@ class DeployedFlowMeta(type):
             }
         )
 
-        def _get_triggered_run_injected_method():
-            def f(
-                cls,
-                identifier: str,
-                run_id: str,
-                metadata: Optional[str] = None,
-                impl: str = DEFAULT_FROM_DEPLOYMENT_IMPL.replace("-", "_"),
-            ) -> "TriggeredRun":
-                """
-                Retrieves a `TriggeredRun` object from an identifier, a run id and optional
-                metadata. The `impl` parameter specifies the deployer implementation
-                to use (like `argo-workflows`).
-
-                Parameters
-                ----------
-                identifier : str
-                    Deployer specific identifier for the workflow to retrieve
-                run_id : str
-                    Run ID for the which to fetch the triggered run object
-                metadata : str, optional, default None
-                    Optional deployer specific metadata.
-                impl : str, optional, default given by METAFLOW_DEFAULT_FROM_DEPLOYMENT_IMPL
-                    The default implementation to use if not specified
-
-                Returns
-                -------
-                TriggeredRun
-                    A `TriggeredRun` object representing the triggered run corresponding
-                    to the identifier and the run id.
-                """
-                if impl in allowed_providers:
-                    return (
-                        allowed_providers[impl]
-                        .deployed_flow_type()
-                        .get_triggered_run(identifier, run_id, metadata)
-                    )
-                else:
-                    raise ValueError(
-                        f"No deployer '{impl}' exists; valid deployers are: "
-                        f"{list(allowed_providers.keys())}"
-                    )
-
-            f.__name__ = "get_triggered_run"
-            return f
-
-        def _per_type_get_triggered_run_injected_method(method_name, impl):
-            def f(
-                cls,
-                identifier: str,
-                run_id: str,
-                metadata: Optional[str] = None,
-            ):
-                return (
-                    allowed_providers[impl]
-                    .deployed_flow_type()
-                    .get_triggered_run(identifier, run_id, metadata)
-                )
-
-            f.__name__ = method_name
-            return f
-
-        def _from_deployment_injected_method():
+        def _default_injected_method():
             def f(
                 cls,
                 identifier: str,
@@ -334,7 +271,7 @@ class DeployedFlowMeta(type):
             f.__name__ = "from_deployment"
             return f
 
-        def _per_type_from_deployment_injected_method(method_name, impl):
+        def _per_type_injected_method(method_name, impl):
             def f(
                 cls,
                 identifier: str,
@@ -349,104 +286,14 @@ class DeployedFlowMeta(type):
             f.__name__ = method_name
             return f
 
-        def _list_deployed_flows_injected_method():
-            def f(
-                cls,
-                flow_name: Optional[str] = None,
-                impl: str = DEFAULT_FROM_DEPLOYMENT_IMPL.replace("-", "_"),
-            ):
-                """
-                List all deployed flows for the specified implementation.
-
-                Parameters
-                ----------
-                flow_name : str, optional, default None
-                    If specified, only list deployed flows for this specific flow name.
-                    If None, list all deployed flows.
-                impl : str, optional, default given by METAFLOW_DEFAULT_FROM_DEPLOYMENT_IMPL
-                    The default implementation to use if not specified
-
-                Yields
-                ------
-                DeployedFlow
-                    `DeployedFlow` objects representing deployed flows.
-                """
-                if impl in allowed_providers:
-                    return (
-                        allowed_providers[impl]
-                        .deployed_flow_type()
-                        .list_deployed_flows(flow_name)
-                    )
-                else:
-                    raise ValueError(
-                        f"No deployer '{impl}' exists; valid deployers are: "
-                        f"{list(allowed_providers.keys())}"
-                    )
-
-            f.__name__ = "list_deployed_flows"
-            return f
-
-        def _per_type_list_deployed_flows_injected_method(method_name, impl):
-            def f(
-                cls,
-                flow_name: Optional[str] = None,
-            ):
-                return (
-                    allowed_providers[impl]
-                    .deployed_flow_type()
-                    .list_deployed_flows(flow_name)
-                )
-
-            f.__name__ = method_name
-            return f
-
-        setattr(
-            cls, "from_deployment", classmethod(_from_deployment_injected_method())
-        )
-        setattr(
-            cls,
-            "list_deployed_flows",
-            classmethod(_list_deployed_flows_injected_method()),
-        )
-        setattr(
-            cls,
-            "get_triggered_run",
-            classmethod(_get_triggered_run_injected_method()),
-        )
+        setattr(cls, "from_deployment", classmethod(_default_injected_method()))
 
         for impl in allowed_providers:
-            from_deployment_method_name = f"from_{impl}"
-            list_deployed_flows_method_name = f"list_{impl}"
-            get_triggered_run_method_name = f"get_triggered_{impl}_run"
-
-            setattr(
-                cls,
-                from_deployment_method_name,
-                classmethod(
-                    _per_type_from_deployment_injected_method(
-                        from_deployment_method_name, impl
-                    )
-                ),
-            )
-
+            method_name = f"from_{impl}"
             setattr(
                 cls,
-                list_deployed_flows_method_name,
-                classmethod(
-                    _per_type_list_deployed_flows_injected_method(
-                        list_deployed_flows_method_name, impl
-                    )
-                ),
-            )
-
-            setattr(
-                cls,
-                get_triggered_run_method_name,
-                classmethod(
-                    _per_type_get_triggered_run_injected_method(
-                        get_triggered_run_method_name, impl
-                    )
-                ),
+                method_name,
+                classmethod(_per_type_injected_method(method_name, impl)),
             )
 
         return cls
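The surviving injection pattern in miniature: build a closure per provider, give it a stable `__name__`, and attach it as a classmethod, so `DeployedFlow` grows one `from_<impl>` accessor per registered deployer. A self-contained sketch with a fake registry; all names and the stand-in body are illustrative:

    allowed_providers = {"argo_workflows": "argo backend"}  # fake registry

    class DeployedFlowMeta(type):
        def __new__(mcs, name, bases, namespace):
            cls = super().__new__(mcs, name, bases, namespace)

            def _per_type_injected_method(method_name, impl):
                def f(cls, identifier, metadata=None):
                    # Real code resolves allowed_providers[impl].deployed_flow_type()
                    return f"{impl}: {identifier}"

                f.__name__ = method_name
                return f

            for impl in allowed_providers:
                method_name = f"from_{impl}"
                setattr(
                    cls,
                    method_name,
                    classmethod(_per_type_injected_method(method_name, impl)),
                )
            return cls

    class DeployedFlow(metaclass=DeployedFlowMeta):
        pass

    print(DeployedFlow.from_argo_workflows("helloflow-template"))
    # argo_workflows: helloflow-template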
metaflow/runner/subprocess_manager.py
@@ -152,20 +152,12 @@ class SubprocessManager(object):
         int
             The process ID of the subprocess.
         """
-        env = env or {}
-        installed_root = os.environ.get("METAFLOW_EXTRACTED_ROOT", get_metaflow_root())
-
-        for k, v in MetaflowCodeContent.get_env_vars_for_packaged_metaflow(
-            installed_root
-        ).items():
-            if k.endswith(":"):
-                # Override
-                env[k[:-1]] = v
-            elif k in env:
-                env[k] = "%s:%s" % (v, env[k])
-            else:
-                env[k] = v
-
+        updated_env = MetaflowCodeContent.get_env_vars_for_packaged_metaflow(
+            get_metaflow_root()
+        )
+        if updated_env:
+            env = env or {}
+            env.update(updated_env)
         command_obj = CommandManager(command, env, cwd)
         pid = command_obj.run(show_output=show_output)
         self.commands[pid] = command_obj
@@ -196,12 +188,12 @@ class SubprocessManager(object):
         int
             The process ID of the subprocess.
         """
-        env = env or {}
-        if "PYTHONPATH" in env:
-            env["PYTHONPATH"] = "%s:%s" % (get_metaflow_root(), env["PYTHONPATH"])
-        else:
-            env["PYTHONPATH"] = get_metaflow_root()
-
+        updated_env = MetaflowCodeContent.get_env_vars_for_packaged_metaflow(
+            get_metaflow_root()
+        )
+        if updated_env:
+            env = env or {}
+            env.update(updated_env)
         command_obj = CommandManager(command, env, cwd)
         pid = await command_obj.async_run()
         self.commands[pid] = command_obj
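Behavioral note on both hunks above: rc0 merged path-like variables (prepending, or honoring a `:`-suffixed override key), while rc2 lets `dict.update` overwrite any overlapping key outright. Illustrated with hypothetical values:

    env = {"PYTHONPATH": "/user/libs"}

    # rc0: prepend the Metaflow root, preserving the caller's entry
    old = dict(env)
    old["PYTHONPATH"] = "%s:%s" % ("/mf/root", old["PYTHONPATH"])
    # {'PYTHONPATH': '/mf/root:/user/libs'}

    # rc2: wholesale update with whatever the packaging system returns
    updated_env = {"PYTHONPATH": "/mf/root"}  # hypothetical return value
    new = dict(env)
    if updated_env:
        new.update(updated_env)
    # {'PYTHONPATH': '/mf/root'} -- the caller's entry is replaced, not merged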