metaflow 2.12.28__py2.py3-none-any.whl → 2.12.30__py2.py3-none-any.whl

This diff compares two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
Files changed (59)
  1. metaflow/__init__.py +2 -3
  2. metaflow/client/core.py +2 -2
  3. metaflow/clone_util.py +1 -1
  4. metaflow/cmd/develop/stub_generator.py +623 -233
  5. metaflow/datastore/task_datastore.py +1 -1
  6. metaflow/extension_support/plugins.py +1 -0
  7. metaflow/flowspec.py +2 -2
  8. metaflow/includefile.py +8 -14
  9. metaflow/metaflow_config.py +4 -0
  10. metaflow/metaflow_current.py +1 -1
  11. metaflow/parameters.py +3 -0
  12. metaflow/plugins/__init__.py +12 -3
  13. metaflow/plugins/airflow/airflow_cli.py +5 -0
  14. metaflow/plugins/airflow/airflow_decorator.py +1 -1
  15. metaflow/plugins/argo/argo_workflows_decorator.py +1 -1
  16. metaflow/plugins/argo/argo_workflows_deployer.py +77 -263
  17. metaflow/plugins/argo/argo_workflows_deployer_objects.py +381 -0
  18. metaflow/plugins/aws/batch/batch_cli.py +1 -1
  19. metaflow/plugins/aws/batch/batch_decorator.py +2 -2
  20. metaflow/plugins/aws/step_functions/step_functions_cli.py +7 -0
  21. metaflow/plugins/aws/step_functions/step_functions_decorator.py +1 -1
  22. metaflow/plugins/aws/step_functions/step_functions_deployer.py +65 -224
  23. metaflow/plugins/aws/step_functions/step_functions_deployer_objects.py +236 -0
  24. metaflow/plugins/azure/includefile_support.py +2 -0
  25. metaflow/plugins/cards/card_cli.py +3 -2
  26. metaflow/plugins/cards/card_modules/components.py +9 -9
  27. metaflow/plugins/cards/card_server.py +39 -14
  28. metaflow/plugins/datatools/local.py +2 -0
  29. metaflow/plugins/datatools/s3/s3.py +2 -0
  30. metaflow/plugins/env_escape/__init__.py +3 -3
  31. metaflow/plugins/gcp/includefile_support.py +3 -0
  32. metaflow/plugins/kubernetes/kubernetes_cli.py +1 -1
  33. metaflow/plugins/kubernetes/kubernetes_decorator.py +5 -4
  34. metaflow/plugins/{metadata → metadata_providers}/local.py +2 -2
  35. metaflow/plugins/{metadata → metadata_providers}/service.py +2 -2
  36. metaflow/plugins/parallel_decorator.py +1 -1
  37. metaflow/plugins/pypi/conda_decorator.py +1 -1
  38. metaflow/plugins/test_unbounded_foreach_decorator.py +1 -1
  39. metaflow/runner/click_api.py +4 -0
  40. metaflow/runner/deployer.py +139 -269
  41. metaflow/runner/deployer_impl.py +167 -0
  42. metaflow/runner/metaflow_runner.py +10 -9
  43. metaflow/runner/nbdeploy.py +12 -13
  44. metaflow/runner/nbrun.py +3 -3
  45. metaflow/runner/utils.py +55 -8
  46. metaflow/runtime.py +1 -1
  47. metaflow/task.py +1 -1
  48. metaflow/version.py +1 -1
  49. {metaflow-2.12.28.dist-info → metaflow-2.12.30.dist-info}/METADATA +2 -2
  50. {metaflow-2.12.28.dist-info → metaflow-2.12.30.dist-info}/RECORD +59 -56
  51. {metaflow-2.12.28.dist-info → metaflow-2.12.30.dist-info}/WHEEL +1 -1
  52. /metaflow/{metadata → metadata_provider}/__init__.py +0 -0
  53. /metaflow/{metadata → metadata_provider}/heartbeat.py +0 -0
  54. /metaflow/{metadata → metadata_provider}/metadata.py +0 -0
  55. /metaflow/{metadata → metadata_provider}/util.py +0 -0
  56. /metaflow/plugins/{metadata → metadata_providers}/__init__.py +0 -0
  57. {metaflow-2.12.28.dist-info → metaflow-2.12.30.dist-info}/LICENSE +0 -0
  58. {metaflow-2.12.28.dist-info → metaflow-2.12.30.dist-info}/entry_points.txt +0 -0
  59. {metaflow-2.12.28.dist-info → metaflow-2.12.30.dist-info}/top_level.txt +0 -0
@@ -10,7 +10,7 @@ from types import MethodType, FunctionType
 
  from .. import metaflow_config
  from ..exception import MetaflowInternalError
- from ..metadata import DataArtifact, MetaDatum
+ from ..metadata_provider import DataArtifact, MetaDatum
  from ..parameters import Parameter
  from ..util import Path, is_stringish, to_fileobj
 
@@ -178,6 +178,7 @@ _plugin_categories = {
  "environment": lambda x: x.TYPE,
  "metadata_provider": lambda x: x.TYPE,
  "datastore": lambda x: x.TYPE,
+ "dataclient": lambda x: x.TYPE,
  "secrets_provider": lambda x: x.TYPE,
  "gcp_client_provider": lambda x: x.name,
  "deployer_impl_provider": lambda x: x.TYPE,
metaflow/flowspec.py CHANGED
@@ -64,7 +64,7 @@ class ParallelUBF(UnboundedForeachInput):
  return item or 0  # item is None for the control task, but it is also split 0
 
 
- class _FlowSpecMeta(type):
+ class FlowSpecMeta(type):
  def __new__(cls, name, bases, dct):
  f = super().__new__(cls, name, bases, dct)
  # This makes sure to give _flow_decorators to each
@@ -75,7 +75,7 @@ class _FlowSpecMeta(type):
  return f
 
 
- class FlowSpec(metaclass=_FlowSpecMeta):
+ class FlowSpec(metaclass=FlowSpecMeta):
  """
  Main class from which all Flows should inherit.
 
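The rename only makes the metaclass's name public; flows themselves are unaffected because they subclass `FlowSpec` and pick up the metaclass through inheritance. A minimal sketch, assuming metaflow 2.12.30 is installed (the `HelloFlow` class is an illustrative placeholder):

```python
# Minimal sketch: user flows are unchanged by the rename; they still just
# subclass FlowSpec. FlowSpecMeta is now importable under its public name.
from metaflow import FlowSpec, step
from metaflow.flowspec import FlowSpecMeta


class HelloFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


# The metaclass is applied automatically through inheritance.
assert type(HelloFlow) is FlowSpecMeta
```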
metaflow/includefile.py CHANGED
@@ -1,6 +1,7 @@
  from collections import namedtuple
  import gzip
 
+ import importlib
  import io
  import json
  import os
@@ -17,6 +18,8 @@ from .parameters import (
  Parameter,
  ParameterContext,
  )
+
+ from .plugins import DATACLIENTS
  from .util import get_username
 
  import functools
@@ -47,16 +50,7 @@ _DelayedExecContext = namedtuple(
 
 
  # From here on out, this is the IncludeFile implementation.
- from metaflow.plugins.datatools import Local, S3
- from metaflow.plugins.azure.includefile_support import Azure
- from metaflow.plugins.gcp.includefile_support import GS
-
- DATACLIENTS = {
- "local": Local,
- "s3": S3,
- "azure": Azure,
- "gs": GS,
- }
+ _dict_dataclients = {d.TYPE: d for d in DATACLIENTS}
 
 
  class IncludedFile(object):
@@ -167,7 +161,7 @@ class FilePathClass(click.ParamType):
  "IncludeFile using a direct reference to a file in cloud storage is no "
  "longer supported. Contact the Metaflow team if you need this supported"
  )
- # if DATACLIENTS.get(path[:prefix_pos]) is None:
+ # if _dict_dataclients.get(path[:prefix_pos]) is None:
  #     self.fail(
  #         "IncludeFile: no handler for external file of type '%s' "
  #         "(given path is '%s')" % (path[:prefix_pos], path)
@@ -187,7 +181,7 @@ class FilePathClass(click.ParamType):
  pass
  except OSError:
  self.fail("IncludeFile: could not open file '%s' for reading" % path)
- handler = DATACLIENTS.get(ctx.ds_type)
+ handler = _dict_dataclients.get(ctx.ds_type)
  if handler is None:
  self.fail(
  "IncludeFile: no data-client for datastore of type '%s'"
@@ -213,7 +207,7 @@ class FilePathClass(click.ParamType):
  ctx.path,
  ctx.is_text,
  ctx.encoding,
- DATACLIENTS[ctx.handler_type],
+ _dict_dataclients[ctx.handler_type],
  ctx.echo,
  )
  )
@@ -425,7 +419,7 @@ class UploaderV1:
  if prefix_pos < 0:
  raise MetaflowException("Malformed URL: '%s'" % url)
  prefix = url[:prefix_pos]
- handler = DATACLIENTS.get(prefix)
+ handler = _dict_dataclients.get(prefix)
  if handler is None:
  raise MetaflowException("Could not find data client for '%s'" % prefix)
  return handler
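`IncludeFile` no longer hardcodes its data clients; it builds `_dict_dataclients` from the pluggable `DATACLIENTS` list, keyed by each client's `TYPE`. A minimal illustration of the lookup pattern (the `Local`/`S3` stubs below are placeholders, not metaflow code):

```python
# Illustrative sketch of the lookup pattern above. Each data client class is
# assumed to expose a TYPE attribute ("local", "s3", "azure", "gs"), which is
# what the dict comprehension keys on.
class Local:
    TYPE = "local"

class S3:
    TYPE = "s3"

DATACLIENTS = [Local, S3]
_dict_dataclients = {d.TYPE: d for d in DATACLIENTS}

assert _dict_dataclients.get("s3") is S3
assert _dict_dataclients.get("gs") is None  # unknown type -> the error path
```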
@@ -43,6 +43,10 @@ DEFAULT_GCP_CLIENT_PROVIDER = from_conf("DEFAULT_GCP_CLIENT_PROVIDER", "gcp-defa
  DEFAULT_SECRETS_BACKEND_TYPE = from_conf("DEFAULT_SECRETS_BACKEND_TYPE")
  DEFAULT_SECRETS_ROLE = from_conf("DEFAULT_SECRETS_ROLE")
 
+ DEFAULT_FROM_DEPLOYMENT_IMPL = from_conf(
+ "DEFAULT_FROM_DEPLOYMENT_IMPL", "argo-workflows"
+ )
+
  ###
  # User configuration
  ###
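`DEFAULT_FROM_DEPLOYMENT_IMPL` is read through `from_conf`, so it should be overridable like other metaflow_config options. A hedged sketch, assuming the usual `METAFLOW_`-prefixed environment variable convention applies to this new key:

```python
import os

# Assumption: from_conf("DEFAULT_FROM_DEPLOYMENT_IMPL", ...) honors the
# METAFLOW_-prefixed variable, as other metaflow_config options do.
os.environ["METAFLOW_DEFAULT_FROM_DEPLOYMENT_IMPL"] = "step-functions"

# Must be set before metaflow_config is imported for the override to apply.
from metaflow.metaflow_config import DEFAULT_FROM_DEPLOYMENT_IMPL

print(DEFAULT_FROM_DEPLOYMENT_IMPL)  # "step-functions" if the override took effect
```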
@@ -30,7 +30,7 @@ class Current(object):
  raise ex
 
  self.__class__.graph = property(
- fget=lambda _: _raise(RuntimeError("Graph is not available"))
+ fget=lambda self: _raise(RuntimeError("Graph is not available"))
  )
 
  def _set_env(
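The change only renames the lambda's parameter: `property(fget=...)` always passes the instance as the single positional argument, so calling it `self` just makes that explicit. A standalone illustration (not metaflow code):

```python
# property(fget=...) passes the instance as the lambda's positional argument,
# whatever the parameter is named.
def _raise(ex):
    raise ex

class Current:
    graph = property(fget=lambda self: _raise(RuntimeError("Graph is not available")))

try:
    Current().graph
except RuntimeError as e:
    print(e)  # Graph is not available
```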
metaflow/parameters.py CHANGED
@@ -438,3 +438,6 @@ def add_custom_parameters(deploy_mode=False):
  return cmd
 
  return wrapper
+
+
+ JSONType = JSONTypeClass()
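`JSONType` is now instantiated in `metaflow.parameters` itself. Its typical use is unchanged: passed as a `Parameter` type so a JSON string on the command line arrives as a parsed Python object. A short sketch using the long-standing public import path (`ConfigFlow` and the `config` parameter are illustrative names):

```python
from metaflow import FlowSpec, Parameter, JSONType, step


class ConfigFlow(FlowSpec):
    # `--config '{"lr": 0.01}'` on the CLI arrives as a parsed dict.
    config = Parameter("config", type=JSONType, default='{"lr": 0.01}')

    @step
    def start(self):
        print(self.config["lr"])
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ConfigFlow()
```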
@@ -73,8 +73,8 @@ ENVIRONMENTS_DESC = [
 
  # Add metadata providers here
  METADATA_PROVIDERS_DESC = [
- ("service", ".metadata.service.ServiceMetadataProvider"),
- ("local", ".metadata.local.LocalMetadataProvider"),
+ ("service", ".metadata_providers.service.ServiceMetadataProvider"),
+ ("local", ".metadata_providers.local.LocalMetadataProvider"),
  ]
 
  # Add datastore here
@@ -85,13 +85,21 @@ DATASTORES_DESC = [
  ("gs", ".datastores.gs_storage.GSStorage"),
  ]
 
+ # Dataclients are used for IncludeFile
+ DATACLIENTS_DESC = [
+ ("local", ".datatools.Local"),
+ ("s3", ".datatools.S3"),
+ ("azure", ".azure.includefile_support.Azure"),
+ ("gs", ".gcp.includefile_support.GS"),
+ ]
+
  # Add non monitoring/logging sidecars here
  SIDECARS_DESC = [
  (
  "save_logs_periodically",
  "..mflog.save_logs_periodically.SaveLogsPeriodicallySidecar",
  ),
- ("heartbeat", "metaflow.metadata.heartbeat.MetadataHeartBeat"),
+ ("heartbeat", "metaflow.metadata_provider.heartbeat.MetadataHeartBeat"),
  ]
 
  # Add logging sidecars here
@@ -161,6 +169,7 @@ FLOW_DECORATORS = resolve_plugins("flow_decorator")
  ENVIRONMENTS = resolve_plugins("environment")
  METADATA_PROVIDERS = resolve_plugins("metadata_provider")
  DATASTORES = resolve_plugins("datastore")
+ DATACLIENTS = resolve_plugins("dataclient")
  SIDECARS = resolve_plugins("sidecar")
  LOGGING_SIDECARS = resolve_plugins("logging_sidecar")
  MONITOR_SIDECARS = resolve_plugins("monitor_sidecar")
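The new `dataclient` category is resolved like the others: the `(name, path)` pairs in `DATACLIENTS_DESC` are loaded by `resolve_plugins("dataclient")`, and `_plugin_categories` (see the hunk near the top of this diff) identifies each loaded class by its `TYPE` attribute. A hedged sketch of the resulting shape; the exact classes and ordering depend on installed extensions:

```python
# Sketch: DATACLIENTS is the resolved list of data client classes that
# IncludeFile turns into its TYPE -> class mapping.
from metaflow.plugins import DATACLIENTS

print({client.TYPE for client in DATACLIENTS})
# expected to include the four built-ins: {"local", "s3", "azure", "gs"}
```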
@@ -389,6 +389,11 @@ def _validate_workflow(flow, graph, flow_datastore, metadata, workflow_timeout):
  "Step *%s* is marked for execution on AWS Batch with Airflow which isn't currently supported."
  % node.name
  )
+ if any([d.name == "slurm" for d in node.decorators]):
+ raise NotSupportedException(
+ "Step *%s* is marked for execution on Slurm with Airflow which isn't currently supported."
+ % node.name
+ )
  SUPPORTED_DATASTORES = ("azure", "s3", "gs")
  if flow_datastore.TYPE not in SUPPORTED_DATASTORES:
  raise AirflowException(
@@ -1,7 +1,7 @@
  import json
  import os
  from metaflow.decorators import StepDecorator
- from metaflow.metadata import MetaDatum
+ from metaflow.metadata_provider import MetaDatum
 
  from .airflow_utils import (
  TASK_ID_XCOM_KEY,
@@ -6,7 +6,7 @@ import time
  from metaflow import current
  from metaflow.decorators import StepDecorator
  from metaflow.events import Trigger
- from metaflow.metadata import MetaDatum
+ from metaflow.metadata_provider import MetaDatum
  from metaflow.metaflow_config import ARGO_EVENTS_WEBHOOK_URL
  from metaflow.graph import DAGNode, FlowGraph
  from metaflow.flowspec import FlowSpec
@@ -1,292 +1,106 @@
- import sys
- import tempfile
- from typing import Optional, ClassVar
+ from typing import Any, ClassVar, Dict, Optional, TYPE_CHECKING, Type
 
- from metaflow.plugins.argo.argo_workflows import ArgoWorkflows
- from metaflow.runner.deployer import (
- DeployerImpl,
- DeployedFlow,
- TriggeredRun,
- get_lower_level_group,
- handle_timeout,
- )
+ from metaflow.runner.deployer_impl import DeployerImpl
 
-
- def suspend(instance: TriggeredRun, **kwargs):
- """
- Suspend the running workflow.
-
- Parameters
- ----------
- **kwargs : Any
- Additional arguments to pass to the suspend command.
-
- Returns
- -------
- bool
- True if the command was successful, False otherwise.
- """
- _, run_id = instance.pathspec.split("/")
-
- # every subclass needs to have `self.deployer_kwargs`
- command = get_lower_level_group(
- instance.deployer.api,
- instance.deployer.top_level_kwargs,
- instance.deployer.TYPE,
- instance.deployer.deployer_kwargs,
- ).suspend(run_id=run_id, **kwargs)
-
- pid = instance.deployer.spm.run_command(
- [sys.executable, *command],
- env=instance.deployer.env_vars,
- cwd=instance.deployer.cwd,
- show_output=instance.deployer.show_output,
- )
-
- command_obj = instance.deployer.spm.get(pid)
- return command_obj.process.returncode == 0
-
-
- def unsuspend(instance: TriggeredRun, **kwargs):
- """
- Unsuspend the suspended workflow.
-
- Parameters
- ----------
- **kwargs : Any
- Additional arguments to pass to the unsuspend command.
-
- Returns
- -------
- bool
- True if the command was successful, False otherwise.
- """
- _, run_id = instance.pathspec.split("/")
-
- # every subclass needs to have `self.deployer_kwargs`
- command = get_lower_level_group(
- instance.deployer.api,
- instance.deployer.top_level_kwargs,
- instance.deployer.TYPE,
- instance.deployer.deployer_kwargs,
- ).unsuspend(run_id=run_id, **kwargs)
-
- pid = instance.deployer.spm.run_command(
- [sys.executable, *command],
- env=instance.deployer.env_vars,
- cwd=instance.deployer.cwd,
- show_output=instance.deployer.show_output,
- )
-
- command_obj = instance.deployer.spm.get(pid)
- return command_obj.process.returncode == 0
-
-
- def terminate(instance: TriggeredRun, **kwargs):
- """
- Terminate the running workflow.
-
- Parameters
- ----------
- **kwargs : Any
- Additional arguments to pass to the terminate command.
-
- Returns
- -------
- bool
- True if the command was successful, False otherwise.
- """
- _, run_id = instance.pathspec.split("/")
-
- # every subclass needs to have `self.deployer_kwargs`
- command = get_lower_level_group(
- instance.deployer.api,
- instance.deployer.top_level_kwargs,
- instance.deployer.TYPE,
- instance.deployer.deployer_kwargs,
- ).terminate(run_id=run_id, **kwargs)
-
- pid = instance.deployer.spm.run_command(
- [sys.executable, *command],
- env=instance.deployer.env_vars,
- cwd=instance.deployer.cwd,
- show_output=instance.deployer.show_output,
- )
-
- command_obj = instance.deployer.spm.get(pid)
- return command_obj.process.returncode == 0
-
-
- def status(instance: TriggeredRun):
- """
- Get the status of the triggered run.
-
- Returns
- -------
- str, optional
- The status of the workflow considering the run object, or None if the status could not be retrieved.
- """
- from metaflow.plugins.argo.argo_workflows_cli import (
- get_status_considering_run_object,
- )
-
- flow_name, run_id = instance.pathspec.split("/")
- name = run_id[5:]
- status = ArgoWorkflows.get_workflow_status(flow_name, name)
- if status is not None:
- return get_status_considering_run_object(status, instance.run)
- return None
-
-
- def production_token(instance: DeployedFlow):
- """
- Get the production token for the deployed flow.
-
- Returns
- -------
- str, optional
- The production token, None if it cannot be retrieved.
- """
- try:
- _, production_token = ArgoWorkflows.get_existing_deployment(
- instance.deployer.name
- )
- return production_token
- except TypeError:
- return None
-
-
- def delete(instance: DeployedFlow, **kwargs):
- """
- Delete the deployed flow.
-
- Parameters
- ----------
- **kwargs : Any
- Additional arguments to pass to the delete command.
-
- Returns
- -------
- bool
- True if the command was successful, False otherwise.
- """
- command = get_lower_level_group(
- instance.deployer.api,
- instance.deployer.top_level_kwargs,
- instance.deployer.TYPE,
- instance.deployer.deployer_kwargs,
- ).delete(**kwargs)
-
- pid = instance.deployer.spm.run_command(
- [sys.executable, *command],
- env=instance.deployer.env_vars,
- cwd=instance.deployer.cwd,
- show_output=instance.deployer.show_output,
- )
-
- command_obj = instance.deployer.spm.get(pid)
- return command_obj.process.returncode == 0
-
-
- def trigger(instance: DeployedFlow, **kwargs):
- """
- Trigger a new run for the deployed flow.
-
- Parameters
- ----------
- **kwargs : Any
- Additional arguments to pass to the trigger command, `Parameters` in particular
-
- Returns
- -------
- ArgoWorkflowsTriggeredRun
- The triggered run instance.
-
- Raises
- ------
- Exception
- If there is an error during the trigger process.
- """
- with tempfile.TemporaryDirectory() as temp_dir:
- tfp_runner_attribute = tempfile.NamedTemporaryFile(dir=temp_dir, delete=False)
-
- # every subclass needs to have `self.deployer_kwargs`
- command = get_lower_level_group(
- instance.deployer.api,
- instance.deployer.top_level_kwargs,
- instance.deployer.TYPE,
- instance.deployer.deployer_kwargs,
- ).trigger(deployer_attribute_file=tfp_runner_attribute.name, **kwargs)
-
- pid = instance.deployer.spm.run_command(
- [sys.executable, *command],
- env=instance.deployer.env_vars,
- cwd=instance.deployer.cwd,
- show_output=instance.deployer.show_output,
- )
-
- command_obj = instance.deployer.spm.get(pid)
- content = handle_timeout(
- tfp_runner_attribute, command_obj, instance.deployer.file_read_timeout
- )
-
- if command_obj.process.returncode == 0:
- triggered_run = TriggeredRun(deployer=instance.deployer, content=content)
- triggered_run._enrich_object(
- {
- "status": property(status),
- "terminate": terminate,
- "suspend": suspend,
- "unsuspend": unsuspend,
- }
- )
- return triggered_run
-
- raise Exception(
- "Error triggering %s on %s for %s"
- % (instance.deployer.name, instance.deployer.TYPE, instance.deployer.flow_file)
- )
+ if TYPE_CHECKING:
+ import metaflow.plugins.argo.argo_workflows_deployer_objects
 
 
  class ArgoWorkflowsDeployer(DeployerImpl):
  """
  Deployer implementation for Argo Workflows.
 
- Attributes
+ Parameters
  ----------
- TYPE : ClassVar[Optional[str]]
- The type of the deployer, which is "argo-workflows".
+ name : str, optional, default None
+ Argo workflow name. The flow name is used instead if this option is not specified.
  """
 
  TYPE: ClassVar[Optional[str]] = "argo-workflows"
 
- def __init__(self, deployer_kwargs, **kwargs):
+ def __init__(self, deployer_kwargs: Dict[str, str], **kwargs):
  """
  Initialize the ArgoWorkflowsDeployer.
 
  Parameters
  ----------
- deployer_kwargs : dict
+ deployer_kwargs : Dict[str, str]
  The deployer-specific keyword arguments.
  **kwargs : Any
  Additional arguments to pass to the superclass constructor.
  """
- self.deployer_kwargs = deployer_kwargs
+ self._deployer_kwargs = deployer_kwargs
  super().__init__(**kwargs)
 
- def _enrich_deployed_flow(self, deployed_flow: DeployedFlow):
+ @property
+ def deployer_kwargs(self) -> Dict[str, Any]:
+ return self._deployer_kwargs
+
+ @staticmethod
+ def deployed_flow_type() -> (
+ Type[
+ "metaflow.plugins.argo.argo_workflows_deployer_objects.ArgoWorkflowsDeployedFlow"
+ ]
+ ):
+ from .argo_workflows_deployer_objects import ArgoWorkflowsDeployedFlow
+
+ return ArgoWorkflowsDeployedFlow
+
+ def create(
+ self, **kwargs
+ ) -> "metaflow.plugins.argo.argo_workflows_deployer_objects.ArgoWorkflowsDeployedFlow":
  """
- Enrich the DeployedFlow object with additional properties and methods.
+ Create a new ArgoWorkflow deployment.
 
  Parameters
  ----------
- deployed_flow : DeployedFlow
- The deployed flow object to enrich.
+ authorize : str, optional, default None
+ Authorize using this production token. Required when re-deploying an existing flow
+ for the first time. The token is cached in METAFLOW_HOME.
+ generate_new_token : bool, optional, default False
+ Generate a new production token for this flow. Moves the production flow to a new namespace.
+ given_token : str, optional, default None
+ Use the given production token for this flow. Moves the production flow to the given namespace.
+ tags : List[str], optional, default None
+ Annotate all objects produced by Argo Workflows runs with these tags.
+ user_namespace : str, optional, default None
+ Change the namespace from the default (production token) to the given tag.
+ only_json : bool, optional, default False
+ Only print out JSON sent to Argo Workflows without deploying anything.
+ max_workers : int, optional, default 100
+ Maximum number of parallel processes.
+ workflow_timeout : int, optional, default None
+ Workflow timeout in seconds.
+ workflow_priority : int, optional, default None
+ Workflow priority as an integer. Higher priority workflows are processed first
+ if Argo Workflows controller is configured to process limited parallel workflows.
+ auto_emit_argo_events : bool, optional, default True
+ Auto emits Argo Events when the run completes successfully.
+ notify_on_error : bool, optional, default False
+ Notify if the workflow fails.
+ notify_on_success : bool, optional, default False
+ Notify if the workflow succeeds.
+ notify_slack_webhook_url : str, optional, default ''
+ Slack incoming webhook url for workflow success/failure notifications.
+ notify_pager_duty_integration_key : str, optional, default ''
+ PagerDuty Events API V2 Integration key for workflow success/failure notifications.
+ enable_heartbeat_daemon : bool, optional, default False
+ Use a daemon container to broadcast heartbeats.
+ deployer_attribute_file : str, optional, default None
+ Write the workflow name to the specified file. Used internally for Metaflow's Deployer API.
+ enable_error_msg_capture : bool, optional, default True
+ Capture stack trace of first failed task in exit hook.
+
+ Returns
+ -------
+ ArgoWorkflowsDeployedFlow
+ The Flow deployed to Argo Workflows.
  """
- deployed_flow._enrich_object(
- {
- "production_token": property(production_token),
- "trigger": trigger,
- "delete": delete,
- }
- )
+
+ # Prevent circular import
+ from .argo_workflows_deployer_objects import ArgoWorkflowsDeployedFlow
+
+ return self._create(ArgoWorkflowsDeployedFlow, **kwargs)
+
+
+ _addl_stubgen_modules = ["metaflow.plugins.argo.argo_workflows_deployer_objects"]
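With the deployer split into a `DeployerImpl` subclass (`argo_workflows_deployer.py`) and result objects (`argo_workflows_deployer_objects.py`), the free functions previously attached via `_enrich_object` become real methods on the deployed-flow and triggered-run objects. A hedged usage sketch of the resulting Deployer API; `flow.py`, the workflow name, and the `alpha` parameter are placeholders:

```python
# Sketch of the refactored Deployer API; method names follow the code above
# (create(), trigger(), suspend(), unsuspend(), terminate()).
from metaflow import Deployer

deployed = Deployer("flow.py").argo_workflows(name="hello-flow").create()
print(deployed.production_token)

run = deployed.trigger(alpha=0.5)  # flow Parameters passed as kwargs
print(run.status)

run.suspend()
run.unsuspend()
run.terminate()
```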