metaflow 2.15.4__py2.py3-none-any.whl → 2.15.6__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. metaflow/_vendor/typeguard/_checkers.py +259 -95
  2. metaflow/_vendor/typeguard/_config.py +4 -4
  3. metaflow/_vendor/typeguard/_decorators.py +8 -12
  4. metaflow/_vendor/typeguard/_functions.py +33 -32
  5. metaflow/_vendor/typeguard/_pytest_plugin.py +40 -13
  6. metaflow/_vendor/typeguard/_suppression.py +3 -5
  7. metaflow/_vendor/typeguard/_transformer.py +84 -48
  8. metaflow/_vendor/typeguard/_union_transformer.py +1 -0
  9. metaflow/_vendor/typeguard/_utils.py +13 -9
  10. metaflow/_vendor/typing_extensions.py +1088 -500
  11. metaflow/_vendor/v3_7/__init__.py +1 -0
  12. metaflow/_vendor/v3_7/importlib_metadata/__init__.py +1063 -0
  13. metaflow/_vendor/v3_7/importlib_metadata/_adapters.py +68 -0
  14. metaflow/_vendor/v3_7/importlib_metadata/_collections.py +30 -0
  15. metaflow/_vendor/v3_7/importlib_metadata/_compat.py +71 -0
  16. metaflow/_vendor/v3_7/importlib_metadata/_functools.py +104 -0
  17. metaflow/_vendor/v3_7/importlib_metadata/_itertools.py +73 -0
  18. metaflow/_vendor/v3_7/importlib_metadata/_meta.py +48 -0
  19. metaflow/_vendor/v3_7/importlib_metadata/_text.py +99 -0
  20. metaflow/_vendor/v3_7/importlib_metadata/py.typed +0 -0
  21. metaflow/_vendor/v3_7/typeguard/__init__.py +48 -0
  22. metaflow/_vendor/v3_7/typeguard/_checkers.py +906 -0
  23. metaflow/_vendor/v3_7/typeguard/_config.py +108 -0
  24. metaflow/_vendor/v3_7/typeguard/_decorators.py +237 -0
  25. metaflow/_vendor/v3_7/typeguard/_exceptions.py +42 -0
  26. metaflow/_vendor/v3_7/typeguard/_functions.py +310 -0
  27. metaflow/_vendor/v3_7/typeguard/_importhook.py +213 -0
  28. metaflow/_vendor/v3_7/typeguard/_memo.py +48 -0
  29. metaflow/_vendor/v3_7/typeguard/_pytest_plugin.py +100 -0
  30. metaflow/_vendor/v3_7/typeguard/_suppression.py +88 -0
  31. metaflow/_vendor/v3_7/typeguard/_transformer.py +1207 -0
  32. metaflow/_vendor/v3_7/typeguard/_union_transformer.py +54 -0
  33. metaflow/_vendor/v3_7/typeguard/_utils.py +169 -0
  34. metaflow/_vendor/v3_7/typeguard/py.typed +0 -0
  35. metaflow/_vendor/v3_7/typing_extensions.py +3072 -0
  36. metaflow/_vendor/v3_7/zipp.py +329 -0
  37. metaflow/cmd/develop/stubs.py +1 -1
  38. metaflow/extension_support/__init__.py +1 -1
  39. metaflow/plugins/argo/argo_client.py +9 -2
  40. metaflow/plugins/argo/argo_workflows.py +79 -28
  41. metaflow/plugins/argo/argo_workflows_cli.py +16 -25
  42. metaflow/plugins/argo/argo_workflows_deployer_objects.py +5 -2
  43. metaflow/plugins/cards/card_modules/main.js +52 -50
  44. metaflow/plugins/metadata_providers/service.py +16 -7
  45. metaflow/plugins/pypi/utils.py +4 -0
  46. metaflow/runner/click_api.py +7 -2
  47. metaflow/runner/deployer.py +3 -2
  48. metaflow/vendor.py +1 -0
  49. metaflow/version.py +1 -1
  50. {metaflow-2.15.4.data → metaflow-2.15.6.data}/data/share/metaflow/devtools/Tiltfile +4 -4
  51. metaflow-2.15.6.dist-info/METADATA +103 -0
  52. {metaflow-2.15.4.dist-info → metaflow-2.15.6.dist-info}/RECORD +58 -32
  53. {metaflow-2.15.4.dist-info → metaflow-2.15.6.dist-info}/WHEEL +1 -1
  54. metaflow-2.15.4.dist-info/METADATA +0 -110
  55. {metaflow-2.15.4.data → metaflow-2.15.6.data}/data/share/metaflow/devtools/Makefile +0 -0
  56. {metaflow-2.15.4.data → metaflow-2.15.6.data}/data/share/metaflow/devtools/pick_services.sh +0 -0
  57. {metaflow-2.15.4.dist-info → metaflow-2.15.6.dist-info}/LICENSE +0 -0
  58. {metaflow-2.15.4.dist-info → metaflow-2.15.6.dist-info}/entry_points.txt +0 -0
  59. {metaflow-2.15.4.dist-info → metaflow-2.15.6.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,329 @@
1
+ import io
2
+ import posixpath
3
+ import zipfile
4
+ import itertools
5
+ import contextlib
6
+ import sys
7
+ import pathlib
8
+
9
+ if sys.version_info < (3, 7):
10
+ from collections import OrderedDict
11
+ else:
12
+ OrderedDict = dict
13
+
14
+
15
+ __all__ = ['Path']
16
+
17
+
18
+ def _parents(path):
19
+ """
20
+ Given a path with elements separated by
21
+ posixpath.sep, generate all parents of that path.
22
+
23
+ >>> list(_parents('b/d'))
24
+ ['b']
25
+ >>> list(_parents('/b/d/'))
26
+ ['/b']
27
+ >>> list(_parents('b/d/f/'))
28
+ ['b/d', 'b']
29
+ >>> list(_parents('b'))
30
+ []
31
+ >>> list(_parents(''))
32
+ []
33
+ """
34
+ return itertools.islice(_ancestry(path), 1, None)
35
+
36
+
37
+ def _ancestry(path):
38
+ """
39
+ Given a path with elements separated by
40
+ posixpath.sep, generate all elements of that path
41
+
42
+ >>> list(_ancestry('b/d'))
43
+ ['b/d', 'b']
44
+ >>> list(_ancestry('/b/d/'))
45
+ ['/b/d', '/b']
46
+ >>> list(_ancestry('b/d/f/'))
47
+ ['b/d/f', 'b/d', 'b']
48
+ >>> list(_ancestry('b'))
49
+ ['b']
50
+ >>> list(_ancestry(''))
51
+ []
52
+ """
53
+ path = path.rstrip(posixpath.sep)
54
+ while path and path != posixpath.sep:
55
+ yield path
56
+ path, tail = posixpath.split(path)
57
+
58
+
59
+ _dedupe = OrderedDict.fromkeys
60
+ """Deduplicate an iterable in original order"""
61
+
62
+
63
+ def _difference(minuend, subtrahend):
64
+ """
65
+ Return items in minuend not in subtrahend, retaining order
66
+ with O(1) lookup.
67
+ """
68
+ return itertools.filterfalse(set(subtrahend).__contains__, minuend)
69
+
70
+
71
+ class CompleteDirs(zipfile.ZipFile):
72
+ """
73
+ A ZipFile subclass that ensures that implied directories
74
+ are always included in the namelist.
75
+ """
76
+
77
+ @staticmethod
78
+ def _implied_dirs(names):
79
+ parents = itertools.chain.from_iterable(map(_parents, names))
80
+ as_dirs = (p + posixpath.sep for p in parents)
81
+ return _dedupe(_difference(as_dirs, names))
82
+
83
+ def namelist(self):
84
+ names = super(CompleteDirs, self).namelist()
85
+ return names + list(self._implied_dirs(names))
86
+
87
+ def _name_set(self):
88
+ return set(self.namelist())
89
+
90
+ def resolve_dir(self, name):
91
+ """
92
+ If the name represents a directory, return that name
93
+ as a directory (with the trailing slash).
94
+ """
95
+ names = self._name_set()
96
+ dirname = name + '/'
97
+ dir_match = name not in names and dirname in names
98
+ return dirname if dir_match else name
99
+
100
+ @classmethod
101
+ def make(cls, source):
102
+ """
103
+ Given a source (filename or zipfile), return an
104
+ appropriate CompleteDirs subclass.
105
+ """
106
+ if isinstance(source, CompleteDirs):
107
+ return source
108
+
109
+ if not isinstance(source, zipfile.ZipFile):
110
+ return cls(_pathlib_compat(source))
111
+
112
+ # Only allow for FastLookup when supplied zipfile is read-only
113
+ if 'r' not in source.mode:
114
+ cls = CompleteDirs
115
+
116
+ source.__class__ = cls
117
+ return source
118
+
119
+
120
+ class FastLookup(CompleteDirs):
121
+ """
122
+ ZipFile subclass to ensure implicit
123
+ dirs exist and are resolved rapidly.
124
+ """
125
+
126
+ def namelist(self):
127
+ with contextlib.suppress(AttributeError):
128
+ return self.__names
129
+ self.__names = super(FastLookup, self).namelist()
130
+ return self.__names
131
+
132
+ def _name_set(self):
133
+ with contextlib.suppress(AttributeError):
134
+ return self.__lookup
135
+ self.__lookup = super(FastLookup, self)._name_set()
136
+ return self.__lookup
137
+
138
+
139
+ def _pathlib_compat(path):
140
+ """
141
+ For path-like objects, convert to a filename for compatibility
142
+ on Python 3.6.1 and earlier.
143
+ """
144
+ try:
145
+ return path.__fspath__()
146
+ except AttributeError:
147
+ return str(path)
148
+
149
+
150
+ class Path:
151
+ """
152
+ A pathlib-compatible interface for zip files.
153
+
154
+ Consider a zip file with this structure::
155
+
156
+ .
157
+ ├── a.txt
158
+ └── b
159
+ ├── c.txt
160
+ └── d
161
+ └── e.txt
162
+
163
+ >>> data = io.BytesIO()
164
+ >>> zf = zipfile.ZipFile(data, 'w')
165
+ >>> zf.writestr('a.txt', 'content of a')
166
+ >>> zf.writestr('b/c.txt', 'content of c')
167
+ >>> zf.writestr('b/d/e.txt', 'content of e')
168
+ >>> zf.filename = 'mem/abcde.zip'
169
+
170
+ Path accepts the zipfile object itself or a filename
171
+
172
+ >>> root = Path(zf)
173
+
174
+ From there, several path operations are available.
175
+
176
+ Directory iteration (including the zip file itself):
177
+
178
+ >>> a, b = root.iterdir()
179
+ >>> a
180
+ Path('mem/abcde.zip', 'a.txt')
181
+ >>> b
182
+ Path('mem/abcde.zip', 'b/')
183
+
184
+ name property:
185
+
186
+ >>> b.name
187
+ 'b'
188
+
189
+ join with divide operator:
190
+
191
+ >>> c = b / 'c.txt'
192
+ >>> c
193
+ Path('mem/abcde.zip', 'b/c.txt')
194
+ >>> c.name
195
+ 'c.txt'
196
+
197
+ Read text:
198
+
199
+ >>> c.read_text()
200
+ 'content of c'
201
+
202
+ existence:
203
+
204
+ >>> c.exists()
205
+ True
206
+ >>> (b / 'missing.txt').exists()
207
+ False
208
+
209
+ Coercion to string:
210
+
211
+ >>> import os
212
+ >>> str(c).replace(os.sep, posixpath.sep)
213
+ 'mem/abcde.zip/b/c.txt'
214
+
215
+ At the root, ``name``, ``filename``, and ``parent``
216
+ resolve to the zipfile. Note these attributes are not
217
+ valid and will raise a ``ValueError`` if the zipfile
218
+ has no filename.
219
+
220
+ >>> root.name
221
+ 'abcde.zip'
222
+ >>> str(root.filename).replace(os.sep, posixpath.sep)
223
+ 'mem/abcde.zip'
224
+ >>> str(root.parent)
225
+ 'mem'
226
+ """
227
+
228
+ __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})"
229
+
230
+ def __init__(self, root, at=""):
231
+ """
232
+ Construct a Path from a ZipFile or filename.
233
+
234
+ Note: When the source is an existing ZipFile object,
235
+ its type (__class__) will be mutated to a
236
+ specialized type. If the caller wishes to retain the
237
+ original type, the caller should either create a
238
+ separate ZipFile object or pass a filename.
239
+ """
240
+ self.root = FastLookup.make(root)
241
+ self.at = at
242
+
243
+ def open(self, mode='r', *args, pwd=None, **kwargs):
244
+ """
245
+ Open this entry as text or binary following the semantics
246
+ of ``pathlib.Path.open()`` by passing arguments through
247
+ to io.TextIOWrapper().
248
+ """
249
+ if self.is_dir():
250
+ raise IsADirectoryError(self)
251
+ zip_mode = mode[0]
252
+ if not self.exists() and zip_mode == 'r':
253
+ raise FileNotFoundError(self)
254
+ stream = self.root.open(self.at, zip_mode, pwd=pwd)
255
+ if 'b' in mode:
256
+ if args or kwargs:
257
+ raise ValueError("encoding args invalid for binary operation")
258
+ return stream
259
+ return io.TextIOWrapper(stream, *args, **kwargs)
260
+
261
+ @property
262
+ def name(self):
263
+ return pathlib.Path(self.at).name or self.filename.name
264
+
265
+ @property
266
+ def suffix(self):
267
+ return pathlib.Path(self.at).suffix or self.filename.suffix
268
+
269
+ @property
270
+ def suffixes(self):
271
+ return pathlib.Path(self.at).suffixes or self.filename.suffixes
272
+
273
+ @property
274
+ def stem(self):
275
+ return pathlib.Path(self.at).stem or self.filename.stem
276
+
277
+ @property
278
+ def filename(self):
279
+ return pathlib.Path(self.root.filename).joinpath(self.at)
280
+
281
+ def read_text(self, *args, **kwargs):
282
+ with self.open('r', *args, **kwargs) as strm:
283
+ return strm.read()
284
+
285
+ def read_bytes(self):
286
+ with self.open('rb') as strm:
287
+ return strm.read()
288
+
289
+ def _is_child(self, path):
290
+ return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/")
291
+
292
+ def _next(self, at):
293
+ return self.__class__(self.root, at)
294
+
295
+ def is_dir(self):
296
+ return not self.at or self.at.endswith("/")
297
+
298
+ def is_file(self):
299
+ return self.exists() and not self.is_dir()
300
+
301
+ def exists(self):
302
+ return self.at in self.root._name_set()
303
+
304
+ def iterdir(self):
305
+ if not self.is_dir():
306
+ raise ValueError("Can't listdir a file")
307
+ subs = map(self._next, self.root.namelist())
308
+ return filter(self._is_child, subs)
309
+
310
+ def __str__(self):
311
+ return posixpath.join(self.root.filename, self.at)
312
+
313
+ def __repr__(self):
314
+ return self.__repr.format(self=self)
315
+
316
+ def joinpath(self, *other):
317
+ next = posixpath.join(self.at, *map(_pathlib_compat, other))
318
+ return self._next(self.root.resolve_dir(next))
319
+
320
+ __truediv__ = joinpath
321
+
322
+ @property
323
+ def parent(self):
324
+ if not self.at:
325
+ return self.filename.parent
326
+ parent_at = posixpath.dirname(self.at.rstrip('/'))
327
+ if parent_at:
328
+ parent_at += '/'
329
+ return self._next(parent_at)
@@ -24,7 +24,7 @@ def _check_stubs_supported():
24
24
  if _py_ver >= (3, 8):
25
25
  from importlib import metadata
26
26
  elif _py_ver >= (3, 7):
27
- from metaflow._vendor import importlib_metadata as metadata
27
+ from metaflow._vendor.v3_7 import importlib_metadata as metadata
28
28
  elif _py_ver >= (3, 6):
29
29
  from metaflow._vendor.v3_6 import importlib_metadata as metadata
30
30
  else:
@@ -321,7 +321,7 @@ if _py_ver >= (3, 4):
321
321
  if _py_ver >= (3, 8):
322
322
  from importlib import metadata
323
323
  elif _py_ver >= (3, 7):
324
- from metaflow._vendor import importlib_metadata as metadata
324
+ from metaflow._vendor.v3_7 import importlib_metadata as metadata
325
325
  elif _py_ver >= (3, 6):
326
326
  from metaflow._vendor.v3_6 import importlib_metadata as metadata
327
327
  else:
@@ -256,12 +256,19 @@ class ArgoClient(object):
256
256
  json.loads(e.body)["message"] if e.body is not None else e.reason
257
257
  )
258
258
 
259
- def trigger_workflow_template(self, name, parameters={}):
259
+ def trigger_workflow_template(self, name, usertype, username, parameters={}):
260
260
  client = self._client.get()
261
261
  body = {
262
262
  "apiVersion": "argoproj.io/v1alpha1",
263
263
  "kind": "Workflow",
264
- "metadata": {"generateName": name + "-"},
264
+ "metadata": {
265
+ "generateName": name + "-",
266
+ "annotations": {
267
+ "metaflow/triggered_by_user": json.dumps(
268
+ {"type": usertype, "name": username}
269
+ )
270
+ },
271
+ },
265
272
  "spec": {
266
273
  "workflowTemplateRef": {"name": name},
267
274
  "arguments": {
@@ -64,6 +64,7 @@ from metaflow.util import (
64
64
  )
65
65
 
66
66
  from .argo_client import ArgoClient
67
+ from metaflow.util import resolve_identity
67
68
 
68
69
 
69
70
  class ArgoWorkflowsException(MetaflowException):
@@ -108,8 +109,7 @@ class ArgoWorkflows(object):
108
109
  notify_slack_webhook_url=None,
109
110
  notify_pager_duty_integration_key=None,
110
111
  notify_incident_io_api_key=None,
111
- incident_io_success_severity_id=None,
112
- incident_io_error_severity_id=None,
112
+ incident_io_alert_source_config_id=None,
113
113
  enable_heartbeat_daemon=True,
114
114
  enable_error_msg_capture=False,
115
115
  ):
@@ -160,8 +160,7 @@ class ArgoWorkflows(object):
160
160
  self.notify_slack_webhook_url = notify_slack_webhook_url
161
161
  self.notify_pager_duty_integration_key = notify_pager_duty_integration_key
162
162
  self.notify_incident_io_api_key = notify_incident_io_api_key
163
- self.incident_io_success_severity_id = incident_io_success_severity_id
164
- self.incident_io_error_severity_id = incident_io_error_severity_id
163
+ self.incident_io_alert_source_config_id = incident_io_alert_source_config_id
165
164
  self.enable_heartbeat_daemon = enable_heartbeat_daemon
166
165
  self.enable_error_msg_capture = enable_error_msg_capture
167
166
  self.parameters = self._process_parameters()
@@ -315,8 +314,16 @@ class ArgoWorkflows(object):
315
314
  "Workflows before proceeding." % name
316
315
  )
317
316
  try:
317
+ id_parts = resolve_identity().split(":")
318
+ parts_size = len(id_parts)
319
+ usertype = id_parts[0] if parts_size > 0 else "unknown"
320
+ username = id_parts[1] if parts_size > 1 else "unknown"
321
+
318
322
  return ArgoClient(namespace=KUBERNETES_NAMESPACE).trigger_workflow_template(
319
- name, parameters
323
+ name,
324
+ usertype,
325
+ username,
326
+ parameters,
320
327
  )
321
328
  except Exception as e:
322
329
  raise ArgoWorkflowsException(str(e))
@@ -2505,25 +2512,49 @@ class ArgoWorkflows(object):
2505
2512
  def _incident_io_alert_template(self):
2506
2513
  if self.notify_incident_io_api_key is None:
2507
2514
  return None
2508
- if self.incident_io_error_severity_id is None:
2515
+ if self.incident_io_alert_source_config_id is None:
2509
2516
  raise MetaflowException(
2510
- "Creating incidents for errors requires a severity id."
2517
+ "Creating alerts for errors requires a alert source config ID."
2511
2518
  )
2519
+ ui_links = self._incident_io_ui_urls_for_run()
2512
2520
  return Template("notify-incident-io-on-error").http(
2513
2521
  Http("POST")
2514
- .url("https://api.incident.io/v2/incidents")
2522
+ .url(
2523
+ "https://api.incident.io/v2/alert_events/http/%s"
2524
+ % self.incident_io_alert_source_config_id
2525
+ )
2515
2526
  .header("Content-Type", "application/json")
2516
2527
  .header("Authorization", "Bearer %s" % self.notify_incident_io_api_key)
2517
2528
  .body(
2518
2529
  json.dumps(
2519
2530
  {
2520
2531
  "idempotency_key": "argo-{{workflow.name}}", # use run id to deduplicate alerts.
2521
- "visibility": "public",
2522
- "severity_id": self.incident_io_error_severity_id,
2523
- "name": "Flow %s has failed." % self.flow.name,
2524
- "summary": "Metaflow run %s/argo-{{workflow.name}} failed! %s"
2525
- % (self.flow.name, self._incident_io_ui_urls_for_run()),
2526
- # TODO: Add support for custom field entries.
2532
+ "status": "firing",
2533
+ "title": "Flow %s has failed." % self.flow.name,
2534
+ "description": "Metaflow run {run_pathspec} failed!{urls}".format(
2535
+ run_pathspec="%s/argo-{{workflow.name}}" % self.flow.name,
2536
+ urls=(
2537
+ "\n\nSee details for the run at:\n\n"
2538
+ + "\n\n".join(ui_links)
2539
+ if ui_links
2540
+ else ""
2541
+ ),
2542
+ ),
2543
+ "source_url": (
2544
+ "%s/%s/%s"
2545
+ % (
2546
+ UI_URL.rstrip("/"),
2547
+ self.flow.name,
2548
+ "argo-{{workflow.name}}",
2549
+ )
2550
+ if UI_URL
2551
+ else None
2552
+ ),
2553
+ "metadata": {
2554
+ "run_status": "failed",
2555
+ "flow_name": self.flow.name,
2556
+ "run_id": "argo-{{workflow.name}}",
2557
+ },
2527
2558
  }
2528
2559
  )
2529
2560
  )
@@ -2532,27 +2563,49 @@ class ArgoWorkflows(object):
2532
2563
  def _incident_io_change_template(self):
2533
2564
  if self.notify_incident_io_api_key is None:
2534
2565
  return None
2535
- if self.incident_io_success_severity_id is None:
2566
+ if self.incident_io_alert_source_config_id is None:
2536
2567
  raise MetaflowException(
2537
- "Creating incidents for successes requires a severity id."
2568
+ "Creating alerts for successes requires an alert source config ID."
2538
2569
  )
2570
+ ui_links = self._incident_io_ui_urls_for_run()
2539
2571
  return Template("notify-incident-io-on-success").http(
2540
2572
  Http("POST")
2541
- .url("https://api.incident.io/v2/incidents")
2573
+ .url(
2574
+ "https://api.incident.io/v2/alert_events/http/%s"
2575
+ % self.incident_io_alert_source_config_id
2576
+ )
2542
2577
  .header("Content-Type", "application/json")
2543
2578
  .header("Authorization", "Bearer %s" % self.notify_incident_io_api_key)
2544
2579
  .body(
2545
2580
  json.dumps(
2546
2581
  {
2547
2582
  "idempotency_key": "argo-{{workflow.name}}", # use run id to deduplicate alerts.
2548
- "visibility": "public",
2549
- "severity_id": self.incident_io_success_severity_id,
2550
- # TODO: Do we need to make incident type configurable for successes? otherwise they are created as 'investigating'
2551
- # "incident_type_id": ""
2552
- "name": "Flow %s has succeeded." % self.flow.name,
2553
- "summary": "Metaflow run %s/argo-{{workflow.name}} succeeded!%s"
2554
- % (self.flow.name, self._incident_io_ui_urls_for_run()),
2555
- # TODO: Add support for custom field entries.
2583
+ "status": "firing",
2584
+ "title": "Flow %s has succeeded." % self.flow.name,
2585
+ "description": "Metaflow run {run_pathspec} succeeded!{urls}".format(
2586
+ run_pathspec="%s/argo-{{workflow.name}}" % self.flow.name,
2587
+ urls=(
2588
+ "\n\nSee details for the run at:\n\n"
2589
+ + "\n\n".join(ui_links)
2590
+ if ui_links
2591
+ else ""
2592
+ ),
2593
+ ),
2594
+ "source_url": (
2595
+ "%s/%s/%s"
2596
+ % (
2597
+ UI_URL.rstrip("/"),
2598
+ self.flow.name,
2599
+ "argo-{{workflow.name}}",
2600
+ )
2601
+ if UI_URL
2602
+ else None
2603
+ ),
2604
+ "metadata": {
2605
+ "run_status": "succeeded",
2606
+ "flow_name": self.flow.name,
2607
+ "run_id": "argo-{{workflow.name}}",
2608
+ },
2556
2609
  }
2557
2610
  )
2558
2611
  )
@@ -2574,9 +2627,7 @@ class ArgoWorkflows(object):
2574
2627
  "{{workflow.name}}",
2575
2628
  )
2576
2629
  links.append(url)
2577
- if links:
2578
- links = ["See details for the run at: ", *links]
2579
- return "\n\n".join(links)
2630
+ return links
2580
2631
 
2581
2632
  def _pager_duty_change_template(self):
2582
2633
  # https://developer.pagerduty.com/docs/ZG9jOjExMDI5NTgy-send-a-change-event
@@ -130,6 +130,7 @@ def argo_workflows(obj, name=None):
130
130
  is_flag=True,
131
131
  default=False,
132
132
  help="Only print out JSON sent to Argo Workflows. Do not deploy anything.",
133
+ hidden=True,
133
134
  )
134
135
  @click.option(
135
136
  "--max-workers",
@@ -182,14 +183,9 @@ def argo_workflows(obj, name=None):
182
183
  help="Incident.io API V2 key for workflow success/failure notifications.",
183
184
  )
184
185
  @click.option(
185
- "--incident-io-success-severity-id",
186
- default=None,
187
- help="Incident.io severity id for success alerts.",
188
- )
189
- @click.option(
190
- "--incident-io-error-severity-id",
186
+ "--incident-io-alert-source-config-id",
191
187
  default=None,
192
- help="Incident.io severity id for error alerts.",
188
+ help="Incident.io Alert source config ID. Example '01GW2G3V0S59R238FAHPDS1R66'",
193
189
  )
194
190
  @click.option(
195
191
  "--enable-heartbeat-daemon/--no-enable-heartbeat-daemon",
@@ -229,8 +225,7 @@ def create(
229
225
  notify_slack_webhook_url=None,
230
226
  notify_pager_duty_integration_key=None,
231
227
  notify_incident_io_api_key=None,
232
- incident_io_success_severity_id=None,
233
- incident_io_error_severity_id=None,
228
+ incident_io_alert_source_config_id=None,
234
229
  enable_heartbeat_daemon=True,
235
230
  deployer_attribute_file=None,
236
231
  enable_error_msg_capture=False,
@@ -287,8 +282,7 @@ def create(
287
282
  notify_slack_webhook_url,
288
283
  notify_pager_duty_integration_key,
289
284
  notify_incident_io_api_key,
290
- incident_io_success_severity_id,
291
- incident_io_error_severity_id,
285
+ incident_io_alert_source_config_id,
292
286
  enable_heartbeat_daemon,
293
287
  enable_error_msg_capture,
294
288
  )
@@ -464,8 +458,7 @@ def make_flow(
464
458
  notify_slack_webhook_url,
465
459
  notify_pager_duty_integration_key,
466
460
  notify_incident_io_api_key,
467
- incident_io_success_severity_id,
468
- incident_io_error_severity_id,
461
+ incident_io_alert_source_config_id,
469
462
  enable_heartbeat_daemon,
470
463
  enable_error_msg_capture,
471
464
  ):
@@ -488,19 +481,18 @@ def make_flow(
488
481
  "https://api.slack.com/messaging/webhooks to generate a webhook url.\n"
489
482
  " For notifications through PagerDuty, generate an integration key by following the instructions at "
490
483
  "https://support.pagerduty.com/docs/services-and-integrations#create-a-generic-events-api-integration\n"
491
- " For notifications through Incident.io, generate an API key with a permission to create incidents."
484
+ " For notifications through Incident.io, generate an alert source config."
492
485
  )
493
486
 
494
- if notify_incident_io_api_key:
495
- if notify_on_error and incident_io_error_severity_id is None:
496
- raise MetaflowException(
497
- "Incident.io error notifications require a severity id. Please set one with --incident-io-error-severity-id"
498
- )
487
+ if (
488
+ (notify_on_error or notify_on_success)
489
+ and notify_incident_io_api_key
490
+ and incident_io_alert_source_config_id is None
491
+ ):
492
+ raise MetaflowException(
493
+ "Incident.io alerts require an alert source configuration ID. Please set one with --incident-io-alert-source-config-id"
494
+ )
499
495
 
500
- if notify_on_success and incident_io_success_severity_id is None:
501
- raise MetaflowException(
502
- "Incident.io success notifications require a severity id. Please set one with --incident-io-success-severity-id"
503
- )
504
496
  # Attach @kubernetes and @environment decorator to the flow to
505
497
  # ensure that the related decorator hooks are invoked.
506
498
  decorators._attach_decorators(
@@ -545,8 +537,7 @@ def make_flow(
545
537
  notify_slack_webhook_url=notify_slack_webhook_url,
546
538
  notify_pager_duty_integration_key=notify_pager_duty_integration_key,
547
539
  notify_incident_io_api_key=notify_incident_io_api_key,
548
- incident_io_success_severity_id=incident_io_success_severity_id,
549
- incident_io_error_severity_id=incident_io_error_severity_id,
540
+ incident_io_alert_source_config_id=incident_io_alert_source_config_id,
550
541
  enable_heartbeat_daemon=enable_heartbeat_daemon,
551
542
  enable_error_msg_capture=enable_error_msg_capture,
552
543
  )
@@ -171,12 +171,16 @@ class ArgoWorkflowsTriggeredRun(TriggeredRun):
171
171
  command_obj.sync_wait()
172
172
  return command_obj.process.returncode == 0
173
173
 
174
- def wait_for_completion(self, timeout: Optional[int] = None):
174
+ def wait_for_completion(
175
+ self, check_interval: int = 5, timeout: Optional[int] = None
176
+ ):
175
177
  """
176
178
  Wait for the workflow to complete or timeout.
177
179
 
178
180
  Parameters
179
181
  ----------
182
+ check_interval: int, default: 5
183
+ Frequency of checking for workflow completion, in seconds.
180
184
  timeout : int, optional, default None
181
185
  Maximum time in seconds to wait for workflow completion.
182
186
  If None, waits indefinitely.
@@ -187,7 +191,6 @@ class ArgoWorkflowsTriggeredRun(TriggeredRun):
187
191
  If the workflow does not complete within the specified timeout period.
188
192
  """
189
193
  start_time = time.time()
190
- check_interval = 5
191
194
  while self.is_running:
192
195
  if timeout is not None and (time.time() - start_time) > timeout:
193
196
  raise TimeoutError(