ob-metaflow 2.15.0.1__py2.py3-none-any.whl → 2.15.5.1__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of ob-metaflow might be problematic.
Files changed (35)
  1. metaflow/__init__.py +6 -0
  2. metaflow/cmd/code/__init__.py +230 -0
  3. metaflow/cmd/develop/stub_generator.py +5 -2
  4. metaflow/cmd/main_cli.py +1 -0
  5. metaflow/cmd/make_wrapper.py +35 -3
  6. metaflow/extension_support/plugins.py +1 -0
  7. metaflow/metaflow_config.py +2 -0
  8. metaflow/metaflow_environment.py +3 -1
  9. metaflow/mflog/__init__.py +4 -3
  10. metaflow/plugins/__init__.py +14 -0
  11. metaflow/plugins/argo/argo_client.py +9 -2
  12. metaflow/plugins/argo/argo_workflows.py +79 -28
  13. metaflow/plugins/argo/argo_workflows_cli.py +16 -25
  14. metaflow/plugins/argo/argo_workflows_deployer_objects.py +5 -2
  15. metaflow/plugins/cards/card_modules/main.js +52 -50
  16. metaflow/plugins/kubernetes/kubernetes_decorator.py +2 -1
  17. metaflow/plugins/kubernetes/kubernetes_jobsets.py +2 -0
  18. metaflow/plugins/metadata_providers/service.py +16 -7
  19. metaflow/plugins/pypi/bootstrap.py +17 -26
  20. metaflow/plugins/pypi/conda_environment.py +8 -8
  21. metaflow/plugins/pypi/parsers.py +268 -0
  22. metaflow/plugins/pypi/utils.py +18 -0
  23. metaflow/runner/click_api.py +5 -1
  24. metaflow/runner/deployer.py +3 -2
  25. metaflow/version.py +1 -1
  26. {ob_metaflow-2.15.0.1.data → ob_metaflow-2.15.5.1.data}/data/share/metaflow/devtools/Makefile +36 -17
  27. {ob_metaflow-2.15.0.1.data → ob_metaflow-2.15.5.1.data}/data/share/metaflow/devtools/Tiltfile +29 -10
  28. ob_metaflow-2.15.5.1.dist-info/METADATA +87 -0
  29. {ob_metaflow-2.15.0.1.dist-info → ob_metaflow-2.15.5.1.dist-info}/RECORD +34 -32
  30. {ob_metaflow-2.15.0.1.dist-info → ob_metaflow-2.15.5.1.dist-info}/WHEEL +1 -1
  31. ob_metaflow-2.15.0.1.dist-info/METADATA +0 -94
  32. {ob_metaflow-2.15.0.1.data → ob_metaflow-2.15.5.1.data}/data/share/metaflow/devtools/pick_services.sh +0 -0
  33. {ob_metaflow-2.15.0.1.dist-info → ob_metaflow-2.15.5.1.dist-info}/LICENSE +0 -0
  34. {ob_metaflow-2.15.0.1.dist-info → ob_metaflow-2.15.5.1.dist-info}/entry_points.txt +0 -0
  35. {ob_metaflow-2.15.0.1.dist-info → ob_metaflow-2.15.5.1.dist-info}/top_level.txt +0 -0
metaflow/__init__.py CHANGED
@@ -119,6 +119,12 @@ from .includefile import IncludeFile
 # Decorators
 from .decorators import step, _import_plugin_decorators
 
+
+# Parsers (for configs) for now
+from .plugins import _import_tl_plugins
+
+_import_tl_plugins(globals())
+
 # this auto-generates decorator functions from Decorator objects
 # in the top-level metaflow namespace
 _import_plugin_decorators(globals())
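Note: `_import_tl_plugins(globals())` re-exports the new config parsers (see `TL_PLUGINS_DESC` in metaflow/plugins/__init__.py below) from the top-level metaflow namespace. A minimal sketch of how they could then be used, assuming Metaflow's `Config` API and a requirements.txt next to the flow file (the flow itself is hypothetical):

    from metaflow import FlowSpec, Config, step, requirements_txt_parser

    class ParsedDepsFlow(FlowSpec):  # illustrative flow, not part of the package
        # parse requirements.txt into a config consumable by @pypi/@conda
        deps = Config("deps", default="requirements.txt", parser=requirements_txt_parser)

        @step
        def start(self):
            print(self.deps)
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        ParsedDepsFlow()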
metaflow/cmd/code/__init__.py ADDED
@@ -0,0 +1,230 @@
+import os
+import shutil
+import sys
+from subprocess import PIPE, CompletedProcess, run
+from tempfile import TemporaryDirectory
+from typing import Any, Callable, List, Mapping, Optional, cast
+
+from metaflow import Run
+from metaflow._vendor import click
+from metaflow.cli import echo_always
+
+
+@click.group()
+def cli():
+    pass
+
+
+@cli.group(help="Access, compare, and manage code associated with Metaflow runs.")
+def code():
+    pass
+
+
+def echo(line: str) -> None:
+    echo_always(line, err=True, fg="magenta")
+
+
+def extract_code_package(runspec: str) -> TemporaryDirectory:
+    try:
+        mf_run = Run(runspec, _namespace_check=False)
+        echo(f"✅ Run *{runspec}* found, downloading code..")
+    except Exception as e:
+        echo(f"❌ Run **{runspec}** not found")
+        raise e
+
+    if mf_run.code is None:
+        echo(
+            f"❌ Run **{runspec}** doesn't have a code package. Maybe it's a local run?"
+        )
+        raise RuntimeError("no code package found")
+
+    return mf_run.code.extract()
+
+
+def perform_diff(
+    source_dir: str,
+    target_dir: Optional[str] = None,
+    output: bool = False,
+    **kwargs: Mapping[str, Any],
+) -> Optional[List[str]]:
+    if target_dir is None:
+        target_dir = os.getcwd()
+
+    diffs = []
+    for dirpath, dirnames, filenames in os.walk(source_dir, followlinks=True):
+        for fname in filenames:
+            # NOTE: the paths below need to be set up carefully
+            # for the `patch` command to work. Better not to touch
+            # the directories below. If you must, test that patches
+            # work after your changes.
+            #
+            # target_file is the git repo in the current working directory
+            rel = os.path.relpath(dirpath, source_dir)
+            target_file = os.path.join(rel, fname)
+            # source_file is the run file loaded in a tmp directory
+            source_file = os.path.join(dirpath, fname)
+
+            if sys.stdout.isatty() and not output:
+                color = ["--color"]
+            else:
+                color = ["--no-color"]
+
+            if os.path.exists(os.path.join(target_dir, target_file)):
+                cmd = (
+                    ["git", "diff", "--no-index", "--exit-code"]
+                    + color
+                    + [
+                        target_file,
+                        source_file,
+                    ]
+                )
+                result: CompletedProcess = run(
+                    cmd, text=True, stdout=PIPE, cwd=target_dir
+                )
+                if result.returncode == 0:
+                    if not output:
+                        echo(f"✅ {target_file} is identical, skipping")
+                    continue
+
+                if output:
+                    diffs.append(result.stdout)
+                else:
+                    run(["less", "-R"], input=result.stdout, text=True)
+            else:
+                if not output:
+                    echo(f"❗ {target_file} not in the target directory, skipping")
+    return diffs if output else None
+
+
+def run_op(
+    runspec: str, op: Callable[..., Optional[List[str]]], **op_args: Mapping[str, Any]
+) -> Optional[List[str]]:
+    tmp = None
+    try:
+        tmp = extract_code_package(runspec)
+        return op(tmp.name, **op_args)
+    finally:
+        if tmp and os.path.exists(tmp.name):
+            shutil.rmtree(tmp.name)
+
+
+def run_op_diff_runs(
+    source_run_pathspec: str, target_run_pathspec: str, **op_args: Mapping[str, Any]
+) -> Optional[List[str]]:
+    source_tmp = None
+    target_tmp = None
+    try:
+        source_tmp = extract_code_package(source_run_pathspec)
+        target_tmp = extract_code_package(target_run_pathspec)
+        return perform_diff(source_tmp.name, target_tmp.name, **op_args)
+    finally:
+        for d in [source_tmp, target_tmp]:
+            if d and os.path.exists(d.name):
+                shutil.rmtree(d.name)
+
+
+def op_diff(tmpdir: str, **kwargs: Mapping[str, Any]) -> Optional[List[str]]:
+    kwargs_dict = dict(kwargs)
+    target_dir = cast(Optional[str], kwargs_dict.pop("target_dir", None))
+    output: bool = bool(kwargs_dict.pop("output", False))
+    op_args: Mapping[str, Any] = {**kwargs_dict}
+    return perform_diff(tmpdir, target_dir=target_dir, output=output, **op_args)
+
+
+def op_pull(tmpdir: str, dst: str, **op_args: Mapping[str, Any]) -> None:
+    if os.path.exists(dst):
+        echo(f"❌ Directory *{dst}* already exists")
+    else:
+        shutil.move(tmpdir, dst)
+        echo(f"Code downloaded to *{dst}*")
+
+
+def op_patch(tmpdir: str, dst: str, **kwargs: Mapping[str, Any]) -> None:
+    diffs = perform_diff(tmpdir, output=True) or []
+    with open(dst, "w", encoding="utf-8") as f:
+        for out in diffs:
+            out = out.replace(tmpdir, "/.")
+            out = out.replace("+++ b/./", "+++ b/")
+            out = out.replace("--- b/./", "--- b/")
+            out = out.replace("--- a/./", "--- a/")
+            out = out.replace("+++ a/./", "+++ a/")
+            f.write(out)
+    echo(f"Patch saved in *{dst}*")
+    path = run(
+        ["git", "rev-parse", "--show-prefix"], text=True, stdout=PIPE
+    ).stdout.strip()
+    if path:
+        diropt = f" --directory={path.rstrip('/')}"
+    else:
+        diropt = ""
+    echo("Apply the patch by running:")
+    echo_always(
+        f"git apply --verbose{diropt} {dst}", highlight=True, bold=True, err=True
+    )
+
+
+@code.command()
+@click.argument("run_pathspec")
+def diff(run_pathspec: str, **kwargs: Mapping[str, Any]) -> None:
+    """
+    Do a 'git diff' of the current directory and a Metaflow run.
+    """
+    _ = run_op(run_pathspec, op_diff, **kwargs)
+
+
+@code.command()
+@click.argument("source_run_pathspec")
+@click.argument("target_run_pathspec")
+def diff_runs(
+    source_run_pathspec: str, target_run_pathspec: str, **kwargs: Mapping[str, Any]
+) -> None:
+    """
+    Do a 'git diff' between two Metaflow runs.
+    """
+    _ = run_op_diff_runs(source_run_pathspec, target_run_pathspec, **kwargs)
+
+
+@code.command()
+@click.argument("run_pathspec")
+@click.option(
+    "--dir", help="Destination directory (default: {run_pathspec}_code)", default=None
+)
+def pull(
+    run_pathspec: str, dir: Optional[str] = None, **kwargs: Mapping[str, Any]
+) -> None:
+    """
+    Pull the code of a Metaflow run.
+    """
+    if dir is None:
+        dir = run_pathspec.lower().replace("/", "_") + "_code"
+    op_args: Mapping[str, Any] = {**kwargs, "dst": dir}
+    run_op(run_pathspec, op_pull, **op_args)
+
+
+@code.command()
+@click.argument("run_pathspec")
+@click.option(
+    "--file_path",
+    help="Patch file name. If not provided, defaults to a sanitized version of RUN_PATHSPEC "
+    "with slashes replaced by underscores, plus '.patch'.",
+    show_default=False,
+)
+@click.option(
+    "--overwrite", is_flag=True, help="Overwrite the patch file if it exists."
+)
+def patch(
+    run_pathspec: str,
+    file_path: Optional[str] = None,
+    overwrite: bool = False,
+    **kwargs: Mapping[str, Any],
+) -> None:
+    """
+    Create a patch by comparing current dir with a Metaflow run.
+    """
+    if file_path is None:
+        file_path = run_pathspec.lower().replace("/", "_") + ".patch"
+    if os.path.exists(file_path) and not overwrite:
+        echo(f"File *{file_path}* already exists. To overwrite, specify --overwrite.")
+        return
+    op_args: Mapping[str, Any] = {**kwargs, "dst": file_path}
+    run_op(run_pathspec, op_patch, **op_args)
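The new `metaflow code` group (`diff`, `diff_runs`, `pull`, `patch`) wraps the Metaflow client API. A hedged sketch of what `metaflow code pull` does under the hood, using the same `Run(...).code.extract()` calls as `extract_code_package` above (the run pathspec is a stand-in):

    from metaflow import Run

    run = Run("HelloFlow/123", _namespace_check=False)  # hypothetical pathspec
    if run.code is not None:
        tmp = run.code.extract()  # TemporaryDirectory holding the code package
        print(f"code extracted to {tmp.name}")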
metaflow/cmd/develop/stub_generator.py CHANGED
@@ -1133,13 +1133,16 @@ class StubGenerator:
             result = result[1:]
         # Add doc to first and last overloads. Jedi uses the last one and pycharm
         # the first one. Go figure.
+        result_docstring = docs["func_doc"]
+        if docs["param_doc"]:
+            result_docstring += "\nParameters\n----------\n" + docs["param_doc"]
         result[0] = (
             result[0][0],
-            docs["func_doc"] + "\nParameters\n----------\n" + docs["param_doc"],
+            result_docstring,
         )
         result[-1] = (
             result[-1][0],
-            docs["func_doc"] + "\nParameters\n----------\n" + docs["param_doc"],
+            result_docstring,
         )
         return result
 
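The guard above keeps stub docstrings clean when a function has no documented parameters; previously an empty "Parameters" section was always appended. An illustration (the dict literal is a stand-in for the real `docs` mapping):

    docs = {"func_doc": "Do things.", "param_doc": ""}
    result_docstring = docs["func_doc"]
    if docs["param_doc"]:
        result_docstring += "\nParameters\n----------\n" + docs["param_doc"]
    assert result_docstring == "Do things."  # no dangling empty section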
metaflow/cmd/main_cli.py CHANGED
@@ -67,6 +67,7 @@ CMDS_DESC = [
     ("configure", ".configure_cmd.cli"),
     ("tutorials", ".tutorials_cmd.cli"),
     ("develop", ".develop.cli"),
+    ("code", ".code.cli"),
 ]
 
 process_cmds(globals())
metaflow/cmd/make_wrapper.py CHANGED
@@ -2,15 +2,47 @@ import sys
 import subprocess
 from pathlib import Path
 import sysconfig
+import site
+
+
+def find_makefile():
+    possible_dirs = []
+
+    # 1) The standard sysconfig-based location
+    data_dir = sysconfig.get_paths()["data"]
+    possible_dirs.append(Path(data_dir) / "share" / "metaflow" / "devtools")
+
+    # 2) The user base (e.g. ~/.local on many systems)
+    user_base = site.getuserbase()  # e.g. /home/runner/.local
+    possible_dirs.append(Path(user_base) / "share" / "metaflow" / "devtools")
+
+    # 3) site-packages can vary, we can guess share/.. near each site-packages
+    #    (Works if pip actually placed devtools near site-packages.)
+    for p in site.getsitepackages():
+        possible_dirs.append(Path(p).parent / "share" / "metaflow" / "devtools")
+    user_site = site.getusersitepackages()
+    possible_dirs.append(Path(user_site).parent / "share" / "metaflow" / "devtools")
+
+    for candidate_dir in possible_dirs:
+        makefile_candidate = candidate_dir / "Makefile"
+        if makefile_candidate.is_file():
+            return makefile_candidate
+
+    return None
 
 
 def main():
-    share_dir = Path(sysconfig.get_paths()["data"]) / "share" / "metaflow" / "devtools"
-    makefile_path = share_dir / "Makefile"
+    makefile_path = find_makefile()
+    if not makefile_path:
+        print("ERROR: Could not find executable in any known location.")
+        sys.exit(1)
     cmd = ["make", "-f", str(makefile_path)] + sys.argv[1:]
-    # subprocess.run(cmd, check=True)
+
     try:
         completed = subprocess.run(cmd, check=True)
         sys.exit(completed.returncode)
     except subprocess.CalledProcessError as ex:
         sys.exit(ex.returncode)
+    except KeyboardInterrupt:
+        print("Process interrupted by user. Exiting cleanly.")
+        sys.exit(1)
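To see which locations `find_makefile()` will probe on a given machine, a quick diagnostic sketch that mirrors the candidate list above (not part of the package):

    import site
    import sysconfig
    from pathlib import Path

    candidates = [
        Path(sysconfig.get_paths()["data"]) / "share" / "metaflow" / "devtools",
        Path(site.getuserbase()) / "share" / "metaflow" / "devtools",
    ]
    for p in site.getsitepackages() + [site.getusersitepackages()]:
        candidates.append(Path(p).parent / "share" / "metaflow" / "devtools")
    for c in candidates:
        print(c, "->", (c / "Makefile").is_file())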
metaflow/extension_support/plugins.py CHANGED
@@ -198,6 +198,7 @@ _plugin_categories = {
         list(x.commands)[0] if len(x.commands) == 1 else "too many commands"
     ),
     "runner_cli": lambda x: x.name,
+    "tl_plugin": None,
 }
 
 
metaflow/metaflow_config.py CHANGED
@@ -382,6 +382,8 @@ KUBERNETES_DISK = from_conf("KUBERNETES_DISK", None)
 # Default kubernetes QoS class
 KUBERNETES_QOS = from_conf("KUBERNETES_QOS", "burstable")
 
+# Architecture of kubernetes nodes - used for @conda/@pypi in metaflow-dev
+KUBERNETES_CONDA_ARCH = from_conf("KUBERNETES_CONDA_ARCH")
 ARGO_WORKFLOWS_KUBERNETES_SECRETS = from_conf("ARGO_WORKFLOWS_KUBERNETES_SECRETS", "")
 ARGO_WORKFLOWS_ENV_VARS_TO_SKIP = from_conf("ARGO_WORKFLOWS_ENV_VARS_TO_SKIP", "")
 
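Since `from_conf` resolves METAFLOW_-prefixed settings, the new knob can be supplied through the environment before metaflow is imported; a sketch (the architecture value is illustrative):

    import os

    os.environ["METAFLOW_KUBERNETES_CONDA_ARCH"] = "linux-64"  # e.g. for amd64 nodes
    from metaflow.metaflow_config import KUBERNETES_CONDA_ARCH
    print(KUBERNETES_CONDA_ARCH)  # -> linux-64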
metaflow/metaflow_environment.py CHANGED
@@ -6,7 +6,7 @@ from .util import get_username
 from . import metaflow_version
 from metaflow.exception import MetaflowException
 from metaflow.extension_support import dump_module_info
-from metaflow.mflog import BASH_MFLOG
+from metaflow.mflog import BASH_MFLOG, BASH_FLUSH_LOGS
 from . import R
 
 
@@ -159,6 +159,7 @@ class MetaflowEnvironment(object):
     def get_package_commands(self, code_package_url, datastore_type):
         cmds = [
             BASH_MFLOG,
+            BASH_FLUSH_LOGS,
             "mflog 'Setting up task environment.'",
             self._get_install_dependencies_cmd(datastore_type),
             "mkdir metaflow",
@@ -176,6 +177,7 @@ class MetaflowEnvironment(object):
             "fi" % code_package_url,
             "TAR_OPTIONS='--warning=no-timestamp' tar xf job.tar",
             "mflog 'Task is starting.'",
+            "flush_mflogs",
         ]
         return cmds
 
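With the two insertions above, the generated bootstrap script defines `flush_mflogs` up front and invokes it right after "Task is starting.", so setup logs are shipped before user code runs. An abridged sketch of the resulting command list, with the constants inlined (joining with `&&` is illustrative):

    cmds = [
        "flush_mflogs(){ python -m metaflow.mflog.save_logs; }",  # BASH_FLUSH_LOGS
        "mflog 'Setting up task environment.'",
        # ... package download and extraction steps elided ...
        "mflog 'Task is starting.'",
        "flush_mflogs",
    ]
    print(" && ".join(cmds))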
metaflow/mflog/__init__.py CHANGED
@@ -44,6 +44,8 @@ BASH_MFLOG = (
 BASH_SAVE_LOGS_ARGS = ["python", "-m", "metaflow.mflog.save_logs"]
 BASH_SAVE_LOGS = " ".join(BASH_SAVE_LOGS_ARGS)
 
+BASH_FLUSH_LOGS = "flush_mflogs(){ " f"{BASH_SAVE_LOGS}; " "}"
+
 
 # this function returns a bash expression that redirects stdout
 # and stderr of the given bash expression to mflog.tee
@@ -63,7 +65,7 @@ def bash_capture_logs(bash_expr, var_transform=None):
 # update_delay determines how often logs should be uploaded to S3
 # as a function of the task execution time
 
-MIN_UPDATE_DELAY = 1.0  # the most frequent update interval
+MIN_UPDATE_DELAY = 0.25  # the most frequent update interval
 MAX_UPDATE_DELAY = 30.0  # the least frequent update interval
 
 
@@ -110,7 +112,6 @@ def export_mflog_env_vars(
 
 def tail_logs(prefix, stdout_tail, stderr_tail, echo, has_log_updates):
     def _available_logs(tail, stream, echo, should_persist=False):
-        # print the latest batch of lines
         try:
             for line in tail:
                 if should_persist:
@@ -128,7 +129,7 @@ def tail_logs(prefix, stdout_tail, stderr_tail, echo, has_log_updates):
 
     start_time = time.time()
     next_log_update = start_time
-    log_update_delay = 1
+    log_update_delay = update_delay(0)
     while has_log_updates():
         if time.time() > next_log_update:
             _available_logs(stdout_tail, "stdout", echo)
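The new `BASH_FLUSH_LOGS` constant relies on Python's implicit string-literal concatenation to splice `BASH_SAVE_LOGS` into a bash function definition, while lowering `MIN_UPDATE_DELAY` to 0.25s and seeding `log_update_delay` with `update_delay(0)` makes early log tailing more responsive. What the constant expands to:

    BASH_SAVE_LOGS = " ".join(["python", "-m", "metaflow.mflog.save_logs"])
    BASH_FLUSH_LOGS = "flush_mflogs(){ " f"{BASH_SAVE_LOGS}; " "}"
    print(BASH_FLUSH_LOGS)
    # flush_mflogs(){ python -m metaflow.mflog.save_logs; }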
metaflow/plugins/__init__.py CHANGED
@@ -167,6 +167,12 @@ DEPLOYER_IMPL_PROVIDERS_DESC = [
     ),
 ]
 
+TL_PLUGINS_DESC = [
+    ("requirements_txt_parser", ".pypi.parsers.requirements_txt_parser"),
+    ("pyproject_toml_parser", ".pypi.parsers.pyproject_toml_parser"),
+    ("conda_environment_yml_parser", ".pypi.parsers.conda_environment_yml_parser"),
+]
+
 process_plugins(globals())
 
 
@@ -207,6 +213,8 @@ GCP_CLIENT_PROVIDERS = resolve_plugins("gcp_client_provider")
 if sys.version_info >= (3, 7):
     DEPLOYER_IMPL_PROVIDERS = resolve_plugins("deployer_impl_provider")
 
+TL_PLUGINS = resolve_plugins("tl_plugin")
+
 from .cards.card_modules import MF_EXTERNAL_CARDS
 
 # Cards; due to the way cards were designed, it is harder to make them fit
@@ -251,3 +259,9 @@ CARDS = [
     TestRefreshComponentCard,
 ]
 merge_lists(CARDS, MF_EXTERNAL_CARDS, "type")
+
+
+def _import_tl_plugins(globals_dict):
+
+    for name, p in TL_PLUGINS.items():
+        globals_dict[name] = p
metaflow/plugins/argo/argo_client.py CHANGED
@@ -256,12 +256,19 @@ class ArgoClient(object):
                 json.loads(e.body)["message"] if e.body is not None else e.reason
             )
 
-    def trigger_workflow_template(self, name, parameters={}):
+    def trigger_workflow_template(self, name, usertype, username, parameters={}):
         client = self._client.get()
         body = {
             "apiVersion": "argoproj.io/v1alpha1",
             "kind": "Workflow",
-            "metadata": {"generateName": name + "-"},
+            "metadata": {
+                "generateName": name + "-",
+                "annotations": {
+                    "metaflow/triggered_by_user": json.dumps(
+                        {"type": usertype, "name": username}
+                    )
+                },
+            },
             "spec": {
                 "workflowTemplateRef": {"name": name},
                 "arguments": {
metaflow/plugins/argo/argo_workflows.py CHANGED
@@ -65,6 +65,7 @@ from metaflow.util import (
 )
 
 from .argo_client import ArgoClient
+from metaflow.util import resolve_identity
 
 
 class ArgoWorkflowsException(MetaflowException):
@@ -109,8 +110,7 @@ class ArgoWorkflows(object):
         notify_slack_webhook_url=None,
         notify_pager_duty_integration_key=None,
         notify_incident_io_api_key=None,
-        incident_io_success_severity_id=None,
-        incident_io_error_severity_id=None,
+        incident_io_alert_source_config_id=None,
         enable_heartbeat_daemon=True,
         enable_error_msg_capture=False,
     ):
@@ -161,8 +161,7 @@ class ArgoWorkflows(object):
         self.notify_slack_webhook_url = notify_slack_webhook_url
         self.notify_pager_duty_integration_key = notify_pager_duty_integration_key
         self.notify_incident_io_api_key = notify_incident_io_api_key
-        self.incident_io_success_severity_id = incident_io_success_severity_id
-        self.incident_io_error_severity_id = incident_io_error_severity_id
+        self.incident_io_alert_source_config_id = incident_io_alert_source_config_id
         self.enable_heartbeat_daemon = enable_heartbeat_daemon
         self.enable_error_msg_capture = enable_error_msg_capture
         self.parameters = self._process_parameters()
@@ -316,8 +315,16 @@ class ArgoWorkflows(object):
                 "Workflows before proceeding." % name
             )
         try:
+            id_parts = resolve_identity().split(":")
+            parts_size = len(id_parts)
+            usertype = id_parts[0] if parts_size > 0 else "unknown"
+            username = id_parts[1] if parts_size > 1 else "unknown"
+
             return ArgoClient(namespace=KUBERNETES_NAMESPACE).trigger_workflow_template(
-                name, parameters
+                name,
+                usertype,
+                username,
+                parameters,
             )
         except Exception as e:
             raise ArgoWorkflowsException(str(e))
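A sketch of the identity handling above: `resolve_identity()` returns a `type:name` string (the example value below is a stand-in), which ends up in the workflow's `metaflow/triggered_by_user` annotation via `trigger_workflow_template`:

    import json

    identity = "user:jane"  # illustrative resolve_identity() result
    id_parts = identity.split(":")
    usertype = id_parts[0] if len(id_parts) > 0 else "unknown"
    username = id_parts[1] if len(id_parts) > 1 else "unknown"
    print(json.dumps({"type": usertype, "name": username}))
    # {"type": "user", "name": "jane"}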
@@ -2528,25 +2535,49 @@ class ArgoWorkflows(object):
     def _incident_io_alert_template(self):
         if self.notify_incident_io_api_key is None:
             return None
-        if self.incident_io_error_severity_id is None:
+        if self.incident_io_alert_source_config_id is None:
             raise MetaflowException(
-                "Creating incidents for errors requires a severity id."
+                "Creating alerts for errors requires an alert source config ID."
             )
+        ui_links = self._incident_io_ui_urls_for_run()
         return Template("notify-incident-io-on-error").http(
             Http("POST")
-            .url("https://api.incident.io/v2/incidents")
+            .url(
+                "https://api.incident.io/v2/alert_events/http/%s"
+                % self.incident_io_alert_source_config_id
+            )
             .header("Content-Type", "application/json")
             .header("Authorization", "Bearer %s" % self.notify_incident_io_api_key)
             .body(
                 json.dumps(
                     {
                         "idempotency_key": "argo-{{workflow.name}}",  # use run id to deduplicate alerts.
-                        "visibility": "public",
-                        "severity_id": self.incident_io_error_severity_id,
-                        "name": "Flow %s has failed." % self.flow.name,
-                        "summary": "Metaflow run %s/argo-{{workflow.name}} failed! %s"
-                        % (self.flow.name, self._incident_io_ui_urls_for_run()),
-                        # TODO: Add support for custom field entries.
+                        "status": "firing",
+                        "title": "Flow %s has failed." % self.flow.name,
+                        "description": "Metaflow run {run_pathspec} failed!{urls}".format(
+                            run_pathspec="%s/argo-{{workflow.name}}" % self.flow.name,
+                            urls=(
+                                "\n\nSee details for the run at:\n\n"
+                                + "\n\n".join(ui_links)
+                                if ui_links
+                                else ""
+                            ),
+                        ),
+                        "source_url": (
+                            "%s/%s/%s"
+                            % (
+                                UI_URL.rstrip("/"),
+                                self.flow.name,
+                                "argo-{{workflow.name}}",
+                            )
+                            if UI_URL
+                            else None
+                        ),
+                        "metadata": {
+                            "run_status": "failed",
+                            "flow_name": self.flow.name,
+                            "run_id": "argo-{{workflow.name}}",
+                        },
                     }
                 )
             )
@@ -2555,27 +2586,49 @@ class ArgoWorkflows(object):
     def _incident_io_change_template(self):
         if self.notify_incident_io_api_key is None:
             return None
-        if self.incident_io_success_severity_id is None:
+        if self.incident_io_alert_source_config_id is None:
             raise MetaflowException(
-                "Creating incidents for successes requires a severity id."
+                "Creating alerts for successes requires an alert source config ID."
             )
+        ui_links = self._incident_io_ui_urls_for_run()
         return Template("notify-incident-io-on-success").http(
             Http("POST")
-            .url("https://api.incident.io/v2/incidents")
+            .url(
+                "https://api.incident.io/v2/alert_events/http/%s"
+                % self.incident_io_alert_source_config_id
+            )
             .header("Content-Type", "application/json")
             .header("Authorization", "Bearer %s" % self.notify_incident_io_api_key)
             .body(
                 json.dumps(
                     {
                         "idempotency_key": "argo-{{workflow.name}}",  # use run id to deduplicate alerts.
-                        "visibility": "public",
-                        "severity_id": self.incident_io_success_severity_id,
-                        # TODO: Do we need to make incident type configurable for successes? otherwise they are created as 'investigating'
-                        # "incident_type_id": ""
-                        "name": "Flow %s has succeeded." % self.flow.name,
-                        "summary": "Metaflow run %s/argo-{{workflow.name}} succeeded!%s"
-                        % (self.flow.name, self._incident_io_ui_urls_for_run()),
-                        # TODO: Add support for custom field entries.
+                        "status": "firing",
+                        "title": "Flow %s has succeeded." % self.flow.name,
+                        "description": "Metaflow run {run_pathspec} succeeded!{urls}".format(
+                            run_pathspec="%s/argo-{{workflow.name}}" % self.flow.name,
+                            urls=(
+                                "\n\nSee details for the run at:\n\n"
+                                + "\n\n".join(ui_links)
+                                if ui_links
+                                else ""
+                            ),
+                        ),
+                        "source_url": (
+                            "%s/%s/%s"
+                            % (
+                                UI_URL.rstrip("/"),
+                                self.flow.name,
+                                "argo-{{workflow.name}}",
+                            )
+                            if UI_URL
+                            else None
+                        ),
+                        "metadata": {
+                            "run_status": "succeeded",
+                            "flow_name": self.flow.name,
+                            "run_id": "argo-{{workflow.name}}",
+                        },
                     }
                 )
            )
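For reference, an illustrative alert event as the failure template above would POST it to incident.io's alert-events endpoint (all values are stand-ins; `{{workflow.name}}` is substituted by Argo at runtime):

    import json

    payload = {
        "idempotency_key": "argo-helloflow-abc123",
        "status": "firing",
        "title": "Flow HelloFlow has failed.",
        "description": "Metaflow run HelloFlow/argo-helloflow-abc123 failed!",
        "source_url": "https://ui.example.com/HelloFlow/argo-helloflow-abc123",
        "metadata": {
            "run_status": "failed",
            "flow_name": "HelloFlow",
            "run_id": "argo-helloflow-abc123",
        },
    }
    print(json.dumps(payload, indent=2))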
@@ -2597,9 +2650,7 @@ class ArgoWorkflows(object):
                 "{{workflow.name}}",
             )
             links.append(url)
-        if links:
-            links = ["See details for the run at: ", *links]
-        return "\n\n".join(links)
+        return links
 
     def _pager_duty_change_template(self):
         # https://developer.pagerduty.com/docs/ZG9jOjExMDI5NTgy-send-a-change-event