ob-metaflow 2.15.18.1__py2.py3-none-any.whl → 2.16.0.1__py2.py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Potentially problematic release: this version of ob-metaflow has been flagged as possibly problematic.
- metaflow/__init__.py +7 -1
- metaflow/_vendor/imghdr/__init__.py +180 -0
- metaflow/cli.py +16 -1
- metaflow/cli_components/init_cmd.py +1 -0
- metaflow/cli_components/run_cmds.py +6 -2
- metaflow/client/core.py +22 -30
- metaflow/cmd/develop/stub_generator.py +19 -2
- metaflow/datastore/task_datastore.py +0 -1
- metaflow/debug.py +5 -0
- metaflow/decorators.py +230 -70
- metaflow/extension_support/__init__.py +15 -8
- metaflow/extension_support/_empty_file.py +2 -2
- metaflow/flowspec.py +80 -53
- metaflow/graph.py +24 -2
- metaflow/meta_files.py +13 -0
- metaflow/metadata_provider/metadata.py +7 -1
- metaflow/metaflow_config.py +5 -0
- metaflow/metaflow_environment.py +82 -25
- metaflow/metaflow_version.py +1 -1
- metaflow/package/__init__.py +664 -0
- metaflow/packaging_sys/__init__.py +870 -0
- metaflow/packaging_sys/backend.py +113 -0
- metaflow/packaging_sys/distribution_support.py +153 -0
- metaflow/packaging_sys/tar_backend.py +86 -0
- metaflow/packaging_sys/utils.py +91 -0
- metaflow/packaging_sys/v1.py +476 -0
- metaflow/plugins/__init__.py +3 -0
- metaflow/plugins/airflow/airflow.py +11 -1
- metaflow/plugins/airflow/airflow_cli.py +15 -4
- metaflow/plugins/argo/argo_workflows.py +346 -301
- metaflow/plugins/argo/argo_workflows_cli.py +16 -4
- metaflow/plugins/argo/exit_hooks.py +209 -0
- metaflow/plugins/aws/aws_utils.py +1 -1
- metaflow/plugins/aws/batch/batch.py +22 -3
- metaflow/plugins/aws/batch/batch_cli.py +3 -0
- metaflow/plugins/aws/batch/batch_decorator.py +13 -5
- metaflow/plugins/aws/step_functions/step_functions.py +10 -1
- metaflow/plugins/aws/step_functions/step_functions_cli.py +15 -4
- metaflow/plugins/cards/card_cli.py +20 -1
- metaflow/plugins/cards/card_creator.py +24 -1
- metaflow/plugins/cards/card_decorator.py +57 -6
- metaflow/plugins/cards/card_modules/convert_to_native_type.py +5 -2
- metaflow/plugins/cards/card_modules/test_cards.py +16 -0
- metaflow/plugins/cards/metadata.py +22 -0
- metaflow/plugins/exit_hook/__init__.py +0 -0
- metaflow/plugins/exit_hook/exit_hook_decorator.py +46 -0
- metaflow/plugins/exit_hook/exit_hook_script.py +52 -0
- metaflow/plugins/kubernetes/kubernetes.py +8 -1
- metaflow/plugins/kubernetes/kubernetes_cli.py +3 -0
- metaflow/plugins/kubernetes/kubernetes_decorator.py +13 -5
- metaflow/plugins/package_cli.py +25 -23
- metaflow/plugins/parallel_decorator.py +4 -2
- metaflow/plugins/pypi/bootstrap.py +8 -2
- metaflow/plugins/pypi/conda_decorator.py +39 -82
- metaflow/plugins/pypi/conda_environment.py +6 -2
- metaflow/plugins/pypi/pypi_decorator.py +4 -4
- metaflow/plugins/secrets/__init__.py +3 -0
- metaflow/plugins/secrets/secrets_decorator.py +9 -173
- metaflow/plugins/secrets/secrets_func.py +49 -0
- metaflow/plugins/secrets/secrets_spec.py +101 -0
- metaflow/plugins/secrets/utils.py +74 -0
- metaflow/plugins/test_unbounded_foreach_decorator.py +2 -2
- metaflow/plugins/timeout_decorator.py +0 -1
- metaflow/plugins/uv/bootstrap.py +11 -0
- metaflow/plugins/uv/uv_environment.py +4 -2
- metaflow/pylint_wrapper.py +5 -1
- metaflow/runner/click_api.py +5 -4
- metaflow/runner/metaflow_runner.py +16 -1
- metaflow/runner/subprocess_manager.py +14 -2
- metaflow/runtime.py +82 -11
- metaflow/task.py +91 -7
- metaflow/user_configs/config_options.py +13 -8
- metaflow/user_configs/config_parameters.py +0 -4
- metaflow/user_decorators/__init__.py +0 -0
- metaflow/user_decorators/common.py +144 -0
- metaflow/user_decorators/mutable_flow.py +499 -0
- metaflow/user_decorators/mutable_step.py +424 -0
- metaflow/user_decorators/user_flow_decorator.py +263 -0
- metaflow/user_decorators/user_step_decorator.py +712 -0
- metaflow/util.py +4 -1
- metaflow/version.py +1 -1
- {ob_metaflow-2.15.18.1.data → ob_metaflow-2.16.0.1.data}/data/share/metaflow/devtools/Tiltfile +27 -2
- {ob_metaflow-2.15.18.1.dist-info → ob_metaflow-2.16.0.1.dist-info}/METADATA +2 -2
- {ob_metaflow-2.15.18.1.dist-info → ob_metaflow-2.16.0.1.dist-info}/RECORD +90 -70
- metaflow/info_file.py +0 -25
- metaflow/package.py +0 -203
- metaflow/user_configs/config_decorators.py +0 -568
- {ob_metaflow-2.15.18.1.data → ob_metaflow-2.16.0.1.data}/data/share/metaflow/devtools/Makefile +0 -0
- {ob_metaflow-2.15.18.1.data → ob_metaflow-2.16.0.1.data}/data/share/metaflow/devtools/pick_services.sh +0 -0
- {ob_metaflow-2.15.18.1.dist-info → ob_metaflow-2.16.0.1.dist-info}/WHEEL +0 -0
- {ob_metaflow-2.15.18.1.dist-info → ob_metaflow-2.16.0.1.dist-info}/entry_points.txt +0 -0
- {ob_metaflow-2.15.18.1.dist-info → ob_metaflow-2.16.0.1.dist-info}/licenses/LICENSE +0 -0
- {ob_metaflow-2.15.18.1.dist-info → ob_metaflow-2.16.0.1.dist-info}/top_level.txt +0 -0
metaflow/plugins/argo/argo_workflows.py

@@ -66,6 +66,7 @@ from metaflow.util import (
 )

 from .argo_client import ArgoClient
+from .exit_hooks import ExitHookHack, HttpExitHook, ContainerHook
 from metaflow.util import resolve_identity

@@ -91,6 +92,7 @@ class ArgoWorkflows(object):
         name,
         graph: FlowGraph,
         flow,
+        code_package_metadata,
         code_package_sha,
         code_package_url,
         production_token,
@@ -140,9 +142,18 @@ class ArgoWorkflows(object):
         # ensure that your Argo Workflows controller doesn't restrict
         # templateReferencing.

+        # get initial configs
+        self.initial_configs = init_config()
+        for entry in ["OBP_PERIMETER", "OBP_INTEGRATIONS_URL"]:
+            if entry not in self.initial_configs:
+                raise ArgoWorkflowsException(
+                    f"{entry} was not found in metaflow config. Please make sure to run `outerbounds configure <...>` command which can be found on the Outerbounds UI or reach out to your Outerbounds support team."
+                )
+
         self.name = name
         self.graph = graph
         self.flow = flow
+        self.code_package_metadata = code_package_metadata
         self.code_package_sha = code_package_sha
         self.code_package_url = code_package_url
         self.production_token = production_token
@@ -551,7 +562,7 @@ class ArgoWorkflows(object):
                 type=param_type,
                 description=param.kwargs.get("help"),
                 is_required=is_required,
-                **extra_attrs
+                **extra_attrs,
             )
         return parameters

@@ -796,6 +807,7 @@ class ArgoWorkflows(object):

         dag_annotation = {"metaflow/dag": json.dumps(graph_info)}

+        lifecycle_hooks = self._lifecycle_hooks()
         return (
             WorkflowTemplate()
             .metadata(
@@ -904,97 +916,20 @@ class ArgoWorkflows(object):
                 if self.enable_error_msg_capture
                 else None
             )
-            # Set exit hook handlers if notifications are enabled
+            # Set lifecycle hooks if notifications are enabled
             .hooks(
                 {
-                    **(
-                        {
-                            # workflow status maps to Completed
-                            "notify-slack-on-success": LifecycleHook()
-                            .expression("workflow.status == 'Succeeded'")
-                            .template("notify-slack-on-success"),
-                        }
-                        if self.notify_on_success and self.notify_slack_webhook_url
-                        else {}
-                    ),
-                    **(
-                        {
-                            # workflow status maps to Completed
-                            "notify-pager-duty-on-success": LifecycleHook()
-                            .expression("workflow.status == 'Succeeded'")
-                            .template("notify-pager-duty-on-success"),
-                        }
-                        if self.notify_on_success
-                        and self.notify_pager_duty_integration_key
-                        else {}
-                    ),
-                    **(
-                        {
-                            # workflow status maps to Completed
-                            "notify-incident-io-on-success": LifecycleHook()
-                            .expression("workflow.status == 'Succeeded'")
-                            .template("notify-incident-io-on-success"),
-                        }
-                        if self.notify_on_success
-                        and self.notify_incident_io_api_key
-                        else {}
-                    ),
-                    **(
-                        {
-                            # workflow status maps to Failed or Error
-                            "notify-slack-on-failure": LifecycleHook()
-                            .expression("workflow.status == 'Failed'")
-                            .template("notify-slack-on-error"),
-                            "notify-slack-on-error": LifecycleHook()
-                            .expression("workflow.status == 'Error'")
-                            .template("notify-slack-on-error"),
-                        }
-                        if self.notify_on_error and self.notify_slack_webhook_url
-                        else {}
-                    ),
-                    **(
-                        {
-                            # workflow status maps to Failed or Error
-                            "notify-pager-duty-on-failure": LifecycleHook()
-                            .expression("workflow.status == 'Failed'")
-                            .template("notify-pager-duty-on-error"),
-                            "notify-pager-duty-on-error": LifecycleHook()
-                            .expression("workflow.status == 'Error'")
-                            .template("notify-pager-duty-on-error"),
-                        }
-                        if self.notify_on_error
-                        and self.notify_pager_duty_integration_key
-                        else {}
-                    ),
-                    **(
-                        {
-                            # workflow status maps to Failed or Error
-                            "notify-incident-io-on-failure": LifecycleHook()
-                            .expression("workflow.status == 'Failed'")
-                            .template("notify-incident-io-on-error"),
-                            "notify-incident-io-on-error": LifecycleHook()
-                            .expression("workflow.status == 'Error'")
-                            .template("notify-incident-io-on-error"),
-                        }
-                        if self.notify_on_error and self.notify_incident_io_api_key
-                        else {}
-                    ),
-                    # Warning: terrible hack to workaround a bug in Argo Workflow
-                    # where the hooks listed above do not execute unless
-                    # there is an explicit exit hook. as and when this
-                    # bug is patched, we should remove this effectively
-                    # no-op hook.
-                    **(
-                        {"exit": LifecycleHook().template("exit-hook-hack")}
-                        if self.notify_on_error or self.notify_on_success
-                        else {}
-                    ),
+                    lifecycle.name: lifecycle
+                    for hook in lifecycle_hooks
+                    for lifecycle in hook.lifecycle_hooks
                 }
             )
             # Top-level DAG template(s)
             .templates(self._dag_templates())
             # Container templates
             .templates(self._container_templates())
+            # Lifecycle hook template(s)
+            .templates([hook.template for hook in lifecycle_hooks])
             # Exit hook template(s)
             .templates(self._exit_hook_templates())
             # Sidecar templates (Daemon Containers)
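A note on the rewritten .hooks(...) call above: the workflow-level hooks mapping is now built with one nested dict comprehension instead of six hand-rolled **(...) splats. The code assumes each object returned by self._lifecycle_hooks() exposes a lifecycle_hooks list (the entries to register, each carrying a name) and a template attribute (registered separately via .templates(...)). A minimal sketch of that flattening, with stand-in classes — everything except the comprehension itself is illustrative, the real classes live in the new exit_hooks module:

    # Sketch only: stand-in shapes for the objects consumed by the comprehension.
    class Lifecycle:
        def __init__(self, name):
            self.name = name  # key in the workflow-level "hooks" mapping

    class Hook:
        def __init__(self, lifecycles, template):
            self.lifecycle_hooks = lifecycles  # lifecycle entries to register
            self.template = template  # the template those entries invoke

    lifecycle_hooks = [
        Hook([Lifecycle("notify-slack-on-error")], template="notify-slack-on-error"),
        Hook([Lifecycle("success-my-hook")], template="success-my-hook"),
    ]

    # Same flattening as the new .hooks(...) call: one mapping entry per
    # lifecycle hook, across all hook objects returned by _lifecycle_hooks().
    hooks = {
        lifecycle.name: lifecycle
        for hook in lifecycle_hooks
        for lifecycle in hook.lifecycle_hooks
    }
    assert set(hooks) == {"notify-slack-on-error", "success-my-hook"}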
@@ -1571,7 +1506,9 @@ class ArgoWorkflows(object):
                     mflog_expr,
                 ]
                 + self.environment.get_package_commands(
-                    self.code_package_url,
+                    self.code_package_url,
+                    self.flow_datastore.TYPE,
+                    self.code_package_metadata,
                 )
             )
             step_cmds = self.environment.bootstrap_commands(
@@ -1583,6 +1520,7 @@ class ArgoWorkflows(object):
                 decorator.make_decorator_spec()
                 for decorator in node.decorators
                 if not decorator.statically_defined
+                and decorator.inserted_by is None
             ]
         }
         # FlowDecorators can define their own top-level options. They are
@@ -1749,6 +1687,7 @@ class ArgoWorkflows(object):
             **{
                 # These values are needed by Metaflow to set it's internal
                 # state appropriately.
+                "METAFLOW_CODE_METADATA": self.code_package_metadata,
                 "METAFLOW_CODE_URL": self.code_package_url,
                 "METAFLOW_CODE_SHA": self.code_package_sha,
                 "METAFLOW_CODE_DS": self.flow_datastore.TYPE,
@@ -1955,17 +1894,10 @@ class ArgoWorkflows(object):
             and k not in set(ARGO_WORKFLOWS_ENV_VARS_TO_SKIP.split(","))
         }

-        #
-        initial_configs = init_config()
-        for entry in ["OBP_PERIMETER", "OBP_INTEGRATIONS_URL"]:
-            if entry not in initial_configs:
-                raise ArgoWorkflowsException(
-                    f"{entry} was not found in metaflow config. Please make sure to run `outerbounds configure <...>` command which can be found on the Ourebounds UI or reach out to your Outerbounds support team."
-                )
-
+        # OBP configs
         additional_obp_configs = {
-            "OBP_PERIMETER": initial_configs["OBP_PERIMETER"],
-            "OBP_INTEGRATIONS_URL": initial_configs["OBP_INTEGRATIONS_URL"],
+            "OBP_PERIMETER": self.initial_configs["OBP_PERIMETER"],
+            "OBP_INTEGRATIONS_URL": self.initial_configs["OBP_INTEGRATIONS_URL"],
         }

         # Tmpfs variables
@@ -2359,40 +2291,190 @@ class ArgoWorkflows(object):
             templates.append(self._heartbeat_daemon_template())
         return templates

-    # Return exit hook templates for workflow execution notifications.
-    def _exit_hook_templates(self):
-        templates = []
+    # Return lifecycle hooks for workflow execution notifications.
+    def _lifecycle_hooks(self):
+        hooks = []
         if self.notify_on_error:
-            templates.append(self._slack_error_template())
-            templates.append(self._pager_duty_alert_template())
-            templates.append(self._incident_io_alert_template())
+            hooks.append(self._slack_error_template())
+            hooks.append(self._pager_duty_alert_template())
+            hooks.append(self._incident_io_alert_template())
         if self.notify_on_success:
-            templates.append(self._slack_success_template())
-            templates.append(self._pager_duty_change_template())
-            templates.append(self._incident_io_change_template())
+            hooks.append(self._slack_success_template())
+            hooks.append(self._pager_duty_change_template())
+            hooks.append(self._incident_io_change_template())
+
+        exit_hook_decos = self.flow._flow_decorators.get("exit_hook", [])
+
+        for deco in exit_hook_decos:
+            hooks.extend(self._lifecycle_hook_from_deco(deco))

         # Clean up None values from templates.
-        templates = list(filter(None, templates))
-
-        if self.notify_on_error or self.notify_on_success:
-            # Warning: terrible hack to workaround a bug in Argo Workflow
-            # where the hooks listed above do not execute unless there
-            # is an explicit exit hook. as and when this bug is patched, we should
-            # remove this effectively no-op template.
-            # Note: We use the Http template because changing this to an actual no-op container had the side-effect of
-            # leaving LifecycleHooks in a pending state even when they have finished execution.
-            templates.append(
-                Template("exit-hook-hack").http(
-                    Http("GET")
-                    .url(
+        hooks = list(filter(None, hooks))
+
+        if hooks:
+            hooks.append(
+                ExitHookHack(
+                    url=(
                         self.notify_slack_webhook_url
                         or "https://events.pagerduty.com/v2/enqueue"
                     )
-                    .success_condition("true == true")
                 )
             )
+        return hooks
+
+    def _lifecycle_hook_from_deco(self, deco):
+        from kubernetes import client as kubernetes_sdk
+
+        start_step = [step for step in self.graph if step.name == "start"][0]
+        # We want to grab the base image used by the start step, as this is known to be pullable from within the cluster,
+        # and it might contain the required libraries, allowing us to start up faster.
+        start_kube_deco = [
+            deco for deco in start_step.decorators if deco.name == "kubernetes"
+        ][0]
+        resources = dict(start_kube_deco.attributes)
+        kube_defaults = dict(start_kube_deco.defaults)
+
+        # OBP Configs
+        additional_obp_configs = {
+            "OBP_PERIMETER": self.initial_configs["OBP_PERIMETER"],
+            "OBP_INTEGRATIONS_URL": self.initial_configs["OBP_INTEGRATIONS_URL"],
+        }
+
+        run_id_template = "argo-{{workflow.name}}"
+        metaflow_version = self.environment.get_environment_info()
+        metaflow_version["flow_name"] = self.graph.name
+        metaflow_version["production_token"] = self.production_token
+        env = {
+            # These values are needed by Metaflow to set it's internal
+            # state appropriately.
+            "METAFLOW_CODE_URL": self.code_package_url,
+            "METAFLOW_CODE_SHA": self.code_package_sha,
+            "METAFLOW_CODE_DS": self.flow_datastore.TYPE,
+            "METAFLOW_SERVICE_URL": SERVICE_INTERNAL_URL,
+            "METAFLOW_SERVICE_HEADERS": json.dumps(SERVICE_HEADERS),
+            "METAFLOW_USER": "argo-workflows",
+            "METAFLOW_DEFAULT_DATASTORE": self.flow_datastore.TYPE,
+            "METAFLOW_DEFAULT_METADATA": DEFAULT_METADATA,
+            "METAFLOW_OWNER": self.username,
+        }
+        # pass on the Run pathspec for script
+        env["RUN_PATHSPEC"] = f"{self.graph.name}/{run_id_template}"
+
+        # support Metaflow sandboxes
+        env["METAFLOW_INIT_SCRIPT"] = KUBERNETES_SANDBOX_INIT_SCRIPT
+
+        # support fetching secrets
+        env.update(additional_obp_configs)
+
+        env["METAFLOW_WORKFLOW_NAME"] = "{{workflow.name}}"
+        env["METAFLOW_WORKFLOW_NAMESPACE"] = "{{workflow.namespace}}"
+        env = {
+            k: v
+            for k, v in env.items()
+            if v is not None
+            and k not in set(ARGO_WORKFLOWS_ENV_VARS_TO_SKIP.split(","))
+        }
+
+        def _cmd(fn_name):
+            mflog_expr = export_mflog_env_vars(
+                datastore_type=self.flow_datastore.TYPE,
+                stdout_path="$PWD/.logs/mflog_stdout",
+                stderr_path="$PWD/.logs/mflog_stderr",
+                flow_name=self.flow.name,
+                run_id=run_id_template,
+                step_name=f"_hook_{fn_name}",
+                task_id="1",
+                retry_count="0",
+            )
+            cmds = " && ".join(
+                [
+                    # For supporting sandboxes, ensure that a custom script is executed
+                    # before anything else is executed. The script is passed in as an
+                    # env var.
+                    '${METAFLOW_INIT_SCRIPT:+eval \\"${METAFLOW_INIT_SCRIPT}\\"}',
+                    "mkdir -p $PWD/.logs",
+                    mflog_expr,
+                ]
+                + self.environment.get_package_commands(
+                    self.code_package_url, self.flow_datastore.TYPE
+                )[:-1]
+                # Replace the line 'Task in starting'
+                + [f"mflog 'Lifecycle hook {fn_name} is starting.'"]
+                + [
+                    f"python -m metaflow.plugins.exit_hook.exit_hook_script {metaflow_version['script']} {fn_name} $RUN_PATHSPEC"
+                ]
+            )
+
+            cmds = shlex.split('bash -c "%s"' % cmds)
+            return cmds
+
+        def _container(cmds):
+            return to_camelcase(
+                kubernetes_sdk.V1Container(
+                    name="main",
+                    command=cmds,
+                    image=deco.attributes["options"].get("image", None)
+                    or resources["image"],
+                    env=[
+                        kubernetes_sdk.V1EnvVar(name=k, value=str(v))
+                        for k, v in env.items()
+                    ],
+                    env_from=[
+                        kubernetes_sdk.V1EnvFromSource(
+                            secret_ref=kubernetes_sdk.V1SecretEnvSource(
+                                name=str(k),
+                                # optional=True
+                            )
+                        )
+                        for k in list(
+                            []
+                            if not resources.get("secrets")
+                            else (
+                                [resources.get("secrets")]
+                                if isinstance(resources.get("secrets"), str)
+                                else resources.get("secrets")
+                            )
+                        )
+                        + KUBERNETES_SECRETS.split(",")
+                        + ARGO_WORKFLOWS_KUBERNETES_SECRETS.split(",")
+                        if k
+                    ],
+                    resources=kubernetes_sdk.V1ResourceRequirements(
+                        requests={
+                            "cpu": str(kube_defaults["cpu"]),
+                            "memory": "%sM" % str(kube_defaults["memory"]),
+                        }
+                    ),
+                ).to_dict()
+            )
+
+        # create lifecycle hooks from deco
+        hooks = []
+        for success_fn_name in deco.success_hooks:
+            hook = ContainerHook(
+                name=f"success-{success_fn_name.replace('_', '-')}",
+                container=_container(cmds=_cmd(success_fn_name)),
+                service_account_name=resources["service_account"],
+                on_success=True,
+            )
+            hooks.append(hook)
+
+        for error_fn_name in deco.error_hooks:
+            hook = ContainerHook(
+                name=f"error-{error_fn_name.replace('_', '-')}",
+                service_account_name=resources["service_account"],
+                container=_container(cmds=_cmd(error_fn_name)),
+                on_error=True,
+            )
+            hooks.append(hook)
+
+        return hooks
+
+    def _exit_hook_templates(self):
+        templates = []
         if self.enable_error_msg_capture:
             templates.extend(self._error_msg_capture_hook_templates())
+
         return templates

     def _error_msg_capture_hook_templates(self):
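The new _lifecycle_hook_from_deco above is driven by the @exit_hook flow decorator this release adds (see metaflow/plugins/exit_hook/exit_hook_decorator.py in the file list). Only two of its attributes are visible in this diff — deco.success_hooks and deco.error_hooks, lists of function names run on the cluster via python -m metaflow.plugins.exit_hook.exit_hook_script. A hypothetical usage sketch, assuming the decorator accepts on_success/on_error lists of plain functions (the exact public signature is not shown in this diff):

    # Hypothetical sketch -- the decorator's real signature lives in
    # metaflow/plugins/exit_hook/exit_hook_decorator.py, not shown here.
    # What IS visible above: the Argo code reads deco.success_hooks and
    # deco.error_hooks, and each named function is invoked on the cluster as
    #   python -m metaflow.plugins.exit_hook.exit_hook_script <script> <fn> $RUN_PATHSPEC
    from metaflow import FlowSpec, exit_hook, step  # import path assumed


    def notify_ok():
        print("run finished cleanly")


    def notify_failed():
        print("run failed")


    @exit_hook(on_success=[notify_ok], on_error=[notify_failed])  # kwargs assumed
    class HelloFlow(FlowSpec):
        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass


    if __name__ == "__main__":
        HelloFlow()

If that reading is right, each hook function becomes its own Argo lifecycle hook, running in a container cloned from the start step's @kubernetes settings (image, service account, default cpu/memory) — which is exactly what _lifecycle_hook_from_deco assembles above.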
@@ -2433,7 +2515,9 @@ class ArgoWorkflows(object):
                 mflog_expr,
             ]
             + self.environment.get_package_commands(
-                self.code_package_url,
+                self.code_package_url,
+                self.flow_datastore.TYPE,
+                self.code_package_metadata,
             )[:-1]
             # Replace the line 'Task in starting'
             # FIXME: this can be brittle.
@@ -2453,6 +2537,7 @@ class ArgoWorkflows(object):
         env = {
             # These values are needed by Metaflow to set it's internal
             # state appropriately.
+            "METAFLOW_CODE_METADATA": self.code_package_metadata,
             "METAFLOW_CODE_URL": self.code_package_url,
             "METAFLOW_CODE_SHA": self.code_package_sha,
             "METAFLOW_CODE_DS": self.flow_datastore.TYPE,
@@ -2541,30 +2626,30 @@ class ArgoWorkflows(object):
         # https://developer.pagerduty.com/docs/ZG9jOjExMDI5NTgx-send-an-alert-event
         if self.notify_pager_duty_integration_key is None:
             return None
-        return Template("notify-pager-duty-on-error").http(
-            Http("POST")
-            .url("https://events.pagerduty.com/v2/enqueue")
-            .header("Content-Type", "application/json")
-            .body(
-                json.dumps(
-                    {
-                        "event_action": "trigger",
-                        "routing_key": self.notify_pager_duty_integration_key,
-                        # "dedup_key": self.flow.name, # TODO: Do we need deduplication?
-                        "payload": {
-                            "source": "{{workflow.name}}",
-                            "severity": "info",
-                            "summary": "Metaflow run %s/argo-{{workflow.name}} failed!"
-                            % self.flow.name,
-                            "custom_details": {
-                                "Flow": self.flow.name,
-                                "Run ID": "argo-{{workflow.name}}",
-                            },
-                        },
-                        "links": self._pager_duty_notification_links(),
-                    }
-                )
-            )
+        return HttpExitHook(
+            name="notify-pager-duty-on-error",
+            method="POST",
+            url="https://events.pagerduty.com/v2/enqueue",
+            headers={"Content-Type": "application/json"},
+            body=json.dumps(
+                {
+                    "event_action": "trigger",
+                    "routing_key": self.notify_pager_duty_integration_key,
+                    # "dedup_key": self.flow.name, # TODO: Do we need deduplication?
+                    "payload": {
+                        "source": "{{workflow.name}}",
+                        "severity": "info",
+                        "summary": "Metaflow run %s/argo-{{workflow.name}} failed!"
+                        % self.flow.name,
+                        "custom_details": {
+                            "Flow": self.flow.name,
+                            "Run ID": "argo-{{workflow.name}}",
+                        },
+                    },
+                    "links": self._pager_duty_notification_links(),
+                }
+            ),
+            on_error=True,
         )

     def _incident_io_alert_template(self):
@@ -2575,50 +2660,52 @@ class ArgoWorkflows(object):
                 "Creating alerts for errors requires a alert source config ID."
             )
         ui_links = self._incident_io_ui_urls_for_run()
-        return Template("notify-incident-io-on-error").http(
-            Http("POST")
-            .url(
-                "https://api.incident.io/v2/alert_events/http/%s"
-                % self.incident_io_alert_source_config_id
-            )
-            .header("Content-Type", "application/json")
-            .header("Authorization", "Bearer %s" % self.notify_incident_io_api_key)
-            .body(
-                json.dumps(
-                    {
-                        "idempotency_key": "argo-{{workflow.name}}",  # use run id to deduplicate alerts.
-                        "status": "firing",
-                        "title": "Flow %s has failed." % self.flow.name,
-                        "description": "Metaflow run {run_pathspec} failed!{urls}".format(
-                            run_pathspec="%s/argo-{{workflow.name}}" % self.flow.name,
-                            urls=(
-                                "\n\nSee details for the run at:\n\n"
-                                + "\n\n".join(ui_links)
-                                if ui_links
-                                else ""
-                            ),
-                        ),
-                        "source_url": (
-                            "%s/%s/%s"
-                            % (
-                                UI_URL.rstrip("/"),
-                                self.flow.name,
-                                "argo-{{workflow.name}}",
-                            )
-                            if UI_URL
-                            else None
-                        ),
-                        "metadata": {
-                            **(self.incident_io_metadata or {}),
-                            **{
-                                "run_status": "failed",
-                                "flow_name": self.flow.name,
-                                "run_id": "argo-{{workflow.name}}",
-                            },
-                        },
-                    }
-                )
-            )
+        return HttpExitHook(
+            name="notify-incident-io-on-error",
+            method="POST",
+            url=(
+                "https://api.incident.io/v2/alert_events/http/%s"
+                % self.incident_io_alert_source_config_id
+            ),
+            headers={
+                "Content-Type": "application/json",
+                "Authorization": "Bearer %s" % self.notify_incident_io_api_key,
+            },
+            body=json.dumps(
+                {
+                    "idempotency_key": "argo-{{workflow.name}}",  # use run id to deduplicate alerts.
+                    "status": "firing",
+                    "title": "Flow %s has failed." % self.flow.name,
+                    "description": "Metaflow run {run_pathspec} failed!{urls}".format(
+                        run_pathspec="%s/argo-{{workflow.name}}" % self.flow.name,
+                        urls=(
+                            "\n\nSee details for the run at:\n\n"
+                            + "\n\n".join(ui_links)
+                            if ui_links
+                            else ""
+                        ),
+                    ),
+                    "source_url": (
+                        "%s/%s/%s"
+                        % (
+                            UI_URL.rstrip("/"),
+                            self.flow.name,
+                            "argo-{{workflow.name}}",
+                        )
+                        if UI_URL
+                        else None
+                    ),
+                    "metadata": {
+                        **(self.incident_io_metadata or {}),
+                        **{
+                            "run_status": "failed",
+                            "flow_name": self.flow.name,
+                            "run_id": "argo-{{workflow.name}}",
+                        },
+                    },
+                }
+            ),
+            on_error=True,
         )

     def _incident_io_change_template(self):
@@ -2629,50 +2716,52 @@ class ArgoWorkflows(object):
                 "Creating alerts for successes requires an alert source config ID."
             )
         ui_links = self._incident_io_ui_urls_for_run()
-        return Template("notify-incident-io-on-success").http(
-            Http("POST")
-            .url(
-                "https://api.incident.io/v2/alert_events/http/%s"
-                % self.incident_io_alert_source_config_id
-            )
-            .header("Content-Type", "application/json")
-            .header("Authorization", "Bearer %s" % self.notify_incident_io_api_key)
-            .body(
-                json.dumps(
-                    {
-                        "idempotency_key": "argo-{{workflow.name}}",  # use run id to deduplicate alerts.
-                        "status": "firing",
-                        "title": "Flow %s has succeeded." % self.flow.name,
-                        "description": "Metaflow run {run_pathspec} succeeded!{urls}".format(
-                            run_pathspec="%s/argo-{{workflow.name}}" % self.flow.name,
-                            urls=(
-                                "\n\nSee details for the run at:\n\n"
-                                + "\n\n".join(ui_links)
-                                if ui_links
-                                else ""
-                            ),
-                        ),
-                        "source_url": (
-                            "%s/%s/%s"
-                            % (
-                                UI_URL.rstrip("/"),
-                                self.flow.name,
-                                "argo-{{workflow.name}}",
-                            )
-                            if UI_URL
-                            else None
-                        ),
-                        "metadata": {
-                            **(self.incident_io_metadata or {}),
-                            **{
-                                "run_status": "succeeded",
-                                "flow_name": self.flow.name,
-                                "run_id": "argo-{{workflow.name}}",
-                            },
-                        },
-                    }
-                )
-            )
+        return HttpExitHook(
+            name="notify-incident-io-on-success",
+            method="POST",
+            url=(
+                "https://api.incident.io/v2/alert_events/http/%s"
+                % self.incident_io_alert_source_config_id
+            ),
+            headers={
+                "Content-Type": "application/json",
+                "Authorization": "Bearer %s" % self.notify_incident_io_api_key,
+            },
+            body=json.dumps(
+                {
+                    "idempotency_key": "argo-{{workflow.name}}",  # use run id to deduplicate alerts.
+                    "status": "firing",
+                    "title": "Flow %s has succeeded." % self.flow.name,
+                    "description": "Metaflow run {run_pathspec} succeeded!{urls}".format(
+                        run_pathspec="%s/argo-{{workflow.name}}" % self.flow.name,
+                        urls=(
+                            "\n\nSee details for the run at:\n\n"
+                            + "\n\n".join(ui_links)
+                            if ui_links
+                            else ""
+                        ),
+                    ),
+                    "source_url": (
+                        "%s/%s/%s"
+                        % (
+                            UI_URL.rstrip("/"),
+                            self.flow.name,
+                            "argo-{{workflow.name}}",
+                        )
+                        if UI_URL
+                        else None
+                    ),
+                    "metadata": {
+                        **(self.incident_io_metadata or {}),
+                        **{
+                            "run_status": "succeeded",
+                            "flow_name": self.flow.name,
+                            "run_id": "argo-{{workflow.name}}",
+                        },
+                    },
+                }
+            ),
+            on_success=True,
         )

     def _incident_io_ui_urls_for_run(self):
@@ -2697,27 +2786,27 @@ class ArgoWorkflows(object):
         # https://developer.pagerduty.com/docs/ZG9jOjExMDI5NTgy-send-a-change-event
         if self.notify_pager_duty_integration_key is None:
             return None
-        return Template("notify-pager-duty-on-success").http(
-            Http("POST")
-            .url("https://events.pagerduty.com/v2/change/enqueue")
-            .header("Content-Type", "application/json")
-            .body(
-                json.dumps(
-                    {
-                        "routing_key": self.notify_pager_duty_integration_key,
-                        "payload": {
-                            "summary": "Metaflow run %s/argo-{{workflow.name}} Succeeded"
-                            % self.flow.name,
-                            "source": "{{workflow.name}}",
-                            "custom_details": {
-                                "Flow": self.flow.name,
-                                "Run ID": "argo-{{workflow.name}}",
-                            },
-                        },
-                        "links": self._pager_duty_notification_links(),
-                    }
-                )
-            )
+        return HttpExitHook(
+            name="notify-pager-duty-on-success",
+            method="POST",
+            url="https://events.pagerduty.com/v2/change/enqueue",
+            headers={"Content-Type": "application/json"},
+            body=json.dumps(
+                {
+                    "routing_key": self.notify_pager_duty_integration_key,
+                    "payload": {
+                        "summary": "Metaflow run %s/argo-{{workflow.name}} Succeeded"
+                        % self.flow.name,
+                        "source": "{{workflow.name}}",
+                        "custom_details": {
+                            "Flow": self.flow.name,
+                            "Run ID": "argo-{{workflow.name}}",
+                        },
+                    },
+                    "links": self._pager_duty_notification_links(),
+                }
+            ),
+            on_success=True,
         )

     def _pager_duty_notification_links(self):
@@ -2839,8 +2928,12 @@ class ArgoWorkflows(object):
         blocks = self._get_slack_blocks(message)
         payload = {"text": message, "blocks": blocks}

-        return Template("notify-slack-on-error").http(
-            Http("POST").url(self.notify_slack_webhook_url).body(json.dumps(payload))
+        return HttpExitHook(
+            name="notify-slack-on-error",
+            method="POST",
+            url=self.notify_slack_webhook_url,
+            body=json.dumps(payload),
+            on_error=True,
         )

     def _slack_success_template(self):
@@ -2855,8 +2948,12 @@ class ArgoWorkflows(object):
         blocks = self._get_slack_blocks(message)
         payload = {"text": message, "blocks": blocks}

-        return Template("notify-slack-on-success").http(
-            Http("POST").url(self.notify_slack_webhook_url).body(json.dumps(payload))
+        return HttpExitHook(
+            name="notify-slack-on-success",
+            method="POST",
+            url=self.notify_slack_webhook_url,
+            body=json.dumps(payload),
+            on_success=True,
         )

     def _heartbeat_daemon_template(self):
@@ -2915,7 +3012,8 @@ class ArgoWorkflows(object):
                 mflog_expr,
             ]
             + self.environment.get_package_commands(
-                self.code_package_url,
+                self.code_package_url,
+                self.flow_datastore.TYPE,
             )[:-1]
             # Replace the line 'Task in starting'
             # FIXME: this can be brittle.
@@ -2930,6 +3028,7 @@ class ArgoWorkflows(object):
         env = {
             # These values are needed by Metaflow to set it's internal
             # state appropriately.
+            "METAFLOW_CODE_METADATA": self.code_package_metadata,
             "METAFLOW_CODE_URL": self.code_package_url,
             "METAFLOW_CODE_SHA": self.code_package_sha,
             "METAFLOW_CODE_DS": self.flow_datastore.TYPE,
@@ -4227,57 +4326,3 @@ class TriggerParameter(object):

     def __str__(self):
         return json.dumps(self.payload, indent=4)
-
-
-class Http(object):
-    # https://argoproj.github.io/argo-workflows/fields/#http
-
-    def __init__(self, method):
-        tree = lambda: defaultdict(tree)
-        self.payload = tree()
-        self.payload["method"] = method
-        self.payload["headers"] = []
-
-    def header(self, header, value):
-        self.payload["headers"].append({"name": header, "value": value})
-        return self
-
-    def body(self, body):
-        self.payload["body"] = str(body)
-        return self
-
-    def url(self, url):
-        self.payload["url"] = url
-        return self
-
-    def success_condition(self, success_condition):
-        self.payload["successCondition"] = success_condition
-        return self
-
-    def to_json(self):
-        return self.payload
-
-    def __str__(self):
-        return json.dumps(self.payload, indent=4)
-
-
-class LifecycleHook(object):
-    # https://argoproj.github.io/argo-workflows/fields/#lifecyclehook
-
-    def __init__(self):
-        tree = lambda: defaultdict(tree)
-        self.payload = tree()
-
-    def expression(self, expression):
-        self.payload["expression"] = str(expression)
-        return self
-
-    def template(self, template):
-        self.payload["template"] = template
-        return self
-
-    def to_json(self):
-        return self.payload
-
-    def __str__(self):
-        return json.dumps(self.payload, indent=4)