metaflow 2.18.1__py2.py3-none-any.whl → 2.18.3__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- metaflow/client/core.py +1 -1
- metaflow/metadata_provider/metadata.py +6 -1
- metaflow/metaflow_config.py +5 -0
- metaflow/plugins/argo/argo_client.py +10 -6
- metaflow/plugins/argo/argo_workflows.py +219 -42
- metaflow/plugins/argo/conditional_input_paths.py +17 -3
- metaflow/plugins/aws/batch/batch.py +42 -2
- metaflow/plugins/aws/step_functions/step_functions.py +6 -1
- metaflow/plugins/aws/step_functions/step_functions_cli.py +11 -0
- metaflow/plugins/aws/step_functions/step_functions_deployer.py +3 -0
- metaflow/version.py +1 -1
- {metaflow-2.18.1.dist-info → metaflow-2.18.3.dist-info}/METADATA +2 -2
- {metaflow-2.18.1.dist-info → metaflow-2.18.3.dist-info}/RECORD +20 -20
- {metaflow-2.18.1.data → metaflow-2.18.3.data}/data/share/metaflow/devtools/Makefile +0 -0
- {metaflow-2.18.1.data → metaflow-2.18.3.data}/data/share/metaflow/devtools/Tiltfile +0 -0
- {metaflow-2.18.1.data → metaflow-2.18.3.data}/data/share/metaflow/devtools/pick_services.sh +0 -0
- {metaflow-2.18.1.dist-info → metaflow-2.18.3.dist-info}/WHEEL +0 -0
- {metaflow-2.18.1.dist-info → metaflow-2.18.3.dist-info}/entry_points.txt +0 -0
- {metaflow-2.18.1.dist-info → metaflow-2.18.3.dist-info}/licenses/LICENSE +0 -0
- {metaflow-2.18.1.dist-info → metaflow-2.18.3.dist-info}/top_level.txt +0 -0
metaflow/client/core.py
CHANGED
@@ -303,7 +303,7 @@ class MetaflowObject(object):
         # distinguish between "attempt will happen" and "no such
         # attempt exists".
 
-        if pathspec:
+        if pathspec and _object is None:
             ids = pathspec.split("/")
 
             if self._NAME == "flow" and len(ids) != 1:
metaflow/metadata_provider/metadata.py
CHANGED
@@ -632,7 +632,12 @@ class MetadataProvider(object):
 
     def _get_git_info_as_dict(self):
         git_info = {}
-
+        # NOTE: For flows executing remotely, we want to read from the INFO file of the code package that contains
+        # information on the original environment that deployed the flow.
+        # Otherwise git related info will be missing, as the repository is not part of the codepackage.
+        from metaflow.packaging_sys import MetaflowCodeContent
+
+        env = MetaflowCodeContent.get_info() or self._environment.get_environment_info()
         for key in [
             "repo_url",
             "branch_name",
metaflow/metaflow_config.py
CHANGED
@@ -344,6 +344,8 @@ SFN_S3_DISTRIBUTED_MAP_OUTPUT_PATH = from_conf(
         else None
     ),
 )
+# Toggle for step command being part of the Step Function payload, or if it should be offloaded to S3
+SFN_COMPRESS_STATE_MACHINE = from_conf("SFN_COMPRESS_STATE_MACHINE", False)
 ###
 # Kubernetes configuration
 ###
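A minimal usage sketch (not part of this diff): like other settings resolved through from_conf, the toggle can be supplied via the environment before deploying. The METAFLOW_ prefix below is an assumption based on Metaflow's usual configuration resolution.

    # Hedged sketch: turn the new toggle on for the current process so that a
    # subsequent `step-functions create` run inherits it via from_conf().
    import os

    os.environ["METAFLOW_SFN_COMPRESS_STATE_MACHINE"] = "true"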
@@ -409,6 +411,9 @@ ARGO_EVENTS_INTERNAL_WEBHOOK_URL = from_conf(
     "ARGO_EVENTS_INTERNAL_WEBHOOK_URL", ARGO_EVENTS_WEBHOOK_URL
 )
 ARGO_EVENTS_WEBHOOK_AUTH = from_conf("ARGO_EVENTS_WEBHOOK_AUTH", "none")
+ARGO_EVENTS_SENSOR_NAMESPACE = from_conf(
+    "ARGO_EVENTS_SENSOR_NAMESPACE", KUBERNETES_NAMESPACE
+)
 
 ARGO_WORKFLOWS_UI_URL = from_conf("ARGO_WORKFLOWS_UI_URL")
 
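For context, a minimal sketch (not part of this diff) of the fallback this setting encodes: the sensor namespace defaults to the configured Kubernetes namespace when nothing more specific is set. The METAFLOW_-prefixed variable names are assumptions about how from_conf resolves them.

    # Hedged illustration of the default chain for the new setting.
    import os

    sensor_namespace = (
        os.environ.get("METAFLOW_ARGO_EVENTS_SENSOR_NAMESPACE")
        or os.environ.get("METAFLOW_KUBERNETES_NAMESPACE")
        or "default"
    )
    print(sensor_namespace)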
metaflow/plugins/argo/argo_client.py
CHANGED
@@ -1,5 +1,6 @@
 import json
 
+from metaflow.metaflow_config import ARGO_EVENTS_SENSOR_NAMESPACE
 from metaflow.exception import MetaflowException
 from metaflow.plugins.kubernetes.kubernetes_client import KubernetesClient
 
@@ -377,12 +378,15 @@ class ArgoClient(object):
                 json.loads(e.body)["message"] if e.body is not None else e.reason
             )
 
-    def register_sensor(
+    def register_sensor(
+        self, name, sensor=None, sensor_namespace=ARGO_EVENTS_SENSOR_NAMESPACE
+    ):
         if sensor is None:
             sensor = {}
         # Unfortunately, Kubernetes client does not handle optimistic
         # concurrency control by itself unlike kubectl
         client = self._client.get()
+
         if not sensor:
             sensor["metadata"] = {}
 
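A hedged caller-side sketch of the updated signatures (illustrative only; the namespace value and sensor name are invented, and the empty sensor body is a placeholder rather than a valid Sensor spec):

    from metaflow.plugins.argo.argo_client import ArgoClient

    client = ArgoClient(namespace="argo-events")  # assumed deployment namespace
    # register into an explicit sensor namespace...
    client.register_sensor("myflow-sensor", sensor={}, sensor_namespace="argo-events")
    # ...and delete from that same namespace later.
    client.delete_sensor("myflow-sensor", "argo-events")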
@@ -392,7 +396,7 @@ class ArgoClient(object):
             ] = client.CustomObjectsApi().get_namespaced_custom_object(
                 group=self._group,
                 version=self._version,
-                namespace=
+                namespace=sensor_namespace,
                 plural="sensors",
                 name=name,
             )[
@@ -407,7 +411,7 @@ class ArgoClient(object):
             return client.CustomObjectsApi().create_namespaced_custom_object(
                 group=self._group,
                 version=self._version,
-                namespace=
+                namespace=sensor_namespace,
                 plural="sensors",
                 body=sensor,
             )
@@ -425,7 +429,7 @@ class ArgoClient(object):
             return client.CustomObjectsApi().replace_namespaced_custom_object(
                 group=self._group,
                 version=self._version,
-                namespace=
+                namespace=sensor_namespace,
                 plural="sensors",
                 body=sensor,
                 name=name,
@@ -435,7 +439,7 @@ class ArgoClient(object):
                 json.loads(e.body)["message"] if e.body is not None else e.reason
             )
 
-    def delete_sensor(self, name):
+    def delete_sensor(self, name, sensor_namespace):
         """
         Issues an API call for deleting a sensor
 
@@ -447,7 +451,7 @@ class ArgoClient(object):
             return client.CustomObjectsApi().delete_namespaced_custom_object(
                 group=self._group,
                 version=self._version,
-                namespace=
+                namespace=sensor_namespace,
                 plural="sensors",
                 name=name,
             )
metaflow/plugins/argo/argo_workflows.py
CHANGED
@@ -19,6 +19,7 @@ from metaflow.metaflow_config import (
     ARGO_EVENTS_EVENT_BUS,
     ARGO_EVENTS_EVENT_SOURCE,
     ARGO_EVENTS_INTERNAL_WEBHOOK_URL,
+    ARGO_EVENTS_SENSOR_NAMESPACE,
     ARGO_EVENTS_SERVICE_ACCOUNT,
     ARGO_EVENTS_WEBHOOK_AUTH,
     ARGO_WORKFLOWS_CAPTURE_ERROR_SCRIPT,
@@ -73,6 +74,10 @@ class ArgoWorkflowsException(MetaflowException):
     headline = "Argo Workflows error"
 
 
+class ArgoWorkflowsSensorCleanupException(MetaflowException):
+    headline = "Argo Workflows sensor clean up error"
+
+
 class ArgoWorkflowsSchedulingException(MetaflowException):
     headline = "Argo Workflows scheduling error"
 
@@ -186,6 +191,7 @@ class ArgoWorkflows(object):
         return str(self._workflow_template)
 
     def deploy(self):
+        self.cleanup_previous_sensors()
         try:
             # Register workflow template.
             ArgoClient(namespace=KUBERNETES_NAMESPACE).register_workflow_template(
@@ -194,6 +200,37 @@ class ArgoWorkflows(object):
         except Exception as e:
             raise ArgoWorkflowsException(str(e))
 
+    def cleanup_previous_sensors(self):
+        try:
+            client = ArgoClient(namespace=KUBERNETES_NAMESPACE)
+            # Check for existing deployment and do cleanup
+            old_template = client.get_workflow_template(self.name)
+            if not old_template:
+                return None
+            # Clean up old sensors
+            old_sensor_namespace = old_template["metadata"]["annotations"].get(
+                "metaflow/sensor_namespace"
+            )
+
+            if old_sensor_namespace is None:
+                # This workflow was created before sensor annotations
+                # and may have a sensor in the default namespace
+                # we will delete it and it'll get recreated if need be
+                old_sensor_name = ArgoWorkflows._sensor_name(self.name)
+                client.delete_sensor(old_sensor_name, client._namespace)
+            else:
+                # delete old sensor only if it was somewhere else, otherwise it'll get replaced
+                old_sensor_name = old_template["metadata"]["annotations"][
+                    "metaflow/sensor_name"
+                ]
+                if (
+                    not self._sensor
+                    or old_sensor_namespace != ARGO_EVENTS_SENSOR_NAMESPACE
+                ):
+                    client.delete_sensor(old_sensor_name, old_sensor_namespace)
+        except Exception as e:
+            raise ArgoWorkflowsSensorCleanupException(str(e))
+
     @staticmethod
     def _sanitize(name):
         # Metaflow allows underscores in node names, which are disallowed in Argo
@@ -221,6 +258,17 @@ class ArgoWorkflows(object):
     def delete(name):
         client = ArgoClient(namespace=KUBERNETES_NAMESPACE)
 
+        workflow_template = client.get_workflow_template(name)
+        sensor_name = ArgoWorkflows._sensor_name(
+            workflow_template["metadata"]["annotations"].get(
+                "metaflow/sensor_name", name
+            )
+        )
+        # if below is missing then it was deployed before custom sensor namespaces
+        sensor_namespace = workflow_template["metadata"]["annotations"].get(
+            "metaflow/sensor_namespace", KUBERNETES_NAMESPACE
+        )
+
         # Always try to delete the schedule. Failure in deleting the schedule should not
         # be treated as an error, due to any of the following reasons
         # - there might not have been a schedule, or it was deleted by some other means
|
|
230
278
|
|
231
279
|
# The workflow might have sensors attached to it, which consume actual resources.
|
232
280
|
# Try to delete these as well.
|
233
|
-
sensor_deleted = client.delete_sensor(
|
281
|
+
sensor_deleted = client.delete_sensor(sensor_name, sensor_namespace)
|
234
282
|
|
235
283
|
# After cleaning up related resources, delete the workflow in question.
|
236
284
|
# Failure in deleting is treated as critical and will be made visible to the user
|
@@ -399,11 +447,10 @@ class ArgoWorkflows(object):
             # Metaflow will overwrite any existing sensor.
             sensor_name = ArgoWorkflows._sensor_name(self.name)
             if self._sensor:
-
-
-
-
-                argo_client.delete_sensor(sensor_name)
+                # The new sensor will go into the sensor namespace specified
+                ArgoClient(namespace=ARGO_EVENTS_SENSOR_NAMESPACE).register_sensor(
+                    sensor_name, self._sensor.to_json(), ARGO_EVENTS_SENSOR_NAMESPACE
+                )
         except Exception as e:
             raise ArgoWorkflowsSchedulingException(str(e))
 
@@ -730,6 +777,7 @@ class ArgoWorkflows(object):
         # references to them within the DAGTask.
 
         annotations = {}
+
         if self._schedule is not None:
             # timezone is an optional field and json dumps on None will result in null
             # hence configuring it to an empty string
|
|
752
800
|
{key: trigger.get(key) for key in ["name", "type"]}
|
753
801
|
for trigger in self.triggers
|
754
802
|
]
|
755
|
-
)
|
803
|
+
),
|
804
|
+
"metaflow/sensor_name": ArgoWorkflows._sensor_name(self.name),
|
805
|
+
"metaflow/sensor_namespace": ARGO_EVENTS_SENSOR_NAMESPACE,
|
756
806
|
}
|
757
807
|
)
|
758
808
|
if self.notify_on_error:
|
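For reference, a hedged illustration of the two annotations this hunk stamps onto the workflow template; the values below are invented examples, while the real ones come from _sensor_name() and ARGO_EVENTS_SENSOR_NAMESPACE. They are the same keys read back by cleanup_previous_sensors() and delete() above.

    # Invented example of the resulting template metadata, for orientation only.
    annotations = {
        "metaflow/sensor_name": "myflow.prod.mysensor",
        "metaflow/sensor_namespace": "argo-events",
    }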
@@ -931,7 +981,7 @@ class ArgoWorkflows(object):
         node_conditional_parents = {}
         node_conditional_branches = {}
 
-        def _visit(node,
+        def _visit(node, conditional_branch, conditional_parents=None):
             if not node.type == "split-switch" and not (
                 conditional_branch and conditional_parents
             ):
@@ -940,7 +990,10 @@ class ArgoWorkflows(object):
 
             if node.type == "split-switch":
                 conditional_branch = conditional_branch + [node.name]
-                node_conditional_branches
+                c_br = node_conditional_branches.get(node.name, [])
+                node_conditional_branches[node.name] = c_br + [
+                    b for b in conditional_branch if b not in c_br
+                ]
 
             conditional_parents = (
                 [node.name]
@@ -958,21 +1011,36 @@ class ArgoWorkflows(object):
             if conditional_parents and not node.type == "split-switch":
                 node_conditional_parents[node.name] = conditional_parents
                 conditional_branch = conditional_branch + [node.name]
-                node_conditional_branches
+                c_br = node_conditional_branches.get(node.name, [])
+                node_conditional_branches[node.name] = c_br + [
+                    b for b in conditional_branch if b not in c_br
+                ]
 
                 self.conditional_nodes.add(node.name)
 
             if conditional_branch and conditional_parents:
                 for n in node.out_funcs:
                     child = self.graph[n]
-                    if
-
-
-                    )
+                    if child.name == node.name:
+                        continue
+                    _visit(child, conditional_branch, conditional_parents)
 
         # First we visit all nodes to determine conditional parents and branches
         for n in self.graph:
-            _visit(n, []
+            _visit(n, [])
+
+        # helper to clean up conditional info for all children of a node, until a new split-switch is encountered.
+        def _cleanup_conditional_status(node_name, seen):
+            if self.graph[node_name].type == "split-switch":
+                # stop recursive cleanup if we hit a new split-switch
+                return
+            if node_name in self.conditional_nodes:
+                self.conditional_nodes.remove(node_name)
+            node_conditional_parents[node_name] = []
+            node_conditional_branches[node_name] = []
+            for p in self.graph[node_name].out_funcs:
+                if p not in seen:
+                    _cleanup_conditional_status(p, seen + [p])
 
         # Then we traverse again in order to determine conditional join nodes, and matching conditional join info
         for node in self.graph:
@@ -1005,14 +1073,44 @@ class ArgoWorkflows(object):
                     last_conditional_split_nodes = self.graph[
                         last_split_switch
                     ].out_funcs
-                    #
-                    if all
-
-
-
+                    # NOTE: How do we define a conditional join step?
+                    # The idea here is that we check if the conditional branches(e.g. chains of conditional steps leading to) of all the in_funcs
+                    # manage to tick off every step name that follows a split-switch
+                    # For example, consider the following structure
+                    # switch_step -> A, B, C
+                    # A -> A2 -> A3 -> A4 -> B2
+                    # B -> B2 -> B3 -> C3
+                    # C -> C2 -> C3 -> end
+                    #
+                    # if we look at the in_funcs for C3, they are (C2, B3)
+                    # B3 closes off branches started by A and B
+                    # C3 closes off branches started by C
+                    # therefore C3 is a conditional join step for the 'switch_step'
+                    # NOTE: Then what about a skip step?
+                    # some switch cases might not introduce any distinct steps of their own, opting to instead skip ahead to a later common step.
+                    # Example:
+                    # switch_step -> A, B, C
+                    # A -> A1 -> B2 -> C
+                    # B -> B1 -> B2 -> C
+                    #
+                    # In this case, C is a skip step as it does not add any conditional branching of its own.
+                    # C is also a conditional join, as it closes all branches started by 'switch_step'
+
+                    closes_branches = all(
+                        (
+                            # branch_root_node_name needs to be in at least one conditional_branch for it to be closed.
+                            any(
+                                branch_root_node_name
+                                in node_conditional_branches.get(in_func, [])
+                                for in_func in conditional_in_funcs
+                            )
+                            # need to account for a switch case skipping completely, not having a conditional-branch of its own.
+                            if branch_root_node_name != node.name
+                            else True
                         )
-                    for
-                    )
+                        for branch_root_node_name in last_conditional_split_nodes
+                    )
+                    if closes_branches:
                         closed_conditional_parents.append(last_split_switch)
 
                         self.conditional_join_nodes.add(node.name)
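A standalone recreation (invented data, not flow code) of the closure test described in the NOTE above, using the first example graph from the comments; the per-root skip guard is omitted for brevity:

    # in_funcs of C3 and the conditional branches recorded for them
    node_conditional_branches = {
        "C2": ["C", "C2"],
        "B3": ["A", "A2", "A3", "A4", "B", "B2", "B3"],
    }
    conditional_in_funcs = ["C2", "B3"]
    last_conditional_split_nodes = ["A", "B", "C"]

    closes_branches = all(
        any(root in node_conditional_branches.get(f, []) for f in conditional_in_funcs)
        for root in last_conditional_split_nodes
    )
    print(closes_branches)  # True -> C3 is a conditional join for switch_step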
@@ -1026,25 +1124,45 @@ class ArgoWorkflows(object):
                 for p in node_conditional_parents.get(node.name, [])
                 if p not in closed_conditional_parents
             ]:
-
-                self.conditional_nodes.remove(node.name)
-                node_conditional_parents[node.name] = []
-                for p in node.out_funcs:
-                    if p in self.conditional_nodes:
-                        self.conditional_nodes.remove(p)
-                        node_conditional_parents[p] = []
+                _cleanup_conditional_status(node.name, [])
 
     def _is_conditional_node(self, node):
         return node.name in self.conditional_nodes
 
+    def _is_conditional_skip_node(self, node):
+        return (
+            self._is_conditional_node(node)
+            and any(
+                self.graph[in_func].type == "split-switch" for in_func in node.in_funcs
+            )
+            and len(
+                [
+                    in_func
+                    for in_func in node.in_funcs
+                    if self._is_conditional_node(self.graph[in_func])
+                    or self.graph[in_func].type == "split-switch"
+                ]
+            )
+            > 1
+        )
+
     def _is_conditional_join_node(self, node):
         return node.name in self.conditional_join_nodes
 
+    def _many_in_funcs_all_conditional(self, node):
+        cond_in_funcs = [
+            in_func
+            for in_func in node.in_funcs
+            if self._is_conditional_node(self.graph[in_func])
+        ]
+        return len(cond_in_funcs) > 1 and len(cond_in_funcs) == len(node.in_funcs)
+
     def _is_recursive_node(self, node):
         return node.name in self.recursive_nodes
 
     def _matching_conditional_join(self, node):
-
+        # If no earlier conditional join step is found during parsing, then 'end' is always one.
+        return self.matching_conditional_join_dict.get(node.name, "end")
 
     # Visit every node and yield the uber DAGTemplate(s).
     def _dag_templates(self):
@@ -1224,12 +1342,24 @@ class ArgoWorkflows(object):
                     "%s.Succeeded" % self._sanitize(in_func)
                     for in_func in node.in_funcs
                     if self._is_conditional_node(self.graph[in_func])
+                    or self.graph[in_func].type == "split-switch"
                 ]
                 required_deps = [
                     "%s.Succeeded" % self._sanitize(in_func)
                     for in_func in node.in_funcs
                     if not self._is_conditional_node(self.graph[in_func])
+                    and self.graph[in_func].type != "split-switch"
                 ]
+                if self._is_conditional_skip_node(
+                    node
+                ) or self._many_in_funcs_all_conditional(node):
+                    # skip nodes need unique condition handling
+                    conditional_deps = [
+                        "%s.Succeeded" % self._sanitize(in_func)
+                        for in_func in node.in_funcs
+                    ]
+                    required_deps = []
 
                 both_conditions = required_deps and conditional_deps
 
                 depends_str = "{required}{_and}{conditional}".format(
@@ -1247,16 +1377,46 @@ class ArgoWorkflows(object):
                 )
 
                 # Add conditional if this is the first step in a conditional branch
+                switch_in_funcs = [
+                    in_func
+                    for in_func in node.in_funcs
+                    if self.graph[in_func].type == "split-switch"
+                ]
                 if (
                     self._is_conditional_node(node)
-
-
-
-
-
-
+                    or self._is_conditional_skip_node(node)
+                    or self._is_conditional_join_node(node)
+                ) and switch_in_funcs:
+                    conditional_when = "||".join(
+                        [
+                            "{{tasks.%s.outputs.parameters.switch-step}}==%s"
+                            % (self._sanitize(switch_in_func), node.name)
+                            for switch_in_func in switch_in_funcs
+                        ]
                     )
 
+                    non_switch_in_funcs = [
+                        in_func
+                        for in_func in node.in_funcs
+                        if in_func not in switch_in_funcs
+                    ]
+                    status_when = ""
+                    if non_switch_in_funcs:
+                        status_when = "||".join(
+                            [
+                                "{{tasks.%s.status}}==Succeeded"
+                                % self._sanitize(in_func)
+                                for in_func in non_switch_in_funcs
+                            ]
+                        )
+
+                    total_when = (
+                        f"({status_when}) || ({conditional_when})"
+                        if status_when
+                        else conditional_when
+                    )
+                    dag_task.when(total_when)
+
                 dag_tasks.append(dag_task)
                 # End the workflow if we have reached the end of the flow
                 if node.type == "end":
@@ -1699,7 +1859,11 @@ class ArgoWorkflows(object):
             input_paths_expr = (
                 "export INPUT_PATHS={{inputs.parameters.input-paths}}"
             )
-            if
+            if (
+                self._is_conditional_join_node(node)
+                or self._many_in_funcs_all_conditional(node)
+                or self._is_conditional_skip_node(node)
+            ):
                 # NOTE: Argo template expressions that fail to resolve, output the expression itself as a value.
                 # With conditional steps, some of the input-paths are therefore 'broken' due to containing a nil expression
                 # e.g. "{{ tasks['A'].outputs.parameters.task-id }}" when task A never executed.
@@ -1879,20 +2043,33 @@ class ArgoWorkflows(object):
             )
             input_paths = "%s/_parameters/%s" % (run_id, task_id_params)
         # Only for static joins and conditional_joins
-        elif
+        elif (
+            self._is_conditional_join_node(node)
+            or self._many_in_funcs_all_conditional(node)
+            or self._is_conditional_skip_node(node)
+        ) and not (
             node.type == "join"
             and self.graph[node.split_parents[-1]].type == "foreach"
         ):
+            # we need to pass in the set of conditional in_funcs to the pathspec generating script as in the case of split-switch skipping cases,
+            # non-conditional input-paths need to be ignored in favour of conditional ones when they have executed.
+            skippable_input_steps = ",".join(
+                [
+                    in_func
+                    for in_func in node.in_funcs
+                    if self.graph[in_func].type == "split-switch"
+                ]
+            )
             input_paths = (
-                "$(python -m metaflow.plugins.argo.conditional_input_paths %s)"
-                % input_paths
+                "$(python -m metaflow.plugins.argo.conditional_input_paths %s %s)"
+                % (input_paths, skippable_input_steps)
             )
         elif (
             node.type == "join"
             and self.graph[node.split_parents[-1]].type == "foreach"
         ):
            # foreach-joins straight out of conditional branches are not yet supported
-            if self._is_conditional_join_node(node):
+            if self._is_conditional_join_node(node) and len(node.in_funcs) > 1:
                raise ArgoWorkflowsException(
                    "Conditional steps inside a foreach that transition directly into a join step are not currently supported.\n"
                    "As a workaround, add a common step after the conditional steps %s "
@@ -3521,7 +3698,7 @@ class ArgoWorkflows(object):
             # Sensor metadata.
             ObjectMeta()
             .name(ArgoWorkflows._sensor_name(self.name))
-            .namespace(
+            .namespace(ARGO_EVENTS_SENSOR_NAMESPACE)
             .labels(self._base_labels)
             .label("app.kubernetes.io/name", "metaflow-sensor")
             .annotations(self._base_annotations)
metaflow/plugins/argo/conditional_input_paths.py
CHANGED
@@ -4,7 +4,7 @@ from metaflow.util import decompress_list, compress_list
 import base64
 
 
-def generate_input_paths(input_paths):
+def generate_input_paths(input_paths, skippable_steps):
     # => run_id/step/:foo,bar
     # input_paths are base64 encoded due to Argo shenanigans
     decoded = base64.b64decode(input_paths).decode("utf-8")
@@ -13,9 +13,23 @@ def generate_input_paths(input_paths):
     # some of the paths are going to be malformed due to never having executed per conditional.
     # strip these out of the list.
 
+    # all pathspecs of leading steps that executed.
     trimmed = [path for path in paths if not "{{" in path]
-
+
+    # pathspecs of leading steps that are conditional, and should be used instead of non-conditional ones
+    # e.g. the case of skipping switches: start -> case_step -> conditional_a or end
+    conditionals = [
+        path for path in trimmed if not any(step in path for step in skippable_steps)
+    ]
+    pathspecs_to_use = conditionals if conditionals else trimmed
+    return compress_list(pathspecs_to_use, zlibmin=inf)
 
 
 if __name__ == "__main__":
-
+    input_paths = sys.argv[1]
+    try:
+        skippable_steps = sys.argv[2].split(",")
+    except IndexError:
+        skippable_steps = []
+
+    print(generate_input_paths(input_paths, skippable_steps))
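A self-contained sketch of the filtering rule added above, using invented pathspecs (no Metaflow imports; the real helper additionally decodes and re-compresses the list):

    # Upstream pathspecs: a skippable split-switch parent, a conditional branch
    # that ran, and a branch that never executed (unresolved Argo expression).
    paths = [
        "argo-myflow-abc/case_step/t-100",
        "argo-myflow-abc/conditional_a/t-101",
        "{{tasks.conditional-b.outputs.parameters.task-id}}",
    ]
    skippable_steps = ["case_step"]

    trimmed = [p for p in paths if "{{" not in p]
    conditionals = [p for p in trimmed if not any(s in p for s in skippable_steps)]
    print(conditionals if conditionals else trimmed)
    # -> ['argo-myflow-abc/conditional_a/t-101']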
metaflow/plugins/aws/batch/batch.py
CHANGED
@@ -53,9 +53,10 @@ class BatchKilledException(MetaflowException):
 
 
 class Batch(object):
-    def __init__(self, metadata, environment):
+    def __init__(self, metadata, environment, flow_datastore=None):
         self.metadata = metadata
         self.environment = environment
+        self.flow_datastore = flow_datastore
         self._client = BatchClient()
         atexit.register(lambda: self.job.kill() if hasattr(self, "job") else None)
 
@@ -67,6 +68,7 @@ class Batch(object):
         step_name,
         step_cmds,
         task_spec,
+        offload_command_to_s3,
     ):
         mflog_expr = export_mflog_env_vars(
             datastore_type="s3",
@@ -104,7 +106,43 @@ class Batch(object):
         # We lose the last logs in this scenario (although they are visible
         # still through AWS CloudWatch console).
         cmd_str += "c=$?; %s; exit $c" % BASH_SAVE_LOGS
-
+        command = shlex.split('bash -c "%s"' % cmd_str)
+
+        if not offload_command_to_s3:
+            return command
+
+        # If S3 upload is enabled, we need to modify the command after it's created
+        if self.flow_datastore is None:
+            raise MetaflowException(
+                "Can not offload Batch command to S3 without a datastore configured."
+            )
+
+        from metaflow.plugins.aws.aws_utils import parse_s3_full_path
+
+        # Get the command that was created
+        # Upload the command to S3 during deployment
+        try:
+            command_bytes = cmd_str.encode("utf-8")
+            result_paths = self.flow_datastore.save_data([command_bytes], len_hint=1)
+            s3_path, _key = result_paths[0]
+
+            bucket, s3_object = parse_s3_full_path(s3_path)
+            download_script = "{python} -c '{script}'".format(
+                python=self.environment._python(),
+                script='import boto3, os; ep=os.getenv(\\"METAFLOW_S3_ENDPOINT_URL\\"); boto3.client(\\"s3\\", **({\\"endpoint_url\\":ep} if ep else {})).download_file(\\"%s\\", \\"%s\\", \\"/tmp/step_command.sh\\")'
+                % (bucket, s3_object),
+            )
+            download_cmd = (
+                f"{self.environment._get_install_dependencies_cmd('s3')} && "  # required for boto3 due to the original dependencies cmd getting packaged, and not being downloaded in time.
+                f"{download_script} && "
+                f"chmod +x /tmp/step_command.sh && "
+                f"bash /tmp/step_command.sh"
+            )
+            new_cmd = shlex.split('bash -c "%s"' % download_cmd)
+            return new_cmd
+        except Exception as e:
+            print(f"Warning: Failed to upload command to S3: {e}")
+            print("Falling back to inline command")
 
     def _search_jobs(self, flow_name, run_id, user):
         if user is None:
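The offload path saves the full step command to the datastore and swaps the container command for a short bootstrap that downloads and runs it. A self-contained sketch of that idea (bucket, key and the helper below are invented; the shipped code uses parse_s3_full_path from metaflow.plugins.aws.aws_utils):

    def split_s3_uri(s3_uri):
        # "s3://bucket/key/parts" -> ("bucket", "key/parts"); an assumption for this sketch
        bucket, _, key = s3_uri[len("s3://"):].partition("/")
        return bucket, key

    bucket, key = split_s3_uri("s3://example-bucket/runs/42/step_command.sh")
    download_script = (
        "python -c 'import boto3; boto3.client(\"s3\")"
        ".download_file(\"%s\", \"%s\", \"/tmp/step_command.sh\")'" % (bucket, key)
    )
    bootstrap = "%s && bash /tmp/step_command.sh" % download_script
    print(bootstrap)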
@@ -207,6 +245,7 @@ class Batch(object):
         ephemeral_storage=None,
         log_driver=None,
         log_options=None,
+        offload_command_to_s3=False,
     ):
         job_name = self._job_name(
             attrs.get("metaflow.user"),
@@ -228,6 +267,7 @@ class Batch(object):
                     step_name,
                     [step_cli],
                     task_spec,
+                    offload_command_to_s3,
                 )
             )
             .image(image)
metaflow/plugins/aws/step_functions/step_functions.py
CHANGED
@@ -57,6 +57,7 @@ class StepFunctions(object):
         workflow_timeout=None,
         is_project=False,
         use_distributed_map=False,
+        compress_state_machine=False,
     ):
         self.name = name
         self.graph = graph
@@ -81,6 +82,9 @@ class StepFunctions(object):
         # https://aws.amazon.com/blogs/aws/step-functions-distributed-map-a-serverless-solution-for-large-scale-parallel-data-processing/
         self.use_distributed_map = use_distributed_map
 
+        # S3 command upload configuration
+        self.compress_state_machine = compress_state_machine
+
         self._client = StepFunctionsClient()
         self._workflow = self._compile()
         self._cron = self._cron()
@@ -858,7 +862,7 @@ class StepFunctions(object):
         # merge batch tags supplied through step-fuctions CLI and ones defined in decorator
         batch_tags = {**self.aws_batch_tags, **resources["aws_batch_tags"]}
         return (
-            Batch(self.metadata, self.environment)
+            Batch(self.metadata, self.environment, self.flow_datastore)
             .create_job(
                 step_name=node.name,
                 step_cli=self._step_cli(
@@ -894,6 +898,7 @@ class StepFunctions(object):
                 ephemeral_storage=resources["ephemeral_storage"],
                 log_driver=resources["log_driver"],
                 log_options=resources["log_options"],
+                offload_command_to_s3=self.compress_state_machine,
             )
             .attempts(total_retries + 1)
         )
metaflow/plugins/aws/step_functions/step_functions_cli.py
CHANGED
@@ -10,6 +10,7 @@ from metaflow.metaflow_config import (
     FEAT_ALWAYS_UPLOAD_CODE_PACKAGE,
     SERVICE_VERSION_CHECK,
     SFN_STATE_MACHINE_PREFIX,
+    SFN_COMPRESS_STATE_MACHINE,
     UI_URL,
 )
 from metaflow.package import MetaflowPackage
@@ -140,6 +141,12 @@ def step_functions(obj, name=None):
     help="Use AWS Step Functions Distributed Map instead of Inline Map for "
     "defining foreach tasks in Amazon State Language.",
 )
+@click.option(
+    "--compress-state-machine/--no-compress-state-machine",
+    is_flag=True,
+    default=SFN_COMPRESS_STATE_MACHINE,
+    help="Compress AWS Step Functions state machine to fit within the 8K limit.",
+)
 @click.option(
     "--deployer-attribute-file",
     default=None,
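Usage sketch (the flow file name is invented, not part of this diff): the new flag can be passed straight to the deploy command.

    # Hedged example: invoke `step-functions create` with the new flag from Python.
    import subprocess

    subprocess.run(
        ["python", "myflow.py", "step-functions", "create", "--compress-state-machine"],
        check=True,
    )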
@@ -162,6 +169,7 @@ def create(
     workflow_timeout=None,
     log_execution_history=False,
     use_distributed_map=False,
+    compress_state_machine=False,
     deployer_attribute_file=None,
 ):
     for node in obj.graph:
@@ -212,6 +220,7 @@ def create(
         workflow_timeout,
         obj.is_project,
         use_distributed_map,
+        compress_state_machine,
     )
 
     if only_json:
@@ -332,6 +341,7 @@ def make_flow(
     workflow_timeout,
     is_project,
     use_distributed_map,
+    compress_state_machine=False,
 ):
     if obj.flow_datastore.TYPE != "s3":
         raise MetaflowException("AWS Step Functions requires --datastore=s3.")
@@ -390,6 +400,7 @@ def make_flow(
         workflow_timeout=workflow_timeout,
         is_project=is_project,
         use_distributed_map=use_distributed_map,
+        compress_state_machine=compress_state_machine,
     )
 
 
metaflow/plugins/aws/step_functions/step_functions_deployer.py
CHANGED
@@ -76,6 +76,9 @@ class StepFunctionsDeployer(DeployerImpl):
     use_distributed_map : bool, optional, default False
         Use AWS Step Functions Distributed Map instead of Inline Map for defining foreach
         tasks in Amazon State Language.
+    compress_state_machine : bool, optional, default False
+        Compress AWS Step Functions state machine to fit within the 8K limit.
+
     deployer_attribute_file : str, optional, default None
         Write the workflow name to the specified file. Used internally for Metaflow's Deployer API.
 
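A hedged sketch of the same option through the Deployer API; it assumes the keyword is forwarded to `step-functions create` the same way use_distributed_map is, which is what the docstring addition above suggests.

    from metaflow import Deployer

    deployed = (
        Deployer("myflow.py")  # invented flow file
        .step_functions()
        .create(compress_state_machine=True)
    )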
metaflow/version.py
CHANGED
@@ -1 +1 @@
-metaflow_version = "2.18.1"
+metaflow_version = "2.18.3"
{metaflow-2.18.1.dist-info → metaflow-2.18.3.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: metaflow
-Version: 2.18.1
+Version: 2.18.3
 Summary: Metaflow: More AI and ML, Less Engineering
 Author: Metaflow Developers
 Author-email: help@metaflow.org
@@ -26,7 +26,7 @@ License-File: LICENSE
 Requires-Dist: requests
 Requires-Dist: boto3
 Provides-Extra: stubs
-Requires-Dist: metaflow-stubs==2.18.1; extra == "stubs"
+Requires-Dist: metaflow-stubs==2.18.3; extra == "stubs"
 Dynamic: author
 Dynamic: author-email
 Dynamic: classifier
{metaflow-2.18.1.dist-info → metaflow-2.18.3.dist-info}/RECORD
CHANGED
@@ -16,7 +16,7 @@ metaflow/includefile.py,sha256=RtISGl1V48qjkJBakUZ9yPpHV102h7pOIFiKP8PLHpc,20927
 metaflow/integrations.py,sha256=LlsaoePRg03DjENnmLxZDYto3NwWc9z_PtU6nJxLldg,1480
 metaflow/lint.py,sha256=A2NdUq_MnQal_RUCMC8ZOSR0VYZGyi2mSgwPQB0UzQo,15343
 metaflow/meta_files.py,sha256=vlgJHI8GJUKzXoxdrVoH8yyCF5bhFgwYemUgnyd1wgM,342
-metaflow/metaflow_config.py,sha256=
+metaflow/metaflow_config.py,sha256=Abn_4SiqpdrmMOSGYlFnEUUzVJqwfTFpvYwXlpXwcMw,24551
 metaflow/metaflow_config_funcs.py,sha256=5GlvoafV6SxykwfL8D12WXSfwjBN_NsyuKE_Q3gjGVE,6738
 metaflow/metaflow_current.py,sha256=pfkXmkyHeMJhxIs6HBJNBEaBDpcl5kz9Wx5mW6F_3qo,7164
 metaflow/metaflow_environment.py,sha256=20PIhA5R_rJneNj8f8UaWRmznGRPcEd6hP7goj_rc1s,11477
@@ -36,7 +36,7 @@ metaflow/tuple_util.py,sha256=_G5YIEhuugwJ_f6rrZoelMFak3DqAR2tt_5CapS1XTY,830
 metaflow/unbounded_foreach.py,sha256=p184WMbrMJ3xKYHwewj27ZhRUsSj_kw1jlye5gA9xJk,387
 metaflow/util.py,sha256=g2SOU_CRzJLgDM_UGF9QDMANMAIHAsDRXE6S76_YzsY,14594
 metaflow/vendor.py,sha256=A82CGHfStZGDP5pQ5XzRjFkbN1ZC-vFmghXIrzMDDNg,5868
-metaflow/version.py,sha256=
+metaflow/version.py,sha256=tMCDMFSk5MlOlJbUQrrKKLwLAp53oxtJaV317s14sEM,28
 metaflow/_vendor/__init__.py,sha256=y_CiwUD3l4eAKvTVDZeqgVujMy31cAM1qjAB-HfI-9s,353
 metaflow/_vendor/typing_extensions.py,sha256=q9zxWa6p6CzF1zZvSkygSlklduHf_b3K7MCxGz7MJRc,134519
 metaflow/_vendor/zipp.py,sha256=ajztOH-9I7KA_4wqDYygtHa6xUBVZgFpmZ8FE74HHHI,8425
@@ -157,7 +157,7 @@ metaflow/cli_components/run_cmds.py,sha256=_xk2asy3SkqsJfZVhbgYSJ2rkkJe7cvLik6b0
 metaflow/cli_components/step_cmd.py,sha256=zGJgTv7wxrv34nWDi__CHaC2eS6kItR95EdVGJX803w,4766
 metaflow/cli_components/utils.py,sha256=gpoDociadjnJD7MuiJup_MDR02ZJjjleejr0jPBu29c,6057
 metaflow/client/__init__.py,sha256=1GtQB4Y_CBkzaxg32L1syNQSlfj762wmLrfrDxGi1b8,226
-metaflow/client/core.py,sha256=
+metaflow/client/core.py,sha256=6h4oohNXZkP2QC0M4dKdrwz5Zx5s6j1_DIWB8VYmbN0,83734
 metaflow/client/filecache.py,sha256=Wy0yhhCqC1JZgebqi7z52GCwXYnkAqMZHTtxThvwBgM,15229
 metaflow/cmd/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
 metaflow/cmd/configure_cmd.py,sha256=o-DKnUf2FBo_HiMVyoyzQaGBSMtpbEPEdFTQZ0hkU-k,33396
@@ -184,7 +184,7 @@ metaflow/extension_support/integrations.py,sha256=AWAh-AZ-vo9IxuAVEjGw3s8p_NMm2D
 metaflow/extension_support/plugins.py,sha256=gl7NbIJLJyLTb5LELsj1D9paQip6t6Lqz6Rhmvqvyrw,11286
 metaflow/metadata_provider/__init__.py,sha256=FZNSnz26VB_m18DQG8mup6-Gfl7r1U6lRMljJBp3VAM,64
 metaflow/metadata_provider/heartbeat.py,sha256=42mQo6wOHdFuaCh426uV6Kn8swe7e5I3gqA_G7cI_LA,3127
-metaflow/metadata_provider/metadata.py,sha256=
+metaflow/metadata_provider/metadata.py,sha256=XTt8EBLF7r5SJtnWa54ZcP53v4xS9Q0rTsK0XK880Ss,28808
 metaflow/metadata_provider/util.py,sha256=lYoQKbqoTM1iZChgyVWN-gX-HyM9tt9bXEMJexY9XmM,1723
 metaflow/mflog/__init__.py,sha256=TkR9ny_JYvNCWJTdLiHsbLSLc9cUvzAzpDuHLdG8nkA,6020
 metaflow/mflog/mflog.py,sha256=VebXxqitOtNAs7VJixnNfziO_i_urG7bsJ5JiB5IXgY,4370
@@ -228,15 +228,15 @@ metaflow/plugins/airflow/sensors/base_sensor.py,sha256=s-OQBfPWZ_T3wn96Ua59CCEj1
 metaflow/plugins/airflow/sensors/external_task_sensor.py,sha256=zhYlrZnXT20KW8-fVk0fCNtTyNiKJB5PMVASacu30r0,6034
 metaflow/plugins/airflow/sensors/s3_sensor.py,sha256=iDReG-7FKnumrtQg-HY6cCUAAqNA90nARrjjjEEk_x4,3275
 metaflow/plugins/argo/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-metaflow/plugins/argo/argo_client.py,sha256=
+metaflow/plugins/argo/argo_client.py,sha256=oT4ZrCyE7CYEbqNN0SfoZfSHd5fYW9XtuOrQEiUd1co,17230
 metaflow/plugins/argo/argo_events.py,sha256=_C1KWztVqgi3zuH57pInaE9OzABc2NnncC-zdwOMZ-w,5909
-metaflow/plugins/argo/argo_workflows.py,sha256=
+metaflow/plugins/argo/argo_workflows.py,sha256=t132jaRyVWRLNM8IF0wupyp0cW-_6VDO-woIPrWG_B4,215964
 metaflow/plugins/argo/argo_workflows_cli.py,sha256=-blfZp-kAS8oWFTarfou9gRyE4QCnnJwa-0g8QuE0zk,52280
 metaflow/plugins/argo/argo_workflows_decorator.py,sha256=CLSjPqFTGucZ2_dSQGAYkoWWUZBQ9TCBXul4rxhDj3w,8282
 metaflow/plugins/argo/argo_workflows_deployer.py,sha256=6kHxEnYXJwzNCM9swI8-0AckxtPWqwhZLerYkX8fxUM,4444
 metaflow/plugins/argo/argo_workflows_deployer_objects.py,sha256=ydBE-lP42eNKvep36nQdUBPS3rQQErvoA7rCgyp5M6I,14949
 metaflow/plugins/argo/capture_error.py,sha256=Ys9dscGrTpW-ZCirLBU0gD9qBM0BjxyxGlUMKcwewQc,1852
-metaflow/plugins/argo/conditional_input_paths.py,sha256=
+metaflow/plugins/argo/conditional_input_paths.py,sha256=Vtca74XbhnqAXgJJXKasLEa28jZbKBZPC5w4NAIOURc,1251
 metaflow/plugins/argo/exit_hooks.py,sha256=nh8IEkzAtQnbKVnh3N9CVnVKZB39Bjm3e0LFrACsLz8,6109
 metaflow/plugins/argo/generate_input_paths.py,sha256=loYsI6RFX9LlFsHb7Fe-mzlTTtRdySoOu7sYDy-uXK0,881
 metaflow/plugins/argo/jobset_input_paths.py,sha256=-h0E_e0w6FMiBUod9Rf_XOSCtZv_C0exacw4q1SfIfg,501
@@ -244,7 +244,7 @@ metaflow/plugins/aws/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hS
 metaflow/plugins/aws/aws_client.py,sha256=BTiLMXa1agjja-N73oWinaOZHs-lGPbfKJG8CqdRgaU,4287
 metaflow/plugins/aws/aws_utils.py,sha256=5mGZLu6wwTo2KzIke_MFqiomM3sYjAkU7Fx55dIMLfg,8561
 metaflow/plugins/aws/batch/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-metaflow/plugins/aws/batch/batch.py,sha256=
+metaflow/plugins/aws/batch/batch.py,sha256=ugQ4YOjEs_PS5VtilMF0u1AArAZJTZPHCrCoVHPSqHc,20395
 metaflow/plugins/aws/batch/batch_cli.py,sha256=yZAy6WqZPsCqxjMnRhnTellCjLo27tD_OcoBLHezH8E,12508
 metaflow/plugins/aws/batch/batch_client.py,sha256=J50RMEXeEXFe5RqNUM1HN22BuDQFYFVQ4FSMOK55VWY,28798
 metaflow/plugins/aws/batch/batch_decorator.py,sha256=apKBywlseCGxoKxpLLYQSjK3rz8QEJom4ekKf6OvzNU,20104
@@ -256,11 +256,11 @@ metaflow/plugins/aws/step_functions/event_bridge_client.py,sha256=U9-tqKdih4KR-Z
 metaflow/plugins/aws/step_functions/production_token.py,sha256=rREx9djJzKYDiGhPCZ919pSpfrBCYuhSL5WlwnAojNM,1890
 metaflow/plugins/aws/step_functions/schedule_decorator.py,sha256=Ab1rW8O_no4HNZm4__iBmFDCDW0Z8-TgK4lnxHHA6HI,1940
 metaflow/plugins/aws/step_functions/set_batch_environment.py,sha256=ibiGWFHDjKcLfprH3OsX-g2M9lUsh6J-bp7v2cdLhD4,1294
-metaflow/plugins/aws/step_functions/step_functions.py,sha256=
-metaflow/plugins/aws/step_functions/step_functions_cli.py,sha256=
+metaflow/plugins/aws/step_functions/step_functions.py,sha256=jj21WBIvsNULnIjBDFwbn6h__GPRF1sxH6X-DjLGCY4,54340
+metaflow/plugins/aws/step_functions/step_functions_cli.py,sha256=dgCA3RbRfYf48wqzM_JJYiIH-eqcMjAXGb_jdXCfsp4,27556
 metaflow/plugins/aws/step_functions/step_functions_client.py,sha256=DKpNwAIWElvWjFANs5Ku3rgzjxFoqAD6k-EF8Xhkg3Q,4754
 metaflow/plugins/aws/step_functions/step_functions_decorator.py,sha256=jzDHYmgU_XvLffZDazR_1viow_1qQFblx9UKyjtoM_0,3788
-metaflow/plugins/aws/step_functions/step_functions_deployer.py,sha256=
+metaflow/plugins/aws/step_functions/step_functions_deployer.py,sha256=MOQ6H42szNHJw0ii3RJm7eyVGCmHL2j8kuZp7AuoiA8,3860
 metaflow/plugins/aws/step_functions/step_functions_deployer_objects.py,sha256=n7AEPs3uULXEuG3TVf2ZlTNq1LFd2n7x1IPVO2T5Ekk,8174
 metaflow/plugins/azure/__init__.py,sha256=GuuhTVC-zSdyAf79a1wiERMq0Zts7fwVT7t9fAf234A,100
 metaflow/plugins/azure/azure_credential.py,sha256=JmdGEbVzgxy8ucqnQDdTTI_atyMX9WSZUw3qYOo7RhE,2174
@@ -428,12 +428,12 @@ metaflow/user_decorators/mutable_flow.py,sha256=EywKTN3cnXPQF_s62wQaC4a4aH14j8oe
 metaflow/user_decorators/mutable_step.py,sha256=-BY0UDXf_RCAEnC5JlLzEXGdiw1KD9oSrSxS_SWaB9Y,16791
 metaflow/user_decorators/user_flow_decorator.py,sha256=2yDwZq9QGv9W-7kEuKwa8o4ZkTvuHJ5ESz7VVrGViAI,9890
 metaflow/user_decorators/user_step_decorator.py,sha256=4558NR8RJtN22OyTwCXO80bAMhMTaRGMoX12b1GMcPc,27232
-metaflow-2.18.
-metaflow-2.18.
-metaflow-2.18.
-metaflow-2.18.
-metaflow-2.18.
-metaflow-2.18.
-metaflow-2.18.
-metaflow-2.18.
-metaflow-2.18.
+metaflow-2.18.3.data/data/share/metaflow/devtools/Makefile,sha256=TT4TCq8ALSfqYyGqDPocN5oPcZe2FqoCZxmGO1LmyCc,13760
+metaflow-2.18.3.data/data/share/metaflow/devtools/Tiltfile,sha256=Ty5p6AD3MwJAcAnOGv4yMz8fExAsnNQ11r8whK6uzzw,21381
+metaflow-2.18.3.data/data/share/metaflow/devtools/pick_services.sh,sha256=DCnrMXwtApfx3B4S-YiZESMyAFHbXa3VuNL0MxPLyiE,2196
+metaflow-2.18.3.dist-info/licenses/LICENSE,sha256=nl_Lt5v9VvJ-5lWJDT4ddKAG-VZ-2IaLmbzpgYDz2hU,11343
+metaflow-2.18.3.dist-info/METADATA,sha256=yH6g8ewcnbbincQ9dCZO6YAifTpoA8DzJyIyEbq1Ojw,6740
+metaflow-2.18.3.dist-info/WHEEL,sha256=JNWh1Fm1UdwIQV075glCn4MVuCRs0sotJIq-J6rbxCU,109
+metaflow-2.18.3.dist-info/entry_points.txt,sha256=RvEq8VFlgGe_FfqGOZi0D7ze1hLD0pAtXeNyGfzc_Yc,103
+metaflow-2.18.3.dist-info/top_level.txt,sha256=v1pDHoWaSaKeuc5fKTRSfsXCKSdW1zvNVmvA-i0if3o,9
+metaflow-2.18.3.dist-info/RECORD,,
{metaflow-2.18.1.data → metaflow-2.18.3.data}/data/share/metaflow/devtools/Makefile
File without changes
{metaflow-2.18.1.data → metaflow-2.18.3.data}/data/share/metaflow/devtools/Tiltfile
File without changes
{metaflow-2.18.1.data → metaflow-2.18.3.data}/data/share/metaflow/devtools/pick_services.sh
File without changes
{metaflow-2.18.1.dist-info → metaflow-2.18.3.dist-info}/WHEEL
File without changes
{metaflow-2.18.1.dist-info → metaflow-2.18.3.dist-info}/entry_points.txt
File without changes
{metaflow-2.18.1.dist-info → metaflow-2.18.3.dist-info}/licenses/LICENSE
File without changes
{metaflow-2.18.1.dist-info → metaflow-2.18.3.dist-info}/top_level.txt
File without changes