ob-metaflow 2.18.2.1__py2.py3-none-any.whl → 2.18.3.2__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of ob-metaflow has been flagged as possibly problematic.
- metaflow/client/core.py +1 -1
- metaflow/metaflow_config.py +5 -0
- metaflow/plugins/argo/argo_client.py +10 -6
- metaflow/plugins/argo/argo_workflows.py +222 -42
- metaflow/plugins/argo/conditional_input_paths.py +17 -3
- metaflow/plugins/aws/batch/batch.py +42 -2
- metaflow/plugins/aws/step_functions/step_functions.py +6 -1
- metaflow/plugins/aws/step_functions/step_functions_cli.py +11 -0
- metaflow/plugins/aws/step_functions/step_functions_deployer.py +3 -0
- metaflow/version.py +1 -1
- {ob_metaflow-2.18.2.1.dist-info → ob_metaflow-2.18.3.2.dist-info}/METADATA +2 -2
- {ob_metaflow-2.18.2.1.dist-info → ob_metaflow-2.18.3.2.dist-info}/RECORD +19 -19
- {ob_metaflow-2.18.2.1.data → ob_metaflow-2.18.3.2.data}/data/share/metaflow/devtools/Makefile +0 -0
- {ob_metaflow-2.18.2.1.data → ob_metaflow-2.18.3.2.data}/data/share/metaflow/devtools/Tiltfile +0 -0
- {ob_metaflow-2.18.2.1.data → ob_metaflow-2.18.3.2.data}/data/share/metaflow/devtools/pick_services.sh +0 -0
- {ob_metaflow-2.18.2.1.dist-info → ob_metaflow-2.18.3.2.dist-info}/WHEEL +0 -0
- {ob_metaflow-2.18.2.1.dist-info → ob_metaflow-2.18.3.2.dist-info}/entry_points.txt +0 -0
- {ob_metaflow-2.18.2.1.dist-info → ob_metaflow-2.18.3.2.dist-info}/licenses/LICENSE +0 -0
- {ob_metaflow-2.18.2.1.dist-info → ob_metaflow-2.18.3.2.dist-info}/top_level.txt +0 -0
metaflow/client/core.py
CHANGED
@@ -303,7 +303,7 @@ class MetaflowObject(object):
         # distinguish between "attempt will happen" and "no such
         # attempt exists".

-        if pathspec:
+        if pathspec and _object is None:
             ids = pathspec.split("/")

             if self._NAME == "flow" and len(ids) != 1:
metaflow/metaflow_config.py
CHANGED
@@ -345,6 +345,8 @@ SFN_S3_DISTRIBUTED_MAP_OUTPUT_PATH = from_conf(
         else None
     ),
 )
+# Toggle for step command being part of the Step Function payload, or if it should be offloaded to S3
+SFN_COMPRESS_STATE_MACHINE = from_conf("SFN_COMPRESS_STATE_MACHINE", False)
 ###
 # Kubernetes configuration
 ###
@@ -410,6 +412,9 @@ ARGO_EVENTS_INTERNAL_WEBHOOK_URL = from_conf(
     "ARGO_EVENTS_INTERNAL_WEBHOOK_URL", ARGO_EVENTS_WEBHOOK_URL
 )
 ARGO_EVENTS_WEBHOOK_AUTH = from_conf("ARGO_EVENTS_WEBHOOK_AUTH", "none")
+ARGO_EVENTS_SENSOR_NAMESPACE = from_conf(
+    "ARGO_EVENTS_SENSOR_NAMESPACE", KUBERNETES_NAMESPACE
+)

 ARGO_WORKFLOWS_UI_URL = from_conf("ARGO_WORKFLOWS_UI_URL")
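Both new configuration values are read through from_conf, so they can be overridden per deployment. A minimal sketch, not part of the diff, assuming Metaflow's usual METAFLOW_-prefixed environment variables and a hypothetical "argo-events" namespace:

    # Minimal sketch: override the sensor namespace before metaflow_config is imported.
    import os

    os.environ["METAFLOW_ARGO_EVENTS_SENSOR_NAMESPACE"] = "argo-events"  # hypothetical value

    from metaflow.metaflow_config import ARGO_EVENTS_SENSOR_NAMESPACE

    print(ARGO_EVENTS_SENSOR_NAMESPACE)  # "argo-events" instead of the KUBERNETES_NAMESPACE default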
metaflow/plugins/argo/argo_client.py
CHANGED
@@ -1,5 +1,6 @@
 import json

+from metaflow.metaflow_config import ARGO_EVENTS_SENSOR_NAMESPACE
 from metaflow.exception import MetaflowException
 from metaflow.plugins.kubernetes.kubernetes_client import KubernetesClient

@@ -377,12 +378,15 @@ class ArgoClient(object):
                 json.loads(e.body)["message"] if e.body is not None else e.reason
             )

-    def register_sensor(
+    def register_sensor(
+        self, name, sensor=None, sensor_namespace=ARGO_EVENTS_SENSOR_NAMESPACE
+    ):
         if sensor is None:
             sensor = {}
         # Unfortunately, Kubernetes client does not handle optimistic
         # concurrency control by itself unlike kubectl
         client = self._client.get()
+
         if not sensor:
             sensor["metadata"] = {}

@@ -392,7 +396,7 @@ class ArgoClient(object):
             ] = client.CustomObjectsApi().get_namespaced_custom_object(
                 group=self._group,
                 version=self._version,
-                namespace=
+                namespace=sensor_namespace,
                 plural="sensors",
                 name=name,
             )[
@@ -407,7 +411,7 @@ class ArgoClient(object):
             return client.CustomObjectsApi().create_namespaced_custom_object(
                 group=self._group,
                 version=self._version,
-                namespace=
+                namespace=sensor_namespace,
                 plural="sensors",
                 body=sensor,
             )
@@ -425,7 +429,7 @@ class ArgoClient(object):
             return client.CustomObjectsApi().replace_namespaced_custom_object(
                 group=self._group,
                 version=self._version,
-                namespace=
+                namespace=sensor_namespace,
                 plural="sensors",
                 body=sensor,
                 name=name,
@@ -435,7 +439,7 @@ class ArgoClient(object):
                 json.loads(e.body)["message"] if e.body is not None else e.reason
             )

-    def delete_sensor(self, name):
+    def delete_sensor(self, name, sensor_namespace):
         """
         Issues an API call for deleting a sensor

@@ -447,7 +451,7 @@ class ArgoClient(object):
             return client.CustomObjectsApi().delete_namespaced_custom_object(
                 group=self._group,
                 version=self._version,
-                namespace=
+                namespace=sensor_namespace,
                 plural="sensors",
                 name=name,
             )
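Taken together, sensor reads, creates, replaces and deletes now target an explicit namespace. A hedged sketch of the resulting call pattern, with a hypothetical sensor name and namespace (illustrative only; it needs a cluster with Argo Events installed):

    from metaflow.plugins.argo.argo_client import ArgoClient

    client = ArgoClient(namespace="argo-events")  # hypothetical namespace
    client.register_sensor(
        "myflow-sensor",                          # hypothetical sensor name
        sensor={"metadata": {}, "spec": {}},      # placeholder body
        sensor_namespace="argo-events",
    )
    client.delete_sensor("myflow-sensor", "argo-events")  # namespace is now a required argument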
metaflow/plugins/argo/argo_workflows.py
CHANGED
@@ -19,6 +19,7 @@ from metaflow.metaflow_config import (
     ARGO_EVENTS_EVENT_BUS,
     ARGO_EVENTS_EVENT_SOURCE,
     ARGO_EVENTS_INTERNAL_WEBHOOK_URL,
+    ARGO_EVENTS_SENSOR_NAMESPACE,
     ARGO_EVENTS_SERVICE_ACCOUNT,
     ARGO_EVENTS_WEBHOOK_AUTH,
     ARGO_WORKFLOWS_CAPTURE_ERROR_SCRIPT,
@@ -74,6 +75,10 @@ class ArgoWorkflowsException(MetaflowException):
     headline = "Argo Workflows error"


+class ArgoWorkflowsSensorCleanupException(MetaflowException):
+    headline = "Argo Workflows sensor clean up error"
+
+
 class ArgoWorkflowsSchedulingException(MetaflowException):
     headline = "Argo Workflows scheduling error"

@@ -195,6 +200,7 @@ class ArgoWorkflows(object):
         return str(self._workflow_template)

     def deploy(self):
+        self.cleanup_previous_sensors()
         try:
             # Register workflow template.
             ArgoClient(namespace=KUBERNETES_NAMESPACE).register_workflow_template(
@@ -203,6 +209,37 @@
         except Exception as e:
             raise ArgoWorkflowsException(str(e))

+    def cleanup_previous_sensors(self):
+        try:
+            client = ArgoClient(namespace=KUBERNETES_NAMESPACE)
+            # Check for existing deployment and do cleanup
+            old_template = client.get_workflow_template(self.name)
+            if not old_template:
+                return None
+            # Clean up old sensors
+            old_sensor_namespace = old_template["metadata"]["annotations"].get(
+                "metaflow/sensor_namespace"
+            )
+
+            if old_sensor_namespace is None:
+                # This workflow was created before sensor annotations
+                # and may have a sensor in the default namespace
+                # we will delete it and it'll get recreated if need be
+                old_sensor_name = ArgoWorkflows._sensor_name(self.name)
+                client.delete_sensor(old_sensor_name, client._namespace)
+            else:
+                # delete old sensor only if it was somewhere else, otherwise it'll get replaced
+                old_sensor_name = old_template["metadata"]["annotations"][
+                    "metaflow/sensor_name"
+                ]
+                if (
+                    not self._sensor
+                    or old_sensor_namespace != ARGO_EVENTS_SENSOR_NAMESPACE
+                ):
+                    client.delete_sensor(old_sensor_name, old_sensor_namespace)
+        except Exception as e:
+            raise ArgoWorkflowsSensorCleanupException(str(e))
+
     @staticmethod
     def _sanitize(name):
         # Metaflow allows underscores in node names, which are disallowed in Argo
@@ -230,6 +267,20 @@
     def delete(name):
         client = ArgoClient(namespace=KUBERNETES_NAMESPACE)

+        # the workflow template might not exist, but we still want to try clean up associated sensors and schedules.
+        workflow_template = client.get_workflow_template(name) or {}
+        workflow_annotations = workflow_template.get("metadata", {}).get(
+            "annotations", {}
+        )
+
+        sensor_name = ArgoWorkflows._sensor_name(
+            workflow_annotations.get("metaflow/sensor_name", name)
+        )
+        # if below is missing then it was deployed before custom sensor namespaces
+        sensor_namespace = workflow_annotations.get(
+            "metaflow/sensor_namespace", KUBERNETES_NAMESPACE
+        )
+
         # Always try to delete the schedule. Failure in deleting the schedule should not
         # be treated as an error, due to any of the following reasons
         # - there might not have been a schedule, or it was deleted by some other means
@@ -239,7 +290,7 @@

         # The workflow might have sensors attached to it, which consume actual resources.
         # Try to delete these as well.
-        sensor_deleted = client.delete_sensor(
+        sensor_deleted = client.delete_sensor(sensor_name, sensor_namespace)

         # After cleaning up related resources, delete the workflow in question.
         # Failure in deleting is treated as critical and will be made visible to the user
@@ -408,11 +459,10 @@
             # Metaflow will overwrite any existing sensor.
             sensor_name = ArgoWorkflows._sensor_name(self.name)
             if self._sensor:
-
-
-
-
-                argo_client.delete_sensor(sensor_name)
+                # The new sensor will go into the sensor namespace specified
+                ArgoClient(namespace=ARGO_EVENTS_SENSOR_NAMESPACE).register_sensor(
+                    sensor_name, self._sensor.to_json(), ARGO_EVENTS_SENSOR_NAMESPACE
+                )
         except Exception as e:
             raise ArgoWorkflowsSchedulingException(str(e))

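The cleanup path keys off two workflow-template annotations that the compiler writes further below (metaflow/sensor_name and metaflow/sensor_namespace). A small sketch, with hypothetical values, of how an existing template's metadata is interpreted:

    # Hypothetical annotations on a previously deployed WorkflowTemplate, as read by
    # cleanup_previous_sensors(); the name and namespace are illustrative.
    old_template = {
        "metadata": {
            "annotations": {
                "metaflow/sensor_name": "myflow-sensor",
                "metaflow/sensor_namespace": "argo-events",
            }
        }
    }

    old_ns = old_template["metadata"]["annotations"].get("metaflow/sensor_namespace")
    # old_ns is None for templates deployed before these annotations existed; in that case
    # the sensor is looked for in the deployment namespace and recreated if still needed.
    print(old_ns)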
@@ -739,6 +789,7 @@
         # references to them within the DAGTask.

         annotations = {}
+
         if self._schedule is not None:
             # timezone is an optional field and json dumps on None will result in null
             # hence configuring it to an empty string
@@ -761,7 +812,9 @@
                         {key: trigger.get(key) for key in ["name", "type"]}
                         for trigger in self.triggers
                     ]
-                )
+                ),
+                "metaflow/sensor_name": ArgoWorkflows._sensor_name(self.name),
+                "metaflow/sensor_namespace": ARGO_EVENTS_SENSOR_NAMESPACE,
             }
         )
         if self.notify_on_error:
@@ -940,7 +993,7 @@
         node_conditional_parents = {}
         node_conditional_branches = {}

-        def _visit(node,
+        def _visit(node, conditional_branch, conditional_parents=None):
             if not node.type == "split-switch" and not (
                 conditional_branch and conditional_parents
             ):
@@ -949,7 +1002,10 @@

             if node.type == "split-switch":
                 conditional_branch = conditional_branch + [node.name]
-                node_conditional_branches
+                c_br = node_conditional_branches.get(node.name, [])
+                node_conditional_branches[node.name] = c_br + [
+                    b for b in conditional_branch if b not in c_br
+                ]

                 conditional_parents = (
                     [node.name]
@@ -967,21 +1023,36 @@
             if conditional_parents and not node.type == "split-switch":
                 node_conditional_parents[node.name] = conditional_parents
                 conditional_branch = conditional_branch + [node.name]
-                node_conditional_branches
+                c_br = node_conditional_branches.get(node.name, [])
+                node_conditional_branches[node.name] = c_br + [
+                    b for b in conditional_branch if b not in c_br
+                ]

                 self.conditional_nodes.add(node.name)

             if conditional_branch and conditional_parents:
                 for n in node.out_funcs:
                     child = self.graph[n]
-                    if
-
-
-                    )
+                    if child.name == node.name:
+                        continue
+                    _visit(child, conditional_branch, conditional_parents)

         # First we visit all nodes to determine conditional parents and branches
         for n in self.graph:
-            _visit(n, []
+            _visit(n, [])
+
+        # helper to clean up conditional info for all children of a node, until a new split-switch is encountered.
+        def _cleanup_conditional_status(node_name, seen):
+            if self.graph[node_name].type == "split-switch":
+                # stop recursive cleanup if we hit a new split-switch
+                return
+            if node_name in self.conditional_nodes:
+                self.conditional_nodes.remove(node_name)
+            node_conditional_parents[node_name] = []
+            node_conditional_branches[node_name] = []
+            for p in self.graph[node_name].out_funcs:
+                if p not in seen:
+                    _cleanup_conditional_status(p, seen + [p])

         # Then we traverse again in order to determine conditional join nodes, and matching conditional join info
         for node in self.graph:
@@ -1014,14 +1085,44 @@
                     last_conditional_split_nodes = self.graph[
                         last_split_switch
                     ].out_funcs
-                    #
-                    if all
-
-
-
+                    # NOTE: How do we define a conditional join step?
+                    # The idea here is that we check if the conditional branches(e.g. chains of conditional steps leading to) of all the in_funcs
+                    # manage to tick off every step name that follows a split-switch
+                    # For example, consider the following structure
+                    # switch_step -> A, B, C
+                    # A -> A2 -> A3 -> A4 -> B2
+                    # B -> B2 -> B3 -> C3
+                    # C -> C2 -> C3 -> end
+                    #
+                    # if we look at the in_funcs for C3, they are (C2, B3)
+                    # B3 closes off branches started by A and B
+                    # C3 closes off branches started by C
+                    # therefore C3 is a conditional join step for the 'switch_step'
+                    # NOTE: Then what about a skip step?
+                    # some switch cases might not introduce any distinct steps of their own, opting to instead skip ahead to a later common step.
+                    # Example:
+                    # switch_step -> A, B, C
+                    # A -> A1 -> B2 -> C
+                    # B -> B1 -> B2 -> C
+                    #
+                    # In this case, C is a skip step as it does not add any conditional branching of its own.
+                    # C is also a conditional join, as it closes all branches started by 'switch_step'
+
+                    closes_branches = all(
+                        (
+                            # branch_root_node_name needs to be in at least one conditional_branch for it to be closed.
+                            any(
+                                branch_root_node_name
+                                in node_conditional_branches.get(in_func, [])
+                                for in_func in conditional_in_funcs
+                            )
+                            # need to account for a switch case skipping completely, not having a conditional-branch of its own.
+                            if branch_root_node_name != node.name
+                            else True
                         )
-                    for
-                    )
+                        for branch_root_node_name in last_conditional_split_nodes
+                    )
+                    if closes_branches:
                         closed_conditional_parents.append(last_split_switch)

                         self.conditional_join_nodes.add(node.name)
@@ -1035,25 +1136,45 @@
                 for p in node_conditional_parents.get(node.name, [])
                 if p not in closed_conditional_parents
             ]:
-
-                self.conditional_nodes.remove(node.name)
-                node_conditional_parents[node.name] = []
-                for p in node.out_funcs:
-                    if p in self.conditional_nodes:
-                        self.conditional_nodes.remove(p)
-                        node_conditional_parents[p] = []
+                _cleanup_conditional_status(node.name, [])

     def _is_conditional_node(self, node):
         return node.name in self.conditional_nodes

+    def _is_conditional_skip_node(self, node):
+        return (
+            self._is_conditional_node(node)
+            and any(
+                self.graph[in_func].type == "split-switch" for in_func in node.in_funcs
+            )
+            and len(
+                [
+                    in_func
+                    for in_func in node.in_funcs
+                    if self._is_conditional_node(self.graph[in_func])
+                    or self.graph[in_func].type == "split-switch"
+                ]
+            )
+            > 1
+        )
+
     def _is_conditional_join_node(self, node):
         return node.name in self.conditional_join_nodes

+    def _many_in_funcs_all_conditional(self, node):
+        cond_in_funcs = [
+            in_func
+            for in_func in node.in_funcs
+            if self._is_conditional_node(self.graph[in_func])
+        ]
+        return len(cond_in_funcs) > 1 and len(cond_in_funcs) == len(node.in_funcs)
+
     def _is_recursive_node(self, node):
         return node.name in self.recursive_nodes

     def _matching_conditional_join(self, node):
-
+        # If no earlier conditional join step is found during parsing, then 'end' is always one.
+        return self.matching_conditional_join_dict.get(node.name, "end")

     # Visit every node and yield the uber DAGTemplate(s).
     def _dag_templates(self):
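The NOTE in the hunk above describes when a step counts as a conditional join. A worked sketch of the closes_branches check for that example graph (C3's in_funcs are C2 and B3), with the branch bookkeeping written out by hand rather than computed by _visit; this is illustrative and not part of the diff:

    last_conditional_split_nodes = ["A", "B", "C"]   # out_funcs of switch_step
    conditional_in_funcs = ["C2", "B3"]
    node_conditional_branches = {
        "B3": ["A", "B"],  # B3 lies on branches started by A and by B
        "C2": ["C"],       # C2 lies on the branch started by C
    }
    node_name = "C3"

    closes_branches = all(
        any(root in node_conditional_branches.get(f, []) for f in conditional_in_funcs)
        if root != node_name
        else True
        for root in last_conditional_split_nodes
    )
    print(closes_branches)  # True, so C3 is a conditional join for switch_step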
@@ -1233,12 +1354,24 @@
                     "%s.Succeeded" % self._sanitize(in_func)
                     for in_func in node.in_funcs
                     if self._is_conditional_node(self.graph[in_func])
+                    or self.graph[in_func].type == "split-switch"
                 ]
                 required_deps = [
                     "%s.Succeeded" % self._sanitize(in_func)
                     for in_func in node.in_funcs
                     if not self._is_conditional_node(self.graph[in_func])
+                    and self.graph[in_func].type != "split-switch"
                 ]
+                if self._is_conditional_skip_node(
+                    node
+                ) or self._many_in_funcs_all_conditional(node):
+                    # skip nodes need unique condition handling
+                    conditional_deps = [
+                        "%s.Succeeded" % self._sanitize(in_func)
+                        for in_func in node.in_funcs
+                    ]
+                    required_deps = []
+
                 both_conditions = required_deps and conditional_deps

                 depends_str = "{required}{_and}{conditional}".format(
@@ -1256,15 +1389,45 @@
                 )

                 # Add conditional if this is the first step in a conditional branch
+                switch_in_funcs = [
+                    in_func
+                    for in_func in node.in_funcs
+                    if self.graph[in_func].type == "split-switch"
+                ]
                 if (
                     self._is_conditional_node(node)
-
-
-
-
-
-
+                    or self._is_conditional_skip_node(node)
+                    or self._is_conditional_join_node(node)
+                ) and switch_in_funcs:
+                    conditional_when = "||".join(
+                        [
+                            "{{tasks.%s.outputs.parameters.switch-step}}==%s"
+                            % (self._sanitize(switch_in_func), node.name)
+                            for switch_in_func in switch_in_funcs
+                        ]
+                    )
+
+                    non_switch_in_funcs = [
+                        in_func
+                        for in_func in node.in_funcs
+                        if in_func not in switch_in_funcs
+                    ]
+                    status_when = ""
+                    if non_switch_in_funcs:
+                        status_when = "||".join(
+                            [
+                                "{{tasks.%s.status}}==Succeeded"
+                                % self._sanitize(in_func)
+                                for in_func in non_switch_in_funcs
+                            ]
+                        )
+
+                    total_when = (
+                        f"({status_when}) || ({conditional_when})"
+                        if status_when
+                        else conditional_when
                     )
+                    dag_task.when(total_when)

                 dag_tasks.append(dag_task)
                 # End the workflow if we have reached the end of the flow
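For a step guarded by a switch, the generated DAGTask therefore carries a when expression that combines the switch output with the status of its other parents. A sketch, not part of the diff, of the composed guard for a hypothetical step "c3" with one split-switch parent ("case_step") and one regular parent ("b3"), assuming the names are already sanitized:

    switch_in_funcs = ["case_step"]
    non_switch_in_funcs = ["b3"]
    node_name = "c3"

    conditional_when = "||".join(
        "{{tasks.%s.outputs.parameters.switch-step}}==%s" % (f, node_name)
        for f in switch_in_funcs
    )
    status_when = "||".join(
        "{{tasks.%s.status}}==Succeeded" % f for f in non_switch_in_funcs
    )
    total_when = (
        f"({status_when}) || ({conditional_when})" if status_when else conditional_when
    )
    print(total_when)
    # ({{tasks.b3.status}}==Succeeded) || ({{tasks.case_step.outputs.parameters.switch-step}}==c3)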
@@ -1708,7 +1871,11 @@
                 input_paths_expr = (
                     "export INPUT_PATHS={{inputs.parameters.input-paths}}"
                 )
-                if
+                if (
+                    self._is_conditional_join_node(node)
+                    or self._many_in_funcs_all_conditional(node)
+                    or self._is_conditional_skip_node(node)
+                ):
                     # NOTE: Argo template expressions that fail to resolve, output the expression itself as a value.
                     # With conditional steps, some of the input-paths are therefore 'broken' due to containing a nil expression
                     # e.g. "{{ tasks['A'].outputs.parameters.task-id }}" when task A never executed.
@@ -1888,20 +2055,33 @@
                 )
                 input_paths = "%s/_parameters/%s" % (run_id, task_id_params)
             # Only for static joins and conditional_joins
-            elif
+            elif (
+                self._is_conditional_join_node(node)
+                or self._many_in_funcs_all_conditional(node)
+                or self._is_conditional_skip_node(node)
+            ) and not (
                 node.type == "join"
                 and self.graph[node.split_parents[-1]].type == "foreach"
             ):
+                # we need to pass in the set of conditional in_funcs to the pathspec generating script as in the case of split-switch skipping cases,
+                # non-conditional input-paths need to be ignored in favour of conditional ones when they have executed.
+                skippable_input_steps = ",".join(
+                    [
+                        in_func
+                        for in_func in node.in_funcs
+                        if self.graph[in_func].type == "split-switch"
+                    ]
+                )
                 input_paths = (
-                    "$(python -m metaflow.plugins.argo.conditional_input_paths %s)"
-                    % input_paths
+                    "$(python -m metaflow.plugins.argo.conditional_input_paths %s %s)"
+                    % (input_paths, skippable_input_steps)
                 )
             elif (
                 node.type == "join"
                 and self.graph[node.split_parents[-1]].type == "foreach"
             ):
                 # foreach-joins straight out of conditional branches are not yet supported
-                if self._is_conditional_join_node(node):
+                if self._is_conditional_join_node(node) and len(node.in_funcs) > 1:
                     raise ArgoWorkflowsException(
                         "Conditional steps inside a foreach that transition directly into a join step are not currently supported.\n"
                         "As a workaround, add a common step after the conditional steps %s "
@@ -3572,7 +3752,7 @@
             # Sensor metadata.
             ObjectMeta()
             .name(ArgoWorkflows._sensor_name(self.name))
-            .namespace(
+            .namespace(ARGO_EVENTS_SENSOR_NAMESPACE)
             .labels(self._base_labels)
             .label("app.kubernetes.io/name", "metaflow-sensor")
             .annotations(self._base_annotations)
metaflow/plugins/argo/conditional_input_paths.py
CHANGED
@@ -4,7 +4,7 @@ from metaflow.util import decompress_list, compress_list
 import base64


-def generate_input_paths(input_paths):
+def generate_input_paths(input_paths, skippable_steps):
     # => run_id/step/:foo,bar
     # input_paths are base64 encoded due to Argo shenanigans
     decoded = base64.b64decode(input_paths).decode("utf-8")
@@ -13,9 +13,23 @@ def generate_input_paths(input_paths):
     # some of the paths are going to be malformed due to never having executed per conditional.
     # strip these out of the list.

+    # all pathspecs of leading steps that executed.
     trimmed = [path for path in paths if not "{{" in path]
-
+
+    # pathspecs of leading steps that are conditional, and should be used instead of non-conditional ones
+    # e.g. the case of skipping switches: start -> case_step -> conditional_a or end
+    conditionals = [
+        path for path in trimmed if not any(step in path for step in skippable_steps)
+    ]
+    pathspecs_to_use = conditionals if conditionals else trimmed
+    return compress_list(pathspecs_to_use, zlibmin=inf)


 if __name__ == "__main__":
-
+    input_paths = sys.argv[1]
+    try:
+        skippable_steps = sys.argv[2].split(",")
+    except IndexError:
+        skippable_steps = []
+
+    print(generate_input_paths(input_paths, skippable_steps))
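The selection rule can be seen on a toy example. This sketch, not part of the diff, mirrors the filtering logic on plain pathspec strings (step names and pathspecs are hypothetical) instead of calling the module, which expects a base64-encoded, compressed list:

    paths = [
        "argo-myflow-abc/case_step/t-102",                      # split-switch parent, always present
        "{{tasks.conditional-a.outputs.parameters.task-id}}",   # unresolved expression: this branch never ran
    ]
    skippable_steps = ["case_step"]

    trimmed = [p for p in paths if "{{" not in p]                                    # drop unresolved paths
    conditionals = [p for p in trimmed if not any(s in p for s in skippable_steps)]  # prefer conditional parents
    print(conditionals if conditionals else trimmed)  # falls back to ['argo-myflow-abc/case_step/t-102']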
metaflow/plugins/aws/batch/batch.py
CHANGED
@@ -53,9 +53,10 @@ class BatchKilledException(MetaflowException):


 class Batch(object):
-    def __init__(self, metadata, environment):
+    def __init__(self, metadata, environment, flow_datastore=None):
         self.metadata = metadata
         self.environment = environment
+        self.flow_datastore = flow_datastore
         self._client = BatchClient()
         atexit.register(lambda: self.job.kill() if hasattr(self, "job") else None)

@@ -67,6 +68,7 @@ class Batch(object):
         step_name,
         step_cmds,
         task_spec,
+        offload_command_to_s3,
     ):
         mflog_expr = export_mflog_env_vars(
             datastore_type="s3",
@@ -104,7 +106,43 @@ class Batch(object):
         # We lose the last logs in this scenario (although they are visible
         # still through AWS CloudWatch console).
         cmd_str += "c=$?; %s; exit $c" % BASH_SAVE_LOGS
-
+        command = shlex.split('bash -c "%s"' % cmd_str)
+
+        if not offload_command_to_s3:
+            return command
+
+        # If S3 upload is enabled, we need to modify the command after it's created
+        if self.flow_datastore is None:
+            raise MetaflowException(
+                "Can not offload Batch command to S3 without a datastore configured."
+            )
+
+        from metaflow.plugins.aws.aws_utils import parse_s3_full_path
+
+        # Get the command that was created
+        # Upload the command to S3 during deployment
+        try:
+            command_bytes = cmd_str.encode("utf-8")
+            result_paths = self.flow_datastore.save_data([command_bytes], len_hint=1)
+            s3_path, _key = result_paths[0]
+
+            bucket, s3_object = parse_s3_full_path(s3_path)
+            download_script = "{python} -c '{script}'".format(
+                python=self.environment._python(),
+                script='import boto3, os; ep=os.getenv(\\"METAFLOW_S3_ENDPOINT_URL\\"); boto3.client(\\"s3\\", **({\\"endpoint_url\\":ep} if ep else {})).download_file(\\"%s\\", \\"%s\\", \\"/tmp/step_command.sh\\")'
                % (bucket, s3_object),
+            )
+            download_cmd = (
+                f"{self.environment._get_install_dependencies_cmd('s3')} && "  # required for boto3 due to the original dependencies cmd getting packaged, and not being downloaded in time.
+                f"{download_script} && "
+                f"chmod +x /tmp/step_command.sh && "
+                f"bash /tmp/step_command.sh"
+            )
+            new_cmd = shlex.split('bash -c "%s"' % download_cmd)
+            return new_cmd
+        except Exception as e:
+            print(f"Warning: Failed to upload command to S3: {e}")
+            print("Falling back to inline command")

     def _search_jobs(self, flow_name, run_id, user):
         if user is None:
@@ -207,6 +245,7 @@ class Batch(object):
         ephemeral_storage=None,
         log_driver=None,
         log_options=None,
+        offload_command_to_s3=False,
     ):
         job_name = self._job_name(
             attrs.get("metaflow.user"),
@@ -228,6 +267,7 @@ class Batch(object):
                 step_name,
                 [step_cli],
                 task_spec,
+                offload_command_to_s3,
             )
         )
         .image(image)
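The offload path stores the full bash command in the datastore and swaps the container command for a short bootstrap that downloads and runs it. The URI returned by save_data is split with parse_s3_full_path, as the unpacking in the diff shows; a minimal sketch with a hypothetical URI:

    from metaflow.plugins.aws.aws_utils import parse_s3_full_path

    bucket, key = parse_s3_full_path("s3://my-datastore/metaflow/MyFlow/data/ab/abcdef123")  # hypothetical
    print(bucket)  # my-datastore
    print(key)     # metaflow/MyFlow/data/ab/abcdef123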
metaflow/plugins/aws/step_functions/step_functions.py
CHANGED
@@ -57,6 +57,7 @@ class StepFunctions(object):
         workflow_timeout=None,
         is_project=False,
         use_distributed_map=False,
+        compress_state_machine=False,
     ):
         self.name = name
         self.graph = graph
@@ -81,6 +82,9 @@ class StepFunctions(object):
         # https://aws.amazon.com/blogs/aws/step-functions-distributed-map-a-serverless-solution-for-large-scale-parallel-data-processing/
         self.use_distributed_map = use_distributed_map

+        # S3 command upload configuration
+        self.compress_state_machine = compress_state_machine
+
         self._client = StepFunctionsClient()
         self._workflow = self._compile()
         self._cron = self._cron()
@@ -858,7 +862,7 @@ class StepFunctions(object):
         # merge batch tags supplied through step-fuctions CLI and ones defined in decorator
         batch_tags = {**self.aws_batch_tags, **resources["aws_batch_tags"]}
         return (
-            Batch(self.metadata, self.environment)
+            Batch(self.metadata, self.environment, self.flow_datastore)
             .create_job(
                 step_name=node.name,
                 step_cli=self._step_cli(
@@ -894,6 +898,7 @@ class StepFunctions(object):
                 ephemeral_storage=resources["ephemeral_storage"],
                 log_driver=resources["log_driver"],
                 log_options=resources["log_options"],
+                offload_command_to_s3=self.compress_state_machine,
             )
             .attempts(total_retries + 1)
         )
metaflow/plugins/aws/step_functions/step_functions_cli.py
CHANGED
@@ -10,6 +10,7 @@ from metaflow.metaflow_config import (
     FEAT_ALWAYS_UPLOAD_CODE_PACKAGE,
     SERVICE_VERSION_CHECK,
     SFN_STATE_MACHINE_PREFIX,
+    SFN_COMPRESS_STATE_MACHINE,
     UI_URL,
 )
 from metaflow.package import MetaflowPackage
@@ -140,6 +141,12 @@ def step_functions(obj, name=None):
     help="Use AWS Step Functions Distributed Map instead of Inline Map for "
     "defining foreach tasks in Amazon State Language.",
 )
+@click.option(
+    "--compress-state-machine/--no-compress-state-machine",
+    is_flag=True,
+    default=SFN_COMPRESS_STATE_MACHINE,
+    help="Compress AWS Step Functions state machine to fit within the 8K limit.",
+)
 @click.option(
     "--deployer-attribute-file",
     default=None,
@@ -162,6 +169,7 @@ def create(
     workflow_timeout=None,
     log_execution_history=False,
     use_distributed_map=False,
+    compress_state_machine=False,
     deployer_attribute_file=None,
 ):
     for node in obj.graph:
@@ -212,6 +220,7 @@ def create(
         workflow_timeout,
         obj.is_project,
         use_distributed_map,
+        compress_state_machine,
     )

     if only_json:
@@ -332,6 +341,7 @@ def make_flow(
     workflow_timeout,
     is_project,
     use_distributed_map,
+    compress_state_machine=False,
 ):
     if obj.flow_datastore.TYPE != "s3":
         raise MetaflowException("AWS Step Functions requires --datastore=s3.")
@@ -390,6 +400,7 @@ def make_flow(
         workflow_timeout=workflow_timeout,
         is_project=is_project,
         use_distributed_map=use_distributed_map,
+        compress_state_machine=compress_state_machine,
     )

metaflow/plugins/aws/step_functions/step_functions_deployer.py
CHANGED
@@ -76,6 +76,9 @@ class StepFunctionsDeployer(DeployerImpl):
     use_distributed_map : bool, optional, default False
         Use AWS Step Functions Distributed Map instead of Inline Map for defining foreach
         tasks in Amazon State Language.
+    compress_state_machine : bool, optional, default False
+        Compress AWS Step Functions state machine to fit within the 8K limit.
+
     deployer_attribute_file : str, optional, default None
         Write the workflow name to the specified file. Used internally for Metaflow's Deployer API.

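The option is also exposed as --compress-state-machine/--no-compress-state-machine on "step-functions create" and, per this docstring, on the programmatic Deployer API. A hedged sketch, assuming a flow file named flow.py and that the flag is forwarded to create() the same way as the neighbouring use_distributed_map option; the exact plumbing may differ:

    from metaflow import Deployer

    # Deploy with the state machine compressed (step commands offloaded to S3).
    deployed = Deployer("flow.py").step_functions().create(compress_state_machine=True)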
metaflow/version.py
CHANGED
@@ -1 +1 @@
-metaflow_version = "2.18.2
+metaflow_version = "2.18.3.2"
{ob_metaflow-2.18.2.1.dist-info → ob_metaflow-2.18.3.2.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ob-metaflow
-Version: 2.18.2
+Version: 2.18.3.2
 Summary: Metaflow: More AI and ML, Less Engineering
 Author: Netflix, Outerbounds & the Metaflow Community
 Author-email: help@outerbounds.co
@@ -12,7 +12,7 @@ Requires-Dist: boto3
 Requires-Dist: pylint
 Requires-Dist: kubernetes
 Provides-Extra: stubs
-Requires-Dist: metaflow-stubs==2.18.2
+Requires-Dist: metaflow-stubs==2.18.3.2; extra == "stubs"
 Dynamic: author
 Dynamic: author-email
 Dynamic: description
{ob_metaflow-2.18.2.1.dist-info → ob_metaflow-2.18.3.2.dist-info}/RECORD
CHANGED
@@ -16,7 +16,7 @@ metaflow/includefile.py,sha256=RtISGl1V48qjkJBakUZ9yPpHV102h7pOIFiKP8PLHpc,20927
 metaflow/integrations.py,sha256=LlsaoePRg03DjENnmLxZDYto3NwWc9z_PtU6nJxLldg,1480
 metaflow/lint.py,sha256=A2NdUq_MnQal_RUCMC8ZOSR0VYZGyi2mSgwPQB0UzQo,15343
 metaflow/meta_files.py,sha256=vlgJHI8GJUKzXoxdrVoH8yyCF5bhFgwYemUgnyd1wgM,342
-metaflow/metaflow_config.py,sha256=
+metaflow/metaflow_config.py,sha256=0gkNCCKbX_mMRg0sjy1sCXaymCHKBtxeQpq4X8qahuM,24716
 metaflow/metaflow_config_funcs.py,sha256=5GlvoafV6SxykwfL8D12WXSfwjBN_NsyuKE_Q3gjGVE,6738
 metaflow/metaflow_current.py,sha256=pfkXmkyHeMJhxIs6HBJNBEaBDpcl5kz9Wx5mW6F_3qo,7164
 metaflow/metaflow_environment.py,sha256=20PIhA5R_rJneNj8f8UaWRmznGRPcEd6hP7goj_rc1s,11477
@@ -36,7 +36,7 @@ metaflow/tuple_util.py,sha256=_G5YIEhuugwJ_f6rrZoelMFak3DqAR2tt_5CapS1XTY,830
 metaflow/unbounded_foreach.py,sha256=p184WMbrMJ3xKYHwewj27ZhRUsSj_kw1jlye5gA9xJk,387
 metaflow/util.py,sha256=g2SOU_CRzJLgDM_UGF9QDMANMAIHAsDRXE6S76_YzsY,14594
 metaflow/vendor.py,sha256=A82CGHfStZGDP5pQ5XzRjFkbN1ZC-vFmghXIrzMDDNg,5868
-metaflow/version.py,sha256=
+metaflow/version.py,sha256=KWAmF4twiMrleI_VE_XeX1X2JjbMtDyWf45CajVu2rk,30
 metaflow/_vendor/__init__.py,sha256=y_CiwUD3l4eAKvTVDZeqgVujMy31cAM1qjAB-HfI-9s,353
 metaflow/_vendor/typing_extensions.py,sha256=q9zxWa6p6CzF1zZvSkygSlklduHf_b3K7MCxGz7MJRc,134519
 metaflow/_vendor/zipp.py,sha256=ajztOH-9I7KA_4wqDYygtHa6xUBVZgFpmZ8FE74HHHI,8425
@@ -157,7 +157,7 @@ metaflow/cli_components/run_cmds.py,sha256=_xk2asy3SkqsJfZVhbgYSJ2rkkJe7cvLik6b0
 metaflow/cli_components/step_cmd.py,sha256=zGJgTv7wxrv34nWDi__CHaC2eS6kItR95EdVGJX803w,4766
 metaflow/cli_components/utils.py,sha256=gpoDociadjnJD7MuiJup_MDR02ZJjjleejr0jPBu29c,6057
 metaflow/client/__init__.py,sha256=1GtQB4Y_CBkzaxg32L1syNQSlfj762wmLrfrDxGi1b8,226
-metaflow/client/core.py,sha256=
+metaflow/client/core.py,sha256=6h4oohNXZkP2QC0M4dKdrwz5Zx5s6j1_DIWB8VYmbN0,83734
 metaflow/client/filecache.py,sha256=Wy0yhhCqC1JZgebqi7z52GCwXYnkAqMZHTtxThvwBgM,15229
 metaflow/cmd/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
 metaflow/cmd/configure_cmd.py,sha256=o-DKnUf2FBo_HiMVyoyzQaGBSMtpbEPEdFTQZ0hkU-k,33396
@@ -228,15 +228,15 @@ metaflow/plugins/airflow/sensors/base_sensor.py,sha256=s-OQBfPWZ_T3wn96Ua59CCEj1
 metaflow/plugins/airflow/sensors/external_task_sensor.py,sha256=zhYlrZnXT20KW8-fVk0fCNtTyNiKJB5PMVASacu30r0,6034
 metaflow/plugins/airflow/sensors/s3_sensor.py,sha256=iDReG-7FKnumrtQg-HY6cCUAAqNA90nARrjjjEEk_x4,3275
 metaflow/plugins/argo/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-metaflow/plugins/argo/argo_client.py,sha256=
+metaflow/plugins/argo/argo_client.py,sha256=oT4ZrCyE7CYEbqNN0SfoZfSHd5fYW9XtuOrQEiUd1co,17230
 metaflow/plugins/argo/argo_events.py,sha256=_C1KWztVqgi3zuH57pInaE9OzABc2NnncC-zdwOMZ-w,5909
-metaflow/plugins/argo/argo_workflows.py,sha256=
+metaflow/plugins/argo/argo_workflows.py,sha256=GNTckkqlT6t-In-153xEVdw6WUWNYDLXoYT3gDCrXyY,218304
 metaflow/plugins/argo/argo_workflows_cli.py,sha256=-blfZp-kAS8oWFTarfou9gRyE4QCnnJwa-0g8QuE0zk,52280
 metaflow/plugins/argo/argo_workflows_decorator.py,sha256=CLSjPqFTGucZ2_dSQGAYkoWWUZBQ9TCBXul4rxhDj3w,8282
 metaflow/plugins/argo/argo_workflows_deployer.py,sha256=6kHxEnYXJwzNCM9swI8-0AckxtPWqwhZLerYkX8fxUM,4444
 metaflow/plugins/argo/argo_workflows_deployer_objects.py,sha256=ydBE-lP42eNKvep36nQdUBPS3rQQErvoA7rCgyp5M6I,14949
 metaflow/plugins/argo/capture_error.py,sha256=Ys9dscGrTpW-ZCirLBU0gD9qBM0BjxyxGlUMKcwewQc,1852
-metaflow/plugins/argo/conditional_input_paths.py,sha256=
+metaflow/plugins/argo/conditional_input_paths.py,sha256=Vtca74XbhnqAXgJJXKasLEa28jZbKBZPC5w4NAIOURc,1251
 metaflow/plugins/argo/exit_hooks.py,sha256=nh8IEkzAtQnbKVnh3N9CVnVKZB39Bjm3e0LFrACsLz8,6109
 metaflow/plugins/argo/generate_input_paths.py,sha256=loYsI6RFX9LlFsHb7Fe-mzlTTtRdySoOu7sYDy-uXK0,881
 metaflow/plugins/argo/jobset_input_paths.py,sha256=-h0E_e0w6FMiBUod9Rf_XOSCtZv_C0exacw4q1SfIfg,501
@@ -244,7 +244,7 @@ metaflow/plugins/aws/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hS
 metaflow/plugins/aws/aws_client.py,sha256=BTiLMXa1agjja-N73oWinaOZHs-lGPbfKJG8CqdRgaU,4287
 metaflow/plugins/aws/aws_utils.py,sha256=1RVMpmVECda2ztTGlG6oJ3LXbuot1uRnHgTL9DMlGjM,9319
 metaflow/plugins/aws/batch/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-metaflow/plugins/aws/batch/batch.py,sha256=
+metaflow/plugins/aws/batch/batch.py,sha256=ugQ4YOjEs_PS5VtilMF0u1AArAZJTZPHCrCoVHPSqHc,20395
 metaflow/plugins/aws/batch/batch_cli.py,sha256=yZAy6WqZPsCqxjMnRhnTellCjLo27tD_OcoBLHezH8E,12508
 metaflow/plugins/aws/batch/batch_client.py,sha256=J50RMEXeEXFe5RqNUM1HN22BuDQFYFVQ4FSMOK55VWY,28798
 metaflow/plugins/aws/batch/batch_decorator.py,sha256=0zAckLYpAwnqTB_U77u_mZC_mEZkMeM0BFposFTG16M,20110
@@ -256,11 +256,11 @@ metaflow/plugins/aws/step_functions/event_bridge_client.py,sha256=U9-tqKdih4KR-Z
 metaflow/plugins/aws/step_functions/production_token.py,sha256=rREx9djJzKYDiGhPCZ919pSpfrBCYuhSL5WlwnAojNM,1890
 metaflow/plugins/aws/step_functions/schedule_decorator.py,sha256=Ab1rW8O_no4HNZm4__iBmFDCDW0Z8-TgK4lnxHHA6HI,1940
 metaflow/plugins/aws/step_functions/set_batch_environment.py,sha256=ibiGWFHDjKcLfprH3OsX-g2M9lUsh6J-bp7v2cdLhD4,1294
-metaflow/plugins/aws/step_functions/step_functions.py,sha256=
-metaflow/plugins/aws/step_functions/step_functions_cli.py,sha256=
+metaflow/plugins/aws/step_functions/step_functions.py,sha256=jj21WBIvsNULnIjBDFwbn6h__GPRF1sxH6X-DjLGCY4,54340
+metaflow/plugins/aws/step_functions/step_functions_cli.py,sha256=dgCA3RbRfYf48wqzM_JJYiIH-eqcMjAXGb_jdXCfsp4,27556
 metaflow/plugins/aws/step_functions/step_functions_client.py,sha256=DKpNwAIWElvWjFANs5Ku3rgzjxFoqAD6k-EF8Xhkg3Q,4754
 metaflow/plugins/aws/step_functions/step_functions_decorator.py,sha256=jzDHYmgU_XvLffZDazR_1viow_1qQFblx9UKyjtoM_0,3788
-metaflow/plugins/aws/step_functions/step_functions_deployer.py,sha256=
+metaflow/plugins/aws/step_functions/step_functions_deployer.py,sha256=MOQ6H42szNHJw0ii3RJm7eyVGCmHL2j8kuZp7AuoiA8,3860
 metaflow/plugins/aws/step_functions/step_functions_deployer_objects.py,sha256=n7AEPs3uULXEuG3TVf2ZlTNq1LFd2n7x1IPVO2T5Ekk,8174
 metaflow/plugins/azure/__init__.py,sha256=GuuhTVC-zSdyAf79a1wiERMq0Zts7fwVT7t9fAf234A,100
 metaflow/plugins/azure/azure_credential.py,sha256=JmdGEbVzgxy8ucqnQDdTTI_atyMX9WSZUw3qYOo7RhE,2174
@@ -428,12 +428,12 @@ metaflow/user_decorators/mutable_flow.py,sha256=EywKTN3cnXPQF_s62wQaC4a4aH14j8oe
 metaflow/user_decorators/mutable_step.py,sha256=-BY0UDXf_RCAEnC5JlLzEXGdiw1KD9oSrSxS_SWaB9Y,16791
 metaflow/user_decorators/user_flow_decorator.py,sha256=2yDwZq9QGv9W-7kEuKwa8o4ZkTvuHJ5ESz7VVrGViAI,9890
 metaflow/user_decorators/user_step_decorator.py,sha256=4558NR8RJtN22OyTwCXO80bAMhMTaRGMoX12b1GMcPc,27232
-ob_metaflow-2.18.2.
-ob_metaflow-2.18.2.
-ob_metaflow-2.18.2.
-ob_metaflow-2.18.2.
-ob_metaflow-2.18.2.
-ob_metaflow-2.18.2.
-ob_metaflow-2.18.2.
-ob_metaflow-2.18.2.
-ob_metaflow-2.18.2.
+ob_metaflow-2.18.3.2.data/data/share/metaflow/devtools/Makefile,sha256=TT4TCq8ALSfqYyGqDPocN5oPcZe2FqoCZxmGO1LmyCc,13760
+ob_metaflow-2.18.3.2.data/data/share/metaflow/devtools/Tiltfile,sha256=Ty5p6AD3MwJAcAnOGv4yMz8fExAsnNQ11r8whK6uzzw,21381
+ob_metaflow-2.18.3.2.data/data/share/metaflow/devtools/pick_services.sh,sha256=DCnrMXwtApfx3B4S-YiZESMyAFHbXa3VuNL0MxPLyiE,2196
+ob_metaflow-2.18.3.2.dist-info/licenses/LICENSE,sha256=nl_Lt5v9VvJ-5lWJDT4ddKAG-VZ-2IaLmbzpgYDz2hU,11343
+ob_metaflow-2.18.3.2.dist-info/METADATA,sha256=SJ7GhYJEnhUGiuGRrsG9eRUa_lHkHIy-AKFx9MxYmAU,5935
+ob_metaflow-2.18.3.2.dist-info/WHEEL,sha256=JNWh1Fm1UdwIQV075glCn4MVuCRs0sotJIq-J6rbxCU,109
+ob_metaflow-2.18.3.2.dist-info/entry_points.txt,sha256=RvEq8VFlgGe_FfqGOZi0D7ze1hLD0pAtXeNyGfzc_Yc,103
+ob_metaflow-2.18.3.2.dist-info/top_level.txt,sha256=v1pDHoWaSaKeuc5fKTRSfsXCKSdW1zvNVmvA-i0if3o,9
+ob_metaflow-2.18.3.2.dist-info/RECORD,,
{ob_metaflow-2.18.2.1.data → ob_metaflow-2.18.3.2.data}/data/share/metaflow/devtools/Makefile
RENAMED
File without changes

{ob_metaflow-2.18.2.1.data → ob_metaflow-2.18.3.2.data}/data/share/metaflow/devtools/Tiltfile
RENAMED
File without changes

{ob_metaflow-2.18.2.1.data → ob_metaflow-2.18.3.2.data}/data/share/metaflow/devtools/pick_services.sh
RENAMED
File without changes

{ob_metaflow-2.18.2.1.dist-info → ob_metaflow-2.18.3.2.dist-info}/WHEEL
RENAMED
File without changes

{ob_metaflow-2.18.2.1.dist-info → ob_metaflow-2.18.3.2.dist-info}/entry_points.txt
RENAMED
File without changes

{ob_metaflow-2.18.2.1.dist-info → ob_metaflow-2.18.3.2.dist-info}/licenses/LICENSE
RENAMED
File without changes

{ob_metaflow-2.18.2.1.dist-info → ob_metaflow-2.18.3.2.dist-info}/top_level.txt
RENAMED
File without changes