ob-metaflow 2.18.2.1__py2.py3-none-any.whl → 2.18.3.1__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


metaflow/client/core.py CHANGED
@@ -303,7 +303,7 @@ class MetaflowObject(object):
         # distinguish between "attempt will happen" and "no such
         # attempt exists".

-        if pathspec:
+        if pathspec and _object is None:
             ids = pathspec.split("/")

             if self._NAME == "flow" and len(ids) != 1:

metaflow/metaflow_config.py CHANGED
@@ -345,6 +345,8 @@ SFN_S3_DISTRIBUTED_MAP_OUTPUT_PATH = from_conf(
         else None
     ),
 )
+# Toggle for step command being part of the Step Function payload, or if it should be offloaded to S3
+SFN_COMPRESS_STATE_MACHINE = from_conf("SFN_COMPRESS_STATE_MACHINE", False)
 ###
 # Kubernetes configuration
 ###
@@ -410,6 +412,9 @@ ARGO_EVENTS_INTERNAL_WEBHOOK_URL = from_conf(
     "ARGO_EVENTS_INTERNAL_WEBHOOK_URL", ARGO_EVENTS_WEBHOOK_URL
 )
 ARGO_EVENTS_WEBHOOK_AUTH = from_conf("ARGO_EVENTS_WEBHOOK_AUTH", "none")
+ARGO_EVENTS_SENSOR_NAMESPACE = from_conf(
+    "ARGO_EVENTS_SENSOR_NAMESPACE", KUBERNETES_NAMESPACE
+)

 ARGO_WORKFLOWS_UI_URL = from_conf("ARGO_WORKFLOWS_UI_URL")

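Both new configuration values are read through from_conf, so they follow the usual Metaflow configuration precedence. A minimal sketch, assuming the standard convention that from_conf values can also be set via METAFLOW_-prefixed environment variables (the "argo-events" namespace is only an example):

```python
import os

# Offload each step command to S3 instead of embedding it in the Step Functions
# state machine (see the Batch and Step Functions changes further down).
os.environ["METAFLOW_SFN_COMPRESS_STATE_MACHINE"] = "true"

# Deploy Argo Events sensors into a dedicated namespace; when unset, the value
# falls back to KUBERNETES_NAMESPACE.
os.environ["METAFLOW_ARGO_EVENTS_SENSOR_NAMESPACE"] = "argo-events"
```
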
metaflow/plugins/argo/argo_client.py CHANGED
@@ -1,5 +1,6 @@
 import json

+from metaflow.metaflow_config import ARGO_EVENTS_SENSOR_NAMESPACE
 from metaflow.exception import MetaflowException
 from metaflow.plugins.kubernetes.kubernetes_client import KubernetesClient

@@ -377,12 +378,15 @@ class ArgoClient(object):
                 json.loads(e.body)["message"] if e.body is not None else e.reason
             )

-    def register_sensor(self, name, sensor=None):
+    def register_sensor(
+        self, name, sensor=None, sensor_namespace=ARGO_EVENTS_SENSOR_NAMESPACE
+    ):
         if sensor is None:
             sensor = {}
         # Unfortunately, Kubernetes client does not handle optimistic
         # concurrency control by itself unlike kubectl
         client = self._client.get()
+
         if not sensor:
             sensor["metadata"] = {}

@@ -392,7 +396,7 @@ class ArgoClient(object):
             ] = client.CustomObjectsApi().get_namespaced_custom_object(
                 group=self._group,
                 version=self._version,
-                namespace=self._namespace,
+                namespace=sensor_namespace,
                 plural="sensors",
                 name=name,
             )[
@@ -407,7 +411,7 @@ class ArgoClient(object):
             return client.CustomObjectsApi().create_namespaced_custom_object(
                 group=self._group,
                 version=self._version,
-                namespace=self._namespace,
+                namespace=sensor_namespace,
                 plural="sensors",
                 body=sensor,
             )
@@ -425,7 +429,7 @@ class ArgoClient(object):
             return client.CustomObjectsApi().replace_namespaced_custom_object(
                 group=self._group,
                 version=self._version,
-                namespace=self._namespace,
+                namespace=sensor_namespace,
                 plural="sensors",
                 body=sensor,
                 name=name,
@@ -435,7 +439,7 @@ class ArgoClient(object):
                 json.loads(e.body)["message"] if e.body is not None else e.reason
             )

-    def delete_sensor(self, name):
+    def delete_sensor(self, name, sensor_namespace):
         """
         Issues an API call for deleting a sensor

@@ -447,7 +451,7 @@ class ArgoClient(object):
             return client.CustomObjectsApi().delete_namespaced_custom_object(
                 group=self._group,
                 version=self._version,
-                namespace=self._namespace,
+                namespace=sensor_namespace,
                 plural="sensors",
                 name=name,
             )
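The sensor namespace is now passed explicitly instead of being taken from the client's own namespace. A minimal sketch of the updated internal calls, assuming an "argo-events" target namespace and a placeholder sensor body (both are illustrative; these methods talk directly to the Kubernetes custom-objects API):

```python
from metaflow.plugins.argo.argo_client import ArgoClient

client = ArgoClient(namespace="default")

# register_sensor() now takes the namespace the Sensor object should live in,
# defaulting to ARGO_EVENTS_SENSOR_NAMESPACE from metaflow_config.
client.register_sensor(
    "myflow-sensor",
    sensor={"metadata": {}, "spec": {}},
    sensor_namespace="argo-events",
)

# delete_sensor() now requires the namespace as well, since the sensor may not
# live next to the workflow template anymore.
client.delete_sensor("myflow-sensor", "argo-events")
```
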
metaflow/plugins/argo/argo_workflows.py CHANGED
@@ -19,6 +19,7 @@ from metaflow.metaflow_config import (
     ARGO_EVENTS_EVENT_BUS,
     ARGO_EVENTS_EVENT_SOURCE,
     ARGO_EVENTS_INTERNAL_WEBHOOK_URL,
+    ARGO_EVENTS_SENSOR_NAMESPACE,
     ARGO_EVENTS_SERVICE_ACCOUNT,
     ARGO_EVENTS_WEBHOOK_AUTH,
     ARGO_WORKFLOWS_CAPTURE_ERROR_SCRIPT,
@@ -74,6 +75,10 @@ class ArgoWorkflowsException(MetaflowException):
     headline = "Argo Workflows error"


+class ArgoWorkflowsSensorCleanupException(MetaflowException):
+    headline = "Argo Workflows sensor clean up error"
+
+
 class ArgoWorkflowsSchedulingException(MetaflowException):
     headline = "Argo Workflows scheduling error"

@@ -195,6 +200,7 @@ class ArgoWorkflows(object):
         return str(self._workflow_template)

     def deploy(self):
+        self.cleanup_previous_sensors()
         try:
             # Register workflow template.
             ArgoClient(namespace=KUBERNETES_NAMESPACE).register_workflow_template(
@@ -203,6 +209,37 @@ class ArgoWorkflows(object):
         except Exception as e:
             raise ArgoWorkflowsException(str(e))

+    def cleanup_previous_sensors(self):
+        try:
+            client = ArgoClient(namespace=KUBERNETES_NAMESPACE)
+            # Check for existing deployment and do cleanup
+            old_template = client.get_workflow_template(self.name)
+            if not old_template:
+                return None
+            # Clean up old sensors
+            old_sensor_namespace = old_template["metadata"]["annotations"].get(
+                "metaflow/sensor_namespace"
+            )
+
+            if old_sensor_namespace is None:
+                # This workflow was created before sensor annotations
+                # and may have a sensor in the default namespace
+                # we will delete it and it'll get recreated if need be
+                old_sensor_name = ArgoWorkflows._sensor_name(self.name)
+                client.delete_sensor(old_sensor_name, client._namespace)
+            else:
+                # delete old sensor only if it was somewhere else, otherwise it'll get replaced
+                old_sensor_name = old_template["metadata"]["annotations"][
+                    "metaflow/sensor_name"
+                ]
+                if (
+                    not self._sensor
+                    or old_sensor_namespace != ARGO_EVENTS_SENSOR_NAMESPACE
+                ):
+                    client.delete_sensor(old_sensor_name, old_sensor_namespace)
+        except Exception as e:
+            raise ArgoWorkflowsSensorCleanupException(str(e))
+
     @staticmethod
     def _sanitize(name):
         # Metaflow allows underscores in node names, which are disallowed in Argo
@@ -230,6 +267,17 @@ class ArgoWorkflows(object):
     def delete(name):
         client = ArgoClient(namespace=KUBERNETES_NAMESPACE)

+        workflow_template = client.get_workflow_template(name)
+        sensor_name = ArgoWorkflows._sensor_name(
+            workflow_template["metadata"]["annotations"].get(
+                "metaflow/sensor_name", name
+            )
+        )
+        # if below is missing then it was deployed before custom sensor namespaces
+        sensor_namespace = workflow_template["metadata"]["annotations"].get(
+            "metaflow/sensor_namespace", KUBERNETES_NAMESPACE
+        )
+
         # Always try to delete the schedule. Failure in deleting the schedule should not
         # be treated as an error, due to any of the following reasons
         # - there might not have been a schedule, or it was deleted by some other means
@@ -239,7 +287,7 @@ class ArgoWorkflows(object):

         # The workflow might have sensors attached to it, which consume actual resources.
         # Try to delete these as well.
-        sensor_deleted = client.delete_sensor(ArgoWorkflows._sensor_name(name))
+        sensor_deleted = client.delete_sensor(sensor_name, sensor_namespace)

         # After cleaning up related resources, delete the workflow in question.
         # Failure in deleting is treated as critical and will be made visible to the user
@@ -408,11 +456,10 @@ class ArgoWorkflows(object):
             # Metaflow will overwrite any existing sensor.
             sensor_name = ArgoWorkflows._sensor_name(self.name)
             if self._sensor:
-                argo_client.register_sensor(sensor_name, self._sensor.to_json())
-            else:
-                # Since sensors occupy real resources, delete existing sensor if needed
-                # Deregister sensors that might have existed before this deployment
-                argo_client.delete_sensor(sensor_name)
+                # The new sensor will go into the sensor namespace specified
+                ArgoClient(namespace=ARGO_EVENTS_SENSOR_NAMESPACE).register_sensor(
+                    sensor_name, self._sensor.to_json(), ARGO_EVENTS_SENSOR_NAMESPACE
+                )
         except Exception as e:
             raise ArgoWorkflowsSchedulingException(str(e))

@@ -739,6 +786,7 @@ class ArgoWorkflows(object):
         # references to them within the DAGTask.

         annotations = {}
+
         if self._schedule is not None:
             # timezone is an optional field and json dumps on None will result in null
             # hence configuring it to an empty string
@@ -761,7 +809,9 @@ class ArgoWorkflows(object):
                         {key: trigger.get(key) for key in ["name", "type"]}
                         for trigger in self.triggers
                     ]
-                )
+                ),
+                "metaflow/sensor_name": ArgoWorkflows._sensor_name(self.name),
+                "metaflow/sensor_namespace": ARGO_EVENTS_SENSOR_NAMESPACE,
             }
         )
         if self.notify_on_error:
@@ -940,7 +990,7 @@ class ArgoWorkflows(object):
         node_conditional_parents = {}
         node_conditional_branches = {}

-        def _visit(node, seen, conditional_branch, conditional_parents=None):
+        def _visit(node, conditional_branch, conditional_parents=None):
             if not node.type == "split-switch" and not (
                 conditional_branch and conditional_parents
             ):
@@ -949,7 +999,10 @@ class ArgoWorkflows(object):

             if node.type == "split-switch":
                 conditional_branch = conditional_branch + [node.name]
-                node_conditional_branches[node.name] = conditional_branch
+                c_br = node_conditional_branches.get(node.name, [])
+                node_conditional_branches[node.name] = c_br + [
+                    b for b in conditional_branch if b not in c_br
+                ]

                 conditional_parents = (
                     [node.name]
@@ -967,21 +1020,36 @@ class ArgoWorkflows(object):
             if conditional_parents and not node.type == "split-switch":
                 node_conditional_parents[node.name] = conditional_parents
                 conditional_branch = conditional_branch + [node.name]
-                node_conditional_branches[node.name] = conditional_branch
+                c_br = node_conditional_branches.get(node.name, [])
+                node_conditional_branches[node.name] = c_br + [
+                    b for b in conditional_branch if b not in c_br
+                ]

                 self.conditional_nodes.add(node.name)

             if conditional_branch and conditional_parents:
                 for n in node.out_funcs:
                     child = self.graph[n]
-                    if n not in seen:
-                        _visit(
-                            child, seen + [n], conditional_branch, conditional_parents
-                        )
+                    if child.name == node.name:
+                        continue
+                    _visit(child, conditional_branch, conditional_parents)

         # First we visit all nodes to determine conditional parents and branches
         for n in self.graph:
-            _visit(n, [], [])
+            _visit(n, [])
+
+        # helper to clean up conditional info for all children of a node, until a new split-switch is encountered.
+        def _cleanup_conditional_status(node_name, seen):
+            if self.graph[node_name].type == "split-switch":
+                # stop recursive cleanup if we hit a new split-switch
+                return
+            if node_name in self.conditional_nodes:
+                self.conditional_nodes.remove(node_name)
+            node_conditional_parents[node_name] = []
+            node_conditional_branches[node_name] = []
+            for p in self.graph[node_name].out_funcs:
+                if p not in seen:
+                    _cleanup_conditional_status(p, seen + [p])

         # Then we traverse again in order to determine conditional join nodes, and matching conditional join info
         for node in self.graph:
@@ -1014,14 +1082,44 @@ class ArgoWorkflows(object):
                     last_conditional_split_nodes = self.graph[
                         last_split_switch
                     ].out_funcs
-                    # p needs to be in at least one conditional_branch for it to be closed.
-                    if all(
-                        any(
-                            p in node_conditional_branches.get(in_func, [])
-                            for in_func in conditional_in_funcs
+                    # NOTE: How do we define a conditional join step?
+                    # The idea here is that we check if the conditional branches(e.g. chains of conditional steps leading to) of all the in_funcs
+                    # manage to tick off every step name that follows a split-switch
+                    # For example, consider the following structure
+                    # switch_step -> A, B, C
+                    # A -> A2 -> A3 -> A4 -> B2
+                    # B -> B2 -> B3 -> C3
+                    # C -> C2 -> C3 -> end
+                    #
+                    # if we look at the in_funcs for C3, they are (C2, B3)
+                    # B3 closes off branches started by A and B
+                    # C3 closes off branches started by C
+                    # therefore C3 is a conditional join step for the 'switch_step'
+                    # NOTE: Then what about a skip step?
+                    # some switch cases might not introduce any distinct steps of their own, opting to instead skip ahead to a later common step.
+                    # Example:
+                    # switch_step -> A, B, C
+                    # A -> A1 -> B2 -> C
+                    # B -> B1 -> B2 -> C
+                    #
+                    # In this case, C is a skip step as it does not add any conditional branching of its own.
+                    # C is also a conditional join, as it closes all branches started by 'switch_step'
+
+                    closes_branches = all(
+                        (
+                            # branch_root_node_name needs to be in at least one conditional_branch for it to be closed.
+                            any(
+                                branch_root_node_name
+                                in node_conditional_branches.get(in_func, [])
+                                for in_func in conditional_in_funcs
+                            )
+                            # need to account for a switch case skipping completely, not having a conditional-branch of its own.
+                            if branch_root_node_name != node.name
+                            else True
                         )
-                        for p in last_conditional_split_nodes
-                    ):
+                        for branch_root_node_name in last_conditional_split_nodes
+                    )
+                    if closes_branches:
                         closed_conditional_parents.append(last_split_switch)

                         self.conditional_join_nodes.add(node.name)
@@ -1035,25 +1133,45 @@ class ArgoWorkflows(object):
                 for p in node_conditional_parents.get(node.name, [])
                 if p not in closed_conditional_parents
             ]:
-                if node.name in self.conditional_nodes:
-                    self.conditional_nodes.remove(node.name)
-                    node_conditional_parents[node.name] = []
-                for p in node.out_funcs:
-                    if p in self.conditional_nodes:
-                        self.conditional_nodes.remove(p)
-                        node_conditional_parents[p] = []
+                _cleanup_conditional_status(node.name, [])

     def _is_conditional_node(self, node):
         return node.name in self.conditional_nodes

+    def _is_conditional_skip_node(self, node):
+        return (
+            self._is_conditional_node(node)
+            and any(
+                self.graph[in_func].type == "split-switch" for in_func in node.in_funcs
+            )
+            and len(
+                [
+                    in_func
+                    for in_func in node.in_funcs
+                    if self._is_conditional_node(self.graph[in_func])
+                    or self.graph[in_func].type == "split-switch"
+                ]
+            )
+            > 1
+        )
+
     def _is_conditional_join_node(self, node):
         return node.name in self.conditional_join_nodes

+    def _many_in_funcs_all_conditional(self, node):
+        cond_in_funcs = [
+            in_func
+            for in_func in node.in_funcs
+            if self._is_conditional_node(self.graph[in_func])
+        ]
+        return len(cond_in_funcs) > 1 and len(cond_in_funcs) == len(node.in_funcs)
+
     def _is_recursive_node(self, node):
         return node.name in self.recursive_nodes

     def _matching_conditional_join(self, node):
-        return self.matching_conditional_join_dict.get(node.name, None)
+        # If no earlier conditional join step is found during parsing, then 'end' is always one.
+        return self.matching_conditional_join_dict.get(node.name, "end")

     # Visit every node and yield the uber DAGTemplate(s).
     def _dag_templates(self):
@@ -1233,12 +1351,24 @@
                     "%s.Succeeded" % self._sanitize(in_func)
                     for in_func in node.in_funcs
                     if self._is_conditional_node(self.graph[in_func])
+                    or self.graph[in_func].type == "split-switch"
                 ]
                 required_deps = [
                     "%s.Succeeded" % self._sanitize(in_func)
                     for in_func in node.in_funcs
                     if not self._is_conditional_node(self.graph[in_func])
+                    and self.graph[in_func].type != "split-switch"
                 ]
+                if self._is_conditional_skip_node(
+                    node
+                ) or self._many_in_funcs_all_conditional(node):
+                    # skip nodes need unique condition handling
+                    conditional_deps = [
+                        "%s.Succeeded" % self._sanitize(in_func)
+                        for in_func in node.in_funcs
+                    ]
+                    required_deps = []
+
                 both_conditions = required_deps and conditional_deps

                 depends_str = "{required}{_and}{conditional}".format(
@@ -1256,16 +1386,46 @@
                 )

                 # Add conditional if this is the first step in a conditional branch
+                switch_in_funcs = [
+                    in_func
+                    for in_func in node.in_funcs
+                    if self.graph[in_func].type == "split-switch"
+                ]
                 if (
                     self._is_conditional_node(node)
-                    and self.graph[node.in_funcs[0]].type == "split-switch"
-                ):
-                    in_func = node.in_funcs[0]
-                    dag_task.when(
-                        "{{tasks.%s.outputs.parameters.switch-step}}==%s"
-                        % (self._sanitize(in_func), node.name)
+                    or self._is_conditional_skip_node(node)
+                    or self._is_conditional_join_node(node)
+                ) and switch_in_funcs:
+                    conditional_when = "||".join(
+                        [
+                            "{{tasks.%s.outputs.parameters.switch-step}}==%s"
+                            % (self._sanitize(switch_in_func), node.name)
+                            for switch_in_func in switch_in_funcs
+                        ]
                     )

+                    non_switch_in_funcs = [
+                        in_func
+                        for in_func in node.in_funcs
+                        if in_func not in switch_in_funcs
+                    ]
+                    status_when = ""
+                    if non_switch_in_funcs:
+                        status_when = "||".join(
+                            [
+                                "{{tasks.%s.status}}==Succeeded"
+                                % self._sanitize(in_func)
+                                for in_func in non_switch_in_funcs
+                            ]
+                        )
+
+                    total_when = (
+                        f"({status_when}) || ({conditional_when})"
+                        if status_when
+                        else conditional_when
+                    )
+                    dag_task.when(total_when)
+
                 dag_tasks.append(dag_task)
                 # End the workflow if we have reached the end of the flow
                 if node.type == "end":
@@ -1708,7 +1868,11 @@
                 input_paths_expr = (
                     "export INPUT_PATHS={{inputs.parameters.input-paths}}"
                 )
-                if self._is_conditional_join_node(node):
+                if (
+                    self._is_conditional_join_node(node)
+                    or self._many_in_funcs_all_conditional(node)
+                    or self._is_conditional_skip_node(node)
+                ):
                     # NOTE: Argo template expressions that fail to resolve, output the expression itself as a value.
                     # With conditional steps, some of the input-paths are therefore 'broken' due to containing a nil expression
                     # e.g. "{{ tasks['A'].outputs.parameters.task-id }}" when task A never executed.
@@ -1888,20 +2052,33 @@
             )
             input_paths = "%s/_parameters/%s" % (run_id, task_id_params)
         # Only for static joins and conditional_joins
-        elif self._is_conditional_join_node(node) and not (
+        elif (
+            self._is_conditional_join_node(node)
+            or self._many_in_funcs_all_conditional(node)
+            or self._is_conditional_skip_node(node)
+        ) and not (
             node.type == "join"
             and self.graph[node.split_parents[-1]].type == "foreach"
         ):
+            # we need to pass in the set of conditional in_funcs to the pathspec generating script as in the case of split-switch skipping cases,
+            # non-conditional input-paths need to be ignored in favour of conditional ones when they have executed.
+            skippable_input_steps = ",".join(
+                [
+                    in_func
+                    for in_func in node.in_funcs
+                    if self.graph[in_func].type == "split-switch"
+                ]
+            )
             input_paths = (
-                "$(python -m metaflow.plugins.argo.conditional_input_paths %s)"
-                % input_paths
+                "$(python -m metaflow.plugins.argo.conditional_input_paths %s %s)"
+                % (input_paths, skippable_input_steps)
             )
         elif (
             node.type == "join"
             and self.graph[node.split_parents[-1]].type == "foreach"
         ):
             # foreach-joins straight out of conditional branches are not yet supported
-            if self._is_conditional_join_node(node):
+            if self._is_conditional_join_node(node) and len(node.in_funcs) > 1:
                 raise ArgoWorkflowsException(
                     "Conditional steps inside a foreach that transition directly into a join step are not currently supported.\n"
                     "As a workaround, add a common step after the conditional steps %s "
@@ -3572,7 +3749,7 @@
             # Sensor metadata.
             ObjectMeta()
             .name(ArgoWorkflows._sensor_name(self.name))
-            .namespace(KUBERNETES_NAMESPACE)
+            .namespace(ARGO_EVENTS_SENSOR_NAMESPACE)
             .labels(self._base_labels)
             .label("app.kubernetes.io/name", "metaflow-sensor")
             .annotations(self._base_annotations)

metaflow/plugins/argo/conditional_input_paths.py CHANGED
@@ -4,7 +4,7 @@ from metaflow.util import decompress_list, compress_list
 import base64


-def generate_input_paths(input_paths):
+def generate_input_paths(input_paths, skippable_steps):
     # => run_id/step/:foo,bar
     # input_paths are base64 encoded due to Argo shenanigans
     decoded = base64.b64decode(input_paths).decode("utf-8")
@@ -13,9 +13,23 @@ def generate_input_paths(input_paths):
     # some of the paths are going to be malformed due to never having executed per conditional.
     # strip these out of the list.

+    # all pathspecs of leading steps that executed.
     trimmed = [path for path in paths if not "{{" in path]
-    return compress_list(trimmed, zlibmin=inf)
+
+    # pathspecs of leading steps that are conditional, and should be used instead of non-conditional ones
+    # e.g. the case of skipping switches: start -> case_step -> conditional_a or end
+    conditionals = [
+        path for path in trimmed if not any(step in path for step in skippable_steps)
+    ]
+    pathspecs_to_use = conditionals if conditionals else trimmed
+    return compress_list(pathspecs_to_use, zlibmin=inf)


 if __name__ == "__main__":
-    print(generate_input_paths(sys.argv[1]))
+    input_paths = sys.argv[1]
+    try:
+        skippable_steps = sys.argv[2].split(",")
+    except IndexError:
+        skippable_steps = []
+
+    print(generate_input_paths(input_paths, skippable_steps))
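The helper is now invoked as `python -m metaflow.plugins.argo.conditional_input_paths <input_paths> [<skippable_steps>]`, where the optional second argument is a comma-separated list of split-switch step names. A standalone sketch that mirrors (rather than imports) the new filtering rule, with hypothetical pathspecs:

```python
def pick_input_paths(resolved_paths, skippable_steps):
    # Paths whose Argo expression never resolved still contain "{{ ... }}".
    trimmed = [p for p in resolved_paths if "{{" not in p]
    # Prefer pathspecs produced by conditional branches over the ones coming
    # straight from a skippable split-switch step.
    conditionals = [
        p for p in trimmed if not any(step in p for step in skippable_steps)
    ]
    return conditionals if conditionals else trimmed

# The executed conditional branch wins over the switch step itself.
print(pick_input_paths(
    ["argo-run-1/case_step/t-100", "argo-run-1/conditional_a/t-101"],
    skippable_steps=["case_step"],
))  # -> ['argo-run-1/conditional_a/t-101']
```
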
metaflow/plugins/aws/batch/batch.py CHANGED
@@ -53,9 +53,10 @@ class BatchKilledException(MetaflowException):


 class Batch(object):
-    def __init__(self, metadata, environment):
+    def __init__(self, metadata, environment, flow_datastore=None):
         self.metadata = metadata
         self.environment = environment
+        self.flow_datastore = flow_datastore
         self._client = BatchClient()
         atexit.register(lambda: self.job.kill() if hasattr(self, "job") else None)

@@ -67,6 +68,7 @@ class Batch(object):
         step_name,
         step_cmds,
         task_spec,
+        offload_command_to_s3,
     ):
         mflog_expr = export_mflog_env_vars(
             datastore_type="s3",
@@ -104,7 +106,43 @@ class Batch(object):
         # We lose the last logs in this scenario (although they are visible
         # still through AWS CloudWatch console).
         cmd_str += "c=$?; %s; exit $c" % BASH_SAVE_LOGS
-        return shlex.split('bash -c "%s"' % cmd_str)
+        command = shlex.split('bash -c "%s"' % cmd_str)
+
+        if not offload_command_to_s3:
+            return command
+
+        # If S3 upload is enabled, we need to modify the command after it's created
+        if self.flow_datastore is None:
+            raise MetaflowException(
+                "Can not offload Batch command to S3 without a datastore configured."
+            )
+
+        from metaflow.plugins.aws.aws_utils import parse_s3_full_path
+
+        # Get the command that was created
+        # Upload the command to S3 during deployment
+        try:
+            command_bytes = cmd_str.encode("utf-8")
+            result_paths = self.flow_datastore.save_data([command_bytes], len_hint=1)
+            s3_path, _key = result_paths[0]
+
+            bucket, s3_object = parse_s3_full_path(s3_path)
+            download_script = "{python} -c '{script}'".format(
+                python=self.environment._python(),
+                script='import boto3, os; ep=os.getenv(\\"METAFLOW_S3_ENDPOINT_URL\\"); boto3.client(\\"s3\\", **({\\"endpoint_url\\":ep} if ep else {})).download_file(\\"%s\\", \\"%s\\", \\"/tmp/step_command.sh\\")'
                % (bucket, s3_object),
+            )
+            download_cmd = (
+                f"{self.environment._get_install_dependencies_cmd('s3')} && "  # required for boto3 due to the original dependencies cmd getting packaged, and not being downloaded in time.
+                f"{download_script} && "
+                f"chmod +x /tmp/step_command.sh && "
+                f"bash /tmp/step_command.sh"
+            )
+            new_cmd = shlex.split('bash -c "%s"' % download_cmd)
+            return new_cmd
+        except Exception as e:
+            print(f"Warning: Failed to upload command to S3: {e}")
+            print("Falling back to inline command")

     def _search_jobs(self, flow_name, run_id, user):
         if user is None:
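In short, when offloading is enabled the full step command is stored as a blob in the flow's S3 datastore and the container only receives a small bootstrap command. A condensed sketch of that shape, assuming an S3 flow datastore (save_data and parse_s3_full_path are used as in the hunk above; the wrapper string is simplified):

```python
from metaflow.plugins.aws.aws_utils import parse_s3_full_path

def build_offloaded_command(flow_datastore, cmd_str):
    # save_data() returns (uri, key) pairs for each stored blob.
    (s3_path, _key), = flow_datastore.save_data([cmd_str.encode("utf-8")], len_hint=1)
    bucket, s3_object = parse_s3_full_path(s3_path)
    # The container downloads the saved command and executes it, which keeps the
    # rendered job definition (and the Step Functions state machine) small.
    download = (
        'python -c "import boto3; boto3.client(\'s3\')'
        '.download_file(\'%s\', \'%s\', \'/tmp/step_command.sh\')"' % (bucket, s3_object)
    )
    return "%s && bash /tmp/step_command.sh" % download
```
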
@@ -207,6 +245,7 @@ class Batch(object):
         ephemeral_storage=None,
         log_driver=None,
         log_options=None,
+        offload_command_to_s3=False,
     ):
         job_name = self._job_name(
             attrs.get("metaflow.user"),
@@ -228,6 +267,7 @@ class Batch(object):
                     step_name,
                     [step_cli],
                     task_spec,
+                    offload_command_to_s3,
                 )
             )
             .image(image)

metaflow/plugins/aws/step_functions/step_functions.py CHANGED
@@ -57,6 +57,7 @@ class StepFunctions(object):
         workflow_timeout=None,
         is_project=False,
         use_distributed_map=False,
+        compress_state_machine=False,
     ):
         self.name = name
         self.graph = graph
@@ -81,6 +82,9 @@ class StepFunctions(object):
         # https://aws.amazon.com/blogs/aws/step-functions-distributed-map-a-serverless-solution-for-large-scale-parallel-data-processing/
         self.use_distributed_map = use_distributed_map

+        # S3 command upload configuration
+        self.compress_state_machine = compress_state_machine
+
         self._client = StepFunctionsClient()
         self._workflow = self._compile()
         self._cron = self._cron()
@@ -858,7 +862,7 @@ class StepFunctions(object):
         # merge batch tags supplied through step-fuctions CLI and ones defined in decorator
         batch_tags = {**self.aws_batch_tags, **resources["aws_batch_tags"]}
         return (
-            Batch(self.metadata, self.environment)
+            Batch(self.metadata, self.environment, self.flow_datastore)
             .create_job(
                 step_name=node.name,
                 step_cli=self._step_cli(
@@ -894,6 +898,7 @@ class StepFunctions(object):
                 ephemeral_storage=resources["ephemeral_storage"],
                 log_driver=resources["log_driver"],
                 log_options=resources["log_options"],
+                offload_command_to_s3=self.compress_state_machine,
             )
             .attempts(total_retries + 1)
         )

metaflow/plugins/aws/step_functions/step_functions_cli.py CHANGED
@@ -10,6 +10,7 @@ from metaflow.metaflow_config import (
     FEAT_ALWAYS_UPLOAD_CODE_PACKAGE,
     SERVICE_VERSION_CHECK,
     SFN_STATE_MACHINE_PREFIX,
+    SFN_COMPRESS_STATE_MACHINE,
     UI_URL,
 )
 from metaflow.package import MetaflowPackage
@@ -140,6 +141,12 @@ def step_functions(obj, name=None):
     help="Use AWS Step Functions Distributed Map instead of Inline Map for "
     "defining foreach tasks in Amazon State Language.",
 )
+@click.option(
+    "--compress-state-machine/--no-compress-state-machine",
+    is_flag=True,
+    default=SFN_COMPRESS_STATE_MACHINE,
+    help="Compress AWS Step Functions state machine to fit within the 8K limit.",
+)
 @click.option(
     "--deployer-attribute-file",
     default=None,
@@ -162,6 +169,7 @@ def create(
     workflow_timeout=None,
     log_execution_history=False,
     use_distributed_map=False,
+    compress_state_machine=False,
     deployer_attribute_file=None,
 ):
     for node in obj.graph:
@@ -212,6 +220,7 @@ def create(
         workflow_timeout,
         obj.is_project,
         use_distributed_map,
+        compress_state_machine,
     )

     if only_json:
@@ -332,6 +341,7 @@ def make_flow(
     workflow_timeout,
     is_project,
     use_distributed_map,
+    compress_state_machine=False,
 ):
     if obj.flow_datastore.TYPE != "s3":
         raise MetaflowException("AWS Step Functions requires --datastore=s3.")
@@ -390,6 +400,7 @@ def make_flow(
         workflow_timeout=workflow_timeout,
         is_project=is_project,
         use_distributed_map=use_distributed_map,
+        compress_state_machine=compress_state_machine,
     )


metaflow/plugins/aws/step_functions/step_functions_deployer.py CHANGED
@@ -76,6 +76,9 @@ class StepFunctionsDeployer(DeployerImpl):
     use_distributed_map : bool, optional, default False
         Use AWS Step Functions Distributed Map instead of Inline Map for defining foreach
         tasks in Amazon State Language.
+    compress_state_machine : bool, optional, default False
+        Compress AWS Step Functions state machine to fit within the 8K limit.
+
     deployer_attribute_file : str, optional, default None
         Write the workflow name to the specified file. Used internally for Metaflow's Deployer API.

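The same option is documented for Metaflow's programmatic Deployer API through StepFunctionsDeployer. A minimal sketch, assuming the keyword maps onto the --compress-state-machine flag added to the create command above (the flow file name is hypothetical):

```python
from metaflow import Deployer

deployer = Deployer("myflow.py")
# Deploy the flow with the step commands offloaded to S3 so the generated
# state machine stays under the Step Functions definition size limit.
deployed_flow = deployer.step_functions().create(compress_state_machine=True)
```
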
metaflow/version.py CHANGED
@@ -1 +1 @@
-metaflow_version = "2.18.2.1"
+metaflow_version = "2.18.3.1"

ob_metaflow-2.18.3.1.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ob-metaflow
-Version: 2.18.2.1
+Version: 2.18.3.1
 Summary: Metaflow: More AI and ML, Less Engineering
 Author: Netflix, Outerbounds & the Metaflow Community
 Author-email: help@outerbounds.co
@@ -12,7 +12,7 @@ Requires-Dist: boto3
 Requires-Dist: pylint
 Requires-Dist: kubernetes
 Provides-Extra: stubs
-Requires-Dist: metaflow-stubs==2.18.2.1; extra == "stubs"
+Requires-Dist: metaflow-stubs==2.18.3.1; extra == "stubs"
 Dynamic: author
 Dynamic: author-email
 Dynamic: description

ob_metaflow-2.18.3.1.dist-info/RECORD CHANGED
@@ -16,7 +16,7 @@ metaflow/includefile.py,sha256=RtISGl1V48qjkJBakUZ9yPpHV102h7pOIFiKP8PLHpc,20927
 metaflow/integrations.py,sha256=LlsaoePRg03DjENnmLxZDYto3NwWc9z_PtU6nJxLldg,1480
 metaflow/lint.py,sha256=A2NdUq_MnQal_RUCMC8ZOSR0VYZGyi2mSgwPQB0UzQo,15343
 metaflow/meta_files.py,sha256=vlgJHI8GJUKzXoxdrVoH8yyCF5bhFgwYemUgnyd1wgM,342
-metaflow/metaflow_config.py,sha256=n-xYyC3yjwaW1pcSHdQG2vmEa9XZJPksB_9mMPep0mw,24437
+metaflow/metaflow_config.py,sha256=0gkNCCKbX_mMRg0sjy1sCXaymCHKBtxeQpq4X8qahuM,24716
 metaflow/metaflow_config_funcs.py,sha256=5GlvoafV6SxykwfL8D12WXSfwjBN_NsyuKE_Q3gjGVE,6738
 metaflow/metaflow_current.py,sha256=pfkXmkyHeMJhxIs6HBJNBEaBDpcl5kz9Wx5mW6F_3qo,7164
 metaflow/metaflow_environment.py,sha256=20PIhA5R_rJneNj8f8UaWRmznGRPcEd6hP7goj_rc1s,11477
@@ -36,7 +36,7 @@ metaflow/tuple_util.py,sha256=_G5YIEhuugwJ_f6rrZoelMFak3DqAR2tt_5CapS1XTY,830
 metaflow/unbounded_foreach.py,sha256=p184WMbrMJ3xKYHwewj27ZhRUsSj_kw1jlye5gA9xJk,387
 metaflow/util.py,sha256=g2SOU_CRzJLgDM_UGF9QDMANMAIHAsDRXE6S76_YzsY,14594
 metaflow/vendor.py,sha256=A82CGHfStZGDP5pQ5XzRjFkbN1ZC-vFmghXIrzMDDNg,5868
-metaflow/version.py,sha256=jnPRiLyNr_eesPDDTvFxMFxkWrVAs_PI4cupoZIOILA,30
+metaflow/version.py,sha256=52JCZ3dhY050c61YzJ42Y1Fbq9k3NYtXuASQhIRcmjM,30
 metaflow/_vendor/__init__.py,sha256=y_CiwUD3l4eAKvTVDZeqgVujMy31cAM1qjAB-HfI-9s,353
 metaflow/_vendor/typing_extensions.py,sha256=q9zxWa6p6CzF1zZvSkygSlklduHf_b3K7MCxGz7MJRc,134519
 metaflow/_vendor/zipp.py,sha256=ajztOH-9I7KA_4wqDYygtHa6xUBVZgFpmZ8FE74HHHI,8425
@@ -157,7 +157,7 @@ metaflow/cli_components/run_cmds.py,sha256=_xk2asy3SkqsJfZVhbgYSJ2rkkJe7cvLik6b0
 metaflow/cli_components/step_cmd.py,sha256=zGJgTv7wxrv34nWDi__CHaC2eS6kItR95EdVGJX803w,4766
 metaflow/cli_components/utils.py,sha256=gpoDociadjnJD7MuiJup_MDR02ZJjjleejr0jPBu29c,6057
 metaflow/client/__init__.py,sha256=1GtQB4Y_CBkzaxg32L1syNQSlfj762wmLrfrDxGi1b8,226
-metaflow/client/core.py,sha256=tj2PuqQt1RXg8GuyLQ_WRuxYEvTaDyi_lW18YGwWvAQ,83714
+metaflow/client/core.py,sha256=6h4oohNXZkP2QC0M4dKdrwz5Zx5s6j1_DIWB8VYmbN0,83734
 metaflow/client/filecache.py,sha256=Wy0yhhCqC1JZgebqi7z52GCwXYnkAqMZHTtxThvwBgM,15229
 metaflow/cmd/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
 metaflow/cmd/configure_cmd.py,sha256=o-DKnUf2FBo_HiMVyoyzQaGBSMtpbEPEdFTQZ0hkU-k,33396
@@ -228,15 +228,15 @@ metaflow/plugins/airflow/sensors/base_sensor.py,sha256=s-OQBfPWZ_T3wn96Ua59CCEj1
 metaflow/plugins/airflow/sensors/external_task_sensor.py,sha256=zhYlrZnXT20KW8-fVk0fCNtTyNiKJB5PMVASacu30r0,6034
 metaflow/plugins/airflow/sensors/s3_sensor.py,sha256=iDReG-7FKnumrtQg-HY6cCUAAqNA90nARrjjjEEk_x4,3275
 metaflow/plugins/argo/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-metaflow/plugins/argo/argo_client.py,sha256=jLz0FjCTBvFLZt-8lZcMQhDcInhgEcGdPrU2Gvh67zA,17080
+metaflow/plugins/argo/argo_client.py,sha256=oT4ZrCyE7CYEbqNN0SfoZfSHd5fYW9XtuOrQEiUd1co,17230
 metaflow/plugins/argo/argo_events.py,sha256=_C1KWztVqgi3zuH57pInaE9OzABc2NnncC-zdwOMZ-w,5909
-metaflow/plugins/argo/argo_workflows.py,sha256=wE5uL8nMNzguJSQmdaldmv9XSV4_RHkhoTUcN14YmN4,209779
+metaflow/plugins/argo/argo_workflows.py,sha256=5K3AdwTGr7Goqbk2S3SZJDH7PRvnDXFb0BOth21lnX0,218144
 metaflow/plugins/argo/argo_workflows_cli.py,sha256=-blfZp-kAS8oWFTarfou9gRyE4QCnnJwa-0g8QuE0zk,52280
 metaflow/plugins/argo/argo_workflows_decorator.py,sha256=CLSjPqFTGucZ2_dSQGAYkoWWUZBQ9TCBXul4rxhDj3w,8282
 metaflow/plugins/argo/argo_workflows_deployer.py,sha256=6kHxEnYXJwzNCM9swI8-0AckxtPWqwhZLerYkX8fxUM,4444
 metaflow/plugins/argo/argo_workflows_deployer_objects.py,sha256=ydBE-lP42eNKvep36nQdUBPS3rQQErvoA7rCgyp5M6I,14949
 metaflow/plugins/argo/capture_error.py,sha256=Ys9dscGrTpW-ZCirLBU0gD9qBM0BjxyxGlUMKcwewQc,1852
-metaflow/plugins/argo/conditional_input_paths.py,sha256=kpzwyjXjO7PjeAJMapYX_ajUulFQJvCj-2DhH7OHzy0,645
+metaflow/plugins/argo/conditional_input_paths.py,sha256=Vtca74XbhnqAXgJJXKasLEa28jZbKBZPC5w4NAIOURc,1251
 metaflow/plugins/argo/exit_hooks.py,sha256=nh8IEkzAtQnbKVnh3N9CVnVKZB39Bjm3e0LFrACsLz8,6109
 metaflow/plugins/argo/generate_input_paths.py,sha256=loYsI6RFX9LlFsHb7Fe-mzlTTtRdySoOu7sYDy-uXK0,881
 metaflow/plugins/argo/jobset_input_paths.py,sha256=-h0E_e0w6FMiBUod9Rf_XOSCtZv_C0exacw4q1SfIfg,501
@@ -244,7 +244,7 @@ metaflow/plugins/aws/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hS
 metaflow/plugins/aws/aws_client.py,sha256=BTiLMXa1agjja-N73oWinaOZHs-lGPbfKJG8CqdRgaU,4287
 metaflow/plugins/aws/aws_utils.py,sha256=1RVMpmVECda2ztTGlG6oJ3LXbuot1uRnHgTL9DMlGjM,9319
 metaflow/plugins/aws/batch/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-metaflow/plugins/aws/batch/batch.py,sha256=sdhpQVG3QAWD6KlpZH1MRCQcdwO0S4D-qXxgRK2A4bE,18466
+metaflow/plugins/aws/batch/batch.py,sha256=ugQ4YOjEs_PS5VtilMF0u1AArAZJTZPHCrCoVHPSqHc,20395
 metaflow/plugins/aws/batch/batch_cli.py,sha256=yZAy6WqZPsCqxjMnRhnTellCjLo27tD_OcoBLHezH8E,12508
 metaflow/plugins/aws/batch/batch_client.py,sha256=J50RMEXeEXFe5RqNUM1HN22BuDQFYFVQ4FSMOK55VWY,28798
 metaflow/plugins/aws/batch/batch_decorator.py,sha256=0zAckLYpAwnqTB_U77u_mZC_mEZkMeM0BFposFTG16M,20110
@@ -256,11 +256,11 @@ metaflow/plugins/aws/step_functions/event_bridge_client.py,sha256=U9-tqKdih4KR-Z
 metaflow/plugins/aws/step_functions/production_token.py,sha256=rREx9djJzKYDiGhPCZ919pSpfrBCYuhSL5WlwnAojNM,1890
 metaflow/plugins/aws/step_functions/schedule_decorator.py,sha256=Ab1rW8O_no4HNZm4__iBmFDCDW0Z8-TgK4lnxHHA6HI,1940
 metaflow/plugins/aws/step_functions/set_batch_environment.py,sha256=ibiGWFHDjKcLfprH3OsX-g2M9lUsh6J-bp7v2cdLhD4,1294
-metaflow/plugins/aws/step_functions/step_functions.py,sha256=dAdJVo2UMsUe2StQak9VlUJK3-GtJ_50jlG8jk5QWLc,54110
-metaflow/plugins/aws/step_functions/step_functions_cli.py,sha256=MF3UPdZvRgVfZrcD4UCu0Fq-n-EUMM8m7RK57iUvOQs,27152
+metaflow/plugins/aws/step_functions/step_functions.py,sha256=jj21WBIvsNULnIjBDFwbn6h__GPRF1sxH6X-DjLGCY4,54340
+metaflow/plugins/aws/step_functions/step_functions_cli.py,sha256=dgCA3RbRfYf48wqzM_JJYiIH-eqcMjAXGb_jdXCfsp4,27556
 metaflow/plugins/aws/step_functions/step_functions_client.py,sha256=DKpNwAIWElvWjFANs5Ku3rgzjxFoqAD6k-EF8Xhkg3Q,4754
 metaflow/plugins/aws/step_functions/step_functions_decorator.py,sha256=jzDHYmgU_XvLffZDazR_1viow_1qQFblx9UKyjtoM_0,3788
-metaflow/plugins/aws/step_functions/step_functions_deployer.py,sha256=JKYtDhKivtXUWPklprZFzkqezh14loGDmk8mNk6QtpI,3714
+metaflow/plugins/aws/step_functions/step_functions_deployer.py,sha256=MOQ6H42szNHJw0ii3RJm7eyVGCmHL2j8kuZp7AuoiA8,3860
 metaflow/plugins/aws/step_functions/step_functions_deployer_objects.py,sha256=n7AEPs3uULXEuG3TVf2ZlTNq1LFd2n7x1IPVO2T5Ekk,8174
 metaflow/plugins/azure/__init__.py,sha256=GuuhTVC-zSdyAf79a1wiERMq0Zts7fwVT7t9fAf234A,100
 metaflow/plugins/azure/azure_credential.py,sha256=JmdGEbVzgxy8ucqnQDdTTI_atyMX9WSZUw3qYOo7RhE,2174
@@ -428,12 +428,12 @@ metaflow/user_decorators/mutable_flow.py,sha256=EywKTN3cnXPQF_s62wQaC4a4aH14j8oe
 metaflow/user_decorators/mutable_step.py,sha256=-BY0UDXf_RCAEnC5JlLzEXGdiw1KD9oSrSxS_SWaB9Y,16791
 metaflow/user_decorators/user_flow_decorator.py,sha256=2yDwZq9QGv9W-7kEuKwa8o4ZkTvuHJ5ESz7VVrGViAI,9890
 metaflow/user_decorators/user_step_decorator.py,sha256=4558NR8RJtN22OyTwCXO80bAMhMTaRGMoX12b1GMcPc,27232
-ob_metaflow-2.18.2.1.data/data/share/metaflow/devtools/Makefile,sha256=TT4TCq8ALSfqYyGqDPocN5oPcZe2FqoCZxmGO1LmyCc,13760
-ob_metaflow-2.18.2.1.data/data/share/metaflow/devtools/Tiltfile,sha256=Ty5p6AD3MwJAcAnOGv4yMz8fExAsnNQ11r8whK6uzzw,21381
-ob_metaflow-2.18.2.1.data/data/share/metaflow/devtools/pick_services.sh,sha256=DCnrMXwtApfx3B4S-YiZESMyAFHbXa3VuNL0MxPLyiE,2196
-ob_metaflow-2.18.2.1.dist-info/licenses/LICENSE,sha256=nl_Lt5v9VvJ-5lWJDT4ddKAG-VZ-2IaLmbzpgYDz2hU,11343
-ob_metaflow-2.18.2.1.dist-info/METADATA,sha256=DwRlgbxYaRWS3xO031TiRKrz0GpGj3JFryjy8u3K6Bw,5935
-ob_metaflow-2.18.2.1.dist-info/WHEEL,sha256=JNWh1Fm1UdwIQV075glCn4MVuCRs0sotJIq-J6rbxCU,109
-ob_metaflow-2.18.2.1.dist-info/entry_points.txt,sha256=RvEq8VFlgGe_FfqGOZi0D7ze1hLD0pAtXeNyGfzc_Yc,103
-ob_metaflow-2.18.2.1.dist-info/top_level.txt,sha256=v1pDHoWaSaKeuc5fKTRSfsXCKSdW1zvNVmvA-i0if3o,9
-ob_metaflow-2.18.2.1.dist-info/RECORD,,
+ob_metaflow-2.18.3.1.data/data/share/metaflow/devtools/Makefile,sha256=TT4TCq8ALSfqYyGqDPocN5oPcZe2FqoCZxmGO1LmyCc,13760
+ob_metaflow-2.18.3.1.data/data/share/metaflow/devtools/Tiltfile,sha256=Ty5p6AD3MwJAcAnOGv4yMz8fExAsnNQ11r8whK6uzzw,21381
+ob_metaflow-2.18.3.1.data/data/share/metaflow/devtools/pick_services.sh,sha256=DCnrMXwtApfx3B4S-YiZESMyAFHbXa3VuNL0MxPLyiE,2196
+ob_metaflow-2.18.3.1.dist-info/licenses/LICENSE,sha256=nl_Lt5v9VvJ-5lWJDT4ddKAG-VZ-2IaLmbzpgYDz2hU,11343
+ob_metaflow-2.18.3.1.dist-info/METADATA,sha256=6n3Bm88J5Jd25Q71xNIqB-BEXCBZ1uFgrxu44C90TB8,5935
+ob_metaflow-2.18.3.1.dist-info/WHEEL,sha256=JNWh1Fm1UdwIQV075glCn4MVuCRs0sotJIq-J6rbxCU,109
+ob_metaflow-2.18.3.1.dist-info/entry_points.txt,sha256=RvEq8VFlgGe_FfqGOZi0D7ze1hLD0pAtXeNyGfzc_Yc,103
+ob_metaflow-2.18.3.1.dist-info/top_level.txt,sha256=v1pDHoWaSaKeuc5fKTRSfsXCKSdW1zvNVmvA-i0if3o,9
+ob_metaflow-2.18.3.1.dist-info/RECORD,,