metaflow 2.11.2__py2.py3-none-any.whl → 2.11.3__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
metaflow/flowspec.py CHANGED
@@ -17,7 +17,7 @@ from .exception import (
  )
  from .graph import FlowGraph
  from .unbounded_foreach import UnboundedForeachInput
- from .metaflow_config import INCLUDE_FOREACH_STACK
+ from .metaflow_config import INCLUDE_FOREACH_STACK, MAXIMUM_FOREACH_VALUE_CHARS

  # For Python 3 compatibility
  try:
@@ -28,6 +28,8 @@ except NameError:

  from .datastore.inputs import Inputs

+ INTERNAL_ARTIFACTS_SET = set(["_foreach_values"])
+

  class InvalidNextException(MetaflowException):
      headline = "Invalid self.next() transition detected"
@@ -446,7 +448,9 @@ class FlowSpec(object):
          available_vars = (
              (var, sha)
              for var, sha in inp._datastore.items()
-             if (var not in exclude) and (not hasattr(self, var))
+             if (var not in exclude)
+             and (not hasattr(self, var))
+             and (var not in INTERNAL_ARTIFACTS_SET)
          )
          for var, sha in available_vars:
              _, previous_sha = to_merge.setdefault(var, (inp, sha))
@@ -529,7 +533,7 @@ class FlowSpec(object):
          )

          value = item if _is_primitive_type(item) else reprlib.Repr().repr(item)
-         return basestring(value)
+         return basestring(value)[:MAXIMUM_FOREACH_VALUE_CHARS]

      def next(self, *dsts: Callable[..., None], **kwargs) -> None:
          """

metaflow/metaflow_config.py CHANGED
@@ -205,6 +205,8 @@ DEFAULT_CONTAINER_REGISTRY = from_conf("DEFAULT_CONTAINER_REGISTRY")
  # Controls whether to include foreach stack information in metadata.
  # TODO(Darin, 05/01/24): Remove this flag once we are confident with this feature.
  INCLUDE_FOREACH_STACK = from_conf("INCLUDE_FOREACH_STACK", False)
+ # Maximum length of the foreach value string to be stored in each ForeachFrame.
+ MAXIMUM_FOREACH_VALUE_CHARS = from_conf("MAXIMUM_FOREACH_VALUE_CHARS", 30)

  ###
  # Organization customizations
@@ -268,7 +270,13 @@ SFN_STATE_MACHINE_PREFIX = from_conf("SFN_STATE_MACHINE_PREFIX")
  # machine execution logs. This needs to be available when using the
  # `step-functions create --log-execution-history` command.
  SFN_EXECUTION_LOG_GROUP_ARN = from_conf("SFN_EXECUTION_LOG_GROUP_ARN")
-
+ # Amazon S3 path for storing the results of AWS Step Functions Distributed Map
+ SFN_S3_DISTRIBUTED_MAP_OUTPUT_PATH = from_conf(
+     "SFN_S3_DISTRIBUTED_MAP_OUTPUT_PATH",
+     os.path.join(DATASTORE_SYSROOT_S3, "sfn_distributed_map_output")
+     if DATASTORE_SYSROOT_S3
+     else None,
+ )
  ###
  # Kubernetes configuration
  ###
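
For reference, the new config's default resolves under the S3 datastore root; a quick sketch with a hypothetical root value:

import os

DATASTORE_SYSROOT_S3 = "s3://my-bucket/metaflow"  # hypothetical
default = (
    os.path.join(DATASTORE_SYSROOT_S3, "sfn_distributed_map_output")
    if DATASTORE_SYSROOT_S3
    else None
)
print(default)  # s3://my-bucket/metaflow/sfn_distributed_map_output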

metaflow/plugins/argo/argo_workflows.py CHANGED
@@ -1540,7 +1540,6 @@ class ArgoWorkflows(object):

      # Return exit hook templates for workflow execution notifications.
      def _exit_hook_templates(self):
-         # TODO: Add details to slack message
          templates = []
          if self.notify_on_error:
              templates.append(self._slack_error_template())
@@ -1649,36 +1648,100 @@

          return links

+     def _get_slack_blocks(self, message):
+         """
+         Use Slack's Block Kit to add general information about the environment and
+         execution metadata, including a link to the UI and an optional message.
+         """
+         ui_link = "%s%s/argo-{{workflow.name}}" % (UI_URL, self.flow.name)
+         # fmt: off
+         if getattr(current, "project_name", None):
+             # Add @project metadata when available.
+             environment_details_block = {
+                 "type": "section",
+                 "text": {
+                     "type": "mrkdwn",
+                     "text": ":metaflow: Environment details"
+                 },
+                 "fields": [
+                     {
+                         "type": "mrkdwn",
+                         "text": "*Project:* %s" % current.project_name
+                     },
+                     {
+                         "type": "mrkdwn",
+                         "text": "*Project Branch:* %s" % current.branch_name
+                     }
+                 ]
+             }
+         else:
+             environment_details_block = {
+                 "type": "section",
+                 "text": {
+                     "type": "mrkdwn",
+                     "text": ":metaflow: Environment details"
+                 }
+             }
+
+         blocks = [
+             environment_details_block,
+             {
+                 "type": "context",
+                 "elements": [
+                     {
+                         "type": "mrkdwn",
+                         "text": " :information_source: *<%s>*" % ui_link,
+                     }
+                 ],
+             },
+             {
+                 "type": "divider"
+             },
+         ]
+
+         if message:
+             blocks += [
+                 {
+                     "type": "section",
+                     "text": {
+                         "type": "mrkdwn",
+                         "text": message
+                     }
+                 }
+             ]
+         # fmt: on
+         return blocks
+
      def _slack_error_template(self):
          if self.notify_slack_webhook_url is None:
              return None
+
+         message = (
+             ":rotating_light: _%s/argo-{{workflow.name}}_ failed!" % self.flow.name
+         )
+         payload = {"text": message}
+         if UI_URL:
+             blocks = self._get_slack_blocks(message)
+             payload = {"text": message, "blocks": blocks}
+
          return Template("notify-slack-on-error").http(
-             Http("POST")
-             .url(self.notify_slack_webhook_url)
-             .body(
-                 json.dumps(
-                     {
-                         "text": ":rotating_light: _%s/argo-{{workflow.name}}_ failed!"
-                         % self.flow.name
-                     }
-                 )
-             )
+             Http("POST").url(self.notify_slack_webhook_url).body(json.dumps(payload))
          )

      def _slack_success_template(self):
          if self.notify_slack_webhook_url is None:
              return None
+
+         message = (
+             ":white_check_mark: _%s/argo-{{workflow.name}}_ succeeded!" % self.flow.name
+         )
+         payload = {"text": message}
+         if UI_URL:
+             blocks = self._get_slack_blocks(message)
+             payload = {"text": message, "blocks": blocks}
+
          return Template("notify-slack-on-success").http(
-             Http("POST")
-             .url(self.notify_slack_webhook_url)
-             .body(
-                 json.dumps(
-                     {
-                         "text": ":white_check_mark: _%s/argo-{{workflow.name}}_ succeeded!"
-                         % self.flow.name
-                     }
-                 )
-             )
+             Http("POST").url(self.notify_slack_webhook_url).body(json.dumps(payload))
          )

      def _compile_sensor(self):
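
For illustration, the JSON body that `_slack_error_template` now posts when `UI_URL` is configured has roughly this shape (flow name and UI URL below are hypothetical placeholders; the blocks are assembled by `_get_slack_blocks` above):

payload = {
    "text": ":rotating_light: _HelloFlow/argo-{{workflow.name}}_ failed!",
    "blocks": [
        # environment details section (gains Project / Project Branch fields under @project)
        {"type": "section", "text": {"type": "mrkdwn", "text": ":metaflow: Environment details"}},
        # context element linking back to the Metaflow UI
        {"type": "context", "elements": [{"type": "mrkdwn",
            "text": " :information_source: *<https://ui.example.com/HelloFlow/argo-{{workflow.name}}>*"}]},
        {"type": "divider"},
        # the notification message itself
        {"type": "section", "text": {"type": "mrkdwn",
            "text": ":rotating_light: _HelloFlow/argo-{{workflow.name}}_ failed!"}},
    ],
}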

metaflow/plugins/aws/batch/batch.py CHANGED
@@ -11,6 +11,7 @@ from metaflow.plugins.datatools.s3.s3tail import S3Tail
  from metaflow.plugins.aws.aws_utils import sanitize_batch_tag
  from metaflow.exception import MetaflowException
  from metaflow.metaflow_config import (
+     OTEL_ENDPOINT,
      SERVICE_INTERNAL_URL,
      DATATOOLS_S3ROOT,
      DATASTORE_SYSROOT_S3,
@@ -255,6 +256,7 @@ class Batch(object):
          .environment_variable("METAFLOW_DEFAULT_DATASTORE", "s3")
          .environment_variable("METAFLOW_DEFAULT_METADATA", DEFAULT_METADATA)
          .environment_variable("METAFLOW_CARD_S3ROOT", CARD_S3ROOT)
+         .environment_variable("METAFLOW_OTEL_ENDPOINT", OTEL_ENDPOINT)
          .environment_variable("METAFLOW_RUNTIME_ENVIRONMENT", "aws-batch")
      )


metaflow/plugins/aws/step_functions/dynamo_db_client.py CHANGED
@@ -1,5 +1,8 @@
  import os
+ import time
+
  import requests
+
  from metaflow.metaflow_config import SFN_DYNAMO_DB_TABLE


@@ -25,12 +28,31 @@ class DynamoDbClient(object):
      def save_parent_task_id_for_foreach_join(
          self, foreach_split_task_id, foreach_join_parent_task_id
      ):
-         return self._client.update_item(
-             TableName=self.name,
-             Key={"pathspec": {"S": foreach_split_task_id}},
-             UpdateExpression="ADD parent_task_ids_for_foreach_join :val",
-             ExpressionAttributeValues={":val": {"SS": [foreach_join_parent_task_id]}},
-         )
+         ex = None
+         for attempt in range(10):
+             try:
+                 return self._client.update_item(
+                     TableName=self.name,
+                     Key={"pathspec": {"S": foreach_split_task_id}},
+                     UpdateExpression="ADD parent_task_ids_for_foreach_join :val",
+                     ExpressionAttributeValues={
+                         ":val": {"SS": [foreach_join_parent_task_id]}
+                     },
+                 )
+             except self._client.exceptions.ClientError as error:
+                 ex = error
+                 if (
+                     error.response["Error"]["Code"]
+                     == "ProvisionedThroughputExceededException"
+                 ):
+                     # hopefully, enough time for AWS to scale up! otherwise
+                     # ensure sufficient on-demand throughput for dynamo db
+                     # is provisioned ahead of time
+                     sleep_time = min((2**attempt) * 10, 60)
+                     time.sleep(sleep_time)
+                 else:
+                     raise
+         raise ex

      def get_parent_task_ids_for_foreach_join(self, foreach_split_task_id):
          response = self._client.get_item(
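
The retry loop sleeps `min((2**attempt) * 10, 60)` seconds after each throttled attempt, so the schedule is 10, 20, 40, and then 60 seconds for every remaining attempt — about eight minutes of waiting in the worst case:

print([min((2**attempt) * 10, 60) for attempt in range(10)])
# [10, 20, 40, 60, 60, 60, 60, 60, 60, 60]  -> 490 seconds in total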

metaflow/plugins/aws/step_functions/step_functions.py CHANGED
@@ -1,5 +1,5 @@
- import os
  import json
+ import os
  import random
  import string
  import zlib
@@ -15,6 +15,7 @@ from metaflow.metaflow_config import (
      SFN_DYNAMO_DB_TABLE,
      SFN_EXECUTION_LOG_GROUP_ARN,
      SFN_IAM_ROLE,
+     SFN_S3_DISTRIBUTED_MAP_OUTPUT_PATH,
  )
  from metaflow.parameters import deploy_time_eval
  from metaflow.util import dict_to_cli_options, to_pascalcase
@@ -52,6 +53,7 @@ class StepFunctions(object):
          max_workers=None,
          workflow_timeout=None,
          is_project=False,
+         use_distributed_map=False,
      ):
          self.name = name
          self.graph = graph
@@ -70,6 +72,9 @@
          self.max_workers = max_workers
          self.workflow_timeout = workflow_timeout

+         # https://aws.amazon.com/blogs/aws/step-functions-distributed-map-a-serverless-solution-for-large-scale-parallel-data-processing/
+         self.use_distributed_map = use_distributed_map
+
          self._client = StepFunctionsClient()
          self._workflow = self._compile()
          self._cron = self._cron()
@@ -365,17 +370,80 @@ class StepFunctions(object):
                  .parameter("SplitParentTaskId.$", "$.JobId")
                  .parameter("Parameters.$", "$.Parameters")
                  .parameter("Index.$", "$$.Map.Item.Value")
-                 .next(node.matching_join)
+                 .next(
+                     "%s_*GetManifest" % iterator_name
+                     if self.use_distributed_map
+                     else node.matching_join
+                 )
                  .iterator(
                      _visit(
                          self.graph[node.out_funcs[0]],
-                         Workflow(node.out_funcs[0]).start_at(node.out_funcs[0]),
+                         Workflow(node.out_funcs[0])
+                         .start_at(node.out_funcs[0])
+                         .mode(
+                             "DISTRIBUTED" if self.use_distributed_map else "INLINE"
+                         ),
                          node.matching_join,
                      )
                  )
                  .max_concurrency(self.max_workers)
-                 .output_path("$.[0]")
+                 # AWS Step Functions has a shortcoming for DistributedMap at the
+                 # moment that does not allow us to subset the output of for-each
+                 # to just a single element. We have to rely on a rather terrible
+                 # hack and resort to using ResultWriter to write the state to
+                 # Amazon S3 and process it in another task. But, well, what can
+                 # we do...
+                 .result_writer(
+                     *(
+                         (
+                             (
+                                 SFN_S3_DISTRIBUTED_MAP_OUTPUT_PATH[len("s3://") :]
+                                 if SFN_S3_DISTRIBUTED_MAP_OUTPUT_PATH.startswith(
+                                     "s3://"
+                                 )
+                                 else SFN_S3_DISTRIBUTED_MAP_OUTPUT_PATH
+                             ).split("/", 1)
+                             + [""]
+                         )[:2]
+                         if self.use_distributed_map
+                         else (None, None)
+                     )
+                 )
+                 .output_path("$" if self.use_distributed_map else "$.[0]")
              )
+             if self.use_distributed_map:
+                 workflow.add_state(
+                     State("%s_*GetManifest" % iterator_name)
+                     .resource("arn:aws:states:::aws-sdk:s3:getObject")
+                     .parameter("Bucket.$", "$.ResultWriterDetails.Bucket")
+                     .parameter("Key.$", "$.ResultWriterDetails.Key")
+                     .next("%s_*Map" % iterator_name)
+                     .result_selector("Body.$", "States.StringToJson($.Body)")
+                 )
+                 workflow.add_state(
+                     Map("%s_*Map" % iterator_name)
+                     .iterator(
+                         Workflow("%s_*PassWorkflow" % iterator_name)
+                         .mode("DISTRIBUTED")
+                         .start_at("%s_*Pass" % iterator_name)
+                         .add_state(
+                             Pass("%s_*Pass" % iterator_name)
+                             .end()
+                             .parameter("Output.$", "States.StringToJson($.Output)")
+                             .output_path("$.Output")
+                         )
+                     )
+                     .next(node.matching_join)
+                     .max_concurrency(1000)
+                     .item_reader(
+                         JSONItemReader()
+                         .resource("arn:aws:states:::s3:getObject")
+                         .parameter("Bucket.$", "$.Body.DestinationBucket")
+                         .parameter("Key.$", "$.Body.ResultFiles.SUCCEEDED.[0].Key")
+                     )
+                     .output_path("$.[0]")
+                 )
+
              # Continue the traversal from the matching_join.
              _visit(self.graph[node.matching_join], workflow, exit_node)
          # We shouldn't ideally ever get here.
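
The dense argument expression passed to `.result_writer(...)` above only splits the configured S3 path into a (bucket, prefix) pair; a sketch with a hypothetical path value:

path = "s3://my-bucket/metaflow/sfn_distributed_map_output"  # hypothetical
stripped = path[len("s3://"):] if path.startswith("s3://") else path
# The trailing [""] guards against paths with no prefix component.
bucket, prefix = (stripped.split("/", 1) + [""])[:2]
print(bucket, prefix)  # my-bucket metaflow/sfn_distributed_map_output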
@@ -444,7 +512,6 @@ class StepFunctions(object):
              "metaflow.owner": self.username,
              "metaflow.flow_name": self.flow.name,
              "metaflow.step_name": node.name,
-             "metaflow.run_id.$": "$$.Execution.Name",
              # Unfortunately we can't set the task id here since AWS Step
              # Functions lacks any notion of run-scoped task identifiers. We
              # instead co-opt the AWS Batch job id as the task id. This also
@@ -456,6 +523,10 @@
              # `$$.State.RetryCount` resolves to an int dynamically and
              # AWS Batch job specification only accepts strings. We handle
              # retries/catch within AWS Batch to get around this limitation.
+             # And, we also cannot set the run id here since the run id maps to
+             # the execution name of the AWS Step Functions State Machine, which
+             # is different when executing inside a distributed map. We set it once
+             # in the start step and move it along to be consumed by all the children.
              "metaflow.version": self.environment.get_environment_info()[
                  "metaflow_version"
              ],
@@ -492,6 +563,12 @@
              env["METAFLOW_S3_ENDPOINT_URL"] = S3_ENDPOINT_URL

          if node.name == "start":
+             # metaflow.run_id maps to AWS Step Functions State Machine Execution in all
+             # cases except for when within a for-each construct that relies on
+             # Distributed Map. To work around this issue, we pass the run id from the
+             # start step to all subsequent tasks.
+             attrs["metaflow.run_id.$"] = "$$.Execution.Name"
+
              # Initialize parameters for the flow in the `start` step.
              parameters = self._process_parameters()
              if parameters:
@@ -550,6 +627,8 @@
              env["METAFLOW_SPLIT_PARENT_TASK_ID"] = (
                  "$.Parameters.split_parent_task_id_%s" % node.split_parents[-1]
              )
+             # Inherit the run id from the parent and pass it along to children.
+             attrs["metaflow.run_id.$"] = "$.Parameters.['metaflow.run_id']"
          else:
              # Set appropriate environment variables for runtime replacement.
              if len(node.in_funcs) == 1:
@@ -558,6 +637,8 @@
                      % node.in_funcs[0]
                  )
                  env["METAFLOW_PARENT_TASK_ID"] = "$.JobId"
+                 # Inherit the run id from the parent and pass it along to children.
+                 attrs["metaflow.run_id.$"] = "$.Parameters.['metaflow.run_id']"
              else:
                  # Generate the input paths in a quasi-compressed format.
                  # See util.decompress_list for why this is written the way
@@ -567,6 +648,8 @@
                      "${METAFLOW_PARENT_%s_TASK_ID}" % (idx, idx)
                      for idx, _ in enumerate(node.in_funcs)
                  )
+                 # Inherit the run id from the parent and pass it along to children.
+                 attrs["metaflow.run_id.$"] = "$.[0].Parameters.['metaflow.run_id']"
                  for idx, _ in enumerate(node.in_funcs):
                      env["METAFLOW_PARENT_%s_TASK_ID" % idx] = "$.[%s].JobId" % idx
                      env["METAFLOW_PARENT_%s_STEP" % idx] = (
@@ -893,6 +976,12 @@ class Workflow(object):
          tree = lambda: defaultdict(tree)
          self.payload = tree()

+     def mode(self, mode):
+         self.payload["ProcessorConfig"] = {"Mode": mode}
+         if mode == "DISTRIBUTED":
+             self.payload["ProcessorConfig"]["ExecutionType"] = "STANDARD"
+         return self
+
      def start_at(self, start_at):
          self.payload["StartAt"] = start_at
          return self
@@ -940,10 +1029,18 @@
          self.payload["ResultPath"] = result_path
          return self

+     def result_selector(self, name, value):
+         self.payload["ResultSelector"][name] = value
+         return self
+
      def _partition(self):
          # This is needed to support AWS Gov Cloud and AWS CN regions
          return SFN_IAM_ROLE.split(":")[1]

+     def retry_strategy(self, retry_strategy):
+         self.payload["Retry"] = [retry_strategy]
+         return self
+
      def batch(self, job):
          self.resource(
              "arn:%s:states:::batch:submitJob.sync" % self._partition()
@@ -963,6 +1060,19 @@
          # tags may not be present in all scenarios
          if "tags" in job.payload:
              self.parameter("Tags", job.payload["tags"])
+         # set retry strategy for AWS Batch job submission to account for the
+         # measly 50 jobs / second queue admission limit which people can
+         # run into very quickly.
+         self.retry_strategy(
+             {
+                 "ErrorEquals": ["Batch.AWSBatchException"],
+                 "BackoffRate": 2,
+                 "IntervalSeconds": 2,
+                 "MaxDelaySeconds": 60,
+                 "MaxAttempts": 10,
+                 "JitterStrategy": "FULL",
+             }
+         )
          return self

      def dynamo_db(self, table_name, primary_key, values):
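
With `IntervalSeconds` 2 and `BackoffRate` 2, the strategy above produces delay ceilings of 2, 4, 8, ... seconds, capped at `MaxDelaySeconds`; with FULL jitter, AWS samples the actual wait between zero and each ceiling (per AWS Step Functions retry semantics). A quick check of the ceilings:

print([min(2 * 2**i, 60) for i in range(10)])
# [2, 4, 8, 16, 32, 60, 60, 60, 60, 60]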
@@ -976,6 +1086,26 @@
          return self


+ class Pass(object):
+     def __init__(self, name):
+         self.name = name
+         tree = lambda: defaultdict(tree)
+         self.payload = tree()
+         self.payload["Type"] = "Pass"
+
+     def end(self):
+         self.payload["End"] = True
+         return self
+
+     def parameter(self, name, value):
+         self.payload["Parameters"][name] = value
+         return self
+
+     def output_path(self, output_path):
+         self.payload["OutputPath"] = output_path
+         return self
+
+
  class Parallel(object):
      def __init__(self, name):
          self.name = name
@@ -1037,3 +1167,37 @@ class Map(object):
      def result_path(self, result_path):
          self.payload["ResultPath"] = result_path
          return self
+
+     def item_reader(self, item_reader):
+         self.payload["ItemReader"] = item_reader.payload
+         return self
+
+     def result_writer(self, bucket, prefix):
+         if bucket is not None and prefix is not None:
+             self.payload["ResultWriter"] = {
+                 "Resource": "arn:aws:states:::s3:putObject",
+                 "Parameters": {
+                     "Bucket": bucket,
+                     "Prefix": prefix,
+                 },
+             }
+         return self
+
+
+ class JSONItemReader(object):
+     def __init__(self):
+         tree = lambda: defaultdict(tree)
+         self.payload = tree()
+         self.payload["ReaderConfig"] = {"InputType": "JSON", "MaxItems": 1}
+
+     def resource(self, resource):
+         self.payload["Resource"] = resource
+         return self
+
+     def parameter(self, name, value):
+         self.payload["Parameters"][name] = value
+         return self
+
+     def output_path(self, output_path):
+         self.payload["OutputPath"] = output_path
+         return self

metaflow/plugins/aws/step_functions/step_functions_cli.py CHANGED
@@ -1,23 +1,23 @@
  import base64
- from metaflow._vendor import click
- from hashlib import sha1
  import json
  import re
+ from hashlib import sha1

- from metaflow import current, decorators, parameters, JSONType
+ from metaflow import JSONType, current, decorators, parameters
+ from metaflow._vendor import click
+ from metaflow.exception import MetaflowException, MetaflowInternalError
  from metaflow.metaflow_config import (
      SERVICE_VERSION_CHECK,
      SFN_STATE_MACHINE_PREFIX,
      UI_URL,
  )
- from metaflow.exception import MetaflowException, MetaflowInternalError
  from metaflow.package import MetaflowPackage
  from metaflow.plugins.aws.batch.batch_decorator import BatchDecorator
  from metaflow.tagging_util import validate_tags
  from metaflow.util import get_username, to_bytes, to_unicode, version_parse

+ from .production_token import load_token, new_token, store_token
  from .step_functions import StepFunctions
- from .production_token import load_token, store_token, new_token

  VALID_NAME = re.compile(r"[^a-zA-Z0-9_\-\.]")

@@ -124,6 +124,12 @@ def step_functions(obj, name=None):
      help="Log AWS Step Functions execution history to AWS CloudWatch "
      "Logs log group.",
  )
+ @click.option(
+     "--use-distributed-map/--no-use-distributed-map",
+     is_flag=True,
+     help="Use AWS Step Functions Distributed Map instead of Inline Map for "
+     "defining foreach tasks in Amazon State Language.",
+ )
  @click.pass_obj
  def create(
      obj,
@@ -136,6 +142,7 @@ def create(
      max_workers=None,
      workflow_timeout=None,
      log_execution_history=False,
+     use_distributed_map=False,
  ):
      validate_tags(tags)

@@ -165,6 +172,7 @@
          max_workers,
          workflow_timeout,
          obj.is_project,
+         use_distributed_map,
      )

      if only_json:
@@ -273,7 +281,15 @@ def resolve_state_machine_name(obj, name):


  def make_flow(
-     obj, token, name, tags, namespace, max_workers, workflow_timeout, is_project
+     obj,
+     token,
+     name,
+     tags,
+     namespace,
+     max_workers,
+     workflow_timeout,
+     is_project,
+     use_distributed_map,
  ):
      if obj.flow_datastore.TYPE != "s3":
          raise MetaflowException("AWS Step Functions requires --datastore=s3.")
@@ -309,6 +325,7 @@
          username=get_username(),
          workflow_timeout=workflow_timeout,
          is_project=is_project,
+         use_distributed_map=use_distributed_map,
      )

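With this plumbing, the flag travels from the CLI through `make_flow` into the `StepFunctions` constructor; e.g. `python myflow.py step-functions create --use-distributed-map` (flow file name hypothetical) compiles each foreach into a Distributed Map state instead of an Inline Map.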

metaflow/plugins/aws/step_functions/step_functions_decorator.py CHANGED
@@ -1,5 +1,5 @@
- import os
  import json
+ import os
  import time

  from metaflow.decorators import StepDecorator

metaflow/plugins/cards/card_cli.py CHANGED
@@ -17,6 +17,7 @@ import random
  from contextlib import contextmanager
  from functools import wraps
  from metaflow.exception import MetaflowNamespaceMismatch
+
  from .card_datastore import CardDatastore, NUM_SHORT_HASH_CHARS
  from .exception import (
      CardClassFoundException,
@@ -736,8 +737,7 @@ def create(
      if error_stack_trace is not None and mode != "refresh":
          rendered_content = error_card().render(task, stack_trace=error_stack_trace)
-
-     if (
+     elif (
          rendered_info.is_implemented
          and rendered_info.timed_out
          and mode != "refresh"

metaflow/plugins/pypi/bootstrap.py CHANGED
@@ -45,7 +45,7 @@ if __name__ == "__main__":
      # fi
      # fi

-     prefix = os.path.join(os.getcwd(), id_)
+     prefix = os.path.join(os.getcwd(), architecture, id_)
      pkgs_dir = os.path.join(os.getcwd(), ".pkgs")
      manifest_dir = os.path.join(os.getcwd(), DATASTORE_LOCAL_DIR, flow_name)


metaflow/plugins/pypi/conda_decorator.py CHANGED
@@ -2,11 +2,13 @@ import importlib
  import json
  import os
  import platform
+ import re
  import sys
  import tempfile

  from metaflow.decorators import FlowDecorator, StepDecorator
  from metaflow.extension_support import EXT_PKG
+ from metaflow.metadata import MetaDatum
  from metaflow.metaflow_environment import InvalidEnvironmentException
  from metaflow.util import get_metaflow_root

@@ -241,7 +243,25 @@ class CondaStepDecorator(StepDecorator):
                  ),
              )
          )
-         # TODO: Register metadata
+
+         # Infer environment prefix from Python interpreter
+         match = re.search(
+             r"(?:.*\/)(metaflow\/[^/]+\/[^/]+)(?=\/bin\/python)", sys.executable
+         )
+         if match:
+             meta.register_metadata(
+                 run_id,
+                 step_name,
+                 task_id,
+                 [
+                     MetaDatum(
+                         field="conda_env_prefix",
+                         value=match.group(1),
+                         type="conda_env_prefix",
+                         tags=["attempt_id:{0}".format(retry_count)],
+                     )
+                 ],
+             )

      def runtime_step_cli(
          self, cli_args, retry_count, max_user_code_retries, ubf_context
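
A quick sketch of what the new regex extracts, using a hypothetical interpreter path inside a Metaflow-managed environment:

import re

executable = "/opt/conda/envs/metaflow/linux-64/4ef1a6054/bin/python"  # hypothetical
match = re.search(r"(?:.*\/)(metaflow\/[^/]+\/[^/]+)(?=\/bin\/python)", executable)
print(match.group(1))  # metaflow/linux-64/4ef1a6054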

metaflow/plugins/pypi/conda_environment.py CHANGED
@@ -6,10 +6,11 @@ import io
  import json
  import os
  import sys
+ import tarfile
  import time
  from concurrent.futures import ThreadPoolExecutor
  from hashlib import sha256
- from io import BufferedIOBase
+ from io import BufferedIOBase, BytesIO
  from itertools import chain
  from urllib.parse import unquote, urlparse

@@ -33,6 +34,7 @@ class CondaEnvironmentException(MetaflowException):

  class CondaEnvironment(MetaflowEnvironment):
      TYPE = "conda"
+     _filecache = None

      def __init__(self, flow):
          self.flow = flow
@@ -188,7 +190,7 @@ class CondaEnvironment(MetaflowEnvironment):
          if id_:
              # bootstrap.py is responsible for ensuring the validity of this executable.
              # -s is important! Can otherwise leak packages to other environments.
-             return os.path.join(id_, "bin/python -s")
+             return os.path.join("linux-64", id_, "bin/python -s")
          else:
              # for @conda/@pypi(disabled=True).
              return super().executable(step_name, default)
@@ -320,8 +322,23 @@ class CondaEnvironment(MetaflowEnvironment):

      @classmethod
      def get_client_info(cls, flow_name, metadata):
-         # TODO: Decide this method's fate
-         return None
+         if cls._filecache is None:
+             from metaflow.client.filecache import FileCache
+
+             cls._filecache = FileCache()
+
+         info = metadata.get("code-package")
+         prefix = metadata.get("conda_env_prefix")
+         if info is None or prefix is None:
+             return {}
+         info = json.loads(info)
+         _, blobdata = cls._filecache.get_data(
+             info["ds_type"], flow_name, info["location"], info["sha"]
+         )
+         with tarfile.open(fileobj=BytesIO(blobdata), mode="r:gz") as tar:
+             manifest = tar.extractfile(MAGIC_FILE)
+             info = json.loads(manifest.read().decode("utf-8"))
+         return info[prefix.split("/")[2]][prefix.split("/")[1]]

      def add_to_package(self):
          # Add manifest file to job package at the top level.
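
Note the indexing on the last line of `get_client_info`: for a `conda_env_prefix` like the one the decorator registers above, the manifest is keyed by environment id first, then platform. A sketch with a hypothetical prefix:

prefix = "metaflow/linux-64/4ef1a6054"  # hypothetical conda_env_prefix value
parts = prefix.split("/")
print(parts[2], parts[1])  # 4ef1a6054 linux-64
# info[parts[2]][parts[1]] then yields that environment's manifest entry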
metaflow/version.py CHANGED
@@ -1 +1 @@
- metaflow_version = "2.11.2"
+ metaflow_version = "2.11.3"

metaflow-2.11.2.dist-info/METADATA → metaflow-2.11.3.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: metaflow
- Version: 2.11.2
+ Version: 2.11.3
  Summary: Metaflow: More Data Science, Less Engineering
  Author: Metaflow Developers
  Author-email: help@metaflow.org
@@ -26,7 +26,7 @@ License-File: LICENSE
  Requires-Dist: requests
  Requires-Dist: boto3
  Provides-Extra: stubs
- Requires-Dist: metaflow-stubs ==2.11.2 ; extra == 'stubs'
+ Requires-Dist: metaflow-stubs ==2.11.3 ; extra == 'stubs'

  ![Metaflow_Logo_Horizontal_FullColor_Ribbon_Dark_RGB](https://user-images.githubusercontent.com/763451/89453116-96a57e00-d713-11ea-9fa6-82b29d4d6eff.png)


metaflow-2.11.2.dist-info/RECORD → metaflow-2.11.3.dist-info/RECORD CHANGED
@@ -9,12 +9,12 @@ metaflow/decorators.py,sha256=EGL1_nkdxoYG5AZiOQ8sLGA1bprGK8ENwlSIOYQmLhs,21357
  metaflow/event_logger.py,sha256=joTVRqZPL87nvah4ZOwtqWX8NeraM_CXKXXGVpKGD8o,780
  metaflow/events.py,sha256=ahjzkSbSnRCK9RZ-9vTfUviz_6gMvSO9DGkJ86X80-k,5300
  metaflow/exception.py,sha256=KC1LHJQzzYkWib0DeQ4l_A2r8VaudywsSqIQuq1RDZU,4954
- metaflow/flowspec.py,sha256=wA6Ci-vZ_CrfLEEMnCqTEkNv14O5Bq5AFvH-8Xt1sDw,26684
+ metaflow/flowspec.py,sha256=YMo2zNEy3UcclYys5rPpMk7bk0JvtABo-EpmFTi-IPQ,26874
  metaflow/graph.py,sha256=ZPxyG8uwVMk5YYgX4pQEQaPZtZM5Wy-G4NtJK73IEuA,11818
  metaflow/includefile.py,sha256=yHczcZ_U0SrasxSNhZb3DIBzx8UZnrJCl3FzvpEQLOA,19753
  metaflow/integrations.py,sha256=LlsaoePRg03DjENnmLxZDYto3NwWc9z_PtU6nJxLldg,1480
  metaflow/lint.py,sha256=_kYAbAtsP7IG1Rd0FqNbo8I8Zs66_0WXbaZJFARO3dE,10394
- metaflow/metaflow_config.py,sha256=4F1HEvEHiGpEj-J5ccYw5b39myh7X2xkp5AyPM5rnAY,19225
+ metaflow/metaflow_config.py,sha256=_--DonM1Sj6PbQH7Am_0r-9oZFhI1DLJmiJgJkpEkJY,19662
  metaflow/metaflow_config_funcs.py,sha256=pCaiQ2ez9wXixJI3ehmf3QiW9lUqFrZnBZx1my_0wIg,4874
  metaflow/metaflow_current.py,sha256=sCENPBiji3LcPbwgOG0ukGd_yEc5tST8EowES8DzRtA,7430
  metaflow/metaflow_environment.py,sha256=JdsmQsYp1SDQniQ0-q1mKRrmzSFfYuzrf6jLEHmyaiM,7352
@@ -33,7 +33,7 @@ metaflow/task.py,sha256=rGBlG18vnHKC65CYiFkja0GsXYWmRVrNOeCD4TY_nL4,27032
  metaflow/unbounded_foreach.py,sha256=p184WMbrMJ3xKYHwewj27ZhRUsSj_kw1jlye5gA9xJk,387
  metaflow/util.py,sha256=RrjsvADLKxSqjL76CxKh_J4OJl840B9Ak3V-vXleGas,13429
  metaflow/vendor.py,sha256=LZgXrh7ZSDmD32D1T5jj3OKKpXIqqxKzdMAOc5V0SD4,5162
- metaflow/version.py,sha256=TE2PAReb-M2lZz1GS382YFZ7Fg6NCktRPZ4qBMZ9wyg,28
+ metaflow/version.py,sha256=gJROyhqslzcmekVHrVdDybqVCjTbJnSNU5-tig72mrA,28
  metaflow/_vendor/__init__.py,sha256=y_CiwUD3l4eAKvTVDZeqgVujMy31cAM1qjAB-HfI-9s,353
  metaflow/_vendor/click/__init__.py,sha256=FkyGDQ-cbiQxP_lxgUspyFYS48f2S_pTcfKPz-d_RMo,2463
  metaflow/_vendor/click/_bashcomplete.py,sha256=9J98IHQYmCAr2Jup6TDshUr5FJEen-AoQCZR0K5nKxQ,12309
@@ -148,7 +148,7 @@ metaflow/plugins/airflow/sensors/s3_sensor.py,sha256=JUKoGNoTCtrO9MNEneEC7ldRNwg
  metaflow/plugins/argo/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  metaflow/plugins/argo/argo_client.py,sha256=MKKhMCbWOPzf6z5zQQiyDRHHkAXcO7ipboDZDqAAvOk,15849
  metaflow/plugins/argo/argo_events.py,sha256=_C1KWztVqgi3zuH57pInaE9OzABc2NnncC-zdwOMZ-w,5909
- metaflow/plugins/argo/argo_workflows.py,sha256=bFpTxpTztYNe-uG0WbOL7HRca8JY2UyxIbBwQcodKm8,119689
+ metaflow/plugins/argo/argo_workflows.py,sha256=gKGmrcVwEktaqa7dOh9pWwAktGUQwwTACDT5-uUpJ4s,121723
  metaflow/plugins/argo/argo_workflows_cli.py,sha256=sZTpgfmc50eT3e0qIxpVqUgWhTcYlO1HM4gU6Oaya8g,33259
  metaflow/plugins/argo/argo_workflows_decorator.py,sha256=CfKVoHCOsCCQMghhPE30xw15gacwp3hR23HCo9ZZFVg,6580
  metaflow/plugins/argo/process_input_paths.py,sha256=4SiUoxbnTX4rCt0RSLcxG5jysbyd8oU-5JT0UOgy-vk,555
@@ -156,22 +156,22 @@ metaflow/plugins/aws/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hS
  metaflow/plugins/aws/aws_client.py,sha256=mO8UD6pxFaOnxDb3hTP3HB7Gqb_ZxoR-76LT683WHvI,4036
  metaflow/plugins/aws/aws_utils.py,sha256=pkkH8Cy9sF5tp3HoZ84wkN-84NmksgCdNN4cMSdsLaA,6455
  metaflow/plugins/aws/batch/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- metaflow/plugins/aws/batch/batch.py,sha256=aVZOWdD9d7Zvhr1RYcJTKT9DDQmaPaU7Zu7ASZbnHgA,17273
+ metaflow/plugins/aws/batch/batch.py,sha256=ziYpcolvbAZ4DoI63-A9mZcBM5oKUcueeGMjPVLdCaI,17367
  metaflow/plugins/aws/batch/batch_cli.py,sha256=W4DW1Ldx4wK6pIdDOsNusUphpD-2a93qTBMRTRh5a40,11048
  metaflow/plugins/aws/batch/batch_client.py,sha256=_etUOsqz5d9tSmO9HGsNV_zzJz2Z25CDyqzOqWcORnI,27066
  metaflow/plugins/aws/batch/batch_decorator.py,sha256=C_K476zuU6-FF7DImijgKeGLdNXbBIcp4WC7K1bMDsc,15851
  metaflow/plugins/aws/secrets_manager/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  metaflow/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.py,sha256=JtFUVu00Cg0FzAizgrPLXmrMqsT7YeQMkQlgeivUxcE,7986
  metaflow/plugins/aws/step_functions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- metaflow/plugins/aws/step_functions/dynamo_db_client.py,sha256=KF8omWGe93_Qiec41LNdSuIHa0hmv1lbX7L8IwOR8Kw,1532
+ metaflow/plugins/aws/step_functions/dynamo_db_client.py,sha256=c1nFZc9v_dLTD3iD-75I6wrlM6oM7YmxpFPTRo8Tra0,2327
  metaflow/plugins/aws/step_functions/event_bridge_client.py,sha256=U9-tqKdih4KR-ZDRhFc-jHmYIcHgpS4swfgtTxNMB94,2690
- metaflow/plugins/aws/step_functions/production_token.py,sha256=jEsDH0etkzORgeDTUx6UDRjRsy8QQHYjHWEDLE-0rVY,1898
+ metaflow/plugins/aws/step_functions/production_token.py,sha256=_o4emv3rozYZoWpaj1Y6UfKhTMlYpQc7GDDDBfZ2G7s,1898
  metaflow/plugins/aws/step_functions/schedule_decorator.py,sha256=Ab1rW8O_no4HNZm4__iBmFDCDW0Z8-TgK4lnxHHA6HI,1940
  metaflow/plugins/aws/step_functions/set_batch_environment.py,sha256=ibiGWFHDjKcLfprH3OsX-g2M9lUsh6J-bp7v2cdLhD4,1294
- metaflow/plugins/aws/step_functions/step_functions.py,sha256=FjymSGA3-lTlaneZK5CKiC0yPBwVNblVWbmh1KZfvn0,44545
- metaflow/plugins/aws/step_functions/step_functions_cli.py,sha256=eHy9RGaSYOnN6VcCC_Nhvq5QBQPBlreX4Xem8Vu3R-w,24041
+ metaflow/plugins/aws/step_functions/step_functions.py,sha256=V079UmCANB6clTzmOZe8Gq7hImb12hd_bw3DCLdMeZw,51673
+ metaflow/plugins/aws/step_functions/step_functions_cli.py,sha256=KlH9jJL0VfsT0JqBhLwaWdYjaccU8UEArKAFnIJbSoU,24426
  metaflow/plugins/aws/step_functions/step_functions_client.py,sha256=DKpNwAIWElvWjFANs5Ku3rgzjxFoqAD6k-EF8Xhkg3Q,4754
- metaflow/plugins/aws/step_functions/step_functions_decorator.py,sha256=R1C1EYdoYJUILXdpV-vdXOlyEBNiyDWfNXsTcG8URww,3791
+ metaflow/plugins/aws/step_functions/step_functions_decorator.py,sha256=9hw_MX36RyFp6IowuAYaJzJg9UC5KCe1FNt1PcG7_J0,3791
  metaflow/plugins/azure/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  metaflow/plugins/azure/azure_exceptions.py,sha256=uvxE3E3nsbQq1dxCx1Yl9O54frbnMS5Elk8Z4qQ2Oh4,404
  metaflow/plugins/azure/azure_tail.py,sha256=JAqV4mC42bMpR0O7m6X4cpFuh0peV1ufs_jJXrmicTc,3362
@@ -179,7 +179,7 @@ metaflow/plugins/azure/azure_utils.py,sha256=efJnGl15s2HxxCtmLUsYDDsn1ek2tUl-5lr
  metaflow/plugins/azure/blob_service_client_factory.py,sha256=j1FJa06h6V-QcYIir9tcoEb23XcufcecyJQRsGhD2Qw,6489
  metaflow/plugins/azure/includefile_support.py,sha256=Wv3g3RlGtLbxAh3Reg0BDLWwqavYibQNCDWddlH7XCE,4706
  metaflow/plugins/cards/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- metaflow/plugins/cards/card_cli.py,sha256=Nd-tEQJ2mnJGgDlFfDiZRrcvD_f2CKiMkK0H8bxRjDY,34625
+ metaflow/plugins/cards/card_cli.py,sha256=wE5IbcS2za2Tv8SrLmTjPDub48879g_JkNMyLlnh_4w,34627
  metaflow/plugins/cards/card_client.py,sha256=LnRDqpyheDIehiHufpvMVTwE7cYEVIiMq8ggqZF4I2E,9368
  metaflow/plugins/cards/card_creator.py,sha256=E_NCmWPK6DzkqigtpUpeddCDbjnKF6dJcE6IvWzwiyA,7740
  metaflow/plugins/cards/card_datastore.py,sha256=3K19wE0CZVvOpuYUytftIYYnHHn3pMZJE87FMD6OYlM,14244
@@ -254,9 +254,9 @@ metaflow/plugins/metadata/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-
  metaflow/plugins/metadata/local.py,sha256=YhLJC5zjVJrvQFIyQ92ZBByiUmhCC762RUX7ITX12O8,22428
  metaflow/plugins/metadata/service.py,sha256=ihq5F7KQZlxvYwzH_-jyP2aWN_I96i2vp92j_d697s8,20204
  metaflow/plugins/pypi/__init__.py,sha256=0YFZpXvX7HCkyBFglatual7XGifdA1RwC3U4kcizyak,1037
- metaflow/plugins/pypi/bootstrap.py,sha256=PBIStzaEuhAl0xBd0Ap6oUaWMDwvz5bZEWo6h2SFru0,5073
- metaflow/plugins/pypi/conda_decorator.py,sha256=mkHzyWSEPSbIH6qk8UUvIjtDxzyl7tQxpLffO9ZBFLA,13547
- metaflow/plugins/pypi/conda_environment.py,sha256=FeHwE9xkI3nYv_OZ_iFzMI2Z2s5BG7llFSi-ndPeId8,17867
+ metaflow/plugins/pypi/bootstrap.py,sha256=nCe8FadqfIM19yj64m4JWdv_QEnQEp01bzQZrxzo5bs,5087
+ metaflow/plugins/pypi/conda_decorator.py,sha256=-bPxNtZKjxqOo4sj89uIp8ZVrCIontWhAp7wwRFjYpg,14189
+ metaflow/plugins/pypi/conda_environment.py,sha256=gfSa50ukez72aB16RZmILPuc8GXCfw0W-sbmWzcmfsE,18575
  metaflow/plugins/pypi/micromamba.py,sha256=wlVN2fm4WXFh3jVNtpDfu4XEz6VJKbmFNp0QvqlMIuI,12179
  metaflow/plugins/pypi/pip.py,sha256=paL-hbj5j-vcYOeSm04OiBix-TpoKMF7bRfGVdXfGZs,11685
  metaflow/plugins/pypi/pypi_decorator.py,sha256=syWk_oSQhIK9Y7OeOINMG2XVyxh9sj5uJhapwAXRBDw,5583
@@ -297,9 +297,9 @@ metaflow/tutorials/07-worldview/README.md,sha256=5vQTrFqulJ7rWN6r20dhot9lI2sVj9W
  metaflow/tutorials/07-worldview/worldview.ipynb,sha256=ztPZPI9BXxvW1QdS2Tfe7LBuVzvFvv0AToDnsDJhLdE,2237
  metaflow/tutorials/08-autopilot/README.md,sha256=GnePFp_q76jPs991lMUqfIIh5zSorIeWznyiUxzeUVE,1039
  metaflow/tutorials/08-autopilot/autopilot.ipynb,sha256=DQoJlILV7Mq9vfPBGW-QV_kNhWPjS5n6SJLqePjFYLY,3191
- metaflow-2.11.2.dist-info/LICENSE,sha256=nl_Lt5v9VvJ-5lWJDT4ddKAG-VZ-2IaLmbzpgYDz2hU,11343
- metaflow-2.11.2.dist-info/METADATA,sha256=-g4FgX7It1THhPXL0GVND89oCcx21LYN2cj5cbcZ0_o,5906
- metaflow-2.11.2.dist-info/WHEEL,sha256=-G_t0oGuE7UD0DrSpVZnq1hHMBV9DD2XkS5v7XpmTnk,110
- metaflow-2.11.2.dist-info/entry_points.txt,sha256=IKwTN1T3I5eJL3uo_vnkyxVffcgnRdFbKwlghZfn27k,57
- metaflow-2.11.2.dist-info/top_level.txt,sha256=v1pDHoWaSaKeuc5fKTRSfsXCKSdW1zvNVmvA-i0if3o,9
- metaflow-2.11.2.dist-info/RECORD,,
+ metaflow-2.11.3.dist-info/LICENSE,sha256=nl_Lt5v9VvJ-5lWJDT4ddKAG-VZ-2IaLmbzpgYDz2hU,11343
+ metaflow-2.11.3.dist-info/METADATA,sha256=h3w2Ot3jIufP4RiQdkcOOhicH_MoMEXgF_34jpbsIZQ,5906
+ metaflow-2.11.3.dist-info/WHEEL,sha256=-G_t0oGuE7UD0DrSpVZnq1hHMBV9DD2XkS5v7XpmTnk,110
+ metaflow-2.11.3.dist-info/entry_points.txt,sha256=IKwTN1T3I5eJL3uo_vnkyxVffcgnRdFbKwlghZfn27k,57
+ metaflow-2.11.3.dist-info/top_level.txt,sha256=v1pDHoWaSaKeuc5fKTRSfsXCKSdW1zvNVmvA-i0if3o,9
+ metaflow-2.11.3.dist-info/RECORD,,