metaflow 2.12.9__py2.py3-none-any.whl → 2.12.10__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -117,6 +117,7 @@ class ArgoWorkflows(object):
117
117
  notify_on_success=False,
118
118
  notify_slack_webhook_url=None,
119
119
  notify_pager_duty_integration_key=None,
120
+ enable_heartbeat_daemon=True,
120
121
  ):
121
122
  # Some high-level notes -
122
123
  #
@@ -164,6 +165,7 @@ class ArgoWorkflows(object):
164
165
  self.notify_on_success = notify_on_success
165
166
  self.notify_slack_webhook_url = notify_slack_webhook_url
166
167
  self.notify_pager_duty_integration_key = notify_pager_duty_integration_key
168
+ self.enable_heartbeat_daemon = enable_heartbeat_daemon
167
169
 
168
170
  self.parameters = self._process_parameters()
169
171
  self.triggers, self.trigger_options = self._process_triggers()
@@ -853,6 +855,8 @@ class ArgoWorkflows(object):
853
855
  .templates(self._container_templates())
854
856
  # Exit hook template(s)
855
857
  .templates(self._exit_hook_templates())
858
+ # Sidecar templates (Daemon Containers)
859
+ .templates(self._daemon_templates())
856
860
  )
857
861
  )
858
862
 
@@ -1265,7 +1269,13 @@ class ArgoWorkflows(object):
1265
1269
  "Argo Workflows." % (node.type, node.name)
1266
1270
  )
1267
1271
 
1268
- templates, _ = _visit(node=self.graph["start"])
1272
+ # Generate daemon tasks
1273
+ daemon_tasks = [
1274
+ DAGTask("%s-task" % daemon_template.name).template(daemon_template.name)
1275
+ for daemon_template in self._daemon_templates()
1276
+ ]
1277
+
1278
+ templates, _ = _visit(node=self.graph["start"], dag_tasks=daemon_tasks)
1269
1279
  return templates
1270
1280
 
1271
1281
  # Visit every node and yield ContainerTemplates.
@@ -2015,6 +2025,7 @@ class ArgoWorkflows(object):
2015
2025
  kubernetes_sdk.V1Container(
2016
2026
  name=self._sanitize(node.name),
2017
2027
  command=cmds,
2028
+ termination_message_policy="FallbackToLogsOnError",
2018
2029
  ports=[kubernetes_sdk.V1ContainerPort(container_port=port)]
2019
2030
  if port
2020
2031
  else None,
@@ -2067,9 +2078,11 @@ class ArgoWorkflows(object):
2067
2078
  for k in list(
2068
2079
  []
2069
2080
  if not resources.get("secrets")
2070
- else [resources.get("secrets")]
2071
- if isinstance(resources.get("secrets"), str)
2072
- else resources.get("secrets")
2081
+ else (
2082
+ [resources.get("secrets")]
2083
+ if isinstance(resources.get("secrets"), str)
2084
+ else resources.get("secrets")
2085
+ )
2073
2086
  )
2074
2087
  + KUBERNETES_SECRETS.split(",")
2075
2088
  + ARGO_WORKFLOWS_KUBERNETES_SECRETS.split(",")
@@ -2121,6 +2134,13 @@ class ArgoWorkflows(object):
2121
2134
  )
2122
2135
  )
2123
2136
 
2137
+ # Return daemon container templates for workflow execution notifications.
2138
+ def _daemon_templates(self):
2139
+ templates = []
2140
+ if self.enable_heartbeat_daemon:
2141
+ templates.append(self._heartbeat_daemon_template())
2142
+ return templates
2143
+
2124
2144
  # Return exit hook templates for workflow execution notifications.
2125
2145
  def _exit_hook_templates(self):
2126
2146
  templates = []
@@ -2327,6 +2347,117 @@ class ArgoWorkflows(object):
2327
2347
  Http("POST").url(self.notify_slack_webhook_url).body(json.dumps(payload))
2328
2348
  )
2329
2349
 
2350
+ def _heartbeat_daemon_template(self):
2351
+ # Use all the affordances available to _parameters task
2352
+ executable = self.environment.executable("_parameters")
2353
+ run_id = "argo-{{workflow.name}}"
2354
+ entrypoint = [executable, "-m metaflow.plugins.argo.daemon"]
2355
+ heartbeat_cmds = "{entrypoint} --flow_name {flow_name} --run_id {run_id} {tags} heartbeat".format(
2356
+ entrypoint=" ".join(entrypoint),
2357
+ flow_name=self.flow.name,
2358
+ run_id=run_id,
2359
+ tags=" ".join(["--tag %s" % t for t in self.tags]) if self.tags else "",
2360
+ )
2361
+
2362
+ # TODO: we do not really need MFLOG logging for the daemon at the moment, but might be good for the future.
2363
+ # Consider if we can do without this setup.
2364
+ # Configure log capture.
2365
+ mflog_expr = export_mflog_env_vars(
2366
+ datastore_type=self.flow_datastore.TYPE,
2367
+ stdout_path="$PWD/.logs/mflog_stdout",
2368
+ stderr_path="$PWD/.logs/mflog_stderr",
2369
+ flow_name=self.flow.name,
2370
+ run_id=run_id,
2371
+ step_name="_run_heartbeat_daemon",
2372
+ task_id="1",
2373
+ retry_count="0",
2374
+ )
2375
+ # TODO: Can the init be trimmed down?
2376
+ # Can we do without get_package_commands fetching the whole code package?
2377
+ init_cmds = " && ".join(
2378
+ [
2379
+ # For supporting sandboxes, ensure that a custom script is executed
2380
+ # before anything else is executed. The script is passed in as an
2381
+ # env var.
2382
+ '${METAFLOW_INIT_SCRIPT:+eval \\"${METAFLOW_INIT_SCRIPT}\\"}',
2383
+ "mkdir -p $PWD/.logs",
2384
+ mflog_expr,
2385
+ ]
2386
+ + self.environment.get_package_commands(
2387
+ self.code_package_url, self.flow_datastore.TYPE
2388
+ )[:-1]
2389
+ # Replace the line 'Task is starting'
2390
+ # FIXME: this can be brittle.
2391
+ + ["mflog 'Heartbeat daemon is starting.'"]
2392
+ )
2393
+
2394
+ cmd_str = " && ".join([init_cmds, heartbeat_cmds])
2395
+ cmds = shlex.split('bash -c "%s"' % cmd_str)
2396
+
2397
+ # TODO: Check that this is the minimal env.
2398
+ # Env required for sending heartbeats to the metadata service, nothing extra.
2399
+ env = {
2400
+ # These values are needed by Metaflow to set its internal
2401
+ # state appropriately.
2402
+ "METAFLOW_CODE_URL": self.code_package_url,
2403
+ "METAFLOW_CODE_SHA": self.code_package_sha,
2404
+ "METAFLOW_CODE_DS": self.flow_datastore.TYPE,
2405
+ "METAFLOW_SERVICE_URL": SERVICE_INTERNAL_URL,
2406
+ "METAFLOW_SERVICE_HEADERS": json.dumps(SERVICE_HEADERS),
2407
+ "METAFLOW_USER": "argo-workflows",
2408
+ "METAFLOW_DEFAULT_DATASTORE": self.flow_datastore.TYPE,
2409
+ "METAFLOW_DEFAULT_METADATA": DEFAULT_METADATA,
2410
+ "METAFLOW_OWNER": self.username,
2411
+ }
2412
+ # support Metaflow sandboxes
2413
+ env["METAFLOW_INIT_SCRIPT"] = KUBERNETES_SANDBOX_INIT_SCRIPT
2414
+
2415
+ # cleanup env values
2416
+ env = {
2417
+ k: v
2418
+ for k, v in env.items()
2419
+ if v is not None
2420
+ and k not in set(ARGO_WORKFLOWS_ENV_VARS_TO_SKIP.split(","))
2421
+ }
2422
+
2423
+ # We want to grab the base image used by the start step, as this is known to be pullable from within the cluster,
2424
+ # and it might contain the required libraries, allowing us to start up faster.
2425
+ start_step = next(step for step in self.flow if step.name == "start")
2426
+ resources = dict(
2427
+ [deco for deco in start_step.decorators if deco.name == "kubernetes"][
2428
+ 0
2429
+ ].attributes
2430
+ )
2431
+ from kubernetes import client as kubernetes_sdk
2432
+
2433
+ return DaemonTemplate("heartbeat-daemon").container(
2434
+ to_camelcase(
2435
+ kubernetes_sdk.V1Container(
2436
+ name="main",
2437
+ # TODO: Make the image configurable
2438
+ image=resources["image"],
2439
+ command=cmds,
2440
+ env=[
2441
+ kubernetes_sdk.V1EnvVar(name=k, value=str(v))
2442
+ for k, v in env.items()
2443
+ ],
2444
+ resources=kubernetes_sdk.V1ResourceRequirements(
2445
+ # NOTE: base resources for this are kept to a minimum to save on running costs.
2446
+ # This has an adverse effect on startup time for the daemon, which can be completely
2447
+ # alleviated by using a base image that has the required dependencies pre-installed
2448
+ requests={
2449
+ "cpu": "200m",
2450
+ "memory": "100Mi",
2451
+ },
2452
+ limits={
2453
+ "cpu": "200m",
2454
+ "memory": "100Mi",
2455
+ },
2456
+ ),
2457
+ )
2458
+ )
2459
+ )
2460
+
2330
2461
  def _compile_sensor(self):
2331
2462
  # This method compiles a Metaflow @trigger decorator into Argo Events Sensor.
2332
2463
  #
@@ -2899,6 +3030,25 @@ class Metadata(object):
2899
3030
  return json.dumps(self.to_json(), indent=4)
2900
3031
 
2901
3032
 
3033
+ class DaemonTemplate(object):
3034
+ def __init__(self, name):
3035
+ tree = lambda: defaultdict(tree)
3036
+ self.name = name
3037
+ self.payload = tree()
3038
+ self.payload["daemon"] = True
3039
+ self.payload["name"] = name
3040
+
3041
+ def container(self, container):
3042
+ self.payload["container"] = container
3043
+ return self
3044
+
3045
+ def to_json(self):
3046
+ return self.payload
3047
+
3048
+ def __str__(self):
3049
+ return json.dumps(self.payload, indent=4)
3050
+
3051
+
2902
3052
  class Template(object):
2903
3053
  # https://argoproj.github.io/argo-workflows/fields/#template
2904
3054
 
@@ -167,6 +167,12 @@ def argo_workflows(obj, name=None):
167
167
  default="",
168
168
  help="PagerDuty Events API V2 Integration key for workflow success/failure notifications.",
169
169
  )
170
+ @click.option(
171
+ "--enable-heartbeat-daemon/--no-enable-heartbeat-daemon",
172
+ default=False,
173
+ show_default=True,
174
+ help="Use a daemon container to broadcast heartbeats.",
175
+ )
170
176
  @click.option(
171
177
  "--deployer-attribute-file",
172
178
  default=None,
@@ -192,6 +198,7 @@ def create(
192
198
  notify_on_success=False,
193
199
  notify_slack_webhook_url=None,
194
200
  notify_pager_duty_integration_key=None,
201
+ enable_heartbeat_daemon=True,
195
202
  deployer_attribute_file=None,
196
203
  ):
197
204
  validate_tags(tags)
@@ -240,6 +247,7 @@ def create(
240
247
  notify_on_success,
241
248
  notify_slack_webhook_url,
242
249
  notify_pager_duty_integration_key,
250
+ enable_heartbeat_daemon,
243
251
  )
244
252
 
245
253
  if only_json:
@@ -412,6 +420,7 @@ def make_flow(
412
420
  notify_on_success,
413
421
  notify_slack_webhook_url,
414
422
  notify_pager_duty_integration_key,
423
+ enable_heartbeat_daemon,
415
424
  ):
416
425
  # TODO: Make this check less specific to Amazon S3 as we introduce
417
426
  # support for more cloud object stores.
@@ -474,6 +483,7 @@ def make_flow(
474
483
  notify_on_success=notify_on_success,
475
484
  notify_slack_webhook_url=notify_slack_webhook_url,
476
485
  notify_pager_duty_integration_key=notify_pager_duty_integration_key,
486
+ enable_heartbeat_daemon=enable_heartbeat_daemon,
477
487
  )
478
488
 
479
489
 
@@ -0,0 +1,59 @@
1
+ from collections import namedtuple
2
+ from time import sleep
3
+ from metaflow.metaflow_config import DEFAULT_METADATA
4
+ from metaflow.metaflow_environment import MetaflowEnvironment
5
+ from metaflow.plugins import METADATA_PROVIDERS
6
+ from metaflow._vendor import click
7
+
8
+
9
+ class CliState:
10
+ pass
11
+
12
+
13
+ @click.group()
14
+ @click.option("--flow_name", required=True)
15
+ @click.option("--run_id", required=True)
16
+ @click.option(
17
+ "--tag",
18
+ "tags",
19
+ multiple=True,
20
+ default=None,
21
+ help="Annotate all objects produced by Argo Workflows runs "
22
+ "with the given tag. You can specify this option multiple "
23
+ "times to attach multiple tags.",
24
+ )
25
+ @click.pass_context
26
+ def cli(ctx, flow_name, run_id, tags=None):
27
+ ctx.obj = CliState()
28
+ ctx.obj.flow_name = flow_name
29
+ ctx.obj.run_id = run_id
30
+ ctx.obj.tags = tags
31
+ # Use a dummy flow to initialize the environment and metadata service,
32
+ # as we only need a name for the flow object.
33
+ flow = namedtuple("DummyFlow", "name")
34
+ dummyflow = flow(flow_name)
35
+
36
+ # Initialize a proper metadata service instance
37
+ environment = MetaflowEnvironment(dummyflow)
38
+
39
+ ctx.obj.metadata = [m for m in METADATA_PROVIDERS if m.TYPE == DEFAULT_METADATA][0](
40
+ environment, dummyflow, None, None
41
+ )
42
+
43
+
44
+ @cli.command(help="start heartbeat process for a run")
45
+ @click.pass_obj
46
+ def heartbeat(obj):
47
+ # Try to register a run in case the start task has not taken care of it yet.
48
+ obj.metadata.register_run_id(obj.run_id, obj.tags)
49
+ # Start run heartbeat
50
+ obj.metadata.start_run_heartbeat(obj.flow_name, obj.run_id)
51
+ # Keepalive loop
52
+ while True:
53
+ # Do not pollute daemon logs with anything unnecessary,
54
+ # as they might be extremely long running.
55
+ sleep(10)
56
+
57
+
58
+ if __name__ == "__main__":
59
+ cli()
@@ -98,6 +98,7 @@ class KubernetesJob(object):
98
98
  containers=[
99
99
  client.V1Container(
100
100
  command=self._kwargs["command"],
101
+ termination_message_policy="FallbackToLogsOnError",
101
102
  ports=[]
102
103
  if self._kwargs["port"] is None
103
104
  else [
@@ -586,6 +586,7 @@ class JobSetSpec(object):
586
586
  containers=[
587
587
  client.V1Container(
588
588
  command=self._kwargs["command"],
589
+ termination_message_policy="FallbackToLogsOnError",
589
590
  ports=[]
590
591
  if self._kwargs["port"] is None
591
592
  else [
metaflow/version.py CHANGED
@@ -1 +1 @@
1
- metaflow_version = "2.12.9"
1
+ metaflow_version = "2.12.10"
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: metaflow
3
- Version: 2.12.9
3
+ Version: 2.12.10
4
4
  Summary: Metaflow: More Data Science, Less Engineering
5
5
  Author: Metaflow Developers
6
6
  Author-email: help@metaflow.org
@@ -26,7 +26,7 @@ License-File: LICENSE
26
26
  Requires-Dist: requests
27
27
  Requires-Dist: boto3
28
28
  Provides-Extra: stubs
29
- Requires-Dist: metaflow-stubs ==2.12.9 ; extra == 'stubs'
29
+ Requires-Dist: metaflow-stubs ==2.12.10 ; extra == 'stubs'
30
30
 
31
31
  ![Metaflow_Logo_Horizontal_FullColor_Ribbon_Dark_RGB](https://user-images.githubusercontent.com/763451/89453116-96a57e00-d713-11ea-9fa6-82b29d4d6eff.png)
32
32
 
@@ -35,7 +35,7 @@ metaflow/tuple_util.py,sha256=_G5YIEhuugwJ_f6rrZoelMFak3DqAR2tt_5CapS1XTY,830
35
35
  metaflow/unbounded_foreach.py,sha256=p184WMbrMJ3xKYHwewj27ZhRUsSj_kw1jlye5gA9xJk,387
36
36
  metaflow/util.py,sha256=m5womQ7y-jXehuMyHPfByDbZ4HwTJxzs869cPOlMR8s,13057
37
37
  metaflow/vendor.py,sha256=FchtA9tH22JM-eEtJ2c9FpUdMn8sSb1VHuQS56EcdZk,5139
38
- metaflow/version.py,sha256=TkQOTBK44Rf1jdRra0slObbAYISL1VgE5Ar3nFR7PC0,28
38
+ metaflow/version.py,sha256=t4RI_U0iAZBohPGuAobz1uO1iHJdG0OWNq9m857GkRw,29
39
39
  metaflow/_vendor/__init__.py,sha256=y_CiwUD3l4eAKvTVDZeqgVujMy31cAM1qjAB-HfI-9s,353
40
40
  metaflow/_vendor/typing_extensions.py,sha256=0nUs5p1A_UrZigrAVBoOEM6TxU37zzPDUtiij1ZwpNc,110417
41
41
  metaflow/_vendor/zipp.py,sha256=ajztOH-9I7KA_4wqDYygtHa6xUBVZgFpmZ8FE74HHHI,8425
@@ -174,10 +174,11 @@ metaflow/plugins/airflow/sensors/s3_sensor.py,sha256=iDReG-7FKnumrtQg-HY6cCUAAqN
174
174
  metaflow/plugins/argo/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
175
175
  metaflow/plugins/argo/argo_client.py,sha256=MKKhMCbWOPzf6z5zQQiyDRHHkAXcO7ipboDZDqAAvOk,15849
176
176
  metaflow/plugins/argo/argo_events.py,sha256=_C1KWztVqgi3zuH57pInaE9OzABc2NnncC-zdwOMZ-w,5909
177
- metaflow/plugins/argo/argo_workflows.py,sha256=sWS-uG1csYkUGEv1LMex8tWhoF0GzIoQdXRdusbIJLw,154446
178
- metaflow/plugins/argo/argo_workflows_cli.py,sha256=FnjnXPXas-ANfZ5m9s4YCHjejrkF7SbOS87J7z6339c,35036
177
+ metaflow/plugins/argo/argo_workflows.py,sha256=a5el_pkK6B-XymAbFyxBdPjuy8OvtijHuSvWYLqe8iA,160728
178
+ metaflow/plugins/argo/argo_workflows_cli.py,sha256=N_QMeFypWLFBtBzQ68WadCzOqdaA1FQQ4HHw5lizImE,35370
179
179
  metaflow/plugins/argo/argo_workflows_decorator.py,sha256=A23cRd8IBDUFJRmtQ0U8kik4gk0lg5s8UKrSO7obSEM,8475
180
180
  metaflow/plugins/argo/argo_workflows_deployer.py,sha256=yMIXAVoAuBLHCqQyFriV_Wc_Lp5D041Ay83R5pYNoXE,8066
181
+ metaflow/plugins/argo/daemon.py,sha256=dJOS_UUISXBYffi3oGVKPwq4Pa4P_nGBGL15piPaPto,1776
181
182
  metaflow/plugins/argo/generate_input_paths.py,sha256=loYsI6RFX9LlFsHb7Fe-mzlTTtRdySoOu7sYDy-uXK0,881
182
183
  metaflow/plugins/argo/jobset_input_paths.py,sha256=_JhZWngA6p9Q_O2fx3pdzKI0WE-HPRHz_zFvY2pHPTQ,525
183
184
  metaflow/plugins/aws/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -281,8 +282,8 @@ metaflow/plugins/kubernetes/kubernetes.py,sha256=0UjKZy5_5KOGYC-k0DZqpv-ca_tZBkw
281
282
  metaflow/plugins/kubernetes/kubernetes_cli.py,sha256=qBDdr1Lvtt-RO9pB-9_HTOPdzAmDvvJ0aiQ1OoCcrMU,10892
282
283
  metaflow/plugins/kubernetes/kubernetes_client.py,sha256=GKg-gT3qhXMRQV-sG1YyoOf3Z32NXr_wwEN2ytMVSEg,2471
283
284
  metaflow/plugins/kubernetes/kubernetes_decorator.py,sha256=DDc6N1Q0Cmcl44U-uZVBVr0tS4yL9N4GcAnf4eeX3Bk,24623
284
- metaflow/plugins/kubernetes/kubernetes_job.py,sha256=1uQmEHsLyJ5jewmIA5smrFtSO8MymOqPDb-YIS65u50,31614
285
- metaflow/plugins/kubernetes/kubernetes_jobsets.py,sha256=Cr5M7m0lKShv6o8xjRJap8hSRgf51fz4RR0THlCSM08,40780
285
+ metaflow/plugins/kubernetes/kubernetes_job.py,sha256=QYmb2VIEniJa0xZlRBofyoxpuhQjbpDbSjBdB0X6N70,31694
286
+ metaflow/plugins/kubernetes/kubernetes_jobsets.py,sha256=60331lf8UE95zJrkM0jtTV9vxJvre2SU63j-_i2e-8I,40872
286
287
  metaflow/plugins/metadata/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
287
288
  metaflow/plugins/metadata/local.py,sha256=YhLJC5zjVJrvQFIyQ92ZBByiUmhCC762RUX7ITX12O8,22428
288
289
  metaflow/plugins/metadata/service.py,sha256=ihq5F7KQZlxvYwzH_-jyP2aWN_I96i2vp92j_d697s8,20204
@@ -342,9 +343,9 @@ metaflow/tutorials/07-worldview/README.md,sha256=5vQTrFqulJ7rWN6r20dhot9lI2sVj9W
342
343
  metaflow/tutorials/07-worldview/worldview.ipynb,sha256=ztPZPI9BXxvW1QdS2Tfe7LBuVzvFvv0AToDnsDJhLdE,2237
343
344
  metaflow/tutorials/08-autopilot/README.md,sha256=GnePFp_q76jPs991lMUqfIIh5zSorIeWznyiUxzeUVE,1039
344
345
  metaflow/tutorials/08-autopilot/autopilot.ipynb,sha256=DQoJlILV7Mq9vfPBGW-QV_kNhWPjS5n6SJLqePjFYLY,3191
345
- metaflow-2.12.9.dist-info/LICENSE,sha256=nl_Lt5v9VvJ-5lWJDT4ddKAG-VZ-2IaLmbzpgYDz2hU,11343
346
- metaflow-2.12.9.dist-info/METADATA,sha256=VVvmqX24fHz8PbJ483_p9Tq7i42DuZkfQFQ1T96alYo,5906
347
- metaflow-2.12.9.dist-info/WHEEL,sha256=ANi2y8tYx-p70pj7MSdqQMJNFJEUgAOyfPpHzqz0w84,109
348
- metaflow-2.12.9.dist-info/entry_points.txt,sha256=IKwTN1T3I5eJL3uo_vnkyxVffcgnRdFbKwlghZfn27k,57
349
- metaflow-2.12.9.dist-info/top_level.txt,sha256=v1pDHoWaSaKeuc5fKTRSfsXCKSdW1zvNVmvA-i0if3o,9
350
- metaflow-2.12.9.dist-info/RECORD,,
346
+ metaflow-2.12.10.dist-info/LICENSE,sha256=nl_Lt5v9VvJ-5lWJDT4ddKAG-VZ-2IaLmbzpgYDz2hU,11343
347
+ metaflow-2.12.10.dist-info/METADATA,sha256=KgfU7CsRqUnD_STSlhUclySsag0ieBOT8qH_vbH5ISE,5908
348
+ metaflow-2.12.10.dist-info/WHEEL,sha256=XRxW4r1PNiVhMpP4bT9oWtu3HyndxpJ84SkubFgzp_Y,109
349
+ metaflow-2.12.10.dist-info/entry_points.txt,sha256=IKwTN1T3I5eJL3uo_vnkyxVffcgnRdFbKwlghZfn27k,57
350
+ metaflow-2.12.10.dist-info/top_level.txt,sha256=v1pDHoWaSaKeuc5fKTRSfsXCKSdW1zvNVmvA-i0if3o,9
351
+ metaflow-2.12.10.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (71.1.0)
2
+ Generator: setuptools (72.1.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py2-none-any
5
5
  Tag: py3-none-any