metaflow 2.13.4__py2.py3-none-any.whl → 2.13.6__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -16,6 +16,7 @@ CLIS_DESC = [
  ("argo-workflows", ".argo.argo_workflows_cli.cli"),
  ("card", ".cards.card_cli.cli"),
  ("tag", ".tag_cli.cli"),
+ ("spot-metadata", ".kubernetes.spot_metadata_cli.cli"),
  ("logs", ".logs_cli.cli"),
  ]
@@ -104,6 +105,10 @@ SIDECARS_DESC = [
  "save_logs_periodically",
  "..mflog.save_logs_periodically.SaveLogsPeriodicallySidecar",
  ),
+ (
+ "spot_termination_monitor",
+ ".kubernetes.spot_monitor_sidecar.SpotTerminationMonitorSidecar",
+ ),
  ("heartbeat", "metaflow.metadata_provider.heartbeat.MetadataHeartBeat"),
  ]
@@ -624,6 +624,16 @@ class ArgoWorkflows(object):
  for event in trigger_on_finish_deco.triggers:
  # Actual filters are deduced here since we don't have access to
  # the current object in the @trigger_on_finish decorator.
+ project_name = event.get("project") or current.get("project_name")
+ branch_name = event.get("branch") or current.get("branch_name")
+ # validate that we have complete project info for an event name
+ if project_name or branch_name:
+ if not (project_name and branch_name):
+ # if one of the two is missing, we would end up listening to an event that will never be broadcast.
+ raise ArgoWorkflowsException(
+ "Incomplete project info. Please specify both 'project' and 'project_branch' or use the @project decorator"
+ )
+
  triggers.append(
  {
  # Make sure this remains consistent with the event name format
@@ -632,18 +642,16 @@ class ArgoWorkflows(object):
  % ".".join(
  v
  for v in [
- event.get("project") or current.get("project_name"),
- event.get("branch") or current.get("branch_name"),
+ project_name,
+ branch_name,
  event["flow"],
  ]
  if v
  ),
  "filters": {
  "auto-generated-by-metaflow": True,
- "project_name": event.get("project")
- or current.get("project_name"),
- "branch_name": event.get("branch")
- or current.get("branch_name"),
+ "project_name": project_name,
+ "branch_name": branch_name,
  # TODO: Add a time filters to guard against cached events
  },
  "type": "run",
@@ -841,8 +849,15 @@ class ArgoWorkflows(object):
  Metadata()
  .labels(self._base_labels)
  .label("app.kubernetes.io/name", "metaflow-task")
- .annotations(annotations)
- .annotations(self._base_annotations)
+ .annotations(
+ {
+ **annotations,
+ **self._base_annotations,
+ **{
+ "metaflow/run_id": "argo-{{workflow.name}}"
+ }, # we want pods of the workflow to have the run_id as an annotation as well
+ }
+ )
  )
  # Set the entrypoint to flow name
  .entrypoint(self.flow.name)
@@ -1705,6 +1720,7 @@ class ArgoWorkflows(object):
  },
  **{
  # Some optional values for bookkeeping
+ "METAFLOW_FLOW_FILENAME": os.path.basename(sys.argv[0]),
  "METAFLOW_FLOW_NAME": self.flow.name,
  "METAFLOW_STEP_NAME": node.name,
  "METAFLOW_RUN_ID": run_id,
@@ -398,111 +398,23 @@ class TriggerOnFinishDecorator(FlowDecorator):
  )
  elif self.attributes["flow"]:
  # flow supports the format @trigger_on_finish(flow='FooFlow')
- if is_stringish(self.attributes["flow"]):
- self.triggers.append(
- {
- "fq_name": self.attributes["flow"],
- }
- )
- elif isinstance(self.attributes["flow"], dict):
- if "name" not in self.attributes["flow"]:
- raise MetaflowException(
- "The *flow* attribute for *@trigger_on_finish* is missing the "
- "*name* key."
- )
- flow_name = self.attributes["flow"]["name"]
-
- if not is_stringish(flow_name) or "." in flow_name:
- raise MetaflowException(
- "The *name* attribute of the *flow* is not a valid string"
- )
- result = {"fq_name": flow_name}
- if "project" in self.attributes["flow"]:
- if is_stringish(self.attributes["flow"]["project"]):
- result["project"] = self.attributes["flow"]["project"]
- else:
- raise MetaflowException(
- "The *project* attribute of the *flow* is not a string"
- )
- if "project_branch" in self.attributes["flow"]:
- if is_stringish(self.attributes["flow"]["project_branch"]):
- result["branch"] = self.attributes["flow"]["project_branch"]
- else:
- raise MetaflowException(
- "The *project_branch* attribute of the *flow* is not a string"
- )
- self.triggers.append(result)
- elif callable(self.attributes["flow"]) and not isinstance(
+ flow = self.attributes["flow"]
+ if callable(flow) and not isinstance(
  self.attributes["flow"], DeployTimeField
  ):
- trig = DeployTimeField(
- "fq_name", [str, dict], None, self.attributes["flow"], False
- )
+ trig = DeployTimeField("fq_name", [str, dict], None, flow, False)
  self.triggers.append(trig)
  else:
- raise MetaflowException(
- "Incorrect type for *flow* attribute in *@trigger_on_finish* "
- " decorator. Supported type is string or Dict[str, str] - \n"
- "@trigger_on_finish(flow='FooFlow') or "
- "@trigger_on_finish(flow={'name':'FooFlow', 'project_branch': 'branch'})"
- )
+ self.triggers.extend(self._parse_static_triggers([flow]))
  elif self.attributes["flows"]:
  # flows attribute supports the following formats -
  # 1. flows=['FooFlow', 'BarFlow']
- if isinstance(self.attributes["flows"], list):
- for flow in self.attributes["flows"]:
- if is_stringish(flow):
- self.triggers.append(
- {
- "fq_name": flow,
- }
- )
- elif isinstance(flow, dict):
- if "name" not in flow:
- raise MetaflowException(
- "One or more flows in the *flows* attribute for "
- "*@trigger_on_finish* is missing the "
- "*name* key."
- )
- flow_name = flow["name"]
-
- if not is_stringish(flow_name) or "." in flow_name:
- raise MetaflowException(
- "The *name* attribute '%s' is not a valid string"
- % str(flow_name)
- )
- result = {"fq_name": flow_name}
- if "project" in flow:
- if is_stringish(flow["project"]):
- result["project"] = flow["project"]
- else:
- raise MetaflowException(
- "The *project* attribute of the *flow* '%s' is not "
- "a string" % flow_name
- )
- if "project_branch" in flow:
- if is_stringish(flow["project_branch"]):
- result["branch"] = flow["project_branch"]
- else:
- raise MetaflowException(
- "The *project_branch* attribute of the *flow* %s "
- "is not a string" % flow_name
- )
- self.triggers.append(result)
- else:
- raise MetaflowException(
- "One or more flows in *flows* attribute in "
- "*@trigger_on_finish* decorator have an incorrect type. "
- "Supported type is string or Dict[str, str]- \n"
- "@trigger_on_finish(flows=['FooFlow', 'BarFlow']"
- )
- elif callable(self.attributes["flows"]) and not isinstance(
- self.attributes["flows"], DeployTimeField
- ):
- trig = DeployTimeField(
- "flows", list, None, self.attributes["flows"], False
- )
+ flows = self.attributes["flows"]
+ if callable(flows) and not isinstance(flows, DeployTimeField):
+ trig = DeployTimeField("flows", list, None, flows, False)
  self.triggers.append(trig)
+ elif isinstance(flows, list):
+ self.triggers.extend(self._parse_static_triggers(flows))
  else:
  raise MetaflowException(
  "Incorrect type for *flows* attribute in *@trigger_on_finish* "
@@ -519,26 +431,7 @@ class TriggerOnFinishDecorator(FlowDecorator):
  for trigger in self.triggers:
  if isinstance(trigger, DeployTimeField):
  continue
- if trigger["fq_name"].count(".") == 0:
- # fully qualified name is just the flow name
- trigger["flow"] = trigger["fq_name"]
- elif trigger["fq_name"].count(".") >= 2:
- # fully qualified name is of the format - project.branch.flow_name
- trigger["project"], tail = trigger["fq_name"].split(".", maxsplit=1)
- trigger["branch"], trigger["flow"] = tail.rsplit(".", maxsplit=1)
- else:
- raise MetaflowException(
- "Incorrect format for *flow* in *@trigger_on_finish* "
- "decorator. Specify either just the *flow_name* or a fully "
- "qualified name like *project_name.branch_name.flow_name*."
- )
- # TODO: Also sanity check project and branch names
- if not re.match(r"^[A-Za-z0-9_]+$", trigger["flow"]):
- raise MetaflowException(
- "Invalid flow name *%s* in *@trigger_on_finish* "
- "decorator. Only alphanumeric characters and "
- "underscores(_) are allowed." % trigger["flow"]
- )
+ self._parse_fq_name(trigger)

  self.options = self.attributes["options"]
@@ -593,9 +486,67 @@ class TriggerOnFinishDecorator(FlowDecorator):
  run_objs.append(run_obj)
  current._update_env({"trigger": Trigger.from_runs(run_objs)})

+ @staticmethod
+ def _parse_static_triggers(flows):
+ results = []
+ for flow in flows:
+ if is_stringish(flow):
+ results.append(
+ {
+ "fq_name": flow,
+ }
+ )
+ elif isinstance(flow, dict):
+ if "name" not in flow:
+ if len(flows) > 1:
+ raise MetaflowException(
+ "One or more flows in the *flows* attribute for "
+ "*@trigger_on_finish* is missing the "
+ "*name* key."
+ )
+ raise MetaflowException(
+ "The *flow* attribute for *@trigger_on_finish* is missing the "
+ "*name* key."
+ )
+ flow_name = flow["name"]
+
+ if not is_stringish(flow_name) or "." in flow_name:
+ raise MetaflowException(
+ f"The *name* attribute of the *flow* {flow_name} is not a valid string"
+ )
+ result = {"fq_name": flow_name}
+ if "project" in flow:
+ if is_stringish(flow["project"]):
+ result["project"] = flow["project"]
+ else:
+ raise MetaflowException(
+ f"The *project* attribute of the *flow* {flow_name} is not a string"
+ )
+ if "project_branch" in flow:
+ if is_stringish(flow["project_branch"]):
+ result["branch"] = flow["project_branch"]
+ else:
+ raise MetaflowException(
+ f"The *project_branch* attribute of the *flow* {flow_name} is not a string"
+ )
+ results.append(result)
+ else:
+ if len(flows) > 1:
+ raise MetaflowException(
+ "One or more flows in the *flows* attribute for "
+ "*@trigger_on_finish* decorator have an incorrect type. "
+ "Supported type is string or Dict[str, str]- \n"
+ "@trigger_on_finish(flows=['FooFlow', 'BarFlow']"
+ )
+ raise MetaflowException(
+ "Incorrect type for *flow* attribute in *@trigger_on_finish* "
+ " decorator. Supported type is string or Dict[str, str] - \n"
+ "@trigger_on_finish(flow='FooFlow') or "
+ "@trigger_on_finish(flow={'name':'FooFlow', 'project_branch': 'branch'})"
+ )
+ return results
+
  def _parse_fq_name(self, trigger):
- if isinstance(trigger, DeployTimeField):
- trigger["fq_name"] = deploy_time_eval(trigger["fq_name"])
  if trigger["fq_name"].count(".") == 0:
  # fully qualified name is just the flow name
  trigger["flow"] = trigger["fq_name"]
@@ -615,32 +566,18 @@ class TriggerOnFinishDecorator(FlowDecorator):
  "decorator. Only alphanumeric characters and "
  "underscores(_) are allowed." % trigger["flow"]
  )
- return trigger

  def format_deploytime_value(self):
- for trigger in self.triggers:
- # Case were trigger is a function that returns a list
- # Need to do this bc we need to iterate over list and process
- if isinstance(trigger, DeployTimeField):
- deploy_value = deploy_time_eval(trigger)
- if isinstance(deploy_value, list):
- self.triggers = deploy_value
+ if len(self.triggers) == 1 and isinstance(self.triggers[0], DeployTimeField):
+ deploy_value = deploy_time_eval(self.triggers[0])
+ if isinstance(deploy_value, list):
+ self.triggers = deploy_value
  else:
- break
- for trigger in self.triggers:
- # Entire trigger is a function (returns either string or dict)
- old_trig = trigger
- if isinstance(trigger, DeployTimeField):
- trigger = deploy_time_eval(trigger)
- if isinstance(trigger, dict):
- trigger["fq_name"] = trigger.get("name")
- trigger["project"] = trigger.get("project")
- trigger["branch"] = trigger.get("project_branch")
- # We also added this bc it won't be formatted yet
- if isinstance(trigger, str):
- trigger = {"fq_name": trigger}
- trigger = self._parse_fq_name(trigger)
- self.triggers[self.triggers.index(old_trig)] = trigger
+ self.triggers = [deploy_value]
+ triggers = self._parse_static_triggers(self.triggers)
+ for trigger in triggers:
+ self._parse_fq_name(trigger)
+ self.triggers = triggers

  def get_top_level_options(self):
  return list(self._option_values.items())
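
As a reference for the refactor above, a minimal sketch (assuming metaflow 2.13.6 and the metaflow.plugins.events_decorator module path; flow and project names are placeholders) of how the new _parse_static_triggers helper normalizes static trigger specs:

    from metaflow.plugins.events_decorator import TriggerOnFinishDecorator

    # Static specs, in the forms the decorator's error messages describe.
    specs = ["FooFlow", {"name": "BarFlow", "project": "proj", "project_branch": "main"}]
    triggers = TriggerOnFinishDecorator._parse_static_triggers(specs)
    # triggers == [{"fq_name": "FooFlow"},
    #              {"fq_name": "BarFlow", "project": "proj", "branch": "main"}]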
@@ -669,15 +669,6 @@ class Kubernetes(object):
  for name, value in system_annotations.items():
  job.annotation(name, value)

- (
- job.annotation("metaflow/run_id", run_id)
- .annotation("metaflow/step_name", step_name)
- .annotation("metaflow/task_id", task_id)
- .annotation("metaflow/attempt", attempt)
- .label("app.kubernetes.io/name", "metaflow-task")
- .label("app.kubernetes.io/part-of", "metaflow")
- )
-
  return job

  def create_k8sjob(self, job):
@@ -190,7 +190,7 @@ def step(
  executable = ctx.obj.environment.executable(step_name, executable)

  # Set environment
- env = {}
+ env = {"METAFLOW_FLOW_FILENAME": os.path.basename(sys.argv[0])}
  env_deco = [deco for deco in node.decorators if deco.name == "environment"]
  if env_deco:
  env = env_deco[0].attributes["vars"]
@@ -547,6 +547,13 @@ class KubernetesDecorator(StepDecorator):
  self._save_logs_sidecar = Sidecar("save_logs_periodically")
  self._save_logs_sidecar.start()

+ # Start spot termination monitor sidecar.
+ current._update_env(
+ {"spot_termination_notice": "/tmp/spot_termination_notice"}
+ )
+ self._spot_monitor_sidecar = Sidecar("spot_termination_monitor")
+ self._spot_monitor_sidecar.start()
+
  num_parallel = None
  if hasattr(flow, "_parallel_ubf_iter"):
  num_parallel = flow._parallel_ubf_iter.num_parallel
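
Since the decorator registers the notice path on current above, step code could in principle check for an impending spot interruption. A hedged sketch, not part of this diff (flow name and attribute access pattern are assumptions based on how current._update_env exposes keys):

    import os
    from metaflow import FlowSpec, step, current, kubernetes

    class SpotAwareFlow(FlowSpec):
        @kubernetes  # hypothetical resource settings omitted
        @step
        def start(self):
            # Path registered by KubernetesDecorator; the sidecar writes the
            # termination-time string here when AWS issues a spot interruption notice.
            notice = getattr(current, "spot_termination_notice", None)
            if notice and os.path.exists(notice):
                print("spot termination notice received, wrapping up early")
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        SpotAwareFlow()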
@@ -605,6 +612,7 @@ class KubernetesDecorator(StepDecorator):

  try:
  self._save_logs_sidecar.terminate()
+ self._spot_monitor_sidecar.terminate()
  except:
  # Best effort kill
  pass
@@ -0,0 +1,69 @@
+from metaflow._vendor import click
+from datetime import datetime, timezone
+from metaflow.tagging_util import validate_tags
+from metaflow.metadata_provider import MetaDatum
+
+
+@click.group()
+def cli():
+    pass
+
+
+@cli.group(help="Commands related to spot metadata.")
+def spot_metadata():
+    pass
+
+
+@spot_metadata.command(help="Record spot termination metadata for a task.")
+@click.option(
+    "--run-id",
+    required=True,
+    help="Run ID for which metadata is to be recorded.",
+)
+@click.option(
+    "--step-name",
+    required=True,
+    help="Step Name for which metadata is to be recorded.",
+)
+@click.option(
+    "--task-id",
+    required=True,
+    help="Task ID for which metadata is to be recorded.",
+)
+@click.option(
+    "--termination-notice-time",
+    required=True,
+    help="Spot termination notice time.",
+)
+@click.option(
+    "--tag",
+    "tags",
+    multiple=True,
+    required=False,
+    default=None,
+    help="List of tags.",
+)
+@click.pass_obj
+def record(obj, run_id, step_name, task_id, termination_notice_time, tags=None):
+    validate_tags(tags)
+
+    tag_list = list(tags) if tags else []
+
+    entries = [
+        MetaDatum(
+            field="spot-termination-received-at",
+            value=datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"),
+            type="spot-termination-received-at",
+            tags=tag_list,
+        ),
+        MetaDatum(
+            field="spot-termination-time",
+            value=termination_notice_time,
+            type="spot-termination-time",
+            tags=tag_list,
+        ),
+    ]
+
+    obj.metadata.register_metadata(
+        run_id=run_id, step_name=step_name, task_id=task_id, metadata=entries
+    )
@@ -0,0 +1,109 @@
+import os
+import sys
+import time
+import signal
+import requests
+import subprocess
+from multiprocessing import Process
+from datetime import datetime, timezone
+from metaflow.sidecar import MessageTypes
+
+
+class SpotTerminationMonitorSidecar(object):
+    EC2_TYPE_URL = "http://169.254.169.254/latest/meta-data/instance-life-cycle"
+    METADATA_URL = "http://169.254.169.254/latest/meta-data/spot/termination-time"
+    TOKEN_URL = "http://169.254.169.254/latest/api/token"
+    POLL_INTERVAL = 5  # seconds
+
+    def __init__(self):
+        self.is_alive = True
+        self._process = None
+        self._token = None
+        self._token_expiry = 0
+
+        if self._is_aws_spot_instance():
+            self._process = Process(target=self._monitor_loop)
+            self._process.start()
+
+    def process_message(self, msg):
+        if msg.msg_type == MessageTypes.SHUTDOWN:
+            self.is_alive = False
+            if self._process:
+                self._process.terminate()
+
+    @classmethod
+    def get_worker(cls):
+        return cls
+
+    def _get_imds_token(self):
+        current_time = time.time()
+        if current_time >= self._token_expiry - 60:  # Refresh 60s before expiry
+            try:
+                response = requests.put(
+                    url=self.TOKEN_URL,
+                    headers={"X-aws-ec2-metadata-token-ttl-seconds": "300"},
+                    timeout=1,
+                )
+                if response.status_code == 200:
+                    self._token = response.text
+                    self._token_expiry = current_time + 240  # Slightly less than TTL
+            except requests.exceptions.RequestException:
+                pass
+        return self._token
+
+    def _make_ec2_request(self, url, timeout):
+        token = self._get_imds_token()
+        headers = {"X-aws-ec2-metadata-token": token} if token else {}
+        response = requests.get(url=url, headers=headers, timeout=timeout)
+        return response
+
+    def _is_aws_spot_instance(self):
+        try:
+            response = self._make_ec2_request(url=self.EC2_TYPE_URL, timeout=1)
+            return response.status_code == 200 and response.text == "spot"
+        except (requests.exceptions.RequestException, requests.exceptions.Timeout):
+            return False
+
+    def _monitor_loop(self):
+        while self.is_alive:
+            try:
+                response = self._make_ec2_request(url=self.METADATA_URL, timeout=1)
+                if response.status_code == 200:
+                    termination_time = response.text
+                    self._emit_termination_metadata(termination_time)
+                    os.kill(os.getppid(), signal.SIGTERM)
+                    break
+            except (requests.exceptions.RequestException, requests.exceptions.Timeout):
+                pass
+            time.sleep(self.POLL_INTERVAL)
+
+    def _emit_termination_metadata(self, termination_time):
+        flow_filename = os.getenv("METAFLOW_FLOW_FILENAME")
+        pathspec = os.getenv("MF_PATHSPEC")
+        _, run_id, step_name, task_id = pathspec.split("/")
+        retry_count = os.getenv("MF_ATTEMPT")
+
+        with open("/tmp/spot_termination_notice", "w") as fp:
+            fp.write(termination_time)
+
+        command = [
+            sys.executable,
+            f"/metaflow/{flow_filename}",
+            "spot-metadata",
+            "record",
+            "--run-id",
+            run_id,
+            "--step-name",
+            step_name,
+            "--task-id",
+            task_id,
+            "--termination-notice-time",
+            termination_time,
+            "--tag",
+            "attempt_id:{}".format(retry_count),
+        ]
+
+        result = subprocess.run(command, capture_output=True, text=True)
+
+        if result.returncode != 0:
+            print(f"Failed to record spot termination metadata: {result.stderr}")
metaflow/version.py CHANGED
@@ -1 +1 @@
- metaflow_version = "2.13.4"
+ metaflow_version = "2.13.6"
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: metaflow
- Version: 2.13.4
+ Version: 2.13.6
  Summary: Metaflow: More Data Science, Less Engineering
  Author: Metaflow Developers
  Author-email: help@metaflow.org
@@ -26,7 +26,7 @@ License-File: LICENSE
  Requires-Dist: requests
  Requires-Dist: boto3
  Provides-Extra: stubs
- Requires-Dist: metaflow-stubs==2.13.4; extra == "stubs"
+ Requires-Dist: metaflow-stubs==2.13.6; extra == "stubs"
  Dynamic: author
  Dynamic: author-email
  Dynamic: classifier
@@ -36,7 +36,7 @@ metaflow/tuple_util.py,sha256=_G5YIEhuugwJ_f6rrZoelMFak3DqAR2tt_5CapS1XTY,830
  metaflow/unbounded_foreach.py,sha256=p184WMbrMJ3xKYHwewj27ZhRUsSj_kw1jlye5gA9xJk,387
  metaflow/util.py,sha256=hKjHl6NYJkKBSU2tzdVbddfOX1zWK73T4GCO42A0XB4,14666
  metaflow/vendor.py,sha256=FchtA9tH22JM-eEtJ2c9FpUdMn8sSb1VHuQS56EcdZk,5139
- metaflow/version.py,sha256=drr-g04woqZf71pNYGX7pz8CSPausyn9M8cgyFIvKlE,28
+ metaflow/version.py,sha256=p2wlGBdWzsaPFEcG9sZp37O29SjTggTsTxrhb2gRZvc,28
  metaflow/_vendor/__init__.py,sha256=y_CiwUD3l4eAKvTVDZeqgVujMy31cAM1qjAB-HfI-9s,353
  metaflow/_vendor/typing_extensions.py,sha256=0nUs5p1A_UrZigrAVBoOEM6TxU37zzPDUtiij1ZwpNc,110417
  metaflow/_vendor/zipp.py,sha256=ajztOH-9I7KA_4wqDYygtHa6xUBVZgFpmZ8FE74HHHI,8425
@@ -149,12 +149,12 @@ metaflow/mflog/mflog.py,sha256=VebXxqitOtNAs7VJixnNfziO_i_urG7bsJ5JiB5IXgY,4370
  metaflow/mflog/save_logs.py,sha256=ZBAF4BMukw4FMAC7odpr9OI2BC_2petPtDX0ca6srC4,2352
  metaflow/mflog/save_logs_periodically.py,sha256=2Uvk9hi-zlCqXxOQoXmmjH1SCugfw6eG6w70WgfI-ho,1256
  metaflow/mflog/tee.py,sha256=wTER15qeHuiRpCkOqo-bd-r3Gj-EVlf3IvWRCA4beW4,887
- metaflow/plugins/__init__.py,sha256=NXlwhFvhLYhAVhjCyRJZMIpTBBBJlzFupM7MgDKNYv0,7872
+ metaflow/plugins/__init__.py,sha256=Lr7i7ssJI_-czorJYjMFcRhGspqArobNoXUl9T1p3MY,8055
  metaflow/plugins/catch_decorator.py,sha256=UOM2taN_OL2RPpuJhwEOA9ZALm0-hHD0XS2Hn2GUev0,4061
  metaflow/plugins/debug_logger.py,sha256=mcF5HYzJ0NQmqCMjyVUk3iAP-heroHRIiVWQC6Ha2-I,879
  metaflow/plugins/debug_monitor.py,sha256=Md5X_sDOSssN9pt2D8YcaIjTK5JaQD55UAYTcF6xYF0,1099
  metaflow/plugins/environment_decorator.py,sha256=6m9j2B77d-Ja_l_9CTJ__0O6aB2a8Qt_lAZu6UjAcUA,587
- metaflow/plugins/events_decorator.py,sha256=WsLcuy-FmXpQ6mvm431deTB2hE-fPYILgbVSWHRXslQ,28121
+ metaflow/plugins/events_decorator.py,sha256=T_YSK-DlgZhd3ge9PlpTRNaMi15GK0tKZMZl1NdV9DQ,24403
  metaflow/plugins/logs_cli.py,sha256=77W5UNagU2mOKSMMvrQxQmBLRzvmjK-c8dWxd-Ygbqs,11410
  metaflow/plugins/package_cli.py,sha256=-J6D4cupHfWSZ4GEFo2yy9Je9oL3owRWm5pEJwaiqd4,1649
  metaflow/plugins/parallel_decorator.py,sha256=GR6LKIW7_S7AoU50Ar2_0nndVtO2epdn3LuthE0vKMQ,9127
@@ -181,7 +181,7 @@ metaflow/plugins/airflow/sensors/s3_sensor.py,sha256=iDReG-7FKnumrtQg-HY6cCUAAqN
  metaflow/plugins/argo/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  metaflow/plugins/argo/argo_client.py,sha256=PS_cYGnPw9h4X7TP_plObDH3clMw4reOsBLkkGPTd0Y,16282
  metaflow/plugins/argo/argo_events.py,sha256=_C1KWztVqgi3zuH57pInaE9OzABc2NnncC-zdwOMZ-w,5909
- metaflow/plugins/argo/argo_workflows.py,sha256=CdZoBZU8aSkza1wRw60VogJv9QdF4CLbfD4o8XPAf5o,175510
+ metaflow/plugins/argo/argo_workflows.py,sha256=72zuqG23uBHDhoCJl52jHmbLE_w1lvZV5EnrkVggBJg,176389
  metaflow/plugins/argo/argo_workflows_cli.py,sha256=11_8l4IrtkwviKsijInTZPt7YK5TZzClREnw_Cf4D5o,36706
  metaflow/plugins/argo/argo_workflows_decorator.py,sha256=ogCSBmwsC2C3eusydrgjuAJd4qK18f1sI4jJwA4Fd-o,7800
  metaflow/plugins/argo/argo_workflows_deployer.py,sha256=6kHxEnYXJwzNCM9swI8-0AckxtPWqwhZLerYkX8fxUM,4444
@@ -288,12 +288,14 @@ metaflow/plugins/gcp/gs_utils.py,sha256=ZmIGFse1qYyvAVrwga23PQUzF6dXEDLLsZ2F-YRm
  metaflow/plugins/gcp/includefile_support.py,sha256=OQO0IVWv4ObboL0VqEZwcDOyj9ORLdur66JToxQ84vU,3887
  metaflow/plugins/kubernetes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  metaflow/plugins/kubernetes/kube_utils.py,sha256=jdFMGbEmIow-oli26v31W9CmbZXigx06b3D_xIobpk0,4140
- metaflow/plugins/kubernetes/kubernetes.py,sha256=7yaa1TL3TcC-Js6_kAi0HGFLbXesMw3WiKWPlN9yIxo,30028
- metaflow/plugins/kubernetes/kubernetes_cli.py,sha256=A6hI6KZ6sadPAOAyGhjwITMfnabr6voBXLRlDDxylcg,13874
+ metaflow/plugins/kubernetes/kubernetes.py,sha256=g_E5jBhoMSDzGFnP5PDQiVPPllUr1wulVcG3tz247G8,29668
+ metaflow/plugins/kubernetes/kubernetes_cli.py,sha256=o_o0BDEJFpTuga7txRmkvZH8OIuTb5kI4UaG6xbzf84,13929
  metaflow/plugins/kubernetes/kubernetes_client.py,sha256=tuvXP-QKpdeSmzVolB2R_TaacOr5DIb0j642eKcjsiM,6491
- metaflow/plugins/kubernetes/kubernetes_decorator.py,sha256=5NfrCZaGf2a2oQK4CeJExcizbojynCnEXzXqSN5Hoz0,30500
+ metaflow/plugins/kubernetes/kubernetes_decorator.py,sha256=OwIuB9MAeO_fmTv3_IurDnbL_szXH7et3TwEva4PCfc,30853
  metaflow/plugins/kubernetes/kubernetes_job.py,sha256=pO9ExyAVCDoAoWFn9oFcos2aa0MQk4_D61O-T4E10E8,31826
  metaflow/plugins/kubernetes/kubernetes_jobsets.py,sha256=9kU43eE5IvIa7y-POzBdxnJOazWsedKhwQ51Tu1HN_A,42471
+ metaflow/plugins/kubernetes/spot_metadata_cli.py,sha256=an0nWCxgflmqIPBCBrlb4m3DereDFFJBLt-KKhqcHc8,1670
+ metaflow/plugins/kubernetes/spot_monitor_sidecar.py,sha256=zrWU-smQwPnL6MBHmzTxWyEA00R6iKKQbhhy50xFwQ8,3832
  metaflow/plugins/metadata_providers/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
  metaflow/plugins/metadata_providers/local.py,sha256=9UAxe9caN6kU1lkSlIoJbRGgTqsMa62cBTnyMwhqiaA,22446
  metaflow/plugins/metadata_providers/service.py,sha256=NKZfFMamx6upP6aFRJfXlfYIhySgFNzz6kbp1yPD7LA,20222
@@ -358,9 +360,9 @@ metaflow/user_configs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3h
  metaflow/user_configs/config_decorators.py,sha256=Tj0H88UT8Q6pylXxHXgiA6cqnNlw4d3mR7M8J9g3ZUg,20139
  metaflow/user_configs/config_options.py,sha256=Knpiax_YGmYAdR3zKmaepN8puW1MyL9g6-eMGAkcylo,20942
  metaflow/user_configs/config_parameters.py,sha256=T0Zz18o9zKEV7mMcKotFWvXixhJpotLRBVrKx6ENErQ,15416
- metaflow-2.13.4.dist-info/LICENSE,sha256=nl_Lt5v9VvJ-5lWJDT4ddKAG-VZ-2IaLmbzpgYDz2hU,11343
- metaflow-2.13.4.dist-info/METADATA,sha256=PBmkd2ZuaamBlNTJCyBWZWWVO8xs8J1anqV4YV_P4kQ,6121
- metaflow-2.13.4.dist-info/WHEEL,sha256=9Hm2OB-j1QcCUq9Jguht7ayGIIZBRTdOXD1qg9cCgPM,109
- metaflow-2.13.4.dist-info/entry_points.txt,sha256=IKwTN1T3I5eJL3uo_vnkyxVffcgnRdFbKwlghZfn27k,57
- metaflow-2.13.4.dist-info/top_level.txt,sha256=v1pDHoWaSaKeuc5fKTRSfsXCKSdW1zvNVmvA-i0if3o,9
- metaflow-2.13.4.dist-info/RECORD,,
+ metaflow-2.13.6.dist-info/LICENSE,sha256=nl_Lt5v9VvJ-5lWJDT4ddKAG-VZ-2IaLmbzpgYDz2hU,11343
+ metaflow-2.13.6.dist-info/METADATA,sha256=7bPykdKjdrtqd0D8QhWa-ItxZbzsh_JAOp4gpTvE6v4,6121
+ metaflow-2.13.6.dist-info/WHEEL,sha256=9Hm2OB-j1QcCUq9Jguht7ayGIIZBRTdOXD1qg9cCgPM,109
+ metaflow-2.13.6.dist-info/entry_points.txt,sha256=IKwTN1T3I5eJL3uo_vnkyxVffcgnRdFbKwlghZfn27k,57
+ metaflow-2.13.6.dist-info/top_level.txt,sha256=v1pDHoWaSaKeuc5fKTRSfsXCKSdW1zvNVmvA-i0if3o,9
+ metaflow-2.13.6.dist-info/RECORD,,