ob-metaflow 2.13.4.1__py2.py3-none-any.whl → 2.13.7.1__py2.py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Potentially problematic release.
This version of ob-metaflow might be problematic.
- metaflow/decorators.py +6 -2
- metaflow/flowspec.py +30 -0
- metaflow/parameters.py +1 -1
- metaflow/plugins/__init__.py +5 -0
- metaflow/plugins/argo/argo_workflows.py +24 -8
- metaflow/plugins/events_decorator.py +79 -142
- metaflow/plugins/kubernetes/kubernetes.py +0 -9
- metaflow/plugins/kubernetes/kubernetes_cli.py +1 -1
- metaflow/plugins/kubernetes/kubernetes_decorator.py +8 -0
- metaflow/plugins/kubernetes/spot_metadata_cli.py +69 -0
- metaflow/plugins/kubernetes/spot_monitor_sidecar.py +109 -0
- metaflow/runner/click_api.py +2 -0
- metaflow/user_configs/config_options.py +7 -4
- metaflow/user_configs/config_parameters.py +7 -7
- metaflow/version.py +1 -1
- {ob_metaflow-2.13.4.1.dist-info → ob_metaflow-2.13.7.1.dist-info}/METADATA +2 -2
- {ob_metaflow-2.13.4.1.dist-info → ob_metaflow-2.13.7.1.dist-info}/RECORD +21 -19
- {ob_metaflow-2.13.4.1.dist-info → ob_metaflow-2.13.7.1.dist-info}/LICENSE +0 -0
- {ob_metaflow-2.13.4.1.dist-info → ob_metaflow-2.13.7.1.dist-info}/WHEEL +0 -0
- {ob_metaflow-2.13.4.1.dist-info → ob_metaflow-2.13.7.1.dist-info}/entry_points.txt +0 -0
- {ob_metaflow-2.13.4.1.dist-info → ob_metaflow-2.13.7.1.dist-info}/top_level.txt +0 -0
metaflow/decorators.py
CHANGED

@@ -591,9 +591,13 @@ def _init_flow_decorators(
             )
         else:
             # Each "non-multiple" flow decorator is only allowed to have one set of options
+            # Note that there may be no deco_options if a MutableFlow config injected
+            # the decorator.
             deco_flow_init_options = {
-                option: deco_options
-
+                option: deco_options.get(
+                    option.replace("-", "_"), option_info["default"]
+                )
+                for option, option_info in deco.options.items()
             }
         for deco in decorators:
             deco.flow_init(
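Illustrative sketch (not part of the package) of why the new lookup pairs option.replace("-", "_") with a declared default: flow-level options are often declared with hyphens in CLI style while the collected values are keyed with underscores, and a value that was never supplied should fall back to the declared default. The option names and values below are hypothetical.

# hypothetical collected values, keyed with underscores
deco_options = {"pool_size": 4}
# hypothetical declared options, CLI-style hyphenated names with defaults
options = {"pool-size": {"default": 1}, "retry-limit": {"default": 3}}

deco_flow_init_options = {
    option: deco_options.get(option.replace("-", "_"), info["default"])
    for option, info in options.items()
}
print(deco_flow_init_options)  # {'pool-size': 4, 'retry-limit': 3}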
metaflow/flowspec.py
CHANGED

@@ -86,6 +86,11 @@ class FlowSpecMeta(type):
         super().__init__(name, bases, attrs)
         if name == "FlowSpec":
             return
+
+        from .decorators import (
+            DuplicateFlowDecoratorException,
+        )  # Prevent circular import
+
         # We store some state in the flow class itself. This is primarily used to
         # attach global state to a flow. It is *not* an actual global because of
         # Runner/NBRunner. This is also created here in the meta class to avoid it being

@@ -98,6 +103,31 @@ class FlowSpecMeta(type):
         # Keys are _FlowState enum values
         cls._flow_state = {}

+        # We inherit stuff from our parent classes as well -- we need to be careful
+        # in terms of the order; we will follow the MRO with the following rules:
+        #  - decorators (cls._flow_decorators) will cause an error if they do not
+        #    support multiple and we see multiple instances of the same
+        #  - config decorators will be joined
+        #  - configs will be added later directly by the class; base class configs will
+        #    be taken into account as they would be inherited.
+
+        # We only need to do this for the base classes since the current class will
+        # get updated as decorators are parsed.
+        for base in cls.__mro__:
+            if base != cls and base != FlowSpec and issubclass(base, FlowSpec):
+                # Take care of decorators
+                for deco_name, deco in base._flow_decorators.items():
+                    if deco_name in cls._flow_decorators and not deco.allow_multiple:
+                        raise DuplicateFlowDecoratorException(deco_name)
+                    cls._flow_decorators.setdefault(deco_name, []).extend(deco)
+
+                # Take care of configs and config decorators
+                base_configs = base._flow_state.get(_FlowState.CONFIG_DECORATORS)
+                if base_configs:
+                    cls._flow_state.setdefault(_FlowState.CONFIG_DECORATORS, []).extend(
+                        base_configs
+                    )
+
         cls._init_attrs()

     def _init_attrs(cls):
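A minimal sketch of what the MRO walk above is meant to enable (flow names are hypothetical; this is an interpretation of the change, not code from the package): a flow that subclasses another flow now picks up the base class's flow-level decorators and config decorators, and a non-multiple decorator seen twice along the MRO raises DuplicateFlowDecoratorException.

from metaflow import FlowSpec, step, project


@project(name="demo_project")
class BaseFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


# ChildFlow inherits BaseFlow's flow-level decorators via the MRO walk above;
# declaring the same non-multiple decorator again on the subclass is the
# situation the DuplicateFlowDecoratorException guards against.
class ChildFlow(BaseFlow):
    pass


if __name__ == "__main__":
    ChildFlow()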
metaflow/parameters.py
CHANGED

@@ -316,7 +316,7 @@ class Parameter(object):
     help : str, optional, default None
         Help text to show in `run --help`.
     required : bool, optional, default None
-        Require that the user
+        Require that the user specifies a value for the parameter. Note that if
         a default is provide, the required flag is ignored.
         A value of None is equivalent to False.
     show_default : bool, optional, default None
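For reference, a short usage sketch of the documented behavior (flow and parameter names are hypothetical): required forces a value at run time unless a default is supplied, in which case the flag is ignored.

from metaflow import FlowSpec, Parameter, step


class ParamFlow(FlowSpec):
    # Must be passed as `run --alpha 0.1` because there is no default.
    alpha = Parameter("alpha", help="Learning rate", type=float, required=True)
    # `required` would be ignored here since a default is provided.
    beta = Parameter("beta", help="Momentum", type=float, default=0.9)

    @step
    def start(self):
        print(self.alpha, self.beta)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ParamFlow()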
metaflow/plugins/__init__.py
CHANGED

@@ -16,6 +16,7 @@ CLIS_DESC = [
     ("argo-workflows", ".argo.argo_workflows_cli.cli"),
     ("card", ".cards.card_cli.cli"),
     ("tag", ".tag_cli.cli"),
+    ("spot-metadata", ".kubernetes.spot_metadata_cli.cli"),
     ("logs", ".logs_cli.cli"),
 ]

@@ -104,6 +105,10 @@ SIDECARS_DESC = [
         "save_logs_periodically",
         "..mflog.save_logs_periodically.SaveLogsPeriodicallySidecar",
     ),
+    (
+        "spot_termination_monitor",
+        ".kubernetes.spot_monitor_sidecar.SpotTerminationMonitorSidecar",
+    ),
     ("heartbeat", "metaflow.metadata_provider.heartbeat.MetadataHeartBeat"),
 ]
metaflow/plugins/argo/argo_workflows.py
CHANGED

@@ -625,6 +625,16 @@ class ArgoWorkflows(object):
             for event in trigger_on_finish_deco.triggers:
                 # Actual filters are deduced here since we don't have access to
                 # the current object in the @trigger_on_finish decorator.
+                project_name = event.get("project") or current.get("project_name")
+                branch_name = event.get("branch") or current.get("branch_name")
+                # validate that we have complete project info for an event name
+                if project_name or branch_name:
+                    if not (project_name and branch_name):
+                        # if one of the two is missing, we would end up listening to an event that will never be broadcast.
+                        raise ArgoWorkflowsException(
+                            "Incomplete project info. Please specify both 'project' and 'project_branch' or use the @project decorator"
+                        )
+
                 triggers.append(
                     {
                         # Make sure this remains consistent with the event name format

@@ -633,18 +643,16 @@ class ArgoWorkflows(object):
                         % ".".join(
                             v
                             for v in [
-
-
+                                project_name,
+                                branch_name,
                                 event["flow"],
                             ]
                             if v
                         ),
                         "filters": {
                             "auto-generated-by-metaflow": True,
-                            "project_name":
-
-                            "branch_name": event.get("branch")
-                            or current.get("branch_name"),
+                            "project_name": project_name,
+                            "branch_name": branch_name,
                             # TODO: Add a time filters to guard against cached events
                         },
                         "type": "run",

@@ -842,8 +850,15 @@ class ArgoWorkflows(object):
                 Metadata()
                 .labels(self._base_labels)
                 .label("app.kubernetes.io/name", "metaflow-task")
-                .annotations(
-
+                .annotations(
+                    {
+                        **annotations,
+                        **self._base_annotations,
+                        **{
+                            "metaflow/run_id": "argo-{{workflow.name}}"
+                        },  # we want pods of the workflow to have the run_id as an annotation as well
+                    }
+                )
             )
             # Set the entrypoint to flow name
             .entrypoint(self.flow.name)

@@ -1706,6 +1721,7 @@ class ArgoWorkflows(object):
                         },
                         **{
                             # Some optional values for bookkeeping
+                            "METAFLOW_FLOW_FILENAME": os.path.basename(sys.argv[0]),
                             "METAFLOW_FLOW_NAME": self.flow.name,
                             "METAFLOW_STEP_NAME": node.name,
                             "METAFLOW_RUN_ID": run_id,
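A small illustration (not from the package) of how the pieces introduced above compose the trigger's event name: project and branch fall back to the current @project context, and empty parts are skipped when joining.

def event_name(project_name, branch_name, flow_name):
    # Mirrors the ".".join(v for v in [...] if v) expression in the hunk above.
    return ".".join(v for v in [project_name, branch_name, flow_name] if v)


print(event_name("demo_project", "user.alice", "FooFlow"))  # demo_project.user.alice.FooFlow
print(event_name(None, None, "FooFlow"))                    # FooFlow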
metaflow/plugins/events_decorator.py
CHANGED

@@ -398,111 +398,23 @@ class TriggerOnFinishDecorator(FlowDecorator):
             )
         elif self.attributes["flow"]:
             # flow supports the format @trigger_on_finish(flow='FooFlow')
-
-
-                    {
-                        "fq_name": self.attributes["flow"],
-                    }
-                )
-            elif isinstance(self.attributes["flow"], dict):
-                if "name" not in self.attributes["flow"]:
-                    raise MetaflowException(
-                        "The *flow* attribute for *@trigger_on_finish* is missing the "
-                        "*name* key."
-                    )
-                flow_name = self.attributes["flow"]["name"]
-
-                if not is_stringish(flow_name) or "." in flow_name:
-                    raise MetaflowException(
-                        "The *name* attribute of the *flow* is not a valid string"
-                    )
-                result = {"fq_name": flow_name}
-                if "project" in self.attributes["flow"]:
-                    if is_stringish(self.attributes["flow"]["project"]):
-                        result["project"] = self.attributes["flow"]["project"]
-                    else:
-                        raise MetaflowException(
-                            "The *project* attribute of the *flow* is not a string"
-                        )
-                if "project_branch" in self.attributes["flow"]:
-                    if is_stringish(self.attributes["flow"]["project_branch"]):
-                        result["branch"] = self.attributes["flow"]["project_branch"]
-                    else:
-                        raise MetaflowException(
-                            "The *project_branch* attribute of the *flow* is not a string"
-                        )
-                self.triggers.append(result)
-            elif callable(self.attributes["flow"]) and not isinstance(
+            flow = self.attributes["flow"]
+            if callable(flow) and not isinstance(
                 self.attributes["flow"], DeployTimeField
             ):
-                trig = DeployTimeField(
-                    "fq_name", [str, dict], None, self.attributes["flow"], False
-                )
+                trig = DeployTimeField("fq_name", [str, dict], None, flow, False)
                 self.triggers.append(trig)
             else:
-
-                    "Incorrect type for *flow* attribute in *@trigger_on_finish* "
-                    " decorator. Supported type is string or Dict[str, str] - \n"
-                    "@trigger_on_finish(flow='FooFlow') or "
-                    "@trigger_on_finish(flow={'name':'FooFlow', 'project_branch': 'branch'})"
-                )
+                self.triggers.extend(self._parse_static_triggers([flow]))
         elif self.attributes["flows"]:
             # flows attribute supports the following formats -
             # 1. flows=['FooFlow', 'BarFlow']
-
-
-
-                self.triggers.append(
-                    {
-                        "fq_name": flow,
-                    }
-                )
-            elif isinstance(flow, dict):
-                if "name" not in flow:
-                    raise MetaflowException(
-                        "One or more flows in the *flows* attribute for "
-                        "*@trigger_on_finish* is missing the "
-                        "*name* key."
-                    )
-                flow_name = flow["name"]
-
-                if not is_stringish(flow_name) or "." in flow_name:
-                    raise MetaflowException(
-                        "The *name* attribute '%s' is not a valid string"
-                        % str(flow_name)
-                    )
-                result = {"fq_name": flow_name}
-                if "project" in flow:
-                    if is_stringish(flow["project"]):
-                        result["project"] = flow["project"]
-                    else:
-                        raise MetaflowException(
-                            "The *project* attribute of the *flow* '%s' is not "
-                            "a string" % flow_name
-                        )
-                if "project_branch" in flow:
-                    if is_stringish(flow["project_branch"]):
-                        result["branch"] = flow["project_branch"]
-                    else:
-                        raise MetaflowException(
-                            "The *project_branch* attribute of the *flow* %s "
-                            "is not a string" % flow_name
-                        )
-                self.triggers.append(result)
-            else:
-                raise MetaflowException(
-                    "One or more flows in *flows* attribute in "
-                    "*@trigger_on_finish* decorator have an incorrect type. "
-                    "Supported type is string or Dict[str, str]- \n"
-                    "@trigger_on_finish(flows=['FooFlow', 'BarFlow']"
-                )
-            elif callable(self.attributes["flows"]) and not isinstance(
-                self.attributes["flows"], DeployTimeField
-            ):
-                trig = DeployTimeField(
-                    "flows", list, None, self.attributes["flows"], False
-                )
+            flows = self.attributes["flows"]
+            if callable(flows) and not isinstance(flows, DeployTimeField):
+                trig = DeployTimeField("flows", list, None, flows, False)
                 self.triggers.append(trig)
+            elif isinstance(flows, list):
+                self.triggers.extend(self._parse_static_triggers(flows))
             else:
                 raise MetaflowException(
                     "Incorrect type for *flows* attribute in *@trigger_on_finish* "

@@ -519,26 +431,7 @@ class TriggerOnFinishDecorator(FlowDecorator):
         for trigger in self.triggers:
             if isinstance(trigger, DeployTimeField):
                 continue
-
-                # fully qualified name is just the flow name
-                trigger["flow"] = trigger["fq_name"]
-            elif trigger["fq_name"].count(".") >= 2:
-                # fully qualified name is of the format - project.branch.flow_name
-                trigger["project"], tail = trigger["fq_name"].split(".", maxsplit=1)
-                trigger["branch"], trigger["flow"] = tail.rsplit(".", maxsplit=1)
-            else:
-                raise MetaflowException(
-                    "Incorrect format for *flow* in *@trigger_on_finish* "
-                    "decorator. Specify either just the *flow_name* or a fully "
-                    "qualified name like *project_name.branch_name.flow_name*."
-                )
-            # TODO: Also sanity check project and branch names
-            if not re.match(r"^[A-Za-z0-9_]+$", trigger["flow"]):
-                raise MetaflowException(
-                    "Invalid flow name *%s* in *@trigger_on_finish* "
-                    "decorator. Only alphanumeric characters and "
-                    "underscores(_) are allowed." % trigger["flow"]
-                )
+            self._parse_fq_name(trigger)

         self.options = self.attributes["options"]

@@ -593,9 +486,67 @@ class TriggerOnFinishDecorator(FlowDecorator):
             run_objs.append(run_obj)
         current._update_env({"trigger": Trigger.from_runs(run_objs)})

+    @staticmethod
+    def _parse_static_triggers(flows):
+        results = []
+        for flow in flows:
+            if is_stringish(flow):
+                results.append(
+                    {
+                        "fq_name": flow,
+                    }
+                )
+            elif isinstance(flow, dict):
+                if "name" not in flow:
+                    if len(flows) > 1:
+                        raise MetaflowException(
+                            "One or more flows in the *flows* attribute for "
+                            "*@trigger_on_finish* is missing the "
+                            "*name* key."
+                        )
+                    raise MetaflowException(
+                        "The *flow* attribute for *@trigger_on_finish* is missing the "
+                        "*name* key."
+                    )
+                flow_name = flow["name"]
+
+                if not is_stringish(flow_name) or "." in flow_name:
+                    raise MetaflowException(
+                        f"The *name* attribute of the *flow* {flow_name} is not a valid string"
+                    )
+                result = {"fq_name": flow_name}
+                if "project" in flow:
+                    if is_stringish(flow["project"]):
+                        result["project"] = flow["project"]
+                    else:
+                        raise MetaflowException(
+                            f"The *project* attribute of the *flow* {flow_name} is not a string"
+                        )
+                if "project_branch" in flow:
+                    if is_stringish(flow["project_branch"]):
+                        result["branch"] = flow["project_branch"]
+                    else:
+                        raise MetaflowException(
+                            f"The *project_branch* attribute of the *flow* {flow_name} is not a string"
+                        )
+                results.append(result)
+            else:
+                if len(flows) > 1:
+                    raise MetaflowException(
+                        "One or more flows in the *flows* attribute for "
+                        "*@trigger_on_finish* decorator have an incorrect type. "
+                        "Supported type is string or Dict[str, str]- \n"
+                        "@trigger_on_finish(flows=['FooFlow', 'BarFlow']"
+                    )
+                raise MetaflowException(
+                    "Incorrect type for *flow* attribute in *@trigger_on_finish* "
+                    " decorator. Supported type is string or Dict[str, str] - \n"
+                    "@trigger_on_finish(flow='FooFlow') or "
+                    "@trigger_on_finish(flow={'name':'FooFlow', 'project_branch': 'branch'})"
+                )
+        return results
+
     def _parse_fq_name(self, trigger):
-        if isinstance(trigger, DeployTimeField):
-            trigger["fq_name"] = deploy_time_eval(trigger["fq_name"])
         if trigger["fq_name"].count(".") == 0:
             # fully qualified name is just the flow name
             trigger["flow"] = trigger["fq_name"]

@@ -615,32 +566,18 @@ class TriggerOnFinishDecorator(FlowDecorator):
                     "decorator. Only alphanumeric characters and "
                     "underscores(_) are allowed." % trigger["flow"]
                 )
-        return trigger

     def format_deploytime_value(self):
-
-
-
-
-            deploy_value = deploy_time_eval(trigger)
-            if isinstance(deploy_value, list):
-                self.triggers = deploy_value
+        if len(self.triggers) == 1 and isinstance(self.triggers[0], DeployTimeField):
+            deploy_value = deploy_time_eval(self.triggers[0])
+            if isinstance(deploy_value, list):
+                self.triggers = deploy_value
             else:
-
-
-
-
-
-                trigger = deploy_time_eval(trigger)
-                if isinstance(trigger, dict):
-                    trigger["fq_name"] = trigger.get("name")
-                    trigger["project"] = trigger.get("project")
-                    trigger["branch"] = trigger.get("project_branch")
-            # We also added this bc it won't be formatted yet
-            if isinstance(trigger, str):
-                trigger = {"fq_name": trigger}
-            trigger = self._parse_fq_name(trigger)
-            self.triggers[self.triggers.index(old_trig)] = trigger
+                self.triggers = [deploy_value]
+        triggers = self._parse_static_triggers(self.triggers)
+        for trigger in triggers:
+            self._parse_fq_name(trigger)
+        self.triggers = triggers

     def get_top_level_options(self):
         return list(self._option_values.items())
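The accepted forms stay the same after the refactor into _parse_static_triggers; an illustrative downstream flow using the dict form (flow, project, and branch names below are placeholders taken from or modeled on the error messages above):

from metaflow import FlowSpec, step, trigger_on_finish


@trigger_on_finish(flow={"name": "FooFlow", "project": "demo_project",
                         "project_branch": "main"})
class DownstreamFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()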
metaflow/plugins/kubernetes/kubernetes.py
CHANGED

@@ -685,15 +685,6 @@ class Kubernetes(object):
         for name, value in system_annotations.items():
             job.annotation(name, value)

-        (
-            job.annotation("metaflow/run_id", run_id)
-            .annotation("metaflow/step_name", step_name)
-            .annotation("metaflow/task_id", task_id)
-            .annotation("metaflow/attempt", attempt)
-            .label("app.kubernetes.io/name", "metaflow-task")
-            .label("app.kubernetes.io/part-of", "metaflow")
-        )
-
         return job

     def create_k8sjob(self, job):
metaflow/plugins/kubernetes/kubernetes_cli.py
CHANGED

@@ -190,7 +190,7 @@ def step(
     executable = ctx.obj.environment.executable(step_name, executable)

     # Set environment
-    env = {}
+    env = {"METAFLOW_FLOW_FILENAME": os.path.basename(sys.argv[0])}
     env_deco = [deco for deco in node.decorators if deco.name == "environment"]
     if env_deco:
         env = env_deco[0].attributes["vars"]
metaflow/plugins/kubernetes/kubernetes_decorator.py
CHANGED

@@ -562,6 +562,13 @@ class KubernetesDecorator(StepDecorator):
         self._save_logs_sidecar = Sidecar("save_logs_periodically")
         self._save_logs_sidecar.start()

+        # Start spot termination monitor sidecar.
+        current._update_env(
+            {"spot_termination_notice": "/tmp/spot_termination_notice"}
+        )
+        self._spot_monitor_sidecar = Sidecar("spot_termination_monitor")
+        self._spot_monitor_sidecar.start()
+
         num_parallel = None
         if hasattr(flow, "_parallel_ubf_iter"):
             num_parallel = flow._parallel_ubf_iter.num_parallel

@@ -620,6 +627,7 @@ class KubernetesDecorator(StepDecorator):

         try:
             self._save_logs_sidecar.terminate()
+            self._spot_monitor_sidecar.terminate()
         except:
             # Best effort kill
             pass
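A hedged sketch of how user code could react to the termination notice surfaced above; it assumes the environment entry registered via current._update_env is exposed as current.spot_termination_notice and that the sidecar writes the notice file before the task is signaled (both are assumptions, not confirmed by this diff).

import os

from metaflow import current


def spot_termination_imminent():
    # Path registered via current._update_env(...) in the hunk above (assumed
    # to be readable as an attribute on `current`).
    notice_file = getattr(current, "spot_termination_notice", None)
    return bool(notice_file) and os.path.exists(notice_file)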
metaflow/plugins/kubernetes/spot_metadata_cli.py
ADDED

@@ -0,0 +1,69 @@
+from metaflow._vendor import click
+from datetime import datetime, timezone
+from metaflow.tagging_util import validate_tags
+from metaflow.metadata_provider import MetaDatum
+
+
+@click.group()
+def cli():
+    pass
+
+
+@cli.group(help="Commands related to spot metadata.")
+def spot_metadata():
+    pass
+
+
+@spot_metadata.command(help="Record spot termination metadata for a task.")
+@click.option(
+    "--run-id",
+    required=True,
+    help="Run ID for which metadata is to be recorded.",
+)
+@click.option(
+    "--step-name",
+    required=True,
+    help="Step Name for which metadata is to be recorded.",
+)
+@click.option(
+    "--task-id",
+    required=True,
+    help="Task ID for which metadata is to be recorded.",
+)
+@click.option(
+    "--termination-notice-time",
+    required=True,
+    help="Spot termination notice time.",
+)
+@click.option(
+    "--tag",
+    "tags",
+    multiple=True,
+    required=False,
+    default=None,
+    help="List of tags.",
+)
+@click.pass_obj
+def record(obj, run_id, step_name, task_id, termination_notice_time, tags=None):
+    validate_tags(tags)
+
+    tag_list = list(tags) if tags else []
+
+    entries = [
+        MetaDatum(
+            field="spot-termination-received-at",
+            value=datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"),
+            type="spot-termination-received-at",
+            tags=tag_list,
+        ),
+        MetaDatum(
+            field="spot-termination-time",
+            value=termination_notice_time,
+            type="spot-termination-time",
+            tags=tag_list,
+        ),
+    ]
+
+    obj.metadata.register_metadata(
+        run_id=run_id, step_name=step_name, task_id=task_id, metadata=entries
+    )
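For orientation, the sidecar in the next file invokes this command group through the packaged flow file; an equivalent manual invocation, sketched here with a hypothetical flow file name and placeholder IDs/timestamp, mirrors the argument list it builds:

import subprocess
import sys

# Placeholder flow file, IDs, and timestamp; mirrors the command constructed
# in spot_monitor_sidecar._emit_termination_metadata below.
subprocess.run(
    [
        sys.executable, "flow.py", "spot-metadata", "record",
        "--run-id", "argo-myflow-abc123",
        "--step-name", "train",
        "--task-id", "t-42",
        "--termination-notice-time", "2025-01-01T00:00:00Z",
        "--tag", "attempt_id:0",
    ],
    check=True,
)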
metaflow/plugins/kubernetes/spot_monitor_sidecar.py
ADDED

@@ -0,0 +1,109 @@
+import os
+import sys
+import time
+import signal
+import requests
+import subprocess
+from multiprocessing import Process
+from datetime import datetime, timezone
+from metaflow.sidecar import MessageTypes
+
+
+class SpotTerminationMonitorSidecar(object):
+    EC2_TYPE_URL = "http://169.254.169.254/latest/meta-data/instance-life-cycle"
+    METADATA_URL = "http://169.254.169.254/latest/meta-data/spot/termination-time"
+    TOKEN_URL = "http://169.254.169.254/latest/api/token"
+    POLL_INTERVAL = 5  # seconds
+
+    def __init__(self):
+        self.is_alive = True
+        self._process = None
+        self._token = None
+        self._token_expiry = 0
+
+        if self._is_aws_spot_instance():
+            self._process = Process(target=self._monitor_loop)
+            self._process.start()
+
+    def process_message(self, msg):
+        if msg.msg_type == MessageTypes.SHUTDOWN:
+            self.is_alive = False
+            if self._process:
+                self._process.terminate()
+
+    @classmethod
+    def get_worker(cls):
+        return cls
+
+    def _get_imds_token(self):
+        current_time = time.time()
+        if current_time >= self._token_expiry - 60:  # Refresh 60s before expiry
+            try:
+                response = requests.put(
+                    url=self.TOKEN_URL,
+                    headers={"X-aws-ec2-metadata-token-ttl-seconds": "300"},
+                    timeout=1,
+                )
+                if response.status_code == 200:
+                    self._token = response.text
+                    self._token_expiry = current_time + 240  # Slightly less than TTL
+            except requests.exceptions.RequestException:
+                pass
+        return self._token
+
+    def _make_ec2_request(self, url, timeout):
+        token = self._get_imds_token()
+        headers = {"X-aws-ec2-metadata-token": token} if token else {}
+        response = requests.get(url=url, headers=headers, timeout=timeout)
+        return response
+
+    def _is_aws_spot_instance(self):
+        try:
+            response = self._make_ec2_request(url=self.EC2_TYPE_URL, timeout=1)
+            return response.status_code == 200 and response.text == "spot"
+        except (requests.exceptions.RequestException, requests.exceptions.Timeout):
+            return False
+
+    def _monitor_loop(self):
+        while self.is_alive:
+            try:
+                response = self._make_ec2_request(url=self.METADATA_URL, timeout=1)
+                if response.status_code == 200:
+                    termination_time = response.text
+                    self._emit_termination_metadata(termination_time)
+                    os.kill(os.getppid(), signal.SIGTERM)
+                    break
+            except (requests.exceptions.RequestException, requests.exceptions.Timeout):
+                pass
+            time.sleep(self.POLL_INTERVAL)
+
+    def _emit_termination_metadata(self, termination_time):
+        flow_filename = os.getenv("METAFLOW_FLOW_FILENAME")
+        pathspec = os.getenv("MF_PATHSPEC")
+        _, run_id, step_name, task_id = pathspec.split("/")
+        retry_count = os.getenv("MF_ATTEMPT")
+
+        with open("/tmp/spot_termination_notice", "w") as fp:
+            fp.write(termination_time)
+
+        command = [
+            sys.executable,
+            f"/metaflow/{flow_filename}",
+            "spot-metadata",
+            "record",
+            "--run-id",
+            run_id,
+            "--step-name",
+            step_name,
+            "--task-id",
+            task_id,
+            "--termination-notice-time",
+            termination_time,
+            "--tag",
+            "attempt_id:{}".format(retry_count),
+        ]
+
+        result = subprocess.run(command, capture_output=True, text=True)
+
+        if result.returncode != 0:
+            print(f"Failed to record spot termination metadata: {result.stderr}")
metaflow/runner/click_api.py
CHANGED

@@ -169,7 +169,7 @@ class ConfigInput:
                 "Please contact support."
             )
         cls.loaded_configs = all_configs
-        return cls.loaded_configs
+        return cls.loaded_configs[config_name]

     def process_configs(
         self,

@@ -326,6 +326,8 @@ class ConfigInput:
         for name, val in merged_configs.items():
             if val is None:
                 missing_configs.add(name)
+                to_return[name] = None
+                flow_cls._flow_state[_FlowState.CONFIGS][name] = None
                 continue
             if val.startswith(_CONVERTED_DEFAULT_NO_FILE):
                 no_default_file.append(name)

@@ -339,15 +341,16 @@ class ConfigInput:
             val = val[len(_DEFAULT_PREFIX) :]
             if val.startswith("kv."):
                 # This means to load it from a file
-
-
+                try:
+                    read_value = self.get_config(val[3:])
+                except KeyError as e:
                     exc = click.UsageError(
                         "Could not find configuration '%s' in INFO file" % val
                     )
                     if click_obj:
                         click_obj.delayed_config_exception = exc
                         return None
-                raise exc
+                    raise exc from e
                 flow_cls._flow_state[_FlowState.CONFIGS][name] = read_value
                 to_return[name] = ConfigValue(read_value)
             else:
metaflow/user_configs/config_parameters.py
CHANGED

@@ -290,17 +290,17 @@ class Config(Parameter, collections.abc.Mapping):
     default : Union[str, Callable[[ParameterContext], str], optional, default None
         Default path from where to read this configuration. A function implies that the
         value will be computed using that function.
-        You can only specify default or default_value.
+        You can only specify default or default_value, not both.
     default_value : Union[str, Dict[str, Any], Callable[[ParameterContext, Union[str, Dict[str, Any]]], Any], optional, default None
         Default value for the parameter. A function
         implies that the value will be computed using that function.
-        You can only specify default or default_value.
+        You can only specify default or default_value, not both.
     help : str, optional, default None
         Help text to show in `run --help`.
     required : bool, optional, default None
-        Require that the user
-        a default is provided, the required flag is ignored.
-        equivalent to False.
+        Require that the user specifies a value for the configuration. Note that if
+        a default or default_value is provided, the required flag is ignored.
+        A value of None is equivalent to False.
     parser : Union[str, Callable[[str], Dict[Any, Any]]], optional, default None
         If a callable, it is a function that can parse the configuration string
         into an arbitrarily nested dictionary. If a string, the string should refer to

@@ -330,13 +330,13 @@ class Config(Parameter, collections.abc.Mapping):
         **kwargs: Dict[str, str]
     ):

-        if default and default_value:
+        if default is not None and default_value is not None:
             raise MetaflowException(
                 "For config '%s', you can only specify default or default_value, not both"
                 % name
             )
         self._default_is_file = default is not None
-        kwargs["default"] = default
+        kwargs["default"] = default if default is not None else default_value
         super(Config, self).__init__(
             name, required=required, help=help, type=str, **kwargs
         )
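A short usage sketch of the corrected semantics (flow, config name, and values are hypothetical): exactly one of default (a file path) or default_value (an inline value) may be given, and whichever one is supplied now feeds the underlying parameter default.

from metaflow import Config, FlowSpec, step


class ConfigFlow(FlowSpec):
    # Inline default value; passing both default= and default_value= raises.
    trainer = Config("trainer", default_value='{"lr": 0.01, "epochs": 2}')

    @step
    def start(self):
        print(self.trainer["lr"], self.trainer["epochs"])
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ConfigFlow()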
metaflow/version.py
CHANGED

@@ -1 +1 @@
-metaflow_version = "2.13.4.1"
+metaflow_version = "2.13.7.1"
{ob_metaflow-2.13.4.1.dist-info → ob_metaflow-2.13.7.1.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: ob-metaflow
-Version: 2.13.4.1
+Version: 2.13.7.1
 Summary: Metaflow: More Data Science, Less Engineering
 Author: Netflix, Outerbounds & the Metaflow Community
 Author-email: help@outerbounds.co

@@ -12,7 +12,7 @@ Requires-Dist: boto3
 Requires-Dist: pylint
 Requires-Dist: kubernetes
 Provides-Extra: stubs
-Requires-Dist: metaflow-stubs==2.13.4.1; extra == "stubs"
+Requires-Dist: metaflow-stubs==2.13.7.1; extra == "stubs"
 Dynamic: author
 Dynamic: author-email
 Dynamic: description
{ob_metaflow-2.13.4.1.dist-info → ob_metaflow-2.13.7.1.dist-info}/RECORD
CHANGED

@@ -6,11 +6,11 @@ metaflow/cli_args.py,sha256=muIh9pdVqMRG09uAYFKcAcUKFyDE4N3Wm6YahWRaUNI,3594
 metaflow/clone_util.py,sha256=LSuVbFpPUh92UW32DBcnZbL0FFw-4w3CLa0tpEbCkzk,2066
 metaflow/cmd_with_io.py,sha256=kl53HkAIyv0ecpItv08wZYczv7u3msD1VCcciqigqf0,588
 metaflow/debug.py,sha256=HEmt_16tJtqHXQXsqD9pqOFe3CWR5GZ7VwpaYQgnRdU,1466
-metaflow/decorators.py,sha256=
+metaflow/decorators.py,sha256=cbOCahmwVlnHklMN2O_j5DKvZA7m_Q72_6LBzzBZRhk,24131
 metaflow/event_logger.py,sha256=joTVRqZPL87nvah4ZOwtqWX8NeraM_CXKXXGVpKGD8o,780
 metaflow/events.py,sha256=ahjzkSbSnRCK9RZ-9vTfUviz_6gMvSO9DGkJ86X80-k,5300
 metaflow/exception.py,sha256=_m9ZBJM0cooHRslDqfxCPQmkChqaTh6fGxp7HvISnYI,5161
-metaflow/flowspec.py,sha256=
+metaflow/flowspec.py,sha256=YtLlqg-MeH16rjsOU38NfPg_F-0cmzm2w4w-CPSfLxE,35510
 metaflow/graph.py,sha256=cdpnWr85aEj_rRn-7EjbndWjr_i8Dt3P7-oPUW0NNpI,12393
 metaflow/includefile.py,sha256=kWKDSlzVcRVNGG9PV5eB3o2ynrzqhVsfaLtkqjshn7Q,20948
 metaflow/info_file.py,sha256=wtf2_F0M6dgiUu74AFImM8lfy5RrUw5Yj7Rgs2swKRY,686

@@ -25,7 +25,7 @@ metaflow/metaflow_version.py,sha256=duhIzfKZtcxMVMs2uiBqBvUarSHJqyWDwMhaBOQd_g0,
 metaflow/monitor.py,sha256=T0NMaBPvXynlJAO_avKtk8OIIRMyEuMAyF8bIp79aZU,5323
 metaflow/multicore_utils.py,sha256=yEo5T6Gemn4_vl8b6IOz7fsTUYtEyqa3AaKZgJY96Wc,4974
 metaflow/package.py,sha256=yfwVMVB1mD-Sw94KwXNK3N-26YHoKMn6btrcgd67Izs,7845
-metaflow/parameters.py,sha256=
+metaflow/parameters.py,sha256=zyxDTkHXqVr7CUw509qsrBXGFpBMlqLL2-iwbIr0oiw,18621
 metaflow/procpoll.py,sha256=U2tE4iK_Mwj2WDyVTx_Uglh6xZ-jixQOo4wrM9OOhxg,2859
 metaflow/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 metaflow/pylint_wrapper.py,sha256=zzBY9YaSUZOGH-ypDKAv2B_7XcoyMZj-zCoCrmYqNRc,2865

@@ -36,7 +36,7 @@ metaflow/tuple_util.py,sha256=_G5YIEhuugwJ_f6rrZoelMFak3DqAR2tt_5CapS1XTY,830
 metaflow/unbounded_foreach.py,sha256=p184WMbrMJ3xKYHwewj27ZhRUsSj_kw1jlye5gA9xJk,387
 metaflow/util.py,sha256=hKjHl6NYJkKBSU2tzdVbddfOX1zWK73T4GCO42A0XB4,14666
 metaflow/vendor.py,sha256=FchtA9tH22JM-eEtJ2c9FpUdMn8sSb1VHuQS56EcdZk,5139
-metaflow/version.py,sha256=
+metaflow/version.py,sha256=NDWIKa2f6hUitR0aS6Y_HvRxGKVgSs_8dhsi2Uf52Qo,30
 metaflow/_vendor/__init__.py,sha256=y_CiwUD3l4eAKvTVDZeqgVujMy31cAM1qjAB-HfI-9s,353
 metaflow/_vendor/typing_extensions.py,sha256=0nUs5p1A_UrZigrAVBoOEM6TxU37zzPDUtiij1ZwpNc,110417
 metaflow/_vendor/zipp.py,sha256=ajztOH-9I7KA_4wqDYygtHa6xUBVZgFpmZ8FE74HHHI,8425

@@ -149,12 +149,12 @@ metaflow/mflog/mflog.py,sha256=VebXxqitOtNAs7VJixnNfziO_i_urG7bsJ5JiB5IXgY,4370
 metaflow/mflog/save_logs.py,sha256=ZBAF4BMukw4FMAC7odpr9OI2BC_2petPtDX0ca6srC4,2352
 metaflow/mflog/save_logs_periodically.py,sha256=2Uvk9hi-zlCqXxOQoXmmjH1SCugfw6eG6w70WgfI-ho,1256
 metaflow/mflog/tee.py,sha256=wTER15qeHuiRpCkOqo-bd-r3Gj-EVlf3IvWRCA4beW4,887
-metaflow/plugins/__init__.py,sha256=
+metaflow/plugins/__init__.py,sha256=Lr7i7ssJI_-czorJYjMFcRhGspqArobNoXUl9T1p3MY,8055
 metaflow/plugins/catch_decorator.py,sha256=UOM2taN_OL2RPpuJhwEOA9ZALm0-hHD0XS2Hn2GUev0,4061
 metaflow/plugins/debug_logger.py,sha256=mcF5HYzJ0NQmqCMjyVUk3iAP-heroHRIiVWQC6Ha2-I,879
 metaflow/plugins/debug_monitor.py,sha256=Md5X_sDOSssN9pt2D8YcaIjTK5JaQD55UAYTcF6xYF0,1099
 metaflow/plugins/environment_decorator.py,sha256=6m9j2B77d-Ja_l_9CTJ__0O6aB2a8Qt_lAZu6UjAcUA,587
-metaflow/plugins/events_decorator.py,sha256=
+metaflow/plugins/events_decorator.py,sha256=T_YSK-DlgZhd3ge9PlpTRNaMi15GK0tKZMZl1NdV9DQ,24403
 metaflow/plugins/logs_cli.py,sha256=77W5UNagU2mOKSMMvrQxQmBLRzvmjK-c8dWxd-Ygbqs,11410
 metaflow/plugins/package_cli.py,sha256=-J6D4cupHfWSZ4GEFo2yy9Je9oL3owRWm5pEJwaiqd4,1649
 metaflow/plugins/parallel_decorator.py,sha256=GR6LKIW7_S7AoU50Ar2_0nndVtO2epdn3LuthE0vKMQ,9127

@@ -181,7 +181,7 @@ metaflow/plugins/airflow/sensors/s3_sensor.py,sha256=iDReG-7FKnumrtQg-HY6cCUAAqN
 metaflow/plugins/argo/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 metaflow/plugins/argo/argo_client.py,sha256=PS_cYGnPw9h4X7TP_plObDH3clMw4reOsBLkkGPTd0Y,16282
 metaflow/plugins/argo/argo_events.py,sha256=_C1KWztVqgi3zuH57pInaE9OzABc2NnncC-zdwOMZ-w,5909
-metaflow/plugins/argo/argo_workflows.py,sha256=
+metaflow/plugins/argo/argo_workflows.py,sha256=Qiv30NsIUCYYsU6L9iPm6deHs9zfuIjG77lHOmwNGPY,177976
 metaflow/plugins/argo/argo_workflows_cli.py,sha256=11_8l4IrtkwviKsijInTZPt7YK5TZzClREnw_Cf4D5o,36706
 metaflow/plugins/argo/argo_workflows_decorator.py,sha256=ogCSBmwsC2C3eusydrgjuAJd4qK18f1sI4jJwA4Fd-o,7800
 metaflow/plugins/argo/argo_workflows_deployer.py,sha256=6kHxEnYXJwzNCM9swI8-0AckxtPWqwhZLerYkX8fxUM,4444

@@ -288,12 +288,14 @@ metaflow/plugins/gcp/gs_utils.py,sha256=ZmIGFse1qYyvAVrwga23PQUzF6dXEDLLsZ2F-YRm
 metaflow/plugins/gcp/includefile_support.py,sha256=OQO0IVWv4ObboL0VqEZwcDOyj9ORLdur66JToxQ84vU,3887
 metaflow/plugins/kubernetes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 metaflow/plugins/kubernetes/kube_utils.py,sha256=jdFMGbEmIow-oli26v31W9CmbZXigx06b3D_xIobpk0,4140
-metaflow/plugins/kubernetes/kubernetes.py,sha256=
-metaflow/plugins/kubernetes/kubernetes_cli.py,sha256=
+metaflow/plugins/kubernetes/kubernetes.py,sha256=4WHVs421w5JfFSRAdpiZ6X9w7xEK8UPYGNcc9e0JOFc,30420
+metaflow/plugins/kubernetes/kubernetes_cli.py,sha256=o_o0BDEJFpTuga7txRmkvZH8OIuTb5kI4UaG6xbzf84,13929
 metaflow/plugins/kubernetes/kubernetes_client.py,sha256=tuvXP-QKpdeSmzVolB2R_TaacOr5DIb0j642eKcjsiM,6491
-metaflow/plugins/kubernetes/kubernetes_decorator.py,sha256=
+metaflow/plugins/kubernetes/kubernetes_decorator.py,sha256=xh0nIgAnOg9ppasW7vfOq5XpF4TdmO0IbhMsg8FkxSw,31693
 metaflow/plugins/kubernetes/kubernetes_job.py,sha256=0PjcVgJkmSqjBNEkibk8y0xxBrgu99D8ar9RsXnzCeY,32833
 metaflow/plugins/kubernetes/kubernetes_jobsets.py,sha256=9kU43eE5IvIa7y-POzBdxnJOazWsedKhwQ51Tu1HN_A,42471
+metaflow/plugins/kubernetes/spot_metadata_cli.py,sha256=an0nWCxgflmqIPBCBrlb4m3DereDFFJBLt-KKhqcHc8,1670
+metaflow/plugins/kubernetes/spot_monitor_sidecar.py,sha256=zrWU-smQwPnL6MBHmzTxWyEA00R6iKKQbhhy50xFwQ8,3832
 metaflow/plugins/metadata_providers/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
 metaflow/plugins/metadata_providers/local.py,sha256=9UAxe9caN6kU1lkSlIoJbRGgTqsMa62cBTnyMwhqiaA,22446
 metaflow/plugins/metadata_providers/service.py,sha256=NKZfFMamx6upP6aFRJfXlfYIhySgFNzz6kbp1yPD7LA,20222

@@ -310,7 +312,7 @@ metaflow/plugins/secrets/__init__.py,sha256=mhJaN2eMS_ZZVewAMR2E-JdP5i0t3v9e6Dcw
 metaflow/plugins/secrets/inline_secrets_provider.py,sha256=EChmoBGA1i7qM3jtYwPpLZDBybXLergiDlN63E0u3x8,294
 metaflow/plugins/secrets/secrets_decorator.py,sha256=s-sFzPWOjahhpr5fMj-ZEaHkDYAPTO0isYXGvaUwlG8,11273
 metaflow/runner/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-metaflow/runner/click_api.py,sha256=
+metaflow/runner/click_api.py,sha256=2uj3y-pZ2OF3J-mz4VbMQqcXFYy6NUeoN2OgjKZT5-c,21839
 metaflow/runner/deployer.py,sha256=Yas_SZCss3kfJw3hLC8_IyzgiytUFGoEGHz-l-rBBKk,8980
 metaflow/runner/deployer_impl.py,sha256=nzQJiJxjgZxewkkK5pHshfVeZOUUf5-FzS0pPJimktM,5930
 metaflow/runner/metaflow_runner.py,sha256=T41AWkuQq56ID90B7I-RFr9zexuZYtknsstSoqell7A,15861

@@ -356,11 +358,11 @@ metaflow/tutorials/08-autopilot/README.md,sha256=GnePFp_q76jPs991lMUqfIIh5zSorIe
 metaflow/tutorials/08-autopilot/autopilot.ipynb,sha256=DQoJlILV7Mq9vfPBGW-QV_kNhWPjS5n6SJLqePjFYLY,3191
 metaflow/user_configs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 metaflow/user_configs/config_decorators.py,sha256=Tj0H88UT8Q6pylXxHXgiA6cqnNlw4d3mR7M8J9g3ZUg,20139
-metaflow/user_configs/config_options.py,sha256=
-metaflow/user_configs/config_parameters.py,sha256=
-ob_metaflow-2.13.
-ob_metaflow-2.13.
-ob_metaflow-2.13.
-ob_metaflow-2.13.
-ob_metaflow-2.13.
-ob_metaflow-2.13.
+metaflow/user_configs/config_options.py,sha256=t6c9KNVGz9GNK55YAow74Lof4sDZqCbeeZSzldUBFmA,21072
+metaflow/user_configs/config_parameters.py,sha256=oeJGVKu1ao_YQX6Lg6P2FEv5k5-_F4sARLlVpTW9ezM,15502
+ob_metaflow-2.13.7.1.dist-info/LICENSE,sha256=nl_Lt5v9VvJ-5lWJDT4ddKAG-VZ-2IaLmbzpgYDz2hU,11343
+ob_metaflow-2.13.7.1.dist-info/METADATA,sha256=r1jkGmaV6jJOxncmdKwAve3PLHtVaeVG7h8_CDwxaWE,5316
+ob_metaflow-2.13.7.1.dist-info/WHEEL,sha256=9Hm2OB-j1QcCUq9Jguht7ayGIIZBRTdOXD1qg9cCgPM,109
+ob_metaflow-2.13.7.1.dist-info/entry_points.txt,sha256=IKwTN1T3I5eJL3uo_vnkyxVffcgnRdFbKwlghZfn27k,57
+ob_metaflow-2.13.7.1.dist-info/top_level.txt,sha256=v1pDHoWaSaKeuc5fKTRSfsXCKSdW1zvNVmvA-i0if3o,9
+ob_metaflow-2.13.7.1.dist-info/RECORD,,

{ob_metaflow-2.13.4.1.dist-info → ob_metaflow-2.13.7.1.dist-info}/LICENSE
File without changes

{ob_metaflow-2.13.4.1.dist-info → ob_metaflow-2.13.7.1.dist-info}/WHEEL
File without changes

{ob_metaflow-2.13.4.1.dist-info → ob_metaflow-2.13.7.1.dist-info}/entry_points.txt
File without changes

{ob_metaflow-2.13.4.1.dist-info → ob_metaflow-2.13.7.1.dist-info}/top_level.txt
File without changes