metaflow 2.12.5__py2.py3-none-any.whl → 2.12.7__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- metaflow/cli.py +3 -0
- metaflow/client/core.py +5 -0
- metaflow/plugins/argo/argo_workflows.py +6 -2
- metaflow/plugins/pypi/conda_decorator.py +8 -0
- metaflow/plugins/pypi/conda_environment.py +3 -1
- metaflow/plugins/pypi/pip.py +31 -3
- metaflow/plugins/pypi/pypi_decorator.py +8 -0
- metaflow/system/__init__.py +5 -0
- metaflow/system/system_logger.py +103 -0
- metaflow/system/system_monitor.py +132 -0
- metaflow/system/system_utils.py +19 -0
- metaflow/task.py +240 -194
- metaflow/version.py +1 -1
- {metaflow-2.12.5.dist-info → metaflow-2.12.7.dist-info}/METADATA +2 -2
- {metaflow-2.12.5.dist-info → metaflow-2.12.7.dist-info}/RECORD +19 -15
- {metaflow-2.12.5.dist-info → metaflow-2.12.7.dist-info}/WHEEL +1 -1
- {metaflow-2.12.5.dist-info → metaflow-2.12.7.dist-info}/LICENSE +0 -0
- {metaflow-2.12.5.dist-info → metaflow-2.12.7.dist-info}/entry_points.txt +0 -0
- {metaflow-2.12.5.dist-info → metaflow-2.12.7.dist-info}/top_level.txt +0 -0
metaflow/cli.py
CHANGED
@@ -23,6 +23,7 @@ from .metaflow_config import (
     DEFAULT_PACKAGE_SUFFIXES,
 )
 from .metaflow_current import current
+from metaflow.system import _system_monitor, _system_logger
 from .metaflow_environment import MetaflowEnvironment
 from .mflog import LOG_SOURCES, mflog
 from .package import MetaflowPackage
@@ -952,11 +953,13 @@ def start(
         flow=ctx.obj.flow, env=ctx.obj.environment
     )
     ctx.obj.event_logger.start()
+    _system_logger.init_system_logger(ctx.obj.flow.name, ctx.obj.event_logger)
 
     ctx.obj.monitor = MONITOR_SIDECARS[monitor](
         flow=ctx.obj.flow, env=ctx.obj.environment
    )
     ctx.obj.monitor.start()
+    _system_monitor.init_system_monitor(ctx.obj.flow.name, ctx.obj.monitor)
 
     ctx.obj.metadata = [m for m in METADATA_PROVIDERS if m.TYPE == metadata][0](
         ctx.obj.environment, ctx.obj.flow, ctx.obj.event_logger, ctx.obj.monitor
metaflow/client/core.py
CHANGED
@@ -276,6 +276,11 @@ class MetaflowObject(object):
         self._attempt = attempt
         self._current_namespace = _current_namespace or get_namespace()
         self._namespace_check = _namespace_check
+        # If the current namespace is False, we disable checking for namespace for this
+        # and all children objects. Not setting namespace_check to False has the consequence
+        # of preventing access to children objects after the namespace changes
+        if self._current_namespace is None:
+            self._namespace_check = False
 
         if self._attempt is not None:
             if self._NAME not in ["task", "artifact"]:
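The hunk above makes the client relax namespace checking for an object, and for everything fetched through it, whenever it is constructed with the global namespace cleared. A minimal sketch of the user-facing behavior this supports, assuming a flow that already has recorded runs (the flow name below is illustrative):

```python
from metaflow import Flow, namespace

# Clearing the global namespace means objects created from here on skip the
# namespace check, and so do their children, even if the namespace changes later.
namespace(None)

run = Flow("HelloFlow").latest_run  # "HelloFlow" is a hypothetical flow name
print(run.pathspec)

# Child objects (steps, tasks, artifacts) remain accessible because the check
# was disabled when the parent object was constructed.
for step in run:
    print(step.id)
```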
metaflow/plugins/argo/argo_workflows.py
CHANGED
@@ -6,6 +6,7 @@ import shlex
 import sys
 from collections import defaultdict
 from hashlib import sha1
+from math import inf
 
 from metaflow import JSONType, current
 from metaflow.decorators import flow_decorators
@@ -901,7 +902,9 @@ class ArgoWorkflows(object):
                     "argo-{{workflow.name}}/%s/{{tasks.%s.outputs.parameters.task-id}}"
                     % (n, self._sanitize(n))
                     for n in node.in_funcs
-                ]
+                ],
+                # NOTE: We set zlibmin to infinite because zlib compression for the Argo input-paths breaks template value substitution.
+                zlibmin=inf,
             )
         )
     ]
@@ -2154,7 +2157,8 @@ class ArgoWorkflows(object):
                             # everything within the body.
                             # NOTE: We need the conditional logic in order to successfully fall back to the default value
                             # when the event payload does not contain a key for a parameter.
-
+                            # NOTE: Keys might contain dashes, so use the safer 'get' for fetching the value
+                            data_template='{{ if (hasKey $.Input.body.payload "%s") }}{{- (get $.Input.body.payload "%s" | toRawJson) -}}{{- else -}}{{ (fail "use-default-instead") }}{{- end -}}'
                             % (v, v),
                             # Unfortunately the sensor needs to
                             # record the default values for
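The `zlibmin=inf` argument is passed where the Argo input paths are packed into a single string. The toy sketch below is not Metaflow's actual helper (whose full signature is not visible in this diff); it only illustrates the NOTE in the hunk: once the joined paths are zlib-compressed, the `{{tasks....}}` placeholders disappear from the plain text, so Argo can no longer substitute values into them, and an infinite threshold keeps the paths uncompressed.

```python
import base64
import zlib
from math import inf


def pack_paths(paths, zlibmin=inf):
    """Toy packer: join the paths and zlib-compress only if the result is long enough."""
    joined = ",".join(paths)
    if len(joined) < zlibmin:
        return joined  # plain text; Argo can still substitute the {{tasks...}} values
    return base64.b64encode(zlib.compress(joined.encode())).decode()


paths = [
    "argo-{{workflow.name}}/start/{{tasks.start.outputs.parameters.task-id}}",
    "argo-{{workflow.name}}/a/{{tasks.a.outputs.parameters.task-id}}",
]

print(pack_paths(paths, zlibmin=inf))  # placeholders intact
print(pack_paths(paths, zlibmin=1))    # opaque blob; template substitution breaks
```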
metaflow/plugins/pypi/conda_decorator.py
CHANGED
@@ -100,6 +100,10 @@ class CondaStepDecorator(StepDecorator):
             # --environment=pypi to --environment=conda
             _supported_virtual_envs.extend(["pypi"])
 
+        # TODO: Hardcoded for now to support Docker environment.
+        # We should introduce a more robust mechanism for appending supported environments, for example from within extensions.
+        _supported_virtual_envs.extend(["docker"])
+
         # The --environment= requirement ensures that valid virtual environments are
         # created for every step to execute it, greatly simplifying the @conda
         # implementation.
@@ -340,6 +344,10 @@ class CondaFlowDecorator(FlowDecorator):
             # --environment=pypi to --environment=conda
             _supported_virtual_envs.extend(["pypi"])
 
+        # TODO: Hardcoded for now to support Docker environment.
+        # We should introduce a more robust mechanism for appending supported environments, for example from within extensions.
+        _supported_virtual_envs.extend(["docker"])
+
         # The --environment= requirement ensures that valid virtual environments are
         # created for every step to execute it, greatly simplifying the @conda
         # implementation.
metaflow/plugins/pypi/conda_environment.py
CHANGED
@@ -65,7 +65,7 @@ class CondaEnvironment(MetaflowEnvironment):
         micromamba = Micromamba()
         self.solvers = {"conda": micromamba, "pypi": Pip(micromamba)}
 
-    def init_environment(self, echo):
+    def init_environment(self, echo, only_steps=None):
         # The implementation optimizes for latency to ensure as many operations can
         # be turned into cheap no-ops as feasible. Otherwise, we focus on maintaining
         # a balance between latency and maintainability of code without re-implementing
@@ -77,6 +77,8 @@ class CondaEnvironment(MetaflowEnvironment):
         def environments(type_):
             seen = set()
             for step in self.flow:
+                if only_steps and step.name not in only_steps:
+                    continue
                 environment = self.get_environment(step)
                 if type_ in environment and environment["id_"] not in seen:
                     seen.add(environment["id_"])
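The new `only_steps` parameter lets `init_environment` resolve environments for a subset of steps rather than the whole flow. A small self-contained sketch of the filtering pattern the hunk adds (`Step` and the step names here are stand-ins, not Metaflow classes):

```python
from collections import namedtuple

Step = namedtuple("Step", ["name", "environment_id"])

flow_steps = [
    Step("start", "env-a"),
    Step("train", "env-b"),
    Step("end", "env-a"),
]


def environments(steps, only_steps=None):
    """Yield each distinct environment id, optionally restricted to only_steps."""
    seen = set()
    for step in steps:
        if only_steps and step.name not in only_steps:
            continue  # skip steps the caller did not ask for
        if step.environment_id not in seen:
            seen.add(step.environment_id)
            yield step.environment_id


print(list(environments(flow_steps)))                        # ['env-a', 'env-b']
print(list(environments(flow_steps, only_steps={"train"})))  # ['env-b']
```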
metaflow/plugins/pypi/pip.py
CHANGED
@@ -9,7 +9,6 @@ from itertools import chain, product
 from urllib.parse import unquote
 
 from metaflow.exception import MetaflowException
-from metaflow.util import which
 
 from .micromamba import Micromamba
 from .utils import pip_tags, wheel_tags
@@ -25,6 +24,23 @@ class PipException(MetaflowException):
         super(PipException, self).__init__(msg)
 
 
+class PipPackageNotFound(Exception):
+    "Wrapper for pip package resolve errors."
+
+    def __init__(self, error):
+        self.error = error
+        try:
+            # Parse the package spec from error message:
+            # ERROR: ERROR: Could not find a version that satisfies the requirement pkg==0.0.1 (from versions: none)
+            # ERROR: No matching distribution found for pkg==0.0.1
+            self.package_spec = re.search(
+                "ERROR: No matching distribution found for (.*)", self.error
+            )[1]
+            self.package_name = re.match("\w*", self.package_spec)[0]
+        except Exception:
+            pass
+
+
 METADATA_FILE = "{prefix}/.pip/metadata"
 INSTALLATION_MARKER = "{prefix}/.pip/id"
 
@@ -81,7 +97,16 @@ class Pip(object):
                     cmd.append(f"{package}{version}")
                 else:
                     cmd.append(f"{package}=={version}")
-
+        try:
+            self._call(prefix, cmd)
+        except PipPackageNotFound as ex:
+            # pretty print package errors
+            raise PipException(
+                "Could not find a binary distribution for %s \n"
+                "for the platform %s\n\n"
+                "Note that ***@pypi*** does not currently support source distributions"
+                % (ex.package_spec, platform)
+            )
 
         def _format(dl_info):
             res = {k: v for k, v in dl_info.items() if k in ["url"]}
@@ -302,11 +327,14 @@ class Pip(object):
                 .strip()
             )
         except subprocess.CalledProcessError as e:
+            errors = e.stderr.decode()
+            if "No matching distribution" in errors:
+                raise PipPackageNotFound(errors)
             raise PipException(
                 "command '{cmd}' returned error ({code}) {output}\n{stderr}".format(
                     cmd=" ".join(e.cmd),
                     code=e.returncode,
                     output=e.output.decode(),
-                    stderr=
+                    stderr=errors,
                )
            )
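The new `PipPackageNotFound` wrapper pulls the offending requirement out of pip's stderr so that the user-facing `PipException` can name it. A quick self-contained sketch of that parsing step, using a made-up stderr blob in place of a real `subprocess.CalledProcessError`:

```python
import re

# Example stderr from a failed pip resolve (illustrative).
stderr = (
    "ERROR: Could not find a version that satisfies the requirement pkg==0.0.1 (from versions: none)\n"
    "ERROR: No matching distribution found for pkg==0.0.1\n"
)

# The same expressions the diff adds in PipPackageNotFound.__init__:
package_spec = re.search("ERROR: No matching distribution found for (.*)", stderr)[1]
package_name = re.match(r"\w*", package_spec)[0]

print(package_spec)  # pkg==0.0.1
print(package_name)  # pkg
```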
metaflow/plugins/pypi/pypi_decorator.py
CHANGED
@@ -70,6 +70,10 @@ class PyPIStepDecorator(StepDecorator):
             # --environment=pypi to --environment=conda
             _supported_virtual_envs.extend(["pypi"])
 
+        # TODO: Hardcoded for now to support Docker environment.
+        # We should introduce a more robust mechanism for appending supported environments, for example from within extensions.
+        _supported_virtual_envs.extend(["docker"])
+
         # The --environment= requirement ensures that valid virtual environments are
         # created for every step to execute it, greatly simplifying the @pypi
         # implementation.
@@ -119,6 +123,10 @@ class PyPIFlowDecorator(FlowDecorator):
             # --environment=pypi to --environment=conda
             _supported_virtual_envs.extend(["pypi"])
 
+        # TODO: Hardcoded for now to support Docker environment.
+        # We should introduce a more robust mechanism for appending supported environments, for example from within extensions.
+        _supported_virtual_envs.extend(["docker"])
+
         # The --environment= requirement ensures that valid virtual environments are
         # created for every step to execute it, greatly simplifying the @conda
         # implementation.
metaflow/system/system_logger.py
ADDED
@@ -0,0 +1,103 @@
+import os
+import sys
+from typing import Dict, Any, Optional, Union
+
+
+class SystemLogger(object):
+    def __init__(self):
+        self._logger = None
+        self._flow_name = None
+        self._context = {}
+        self._is_context_updated = False
+
+    def __del__(self):
+        if self._flow_name == "not_a_real_flow":
+            self.logger.terminate()
+
+    def update_context(self, context: Dict[str, Any]):
+        """
+        Update the global context maintained by the system logger.
+
+        Parameters
+        ----------
+        context : Dict[str, Any]
+            A dictionary containing the context to update.
+
+        """
+        self._is_context_updated = True
+        self._context.update(context)
+
+    def init_system_logger(
+        self, flow_name: str, logger: "metaflow.event_logger.NullEventLogger"
+    ):
+        self._flow_name = flow_name
+        self._logger = logger
+
+    def _init_logger_outside_flow(self):
+        from .system_utils import DummyFlow
+        from .system_utils import init_environment_outside_flow
+        from metaflow.plugins import LOGGING_SIDECARS
+        from metaflow.metaflow_config import DEFAULT_EVENT_LOGGER
+
+        self._flow_name = "not_a_real_flow"
+        _flow = DummyFlow(self._flow_name)
+        _environment = init_environment_outside_flow(_flow)
+        _logger = LOGGING_SIDECARS[DEFAULT_EVENT_LOGGER](_flow, _environment)
+        return _logger
+
+    @property
+    def logger(self) -> Optional["metaflow.event_logger.NullEventLogger"]:
+        if self._logger is None:
+            # This happens if the logger is being accessed outside of a flow
+            # We start a logger with a dummy flow and a default environment
+            self._debug("Started logger outside of a flow")
+            self._logger = self._init_logger_outside_flow()
+            self._logger.start()
+        return self._logger
+
+    @staticmethod
+    def _debug(msg: str):
+        """
+        Log a debug message to stderr.
+
+        Parameters
+        ----------
+        msg : str
+            Message to log.
+
+        """
+        if os.environ.get("METAFLOW_DEBUG_SIDECAR", "0").lower() not in (
+            "0",
+            "false",
+            "",
+        ):
+            print("system monitor: %s" % msg, file=sys.stderr)
+
+    def log_event(
+        self, level: str, module: str, name: str, payload: Optional[Any] = None
+    ):
+        """
+        Log an event to the event logger.
+
+        Parameters
+        ----------
+        level : str
+            Log level of the event. Can be one of "info", "warning", "error", "critical", "debug".
+        module : str
+            Module of the event. Usually the name of the class, function, or module that the event is being logged from.
+        name : str
+            Name of the event. Used to qualify the event type.
+        payload : Optional[Any], default None
+            Payload of the event. Contains the event data.
+        """
+        self.logger.log(
+            {
+                "level": level,
+                "module": module,
+                "name": name,
+                "payload": payload if payload is not None else {},
+                "context": self._context,
+                "is_context_updated": self._is_context_updated,
+            }
+        )
+        self._is_context_updated = False
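The class above is exposed as a process-wide `_system_logger` singleton (see the `metaflow.system` imports in the cli.py and task.py hunks). A minimal usage sketch based on the signatures shown above; the module and event names are illustrative:

```python
from metaflow.system import _system_logger

# Context is merged into every subsequent event; task.py uses this to attach
# run_id / step_name / task_id / retry_count to everything emitted during a task.
_system_logger.update_context({"run_id": "123", "step_name": "start"})

_system_logger.log_event(
    level="info",
    module="my_extension",   # illustrative module name
    name="example_event",    # illustrative event name
    payload={"msg": "hello"},
)
# Outside a flow, the first access lazily starts the default event-logger sidecar
# against a DummyFlow named "not_a_real_flow" (see system_utils.py later in this diff).
```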
metaflow/system/system_monitor.py
ADDED
@@ -0,0 +1,132 @@
+import os
+import sys
+from ..debug import debug
+from contextlib import contextmanager
+from typing import Optional, Union, Dict, Any
+
+
+class SystemMonitor(object):
+    def __init__(self):
+        self._monitor = None
+        self._flow_name = None
+        self._context = {}
+
+    def __del__(self):
+        if self._flow_name == "not_a_real_flow":
+            self.monitor.terminate()
+
+    def update_context(self, context: Dict[str, Any]):
+        """
+        Update the global context maintained by the system monitor.
+
+        Parameters
+        ----------
+        context : Dict[str, Any]
+            A dictionary containing the context to update.
+
+        """
+        from metaflow.sidecar import Message, MessageTypes
+
+        self._context.update(context)
+        self.monitor.send(
+            Message(
+                MessageTypes.MUST_SEND,
+                {
+                    "is_context_updated": True,
+                    **self._context,
+                },
+            )
+        )
+
+    def init_system_monitor(
+        self, flow_name: str, monitor: "metaflow.monitor.NullMonitor"
+    ):
+        self._flow_name = flow_name
+        self._monitor = monitor
+
+    def _init_system_monitor_outside_flow(self):
+        from .system_utils import DummyFlow
+        from .system_utils import init_environment_outside_flow
+        from metaflow.plugins import MONITOR_SIDECARS
+        from metaflow.metaflow_config import DEFAULT_MONITOR
+
+        self._flow_name = "not_a_real_flow"
+        _flow = DummyFlow(self._flow_name)
+        _environment = init_environment_outside_flow(_flow)
+        _monitor = MONITOR_SIDECARS[DEFAULT_MONITOR](_flow, _environment)
+        return _monitor
+
+    @property
+    def monitor(self) -> Optional["metaflow.monitor.NullMonitor"]:
+        if self._monitor is None:
+            # This happens if the monitor is being accessed outside of a flow
+            self._debug("Started monitor outside of a flow")
+            self._monitor = self._init_system_monitor_outside_flow()
+            self._monitor.start()
+        return self._monitor
+
+    @staticmethod
+    def _debug(msg: str):
+        """
+        Log a debug message to stderr.
+
+        Parameters
+        ----------
+        msg : str
+            Message to log.
+
+        """
+        if os.environ.get("METAFLOW_DEBUG_SIDECAR", "0").lower() not in (
+            "0",
+            "false",
+            "",
+        ):
+            print("system monitor: %s" % msg, file=sys.stderr)
+
+    @contextmanager
+    def measure(self, name: str):
+        """
+        Context manager to measure the execution duration and counter of a block of code.
+
+        Parameters
+        ----------
+        name : str
+            The name to associate with the timer and counter.
+
+        Yields
+        ------
+        None
+        """
+        # Delegating the context management to the monitor's measure method
+        with self.monitor.measure(name):
+            yield
+
+    @contextmanager
+    def count(self, name: str):
+        """
+        Context manager to increment a counter.
+
+        Parameters
+        ----------
+        name : str
+            The name of the counter.
+
+        Yields
+        ------
+        None
+        """
+        # Delegating the context management to the monitor's count method
+        with self.monitor.count(name):
+            yield
+
+    def gauge(self, gauge: "metaflow.monitor.Gauge"):
+        """
+        Log a gauge.
+
+        Parameters
+        ----------
+        gauge : metaflow.monitor.Gauge
+            The gauge to log.
+
+        """
+        self.monitor.gauge(gauge)
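Like the logger, the monitor is reached through a `_system_monitor` singleton; the task.py hunks later in this diff wrap task execution in `measure("metaflow.task.duration")` and bump counters such as `metaflow.task.start` and `metaflow.task.end`. A minimal usage sketch with an illustrative metric name:

```python
import time

from metaflow.system import _system_monitor

# Emit a timer/counter pair around a block of work.
with _system_monitor.measure("my_extension.block.duration"):  # illustrative metric name
    time.sleep(0.1)

# Bump a bare counter.
with _system_monitor.count("my_extension.block.invocations"):
    pass

# Context updates travel to the monitor sidecar as a MUST_SEND message.
_system_monitor.update_context({"run_id": "123"})
```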
metaflow/system/system_utils.py
ADDED
@@ -0,0 +1,19 @@
+from typing import Union
+
+
+class DummyFlow(object):
+    def __init__(self, name="not_a_real_flow"):
+        self.name = name
+
+
+# This function is used to initialize the environment outside a flow.
+def init_environment_outside_flow(
+    flow: Union["metaflow.flowspec.FlowSpec", "metaflow.sidecar.DummyFlow"]
+) -> "metaflow.metaflow_environment.MetaflowEnvironment":
+    from metaflow.plugins import ENVIRONMENTS
+    from metaflow.metaflow_config import DEFAULT_ENVIRONMENT
+    from metaflow.metaflow_environment import MetaflowEnvironment
+
+    return [
+        e for e in ENVIRONMENTS + [MetaflowEnvironment] if e.TYPE == DEFAULT_ENVIRONMENT
+    ][0](flow)
metaflow/task.py
CHANGED
@@ -4,9 +4,11 @@ import math
 import sys
 import os
 import time
+import traceback
 
 from types import MethodType, FunctionType
 
+from metaflow.sidecar import Message, MessageTypes
 from metaflow.datastore.exceptions import DataException
 
 from .metaflow_config import MAX_ATTEMPTS
@@ -22,6 +24,7 @@ from .unbounded_foreach import UBF_CONTROL
 from .util import all_equal, get_username, resolve_identity, unicode_type
 from .clone_util import clone_task_helper
 from .metaflow_current import current
+from metaflow.system import _system_logger, _system_monitor
 from metaflow.tracing import get_trace_id
 from metaflow.tuple_util import ForeachFrame
 
@@ -280,25 +283,42 @@ class MetaflowTask(object):
                     "task.clone_only needs a valid clone_origin_task value."
                 )
             origin_run_id, _, origin_task_id = clone_origin_task.split("/")
-
-
-
-
-                self.flow.name,
-                origin_run_id,
-                step_name,
-                origin_task_id,
-                self.flow.name,
-                run_id,
-                step_name,
-                task_id,
-            ),
-            "step_name": step_name,
+        # Update system logger and monitor context
+        # We also pass this context as part of the task payload to support implementations that
+        # can't access the context directly
+        task_payload = {
             "run_id": run_id,
-            "
-            "
+            "step_name": step_name,
+            "task_id": task_id,
+            "retry_count": retry_count,
+            "project_name": current.get("project_name"),
+            "branch_name": current.get("branch_name"),
+            "is_user_branch": current.get("is_user_branch"),
+            "is_production": current.get("is_production"),
+            "project_flow_name": current.get("project_flow_name"),
+            "origin_run_id": origin_run_id,
+            "origin_task_id": origin_task_id,
         }
-
+        _system_logger.update_context(task_payload)
+        _system_monitor.update_context(task_payload)
+
+        msg = "Cloning task from {}/{}/{}/{} to {}/{}/{}/{}".format(
+            self.flow.name,
+            origin_run_id,
+            step_name,
+            origin_task_id,
+            self.flow.name,
+            run_id,
+            step_name,
+            task_id,
+        )
+        with _system_monitor.count("metaflow.task.clone"):
+            _system_logger.log_event(
+                level="info",
+                module="metaflow.task",
+                name="clone",
+                payload={**task_payload, "msg": msg},
+            )
         # If we actually have to do the clone ourselves, proceed...
         clone_task_helper(
             self.flow.name,
@@ -502,204 +522,230 @@ class MetaflowTask(object):
                 }
             }
         )
-
+
+        # 6. Update system logger and monitor context
+        # We also pass this context as part of the task payload to support implementations that
+        # can't access the context directly
+
+        task_payload = {
+            "run_id": run_id,
+            "step_name": step_name,
+            "task_id": task_id,
+            "retry_count": retry_count,
+            "project_name": current.get("project_name"),
+            "branch_name": current.get("branch_name"),
+            "is_user_branch": current.get("is_user_branch"),
+            "is_production": current.get("is_production"),
+            "project_flow_name": current.get("project_flow_name"),
+            "trace_id": trace_id or None,
+        }
+
+        _system_logger.update_context(task_payload)
+        _system_monitor.update_context(task_payload)
         start = time.time()
         self.metadata.start_task_heartbeat(self.flow.name, run_id, step_name, task_id)
-
-
-            "
-
-
-
-
-
-        }
-        logger.log(msg)
-
-        self.flow._current_step = step_name
-        self.flow._success = False
-        self.flow._task_ok = None
-        self.flow._exception = None
-        # Note: All internal flow attributes (ie: non-user artifacts)
-        # should either be set prior to running the user code or listed in
-        # FlowSpec._EPHEMERAL to allow for proper merging/importing of
-        # user artifacts in the user's step code.
-
-        if join_type:
-            # Join step:
-
-            # Ensure that we have the right number of inputs. The
-            # foreach case is checked above.
-            if join_type != "foreach" and len(inputs) != len(node.in_funcs):
-                raise MetaflowDataMissing(
-                    "Join *%s* expected %d "
-                    "inputs but only %d inputs "
-                    "were found" % (step_name, len(node.in_funcs), len(inputs))
+        with self.monitor.measure("metaflow.task.duration"):
+            try:
+                with self.monitor.count("metaflow.task.start"):
+                    _system_logger.log_event(
+                        level="info",
+                        module="metaflow.task",
+                        name="start",
+                        payload={**task_payload, "msg": "Task started"},
                     )
 
-
-
-
-                self.flow.
-                #
-                #
-                #
-
-
-
-
-
-
-
-
-
-
-
-
-
+                self.flow._current_step = step_name
+                self.flow._success = False
+                self.flow._task_ok = None
+                self.flow._exception = None
+                # Note: All internal flow attributes (ie: non-user artifacts)
+                # should either be set prior to running the user code or listed in
+                # FlowSpec._EPHEMERAL to allow for proper merging/importing of
+                # user artifacts in the user's step code.
+
+                if join_type:
+                    # Join step:
+
+                    # Ensure that we have the right number of inputs. The
+                    # foreach case is checked above.
+                    if join_type != "foreach" and len(inputs) != len(node.in_funcs):
+                        raise MetaflowDataMissing(
+                            "Join *%s* expected %d "
+                            "inputs but only %d inputs "
+                            "were found" % (step_name, len(node.in_funcs), len(inputs))
+                        )
+
+                    # Multiple input contexts are passed in as an argument
+                    # to the step function.
+                    input_obj = Inputs(self._clone_flow(inp) for inp in inputs)
+                    self.flow._set_datastore(output)
                     # initialize parameters (if they exist)
                     # We take Parameter values from the first input,
                     # which is always safe since parameters are read-only
                     current._update_env(
                         {
                             "parameter_names": self._init_parameters(
-                                inputs[0], passdown=
+                                inputs[0], passdown=True
                             )
                         }
                     )
+                else:
+                    # Linear step:
+                    # We are running with a single input context.
+                    # The context is embedded in the flow.
+                    if len(inputs) > 1:
+                        # This should be captured by static checking but
+                        # let's assert this again
+                        raise MetaflowInternalError(
+                            "Step *%s* is not a join "
+                            "step but it gets multiple "
+                            "inputs." % step_name
+                        )
+                    self.flow._set_datastore(inputs[0])
+                    if input_paths:
+                        # initialize parameters (if they exist)
+                        # We take Parameter values from the first input,
+                        # which is always safe since parameters are read-only
+                        current._update_env(
+                            {
+                                "parameter_names": self._init_parameters(
+                                    inputs[0], passdown=False
+                                )
+                            }
+                        )
+
+                for deco in decorators:
+                    deco.task_pre_step(
+                        step_name,
+                        output,
+                        self.metadata,
+                        run_id,
+                        task_id,
+                        self.flow,
+                        self.flow._graph,
+                        retry_count,
+                        max_user_code_retries,
+                        self.ubf_context,
+                        inputs,
+                    )
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        for deco in decorators:
-            # decorators can actually decorate the step function,
-            # or they can replace it altogether. This functionality
-            # is used e.g. by catch_decorator which switches to a
-            # fallback code if the user code has failed too many
-            # times.
-            step_func = deco.task_decorate(
-                step_func,
-                self.flow,
-                self.flow._graph,
-                retry_count,
-                max_user_code_retries,
-                self.ubf_context,
-            )
+                for deco in decorators:
+                    # decorators can actually decorate the step function,
+                    # or they can replace it altogether. This functionality
+                    # is used e.g. by catch_decorator which switches to a
+                    # fallback code if the user code has failed too many
+                    # times.
+                    step_func = deco.task_decorate(
+                        step_func,
+                        self.flow,
+                        self.flow._graph,
+                        retry_count,
+                        max_user_code_retries,
+                        self.ubf_context,
+                    )
 
-
-
-
-
+                if join_type:
+                    self._exec_step_function(step_func, input_obj)
+                else:
+                    self._exec_step_function(step_func)
+
+                for deco in decorators:
+                    deco.task_post_step(
+                        step_name,
+                        self.flow,
+                        self.flow._graph,
+                        retry_count,
+                        max_user_code_retries,
+                    )
 
-
-
-
-
-
-
-
-
+                self.flow._task_ok = True
+                self.flow._success = True
+
+            except Exception as ex:
+                with self.monitor.count("metaflow.task.exception"):
+                    _system_logger.log_event(
+                        level="error",
+                        module="metaflow.task",
+                        name="exception",
+                        payload={**task_payload, "msg": traceback.format_exc()},
+                    )
 
-
-
-
-
-
-
-
-
-
-
-
-
-
+                exception_handled = False
+                for deco in decorators:
+                    res = deco.task_exception(
+                        ex,
+                        step_name,
+                        self.flow,
+                        self.flow._graph,
+                        retry_count,
+                        max_user_code_retries,
+                    )
+                    exception_handled = bool(res) or exception_handled
+
+                if exception_handled:
+                    self.flow._task_ok = True
+                else:
+                    self.flow._task_ok = False
+                    self.flow._exception = MetaflowExceptionWrapper(ex)
+                    print("%s failed:" % self.flow, file=sys.stderr)
+                    raise
+
+            finally:
+                if self.ubf_context == UBF_CONTROL:
+                    self._finalize_control_task()
+
+                # Emit metrics to logger/monitor sidecar implementations
+                with self.monitor.count("metaflow.task.end"):
+                    _system_logger.log_event(
+                        level="info",
+                        module="metaflow.task",
+                        name="end",
+                        payload={**task_payload, "msg": "Task ended"},
+                    )
 
-
-
-            ex,
+                attempt_ok = str(bool(self.flow._task_ok))
+                self.metadata.register_metadata(
+                    run_id,
                     step_name,
-
-
-
-
+                    task_id,
+                    [
+                        MetaDatum(
+                            field="attempt_ok",
+                            value=attempt_ok,
+                            type="internal_attempt_status",
+                            tags=["attempt_id:{0}".format(retry_count)],
+                        )
+                    ],
                 )
-            exception_handled = bool(res) or exception_handled
-
-        if exception_handled:
-            self.flow._task_ok = True
-        else:
-            self.flow._task_ok = False
-            self.flow._exception = MetaflowExceptionWrapper(ex)
-            print("%s failed:" % self.flow, file=sys.stderr)
-            raise
-
-        finally:
-            if self.ubf_context == UBF_CONTROL:
-                self._finalize_control_task()
-
-            end = time.time() - start
-
-            msg = {
-                "task_id": task_id,
-                "msg": "task ending",
-                "step_name": step_name,
-                "run_id": run_id,
-                "flow_name": self.flow.name,
-                "ts": round(time.time()),
-                "runtime": round(end),
-            }
-            logger.log(msg)
 
-
-
-
-
-
-
-
-
-
-
+                output.save_metadata({"task_end": {}})
+                output.persist(self.flow)
+
+                # this writes a success marker indicating that the
+                # "transaction" is done
+                output.done()
+
+                # final decorator hook: The task results are now
+                # queryable through the client API / datastore
+                for deco in decorators:
+                    deco.task_finished(
+                        step_name,
+                        self.flow,
+                        self.flow._graph,
+                        self.flow._task_ok,
+                        retry_count,
+                        max_user_code_retries,
                     )
-            ],
-        )
-
-        output.save_metadata({"task_end": {}})
-        output.persist(self.flow)
 
-
-
-
-
-
-
-
-
-            self.flow._task_ok,
-            retry_count,
-            max_user_code_retries,
+                # terminate side cars
+                self.metadata.stop_heartbeat()
+
+                # Task duration consists of the time taken to run the task as well as the time taken to
+                # persist the task metadata and data to the datastore.
+                duration = time.time() - start
+                _system_logger.log_event(
+                    level="info",
+                    module="metaflow.task",
+                    name="duration",
+                    payload={**task_payload, "msg": str(duration)},
                 )
-
-        # terminate side cars
-        self.metadata.stop_heartbeat()
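Taken together, the task.py changes route per-attempt telemetry through the new system logger and monitor. A short sketch of the resulting stream, using only the names that appear in the hunks above (the consumer function is illustrative, not a Metaflow API):

```python
# Counters and timers emitted per task attempt:
#   metaflow.task.clone      - counter, bumped when a task is cloned
#   metaflow.task.start      - counter, bumped when user code starts
#   metaflow.task.exception  - counter, bumped when user code raises
#   metaflow.task.end        - counter, bumped in the finally block
#   metaflow.task.duration   - timer around the whole attempt
#
# Logged events carry the shared task_payload (run_id, step_name, task_id,
# retry_count, project fields, trace_id) plus a human-readable "msg".

def handle_event(event):  # illustrative consumer, e.g. a custom event-logger sidecar
    if event["module"] == "metaflow.task" and event["name"] == "duration":
        print("attempt took %s seconds" % event["payload"]["msg"])
```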
metaflow/version.py
CHANGED
@@ -1 +1 @@
-metaflow_version = "2.12.5"
+metaflow_version = "2.12.7"
{metaflow-2.12.5.dist-info → metaflow-2.12.7.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: metaflow
-Version: 2.12.5
+Version: 2.12.7
 Summary: Metaflow: More Data Science, Less Engineering
 Author: Metaflow Developers
 Author-email: help@metaflow.org
@@ -26,7 +26,7 @@ License-File: LICENSE
 Requires-Dist: requests
 Requires-Dist: boto3
 Provides-Extra: stubs
-Requires-Dist: metaflow-stubs ==2.12.5 ; extra == 'stubs'
+Requires-Dist: metaflow-stubs ==2.12.7 ; extra == 'stubs'
 
 
 
{metaflow-2.12.5.dist-info → metaflow-2.12.7.dist-info}/RECORD
CHANGED
@@ -1,7 +1,7 @@
 metaflow/R.py,sha256=CqVfIatvmjciuICNnoyyNGrwE7Va9iXfLdFbQa52hwA,3958
 metaflow/__init__.py,sha256=3GEqivYycw6mvjn-ndEFGuCdYnGztciQgEWX87vjf6M,5885
 metaflow/cards.py,sha256=tP1_RrtmqdFh741pqE4t98S7SA0MtGRlGvRICRZF1Mg,426
-metaflow/cli.py,sha256=
+metaflow/cli.py,sha256=YQBqwaPsD4OAkt_UQypZ0spB3LRlgomi-tExT_n2vdQ,33961
 metaflow/cli_args.py,sha256=lcgBGNTvfaiPxiUnejAe60Upt9swG6lRy1_3OqbU6MY,2616
 metaflow/clone_util.py,sha256=XfUX0vssu_hPlyZfhFl1AOnKkLqvt33Qp8xNrmdocGg,2057
 metaflow/cmd_with_io.py,sha256=kl53HkAIyv0ecpItv08wZYczv7u3msD1VCcciqigqf0,588
@@ -30,12 +30,12 @@ metaflow/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 metaflow/pylint_wrapper.py,sha256=zzBY9YaSUZOGH-ypDKAv2B_7XcoyMZj-zCoCrmYqNRc,2865
 metaflow/runtime.py,sha256=KQbLI4zH9V6L6YwOgYz28CPMOT_akoCWEBb49avSQvo,63993
 metaflow/tagging_util.py,sha256=ctyf0Q1gBi0RyZX6J0e9DQGNkNHblV_CITfy66axXB4,2346
-metaflow/task.py,sha256=
+metaflow/task.py,sha256=uJHl8K4n3jNllWHSsG1vAZtDza0U2QbQcdg9GS_YPBE,28660
 metaflow/tuple_util.py,sha256=_G5YIEhuugwJ_f6rrZoelMFak3DqAR2tt_5CapS1XTY,830
 metaflow/unbounded_foreach.py,sha256=p184WMbrMJ3xKYHwewj27ZhRUsSj_kw1jlye5gA9xJk,387
 metaflow/util.py,sha256=m5womQ7y-jXehuMyHPfByDbZ4HwTJxzs869cPOlMR8s,13057
 metaflow/vendor.py,sha256=FchtA9tH22JM-eEtJ2c9FpUdMn8sSb1VHuQS56EcdZk,5139
-metaflow/version.py,sha256=
+metaflow/version.py,sha256=ve6PO5pM0roPNs0Q25TvJGmen3p4hLZdP2jdojcp-sM,28
 metaflow/_vendor/__init__.py,sha256=y_CiwUD3l4eAKvTVDZeqgVujMy31cAM1qjAB-HfI-9s,353
 metaflow/_vendor/typing_extensions.py,sha256=0nUs5p1A_UrZigrAVBoOEM6TxU37zzPDUtiij1ZwpNc,110417
 metaflow/_vendor/zipp.py,sha256=ajztOH-9I7KA_4wqDYygtHa6xUBVZgFpmZ8FE74HHHI,8425
@@ -110,7 +110,7 @@ metaflow/_vendor/v3_6/importlib_metadata/_meta.py,sha256=_F48Hu_jFxkfKWz5wcYS8vO
 metaflow/_vendor/v3_6/importlib_metadata/_text.py,sha256=HCsFksZpJLeTP3NEk_ngrAeXVRRtTrtyh9eOABoRP4A,2166
 metaflow/_vendor/v3_6/importlib_metadata/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 metaflow/client/__init__.py,sha256=1GtQB4Y_CBkzaxg32L1syNQSlfj762wmLrfrDxGi1b8,226
-metaflow/client/core.py,sha256=
+metaflow/client/core.py,sha256=aFAAacCf3F7Iq0ZGMKr_HW2jh-CKV7p2uC1GhZumPxI,74067
 metaflow/client/filecache.py,sha256=QdD1sW6w4Nnza-ioz4I1fEZI843X33AFIV3eSxq-cuU,14868
 metaflow/cmd/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
 metaflow/cmd/configure_cmd.py,sha256=o-DKnUf2FBo_HiMVyoyzQaGBSMtpbEPEdFTQZ0hkU-k,33396
@@ -174,7 +174,7 @@ metaflow/plugins/airflow/sensors/s3_sensor.py,sha256=iDReG-7FKnumrtQg-HY6cCUAAqN
 metaflow/plugins/argo/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 metaflow/plugins/argo/argo_client.py,sha256=MKKhMCbWOPzf6z5zQQiyDRHHkAXcO7ipboDZDqAAvOk,15849
 metaflow/plugins/argo/argo_events.py,sha256=_C1KWztVqgi3zuH57pInaE9OzABc2NnncC-zdwOMZ-w,5909
-metaflow/plugins/argo/argo_workflows.py,sha256=
+metaflow/plugins/argo/argo_workflows.py,sha256=7WFmAjSbcJagQD8acih6iRZjeC5SPNXuJ2bFggPPoXA,130686
 metaflow/plugins/argo/argo_workflows_cli.py,sha256=sZTpgfmc50eT3e0qIxpVqUgWhTcYlO1HM4gU6Oaya8g,33259
 metaflow/plugins/argo/argo_workflows_decorator.py,sha256=K5t4uIk2IXPdK7v7DEjj3buSB8ikLjLycKjbZUYeiaw,6781
 metaflow/plugins/argo/generate_input_paths.py,sha256=loYsI6RFX9LlFsHb7Fe-mzlTTtRdySoOu7sYDy-uXK0,881
@@ -285,11 +285,11 @@ metaflow/plugins/metadata/local.py,sha256=YhLJC5zjVJrvQFIyQ92ZBByiUmhCC762RUX7IT
 metaflow/plugins/metadata/service.py,sha256=ihq5F7KQZlxvYwzH_-jyP2aWN_I96i2vp92j_d697s8,20204
 metaflow/plugins/pypi/__init__.py,sha256=0YFZpXvX7HCkyBFglatual7XGifdA1RwC3U4kcizyak,1037
 metaflow/plugins/pypi/bootstrap.py,sha256=Hik3PZ_RQC8T6hEf-NE2Xr_jq2ZIUkpgUtJlx-rqJgU,5107
-metaflow/plugins/pypi/conda_decorator.py,sha256
-metaflow/plugins/pypi/conda_environment.py,sha256=
+metaflow/plugins/pypi/conda_decorator.py,sha256=phrUvVC5QrfNwPqIByrXsnpRDg1SNVsfpl1wbAVrykI,14679
+metaflow/plugins/pypi/conda_environment.py,sha256=COybS4bogDm956UzOzbd4JupE7PBbEMqq1dfl-f9DYM,19339
 metaflow/plugins/pypi/micromamba.py,sha256=wlVN2fm4WXFh3jVNtpDfu4XEz6VJKbmFNp0QvqlMIuI,12179
-metaflow/plugins/pypi/pip.py,sha256=
-metaflow/plugins/pypi/pypi_decorator.py,sha256=
+metaflow/plugins/pypi/pip.py,sha256=uYPEHYV1_PtY4QA3NqUcVSPBAlRucGeY9tuyz7sB7aY,13641
+metaflow/plugins/pypi/pypi_decorator.py,sha256=Plmm4fhLECW-sj1QSFI84Gva7qqqwlJsqJ8laCRKIzw,6073
 metaflow/plugins/pypi/pypi_environment.py,sha256=FYMg8kF3lXqcLfRYWD83a9zpVjcoo_TARqMGZ763rRk,230
 metaflow/plugins/pypi/utils.py,sha256=ds1Mnv_DaxGnLAYp7ozg_K6oyguGyNhvHfE-75Ia1YA,2836
 metaflow/plugins/secrets/__init__.py,sha256=mhJaN2eMS_ZZVewAMR2E-JdP5i0t3v9e6Dcwd-WpruE,310
@@ -305,6 +305,10 @@ metaflow/sidecar/sidecar.py,sha256=EspKXvPPNiyRToaUZ51PS5TT_PzrBNAurn_wbFnmGr0,1
 metaflow/sidecar/sidecar_messages.py,sha256=zPsCoYgDIcDkkvdC9MEpJTJ3y6TSGm2JWkRc4vxjbFA,1071
 metaflow/sidecar/sidecar_subprocess.py,sha256=f72n5iJJAYfCIbz4D94-RxR37VvM7kVvE3c8E9dYHe8,9708
 metaflow/sidecar/sidecar_worker.py,sha256=4DfpxtnuphngOnIehKjNR_Knhu1hY7DYBcHl4Svpe3Y,2050
+metaflow/system/__init__.py,sha256=SB9Py7Acecqi76MY9MonSHXFuDD1yIJEGJtEQH8cNq4,149
+metaflow/system/system_logger.py,sha256=31noRo2qFdFyVtlyvIV2coUw4x6YdJGCdhctNBbBUso,3299
+metaflow/system/system_monitor.py,sha256=NracI4ITbUZWziGsAvE1910ODZhXGVYro7jD-rLWe48,3578
+metaflow/system/system_utils.py,sha256=E5C66_oeAxEE4-2okKlA-X6rgeYndPFP7KmWKHvvFZ8,657
 metaflow/tracing/__init__.py,sha256=xYTOT5BS5jbwhjk6hskxqNSU9st2LYtfeLN2Hknm3EI,1551
 metaflow/tracing/propagator.py,sha256=AdPeAqoeRauH82pTw01hLFNPRAzm29nlwM7C2iqKFFk,2502
 metaflow/tracing/span_exporter.py,sha256=kbi92INM8yKPqwLEVbWy7OcdsVJoh5nCbwAK9VyPx_w,1520
@@ -332,9 +336,9 @@ metaflow/tutorials/07-worldview/README.md,sha256=5vQTrFqulJ7rWN6r20dhot9lI2sVj9W
 metaflow/tutorials/07-worldview/worldview.ipynb,sha256=ztPZPI9BXxvW1QdS2Tfe7LBuVzvFvv0AToDnsDJhLdE,2237
 metaflow/tutorials/08-autopilot/README.md,sha256=GnePFp_q76jPs991lMUqfIIh5zSorIeWznyiUxzeUVE,1039
 metaflow/tutorials/08-autopilot/autopilot.ipynb,sha256=DQoJlILV7Mq9vfPBGW-QV_kNhWPjS5n6SJLqePjFYLY,3191
-metaflow-2.12.
-metaflow-2.12.
-metaflow-2.12.
-metaflow-2.12.
-metaflow-2.12.
-metaflow-2.12.
+metaflow-2.12.7.dist-info/LICENSE,sha256=nl_Lt5v9VvJ-5lWJDT4ddKAG-VZ-2IaLmbzpgYDz2hU,11343
+metaflow-2.12.7.dist-info/METADATA,sha256=NkSjEHjnJp5ts8eQ8FV1y_z8a_d2nIeGbRGCx3OCW14,5906
+metaflow-2.12.7.dist-info/WHEEL,sha256=0XQbNV6JE5ziJsWjIU8TRRv0N6SohNonLWgP86g5fiI,109
+metaflow-2.12.7.dist-info/entry_points.txt,sha256=IKwTN1T3I5eJL3uo_vnkyxVffcgnRdFbKwlghZfn27k,57
+metaflow-2.12.7.dist-info/top_level.txt,sha256=v1pDHoWaSaKeuc5fKTRSfsXCKSdW1zvNVmvA-i0if3o,9
+metaflow-2.12.7.dist-info/RECORD,,
{metaflow-2.12.5.dist-info → metaflow-2.12.7.dist-info}/LICENSE: file without changes
{metaflow-2.12.5.dist-info → metaflow-2.12.7.dist-info}/entry_points.txt: file without changes
{metaflow-2.12.5.dist-info → metaflow-2.12.7.dist-info}/top_level.txt: file without changes