ob-metaflow 2.15.21.4__py2.py3-none-any.whl → 2.15.21.5__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of ob-metaflow might be problematic.

Files changed (37):
  1. metaflow/_vendor/click/core.py +3 -4
  2. metaflow/_vendor/imghdr/__init__.py +11 -0
  3. metaflow/_vendor/yaml/__init__.py +427 -0
  4. metaflow/_vendor/yaml/composer.py +139 -0
  5. metaflow/_vendor/yaml/constructor.py +748 -0
  6. metaflow/_vendor/yaml/cyaml.py +101 -0
  7. metaflow/_vendor/yaml/dumper.py +62 -0
  8. metaflow/_vendor/yaml/emitter.py +1137 -0
  9. metaflow/_vendor/yaml/error.py +75 -0
  10. metaflow/_vendor/yaml/events.py +86 -0
  11. metaflow/_vendor/yaml/loader.py +63 -0
  12. metaflow/_vendor/yaml/nodes.py +49 -0
  13. metaflow/_vendor/yaml/parser.py +589 -0
  14. metaflow/_vendor/yaml/reader.py +185 -0
  15. metaflow/_vendor/yaml/representer.py +389 -0
  16. metaflow/_vendor/yaml/resolver.py +227 -0
  17. metaflow/_vendor/yaml/scanner.py +1435 -0
  18. metaflow/_vendor/yaml/serializer.py +111 -0
  19. metaflow/_vendor/yaml/tokens.py +104 -0
  20. metaflow/cli.py +11 -2
  21. metaflow/plugins/argo/argo_workflows.py +60 -2
  22. metaflow/plugins/argo/argo_workflows_deployer_objects.py +37 -0
  23. metaflow/plugins/aws/step_functions/step_functions_deployer_objects.py +16 -0
  24. metaflow/plugins/cards/card_modules/convert_to_native_type.py +7 -1
  25. metaflow/plugins/kubernetes/kubernetes_jobsets.py +26 -28
  26. metaflow/runner/deployer.py +77 -0
  27. metaflow/vendor.py +23 -6
  28. metaflow/version.py +1 -1
  29. {ob_metaflow-2.15.21.4.dist-info → ob_metaflow-2.15.21.5.dist-info}/METADATA +2 -2
  30. {ob_metaflow-2.15.21.4.dist-info → ob_metaflow-2.15.21.5.dist-info}/RECORD +37 -20
  31. {ob_metaflow-2.15.21.4.data → ob_metaflow-2.15.21.5.data}/data/share/metaflow/devtools/Makefile +0 -0
  32. {ob_metaflow-2.15.21.4.data → ob_metaflow-2.15.21.5.data}/data/share/metaflow/devtools/Tiltfile +0 -0
  33. {ob_metaflow-2.15.21.4.data → ob_metaflow-2.15.21.5.data}/data/share/metaflow/devtools/pick_services.sh +0 -0
  34. {ob_metaflow-2.15.21.4.dist-info → ob_metaflow-2.15.21.5.dist-info}/WHEEL +0 -0
  35. {ob_metaflow-2.15.21.4.dist-info → ob_metaflow-2.15.21.5.dist-info}/entry_points.txt +0 -0
  36. {ob_metaflow-2.15.21.4.dist-info → ob_metaflow-2.15.21.5.dist-info}/licenses/LICENSE +0 -0
  37. {ob_metaflow-2.15.21.4.dist-info → ob_metaflow-2.15.21.5.dist-info}/top_level.txt +0 -0
metaflow/_vendor/yaml/serializer.py ADDED
@@ -0,0 +1,111 @@
+
+__all__ = ['Serializer', 'SerializerError']
+
+from .error import YAMLError
+from .events import *
+from .nodes import *
+
+class SerializerError(YAMLError):
+    pass
+
+class Serializer:
+
+    ANCHOR_TEMPLATE = 'id%03d'
+
+    def __init__(self, encoding=None,
+            explicit_start=None, explicit_end=None, version=None, tags=None):
+        self.use_encoding = encoding
+        self.use_explicit_start = explicit_start
+        self.use_explicit_end = explicit_end
+        self.use_version = version
+        self.use_tags = tags
+        self.serialized_nodes = {}
+        self.anchors = {}
+        self.last_anchor_id = 0
+        self.closed = None
+
+    def open(self):
+        if self.closed is None:
+            self.emit(StreamStartEvent(encoding=self.use_encoding))
+            self.closed = False
+        elif self.closed:
+            raise SerializerError("serializer is closed")
+        else:
+            raise SerializerError("serializer is already opened")
+
+    def close(self):
+        if self.closed is None:
+            raise SerializerError("serializer is not opened")
+        elif not self.closed:
+            self.emit(StreamEndEvent())
+            self.closed = True
+
+    #def __del__(self):
+    #    self.close()
+
+    def serialize(self, node):
+        if self.closed is None:
+            raise SerializerError("serializer is not opened")
+        elif self.closed:
+            raise SerializerError("serializer is closed")
+        self.emit(DocumentStartEvent(explicit=self.use_explicit_start,
+            version=self.use_version, tags=self.use_tags))
+        self.anchor_node(node)
+        self.serialize_node(node, None, None)
+        self.emit(DocumentEndEvent(explicit=self.use_explicit_end))
+        self.serialized_nodes = {}
+        self.anchors = {}
+        self.last_anchor_id = 0
+
+    def anchor_node(self, node):
+        if node in self.anchors:
+            if self.anchors[node] is None:
+                self.anchors[node] = self.generate_anchor(node)
+        else:
+            self.anchors[node] = None
+            if isinstance(node, SequenceNode):
+                for item in node.value:
+                    self.anchor_node(item)
+            elif isinstance(node, MappingNode):
+                for key, value in node.value:
+                    self.anchor_node(key)
+                    self.anchor_node(value)
+
+    def generate_anchor(self, node):
+        self.last_anchor_id += 1
+        return self.ANCHOR_TEMPLATE % self.last_anchor_id
+
+    def serialize_node(self, node, parent, index):
+        alias = self.anchors[node]
+        if node in self.serialized_nodes:
+            self.emit(AliasEvent(alias))
+        else:
+            self.serialized_nodes[node] = True
+            self.descend_resolver(parent, index)
+            if isinstance(node, ScalarNode):
+                detected_tag = self.resolve(ScalarNode, node.value, (True, False))
+                default_tag = self.resolve(ScalarNode, node.value, (False, True))
+                implicit = (node.tag == detected_tag), (node.tag == default_tag)
+                self.emit(ScalarEvent(alias, node.tag, implicit, node.value,
+                    style=node.style))
+            elif isinstance(node, SequenceNode):
+                implicit = (node.tag
+                            == self.resolve(SequenceNode, node.value, True))
+                self.emit(SequenceStartEvent(alias, node.tag, implicit,
+                    flow_style=node.flow_style))
+                index = 0
+                for item in node.value:
+                    self.serialize_node(item, node, index)
+                    index += 1
+                self.emit(SequenceEndEvent())
+            elif isinstance(node, MappingNode):
+                implicit = (node.tag
+                            == self.resolve(MappingNode, node.value, True))
+                self.emit(MappingStartEvent(alias, node.tag, implicit,
+                    flow_style=node.flow_style))
+                for key, value in node.value:
+                    self.serialize_node(key, node, None)
+                    self.serialize_node(value, node, key)
+                self.emit(MappingEndEvent())
+            self.ascend_resolver()
+
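The vendored serializer is a verbatim copy of PyYAML's, so its anchor machinery can be seen through the package's normal dump path. A minimal sketch, assuming the vendored package exposes the standard top-level PyYAML API:

    from metaflow._vendor import yaml

    shared = {"x": 1}
    # Serializer.anchor_node() notices the same object appearing twice and
    # names it via ANCHOR_TEMPLATE ('id%03d'), emitting an anchor/alias pair:
    print(yaml.dump([shared, shared]))
    # - &id001
    #   x: 1
    # - *id001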
metaflow/_vendor/yaml/tokens.py ADDED
@@ -0,0 +1,104 @@
+
+class Token(object):
+    def __init__(self, start_mark, end_mark):
+        self.start_mark = start_mark
+        self.end_mark = end_mark
+    def __repr__(self):
+        attributes = [key for key in self.__dict__
+                if not key.endswith('_mark')]
+        attributes.sort()
+        arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
+                for key in attributes])
+        return '%s(%s)' % (self.__class__.__name__, arguments)
+
+#class BOMToken(Token):
+#    id = '<byte order mark>'
+
+class DirectiveToken(Token):
+    id = '<directive>'
+    def __init__(self, name, value, start_mark, end_mark):
+        self.name = name
+        self.value = value
+        self.start_mark = start_mark
+        self.end_mark = end_mark
+
+class DocumentStartToken(Token):
+    id = '<document start>'
+
+class DocumentEndToken(Token):
+    id = '<document end>'
+
+class StreamStartToken(Token):
+    id = '<stream start>'
+    def __init__(self, start_mark=None, end_mark=None,
+            encoding=None):
+        self.start_mark = start_mark
+        self.end_mark = end_mark
+        self.encoding = encoding
+
+class StreamEndToken(Token):
+    id = '<stream end>'
+
+class BlockSequenceStartToken(Token):
+    id = '<block sequence start>'
+
+class BlockMappingStartToken(Token):
+    id = '<block mapping start>'
+
+class BlockEndToken(Token):
+    id = '<block end>'
+
+class FlowSequenceStartToken(Token):
+    id = '['
+
+class FlowMappingStartToken(Token):
+    id = '{'
+
+class FlowSequenceEndToken(Token):
+    id = ']'
+
+class FlowMappingEndToken(Token):
+    id = '}'
+
+class KeyToken(Token):
+    id = '?'
+
+class ValueToken(Token):
+    id = ':'
+
+class BlockEntryToken(Token):
+    id = '-'
+
+class FlowEntryToken(Token):
+    id = ','
+
+class AliasToken(Token):
+    id = '<alias>'
+    def __init__(self, value, start_mark, end_mark):
+        self.value = value
+        self.start_mark = start_mark
+        self.end_mark = end_mark
+
+class AnchorToken(Token):
+    id = '<anchor>'
+    def __init__(self, value, start_mark, end_mark):
+        self.value = value
+        self.start_mark = start_mark
+        self.end_mark = end_mark
+
+class TagToken(Token):
+    id = '<tag>'
+    def __init__(self, value, start_mark, end_mark):
+        self.value = value
+        self.start_mark = start_mark
+        self.end_mark = end_mark
+
+class ScalarToken(Token):
+    id = '<scalar>'
+    def __init__(self, value, plain, start_mark, end_mark, style=None):
+        self.value = value
+        self.plain = plain
+        self.start_mark = start_mark
+        self.end_mark = end_mark
+        self.style = style
+
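These token classes are what the vendored scanner emits. A quick way to see them, again assuming the vendored copy exposes the standard PyYAML entry points:

    from metaflow._vendor import yaml

    # scan() runs only the scanner stage and yields Token instances;
    # Token.__repr__ prints every attribute except the *_mark positions.
    for token in yaml.scan("a: [1, 2]"):
        print(token)
    # StreamStartToken(encoding=None), BlockMappingStartToken(), KeyToken(),
    # ScalarToken(plain=True, style=None, value='a'), ValueToken(), ...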
metaflow/cli.py CHANGED
@@ -7,6 +7,7 @@ from datetime import datetime
 
 import metaflow.tracing as tracing
 from metaflow._vendor import click
+from metaflow.system import _system_logger, _system_monitor
 
 from . import decorators, lint, metaflow_version, parameters, plugins
 from .cli_args import cli_args
@@ -26,7 +27,6 @@ from .metaflow_config import (
     DEFAULT_PACKAGE_SUFFIXES,
 )
 from .metaflow_current import current
-from metaflow.system import _system_monitor, _system_logger
 from .metaflow_environment import MetaflowEnvironment
 from .plugins import (
     DATASTORES,
@@ -37,9 +37,9 @@ from .plugins import (
 )
 from .pylint_wrapper import PyLint
 from .R import metaflow_r_version, use_r
-from .util import get_latest_run_id, resolve_identity
 from .user_configs.config_options import LocalFileInput, config_options
 from .user_configs.config_parameters import ConfigValue
+from .util import get_latest_run_id, resolve_identity
 
 ERASE_TO_EOL = "\033[K"
 HIGHLIGHT = "red"
@@ -55,6 +55,15 @@ def echo_dev_null(*args, **kwargs):
 
 
 def echo_always(line, **kwargs):
+    if kwargs.pop("wrap", False):
+        import textwrap
+
+        indent_str = INDENT if kwargs.get("indent", None) else ""
+        effective_width = 80 - len(indent_str)
+        wrapped = textwrap.wrap(line, width=effective_width, break_long_words=False)
+        line = "\n".join(indent_str + l for l in wrapped)
+        kwargs["indent"] = False
+
     kwargs["err"] = kwargs.get("err", True)
     if kwargs.pop("indent", None):
         line = "\n".join(INDENT + x for x in line.splitlines())
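The new `wrap` keyword reflows a long message to an 80-column budget before the usual indent/err handling kicks in. A standalone sketch of the same textwrap logic (the INDENT value below is a stand-in for cli.py's constant):

    import textwrap

    INDENT = "    "  # stand-in for metaflow/cli.py's INDENT constant
    line = "a long status message " * 8
    wrapped = textwrap.wrap(line, width=80 - len(INDENT), break_long_words=False)
    print("\n".join(INDENT + piece for piece in wrapped))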
metaflow/plugins/argo/argo_workflows.py CHANGED
@@ -3266,8 +3266,8 @@ class ArgoWorkflows(object):
             Trigger().template(
                 TriggerTemplate(self.name)
                 # Trigger a deployed workflow template
-                .argo_workflow_trigger(
-                    ArgoWorkflowTrigger()
+                .k8s_trigger(
+                    StandardK8STrigger()
                     .source(
                         {
                             "resource": {
@@ -4245,6 +4245,10 @@ class TriggerTemplate(object):
         self.payload = tree()
         self.payload["name"] = name
 
+    def k8s_trigger(self, k8s_trigger):
+        self.payload["k8s"] = k8s_trigger.to_json()
+        return self
+
     def argo_workflow_trigger(self, argo_workflow_trigger):
         self.payload["argoWorkflow"] = argo_workflow_trigger.to_json()
         return self
@@ -4319,3 +4323,57 @@ class TriggerParameter(object):
 
     def __str__(self):
         return json.dumps(self.payload, indent=4)
+
+
+class StandardK8STrigger(object):
+    # https://pkg.go.dev/github.com/argoproj/argo-events/pkg/apis/sensor/v1alpha1#StandardK8STrigger
+
+    def __init__(self):
+        tree = lambda: defaultdict(tree)
+        self.payload = tree()
+        self.payload["operation"] = "create"
+
+    def operation(self, operation):
+        self.payload["operation"] = operation
+        return self
+
+    def group(self, group):
+        self.payload["group"] = group
+        return self
+
+    def version(self, version):
+        self.payload["version"] = version
+        return self
+
+    def resource(self, resource):
+        self.payload["resource"] = resource
+        return self
+
+    def namespace(self, namespace):
+        self.payload["namespace"] = namespace
+        return self
+
+    def source(self, source):
+        self.payload["source"] = source
+        return self
+
+    def parameters(self, trigger_parameters):
+        if "parameters" not in self.payload:
+            self.payload["parameters"] = []
+        for trigger_parameter in trigger_parameters:
+            self.payload["parameters"].append(trigger_parameter.to_json())
+        return self
+
+    def live_object(self, live_object=True):
+        self.payload["liveObject"] = live_object
+        return self
+
+    def patch_strategy(self, patch_strategy):
+        self.payload["patchStrategy"] = patch_strategy
+        return self
+
+    def to_json(self):
+        return self.payload
+
+    def __str__(self):
+        return json.dumps(self.payload, indent=4)
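A hedged usage sketch of the builder above; the group/version/resource values are illustrative, and the class relies on `defaultdict` and `json` already being imported in argo_workflows.py:

    trigger = (
        StandardK8STrigger()  # "operation" defaults to "create"
        .group("argoproj.io")  # illustrative GVR for an Argo Workflow resource
        .version("v1alpha1")
        .resource("workflows")
        .source({"resource": {"apiVersion": "argoproj.io/v1alpha1", "kind": "Workflow"}})
    )
    print(trigger)  # __str__ pretty-prints the payload as JSON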
metaflow/plugins/argo/argo_workflows_deployer_objects.py CHANGED
@@ -310,6 +310,43 @@ class ArgoWorkflowsDeployedFlow(DeployedFlow):
 
         return cls(deployer=d)
 
+    @classmethod
+    def get_triggered_run(
+        cls, identifier: str, run_id: str, metadata: Optional[str] = None
+    ):
+        """
+        Retrieves an `ArgoWorkflowsTriggeredRun` object from an identifier,
+        a run id, and optional metadata.
+
+        Parameters
+        ----------
+        identifier : str
+            Deployer-specific identifier for the workflow to retrieve.
+        run_id : str
+            Run ID for which to fetch the triggered run object.
+        metadata : str, optional, default None
+            Optional deployer-specific metadata.
+
+        Returns
+        -------
+        ArgoWorkflowsTriggeredRun
+            An `ArgoWorkflowsTriggeredRun` object representing the
+            triggered run on Argo Workflows.
+        """
+        deployed_flow_obj = cls.from_deployment(identifier, metadata)
+        return ArgoWorkflowsTriggeredRun(
+            deployer=deployed_flow_obj.deployer,
+            content=json.dumps(
+                {
+                    "metadata": deployed_flow_obj.deployer.metadata,
+                    "pathspec": "/".join(
+                        (deployed_flow_obj.deployer.flow_name, run_id)
+                    ),
+                    "name": run_id,
+                }
+            ),
+        )
+
     @property
     def production_token(self) -> Optional[str]:
         """
metaflow/plugins/aws/step_functions/step_functions_deployer_objects.py CHANGED
@@ -84,6 +84,22 @@ class StepFunctionsDeployedFlow(DeployedFlow):
             "from_deployment is not implemented for StepFunctions"
         )
 
+    @classmethod
+    def get_triggered_run(
+        cls, identifier: str, run_id: str, metadata: Optional[str] = None
+    ):
+        """
+        This method is not currently implemented for Step Functions.
+
+        Raises
+        ------
+        NotImplementedError
+            This method is not implemented for Step Functions.
+        """
+        raise NotImplementedError(
+            "get_triggered_run is not implemented for StepFunctions"
+        )
+
     @property
     def production_token(self: DeployedFlow) -> Optional[str]:
         """
metaflow/plugins/cards/card_modules/convert_to_native_type.py CHANGED
@@ -146,7 +146,13 @@ class TaskToDict:
         # Python 3.13 removes the standard ``imghdr`` module. Metaflow
         # vendors a copy so we can keep using ``what`` to detect image
         # formats irrespective of the Python version.
-        from metaflow._vendor import imghdr
+        import warnings
+
+        with warnings.catch_warnings():
+            warnings.filterwarnings(
+                "ignore", category=DeprecationWarning, module="imghdr"
+            )
+            from metaflow._vendor import imghdr
 
         resp = imghdr.what(None, h=data_object)
         # Only accept types supported on the web
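A small sketch of the detection call itself, using a synthetic PNG signature rather than a real card image:

    from metaflow._vendor import imghdr

    png_header = b"\x89PNG\r\n\x1a\n" + b"\x00" * 24  # minimal PNG signature
    print(imghdr.what(None, h=png_header))  # -> 'png'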
metaflow/plugins/kubernetes/kubernetes_jobsets.py CHANGED
@@ -6,6 +6,7 @@ from collections import namedtuple
 from metaflow.exception import MetaflowException
 from metaflow.metaflow_config import KUBERNETES_JOBSET_GROUP, KUBERNETES_JOBSET_VERSION
 from metaflow.tracing import inject_tracing_vars
+from metaflow._vendor import yaml
 
 from .kube_utils import qos_requests_and_limits
 
@@ -1025,34 +1026,32 @@ class KubernetesArgoJobSet(object):
 
     def dump(self):
         client = self._kubernetes_sdk
-
-        data = json.dumps(
-            client.ApiClient().sanitize_for_serialization(
-                dict(
-                    apiVersion=self._group + "/" + self._version,
-                    kind="JobSet",
-                    metadata=client.api_client.ApiClient().sanitize_for_serialization(
-                        client.V1ObjectMeta(
-                            name=self.name,
-                            labels=self._labels,
-                            annotations=self._annotations,
-                        )
-                    ),
-                    spec=dict(
-                        replicatedJobs=[self.control.dump(), self.worker.dump()],
-                        suspend=False,
-                        startupPolicy=None,
-                        successPolicy=None,
-                        # The failure policy helps set the number of retries for the jobset,
-                        # but we don't rely on it and instead rely on either the local scheduler
-                        # or Argo Workflows to handle retries.
-                        failurePolicy=None,
-                        network=None,
-                    ),
-                    status=None,
-                )
+        js_dict = client.ApiClient().sanitize_for_serialization(
+            dict(
+                apiVersion=self._group + "/" + self._version,
+                kind="JobSet",
+                metadata=client.api_client.ApiClient().sanitize_for_serialization(
+                    client.V1ObjectMeta(
+                        name=self.name,
+                        labels=self._labels,
+                        annotations=self._annotations,
+                    )
+                ),
+                spec=dict(
+                    replicatedJobs=[self.control.dump(), self.worker.dump()],
+                    suspend=False,
+                    startupPolicy=None,
+                    successPolicy=None,
+                    # The failure policy helps set the number of retries for the jobset,
+                    # but we don't rely on it and instead rely on either the local scheduler
+                    # or Argo Workflows to handle retries.
+                    failurePolicy=None,
+                    network=None,
+                ),
+                status=None,
             )
         )
+        data = yaml.dump(js_dict, default_flow_style=False, indent=2)
         # The values we populate in the Jobset manifest (for Argo Workflows) piggyback on the Argo Workflow's templating engine.
         # Even though Argo Workflows's templating helps us construct all the necessary IDs and populate the fields
         # required by Metaflow, we run into one glitch. When we construct JSON/YAML serializable objects,
@@ -1067,7 +1066,6 @@ class KubernetesArgoJobSet(object):
         # Since the value of `num_parallel` can be dynamic and can change from run to run, we need to ensure that the
         # value can be passed down dynamically and is **explicitly set as an integer** in the Jobset manifest submitted as
         # part of the Argo Workflow.
-
-        quoted_substring = '"{{=asInt(inputs.parameters.workerCount)}}"'
+        quoted_substring = "'{{=asInt(inputs.parameters.workerCount)}}'"
         unquoted_substring = "{{=asInt(inputs.parameters.workerCount)}}"
         return data.replace(quoted_substring, unquoted_substring)
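The switch from double to single quotes follows directly from swapping json.dumps for yaml.dump: PyYAML single-quotes any string that starts with a flow indicator like `{`. A sketch of the round trip (the key name is illustrative):

    import json
    from metaflow._vendor import yaml

    expr = "{{=asInt(inputs.parameters.workerCount)}}"
    print(json.dumps({"parallelism": expr}))   # value wrapped in double quotes
    data = yaml.dump({"parallelism": expr}, default_flow_style=False, indent=2)
    print(data)  # parallelism: '{{=asInt(inputs.parameters.workerCount)}}'
    # Stripping the quotes lets Argo substitute a bare integer into the manifest:
    print(data.replace("'" + expr + "'", expr))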
metaflow/runner/deployer.py CHANGED
@@ -231,6 +231,67 @@ class DeployedFlowMeta(type):
             }
         )
 
+        def _get_triggered_run_injected_method():
+            def f(
+                cls,
+                identifier: str,
+                run_id: str,
+                metadata: Optional[str] = None,
+                impl: str = DEFAULT_FROM_DEPLOYMENT_IMPL.replace("-", "_"),
+            ) -> "TriggeredRun":
+                """
+                Retrieves a `TriggeredRun` object from an identifier, a run id,
+                and optional metadata. The `impl` parameter specifies the
+                deployer implementation to use (like `argo-workflows`).
+
+                Parameters
+                ----------
+                identifier : str
+                    Deployer-specific identifier for the workflow to retrieve.
+                run_id : str
+                    Run ID for which to fetch the triggered run object.
+                metadata : str, optional, default None
+                    Optional deployer-specific metadata.
+                impl : str, optional, default given by METAFLOW_DEFAULT_FROM_DEPLOYMENT_IMPL
+                    The default implementation to use if not specified.
+
+                Returns
+                -------
+                TriggeredRun
+                    A `TriggeredRun` object representing the triggered run
+                    corresponding to the identifier and the run id.
+                """
+                if impl in allowed_providers:
+                    return (
+                        allowed_providers[impl]
+                        .deployed_flow_type()
+                        .get_triggered_run(identifier, run_id, metadata)
+                    )
+                else:
+                    raise ValueError(
+                        f"No deployer '{impl}' exists; valid deployers are: "
+                        f"{list(allowed_providers.keys())}"
+                    )
+
+            f.__name__ = "get_triggered_run"
+            return f
+
+        def _per_type_get_triggered_run_injected_method(method_name, impl):
+            def f(
+                cls,
+                identifier: str,
+                run_id: str,
+                metadata: Optional[str] = None,
+            ):
+                return (
+                    allowed_providers[impl]
+                    .deployed_flow_type()
+                    .get_triggered_run(identifier, run_id, metadata)
+                )
+
+            f.__name__ = method_name
+            return f
+
         def _from_deployment_injected_method():
             def f(
                 cls,
@@ -347,10 +408,16 @@ class DeployedFlowMeta(type):
             "list_deployed_flows",
             classmethod(_list_deployed_flows_injected_method()),
         )
+        setattr(
+            cls,
+            "get_triggered_run",
+            classmethod(_get_triggered_run_injected_method()),
+        )
 
         for impl in allowed_providers:
             from_deployment_method_name = f"from_{impl}"
             list_deployed_flows_method_name = f"list_{impl}"
+            get_triggered_run_method_name = f"get_triggered_{impl}_run"
 
             setattr(
                 cls,
@@ -372,6 +439,16 @@ class DeployedFlowMeta(type):
                 ),
             )
 
+            setattr(
+                cls,
+                get_triggered_run_method_name,
+                classmethod(
+                    _per_type_get_triggered_run_injected_method(
+                        get_triggered_run_method_name, impl
+                    )
+                ),
+            )
+
         return cls
 
 
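Taken together, the metaclass now injects one generic entry point plus one helper per provider. A hedged sketch of how they might be called (flow and run ids are illustrative; the provider key follows the same underscored naming as the existing `from_{impl}` methods):

    from metaflow.runner.deployer import DeployedFlow

    # Generic form; `impl` defaults to METAFLOW_DEFAULT_FROM_DEPLOYMENT_IMPL.
    run = DeployedFlow.get_triggered_run(
        "helloflow", "argo-helloflow-abc123", impl="argo_workflows"
    )

    # Per-provider form injected as get_triggered_{impl}_run:
    run = DeployedFlow.get_triggered_argo_workflows_run(
        "helloflow", "argo-helloflow-abc123"
    )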
metaflow/vendor.py CHANGED
@@ -63,14 +63,29 @@ def find_vendored_libs(vendor_dir, whitelist, whitelist_dirs):
     return vendored_libs, paths
 
 
-def fetch_licenses(*info_dir, vendor_dir):
-    for file in chain.from_iterable(map(iter_subtree, info_dir)):
-        if "LICENSE" in file.name:
-            library = file.parent.name.split("-")[0]
-            shutil.copy(file, vendor_dir / ("%s.LICENSE" % library))
-        else:
+def fetch_licenses(*info_dirs, vendor_dir):
+    for dist_info in info_dirs:
+        metadata_file = dist_info / "METADATA"
+        if not metadata_file.exists():
+            continue
+
+        project_name = None
+        for line in metadata_file.read_text("utf-8").splitlines():
+            if line.startswith("Name: "):
+                project_name = line.split("Name: ", 1)[1].strip()
+                break
+        if not project_name:
             continue
 
+        for item in dist_info.iterdir():
+            if item.is_file() and re.search(r"(LICENSE|COPYING)", item.name, re.I):
+                shutil.copy(item, vendor_dir / f"{project_name}.LICENSE")
+            elif item.is_dir() and item.name.lower() == "licenses":
+                for license_file in item.iterdir():
+                    if license_file.is_file():
+                        dest_name = f"{project_name}.{license_file.name}"
+                        shutil.copy(license_file, vendor_dir / dest_name)
+
 
 def vendor(vendor_dir):
     # remove everything
@@ -108,6 +123,8 @@ def vendor(vendor_dir):
             "-r",
             "_vendor/vendor_%s.txt" % subdir,
             "--no-compile",
+            "--no-binary",
+            ":all:",
         ]
     )
metaflow/version.py CHANGED
@@ -1 +1 @@
-metaflow_version = "2.15.21.4"
+metaflow_version = "2.15.21.5"
{ob_metaflow-2.15.21.4.dist-info → ob_metaflow-2.15.21.5.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ob-metaflow
-Version: 2.15.21.4
+Version: 2.15.21.5
 Summary: Metaflow: More AI and ML, Less Engineering
 Author: Netflix, Outerbounds & the Metaflow Community
 Author-email: help@outerbounds.co
@@ -12,7 +12,7 @@ Requires-Dist: boto3
 Requires-Dist: pylint
 Requires-Dist: kubernetes
 Provides-Extra: stubs
-Requires-Dist: metaflow-stubs==2.15.21.4; extra == "stubs"
+Requires-Dist: metaflow-stubs==2.15.21.5; extra == "stubs"
 Dynamic: author
 Dynamic: author-email
 Dynamic: description