ob-metaflow 2.10.7.4__py2.py3-none-any.whl → 2.10.9.1__py2.py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release: this version of ob-metaflow might be problematic.

Files changed (52)
  1. metaflow/cards.py +2 -0
  2. metaflow/decorators.py +1 -1
  3. metaflow/metaflow_config.py +2 -0
  4. metaflow/plugins/__init__.py +4 -0
  5. metaflow/plugins/airflow/airflow_cli.py +1 -1
  6. metaflow/plugins/argo/argo_workflows_cli.py +1 -1
  7. metaflow/plugins/aws/aws_utils.py +1 -1
  8. metaflow/plugins/aws/batch/batch.py +4 -0
  9. metaflow/plugins/aws/batch/batch_cli.py +3 -0
  10. metaflow/plugins/aws/batch/batch_client.py +40 -11
  11. metaflow/plugins/aws/batch/batch_decorator.py +1 -0
  12. metaflow/plugins/aws/step_functions/step_functions.py +1 -0
  13. metaflow/plugins/aws/step_functions/step_functions_cli.py +1 -1
  14. metaflow/plugins/azure/azure_exceptions.py +1 -1
  15. metaflow/plugins/cards/card_cli.py +413 -28
  16. metaflow/plugins/cards/card_client.py +16 -7
  17. metaflow/plugins/cards/card_creator.py +228 -0
  18. metaflow/plugins/cards/card_datastore.py +124 -26
  19. metaflow/plugins/cards/card_decorator.py +40 -86
  20. metaflow/plugins/cards/card_modules/base.html +12 -0
  21. metaflow/plugins/cards/card_modules/basic.py +74 -8
  22. metaflow/plugins/cards/card_modules/bundle.css +1 -170
  23. metaflow/plugins/cards/card_modules/card.py +65 -0
  24. metaflow/plugins/cards/card_modules/components.py +446 -81
  25. metaflow/plugins/cards/card_modules/convert_to_native_type.py +9 -3
  26. metaflow/plugins/cards/card_modules/main.js +250 -21
  27. metaflow/plugins/cards/card_modules/test_cards.py +117 -0
  28. metaflow/plugins/cards/card_resolver.py +0 -2
  29. metaflow/plugins/cards/card_server.py +361 -0
  30. metaflow/plugins/cards/component_serializer.py +506 -42
  31. metaflow/plugins/cards/exception.py +20 -1
  32. metaflow/plugins/datastores/azure_storage.py +1 -2
  33. metaflow/plugins/datastores/gs_storage.py +1 -2
  34. metaflow/plugins/datastores/s3_storage.py +2 -1
  35. metaflow/plugins/datatools/s3/s3.py +24 -11
  36. metaflow/plugins/env_escape/client.py +2 -12
  37. metaflow/plugins/env_escape/client_modules.py +18 -14
  38. metaflow/plugins/env_escape/server.py +18 -11
  39. metaflow/plugins/env_escape/utils.py +12 -0
  40. metaflow/plugins/gcp/gs_exceptions.py +1 -1
  41. metaflow/plugins/gcp/gs_utils.py +1 -1
  42. metaflow/plugins/pypi/conda_environment.py +5 -6
  43. metaflow/plugins/pypi/pip.py +2 -2
  44. metaflow/plugins/pypi/utils.py +15 -0
  45. metaflow/task.py +1 -0
  46. metaflow/version.py +1 -1
  47. {ob_metaflow-2.10.7.4.dist-info → ob_metaflow-2.10.9.1.dist-info}/METADATA +1 -1
  48. {ob_metaflow-2.10.7.4.dist-info → ob_metaflow-2.10.9.1.dist-info}/RECORD +52 -50
  49. {ob_metaflow-2.10.7.4.dist-info → ob_metaflow-2.10.9.1.dist-info}/LICENSE +0 -0
  50. {ob_metaflow-2.10.7.4.dist-info → ob_metaflow-2.10.9.1.dist-info}/WHEEL +0 -0
  51. {ob_metaflow-2.10.7.4.dist-info → ob_metaflow-2.10.9.1.dist-info}/entry_points.txt +0 -0
  52. {ob_metaflow-2.10.7.4.dist-info → ob_metaflow-2.10.9.1.dist-info}/top_level.txt +0 -0
metaflow/plugins/cards/exception.py CHANGED
@@ -116,7 +116,7 @@ class UnresolvableDatastoreException(MetaflowException):
         super(UnresolvableDatastoreException, self).__init__(msg)
 
 
-class IncorrectArguementException(MetaflowException):
+class IncorrectArgumentException(MetaflowException):
     headline = (
         "`get_cards` function requires a `Task` object or pathspec as an argument"
     )
@@ -138,3 +138,22 @@ class IncorrectPathspecException(MetaflowException):
             % pthspec
         )
         super().__init__(msg=msg, lineno=None)
+
+
+class ComponentOverwriteNotSupportedException(MetaflowException):
+    headline = "Component overwrite is not supported"
+
+    def __init__(self, component_id, card_id, card_type):
+        id_str = ""
+        if card_id is not None:
+            id_str = "id='%s'" % card_id
+        msg = (
+            "Card component overwrite is not supported. "
+            "Component with id %s already exists in the @card(type='%s', %s). \n"
+            "Instead of calling `current.card.components[ID] = MyComponent`. "
+            "You can overwrite the entire component Array by calling "
+            "`current.card.components = [MyComponent]`"
+        ) % (component_id, card_type, id_str)
+        super().__init__(
+            msg=msg,
+        )
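A minimal sketch of the usage rule this new exception enforces, based only on the guidance embedded in its message; the Markdown component and placement inside a @card-decorated step are illustrative:

    from metaflow import current
    from metaflow.cards import Markdown

    # Inside a @card(type="blank") @step:
    # current.card.components["status"] = Markdown("# done")  # item assignment raises ComponentOverwriteNotSupportedException
    current.card.components = [Markdown("# done")]             # replacing the whole component array is the supported path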
metaflow/plugins/datastores/azure_storage.py CHANGED
@@ -250,9 +250,8 @@ class _AzureRootClient(object):
 class AzureStorage(DataStoreStorage):
     TYPE = "azure"
 
+    @check_azure_deps
     def __init__(self, root=None):
-        # cannot decorate __init__... invoke it with dummy decoratee
-        check_azure_deps(lambda: 0)
         super(AzureStorage, self).__init__(root)
         self._tmproot = ARTIFACT_LOCALROOT
         self._default_scope_token = None
metaflow/plugins/datastores/gs_storage.py CHANGED
@@ -145,9 +145,8 @@ class _GSRootClient(object):
 class GSStorage(DataStoreStorage):
     TYPE = "gs"
 
+    @check_gs_deps
     def __init__(self, root=None):
-        # cannot decorate __init__... invoke it with dummy decoratee
-        check_gs_deps(lambda: 0)
        super(GSStorage, self).__init__(root)
         self._tmproot = ARTIFACT_LOCALROOT
         self._root_client = None
metaflow/plugins/datastores/s3_storage.py CHANGED
@@ -2,7 +2,7 @@ import os
 
 from itertools import starmap
 
-from metaflow.plugins.datatools.s3.s3 import S3, S3Client, S3PutObject
+from metaflow.plugins.datatools.s3.s3 import S3, S3Client, S3PutObject, check_s3_deps
 from metaflow.metaflow_config import DATASTORE_SYSROOT_S3, ARTIFACT_LOCALROOT
 from metaflow.datastore.datastore_storage import CloseAfterUse, DataStoreStorage
 
@@ -18,6 +18,7 @@ except:
 class S3Storage(DataStoreStorage):
     TYPE = "s3"
 
+    @check_s3_deps
     def __init__(self, root=None):
         super(S3Storage, self).__init__(root)
         self.s3_client = S3Client()
metaflow/plugins/datatools/s3/s3.py CHANGED
@@ -51,15 +51,25 @@ from .s3util import (
 if TYPE_CHECKING:
     from metaflow.client import Run
 
-try:
-    import boto3
-    from boto3.s3.transfer import TransferConfig
 
-    DOWNLOAD_FILE_THRESHOLD = 2 * TransferConfig().multipart_threshold
-    DOWNLOAD_MAX_CHUNK = 2 * 1024 * 1024 * 1024 - 1
-    boto_found = True
-except:
-    boto_found = False
+def _check_and_init_s3_deps():
+    try:
+        import boto3
+        from boto3.s3.transfer import TransferConfig
+    except (ImportError, ModuleNotFoundError):
+        raise MetaflowException("You need to install 'boto3' in order to use S3.")
+
+
+def check_s3_deps(func):
+    """The decorated function checks S3 dependencies (as needed for AWS S3 storage backend).
+    This includes boto3.
+    """
+
+    def _inner_func(*args, **kwargs):
+        _check_and_init_s3_deps()
+        return func(*args, **kwargs)
+
+    return _inner_func
 
 
 TEST_INJECT_RETRYABLE_FAILURES = int(
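The hunk above replaces the module-level boto_found flag with a check_s3_deps decorator, applied to S3.__init__ in the hunks that follow (and to the storage backends' __init__ methods earlier), so importing the module no longer hard-requires boto3. A generic, stand-alone sketch of the same lazy dependency-check pattern, not the Metaflow implementation itself:

    import functools


    def check_deps(func):
        """Defer the optional-dependency import until the decorated callable actually runs."""

        @functools.wraps(func)
        def _inner(*args, **kwargs):
            try:
                import boto3  # noqa: F401  # only needed on this code path
            except ImportError:
                raise RuntimeError("You need to install 'boto3' in order to use S3.")
            return func(*args, **kwargs)

        return _inner


    class Storage:
        @check_deps
        def __init__(self, root=None):
            # boto3 is guaranteed to be importable by the time we reach this point
            self.root = root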
@@ -498,6 +508,7 @@ class S3(object):
     def get_root_from_config(cls, echo, create_on_absent=True):
         return DATATOOLS_S3ROOT
 
+    @check_s3_deps
     def __init__(
         self,
         tmproot: str = TEMPDIR,
@@ -508,9 +519,6 @@ class S3(object):
         encryption: Optional[str] = S3_SERVER_SIDE_ENCRYPTION,
         **kwargs
     ):
-        if not boto_found:
-            raise MetaflowException("You need to install 'boto3' in order to use S3.")
-
         if run:
             # 1. use a (current) run ID with optional customizations
             if DATATOOLS_S3ROOT is None:
@@ -875,6 +883,11 @@ class S3(object):
         `S3Object`
             An S3Object corresponding to the object requested.
         """
+        from boto3.s3.transfer import TransferConfig
+
+        DOWNLOAD_FILE_THRESHOLD = 2 * TransferConfig().multipart_threshold
+        DOWNLOAD_MAX_CHUNK = 2 * 1024 * 1024 * 1024 - 1
+
         url, r = self._url_and_range(key)
         src = urlparse(url)
 
metaflow/plugins/env_escape/client.py CHANGED
@@ -37,6 +37,7 @@ from .data_transferer import DataTransferer, ObjReference
 from .exception_transferer import load_exception
 from .override_decorators import LocalAttrOverride, LocalException, LocalOverride
 from .stub import create_class
+from .utils import get_canonical_name
 
 BIND_TIMEOUT = 0.1
 BIND_RETRY = 0
@@ -336,7 +337,7 @@ class Client(object):
     def get_local_class(self, name, obj_id=None):
         # Gets (and creates if needed), the class mapping to the remote
         # class of name 'name'.
-        name = self._get_canonical_name(name)
+        name = get_canonical_name(name, self._aliases)
         if name == "function":
             # Special handling of pickled functions. We create a new class that
             # simply has a __call__ method that will forward things back to
@@ -398,17 +399,6 @@ class Client(object):
         local_instance = local_class(self, remote_class_name, obj_id)
         return local_instance
 
-    def _get_canonical_name(self, name):
-        # We look at the aliases looking for the most specific match first
-        base_name = self._aliases.get(name)
-        if base_name is not None:
-            return base_name
-        for idx in reversed([pos for pos, char in enumerate(name) if char == "."]):
-            base_name = self._aliases.get(name[:idx])
-            if base_name is not None:
-                return ".".join([base_name, name[idx + 1 :]])
-        return name
-
     def _communicate(self, msg):
         if os.getpid() != self._active_pid:
             raise RuntimeError(
metaflow/plugins/env_escape/client_modules.py CHANGED
@@ -8,6 +8,7 @@ import sys
 from .consts import OP_CALLFUNC, OP_GETVAL, OP_SETVAL
 from .client import Client
 from .override_decorators import LocalException
+from .utils import get_canonical_name
 
 
 def _clean_client(client):
@@ -23,6 +24,7 @@ class _WrappedModule(object):
             r"^%s\.([a-zA-Z_][a-zA-Z0-9_]*)$" % prefix.replace(".", r"\.") # noqa W605
         )
         self._exports = {}
+        self._aliases = exports["aliases"]
         for k in ("classes", "functions", "values"):
             result = []
             for item in exports[k]:
@@ -43,6 +45,11 @@
             return self._prefix
         if name in ("__file__", "__path__"):
             return self._client.name
+
+        # Make the name canonical because the prefix is also canonical.
+        name = get_canonical_name(self._prefix + "." + name, self._aliases)[
+            len(self._prefix) + 1 :
+        ]
         if name in self._exports["classes"]:
             # We load classes lazily
             return self._client.get_local_class("%s.%s" % (self._prefix, name))
@@ -87,6 +94,7 @@
             "_client",
             "_exports",
             "_exception_classes",
+            "_aliases",
         ):
             object.__setattr__(self, name, value)
             return
@@ -95,6 +103,11 @@
             # module when loading
             object.__setattr__(self, name, value)
             return
+
+        # Make the name canonical because the prefix is also canonical.
+        name = get_canonical_name(self._prefix + "." + name, self._aliases)[
+            len(self._prefix) + 1 :
+        ]
         if name in self._exports["values"]:
             self._client.stub_request(
                 None, OP_SETVAL, "%s.%s" % (self._prefix, name), value
@@ -126,7 +139,7 @@ class ModuleImporter(object):
 
     def find_module(self, fullname, path=None):
         if self._handled_modules is not None:
-            if fullname in self._handled_modules:
+            if get_canonical_name(fullname, self._aliases) in self._handled_modules:
                 return self
             return None
         if any([fullname.startswith(prefix) for prefix in self._module_prefixes]):
@@ -224,24 +237,15 @@
             self._handled_modules[prefix] = _WrappedModule(
                 self, prefix, exports, formed_exception_classes, self._client
             )
-        fullname = self._get_canonical_name(fullname)
-        module = self._handled_modules.get(fullname)
+        canonical_fullname = get_canonical_name(fullname, self._aliases)
+        # Modules are created canonically but we need to return something for any
+        # of the aliases.
+        module = self._handled_modules.get(canonical_fullname)
         if module is None:
             raise ImportError
         sys.modules[fullname] = module
         return module
 
-    def _get_canonical_name(self, name):
-        # We look at the aliases looking for the most specific match first
-        base_name = self._aliases.get(name)
-        if base_name is not None:
-            return base_name
-        for idx in reversed([pos for pos, char in enumerate(name) if char == "."]):
-            base_name = self._aliases.get(name[:idx])
-            if base_name is not None:
-                return ".".join([base_name, name[idx + 1 :]])
-        return name
-
 
 
 def create_modules(python_executable, pythonpath, max_pickle_version, path, prefixes):
metaflow/plugins/env_escape/server.py CHANGED
@@ -53,7 +53,7 @@ from .override_decorators import (
     RemoteExceptionSerializer,
 )
 from .exception_transferer import dump_exception
-from .utils import get_methods
+from .utils import get_methods, get_canonical_name
 
 BIND_TIMEOUT = 0.1
 BIND_RETRY = 1
@@ -61,7 +61,6 @@ BIND_RETRY = 1
 
 class Server(object):
     def __init__(self, config_dir, max_pickle_version):
-
         self._max_pickle_version = data_transferer.defaultProtocol = max_pickle_version
         try:
             mappings = importlib.import_module(".server_mappings", package=config_dir)
@@ -108,6 +107,11 @@ class Server(object):
             for alias in aliases:
                 a = self._aliases.setdefault(alias, base_name)
                 if a != base_name:
+                    # Technically we could have a that aliases b and b that aliases c
+                    # and then a that aliases c. This would error out in that case
+                    # even though it is valid. It is easy for the user to get around
+                    # this by listing aliases in the same order so we don't support
+                    # it for now.
                     raise ValueError(
                         "%s is an alias to both %s and %s" % (alias, base_name, a)
                     )
@@ -155,12 +159,13 @@ class Server(object):
         parent_to_child = {}
 
         for ex_name, ex_cls in self._known_exceptions.items():
+            ex_name_canonical = get_canonical_name(ex_name, self._aliases)
            parents = []
            for base in ex_cls.__mro__[1:]:
                if base is object:
                    raise ValueError(
-                        "Exported exceptions not rooted in a builtin exception are not supported: %s"
-                        % ex_name
+                        "Exported exceptions not rooted in a builtin exception "
+                        "are not supported: %s." % ex_name
                    )
                if base.__module__ == "builtins":
                    # We found our base exception
@@ -168,17 +173,19 @@
                    break
                else:
                    fqn = ".".join([base.__module__, base.__name__])
-                    if fqn in self._known_exceptions:
-                        parents.append(fqn)
-                        children = parent_to_child.setdefault(fqn, [])
-                        children.append(ex_name)
+                    canonical_fqn = get_canonical_name(fqn, self._aliases)
+                    if canonical_fqn in self._known_exceptions:
+                        parents.append(canonical_fqn)
+                        children = parent_to_child.setdefault(canonical_fqn, [])
+                        children.append(ex_name_canonical)
                    else:
                        raise ValueError(
                            "Exported exception %s has non exported and non builtin parent "
-                            "exception: %s" % (ex_name, fqn)
+                            "exception: %s. Known exceptions: %s"
+                            % (ex_name, fqn, str(self._known_exceptions))
                        )
-            name_to_parent_count[ex_name] = len(parents) - 1
-            name_to_parents[ex_name] = parents
+            name_to_parent_count[ex_name_canonical] = len(parents) - 1
+            name_to_parents[ex_name_canonical] = parents
 
         # We now form the exceptions and put them in self._known_exceptions in
         # the proper order (topologically)
metaflow/plugins/env_escape/utils.py CHANGED
@@ -20,3 +20,15 @@ def get_methods(class_object):
         elif isinstance(attribute, classmethod):
             all_methods["___c___%s" % name] = inspect.getdoc(attribute)
     return all_methods
+
+
+def get_canonical_name(name, aliases):
+    # We look at the aliases looking for the most specific match first
+    base_name = aliases.get(name)
+    if base_name is not None:
+        return base_name
+    for idx in reversed([pos for pos, char in enumerate(name) if char == "."]):
+        base_name = aliases.get(name[:idx])
+        if base_name is not None:
+            return ".".join([base_name, name[idx + 1 :]])
+    return name
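A short worked example of the alias resolution this new helper performs; the alias table below is illustrative:

    from metaflow.plugins.env_escape.utils import get_canonical_name

    aliases = {"legacy_pkg": "real_pkg", "legacy_pkg.sub": "real_pkg.submodule"}

    get_canonical_name("legacy_pkg", aliases)            # -> "real_pkg" (exact match)
    get_canonical_name("legacy_pkg.sub.Thing", aliases)  # -> "real_pkg.submodule.Thing" (longest dotted prefix wins)
    get_canonical_name("unrelated.mod", aliases)         # -> "unrelated.mod" (no alias, returned unchanged)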
metaflow/plugins/gcp/gs_exceptions.py CHANGED
@@ -2,4 +2,4 @@ from metaflow.exception import MetaflowException
 
 
 class MetaflowGSPackageError(MetaflowException):
-    headline = "Missing required packages google-cloud-storage google-auth"
+    headline = "Missing required packages 'google-cloud-storage' and 'google-auth'"
metaflow/plugins/gcp/gs_utils.py CHANGED
@@ -34,7 +34,7 @@ def _check_and_init_gs_deps():
 
 
 def check_gs_deps(func):
-    """The decorated function checks GS dependencies (as needed for Azure storage backend). This includes
+    """The decorated function checks GS dependencies (as needed for Google Cloud storage backend). This includes
     various GCP SDK packages, as well as a Python version of >=3.7
     """
 
metaflow/plugins/pypi/conda_environment.py CHANGED
@@ -20,7 +20,7 @@ from metaflow.metaflow_environment import MetaflowEnvironment
 from metaflow.metaflow_profile import profile
 
 from . import MAGIC_FILE, _datastore_packageroot
-from .utils import conda_platform
+from .utils import conda_platform, generate_cache_path, parse_filename_from_url
 
 
 class CondaEnvironmentException(MetaflowException):
@@ -107,7 +107,7 @@ class CondaEnvironment(MetaflowEnvironment):
        local_packages = {
            url: {
                # Path to package in datastore.
-                "path": urlparse(url).netloc + urlparse(url).path,
+                "path": generate_cache_path(url, local_path),
                # Path to package on local disk.
                "local_path": local_path,
            }
@@ -122,9 +122,8 @@ class CondaEnvironment(MetaflowEnvironment):
                # Cache only those packages that manifest is unaware of
                local_packages.pop(package["url"], None)
            else:
-                package["path"] = (
-                    urlparse(package["url"]).netloc
-                    + urlparse(package["url"]).path
+                package["path"] = generate_cache_path(
+                    package["url"], parse_filename_from_url(package["url"])
                )
                dirty.add(id_)
 
@@ -187,7 +186,7 @@ class CondaEnvironment(MetaflowEnvironment):
            if decorator.name in ["conda", "pypi"]:
                # handle @conda/@pypi(disabled=True)
                disabled = decorator.attributes["disabled"]
-                return disabled or str(disabled).lower() != "false"
+                return str(disabled).lower() == "true"
            return False
 
    @functools.lru_cache(maxsize=None)
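The one-line change in the last hunk tightens how the `disabled` attribute is interpreted; a quick illustration of how the old and new expressions differ for a few representative values (the value set is illustrative):

    for disabled in (True, False, None, "false"):
        old = bool(disabled or str(disabled).lower() != "false")  # previous expression
        new = str(disabled).lower() == "true"                     # new expression
        print(repr(disabled), old, new)

    # True     True   True
    # False    False  False
    # None     True   False   <- an unset attribute no longer counts as disabled
    # 'false'  True   False   <- the string "false" no longer counts as disabled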
metaflow/plugins/pypi/pip.py CHANGED
@@ -9,7 +9,7 @@ from metaflow.exception import MetaflowException
 from metaflow.util import which
 
 from .micromamba import Micromamba
-from .utils import pip_tags
+from .utils import parse_filename_from_url, pip_tags
 
 
 class PipException(MetaflowException):
@@ -118,7 +118,7 @@ class Pip(object):
         for package in packages:
             cmd.append("{url}".format(**package))
             metadata["{url}".format(**package)] = "{prefix}/.pip/wheels/{wheel}".format(
-                prefix=prefix, wheel=package["url"].split("/")[-1]
+                prefix=prefix, wheel=parse_filename_from_url(package["url"])
             )
         self._call(prefix, cmd)
         # write the url to wheel mappings in a magic location
metaflow/plugins/pypi/utils.py CHANGED
@@ -1,3 +1,4 @@
+import os
 import platform
 import sys
 
@@ -13,6 +14,7 @@ else:
     from metaflow._vendor.packaging import tags
 
 from metaflow.exception import MetaflowException
+from urllib.parse import unquote, urlparse
 
 
 def conda_platform():
@@ -73,3 +75,16 @@ def pip_tags(python_version, mamba_platform):
     supported.extend(tags.cpython_tags(py_version, abis, platforms))
     supported.extend(tags.compatible_tags(py_version, interpreter, platforms))
     return supported
+
+
+def parse_filename_from_url(url):
+    # Separate method as it might require additional checks for the parsing.
+    filename = url.split("/")[-1]
+    return unquote(filename)
+
+
+def generate_cache_path(url, local_path):
+    base, _ = os.path.split(urlparse(url).path)
+    _, localfile = os.path.split(local_path)
+    unquoted_base = unquote(base)
+    return urlparse(url).netloc + os.path.join(unquoted_base, localfile)
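A short worked example of what the two new helpers return for a typical percent-encoded wheel URL (the URL and local path are illustrative):

    from metaflow.plugins.pypi.utils import generate_cache_path, parse_filename_from_url

    url = "https://files.example.com/packages/torch-2.1.0%2Bcpu-cp311-cp311-linux_x86_64.whl"

    parse_filename_from_url(url)
    # -> 'torch-2.1.0+cpu-cp311-cp311-linux_x86_64.whl'   (percent-encoding unquoted)

    generate_cache_path(url, "/tmp/wheels/torch-2.1.0+cpu-cp311-cp311-linux_x86_64.whl")
    # -> 'files.example.com/packages/torch-2.1.0+cpu-cp311-cp311-linux_x86_64.whl'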
metaflow/task.py CHANGED
@@ -576,6 +576,7 @@ class MetaflowTask(object):
                     inputs,
                 )
 
+            for deco in decorators:
                 # decorators can actually decorate the step function,
                 # or they can replace it altogether. This functionality
                 # is used e.g. by catch_decorator which switches to a
metaflow/version.py CHANGED
@@ -1 +1 @@
-metaflow_version = "2.10.7.4"
+metaflow_version = "2.10.9.1"
{ob_metaflow-2.10.7.4.dist-info → ob_metaflow-2.10.9.1.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ob-metaflow
-Version: 2.10.7.4
+Version: 2.10.9.1
 Summary: Metaflow: More Data Science, Less Engineering
 Author: Netflix, Outerbounds & the Metaflow Community
 Author-email: help@outerbounds.co