metaflow 2.8.2__py2.py3-none-any.whl → 2.8.3__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (31)
  1. metaflow/client/core.py +14 -4
  2. metaflow/cmd/configure_cmd.py +3 -3
  3. metaflow/cmd/main_cli.py +9 -14
  4. metaflow/current.py +15 -0
  5. metaflow/metaflow_config.py +21 -0
  6. metaflow/metaflow_environment.py +5 -4
  7. metaflow/package.py +1 -1
  8. metaflow/plugins/airflow/airflow.py +0 -1
  9. metaflow/plugins/argo/argo_workflows.py +2 -0
  10. metaflow/plugins/aws/aws_utils.py +6 -1
  11. metaflow/plugins/aws/batch/batch.py +30 -8
  12. metaflow/plugins/aws/batch/batch_cli.py +12 -0
  13. metaflow/plugins/aws/batch/batch_client.py +39 -2
  14. metaflow/plugins/aws/batch/batch_decorator.py +23 -0
  15. metaflow/plugins/aws/step_functions/step_functions.py +7 -4
  16. metaflow/plugins/aws/step_functions/step_functions_cli.py +0 -1
  17. metaflow/plugins/cards/card_modules/convert_to_native_type.py +67 -5
  18. metaflow/plugins/conda/conda_environment.py +2 -2
  19. metaflow/plugins/conda/conda_step_decorator.py +7 -1
  20. metaflow/plugins/datatools/s3/s3.py +2 -2
  21. metaflow/plugins/env_escape/communication/channel.py +1 -1
  22. metaflow/plugins/kubernetes/kubernetes.py +4 -0
  23. metaflow/plugins/kubernetes/kubernetes_decorator.py +6 -2
  24. metaflow/plugins/kubernetes/kubernetes_job.py +17 -2
  25. metaflow/plugins/metadata/service.py +3 -2
  26. {metaflow-2.8.2.dist-info → metaflow-2.8.3.dist-info}/METADATA +1 -6
  27. {metaflow-2.8.2.dist-info → metaflow-2.8.3.dist-info}/RECORD +31 -31
  28. {metaflow-2.8.2.dist-info → metaflow-2.8.3.dist-info}/LICENSE +0 -0
  29. {metaflow-2.8.2.dist-info → metaflow-2.8.3.dist-info}/WHEEL +0 -0
  30. {metaflow-2.8.2.dist-info → metaflow-2.8.3.dist-info}/entry_points.txt +0 -0
  31. {metaflow-2.8.2.dist-info → metaflow-2.8.3.dist-info}/top_level.txt +0 -0
metaflow/client/core.py CHANGED
@@ -265,6 +265,7 @@ class MetaflowObject(object):
265
265
  self._parent = _parent
266
266
  self._path_components = None
267
267
  self._attempt = attempt
268
+ self._namespace_check = _namespace_check
268
269
 
269
270
  if self._attempt is not None:
270
271
  if self._NAME not in ["task", "artifact"]:
@@ -315,7 +316,7 @@ class MetaflowObject(object):
315
316
  self._user_tags = frozenset(self._object.get("tags") or [])
316
317
  self._system_tags = frozenset(self._object.get("system_tags") or [])
317
318
 
318
- if _namespace_check and not self.is_in_namespace():
319
+ if self._namespace_check and not self.is_in_namespace():
319
320
  raise MetaflowNamespaceMismatch(current_namespace)
320
321
 
321
322
  def _get_object(self, *path_components):
@@ -330,7 +331,8 @@ class MetaflowObject(object):
330
331
  """
331
332
  Iterate over all child objects of this object if any.
332
333
 
333
- Note that only children present in the current namespace are returned.
334
+ Note that only children present in the current namespace are returned iff
335
+ _namespace_check is set.
334
336
 
335
337
  Returns
336
338
  -------
@@ -338,7 +340,8 @@ class MetaflowObject(object):
338
340
  Iterator over all children
339
341
  """
340
342
  query_filter = {}
341
- if current_namespace:
343
+ # skip namespace filtering if _namespace_check is False
344
+ if self._namespace_check and current_namespace:
342
345
  query_filter = {"any_tags": current_namespace}
343
346
 
344
347
  unfiltered_children = self._metaflow.metadata.get_object(
@@ -381,6 +384,10 @@ class MetaflowObject(object):
381
384
  if all(tag in child.tags for tag in tags):
382
385
  yield child
383
386
 
387
+ def _ipython_key_completions_(self):
388
+ """Returns available options for ipython auto-complete."""
389
+ return [child.id for child in self._filtered_children()]
390
+
384
391
  @classmethod
385
392
  def _url_token(cls):
386
393
  return "%ss" % cls._NAME
@@ -444,7 +451,10 @@ class MetaflowObject(object):
444
451
  obj = self._get_child(id)
445
452
  if obj:
446
453
  return _CLASSES[self._CHILD_CLASS](
447
- attempt=self._attempt, _object=obj, _parent=self
454
+ attempt=self._attempt,
455
+ _object=obj,
456
+ _parent=self,
457
+ _namespace_check=self._namespace_check,
448
458
  )
449
459
  else:
450
460
  raise KeyError(id)
@@ -249,13 +249,13 @@ def configure_s3_datastore(existing_env):
249
249
  show_default=True,
250
250
  )
251
251
  # Set Amazon S3 folder for datatools.
252
- env["METAFLOW_DATATOOLS_SYSROOT_S3"] = click.prompt(
253
- cyan("[METAFLOW_DATATOOLS_SYSROOT_S3]")
252
+ env["METAFLOW_DATATOOLS_S3ROOT"] = click.prompt(
253
+ cyan("[METAFLOW_DATATOOLS_S3ROOT]")
254
254
  + yellow(" (optional)")
255
255
  + " Amazon S3 folder for Metaflow datatools "
256
256
  + "(s3://<bucket>/<prefix>).",
257
257
  default=existing_env.get(
258
- "METAFLOW_DATATOOLS_SYSROOT_S3",
258
+ "METAFLOW_DATATOOLS_S3ROOT",
259
259
  os.path.join(env["METAFLOW_DATASTORE_SYSROOT_S3"], "data"),
260
260
  ),
261
261
  show_default=True,
metaflow/cmd/main_cli.py CHANGED
@@ -4,7 +4,8 @@ from metaflow._vendor import click
4
4
 
5
5
  from metaflow.extension_support.cmd import process_cmds, resolve_cmds
6
6
  from metaflow.plugins.datastores.local_storage import LocalStorage
7
- from metaflow.metaflow_config import DATASTORE_LOCAL_DIR
7
+ from metaflow.metaflow_config import DATASTORE_LOCAL_DIR, CONTACT_INFO
8
+ from metaflow.metaflow_version import get_version
8
9
 
9
10
  from .util import echo_always
10
11
 
@@ -80,24 +81,18 @@ def start(ctx):
80
81
  echo("Metaflow ", fg="magenta", bold=True, nl=False)
81
82
 
82
83
  if ctx.invoked_subcommand is None:
83
- echo("(%s): " % metaflow.__version__, fg="magenta", bold=False, nl=False)
84
+ echo("(%s): " % get_version(), fg="magenta", bold=False, nl=False)
84
85
  else:
85
- echo("(%s)\n" % metaflow.__version__, fg="magenta", bold=False)
86
+ echo("(%s)\n" % get_version(), fg="magenta", bold=False)
86
87
 
87
88
  if ctx.invoked_subcommand is None:
88
89
  echo("More data science, less engineering\n", fg="magenta")
89
90
 
90
- # metaflow URL
91
- echo("http://docs.metaflow.org", fg="cyan", nl=False)
92
- echo(" - Read the documentation")
93
-
94
- # metaflow chat
95
- echo("http://chat.metaflow.org", fg="cyan", nl=False)
96
- echo(" - Chat with us")
97
-
98
- # metaflow help email
99
- echo("help@metaflow.org", fg="cyan", nl=False)
100
- echo(" - Get help by email\n")
91
+ lnk_sz = max(len(lnk) for lnk in CONTACT_INFO.values()) + 1
92
+ for what, lnk in CONTACT_INFO.items():
93
+ echo("%s%s" % (lnk, " " * (lnk_sz - len(lnk))), fg="cyan", nl=False)
94
+ echo("- %s" % what)
95
+ echo("")
101
96
 
102
97
  print(ctx.get_help())
103
98
 
metaflow/current.py CHANGED
@@ -2,6 +2,8 @@ from collections import namedtuple
2
2
  import os
3
3
  from typing import Any, Optional
4
4
 
5
+ from metaflow.metaflow_config import TEMPDIR
6
+
5
7
  Parallel = namedtuple("Parallel", ["main_ip", "num_nodes", "node_index"])
6
8
 
7
9
 
@@ -17,6 +19,7 @@ class Current(object):
17
19
  self._username = None
18
20
  self._metadata_str = None
19
21
  self._is_running = False
22
+ self._tempdir = TEMPDIR
20
23
 
21
24
  def _raise(ex):
22
25
  raise ex
@@ -230,6 +233,18 @@ class Current(object):
230
233
  """
231
234
  return self._tags
232
235
 
236
+ @property
237
+ def tempdir(self) -> str:
238
+ """
239
+ Currently configured temp dir.
240
+
241
+ Returns
242
+ -------
243
+ str
244
+ temp dir.
245
+ """
246
+ return self._tempdir
247
+
233
248
 
234
249
  # instantiate the Current singleton. This will be populated
235
250
  # by task.MetaflowTask before a task is executed.
@@ -96,6 +96,8 @@ DATATOOLS_S3ROOT = from_conf(
96
96
  else None,
97
97
  )
98
98
 
99
+ TEMPDIR = from_conf("TEMPDIR", ".")
100
+
99
101
  DATATOOLS_CLIENT_PARAMS = from_conf("DATATOOLS_CLIENT_PARAMS", {})
100
102
  if S3_ENDPOINT_URL:
101
103
  DATATOOLS_CLIENT_PARAMS["endpoint_url"] = S3_ENDPOINT_URL
@@ -193,8 +195,25 @@ DEFAULT_CONTAINER_IMAGE = from_conf("DEFAULT_CONTAINER_IMAGE")
193
195
  # Default container registry
194
196
  DEFAULT_CONTAINER_REGISTRY = from_conf("DEFAULT_CONTAINER_REGISTRY")
195
197
 
198
+ ###
199
+ # Organization customizations
200
+ ###
196
201
  UI_URL = from_conf("UI_URL")
197
202
 
203
+ # Contact information displayed when running the `metaflow` command.
204
+ # Value should be a dictionary where:
205
+ # - key is a string describing contact method
206
+ # - value is a string describing contact itself (email, web address, etc.)
207
+ # The default value shows an example of this
208
+ CONTACT_INFO = from_conf(
209
+ "CONTACT_INFO",
210
+ {
211
+ "Read the documentation": "http://docs.metaflow.org",
212
+ "Chat with us": "http://chat.metaflow.org",
213
+ "Get help by email": "help@metaflow.org",
214
+ },
215
+ )
216
+
198
217
  ###
199
218
  # AWS Batch configuration
200
219
  ###
@@ -260,6 +279,8 @@ KUBERNETES_CONTAINER_IMAGE = from_conf(
260
279
  KUBERNETES_CONTAINER_REGISTRY = from_conf(
261
280
  "KUBERNETES_CONTAINER_REGISTRY", DEFAULT_CONTAINER_REGISTRY
262
281
  )
282
+ # Toggle for trying to fetch EC2 instance metadata
283
+ KUBERNETES_FETCH_EC2_METADATA = from_conf("KUBERNETES_FETCH_EC2_METADATA", False)
263
284
 
264
285
  ARGO_WORKFLOWS_KUBERNETES_SECRETS = from_conf("ARGO_WORKFLOWS_KUBERNETES_SECRETS", "")
265
286
  ARGO_WORKFLOWS_ENV_VARS_TO_SKIP = from_conf("ARGO_WORKFLOWS_ENV_VARS_TO_SKIP", "")
@@ -162,7 +162,7 @@ class MetaflowEnvironment(object):
162
162
  ]
163
163
  return cmds
164
164
 
165
- def get_environment_info(self):
165
+ def get_environment_info(self, include_ext_info=False):
166
166
  global version_cache
167
167
  if version_cache is None:
168
168
  version_cache = metaflow_version.get_version()
@@ -187,9 +187,10 @@ class MetaflowEnvironment(object):
187
187
  env["metaflow_r_version"] = R.metaflow_r_version()
188
188
  env["r_version"] = R.r_version()
189
189
  env["r_version_code"] = R.r_version_code()
190
- # Information about extension modules (to load them in the proper order)
191
- ext_key, ext_val = dump_module_info()
192
- env[ext_key] = ext_val
190
+ if include_ext_info:
191
+ # Information about extension modules (to load them in the proper order)
192
+ ext_key, ext_val = dump_module_info()
193
+ env[ext_key] = ext_val
193
194
  return env
194
195
 
195
196
  def executable(self, step_name):
metaflow/package.py CHANGED
@@ -151,7 +151,7 @@ class MetaflowPackage(object):
151
151
 
152
152
  def _add_info(self, tar):
153
153
  info = tarfile.TarInfo(os.path.basename(INFO_FILE))
154
- env = self.environment.get_environment_info()
154
+ env = self.environment.get_environment_info(include_ext_info=True)
155
155
  buf = BytesIO()
156
156
  buf.write(json.dumps(env).encode("utf-8"))
157
157
  buf.seek(0)
@@ -54,7 +54,6 @@ AIRFLOW_DEPLOY_TEMPLATE_FILE = os.path.join(os.path.dirname(__file__), "dag.py")
54
54
 
55
55
 
56
56
  class Airflow(object):
57
-
58
57
  TOKEN_STORAGE_ROOT = "mf.airflow"
59
58
 
60
59
  def __init__(
@@ -18,6 +18,7 @@ from metaflow.metaflow_config import (
18
18
  KUBERNETES_NODE_SELECTOR,
19
19
  KUBERNETES_SANDBOX_INIT_SCRIPT,
20
20
  KUBERNETES_SECRETS,
21
+ KUBERNETES_FETCH_EC2_METADATA,
21
22
  S3_ENDPOINT_URL,
22
23
  AZURE_STORAGE_BLOB_SERVICE_ENDPOINT,
23
24
  DATASTORE_SYSROOT_AZURE,
@@ -785,6 +786,7 @@ class ArgoWorkflows(object):
785
786
  "METAFLOW_DEFAULT_METADATA": DEFAULT_METADATA,
786
787
  "METAFLOW_CARD_S3ROOT": CARD_S3ROOT,
787
788
  "METAFLOW_KUBERNETES_WORKLOAD": 1,
789
+ "METAFLOW_KUBERNETES_FETCH_EC2_METADATA": KUBERNETES_FETCH_EC2_METADATA,
788
790
  "METAFLOW_RUNTIME_ENVIRONMENT": "kubernetes",
789
791
  "METAFLOW_OWNER": self.username,
790
792
  },
@@ -20,8 +20,13 @@ def get_ec2_instance_metadata():
20
20
  # for non-AWS deployments.
21
21
  # https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instance-identity-documents.html
22
22
  try:
23
+ # Set a very aggressive timeout, as the communication is happening in the same subnet,
24
+ # there should not be any significant delay in the response.
25
+ # Having a long default timeout here introduces unnecessary delay in launching tasks when the
26
+ # instance is unreachable.
23
27
  instance_meta = requests.get(
24
- url="http://169.254.169.254/latest/dynamic/instance-identity/document"
28
+ url="http://169.254.169.254/latest/dynamic/instance-identity/document",
29
+ timeout=(1, 10),
25
30
  ).json()
26
31
  meta["ec2-instance-id"] = instance_meta.get("instanceId")
27
32
  meta["ec2-instance-type"] = instance_meta.get("instanceType")
@@ -179,6 +179,10 @@ class Batch(object):
179
179
  env={},
180
180
  attrs={},
181
181
  host_volumes=None,
182
+ use_tmpfs=None,
183
+ tmpfs_tempdir=None,
184
+ tmpfs_size=None,
185
+ tmpfs_path=None,
182
186
  num_parallel=0,
183
187
  ):
184
188
  job_name = self._job_name(
@@ -201,6 +205,14 @@ class Batch(object):
201
205
  .image(image)
202
206
  .iam_role(iam_role)
203
207
  .execution_role(execution_role)
208
+ .cpu(cpu)
209
+ .gpu(gpu)
210
+ .memory(memory)
211
+ .shared_memory(shared_memory)
212
+ .max_swap(max_swap)
213
+ .swappiness(swappiness)
214
+ .inferentia(inferentia)
215
+ .timeout_in_secs(run_time_limit)
204
216
  .job_def(
205
217
  image,
206
218
  iam_role,
@@ -210,17 +222,14 @@ class Batch(object):
210
222
  max_swap,
211
223
  swappiness,
212
224
  inferentia,
225
+ memory=memory,
213
226
  host_volumes=host_volumes,
227
+ use_tmpfs=use_tmpfs,
228
+ tmpfs_tempdir=tmpfs_tempdir,
229
+ tmpfs_size=tmpfs_size,
230
+ tmpfs_path=tmpfs_path,
214
231
  num_parallel=num_parallel,
215
232
  )
216
- .cpu(cpu)
217
- .gpu(gpu)
218
- .memory(memory)
219
- .shared_memory(shared_memory)
220
- .max_swap(max_swap)
221
- .swappiness(swappiness)
222
- .inferentia(inferentia)
223
- .timeout_in_secs(run_time_limit)
224
233
  .task_id(attrs.get("metaflow.task_id"))
225
234
  .environment_variable("AWS_DEFAULT_REGION", self._client.region())
226
235
  .environment_variable("METAFLOW_CODE_SHA", code_package_sha)
@@ -248,6 +257,11 @@ class Batch(object):
248
257
  AWS_SECRETS_MANAGER_DEFAULT_REGION,
249
258
  )
250
259
 
260
+ tmpfs_enabled = use_tmpfs or (tmpfs_size and use_tmpfs is None)
261
+
262
+ if tmpfs_enabled and tmpfs_tempdir:
263
+ job.environment_variable("METAFLOW_TEMPDIR", tmpfs_path)
264
+
251
265
  # Skip setting METAFLOW_DATASTORE_SYSROOT_LOCAL because metadata sync between the local user
252
266
  # instance and the remote AWS Batch instance assumes metadata is stored in DATASTORE_LOCAL_DIR
253
267
  # on the remote AWS Batch instance; this happens when METAFLOW_DATASTORE_SYSROOT_LOCAL
@@ -300,6 +314,10 @@ class Batch(object):
300
314
  swappiness=None,
301
315
  inferentia=None,
302
316
  host_volumes=None,
317
+ use_tmpfs=None,
318
+ tmpfs_tempdir=None,
319
+ tmpfs_size=None,
320
+ tmpfs_path=None,
303
321
  num_parallel=0,
304
322
  env={},
305
323
  attrs={},
@@ -333,6 +351,10 @@ class Batch(object):
333
351
  env=env,
334
352
  attrs=attrs,
335
353
  host_volumes=host_volumes,
354
+ use_tmpfs=use_tmpfs,
355
+ tmpfs_tempdir=tmpfs_tempdir,
356
+ tmpfs_size=tmpfs_size,
357
+ tmpfs_path=tmpfs_path,
336
358
  num_parallel=num_parallel,
337
359
  )
338
360
  self.num_parallel = num_parallel
@@ -141,6 +141,10 @@ def kill(ctx, run_id, user, my_runs):
141
141
  @click.option("--max-swap", help="Max Swap requirement for AWS Batch.")
142
142
  @click.option("--swappiness", help="Swappiness requirement for AWS Batch.")
143
143
  @click.option("--inferentia", help="Inferentia requirement for AWS Batch.")
144
+ @click.option("--use-tmpfs", is_flag=True, help="tmpfs requirement for AWS Batch.")
145
+ @click.option("--tmpfs-tempdir", is_flag=True, help="tmpfs requirement for AWS Batch.")
146
+ @click.option("--tmpfs-size", help="tmpfs requirement for AWS Batch.")
147
+ @click.option("--tmpfs-path", help="tmpfs requirement for AWS Batch.")
144
148
  # TODO: Maybe remove it altogether since it's not used here
145
149
  @click.option("--ubf-context", default=None, type=click.Choice([None, "ubf_control"]))
146
150
  @click.option("--host-volumes", multiple=True)
@@ -169,6 +173,10 @@ def step(
169
173
  max_swap=None,
170
174
  swappiness=None,
171
175
  inferentia=None,
176
+ use_tmpfs=None,
177
+ tmpfs_tempdir=None,
178
+ tmpfs_size=None,
179
+ tmpfs_path=None,
172
180
  host_volumes=None,
173
181
  num_parallel=None,
174
182
  **kwargs
@@ -296,6 +304,10 @@ def step(
296
304
  env=env,
297
305
  attrs=attrs,
298
306
  host_volumes=host_volumes,
307
+ use_tmpfs=use_tmpfs,
308
+ tmpfs_tempdir=tmpfs_tempdir,
309
+ tmpfs_size=tmpfs_size,
310
+ tmpfs_path=tmpfs_path,
299
311
  num_parallel=num_parallel,
300
312
  )
301
313
  except Exception as e:
@@ -149,7 +149,12 @@ class BatchJob(object):
149
149
  max_swap,
150
150
  swappiness,
151
151
  inferentia,
152
+ memory,
152
153
  host_volumes,
154
+ use_tmpfs,
155
+ tmpfs_tempdir,
156
+ tmpfs_size,
157
+ tmpfs_path,
153
158
  num_parallel,
154
159
  ):
155
160
  # identify platform from any compute environment associated with the
@@ -251,7 +256,7 @@ class BatchJob(object):
251
256
  if inferentia:
252
257
  if not (isinstance(inferentia, (int, unicode, basestring))):
253
258
  raise BatchJobException(
254
- "invalid inferentia value: ({}) (should be 0 or greater)".format(
259
+ "Invalid inferentia value: ({}) (should be 0 or greater)".format(
255
260
  inferentia
256
261
  )
257
262
  )
@@ -282,6 +287,29 @@ class BatchJob(object):
282
287
  {"sourceVolume": name, "containerPath": host_path}
283
288
  )
284
289
 
290
+ if use_tmpfs or (tmpfs_size and use_tmpfs is None):
291
+ if tmpfs_size:
292
+ if not (isinstance(tmpfs_size, (int, unicode, basestring))):
293
+ raise BatchJobException(
294
+ "Invalid tmpfs value: ({}) (should be 0 or greater)".format(
295
+ tmpfs_size
296
+ )
297
+ )
298
+ else:
299
+ # default tmpfs behavior - https://man7.org/linux/man-pages/man5/tmpfs.5.html
300
+ tmpfs_size = int(memory) / 2
301
+
302
+ job_definition["containerProperties"]["linuxParameters"]["tmpfs"] = [
303
+ {
304
+ "containerPath": tmpfs_path,
305
+ "size": int(tmpfs_size),
306
+ "mountOptions": [
307
+ # should map to rw, suid, dev, exec, auto, nouser, and async
308
+ "defaults"
309
+ ],
310
+ }
311
+ ]
312
+
285
313
  self.num_parallel = num_parallel or 0
286
314
  if self.num_parallel >= 1:
287
315
  job_definition["type"] = "multinode"
@@ -343,7 +371,12 @@ class BatchJob(object):
343
371
  max_swap,
344
372
  swappiness,
345
373
  inferentia,
374
+ memory,
346
375
  host_volumes,
376
+ use_tmpfs,
377
+ tmpfs_tempdir,
378
+ tmpfs_size,
379
+ tmpfs_path,
347
380
  num_parallel,
348
381
  ):
349
382
  self.payload["jobDefinition"] = self._register_job_definition(
@@ -355,7 +388,12 @@ class BatchJob(object):
355
388
  max_swap,
356
389
  swappiness,
357
390
  inferentia,
391
+ memory,
358
392
  host_volumes,
393
+ use_tmpfs,
394
+ tmpfs_tempdir,
395
+ tmpfs_size,
396
+ tmpfs_path,
359
397
  num_parallel,
360
398
  )
361
399
  return self
@@ -522,7 +560,6 @@ class TriableException(Exception):
522
560
 
523
561
 
524
562
  class RunningJob(object):
525
-
526
563
  NUM_RETRIES = 8
527
564
 
528
565
  def __init__(self, id, client):
@@ -71,6 +71,16 @@ class BatchDecorator(StepDecorator):
71
71
  A swappiness value of 0 causes swapping not to happen unless absolutely
72
72
  necessary. A swappiness value of 100 causes pages to be swapped very
73
73
  aggressively. Accepted values are whole numbers between 0 and 100.
74
+ use_tmpfs: bool, default: False
75
+ This enables an explicit tmpfs mount for this step.
76
+ tmpfs_tempdir: bool, default: True
77
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
78
+ tmpfs_size: int, optional
79
+ The value for the size (in MiB) of the tmpfs mount for this step.
80
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
81
+ memory allocated for this step.
82
+ tmpfs_path: string, optional
83
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
74
84
  inferentia : int, default: 0
75
85
  Number of Inferentia chips required for this step.
76
86
  """
@@ -89,6 +99,10 @@ class BatchDecorator(StepDecorator):
89
99
  "swappiness": None,
90
100
  "inferentia": None,
91
101
  "host_volumes": None,
102
+ "use_tmpfs": False,
103
+ "tmpfs_tempdir": True,
104
+ "tmpfs_size": None,
105
+ "tmpfs_path": "/metaflow_temp",
92
106
  }
93
107
  resource_defaults = {
94
108
  "cpu": "1",
@@ -153,6 +167,10 @@ class BatchDecorator(StepDecorator):
153
167
  "least 60 seconds for execution on AWS Batch.".format(step=step)
154
168
  )
155
169
 
170
+ # Validate tmpfs_path. Batch requires this to be an absolute path
171
+ if self.attributes["tmpfs_path"] and self.attributes["tmpfs_path"][0] != "/":
172
+ raise BatchException("'tmpfs_path' needs to be an absolute path")
173
+
156
174
  def runtime_init(self, flow, graph, package, run_id):
157
175
  # Set some more internal state.
158
176
  self.flow = flow
@@ -198,6 +216,11 @@ class BatchDecorator(StepDecorator):
198
216
  self.metadata = metadata
199
217
  self.task_datastore = task_datastore
200
218
 
219
+ # current.tempdir reflects the value of METAFLOW_TEMPDIR (the current working
220
+ # directory by default), or the value of tmpfs_path if tmpfs_tempdir=False.
221
+ if not self.attributes["tmpfs_tempdir"]:
222
+ current._update_env({"tempdir": self.attributes["tmpfs_path"]})
223
+
201
224
  # task_pre_step may run locally if fallback is activated for @catch
202
225
  # decorator. In that scenario, we skip collecting AWS Batch execution
203
226
  # metadata. A rudimentary way to detect non-local execution is to
@@ -227,7 +227,6 @@ class StepFunctions(object):
227
227
  return None
228
228
 
229
229
  def _compile(self):
230
-
231
230
  # Visit every node of the flow and recursively build the state machine.
232
231
  def _visit(node, workflow, exit_node=None):
233
232
  if node.parallel_foreach:
@@ -637,8 +636,7 @@ class StepFunctions(object):
637
636
  env["METAFLOW_SFN_DYNAMO_DB_TABLE"] = SFN_DYNAMO_DB_TABLE
638
637
 
639
638
  # It makes no sense to set env vars to None (shows up as "None" string)
640
- env_without_none_values = {k: v for k, v in env.items() if v is not None}
641
- del env
639
+ env = {k: v for k, v in env.items() if v is not None}
642
640
 
643
641
  # Resolve AWS Batch resource requirements.
644
642
  batch_deco = [deco for deco in node.decorators if deco.name == "batch"][0]
@@ -686,7 +684,12 @@ class StepFunctions(object):
686
684
  shared_memory=resources["shared_memory"],
687
685
  max_swap=resources["max_swap"],
688
686
  swappiness=resources["swappiness"],
689
- env=env_without_none_values,
687
+ use_tmpfs=resources["use_tmpfs"],
688
+ tmpfs_tempdir=resources["tmpfs_tempdir"],
689
+ tmpfs_size=resources["tmpfs_size"],
690
+ tmpfs_path=resources["tmpfs_path"],
691
+ inferentia=resources["inferentia"],
692
+ env=env,
690
693
  attrs=attrs,
691
694
  host_volumes=resources["host_volumes"],
692
695
  )
@@ -312,7 +312,6 @@ def make_flow(
312
312
  def resolve_token(
313
313
  name, token_prefix, obj, authorize, given_token, generate_new_token, is_project
314
314
  ):
315
-
316
315
  # 1) retrieve the previous deployment, if one exists
317
316
  workflow = StepFunctions.get_existing_deployment(name)
318
317
  if workflow is None:
@@ -295,6 +295,70 @@ class TaskToDict:
295
295
  def _parse_range(self, data_object):
296
296
  return self._get_repr().repr(data_object)
297
297
 
298
+ @staticmethod
299
+ def _parse_pandas_column(column_object):
300
+ # There are two types of parsing we do here.
301
+ # 1. We explicitly parse the types we know how to parse
302
+ # 2. We try to partially match a type name to the column's type.
303
+ # - We do this because `datetime64` can match `datetime64[ns]` and `datetime64[ns, UTC]`
304
+ # - We do this because period can match `period[D]` and `period[2D]` etc.
305
+ # - There are just too many types to explicitly parse so we go by this heuristic
306
+ # We have a default parser called `truncate_long_objects` which type casts any column to string
307
+ # and truncates it to 30 characters.
308
+ # If there is any form of TypeError or ValueError we set the column value to "Unsupported Type"
309
+ # We also set columns which are have null values to "null" strings
310
+ time_format = "%Y-%m-%dT%H:%M:%SZ"
311
+ truncate_long_objects = (
312
+ lambda x: x.astype("string").str.slice(0, 30) + "..."
313
+ if x.astype("string").str.len().max() > 30
314
+ else x.astype("string")
315
+ )
316
+ type_parser = {
317
+ "int64": lambda x: x,
318
+ "float64": lambda x: x,
319
+ "bool": lambda x: x,
320
+ "object": lambda x: truncate_long_objects(x.fillna("null")),
321
+ "category": truncate_long_objects,
322
+ }
323
+
324
+ partial_type_name_match_parsers = {
325
+ "complex": {
326
+ "complex": lambda x: x.astype("string"),
327
+ },
328
+ "datetime": {
329
+ "datetime64": lambda x: x.dt.strftime(time_format),
330
+ "timedelta": lambda x: x.dt.total_seconds(),
331
+ },
332
+ "interval": {
333
+ "interval": lambda x: x.astype("string"),
334
+ },
335
+ "period": {
336
+ "period": lambda x: x.astype("string"),
337
+ },
338
+ }
339
+
340
+ def _match_partial_type():
341
+ col_type = column_object.dtype
342
+ for _, type_parsers in partial_type_name_match_parsers.items():
343
+ for type_name, parser in type_parsers.items():
344
+ if type_name in str(col_type):
345
+ return parser(column_object)
346
+ return None
347
+
348
+ try:
349
+ col_type = str(column_object.dtype)
350
+ if col_type in type_parser:
351
+ return type_parser[col_type](column_object)
352
+ else:
353
+ parsed_col = _match_partial_type()
354
+ if parsed_col is not None:
355
+ return parsed_col
356
+ return truncate_long_objects(column_object)
357
+ except ValueError as e:
358
+ return "Unsupported type: {0}".format(col_type)
359
+ except TypeError as e:
360
+ return "Unsupported type: {0}".format(col_type)
361
+
298
362
  def _parse_pandas_dataframe(self, data_object, truncate=True):
299
363
  headers = list(data_object.columns)
300
364
  data = data_object
@@ -302,18 +366,16 @@ class TaskToDict:
302
366
  data = data_object.head()
303
367
  index_column = data.index
304
368
  time_format = "%Y-%m-%dT%H:%M:%SZ"
305
- if index_column.dtype == "datetime64[ns]":
369
+
370
+ if "datetime64" in str(index_column.dtype):
306
371
  if index_column.__class__.__name__ == "DatetimeIndex":
307
372
  index_column = index_column.strftime(time_format)
308
373
  else:
309
374
  index_column = index_column.dt.strftime(time_format)
310
375
 
311
376
  for col in data.columns:
312
- # we convert datetime columns to strings
313
- if data[col].dtype == "datetime64[ns]":
314
- data[col] = data[col].dt.strftime(time_format)
377
+ data[col] = self._parse_pandas_column(data[col])
315
378
 
316
- data = data.astype(object).where(data.notnull(), None)
317
379
  data_vals = data.values.tolist()
318
380
  for row, idx in zip(data_vals, index_column.values.tolist()):
319
381
  row.insert(0, idx)
@@ -132,5 +132,5 @@ class CondaEnvironment(MetaflowEnvironment):
132
132
  def get_package_commands(self, code_package_url, datastore_type):
133
133
  return self.base_env.get_package_commands(code_package_url, datastore_type)
134
134
 
135
- def get_environment_info(self):
136
- return self.base_env.get_environment_info()
135
+ def get_environment_info(self, include_ext_info=False):
136
+ return self.base_env.get_environment_info(include_ext_info)
@@ -285,7 +285,13 @@ class CondaStepDecorator(StepDecorator):
285
285
  mode="wt",
286
286
  encoding="utf-8",
287
287
  ) as f:
288
- f.write(json.dumps(self._cur_environment.get_environment_info()))
288
+ f.write(
289
+ json.dumps(
290
+ self._cur_environment.get_environment_info(
291
+ include_ext_info=True
292
+ )
293
+ )
294
+ )
289
295
 
290
296
  # Do the same for EXT_PKG
291
297
  try:
@@ -17,6 +17,7 @@ from metaflow.metaflow_config import (
17
17
  DATATOOLS_S3ROOT,
18
18
  S3_RETRY_COUNT,
19
19
  S3_TRANSIENT_RETRY_COUNT,
20
+ TEMPDIR,
20
21
  )
21
22
  from metaflow.util import (
22
23
  namedtuple_with_defaults,
@@ -142,7 +143,6 @@ class S3Object(object):
142
143
  range_info: Optional[RangeInfo] = None,
143
144
  last_modified: int = None,
144
145
  ):
145
-
146
146
  # all fields of S3Object should return a unicode object
147
147
  prefix, url, path = map(ensure_unicode, (prefix, url, path))
148
148
 
@@ -481,7 +481,7 @@ class S3(object):
481
481
 
482
482
  def __init__(
483
483
  self,
484
- tmproot: str = ".",
484
+ tmproot: str = TEMPDIR,
485
485
  bucket: Optional[str] = None,
486
486
  prefix: Optional[str] = None,
487
487
  run: Optional[Union[FlowSpec, "Run"]] = None,
@@ -36,7 +36,7 @@ class Channel(object):
36
36
  sz_bytes = self._stream.read(self._fmt.size, timeout)
37
37
  msg_sz = self._fmt.unpack(sz_bytes)[0]
38
38
  obj_bytes = self._stream.read(msg_sz, timeout)
39
- return json.loads(obj_bytes, encoding="utf-8")
39
+ return json.loads(obj_bytes)
40
40
  except EOFError as e:
41
41
  raise RuntimeError("Cannot receive object over streaming interface: %s" % e)
42
42
  except BaseException as e:
@@ -15,6 +15,7 @@ from metaflow.metaflow_config import (
15
15
  DEFAULT_AWS_CLIENT_PROVIDER,
16
16
  DEFAULT_METADATA,
17
17
  KUBERNETES_SANDBOX_INIT_SCRIPT,
18
+ KUBERNETES_FETCH_EC2_METADATA,
18
19
  S3_ENDPOINT_URL,
19
20
  AZURE_STORAGE_BLOB_SERVICE_ENDPOINT,
20
21
  DATASTORE_SYSROOT_AZURE,
@@ -199,6 +200,9 @@ class Kubernetes(object):
199
200
  .environment_variable("METAFLOW_DEFAULT_DATASTORE", self._datastore.TYPE)
200
201
  .environment_variable("METAFLOW_DEFAULT_METADATA", DEFAULT_METADATA)
201
202
  .environment_variable("METAFLOW_KUBERNETES_WORKLOAD", 1)
203
+ .environment_variable(
204
+ "METAFLOW_KUBERNETES_FETCH_EC2_METADATA", KUBERNETES_FETCH_EC2_METADATA
205
+ )
202
206
  .environment_variable("METAFLOW_RUNTIME_ENVIRONMENT", "kubernetes")
203
207
  .environment_variable(
204
208
  "METAFLOW_DEFAULT_SECRETS_BACKEND_TYPE", DEFAULT_SECRETS_BACKEND_TYPE
@@ -17,6 +17,7 @@ from metaflow.metaflow_config import (
17
17
  KUBERNETES_TOLERATIONS,
18
18
  KUBERNETES_SERVICE_ACCOUNT,
19
19
  KUBERNETES_SECRETS,
20
+ KUBERNETES_FETCH_EC2_METADATA,
20
21
  )
21
22
  from metaflow.plugins.resources_decorator import ResourcesDecorator
22
23
  from metaflow.plugins.timeout_decorator import get_run_time_limit_for_task
@@ -327,8 +328,11 @@ class KubernetesDecorator(StepDecorator):
327
328
 
328
329
  # TODO (savin): Introduce equivalent support for Microsoft Azure and
329
330
  # Google Cloud Platform
330
- instance_meta = get_ec2_instance_metadata()
331
- meta.update(instance_meta)
331
+ # TODO: Introduce a way to detect Cloud Provider, so unnecessary requests (and delays)
332
+ # can be avoided by not having to try out all providers.
333
+ if KUBERNETES_FETCH_EC2_METADATA:
334
+ instance_meta = get_ec2_instance_metadata()
335
+ meta.update(instance_meta)
332
336
 
333
337
  # Unfortunately, there doesn't seem to be any straight forward way right
334
338
  # now to attach the Batch/v1 name - While we can rely on a hacky approach
@@ -413,8 +413,23 @@ class RunningJob(object):
413
413
  except:
414
414
  # Best effort. It's likely that this API call could be
415
415
  # blocked for the user.
416
- pass
417
- # raise
416
+ # --------------------------------------------------------
417
+ # We try patching Job parallelism anyway. Stopping any runaway
418
+ # jobs (and their pods) is secondary to correctly showing
419
+ # "Killed" status on the Kubernetes pod.
420
+ #
421
+ # This has the effect of pausing the job.
422
+ try:
423
+ client.BatchV1Api().patch_namespaced_job(
424
+ name=self._name,
425
+ namespace=self._namespace,
426
+ field_manager="metaflow",
427
+ body={"spec": {"parallelism": 0}},
428
+ )
429
+ except:
430
+ # Best effort.
431
+ pass
432
+ # raise
418
433
  else:
419
434
  # Case 2.
420
435
  # This has the effect of pausing the job.
@@ -137,9 +137,10 @@ class ServiceMetadataProvider(MetadataProvider):
137
137
  payload[HB_URL_KEY] = self.url_run_template.format(**data)
138
138
  else:
139
139
  raise Exception("invalid heartbeat type")
140
- payload["service_version"] = self.version()
140
+ service_version = self.version()
141
+ payload["service_version"] = service_version
141
142
  # start sidecar
142
- if self.version() is None or LooseVersion(self.version()) < LooseVersion(
143
+ if service_version is None or LooseVersion(service_version) < LooseVersion(
143
144
  "2.0.4"
144
145
  ):
145
146
  # if old version of the service is running
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: metaflow
3
- Version: 2.8.2
3
+ Version: 2.8.3
4
4
  Summary: Metaflow: More Data Science, Less Engineering
5
5
  Author: Metaflow Developers
6
6
  Author-email: help@metaflow.org
@@ -76,8 +76,3 @@ There are several ways to get in touch with us:
76
76
  ## Contributing
77
77
 
78
78
  We welcome contributions to Metaflow. Please see our [contribution guide](https://docs.metaflow.org/introduction/contributing-to-metaflow) for more details.
79
-
80
- ### Code style
81
-
82
- We use [black](https://black.readthedocs.io/en/stable/) as a code formatter. The easiest way to ensure your commits are always formatted with the correct version of `black` it is to use [pre-commit](https://pre-commit.com/): install it and then run `pre-commit install` once in your local copy of the repo.
83
-
@@ -4,7 +4,7 @@ metaflow/cards.py,sha256=YM-ZAntXrt05dWWr5E1knhwXP82E36kT-cTVWLOrsvg,394
4
4
  metaflow/cli.py,sha256=skd-ARvBrXr3k5p11QYMoCGZCgxWRheOV2WcME8jFd8,35024
5
5
  metaflow/cli_args.py,sha256=lcgBGNTvfaiPxiUnejAe60Upt9swG6lRy1_3OqbU6MY,2616
6
6
  metaflow/cmd_with_io.py,sha256=kl53HkAIyv0ecpItv08wZYczv7u3msD1VCcciqigqf0,588
7
- metaflow/current.py,sha256=dkPyUM_qUcF-ShftGygNV9DcuVAz3ddZuVjS_Aw9b3Q,5824
7
+ metaflow/current.py,sha256=7xigMvkVwczP6mXTZ2lJzTepeFI2cv-ZL3i_odOwebw,6106
8
8
  metaflow/debug.py,sha256=HEmt_16tJtqHXQXsqD9pqOFe3CWR5GZ7VwpaYQgnRdU,1466
9
9
  metaflow/decorators.py,sha256=Yrr6Tr_d_f0Z_MKVTOLhrwPB0KS8B3BXl_3vHRLgN2E,19860
10
10
  metaflow/event_logger.py,sha256=joTVRqZPL87nvah4ZOwtqWX8NeraM_CXKXXGVpKGD8o,780
@@ -14,14 +14,14 @@ metaflow/graph.py,sha256=ZPxyG8uwVMk5YYgX4pQEQaPZtZM5Wy-G4NtJK73IEuA,11818
14
14
  metaflow/includefile.py,sha256=ulrQ03CZcKCEL_dTP46Yk0_NF6XRIOEBp1u_Ud5D0jg,19546
15
15
  metaflow/integrations.py,sha256=EEgR76LW5x3UwE0hKME6MMcgGUXvL-3u0Hv9AB45e-E,1402
16
16
  metaflow/lint.py,sha256=_kYAbAtsP7IG1Rd0FqNbo8I8Zs66_0WXbaZJFARO3dE,10394
17
- metaflow/metaflow_config.py,sha256=Xn9UtvMKf_4gTnqBqkCu2oWH-pwUww8rPyJdQut7y50,17121
17
+ metaflow/metaflow_config.py,sha256=gapuBV9Itp8b7AIavBWWN7Jqh1ZafsicapWHcdRGhUs,17830
18
18
  metaflow/metaflow_config_funcs.py,sha256=DpCpdVV-pfaAitWMurQdunbnRRtO38zNDel8riWRI1Q,4309
19
- metaflow/metaflow_environment.py,sha256=vCM399TIGF_24oEhXAhaJoTl1jEFo_ZHLbuATPM7dVw,7214
19
+ metaflow/metaflow_environment.py,sha256=2nzo9gyixcH74djRZm3ifDzO_xEKSaA1dOfsVOXSFoo,7279
20
20
  metaflow/metaflow_profile.py,sha256=jKPEW-hmAQO-htSxb9hXaeloLacAh41A35rMZH6G8pA,418
21
21
  metaflow/metaflow_version.py,sha256=mPQ6g_3XjNdi0NrxDzwlW8ZH0nMyYpwqmJ04P7TIdP0,4774
22
22
  metaflow/monitor.py,sha256=LSY-NDWmEhx7B_Afo0cLh2QX9esJa73p6F6pmdwCpbk,5297
23
23
  metaflow/multicore_utils.py,sha256=mjPdHZbw6SQFwdPNAiOtxHcai8sywiYmuo7z3HF4MUI,2604
24
- metaflow/package.py,sha256=YA6oX1svEcSkN5Le5UL8CqOhRljK70zfxTEEK25sXgo,7161
24
+ metaflow/package.py,sha256=YWoMEISA0vylYwEWzwTlxjQPcfNKnnftDJJ4-v54Zbo,7182
25
25
  metaflow/parameters.py,sha256=d_op5psX6yAx-2demD-lffW8HJyiWu_Ft2HgvxuBUWw,13765
26
26
  metaflow/procpoll.py,sha256=22ppTUyaTYVn1UUG4RNG1LnCKBwRbaTmhYiYN_7OVN8,2861
27
27
  metaflow/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -66,11 +66,11 @@ metaflow/_vendor/v3_6/importlib_metadata/_itertools.py,sha256=cvr_2v8BRbxcIl5x5l
66
66
  metaflow/_vendor/v3_6/importlib_metadata/_meta.py,sha256=_F48Hu_jFxkfKWz5wcYS8vO23qEygbVdF9r-6qh-hjE,1154
67
67
  metaflow/_vendor/v3_6/importlib_metadata/_text.py,sha256=HCsFksZpJLeTP3NEk_ngrAeXVRRtTrtyh9eOABoRP4A,2166
68
68
  metaflow/client/__init__.py,sha256=1GtQB4Y_CBkzaxg32L1syNQSlfj762wmLrfrDxGi1b8,226
69
- metaflow/client/core.py,sha256=_mGhg3HvkAnRUXOaTXyJnuYgfqtCr8hkO0eSYi1qyx8,63310
69
+ metaflow/client/core.py,sha256=C5g5RghHzPlynZSno0JNub6csfyogCC58Ul3VAHDM-4,63753
70
70
  metaflow/client/filecache.py,sha256=QdD1sW6w4Nnza-ioz4I1fEZI843X33AFIV3eSxq-cuU,14868
71
71
  metaflow/cmd/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
72
- metaflow/cmd/configure_cmd.py,sha256=nG8ItQu2xNLVtB5CXued2roTWSiMGQtBIpRMFUDZMZY,31051
73
- metaflow/cmd/main_cli.py,sha256=r9UeqvUOAlJUFFPaEsmBy8OYE4o4xdSU1-k5RItDIIk,2918
72
+ metaflow/cmd/configure_cmd.py,sha256=lcmZAl3p2jO-UqeufMAlUvvUJcVdi3F55VJqbVS42Fg,31039
73
+ metaflow/cmd/main_cli.py,sha256=6WIteupxXLqtNOMyBjf6j7cPetFOx_F1b9aCCNgeWvg,2835
74
74
  metaflow/cmd/tutorials_cmd.py,sha256=8FdlKkicTOhCIDKcBR5b0Oz6giDvS-EMY3o9skIrRqw,5156
75
75
  metaflow/cmd/util.py,sha256=jS_0rUjOnGGzPT65fzRLdGjrYAOOLA4jU2S0HJLV0oc,406
76
76
  metaflow/datastore/__init__.py,sha256=VxP6ddJt3rwiCkpiSfAhyVkUCOe1pgZZsytVEJzFmSQ,155
@@ -110,7 +110,7 @@ metaflow/plugins/tag_cli.py,sha256=xXlhv6Y9kU0BHvNWntQ9QoG6swBVww-FzrpeFt_m_98,1
110
110
  metaflow/plugins/test_unbounded_foreach_decorator.py,sha256=cB_2OWb38eYfmbVck72ZwU0qgzi6hqJXZAxglpHU_qg,5216
111
111
  metaflow/plugins/timeout_decorator.py,sha256=9aAMualSW1AzUV--tmkGR5rxQ_dBZIWtCKXbQ1_Pt9A,3598
112
112
  metaflow/plugins/airflow/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
113
- metaflow/plugins/airflow/airflow.py,sha256=8gbkxZRn7_tXlw-U9nUqeB85XTYrHeDDBl5vwB2sdV0,30230
113
+ metaflow/plugins/airflow/airflow.py,sha256=YUhvboQE3aJ1mnP6XsVKdAM4z-AEeVNr_UrBpGIe5Dg,30229
114
114
  metaflow/plugins/airflow/airflow_cli.py,sha256=mvqrUWBW5Ty6MIhEXIDNSqCHU_YpQuIJw6UY6uTdU1o,14429
115
115
  metaflow/plugins/airflow/airflow_decorator.py,sha256=H9-QnRP4x8tSomLmmpGeuVUI48-CxHR7tlvn_ceX1Zs,1772
116
116
  metaflow/plugins/airflow/airflow_utils.py,sha256=qd6lV2X4VpCO2sLsRc35JMOU4DVz_tQacrM_wWNkQug,28865
@@ -124,18 +124,18 @@ metaflow/plugins/airflow/sensors/external_task_sensor.py,sha256=GAWfc2GJqSSAKOJy
124
124
  metaflow/plugins/airflow/sensors/s3_sensor.py,sha256=zym4mUm_f_gBsvHHVqGtX_OOxRjM3WG217gaT-XDBnk,3274
125
125
  metaflow/plugins/argo/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
126
126
  metaflow/plugins/argo/argo_client.py,sha256=xPNWFw_OvYD8G1NUcLfLbFGwxaFx2OQ4NbM1h5NIwos,6818
127
- metaflow/plugins/argo/argo_workflows.py,sha256=JhgrYnqNB6p5fNSYHykcFBT4QcTI5jT92e4gSd0q04M,57957
127
+ metaflow/plugins/argo/argo_workflows.py,sha256=60l0EiU2xezruwd0Kei-ZtBYz0jh6rWPTC9-yVukx2w,58089
128
128
  metaflow/plugins/argo/argo_workflows_cli.py,sha256=5n9F1BSgP9z7R-qZqUeN1Gso1Oh2TomgJ1ukAobMNgg,17737
129
129
  metaflow/plugins/argo/argo_workflows_decorator.py,sha256=5JS5_gFEPtGz3ZQSwkL1zoChqY1Hk4YPlExf33e5eSw,2395
130
130
  metaflow/plugins/argo/process_input_paths.py,sha256=BYo8eykdPEG8Zyt_aWH92uWNb1gicX_0KfLWK4mUfNs,555
131
131
  metaflow/plugins/aws/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
132
132
  metaflow/plugins/aws/aws_client.py,sha256=mO8UD6pxFaOnxDb3hTP3HB7Gqb_ZxoR-76LT683WHvI,4036
133
- metaflow/plugins/aws/aws_utils.py,sha256=lvn5g9Lbm-vHrInqOe580Asaj6kKEbttt6f-kMlBcMo,5472
133
+ metaflow/plugins/aws/aws_utils.py,sha256=K_rs3VYUEl1Wx31vh3782kBCJT6xajq0A6tjuavkgr0,5803
134
134
  metaflow/plugins/aws/batch/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
135
- metaflow/plugins/aws/batch/batch.py,sha256=9rgD2uklDDEN4oxWxlwVV32V1SioCPz8YNi5ijUyh6g,15392
136
- metaflow/plugins/aws/batch/batch_cli.py,sha256=f5haA2eup3TKVp64igEC29vRXVWKe8ROnvGHQCeo6DA,10198
137
- metaflow/plugins/aws/batch/batch_client.py,sha256=hn3w44CvgN4CBCgUnZ61Qr51momA7PyMUJC8JmUMPxw,22700
138
- metaflow/plugins/aws/batch/batch_decorator.py,sha256=CNy31WmEIDVKvb9OwPlS6F7IJtf9qZcVRqoGe5hZiog,14393
135
+ metaflow/plugins/aws/batch/batch.py,sha256=nvHT6MlsFv86R2IcIE4sAt80if07G98ggFY4xpTDmbY,16118
136
+ metaflow/plugins/aws/batch/batch_cli.py,sha256=jprzV_wdb0GcwxVNpDxKAfHfeYJlB1D5BrRDosqlQQo,10758
137
+ metaflow/plugins/aws/batch/batch_client.py,sha256=xF1TYbItGECKrh3amCcvqUyavgZ4cjGFRuE1jfa9yUM,23960
138
+ metaflow/plugins/aws/batch/batch_decorator.py,sha256=YAXVN6xuU_8CXZEXke2Qb7ogPtT5b5x6S9dRLQbHPa0,15590
139
139
  metaflow/plugins/aws/secrets_manager/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
140
140
  metaflow/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.py,sha256=QJflSbE6wBRDTjTcvNzCBt5JFyigWvvDHk3k6n1zXrA,7496
141
141
  metaflow/plugins/aws/step_functions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -144,8 +144,8 @@ metaflow/plugins/aws/step_functions/event_bridge_client.py,sha256=jPc3AExk2DofdR
144
144
  metaflow/plugins/aws/step_functions/production_token.py,sha256=jEsDH0etkzORgeDTUx6UDRjRsy8QQHYjHWEDLE-0rVY,1898
145
145
  metaflow/plugins/aws/step_functions/schedule_decorator.py,sha256=I-R1ki4kZ-cFb0zuCUa1EEE6uQ-FZRBXFxPdkxx8Lq4,1915
146
146
  metaflow/plugins/aws/step_functions/set_batch_environment.py,sha256=ibiGWFHDjKcLfprH3OsX-g2M9lUsh6J-bp7v2cdLhD4,1294
147
- metaflow/plugins/aws/step_functions/step_functions.py,sha256=bh-ZANyId0yAR-KYScQ3VflJpLnKpo7uWBhIPxG0CBw,41866
148
- metaflow/plugins/aws/step_functions/step_functions_cli.py,sha256=23T2pa4hElmTJo6DnT7LkyGXp4S9F88ehyPNVg637yw,17971
147
+ metaflow/plugins/aws/step_functions/step_functions.py,sha256=9Fd4EUE4wzm5YeG1chg6AUUx1PUdpeBHtGJOa8eDMOs,42073
148
+ metaflow/plugins/aws/step_functions/step_functions_cli.py,sha256=wMXSwB-n1HHD1v1VkiV4heeEzx5riM4Ah-A1WmyUPUA,17970
149
149
  metaflow/plugins/aws/step_functions/step_functions_client.py,sha256=vjmV8AXHlUc2fo1i0iID2twuBBxKtIYmjid2ZCQBPs0,4224
150
150
  metaflow/plugins/aws/step_functions/step_functions_decorator.py,sha256=R1C1EYdoYJUILXdpV-vdXOlyEBNiyDWfNXsTcG8URww,3791
151
151
  metaflow/plugins/azure/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -168,7 +168,7 @@ metaflow/plugins/cards/card_modules/basic.py,sha256=mgLnoySFE-kQPGKKSb0DYfIrCv44
168
168
  metaflow/plugins/cards/card_modules/bundle.css,sha256=1kRe1j039KJR3eJE_Boy1P-W-zX-tx3zOEHvyi82Ih0,11534
169
169
  metaflow/plugins/cards/card_modules/card.py,sha256=0u8Sm4PLvZeg4vHxwPseEr98JOrLmgFOGGDG9nLC90c,2359
170
170
  metaflow/plugins/cards/card_modules/components.py,sha256=Tb4G9xm_TPBOeCIzjCOwBRTNcqUuiEuk90wTb0aCGrU,13566
171
- metaflow/plugins/cards/card_modules/convert_to_native_type.py,sha256=TFTYTHy4_NFMCT9ebJGnGpW0julelRjDEW6FpGFCyaQ,12569
171
+ metaflow/plugins/cards/card_modules/convert_to_native_type.py,sha256=BusJvnhLTwmZDeQ-zL1ZDBQEnMZUH1-EYaNWJdK0fTE,15171
172
172
  metaflow/plugins/cards/card_modules/main.js,sha256=dcOy5oN1DqREHLwIli9IRw-pg6Lxq5--4hzzJXikDVY,353143
173
173
  metaflow/plugins/cards/card_modules/renderer_tools.py,sha256=V3H92FvIl2DtzVsU7SQuQd5_MFwvG6GSAmpqqrqeIQM,1641
174
174
  metaflow/plugins/cards/card_modules/test_cards.py,sha256=sMjpPEhv184Kc7NQeBZXH3AyiHlxUahqE1aYBCKQdsg,2439
@@ -180,9 +180,9 @@ metaflow/plugins/cards/card_modules/chevron/tokenizer.py,sha256=lQU9OELUE9a5Xu4s
180
180
  metaflow/plugins/conda/__init__.py,sha256=Zamj1DqbZDhkXnq9jhmEGVWK18s1Js8e5jA4eyhL4k8,2839
181
181
  metaflow/plugins/conda/batch_bootstrap.py,sha256=_7aQ-YAHlABC6aloRS5m17p7WIv_qJdWmZP7UQCFdd8,4693
182
182
  metaflow/plugins/conda/conda.py,sha256=-CNTWjxmcbLoqbrwY8AJHwbQjuxvd-NSjChnZ95_pFc,8804
183
- metaflow/plugins/conda/conda_environment.py,sha256=h0TRl6VcZaQ5VI0FpqQVixNmfjt5wP4I40wBB-oCH8w,4992
183
+ metaflow/plugins/conda/conda_environment.py,sha256=KaLhyuKq0Doiz1DlTsckS7mJe_13aQkCVTMxMllmJac,5032
184
184
  metaflow/plugins/conda/conda_flow_decorator.py,sha256=A9kU9DayjNT3df7T415mlFocZ5lVnJkhpNyzMZv8-UU,1189
185
- metaflow/plugins/conda/conda_step_decorator.py,sha256=DJrdss1f5YKUC5PlUTzKXtT-I-3Z4iGAWjoR1-e2GxA,15560
185
+ metaflow/plugins/conda/conda_step_decorator.py,sha256=sf5Hx3PIabAaTBfYlSetWoOI526yiAVYIsZFt6uXzfg,15719
186
186
  metaflow/plugins/datastores/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
187
187
  metaflow/plugins/datastores/azure_storage.py,sha256=lsQ6G-nz1j0_O3sRipqMpu-A-hJzg5LhaGpCH-Sbtgg,16317
188
188
  metaflow/plugins/datastores/gs_storage.py,sha256=1dzkQPi6y5xNeZbmdCeh9ef5NK1yweTs3AJYcBwd63k,9705
@@ -191,7 +191,7 @@ metaflow/plugins/datastores/s3_storage.py,sha256=W3PIDbDyk67ecDfRf10mQeRGgaRq4zc
191
191
  metaflow/plugins/datatools/__init__.py,sha256=ge4L16OBQLy2J_MMvoHg3lMfdm-MluQgRWoyZ5GCRnk,1267
192
192
  metaflow/plugins/datatools/local.py,sha256=67hx3O_vInERlL0aJV0Sd-jUTd_2DOw4sJ4-IyEKNKM,4213
193
193
  metaflow/plugins/datatools/s3/__init__.py,sha256=14tr9fPjN3ULW5IOfKHeG7Uhjmgm7LMtQHfz1SFv-h8,248
194
- metaflow/plugins/datatools/s3/s3.py,sha256=0paYyxQJWH_A2ME8IuoqUl4wcK7SG77HxNSMp_8DAJE,62959
194
+ metaflow/plugins/datatools/s3/s3.py,sha256=AdpuyUoGuRKFahX7WaQKjWZlcUEt0GizOAAmimS3cbI,62975
195
195
  metaflow/plugins/datatools/s3/s3op.py,sha256=7hBvTff_MC5oWPZPuQhEoxX46jk2vdawV2VTyUUZ5pA,42475
196
196
  metaflow/plugins/datatools/s3/s3tail.py,sha256=boQjQGQMI-bvTqcMP2y7uSlSYLcvWOy7J3ZUaF78NAA,2597
197
197
  metaflow/plugins/datatools/s3/s3util.py,sha256=FgRgaVmEq7-i2dV7q8XK5w5PfFt-xJjZa8WrK8IJfdI,3769
@@ -207,7 +207,7 @@ metaflow/plugins/env_escape/stub.py,sha256=K7eXwWv7-YAxMmioRD635swBbN2sAs0YAvoNW
207
207
  metaflow/plugins/env_escape/utils.py,sha256=q91oeiBFVjiTYcLvZ1TNJaSUZxDL89_N5lreu5FUqoA,810
208
208
  metaflow/plugins/env_escape/communication/__init__.py,sha256=Ff5AB88gOAvBzN2pp_2YNiD0PhUIt2SFE8nyOAKnxXg,38
209
209
  metaflow/plugins/env_escape/communication/bytestream.py,sha256=weQBm-c6yPlGv1TAmQbYanqvQ0IRDh7x_6hZPvWh_Uw,1866
210
- metaflow/plugins/env_escape/communication/channel.py,sha256=d2szjXXyaQgHdv5AJSymCtXOk49LglIa4r_L-erGAvw,1668
210
+ metaflow/plugins/env_escape/communication/channel.py,sha256=7vs23ZGsdy1nYM0V9xAdodBPSXIWgkCwp5l5ymgIEdU,1650
211
211
  metaflow/plugins/env_escape/communication/socket_bytestream.py,sha256=aQ9jC4OZH6_pfgWArt8tOIwLL41P2OBivtFu8ZmlyfQ,3572
212
212
  metaflow/plugins/env_escape/communication/utils.py,sha256=vV20EUjXqo5gWBtObbJBb4WL44N-EBBZsDiPBGL7tl0,874
213
213
  metaflow/plugins/env_escape/configurations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -225,14 +225,14 @@ metaflow/plugins/gcp/gs_tail.py,sha256=Jl_wvnzU7dub07A-DOAuP5FeccNIrPM-CeL1xKFs1
225
225
  metaflow/plugins/gcp/gs_utils.py,sha256=YgtzUn9A0DlYRWR75h533RVYZsqR7eEZPRRR8QSFZik,2023
226
226
  metaflow/plugins/gcp/includefile_support.py,sha256=vIDeR-MiJuUh-2S2pV7Z7FBkhIWwtHXaRrj76MWGRiY,3869
227
227
  metaflow/plugins/kubernetes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
228
- metaflow/plugins/kubernetes/kubernetes.py,sha256=VldzKYwp_BTomZpOE1ZuU9Noyl_dz9VEwI6B2kackVc,12545
228
+ metaflow/plugins/kubernetes/kubernetes.py,sha256=GvrnOLfaZ_lkN5XJS5pnBIqEgdusA4SZ3yXDT-9dJxg,12717
229
229
  metaflow/plugins/kubernetes/kubernetes_cli.py,sha256=Cdt1Bhfevi9dv_Ok2wyIyHjgUccJZ0UIzs0fxGkWO-Q,7847
230
230
  metaflow/plugins/kubernetes/kubernetes_client.py,sha256=dV3TEGQMBbljmv6Gs1EKfmHTorKt21lhSiYsNx0To08,1901
231
- metaflow/plugins/kubernetes/kubernetes_decorator.py,sha256=N6i4Tzvw8767vqycO684DCrTwfFfHPWrVx6pmt50Jz8,17152
232
- metaflow/plugins/kubernetes/kubernetes_job.py,sha256=32XbDO1PebiR_IxEbGOgcn1dIk0YLoBKaRz3wM-CGrY,24217
231
+ metaflow/plugins/kubernetes/kubernetes_decorator.py,sha256=iQ5NgB6M4IcJAXHGuZrCG4qoT_XXmRj1K_RiDG4_2ZY,17409
232
+ metaflow/plugins/kubernetes/kubernetes_job.py,sha256=nA2eqOPVd2Hr5_feD-9P9z7kM0ur0lU7CDhvNDcu0hA,25011
233
233
  metaflow/plugins/metadata/__init__.py,sha256=q5OifpVipqhZuyrXBp9YIJN_jUov14_CNMveim9L8Rs,86
234
234
  metaflow/plugins/metadata/local.py,sha256=YhLJC5zjVJrvQFIyQ92ZBByiUmhCC762RUX7ITX12O8,22428
235
- metaflow/plugins/metadata/service.py,sha256=QwSkYEXMQEYzNtqbZAdi4xYmb2xxQ-iclheSUTS7wL0,19987
235
+ metaflow/plugins/metadata/service.py,sha256=-qqSENTUAE7-xc5dlR3EHHrfP2MY9PmQN1ueNnpL6tQ,20031
236
236
  metaflow/plugins/secrets/__init__.py,sha256=JOiQbP7vGpcBv8SPfsIvSARk1tigypX26Hv9IL9tu4s,299
237
237
  metaflow/plugins/secrets/inline_secrets_provider.py,sha256=iQ2a881pAbFAiIcqG6MSi_XHuN05jsa7t6sM-6ejQ8A,283
238
238
  metaflow/plugins/secrets/secrets_decorator.py,sha256=QQtcLLfz-I26Nkm80212LLBtUx8Z8ty8lrYqJfpMjBE,9235
@@ -264,9 +264,9 @@ metaflow/tutorials/07-worldview/README.md,sha256=5vQTrFqulJ7rWN6r20dhot9lI2sVj9W
264
264
  metaflow/tutorials/07-worldview/worldview.ipynb,sha256=ztPZPI9BXxvW1QdS2Tfe7LBuVzvFvv0AToDnsDJhLdE,2237
265
265
  metaflow/tutorials/08-autopilot/README.md,sha256=GnePFp_q76jPs991lMUqfIIh5zSorIeWznyiUxzeUVE,1039
266
266
  metaflow/tutorials/08-autopilot/autopilot.ipynb,sha256=DQoJlILV7Mq9vfPBGW-QV_kNhWPjS5n6SJLqePjFYLY,3191
267
- metaflow-2.8.2.dist-info/LICENSE,sha256=nl_Lt5v9VvJ-5lWJDT4ddKAG-VZ-2IaLmbzpgYDz2hU,11343
268
- metaflow-2.8.2.dist-info/METADATA,sha256=BwxHx_nk2wJAIgz0jwtDEbnR_EJwfrV_ospdCOTD9ls,2918
269
- metaflow-2.8.2.dist-info/WHEEL,sha256=a-zpFRIJzOq5QfuhBzbhiA1eHTzNCJn8OdRvhdNX0Rk,110
270
- metaflow-2.8.2.dist-info/entry_points.txt,sha256=IKwTN1T3I5eJL3uo_vnkyxVffcgnRdFbKwlghZfn27k,57
271
- metaflow-2.8.2.dist-info/top_level.txt,sha256=v1pDHoWaSaKeuc5fKTRSfsXCKSdW1zvNVmvA-i0if3o,9
272
- metaflow-2.8.2.dist-info/RECORD,,
267
+ metaflow-2.8.3.dist-info/LICENSE,sha256=nl_Lt5v9VvJ-5lWJDT4ddKAG-VZ-2IaLmbzpgYDz2hU,11343
268
+ metaflow-2.8.3.dist-info/METADATA,sha256=cYAvHq6oRvXVAyY4Hn0lb_NfpyAehrv3VTh6F-9aHJM,2593
269
+ metaflow-2.8.3.dist-info/WHEEL,sha256=a-zpFRIJzOq5QfuhBzbhiA1eHTzNCJn8OdRvhdNX0Rk,110
270
+ metaflow-2.8.3.dist-info/entry_points.txt,sha256=IKwTN1T3I5eJL3uo_vnkyxVffcgnRdFbKwlghZfn27k,57
271
+ metaflow-2.8.3.dist-info/top_level.txt,sha256=v1pDHoWaSaKeuc5fKTRSfsXCKSdW1zvNVmvA-i0if3o,9
272
+ metaflow-2.8.3.dist-info/RECORD,,