metaflow 2.15.21__py2.py3-none-any.whl → 2.16.0__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (74)
  1. metaflow/__init__.py +7 -1
  2. metaflow/cli.py +16 -1
  3. metaflow/cli_components/init_cmd.py +1 -0
  4. metaflow/cli_components/run_cmds.py +6 -2
  5. metaflow/client/core.py +22 -30
  6. metaflow/datastore/task_datastore.py +0 -1
  7. metaflow/debug.py +5 -0
  8. metaflow/decorators.py +230 -70
  9. metaflow/extension_support/__init__.py +15 -8
  10. metaflow/extension_support/_empty_file.py +2 -2
  11. metaflow/flowspec.py +80 -53
  12. metaflow/graph.py +24 -2
  13. metaflow/meta_files.py +13 -0
  14. metaflow/metadata_provider/metadata.py +7 -1
  15. metaflow/metaflow_config.py +5 -0
  16. metaflow/metaflow_environment.py +82 -25
  17. metaflow/metaflow_version.py +1 -1
  18. metaflow/package/__init__.py +664 -0
  19. metaflow/packaging_sys/__init__.py +870 -0
  20. metaflow/packaging_sys/backend.py +113 -0
  21. metaflow/packaging_sys/distribution_support.py +153 -0
  22. metaflow/packaging_sys/tar_backend.py +86 -0
  23. metaflow/packaging_sys/utils.py +91 -0
  24. metaflow/packaging_sys/v1.py +476 -0
  25. metaflow/plugins/airflow/airflow.py +5 -1
  26. metaflow/plugins/airflow/airflow_cli.py +15 -4
  27. metaflow/plugins/argo/argo_workflows.py +15 -4
  28. metaflow/plugins/argo/argo_workflows_cli.py +16 -4
  29. metaflow/plugins/aws/batch/batch.py +22 -3
  30. metaflow/plugins/aws/batch/batch_cli.py +3 -0
  31. metaflow/plugins/aws/batch/batch_decorator.py +13 -5
  32. metaflow/plugins/aws/step_functions/step_functions.py +4 -1
  33. metaflow/plugins/aws/step_functions/step_functions_cli.py +15 -4
  34. metaflow/plugins/cards/card_decorator.py +0 -5
  35. metaflow/plugins/kubernetes/kubernetes.py +8 -1
  36. metaflow/plugins/kubernetes/kubernetes_cli.py +3 -0
  37. metaflow/plugins/kubernetes/kubernetes_decorator.py +13 -5
  38. metaflow/plugins/package_cli.py +25 -23
  39. metaflow/plugins/parallel_decorator.py +4 -2
  40. metaflow/plugins/pypi/bootstrap.py +8 -2
  41. metaflow/plugins/pypi/conda_decorator.py +39 -82
  42. metaflow/plugins/pypi/conda_environment.py +6 -2
  43. metaflow/plugins/pypi/pypi_decorator.py +4 -4
  44. metaflow/plugins/test_unbounded_foreach_decorator.py +2 -2
  45. metaflow/plugins/timeout_decorator.py +0 -1
  46. metaflow/plugins/uv/bootstrap.py +11 -0
  47. metaflow/plugins/uv/uv_environment.py +4 -2
  48. metaflow/pylint_wrapper.py +5 -1
  49. metaflow/runner/click_api.py +5 -4
  50. metaflow/runner/subprocess_manager.py +14 -2
  51. metaflow/runtime.py +37 -11
  52. metaflow/task.py +91 -7
  53. metaflow/user_configs/config_options.py +13 -8
  54. metaflow/user_configs/config_parameters.py +0 -4
  55. metaflow/user_decorators/__init__.py +0 -0
  56. metaflow/user_decorators/common.py +144 -0
  57. metaflow/user_decorators/mutable_flow.py +499 -0
  58. metaflow/user_decorators/mutable_step.py +424 -0
  59. metaflow/user_decorators/user_flow_decorator.py +263 -0
  60. metaflow/user_decorators/user_step_decorator.py +712 -0
  61. metaflow/util.py +4 -1
  62. metaflow/version.py +1 -1
  63. {metaflow-2.15.21.dist-info → metaflow-2.16.0.dist-info}/METADATA +2 -2
  64. {metaflow-2.15.21.dist-info → metaflow-2.16.0.dist-info}/RECORD +71 -60
  65. metaflow/info_file.py +0 -25
  66. metaflow/package.py +0 -203
  67. metaflow/user_configs/config_decorators.py +0 -568
  68. {metaflow-2.15.21.data → metaflow-2.16.0.data}/data/share/metaflow/devtools/Makefile +0 -0
  69. {metaflow-2.15.21.data → metaflow-2.16.0.data}/data/share/metaflow/devtools/Tiltfile +0 -0
  70. {metaflow-2.15.21.data → metaflow-2.16.0.data}/data/share/metaflow/devtools/pick_services.sh +0 -0
  71. {metaflow-2.15.21.dist-info → metaflow-2.16.0.dist-info}/WHEEL +0 -0
  72. {metaflow-2.15.21.dist-info → metaflow-2.16.0.dist-info}/entry_points.txt +0 -0
  73. {metaflow-2.15.21.dist-info → metaflow-2.16.0.dist-info}/licenses/LICENSE +0 -0
  74. {metaflow-2.15.21.dist-info → metaflow-2.16.0.dist-info}/top_level.txt +0 -0

metaflow/plugins/aws/batch/batch.py
@@ -59,14 +59,24 @@ class Batch(object):
         self._client = BatchClient()
         atexit.register(lambda: self.job.kill() if hasattr(self, "job") else None)
 
-    def _command(self, environment, code_package_url, step_name, step_cmds, task_spec):
+    def _command(
+        self,
+        environment,
+        code_package_metadata,
+        code_package_url,
+        step_name,
+        step_cmds,
+        task_spec,
+    ):
         mflog_expr = export_mflog_env_vars(
             datastore_type="s3",
             stdout_path=STDOUT_PATH,
             stderr_path=STDERR_PATH,
             **task_spec
         )
-        init_cmds = environment.get_package_commands(code_package_url, "s3")
+        init_cmds = environment.get_package_commands(
+            code_package_url, "s3", code_package_metadata
+        )
         init_expr = " && ".join(init_cmds)
         step_expr = bash_capture_logs(
             " && ".join(environment.bootstrap_commands(step_name, "s3") + step_cmds)
@@ -167,6 +177,7 @@ class Batch(object):
         step_name,
         step_cli,
         task_spec,
+        code_package_metadata,
         code_package_sha,
         code_package_url,
         code_package_ds,
@@ -210,7 +221,12 @@ class Batch(object):
             .job_queue(queue)
             .command(
                 self._command(
-                    self.environment, code_package_url, step_name, [step_cli], task_spec
+                    self.environment,
+                    code_package_metadata,
+                    code_package_url,
+                    step_name,
+                    [step_cli],
+                    task_spec,
                 )
             )
             .image(image)
@@ -249,6 +265,7 @@ class Batch(object):
             )
             .task_id(attrs.get("metaflow.task_id"))
             .environment_variable("AWS_DEFAULT_REGION", self._client.region())
+            .environment_variable("METAFLOW_CODE_METADATA", code_package_metadata)
             .environment_variable("METAFLOW_CODE_SHA", code_package_sha)
             .environment_variable("METAFLOW_CODE_URL", code_package_url)
             .environment_variable("METAFLOW_CODE_DS", code_package_ds)
@@ -334,6 +351,7 @@ class Batch(object):
         step_name,
         step_cli,
         task_spec,
+        code_package_metadata,
         code_package_sha,
         code_package_url,
         code_package_ds,
@@ -374,6 +392,7 @@ class Batch(object):
             step_name,
             step_cli,
             task_spec,
+            code_package_metadata,
             code_package_sha,
             code_package_url,
             code_package_ds,

metaflow/plugins/aws/batch/batch_cli.py
@@ -100,6 +100,7 @@ def kill(ctx, run_id, user, my_runs):
     "Metaflow."
 )
 @click.argument("step-name")
+@click.argument("code-package-metadata")
 @click.argument("code-package-sha")
 @click.argument("code-package-url")
 @click.option("--executable", help="Executable requirement for AWS Batch.")
@@ -185,6 +186,7 @@ def kill(ctx, run_id, user, my_runs):
 def step(
     ctx,
     step_name,
+    code_package_metadata,
     code_package_sha,
     code_package_url,
     executable=None,
@@ -317,6 +319,7 @@ def step(
             step_name,
             step_cli,
             task_spec,
+            code_package_metadata,
             code_package_sha,
             code_package_url,
             ctx.obj.flow_datastore.TYPE,

metaflow/plugins/aws/batch/batch_decorator.py
@@ -14,6 +14,7 @@ from metaflow.metaflow_config import (
     DATASTORE_LOCAL_DIR,
     ECS_FARGATE_EXECUTION_ROLE,
     ECS_S3_ACCESS_IAM_ROLE,
+    FEAT_ALWAYS_UPLOAD_CODE_PACKAGE,
 )
 from metaflow.plugins.timeout_decorator import get_run_time_limit_for_task
 from metaflow.sidecar import Sidecar
@@ -126,6 +127,7 @@ class BatchDecorator(StepDecorator):
         "gpu": "0",
         "memory": "4096",
     }
+    package_metadata = None
     package_url = None
     package_sha = None
     run_time_limit = None
@@ -135,8 +137,6 @@
     target_platform = "linux-64"
 
     def init(self):
-        super(BatchDecorator, self).init()
-
         # If no docker image is explicitly specified, impute a default image.
         if not self.attributes["image"]:
             # If metaflow-config specifies a docker image, just use that.
@@ -228,6 +228,7 @@
             # to execute on AWS Batch anymore. We can execute possible fallback
             # code locally.
             cli_args.commands = ["batch", "step"]
+            cli_args.command_args.append(self.package_metadata)
             cli_args.command_args.append(self.package_sha)
             cli_args.command_args.append(self.package_url)
             cli_args.command_options.update(self.attributes)
@@ -403,9 +404,16 @@
     @classmethod
     def _save_package_once(cls, flow_datastore, package):
         if cls.package_url is None:
-            cls.package_url, cls.package_sha = flow_datastore.save_data(
-                [package.blob], len_hint=1
-            )[0]
+            if not FEAT_ALWAYS_UPLOAD_CODE_PACKAGE:
+                cls.package_url, cls.package_sha = flow_datastore.save_data(
+                    [package.blob], len_hint=1
+                )[0]
+                cls.package_metadata = package.package_metadata
+            else:
+                # Blocks until the package is uploaded
+                cls.package_url = package.package_url()
+                cls.package_sha = package.package_sha()
+                cls.package_metadata = package.package_metadata
 
 
 def _setup_multinode_environment():
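
Note on _save_package_once above (the same change appears in KubernetesDecorator._save_package_once and in make_flow for Step Functions further down): when FEAT_ALWAYS_UPLOAD_CODE_PACKAGE is enabled, the code package uploads itself and the caller blocks on package_url() / package_sha(); otherwise the caller still pushes package.blob through flow_datastore.save_data. The following is a minimal, self-contained sketch of that branching; the _Stub classes and the module-level flag are illustrative stand-ins, not Metaflow code.

FEAT_ALWAYS_UPLOAD_CODE_PACKAGE = False  # mirrors the new metaflow_config flag


class _StubDatastore:
    def save_data(self, blobs, len_hint=1):
        # Pretend to upload each blob and return one (url, sha) pair per blob.
        return [("s3://bucket/data/%d" % i, "sha-%d" % i) for i, _ in enumerate(blobs)]


class _StubPackage:
    package_metadata = '{"version": 1}'
    blob = b"code-package-bytes"

    def package_url(self):
        # In Metaflow this call blocks until the package's own upload finishes.
        return "s3://bucket/self-uploaded"

    def package_sha(self):
        return "sha-self"


def save_package_once(flow_datastore, package, state):
    # Resolve (url, sha, metadata) once and cache the result in `state`.
    if state.get("url") is None:
        if not FEAT_ALWAYS_UPLOAD_CODE_PACKAGE:
            state["url"], state["sha"] = flow_datastore.save_data(
                [package.blob], len_hint=1
            )[0]
        else:
            state["url"] = package.package_url()
            state["sha"] = package.package_sha()
        state["metadata"] = package.package_metadata
    return state


print(save_package_once(_StubDatastore(), _StubPackage(), {}))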

metaflow/plugins/aws/step_functions/step_functions.py
@@ -40,6 +40,7 @@ class StepFunctions(object):
         name,
         graph,
         flow,
+        code_package_metadata,
         code_package_sha,
         code_package_url,
         production_token,
@@ -59,6 +60,7 @@
         self.name = name
         self.graph = graph
         self.flow = flow
+        self.code_package_metadata = code_package_metadata
         self.code_package_sha = code_package_sha
         self.code_package_url = code_package_url
         self.production_token = production_token
@@ -853,6 +855,7 @@
                     node, input_paths, self.code_package_url, user_code_retries
                 ),
                 task_spec=task_spec,
+                code_package_metadata=self.code_package_metadata,
                 code_package_sha=self.code_package_sha,
                 code_package_url=self.code_package_url,
                 code_package_ds=self.flow_datastore.TYPE,
@@ -913,7 +916,7 @@
             "with": [
                 decorator.make_decorator_spec()
                 for decorator in node.decorators
-                if not decorator.statically_defined
+                if not decorator.statically_defined and decorator.inserted_by is None
             ]
         }
         # FlowDecorators can define their own top-level options. They are

metaflow/plugins/aws/step_functions/step_functions_cli.py
@@ -7,6 +7,7 @@ from metaflow import JSONType, current, decorators, parameters
 from metaflow._vendor import click
 from metaflow.exception import MetaflowException, MetaflowInternalError
 from metaflow.metaflow_config import (
+    FEAT_ALWAYS_UPLOAD_CODE_PACKAGE,
     SERVICE_VERSION_CHECK,
     SFN_STATE_MACHINE_PREFIX,
     UI_URL,
@@ -331,16 +332,26 @@ def make_flow(
     )
 
     obj.package = MetaflowPackage(
-        obj.flow, obj.environment, obj.echo, obj.package_suffixes
+        obj.flow,
+        obj.environment,
+        obj.echo,
+        suffixes=obj.package_suffixes,
+        flow_datastore=obj.flow_datastore if FEAT_ALWAYS_UPLOAD_CODE_PACKAGE else None,
     )
-    package_url, package_sha = obj.flow_datastore.save_data(
-        [obj.package.blob], len_hint=1
-    )[0]
+    # This blocks until the package is created
+    if FEAT_ALWAYS_UPLOAD_CODE_PACKAGE:
+        package_url = obj.package.package_url()
+        package_sha = obj.package.package_sha()
+    else:
+        package_url, package_sha = obj.flow_datastore.save_data(
+            [obj.package.blob], len_hint=1
+        )[0]
 
     return StepFunctions(
         name,
         obj.graph,
         obj.flow,
+        obj.package.package_metadata,
         package_sha,
         package_url,
         token,

metaflow/plugins/cards/card_decorator.py
@@ -361,11 +361,6 @@ class CardDecorator(StepDecorator):
 
         return list(self._options(top_level_options))
 
-    def task_exception(
-        self, exception, step_name, flow, graph, retry_count, max_user_code_retries
-    ):
-        self._cleanup(step_name)
-
     def _cleanup(self, step_name):
         self._increment_completed_counter()
         if self.task_finished_decos == self.total_decos_on_step[step_name]:

metaflow/plugins/kubernetes/kubernetes.py
@@ -90,6 +90,7 @@ class Kubernetes(object):
         step_name,
         task_id,
         attempt,
+        code_package_metadata,
         code_package_url,
         step_cmds,
     ):
@@ -104,7 +105,7 @@
             stderr_path=STDERR_PATH,
         )
         init_cmds = self._environment.get_package_commands(
-            code_package_url, self._datastore.TYPE
+            code_package_url, self._datastore.TYPE, code_package_metadata
         )
         init_expr = " && ".join(init_cmds)
         step_expr = bash_capture_logs(
@@ -165,6 +166,7 @@
         task_id,
         attempt,
         user,
+        code_package_metadata,
         code_package_sha,
         code_package_url,
         code_package_ds,
@@ -232,6 +234,7 @@
                 qos=qos,
                 security_context=security_context,
             )
+            .environment_variable("METAFLOW_CODE_METADATA", code_package_metadata)
             .environment_variable("METAFLOW_CODE_SHA", code_package_sha)
             .environment_variable("METAFLOW_CODE_URL", code_package_url)
             .environment_variable("METAFLOW_CODE_DS", code_package_ds)
@@ -415,6 +418,7 @@
                     step_name=step_name,
                     task_id=_tskid,
                     attempt=attempt,
+                    code_package_metadata=code_package_metadata,
                     code_package_url=code_package_url,
                     step_cmds=[
                         step_cli.replace(
@@ -463,6 +467,7 @@
         task_id,
         attempt,
         user,
+        code_package_metadata,
         code_package_sha,
         code_package_url,
         code_package_ds,
@@ -511,6 +516,7 @@
                 step_name=step_name,
                 task_id=task_id,
                 attempt=attempt,
+                code_package_metadata=code_package_metadata,
                 code_package_url=code_package_url,
                 step_cmds=[step_cli],
             ),
@@ -539,6 +545,7 @@
                 qos=qos,
                 security_context=security_context,
             )
+            .environment_variable("METAFLOW_CODE_METADATA", code_package_metadata)
             .environment_variable("METAFLOW_CODE_SHA", code_package_sha)
             .environment_variable("METAFLOW_CODE_URL", code_package_url)
             .environment_variable("METAFLOW_CODE_DS", code_package_ds)

metaflow/plugins/kubernetes/kubernetes_cli.py
@@ -41,6 +41,7 @@ def kubernetes():
 )
 @tracing.cli("kubernetes/step")
 @click.argument("step-name")
+@click.argument("code-package-metadata")
 @click.argument("code-package-sha")
 @click.argument("code-package-url")
 @click.option(
@@ -161,6 +162,7 @@ def kubernetes():
 def step(
     ctx,
     step_name,
+    code_package_metadata,
     code_package_sha,
     code_package_url,
     executable=None,
@@ -304,6 +306,7 @@ def step(
             task_id=task_id,
             attempt=str(retry_count),
             user=util.get_username(),
+            code_package_metadata=code_package_metadata,
             code_package_sha=code_package_sha,
             code_package_url=code_package_url,
             code_package_ds=ctx.obj.flow_datastore.TYPE,

metaflow/plugins/kubernetes/kubernetes_decorator.py
@@ -11,6 +11,7 @@ from metaflow.metadata_provider import MetaDatum
 from metaflow.metadata_provider.util import sync_local_metadata_to_datastore
 from metaflow.metaflow_config import (
     DATASTORE_LOCAL_DIR,
+    FEAT_ALWAYS_UPLOAD_CODE_PACKAGE,
     KUBERNETES_CONTAINER_IMAGE,
     KUBERNETES_CONTAINER_REGISTRY,
     KUBERNETES_CPU,
@@ -168,6 +169,7 @@ class KubernetesDecorator(StepDecorator):
         "qos": KUBERNETES_QOS,
         "security_context": None,
     }
+    package_metadata = None
     package_url = None
     package_sha = None
     run_time_limit = None
@@ -177,8 +179,6 @@
     target_platform = KUBERNETES_CONDA_ARCH or "linux-64"
 
     def init(self):
-        super(KubernetesDecorator, self).init()
-
         if not self.attributes["namespace"]:
             self.attributes["namespace"] = KUBERNETES_NAMESPACE
         if not self.attributes["service_account"]:
@@ -471,6 +471,7 @@
             # to execute on Kubernetes anymore. We can execute possible fallback
             # code locally.
             cli_args.commands = ["kubernetes", "step"]
+            cli_args.command_args.append(self.package_metadata)
             cli_args.command_args.append(self.package_sha)
             cli_args.command_args.append(self.package_url)
 
@@ -642,9 +643,16 @@
     @classmethod
     def _save_package_once(cls, flow_datastore, package):
         if cls.package_url is None:
-            cls.package_url, cls.package_sha = flow_datastore.save_data(
-                [package.blob], len_hint=1
-            )[0]
+            if not FEAT_ALWAYS_UPLOAD_CODE_PACKAGE:
+                cls.package_url, cls.package_sha = flow_datastore.save_data(
+                    [package.blob], len_hint=1
+                )[0]
+                cls.package_metadata = package.package_metadata
+            else:
+                # Blocks until the package is uploaded
+                cls.package_url = package.package_url()
+                cls.package_sha = package.package_sha()
+                cls.package_metadata = package.package_metadata
 
 
 # TODO: Unify this method with the multi-node setup in @batch

metaflow/plugins/package_cli.py
@@ -9,35 +9,30 @@ def cli():
 
 
 @cli.group(help="Commands related to code packages.")
+@click.option(
+    "--timeout", default=60, help="Timeout for package operations in seconds."
+)
 @click.pass_obj
-def package(obj):
+def package(obj, timeout):
     # Prepare the package before any of the sub-commands are invoked.
+    # We explicitly will *not* upload it to the datastore.
     obj.package = MetaflowPackage(
-        obj.flow, obj.environment, obj.echo, obj.package_suffixes
+        obj.flow,
+        obj.environment,
+        obj.echo,
+        suffixes=obj.package_suffixes,
+        flow_datastore=None,
     )
+    obj.package_op_timeout = timeout
 
 
-@package.command(help="Output information about the current code package.")
+@package.command(help="Output information about the code package.")
 @click.pass_obj
 def info(obj):
-    obj.echo("Status of the current working directory:", fg="magenta", bold=False)
-    obj.echo_always(
-        "Hash: *%s*" % sha1(obj.package.blob).hexdigest(),
-        highlight="green",
-        highlight_bold=False,
-    )
-    obj.echo_always(
-        "Package size: *%d* KB" % (len(obj.package.blob) / 1024),
-        highlight="green",
-        highlight_bold=False,
-    )
-    num = sum(1 for _ in obj.package.path_tuples())
-    obj.echo_always(
-        "Number of files: *%d*" % num, highlight="green", highlight_bold=False
-    )
+    obj.echo_always(obj.package.show())
 
 
-@package.command(help="List files included in the code package.")
+@package.command(help="List all files included in the code package.")
 @click.option(
     "--archive/--no-archive",
     default=False,
@@ -47,8 +42,10 @@ def info(obj):
 )
 @click.pass_obj
 def list(obj, archive=False):
+    _ = obj.package.blob_with_timeout(timeout=obj.package_op_timeout)
+    # We now have all the information about the blob
     obj.echo(
-        "Files included in the code package " "(change with --package-suffixes):",
+        "Files included in the code package (change with --package-suffixes):",
         fg="magenta",
         bold=False,
     )
@@ -58,10 +55,15 @@ def list(obj, archive=False):
     obj.echo_always("\n".join(path for path, _ in obj.package.path_tuples()))
 
 
-@package.command(help="Save the current code package in a tar file")
+@package.command(help="Save the current code package to a file.")
 @click.argument("path")
 @click.pass_obj
 def save(obj, path):
     with open(path, "wb") as f:
-        f.write(obj.package.blob)
-    obj.echo("Code package saved in *%s*." % path, fg="magenta", bold=False)
+        f.write(obj.package.blob())
+    obj.echo(
+        "Code package saved in *%s* with metadata: %s"
+        % (path, obj.package.package_metadata),
+        fg="magenta",
+        bold=False,
+    )
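
The reworked package group above adds a group-level --timeout option and stashes it on the shared CLI object so that subcommands such as list can bound how long they wait for the package blob to materialize. Below is a self-contained sketch of that click pattern; the State class and the echoed message are illustrative stand-ins, not Metaflow code.

import click


class State:
    # Stand-in for the Metaflow CLI context object; illustrative only.
    package_op_timeout = None


@click.group()
@click.pass_context
def cli(ctx):
    ctx.obj = State()


@cli.group(help="Commands related to code packages.")
@click.option(
    "--timeout", default=60, help="Timeout for package operations in seconds."
)
@click.pass_obj
def package(obj, timeout):
    # The group callback runs before any subcommand, so it can record the
    # chosen timeout on the shared object for subcommands to read.
    obj.package_op_timeout = timeout


@package.command()
@click.pass_obj
def list(obj):
    click.echo("would wait up to %ds for the package blob" % obj.package_op_timeout)


if __name__ == "__main__":
    cli()

Invoked as "python script.py package --timeout 120 list", the subcommand reads the timeout chosen at the group level.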

metaflow/plugins/parallel_decorator.py
@@ -36,8 +36,10 @@ class ParallelDecorator(StepDecorator):
     defaults = {}
     IS_PARALLEL = True
 
-    def __init__(self, attributes=None, statically_defined=False):
-        super(ParallelDecorator, self).__init__(attributes, statically_defined)
+    def __init__(self, attributes=None, statically_defined=False, inserted_by=None):
+        super(ParallelDecorator, self).__init__(
+            attributes, statically_defined, inserted_by
+        )
 
     def runtime_step_cli(
         self, cli_args, retry_count, max_user_code_retries, ubf_context

metaflow/plugins/pypi/bootstrap.py
@@ -12,9 +12,10 @@ import platform
 from urllib.error import URLError
 from urllib.request import urlopen
 from metaflow.metaflow_config import DATASTORE_LOCAL_DIR, CONDA_USE_FAST_INIT
+from metaflow.packaging_sys import MetaflowCodeContent, ContentType
 from metaflow.plugins import DATASTORES
 from metaflow.plugins.pypi.utils import MICROMAMBA_MIRROR_URL, MICROMAMBA_URL
-from metaflow.util import which, get_metaflow_root
+from metaflow.util import which
 from urllib.request import Request
 import warnings
 
@@ -365,8 +366,13 @@ if __name__ == "__main__":
 
         # Move MAGIC_FILE inside local datastore.
         os.makedirs(manifest_dir, exist_ok=True)
+        path_to_manifest = MetaflowCodeContent.get_filename(
+            MAGIC_FILE, ContentType.OTHER_CONTENT
+        )
+        if path_to_manifest is None:
+            raise RuntimeError(f"Cannot find {MAGIC_FILE} in the package")
         shutil.move(
-            os.path.join(get_metaflow_root(), MAGIC_FILE),
+            path_to_manifest,
             os.path.join(manifest_dir, MAGIC_FILE),
         )
         with open(os.path.join(manifest_dir, MAGIC_FILE)) as f:
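
The bootstrap change above stops assuming the conda manifest sits at the Metaflow root and instead asks the new packaging layer (MetaflowCodeContent.get_filename) where the file was unpacked, failing loudly when it is missing. A small sketch of the same locate-or-fail-then-move flow; find_in_package is a hypothetical stand-in for the real lookup, not a Metaflow function.

import os
import shutil


def find_in_package(name):
    # Hypothetical stand-in for MetaflowCodeContent.get_filename: return the
    # on-disk path of a file shipped in the code package, or None if missing.
    candidate = os.path.join(os.getcwd(), name)
    return candidate if os.path.exists(candidate) else None


def move_manifest(magic_file, manifest_dir):
    os.makedirs(manifest_dir, exist_ok=True)
    path_to_manifest = find_in_package(magic_file)
    if path_to_manifest is None:
        # Fail loudly instead of silently bootstrapping without a manifest.
        raise RuntimeError(f"Cannot find {magic_file} in the package")
    shutil.move(path_to_manifest, os.path.join(manifest_dir, magic_file))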