metaflow 2.14.0__py2.py3-none-any.whl → 2.14.1__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
metaflow/cli.py CHANGED
@@ -238,7 +238,6 @@ def version(obj):
     lazy_sources=plugins.get_plugin_cli_path(),
     invoke_without_command=True,
 )
-@tracing.cli_entrypoint("cli/start")
 # Quiet is eager to make sure it is available when processing --config options since
 # we need it to construct a context to pass to any DeployTimeField for the default
 # value.
@@ -166,6 +166,7 @@ def common_run_options(func):
 )
 @click.argument("step-to-rerun", required=False)
 @click.command(help="Resume execution of a previous run of this flow.")
+@tracing.cli("cli/resume")
 @common_run_options
 @click.pass_obj
 def resume(
@@ -283,7 +284,7 @@ def resume(
 
 @parameters.add_custom_parameters(deploy_mode=True)
 @click.command(help="Run the workflow locally.")
-@tracing.cli_entrypoint("cli/run")
+@tracing.cli("cli/run")
 @common_run_options
 @click.option(
     "--namespace",
@@ -345,18 +346,18 @@ def run(
             "msg": "Starting run",
         },
     )
+
+    runtime.print_workflow_info()
+    runtime.persist_constants()
+    if runner_attribute_file:
+        with open(runner_attribute_file, "w", encoding="utf-8") as f:
+            json.dump(
+                {
+                    "run_id": runtime.run_id,
+                    "flow_name": obj.flow.name,
+                    "metadata": obj.metadata.metadata_str(),
+                },
+                f,
+            )
     with runtime.run_heartbeat():
-        runtime.print_workflow_info()
-        runtime.persist_constants()
-
-        if runner_attribute_file:
-            with open(runner_attribute_file, "w", encoding="utf-8") as f:
-                json.dump(
-                    {
-                        "run_id": runtime.run_id,
-                        "flow_name": obj.flow.name,
-                        "metadata": obj.metadata.metadata_str(),
-                    },
-                    f,
-                )
         runtime.execute()
@@ -7,9 +7,11 @@ from ..exception import CommandException
 from ..task import MetaflowTask
 from ..unbounded_foreach import UBF_CONTROL, UBF_TASK
 from ..util import decompress_list
+import metaflow.tracing as tracing
 
 
 @click.command(help="Internal command to execute a single task.", hidden=True)
+@tracing.cli("cli/step")
 @click.argument("step-name")
 @click.option(
     "--run-id",
metaflow/client/core.py CHANGED
@@ -5,6 +5,7 @@ import os
 import tarfile
 from collections import namedtuple
 from datetime import datetime
+from tempfile import TemporaryDirectory
 from io import BytesIO
 from itertools import chain
 from typing import (
@@ -379,7 +380,7 @@ class MetaflowObject(object):
             _CLASSES[self._CHILD_CLASS]._NAME,
             query_filter,
             self._attempt,
-            *self.path_components
+            *self.path_components,
         )
         unfiltered_children = unfiltered_children if unfiltered_children else []
         children = filter(
@@ -878,6 +879,73 @@ class MetaflowCode(object):
         """
         return self._tar
 
+    def extract(self) -> TemporaryDirectory:
+        """
+        Extracts the code package to a temporary directory.
+
+        This creates a temporary directory containing all user code
+        files from the code package. The temporary directory is
+        automatically deleted when the returned TemporaryDirectory
+        object is garbage collected or when its cleanup() is called.
+
+        To preserve the contents in a permanent location, use
+        os.replace(), which performs a zero-copy move on the same
+        filesystem:
+
+        ```python
+        with task.code.extract() as tmp_dir:
+            # Move contents to a permanent location
+            for item in os.listdir(tmp_dir):
+                src = os.path.join(tmp_dir, item)
+                dst = os.path.join('/path/to/permanent/dir', item)
+                os.makedirs(os.path.dirname(dst), exist_ok=True)
+                os.replace(src, dst)  # Atomic move operation
+        ```
+
+        Returns
+        -------
+        TemporaryDirectory
+            A temporary directory containing the extracted code files.
+            The directory and its contents are automatically deleted when
+            this object is garbage collected.
+        """
+        exclusions = [
+            "metaflow/",
+            "metaflow_extensions/",
+            "INFO",
+            "CONFIG_PARAMETERS",
+            "conda.manifest",
+            # This file is created when using the conda/pypi features available in
+            # nflx-metaflow-extensions: https://github.com/Netflix/metaflow-nflx-extensions
+            "condav2-1.cnd",
+        ]
+        members = [
+            m
+            for m in self.tarball.getmembers()
+            if not any(
+                (x.endswith("/") and m.name.startswith(x)) or (m.name == x)
+                for x in exclusions
+            )
+        ]
+
+        tmp = TemporaryDirectory()
+        self.tarball.extractall(tmp.name, members)
+        return tmp
+
+    @property
+    def script_name(self) -> str:
+        """
+        Returns the filename of the Python script containing the FlowSpec.
+
+        This is the main Python file that was used to execute the flow. For example,
+        if your flow is defined in 'myflow.py', this property returns 'myflow.py'.
+
+        Returns
+        -------
+        str
+            Name of the Python file containing the FlowSpec
+        """
+        return self._info["script"]
+
     def __str__(self):
         return "<MetaflowCode: %s>" % self._info["script"]
 
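
Together, `extract()` and `script_name` make stored code packages usable from the client API. A usage sketch; the pathspec `HelloFlow/3/start/1` is hypothetical:

```python
import os

from metaflow import Task

task = Task("HelloFlow/3/start/1", _namespace_check=False)  # hypothetical pathspec
code = task.code

print(code.script_name)  # e.g. "helloflow.py"

# TemporaryDirectory doubles as a context manager; the directory is removed on exit
with code.extract() as tmp_dir:
    print(os.listdir(tmp_dir))  # user code only; metaflow/ internals are excluded
```
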
@@ -1123,6 +1191,143 @@ class Task(MetaflowObject):
         # exclude private data artifacts
         return x.id[0] != "_"
 
+    def _iter_matching_tasks(self, steps, metadata_key, metadata_pattern):
+        """
+        Yield tasks from the given steps whose metadata matches a foreach path pattern.
+
+        Parameters
+        ----------
+        steps : List[Step]
+            Steps to search for matching tasks
+        metadata_key : str
+            Metadata field name to filter on
+        metadata_pattern : str
+            Regex pattern to match against the metadata field value
+
+        Returns
+        -------
+        Iterator[Task]
+            Tasks matching the foreach path pattern
+        """
+        flow_id, run_id, _, _ = self.path_components
+
+        for step in steps:
+            task_pathspecs = self._metaflow.metadata.filter_tasks_by_metadata(
+                flow_id, run_id, step.id, metadata_key, metadata_pattern
+            )
+            for task_pathspec in task_pathspecs:
+                yield Task(pathspec=task_pathspec, _namespace_check=False)
+
+    @property
+    def parent_tasks(self) -> Iterator["Task"]:
+        """
+        Yields all parent tasks of the current task, if any exist.
+
+        Yields
+        ------
+        Task
+            Parent task of the current task
+        """
+        flow_id, run_id, _, _ = self.path_components
+
+        steps = list(self.parent.parent_steps)
+        if not steps:
+            return []
+
+        current_path = self.metadata_dict.get("foreach-execution-path", "")
+
+        if len(steps) > 1:
+            # Static join - use exact path matching
+            pattern = current_path or ".*"
+            yield from self._iter_matching_tasks(
+                steps, "foreach-execution-path", pattern
+            )
+            return
+
+        # Handle single step case
+        target_task = Step(
+            f"{flow_id}/{run_id}/{steps[0].id}", _namespace_check=False
+        ).task
+        target_path = target_task.metadata_dict.get("foreach-execution-path")
+
+        if not target_path or not current_path:
+            # (Current task, "A:10") and (Parent task, "")
+            # Pattern: ".*"
+            pattern = ".*"
+        else:
+            current_depth = len(current_path.split(","))
+            target_depth = len(target_path.split(","))
+
+            if current_depth < target_depth:
+                # Foreach join
+                # (Current task, "A:10,B:13") and (Parent task, "A:10,B:13,C:21")
+                # Pattern: "A:10,B:13,.*"
+                pattern = f"{current_path},.*"
+            else:
+                # Foreach split or linear step
+                # Option 1:
+                # (Current task, "A:10,B:13,C:21") and (Parent task, "A:10,B:13")
+                # Option 2:
+                # (Current task, "A:10,B:13") and (Parent task, "A:10,B:13")
+                # Pattern: "A:10,B:13"
+                pattern = ",".join(current_path.split(",")[:target_depth])
+
+        yield from self._iter_matching_tasks(steps, "foreach-execution-path", pattern)
+
+    @property
+    def child_tasks(self) -> Iterator["Task"]:
+        """
+        Yields all child tasks of the current task, if any exist.
+
+        Yields
+        ------
+        Task
+            Child task of the current task
+        """
+        flow_id, run_id, _, _ = self.path_components
+        steps = list(self.parent.child_steps)
+        if not steps:
+            return []
+
+        current_path = self.metadata_dict.get("foreach-execution-path", "")
+
+        if len(steps) > 1:
+            # Static split - use exact path matching
+            pattern = current_path or ".*"
+            yield from self._iter_matching_tasks(
+                steps, "foreach-execution-path", pattern
+            )
+            return
+
+        # Handle single step case
+        target_task = Step(
+            f"{flow_id}/{run_id}/{steps[0].id}", _namespace_check=False
+        ).task
+        target_path = target_task.metadata_dict.get("foreach-execution-path")
+
+        if not target_path or not current_path:
+            # (Current task, "A:10") and (Child task, "")
+            # Pattern: ".*"
+            pattern = ".*"
+        else:
+            current_depth = len(current_path.split(","))
+            target_depth = len(target_path.split(","))
+
+            if current_depth < target_depth:
+                # Foreach split
+                # (Current task, "A:10,B:13") and (Child task, "A:10,B:13,C:21")
+                # Pattern: "A:10,B:13,.*"
+                pattern = f"{current_path},.*"
+            else:
+                # Foreach join or linear step
+                # Option 1:
+                # (Current task, "A:10,B:13,C:21") and (Child task, "A:10,B:13")
+                # Option 2:
+                # (Current task, "A:10,B:13") and (Child task, "A:10,B:13")
+                # Pattern: "A:10,B:13"
+                pattern = ",".join(current_path.split(",")[:target_depth])
+
+        yield from self._iter_matching_tasks(steps, "foreach-execution-path", pattern)
+
     @property
     def metadata(self) -> List[Metadata]:
         """
@@ -1837,6 +2042,41 @@ class Step(MetaflowObject):
         for t in self:
             return t.environment_info
 
+    @property
+    def parent_steps(self) -> Iterator["Step"]:
+        """
+        Yields parent steps for the current step.
+
+        Yields
+        ------
+        Step
+            Parent step
+        """
+        graph_info = self.task["_graph_info"].data
+
+        if self.id != "start":
+            flow, run, _ = self.path_components
+            for node_name, attributes in graph_info["steps"].items():
+                if self.id in attributes["next"]:
+                    yield Step(f"{flow}/{run}/{node_name}", _namespace_check=False)
+
+    @property
+    def child_steps(self) -> Iterator["Step"]:
+        """
+        Yields child steps for the current step.
+
+        Yields
+        ------
+        Step
+            Child step
+        """
+        graph_info = self.task["_graph_info"].data
+
+        if self.id != "end":
+            flow, run, _ = self.path_components
+            for next_step in graph_info["steps"][self.id]["next"]:
+                yield Step(f"{flow}/{run}/{next_step}", _namespace_check=False)
+
 
 class Run(MetaflowObject):
     """
metaflow/cmd/main_cli.py CHANGED
@@ -12,7 +12,7 @@ import metaflow.tracing as tracing
 
 
 @click.group()
-@tracing.cli_entrypoint("cli/main")
+@tracing.cli("cli/main")
 def main():
     pass
 
@@ -52,6 +52,7 @@ class MetadataHeartBeat(object):
                 time.sleep(frequency_secs)
                 retry_counter = 0
             except HeartBeatException as e:
+                print(e)
                 retry_counter = retry_counter + 1
                 time.sleep(1.5**retry_counter)
 
@@ -5,6 +5,7 @@ import time
 from collections import namedtuple
 from itertools import chain
 
+from typing import List
 from metaflow.exception import MetaflowInternalError, MetaflowTaggingError
 from metaflow.tagging_util import validate_tag
 from metaflow.util import get_username, resolve_identity_as_tuple, is_stringish
@@ -672,6 +673,38 @@ class MetadataProvider(object):
         if metadata:
             self.register_metadata(run_id, step_name, task_id, metadata)
 
+    @classmethod
+    def filter_tasks_by_metadata(
+        cls,
+        flow_name: str,
+        run_id: str,
+        step_name: str,
+        field_name: str,
+        pattern: str,
+    ) -> List[str]:
+        """
+        Filter tasks by a metadata field and pattern, returning the pathspecs of the tasks that match.
+
+        Parameters
+        ----------
+        flow_name : str
+            Name of the flow that the run belongs to
+        run_id : str
+            Run id that, together with flow_name, identifies the specific run whose tasks to query
+        step_name : str
+            Step name to query tasks from
+        field_name : str
+            Metadata field name to query
+        pattern : str
+            Pattern to match against the metadata field value
+
+        Returns
+        -------
+        List[str]
+            List of task pathspecs that satisfy the query
+        """
+        raise NotImplementedError()
+
     @staticmethod
     def _apply_filter(elts, filters):
         if filters is None:
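
The base class only declares the hook; concrete providers must implement it. A hypothetical sketch of an implementation living in the same module as `MetadataProvider` (the `_load_task_metadata` helper is invented for illustration; real providers query their own backing stores):

```python
import re
from typing import List


class MyMetadataProvider(MetadataProvider):
    @classmethod
    def filter_tasks_by_metadata(
        cls, flow_name: str, run_id: str, step_name: str, field_name: str, pattern: str
    ) -> List[str]:
        regex = re.compile(pattern)
        matching = []
        # _load_task_metadata is a hypothetical helper yielding
        # (task_id, {field: value}) pairs for every task in the step.
        for task_id, fields in cls._load_task_metadata(flow_name, run_id, step_name):
            if regex.fullmatch(fields.get(field_name, "")):
                matching.append(f"{flow_name}/{run_id}/{step_name}/{task_id}")
        return matching
```
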
@@ -248,8 +248,7 @@ DEFAULT_CONTAINER_IMAGE = from_conf("DEFAULT_CONTAINER_IMAGE")
 # Default container registry
 DEFAULT_CONTAINER_REGISTRY = from_conf("DEFAULT_CONTAINER_REGISTRY")
 # Controls whether to include foreach stack information in metadata.
-# TODO(Darin, 05/01/24): Remove this flag once we are confident with this feature.
-INCLUDE_FOREACH_STACK = from_conf("INCLUDE_FOREACH_STACK", False)
+INCLUDE_FOREACH_STACK = from_conf("INCLUDE_FOREACH_STACK", True)
 # Maximum length of the foreach value string to be stored in each ForeachFrame.
 MAXIMUM_FOREACH_VALUE_CHARS = from_conf("MAXIMUM_FOREACH_VALUE_CHARS", 30)
 # The default runtime limit (In seconds) of jobs launched by any compute provider. Default of 5 days.
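
Note the behavior change here: foreach stack metadata is now recorded by default. Assuming the standard config mechanism where `from_conf` resolves `METAFLOW_`-prefixed environment variables, opting back out would look like:

```python
import os

# Assumption: from_conf("INCLUDE_FOREACH_STACK", ...) reads this variable;
# it must be set before metaflow is imported.
os.environ["METAFLOW_INCLUDE_FOREACH_STACK"] = "false"

import metaflow  # noqa: E402  (imported after the override on purpose)
```
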
@@ -433,9 +432,6 @@ CONDA_PACKAGE_GSROOT = from_conf("CONDA_PACKAGE_GSROOT")
 # should result in an appreciable speedup in flow environment initialization.
 CONDA_DEPENDENCY_RESOLVER = from_conf("CONDA_DEPENDENCY_RESOLVER", "conda")
 
-# Default to not using fast init binary.
-CONDA_USE_FAST_INIT = from_conf("CONDA_USE_FAST_INIT", False)
-
 ###
 # Escape hatch configuration
 ###
@@ -8,12 +8,12 @@ from metaflow.plugins import DATASTORES
 from metaflow.util import Path
 from . import TASK_LOG_SOURCE
 
-from metaflow.tracing import cli_entrypoint
+from metaflow.tracing import cli
 
 SMALL_FILE_LIMIT = 1024 * 1024
 
 
-@cli_entrypoint("save_logs")
+@cli("save_logs")
 def save_logs():
     def _read_file(path):
         with open(path, "rb") as f:
@@ -77,13 +77,9 @@ class ArgoWorkflowsSchedulingException(MetaflowException):
 # List of future enhancements -
 # 1. Configure Argo metrics.
 # 2. Support resuming failed workflows within Argo Workflows.
-# 3. Support gang-scheduled clusters for distributed PyTorch/TF - One option is to
-#    use volcano - https://github.com/volcano-sh/volcano/tree/master/example/integrations/argo
-# 4. Support GitOps workflows.
-# 5. Add Metaflow tags to labels/annotations.
-# 6. Support Multi-cluster scheduling - https://github.com/argoproj/argo-workflows/issues/3523#issuecomment-792307297
-# 7. Support R lang.
-# 8. Ping @savin at slack.outerbounds.co for any feature request
+# 3. Add Metaflow tags to labels/annotations.
+# 4. Support R lang.
+# 5. Ping @savin at slack.outerbounds.co for any feature request
 
 
 class ArgoWorkflows(object):
@@ -742,6 +738,7 @@ class ArgoWorkflows(object):
                 {
                     "slack": bool(self.notify_slack_webhook_url),
                     "pager_duty": bool(self.notify_pager_duty_integration_key),
+                    "incident_io": bool(self.notify_incident_io_api_key),
                 }
             )
         }
@@ -753,6 +750,7 @@ class ArgoWorkflows(object):
                 {
                     "slack": bool(self.notify_slack_webhook_url),
                     "pager_duty": bool(self.notify_pager_duty_integration_key),
+                    "incident_io": bool(self.notify_incident_io_api_key),
                 }
             )
         }
@@ -2021,6 +2019,8 @@ class ArgoWorkflows(object):
         kubernetes_labels = {
             "task_id_entropy": "{{inputs.parameters.task-id-entropy}}",
             "num_parallel": "{{inputs.parameters.num-parallel}}",
+            "metaflow/argo-workflows-name": "{{workflow.name}}",
+            "workflows.argoproj.io/workflow": "{{workflow.name}}",
         }
         jobset.labels(
             {
@@ -2305,6 +2305,10 @@ class ArgoWorkflows(object):
             templates.append(self._slack_success_template())
             templates.append(self._pager_duty_change_template())
             templates.append(self._incident_io_change_template())
+
+            # Clean up None values from templates.
+            templates = list(filter(None, templates))
+
         if self.notify_on_error or self.notify_on_success:
             # Warning: terrible hack to workaround a bug in Argo Workflow where the
             # templates listed above do not execute unless there is an
@@ -3635,7 +3639,7 @@ class Template(object):
     def resource(self, action, manifest, success_criteria, failure_criteria):
         self.payload["resource"] = {}
         self.payload["resource"]["action"] = action
-        self.payload["setOwnerReference"] = True
+        self.payload["resource"]["setOwnerReference"] = True
         self.payload["resource"]["successCondition"] = success_criteria
         self.payload["resource"]["failureCondition"] = failure_criteria
         self.payload["resource"]["manifest"] = manifest
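
This is a key-placement fix: `setOwnerReference` was previously set on the template payload itself rather than inside its `resource` block, where Argo Workflows expects it. The corrected payload shape, with illustrative condition strings and an elided manifest:

```python
payload = {
    "resource": {
        "action": "create",
        "setOwnerReference": True,  # now nested under "resource", as Argo expects
        "successCondition": "status.phase == Succeeded",  # illustrative
        "failureCondition": "status.phase == Failed",  # illustrative
        "manifest": "...",  # Kubernetes manifest YAML (elided)
    }
}
```
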
@@ -168,12 +168,12 @@ def argo_workflows(obj, name=None):
 )
 @click.option(
     "--notify-slack-webhook-url",
-    default="",
+    default=None,
     help="Slack incoming webhook url for workflow success/failure notifications.",
 )
 @click.option(
     "--notify-pager-duty-integration-key",
-    default="",
+    default=None,
     help="PagerDuty Events API V2 Integration key for workflow success/failure notifications.",
 )
 @click.option(
@@ -155,7 +155,7 @@ def normalize_client_error(err):
 # S3 worker pool
 
 
-@tracing.cli_entrypoint("s3op/worker")
+@tracing.cli("s3op/worker")
 def worker(result_file_name, queue, mode, s3config):
     # Interpret mode, it can either be a single op or something like
     # info_download or info_upload which implies:
@@ -723,7 +723,7 @@ def cli():
 
 
 @cli.command("list", help="List S3 objects")
-@tracing.cli_entrypoint("s3op/list")
+@tracing.cli("s3op/list")
 @click.option(
     "--recursive/--no-recursive",
     default=False,
@@ -783,7 +783,7 @@ def lst(
 
 
 @cli.command(help="Upload files to S3")
-@tracing.cli_entrypoint("s3op/put")
+@tracing.cli("s3op/put")
 @click.option(
     "--file",
     "files",
@@ -978,7 +978,7 @@ def _populate_prefixes(prefixes, inputs):
 
 
 @cli.command(help="Download files from S3")
-@tracing.cli_entrypoint("s3op/get")
+@tracing.cli("s3op/get")
 @click.option(
     "--recursive/--no-recursive",
     default=False,
@@ -264,6 +264,7 @@ class Server(object):
     def serve(self, path=None, port=None):
         # Open up a connection
         if path is not None:
+            # Keep the print line to facilitate debugging
             # print("SERVER: Starting at %s" % path)
             sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
             __try_op__("bind", sock.bind, BIND_RETRY, path)
@@ -503,6 +504,12 @@ class Server(object):
         class_type = self._known_classes.get(class_name)
         if class_type is None:
             raise ValueError("Unknown class %s" % class_name)
+        # Check if __init__ is overridden
+        override_mapping = self._overrides.get(class_type)
+        if override_mapping:
+            override_func = override_mapping.get("__init__")
+            if override_func:
+                return override_func(None, class_type, *args, **kwargs)
         return class_type(*args, **kwargs)
 
     def _handle_subclasscheck(self, target, class_name, otherclass_name, reverse=False):
@@ -276,9 +276,22 @@ class MetaWithConnection(StubMetaClass):
         if len(args) > 0 and id(args[0]) == id(cls.___class_connection___):
             return super(MetaWithConnection, cls).__call__(*args, **kwargs)
         else:
-            return cls.___class_connection___.stub_request(
-                None, OP_INIT, cls.___class_remote_class_name___, *args, **kwargs
-            )
+            if hasattr(cls, "__overriden_init__"):
+                return cls.__overriden_init__(
+                    None,
+                    functools.partial(
+                        cls.___class_connection___.stub_request,
+                        None,
+                        OP_INIT,
+                        cls.___class_remote_class_name___,
+                    ),
+                    *args,
+                    **kwargs
+                )
+            else:
+                return cls.___class_connection___.stub_request(
+                    None, OP_INIT, cls.___class_remote_class_name___, *args, **kwargs
+                )
 
     def __subclasscheck__(cls, subclass):
         subclass_name = "%s.%s" % (subclass.__module__, subclass.__name__)
@@ -381,7 +394,10 @@ def create_class(
             name = name[7:]
             method_type = CLASS_METHOD
         if name in overriden_methods:
-            if method_type == NORMAL_METHOD:
+            if name == "__init__":
+                class_dict["__overriden_init__"] = overriden_methods["__init__"]
+
+            elif method_type == NORMAL_METHOD:
                 class_dict[name] = (
                     lambda override, orig_method: lambda obj, *args, **kwargs: override(
                         obj, functools.partial(orig_method, obj), *args, **kwargs
@@ -412,6 +428,7 @@ def create_class(
         class_dict[name] = _make_method(
             method_type, connection, class_name, name, doc
         )
+
     # Check for any getattr/setattr overrides
     special_attributes = set(getattr_overrides.keys())
     special_attributes.update(set(setattr_overrides.keys()))
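
Taken together, these hunks let escape-hatch overrides intercept stub construction: `create_class` stashes the override under `__overriden_init__`, and `MetaWithConnection.__call__` invokes it with a callable that issues the remote `OP_INIT` request. A hypothetical override matching that call signature:

```python
# Hypothetical user-side override: the first argument is a placeholder (None),
# the second is the partial that performs the remote OP_INIT stub request.
def overridden_init(obj, remote_init, *args, **kwargs):
    kwargs.setdefault("timeout", 30)  # "timeout" is an invented parameter
    return remote_init(*args, **kwargs)
```
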
@@ -39,7 +39,7 @@ def kubernetes():
     "command inside a Kubernetes pod with the given options. Typically you do not call "
     "this command directly; it is used internally by Metaflow."
 )
-@tracing.cli_entrypoint("kubernetes/step")
+@tracing.cli("kubernetes/step")
 @click.argument("step-name")
 @click.argument("code-package-sha")
 @click.argument("code-package-url")