flyte 2.0.0b3__py3-none-any.whl → 2.0.0b5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of flyte might be problematic; consult the package registry's advisory page for more details.

@@ -7,6 +7,7 @@ import tempfile
7
7
  from pathlib import Path
8
8
  from typing import ClassVar, Type
9
9
 
10
+ from async_lru import alru_cache
10
11
  from flyteidl.core.tasks_pb2 import TaskTemplate
11
12
 
12
13
  from flyte._logging import log, logger
@@ -109,6 +110,7 @@ async def build_pkl_bundle(
109
110
  return CodeBundle(pkl=str(dest), computed_version=str_digest)
110
111
 
111
112
 
113
+ @alru_cache
112
114
  async def build_code_bundle(
113
115
  from_dir: Path,
114
116
  *ignore: Type[Ignore],
flyte/_deploy.py CHANGED
@@ -1,9 +1,11 @@
1
1
  from __future__ import annotations
2
2
 
3
3
  import asyncio
4
+ import typing
4
5
  from dataclasses import dataclass
5
6
  from typing import TYPE_CHECKING, Dict, List, Optional, Tuple
6
7
 
8
+ import grpc.aio
7
9
  import rich.repr
8
10
 
9
11
  import flyte.errors
@@ -90,27 +92,39 @@ async def _deploy_task(
90
92
 
91
93
  image_uri = task.image.uri if isinstance(task.image, Image) else task.image
92
94
 
93
- if dryrun:
94
- return translate_task_to_wire(task, serialization_context)
95
-
96
- default_inputs = await convert_upload_default_inputs(task.interface)
97
- spec = translate_task_to_wire(task, serialization_context, default_inputs=default_inputs)
98
-
99
- msg = f"Deploying task {task.name}, with image {image_uri} version {serialization_context.version}"
100
- if spec.task_template.HasField("container") and spec.task_template.container.args:
101
- msg += f" from {spec.task_template.container.args[-3]}.{spec.task_template.container.args[-1]}"
102
- logger.info(msg)
103
- task_id = task_definition_pb2.TaskIdentifier(
104
- org=spec.task_template.id.org,
105
- project=spec.task_template.id.project,
106
- domain=spec.task_template.id.domain,
107
- version=spec.task_template.id.version,
108
- name=spec.task_template.id.name,
109
- )
95
+ try:
96
+ if dryrun:
97
+ return translate_task_to_wire(task, serialization_context)
98
+
99
+ default_inputs = await convert_upload_default_inputs(task.interface)
100
+ spec = translate_task_to_wire(task, serialization_context, default_inputs=default_inputs)
101
+
102
+ msg = f"Deploying task {task.name}, with image {image_uri} version {serialization_context.version}"
103
+ if spec.task_template.HasField("container") and spec.task_template.container.args:
104
+ msg += f" from {spec.task_template.container.args[-3]}.{spec.task_template.container.args[-1]}"
105
+ logger.info(msg)
106
+ task_id = task_definition_pb2.TaskIdentifier(
107
+ org=spec.task_template.id.org,
108
+ project=spec.task_template.id.project,
109
+ domain=spec.task_template.id.domain,
110
+ version=spec.task_template.id.version,
111
+ name=spec.task_template.id.name,
112
+ )
110
113
 
111
- await get_client().task_service.DeployTask(task_service_pb2.DeployTaskRequest(task_id=task_id, spec=spec))
112
- logger.info(f"Deployed task {task.name} with version {task_id.version}")
113
- return spec
114
+ try:
115
+ await get_client().task_service.DeployTask(task_service_pb2.DeployTaskRequest(task_id=task_id, spec=spec))
116
+ logger.info(f"Deployed task {task.name} with version {task_id.version}")
117
+ except grpc.aio.AioRpcError as e:
118
+ if e.code() == grpc.StatusCode.ALREADY_EXISTS:
119
+ logger.info(f"Task {task.name} with image {image_uri} already exists, skipping deployment.")
120
+ return spec
121
+ raise
122
+ return spec
123
+ except Exception as e:
124
+ logger.error(f"Failed to deploy task {task.name} with image {image_uri}: {e}")
125
+ raise flyte.errors.DeploymentError(
126
+ f"Failed to deploy task {task.name} file{task.source_file} with image {image_uri}, Error: {e!s}"
127
+ ) from e
114
128
 
115
129
 
116
130
  async def _build_image_bg(env_name: str, image: Image) -> Tuple[str, str]:
@@ -151,13 +165,14 @@ async def _build_images(deployment: DeploymentPlan) -> ImageCache:
151
165
 
152
166
 
153
167
  @requires_initialization
154
- async def apply(deployment: DeploymentPlan, copy_style: CopyFiles, dryrun: bool = False) -> Deployment:
168
+ async def apply(deployment_plan: DeploymentPlan, copy_style: CopyFiles, dryrun: bool = False) -> Deployment:
155
169
  from ._code_bundle import build_code_bundle
156
170
 
157
171
  cfg = get_common_config()
158
- image_cache = await _build_images(deployment)
159
172
 
160
- version = deployment.version
173
+ image_cache = await _build_images(deployment_plan)
174
+
175
+ version = deployment_plan.version
161
176
  if copy_style == "none" and not version:
162
177
  raise flyte.errors.DeploymentError("Version must be set when copy_style is none")
163
178
  else:
@@ -178,41 +193,44 @@ async def apply(deployment: DeploymentPlan, copy_style: CopyFiles, dryrun: bool
178
193
  )
179
194
 
180
195
  tasks = []
181
- for env_name, env in deployment.envs.items():
196
+
197
+ for env_name, env in deployment_plan.envs.items():
182
198
  logger.info(f"Deploying environment {env_name}")
183
199
  # TODO Make this pluggable based on the environment type
184
200
  if isinstance(env, TaskEnvironment):
185
201
  for task in env.tasks.values():
186
202
  tasks.append(_deploy_task(task, dryrun=dryrun, serialization_context=sc))
187
- return Deployment(envs=deployment.envs, deployed_tasks=await asyncio.gather(*tasks))
203
+ return Deployment(envs=deployment_plan.envs, deployed_tasks=await asyncio.gather(*tasks))
188
204
 
189
205
 
190
- def _recursive_discover(
191
- planned_envs: Dict[str, Environment], envs: Environment | List[Environment]
192
- ) -> Dict[str, Environment]:
206
+ def _recursive_discover(planned_envs: Dict[str, Environment], env: Environment) -> Dict[str, Environment]:
193
207
  """
194
208
  Recursively deploy the environment and its dependencies, if not already deployed (present in env_tasks) and
195
209
  return the updated env_tasks.
196
210
  """
197
- if isinstance(envs, Environment):
198
- envs = [envs]
199
- for env in envs:
200
- # Skip if the environment is already planned
201
- if env.name in planned_envs:
202
- continue
203
- # Recursively discover dependent environments
204
- for dependent_env in env.depends_on:
205
- _recursive_discover(planned_envs, dependent_env)
206
- # Add the environment to the existing envs
207
- planned_envs[env.name] = env
211
+ # Skip if the environment is already planned
212
+ if env.name in planned_envs:
213
+ return planned_envs
214
+ # Recursively discover dependent environments
215
+ for dependent_env in env.depends_on:
216
+ _recursive_discover(planned_envs, dependent_env)
217
+ # Add the environment to the existing envs
218
+ planned_envs[env.name] = env
208
219
  return planned_envs
209
220
 
210
221
 
211
- def plan_deploy(*envs: Environment, version: Optional[str] = None) -> DeploymentPlan:
222
+ def plan_deploy(*envs: Environment, version: Optional[str] = None) -> List[DeploymentPlan]:
212
223
  if envs is None:
213
- return DeploymentPlan({})
214
- planned_envs = _recursive_discover({}, *envs)
215
- return DeploymentPlan(planned_envs, version=version)
224
+ return [DeploymentPlan({})]
225
+ deployment_plans = []
226
+ visited_envs: typing.Set[str] = set()
227
+ for env in envs:
228
+ if env.name in visited_envs:
229
+ continue
230
+ planned_envs = _recursive_discover({}, env)
231
+ deployment_plans.append(DeploymentPlan(planned_envs, version=version))
232
+ visited_envs.update(planned_envs.keys())
233
+ return deployment_plans
216
234
 
217
235
 
218
236
  @syncify
@@ -222,7 +240,7 @@ async def deploy(
222
240
  version: str | None = None,
223
241
  interactive_mode: bool | None = None,
224
242
  copy_style: CopyFiles = "loaded_modules",
225
- ) -> Deployment:
243
+ ) -> List[Deployment]:
226
244
  """
227
245
  Deploy the given environment or list of environments.
228
246
  :param envs: Environment or list of environments to deploy.
@@ -238,16 +256,19 @@ async def deploy(
238
256
  """
239
257
  if interactive_mode:
240
258
  raise NotImplementedError("Interactive mode not yet implemented for deployment")
241
- deployment = plan_deploy(*envs, version=version)
242
- return await apply(deployment, copy_style=copy_style, dryrun=dryrun)
259
+ deployment_plans = plan_deploy(*envs, version=version)
260
+ deployments = []
261
+ for deployment_plan in deployment_plans:
262
+ deployments.append(apply(deployment_plan, copy_style=copy_style, dryrun=dryrun))
263
+ return await asyncio.gather(*deployments)
243
264
 
244
265
 
245
266
  @syncify
246
- async def build_images(*envs: Environment) -> ImageCache:
267
+ async def build_images(envs: Environment) -> ImageCache:
247
268
  """
248
269
  Build the images for the given environments.
249
- :param envs: Environment or list of environments to build images for.
270
+ :param envs: Environment to build images for.
250
271
  :return: ImageCache containing the built images.
251
272
  """
252
- deployment = plan_deploy(*envs)
253
- return await _build_images(deployment)
273
+ deployment = plan_deploy(envs)
274
+ return await _build_images(deployment[0])
flyte/_environment.py CHANGED
@@ -6,14 +6,24 @@ from typing import TYPE_CHECKING, Any, Dict, List, Literal, Optional, Union
6
6
 
7
7
  import rich.repr
8
8
 
9
- from flyte._secret import SecretRequest
10
-
11
9
  from ._image import Image
12
10
  from ._resources import Resources
11
+ from ._secret import SecretRequest
13
12
 
14
13
  if TYPE_CHECKING:
15
14
  from kubernetes.client import V1PodTemplate
16
15
 
16
+ # Global registry to track all Environment instances in load order
17
+ _ENVIRONMENT_REGISTRY: List[Environment] = []
18
+
19
+
20
+ def list_loaded_environments() -> List[Environment]:
21
+ """
22
+ Return a list of all Environment objects in the order they were loaded.
23
+ This is useful for deploying environments in the order they were defined.
24
+ """
25
+ return _ENVIRONMENT_REGISTRY
26
+
17
27
 
18
28
  def is_snake_or_kebab_with_numbers(s: str) -> bool:
19
29
  return re.fullmatch(r"^[a-z0-9]+([_-][a-z0-9]+)*$", s) is not None
@@ -44,6 +54,8 @@ class Environment:
44
54
  def __post_init__(self):
45
55
  if not is_snake_or_kebab_with_numbers(self.name):
46
56
  raise ValueError(f"Environment name '{self.name}' must be in snake_case or kebab-case format.")
57
+ # Automatically register this environment instance in load order
58
+ _ENVIRONMENT_REGISTRY.append(self)
47
59
 
48
60
  def add_dependency(self, *env: Environment):
49
61
  """
flyte/_image.py CHANGED
@@ -8,11 +8,14 @@ from abc import abstractmethod
8
8
  from dataclasses import asdict, dataclass, field
9
9
  from functools import cached_property
10
10
  from pathlib import Path
11
- from typing import Callable, ClassVar, Dict, List, Literal, Optional, Tuple, TypeVar, Union
11
+ from typing import TYPE_CHECKING, Callable, ClassVar, Dict, List, Literal, Optional, Tuple, TypeVar, Union
12
12
 
13
13
  import rich.repr
14
14
  from packaging.version import Version
15
15
 
16
+ if TYPE_CHECKING:
17
+ from flyte import Secret, SecretRequest
18
+
16
19
  # Supported Python versions
17
20
  PYTHON_3_10 = (3, 10)
18
21
  PYTHON_3_11 = (3, 11)
@@ -71,6 +74,7 @@ class PipOption:
71
74
  extra_index_urls: Optional[Tuple[str] | Tuple[str, ...] | List[str]] = None
72
75
  pre: bool = False
73
76
  extra_args: Optional[str] = None
77
+ secret_mounts: Optional[Tuple[str | Secret, ...]] = None
74
78
 
75
79
  def get_pip_install_args(self) -> List[str]:
76
80
  pip_install_args = []
@@ -101,6 +105,9 @@ class PipOption:
101
105
  hash_input += str(self.pre)
102
106
  if self.extra_args:
103
107
  hash_input += self.extra_args
108
+ if self.secret_mounts:
109
+ for secret_mount in self.secret_mounts:
110
+ hash_input += str(secret_mount)
104
111
 
105
112
  hasher.update(hash_input.encode("utf-8"))
106
113
 
@@ -110,9 +117,6 @@ class PipOption:
110
117
  class PipPackages(PipOption, Layer):
111
118
  packages: Optional[Tuple[str, ...]] = None
112
119
 
113
- # todo: to be implemented
114
- # secret_mounts: Optional[List[Tuple[str, str]]] = None
115
-
116
120
  def update_hash(self, hasher: hashlib._Hash):
117
121
  """
118
122
  Update the hash with the pip packages
@@ -161,6 +165,15 @@ class UVProject(PipOption, Layer):
161
165
  pyproject: Path
162
166
  uvlock: Path
163
167
 
168
+ def validate(self):
169
+ if not self.pyproject.exists():
170
+ raise FileNotFoundError(f"pyproject.toml file {self.pyproject} does not exist")
171
+ if not self.pyproject.is_file():
172
+ raise ValueError(f"Pyproject file {self.pyproject} is not a file")
173
+ if not self.uvlock.exists():
174
+ raise ValueError(f"UVLock file {self.uvlock} does not exist")
175
+ super().validate()
176
+
164
177
  def update_hash(self, hasher: hashlib._Hash):
165
178
  from ._utils import filehash_update
166
179
 
@@ -168,13 +181,40 @@ class UVProject(PipOption, Layer):
168
181
  filehash_update(self.uvlock, hasher)
169
182
 
170
183
 
184
+ @rich.repr.auto
185
+ @dataclass(frozen=True, repr=True)
186
+ class UVScript(PipOption, Layer):
187
+ script: Path
188
+
189
+ def validate(self):
190
+ if not self.script.exists():
191
+ raise FileNotFoundError(f"UV script {self.script} does not exist")
192
+ if not self.script.is_file():
193
+ raise ValueError(f"UV script {self.script} is not a file")
194
+ if not self.script.suffix == ".py":
195
+ raise ValueError(f"UV script {self.script} must have a .py extension")
196
+ super().validate()
197
+
198
+ def update_hash(self, hasher: hashlib._Hash):
199
+ from ._utils import filehash_update
200
+
201
+ super().update_hash(hasher)
202
+ filehash_update(self.script, hasher)
203
+
204
+
171
205
  @rich.repr.auto
172
206
  @dataclass(frozen=True, repr=True)
173
207
  class AptPackages(Layer):
174
208
  packages: Tuple[str, ...]
209
+ secret_mounts: Optional[Tuple[str | Secret, ...]] = None
175
210
 
176
211
  def update_hash(self, hasher: hashlib._Hash):
177
- hasher.update("".join(self.packages).encode("utf-8"))
212
+ hash_input = "".join(self.packages)
213
+
214
+ if self.secret_mounts:
215
+ for secret_mount in self.secret_mounts:
216
+ hash_input += str(secret_mount)
217
+ hasher.update(hash_input.encode("utf-8"))
178
218
 
179
219
 
180
220
  @rich.repr.auto
@@ -394,8 +434,8 @@ class Image:
394
434
  )
395
435
  labels_and_user = _DockerLines(
396
436
  (
397
- "LABEL org.opencontainers.image.authors='Union.AI <sales@union.ai>'",
398
- "LABEL org.opencontainers.image.source=https://github.com/unionai/unionv2",
437
+ "LABEL org.opencontainers.image.authors='Union.AI <info@union.ai>'",
438
+ "LABEL org.opencontainers.image.source=https://github.com/flyteorg/flyte",
399
439
  "RUN useradd --create-home --shell /bin/bash flytekit &&"
400
440
  " chown -R flytekit /root && chown -R flytekit /home",
401
441
  "WORKDIR /root",
@@ -501,6 +541,7 @@ class Image:
501
541
  pre: bool = False,
502
542
  extra_args: Optional[str] = None,
503
543
  platform: Optional[Tuple[Architecture, ...]] = None,
544
+ secret_mounts: Optional[SecretRequest] = None,
504
545
  ) -> Image:
505
546
  """
506
547
  Use this method to create a new image with the specified uv script.
@@ -534,36 +575,18 @@ class Image:
534
575
 
535
576
  :return: Image
536
577
  """
537
- from ._utils import parse_uv_script_file
538
-
539
- if isinstance(script, str):
540
- script = Path(script)
541
- if not script.exists():
542
- raise FileNotFoundError(f"UV script {script} does not exist")
543
- if not script.is_file():
544
- raise ValueError(f"UV script {script} is not a file")
545
- if not script.suffix == ".py":
546
- raise ValueError(f"UV script {script} must have a .py extension")
547
- header = parse_uv_script_file(script)
578
+ ll = UVScript(
579
+ script=Path(script),
580
+ index_url=index_url,
581
+ extra_index_urls=_ensure_tuple(extra_index_urls) if extra_index_urls else None,
582
+ pre=pre,
583
+ extra_args=extra_args,
584
+ secret_mounts=_ensure_tuple(secret_mounts) if secret_mounts else None,
585
+ )
548
586
 
549
- # todo: arch
550
587
  img = cls.from_debian_base(registry=registry, name=name, python_version=python_version, platform=platform)
551
588
 
552
- # add ca-certificates to the image by default
553
- img = img.with_apt_packages("ca-certificates")
554
-
555
- if header.dependencies:
556
- return img.with_pip_packages(
557
- *header.dependencies,
558
- index_url=index_url,
559
- extra_index_urls=extra_index_urls,
560
- pre=pre,
561
- extra_args=extra_args,
562
- )
563
-
564
- # todo: override the _identifier_override to be the script name or a hash of the script contents
565
- # This is needed because inside the image, the identifier will be computed to be something different.
566
- return img
589
+ return img.clone(addl_layer=ll)
567
590
 
568
591
  def clone(
569
592
  self,
@@ -690,19 +713,26 @@ class Image:
690
713
  new_image = self.clone(addl_layer=WorkDir(workdir=workdir))
691
714
  return new_image
692
715
 
693
- def with_requirements(self, file: str | Path) -> Image:
716
+ def with_requirements(
717
+ self,
718
+ file: str | Path,
719
+ secret_mounts: Optional[SecretRequest] = None,
720
+ ) -> Image:
694
721
  """
695
722
  Use this method to create a new image with the specified requirements file layered on top of the current image
696
723
  Cannot be used in conjunction with conda
697
724
 
698
725
  :param file: path to the requirements file, must be a .txt file
726
+ :param secret_mounts: list of secret to mount for the build process.
699
727
  :return:
700
728
  """
701
729
  if isinstance(file, str):
702
730
  file = Path(file)
703
731
  if file.suffix != ".txt":
704
732
  raise ValueError(f"Requirements file {file} must have a .txt extension")
705
- new_image = self.clone(addl_layer=Requirements(file=file))
733
+ new_image = self.clone(
734
+ addl_layer=Requirements(file=file, secret_mounts=_ensure_tuple(secret_mounts) if secret_mounts else None)
735
+ )
706
736
  return new_image
707
737
 
708
738
  def with_pip_packages(
@@ -712,6 +742,7 @@ class Image:
712
742
  extra_index_urls: Union[str, List[str], Tuple[str, ...], None] = None,
713
743
  pre: bool = False,
714
744
  extra_args: Optional[str] = None,
745
+ secret_mounts: Optional[SecretRequest] = None,
715
746
  ) -> Image:
716
747
  """
717
748
  Use this method to create a new image with the specified pip packages layered on top of the current image
@@ -732,8 +763,8 @@ class Image:
732
763
  :param extra_index_urls: extra index urls to use for pip install, default is None
733
764
  :param pre: whether to allow pre-release versions, default is False
734
765
  :param extra_args: extra arguments to pass to pip install, default is None
735
- # :param secret_mounts: todo
736
766
  :param extra_args: extra arguments to pass to pip install, default is None
767
+ :param secret_mounts: list of secret to mount for the build process.
737
768
  :return: Image
738
769
  """
739
770
  new_packages: Optional[Tuple] = packages or None
@@ -745,6 +776,7 @@ class Image:
745
776
  extra_index_urls=new_extra_index_urls,
746
777
  pre=pre,
747
778
  extra_args=extra_args,
779
+ secret_mounts=_ensure_tuple(secret_mounts) if secret_mounts else None,
748
780
  )
749
781
  new_image = self.clone(addl_layer=ll)
750
782
  return new_image
@@ -790,11 +822,13 @@ class Image:
790
822
 
791
823
  def with_uv_project(
792
824
  self,
793
- pyproject_file: Path,
825
+ pyproject_file: str | Path,
826
+ uvlock: Path | None = None,
794
827
  index_url: Optional[str] = None,
795
- extra_index_urls: Union[str, List[str], Tuple[str, ...], None] = None,
828
+ extra_index_urls: Union[List[str], Tuple[str, ...], None] = None,
796
829
  pre: bool = False,
797
830
  extra_args: Optional[str] = None,
831
+ secret_mounts: Optional[SecretRequest] = None,
798
832
  ) -> Image:
799
833
  """
800
834
  Use this method to create a new image with the specified uv.lock file layered on top of the current image
@@ -803,26 +837,44 @@ class Image:
803
837
  In the Union builders, using this will change the virtual env to /root/.venv
804
838
 
805
839
  :param pyproject_file: path to the pyproject.toml file, needs to have a corresponding uv.lock file
806
- :return:
840
+ :param uvlock: path to the uv.lock file, if not specified, will use the default uv.lock file in the same
841
+ directory as the pyproject.toml file. (pyproject.parent / uv.lock)
842
+ :param index_url: index url to use for pip install, default is None
843
+ :param extra_index_urls: extra index urls to use for pip install, default is None
844
+ :param pre: whether to allow pre-release versions, default is False
845
+ :param extra_args: extra arguments to pass to pip install, default is None
846
+ :param secret_mounts: list of secret mounts to use for the build process.
847
+ :return: Image
807
848
  """
808
- if not pyproject_file.exists():
809
- raise FileNotFoundError(f"UVLock file {pyproject_file} does not exist")
810
- if not pyproject_file.is_file():
811
- raise ValueError(f"UVLock file {pyproject_file} is not a file")
812
- lock = pyproject_file.parent / "uv.lock"
813
- if not lock.exists():
814
- raise ValueError(f"UVLock file {lock} does not exist")
815
- new_image = self.clone(addl_layer=UVProject(pyproject=pyproject_file, uvlock=lock))
849
+ if isinstance(pyproject_file, str):
850
+ pyproject_file = Path(pyproject_file)
851
+ new_image = self.clone(
852
+ addl_layer=UVProject(
853
+ pyproject=pyproject_file,
854
+ uvlock=uvlock or (pyproject_file.parent / "uv.lock"),
855
+ index_url=index_url,
856
+ extra_index_urls=extra_index_urls,
857
+ pre=pre,
858
+ extra_args=extra_args,
859
+ secret_mounts=_ensure_tuple(secret_mounts) if secret_mounts else None,
860
+ )
861
+ )
816
862
  return new_image
817
863
 
818
- def with_apt_packages(self, *packages: str) -> Image:
864
+ def with_apt_packages(self, *packages: str, secret_mounts: Optional[SecretRequest] = None) -> Image:
819
865
  """
820
866
  Use this method to create a new image with the specified apt packages layered on top of the current image
821
867
 
822
868
  :param packages: list of apt packages to install
869
+ :param secret_mounts: list of secret mounts to use for the build process.
823
870
  :return: Image
824
871
  """
825
- new_image = self.clone(addl_layer=AptPackages(packages=packages))
872
+ new_image = self.clone(
873
+ addl_layer=AptPackages(
874
+ packages=packages,
875
+ secret_mounts=_ensure_tuple(secret_mounts) if secret_mounts else None,
876
+ )
877
+ )
826
878
  return new_image
827
879
 
828
880
  def with_commands(self, commands: List[str]) -> Image:
@@ -153,7 +153,7 @@ class RemoteController(Controller):
153
153
  name = task_obj.__name__
154
154
  elif hasattr(task_obj, "name"):
155
155
  name = task_obj.name
156
- logger.warning(f"For action {uniq}, task {name} call sequence is {new_seq}")
156
+ logger.info(f"For action {uniq}, task {name} call sequence is {new_seq}")
157
157
  return new_seq
158
158
 
159
159
  async def _submit(self, _task_call_seq: int, _task: TaskTemplate, *args, **kwargs) -> Any:
@@ -196,7 +196,7 @@ class RemoteController(Controller):
196
196
  sub_action_id, sub_action_output_path = convert.generate_sub_action_id_and_output_path(
197
197
  tctx, task_spec, inputs_hash, _task_call_seq
198
198
  )
199
- logger.warning(f"Sub action {sub_action_id} output path {sub_action_output_path}")
199
+ logger.info(f"Sub action {sub_action_id} output path {sub_action_output_path}")
200
200
 
201
201
  serialized_inputs = inputs.proto_inputs.SerializeToString(deterministic=True)
202
202
  inputs_uri = io.inputs_path(sub_action_output_path)
@@ -421,17 +421,12 @@ class RemoteController(Controller):
421
421
 
422
422
  current_action_id = tctx.action
423
423
  sub_run_output_path = storage.join(tctx.run_base_dir, info.action.name)
424
- print(f"Sub run output path for {info.name} is {sub_run_output_path}", flush=True)
425
424
 
426
425
  if info.interface.has_outputs():
427
426
  outputs_file_path: str = ""
428
427
  if info.output:
429
428
  outputs = await convert.convert_from_native_to_outputs(info.output, info.interface)
430
429
  outputs_file_path = io.outputs_path(sub_run_output_path)
431
- print(
432
- f"Uploading outputs for {info.name} Outputs file path: {outputs_file_path}",
433
- flush=True,
434
- )
435
430
  await io.upload_outputs(outputs, sub_run_output_path, max_bytes=MAX_TRACE_BYTES)
436
431
  elif info.error:
437
432
  err = convert.convert_from_native_to_error(info.error)
@@ -461,19 +456,22 @@ class RemoteController(Controller):
461
456
  end_time=info.end_time,
462
457
  typed_interface=typed_interface if typed_interface else None,
463
458
  )
464
- try:
465
- logger.info(
466
- f"Submitting Trace action Run:[{trace_action.run_name}, Parent:[{trace_action.parent_action_name}],"
467
- f" Trace fn:[{info.name}], action:[{info.action.name}]"
468
- )
469
- await self.submit_action(trace_action)
470
- logger.info(f"Trace Action for [{info.name}] action id: {info.action.name}, completed!")
471
- except asyncio.CancelledError:
472
- # If the action is cancelled, we need to cancel the action on the server as well
473
- raise
474
459
 
475
- async def submit_task_ref(
476
- self, _task: task_definition_pb2.TaskDetails, max_inline_io_bytes: int, *args, **kwargs
460
+ async with self._parent_action_semaphore[unique_action_name(current_action_id)]:
461
+ try:
462
+ logger.info(
463
+ f"Submitting Trace action Run:[{trace_action.run_name},"
464
+ f" Parent:[{trace_action.parent_action_name}],"
465
+ f" Trace fn:[{info.name}], action:[{info.action.name}]"
466
+ )
467
+ await self.submit_action(trace_action)
468
+ logger.info(f"Trace Action for [{info.name}] action id: {info.action.name}, completed!")
469
+ except asyncio.CancelledError:
470
+ # If the action is cancelled, we need to cancel the action on the server as well
471
+ raise
472
+
473
+ async def _submit_task_ref(
474
+ self, invoke_seq_num: int, _task: task_definition_pb2.TaskDetails, max_inline_io_bytes: int, *args, **kwargs
477
475
  ) -> Any:
478
476
  ctx = internal_ctx()
479
477
  tctx = ctx.data.task_context
@@ -482,8 +480,6 @@ class RemoteController(Controller):
482
480
  current_action_id = tctx.action
483
481
  task_name = _task.spec.task_template.id.name
484
482
 
485
- invoke_seq_num = self.generate_task_call_sequence(_task, current_action_id)
486
-
487
483
  native_interface = types.guess_interface(
488
484
  _task.spec.task_template.interface, default_inputs=_task.spec.default_inputs
489
485
  )
@@ -560,3 +556,15 @@ class RemoteController(Controller):
560
556
  )
561
557
  return await load_and_convert_outputs(native_interface, n.realized_outputs_uri, max_inline_io_bytes)
562
558
  return None
559
+
560
+ async def submit_task_ref(
561
+ self, _task: task_definition_pb2.TaskDetails, max_inline_io_bytes: int, *args, **kwargs
562
+ ) -> Any:
563
+ ctx = internal_ctx()
564
+ tctx = ctx.data.task_context
565
+ if tctx is None:
566
+ raise flyte.errors.RuntimeSystemError("BadContext", "Task context not initialized")
567
+ current_action_id = tctx.action
568
+ task_call_seq = self.generate_task_call_sequence(_task, current_action_id)
569
+ async with self._parent_action_semaphore[unique_action_name(current_action_id)]:
570
+ return await self._submit_task_ref(task_call_seq, _task, max_inline_io_bytes, *args, **kwargs)
@@ -86,10 +86,7 @@ class Controller:
86
86
 
87
87
  async def get_action(self, action_id: identifier_pb2.ActionIdentifier, parent_action_name: str) -> Optional[Action]:
88
88
  """Get the action from the informer"""
89
- informer = await self._informers.get(run_name=action_id.run.name, parent_action_name=parent_action_name)
90
- if informer:
91
- return await informer.get(action_id.name)
92
- return None
89
+ return await self._run_coroutine_in_controller_thread(self._bg_get_action(action_id, parent_action_name))
93
90
 
94
91
  @log
95
92
  async def cancel_action(self, action: Action):