flyte 2.0.0b17__py3-none-any.whl → 2.0.0b18__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of flyte might be problematic; see the registry advisory for more details.

flyte/_bin/runtime.py CHANGED
@@ -101,6 +101,7 @@ def main(
101
101
  from flyte._logging import logger
102
102
  from flyte.models import ActionID, Checkpoints, CodeBundle, RawDataPath
103
103
 
104
+ logger.warning(f"Flyte runtime started for action {name} with run name {run_name}")
104
105
  logger.info("Registering faulthandler for SIGUSR1")
105
106
  faulthandler.register(signal.SIGUSR1)
106
107
 
@@ -168,6 +169,7 @@ def main(
168
169
  await controller.stop()
169
170
 
170
171
  asyncio.run(_run_and_stop())
172
+ logger.warning(f"Flyte runtime completed for action {name} with run name {run_name}")
171
173
 
172
174
 
173
175
  if __name__ == "__main__":
flyte/_debug/vscode.py CHANGED
@@ -8,6 +8,7 @@ import subprocess
8
8
  import sys
9
9
  import tarfile
10
10
  import time
11
+ from pathlib import Path
11
12
  from typing import List
12
13
 
13
14
  import aiofiles
@@ -173,7 +174,7 @@ def prepare_launch_json(ctx: click.Context, pid: int):
173
174
  Generate the launch.json and settings.json for users to easily launch interactive debugging and task resumption.
174
175
  """
175
176
 
176
- virtual_venv = os.getenv("VIRTUAL_ENV")
177
+ virtual_venv = os.getenv("VIRTUAL_ENV", str(Path(sys.executable).parent.parent))
177
178
  if virtual_venv is None:
178
179
  raise RuntimeError("VIRTUAL_ENV is not found in environment variables.")
179
180
 
@@ -258,11 +259,12 @@ async def _start_vscode_server(ctx: click.Context):
258
259
  await asyncio.gather(download_tgz(ctx.params["dest"], ctx.params["version"], ctx.params["tgz"]), download_vscode())
259
260
  child_process = multiprocessing.Process(
260
261
  target=lambda cmd: asyncio.run(asyncio.run(execute_command(cmd))),
261
- kwargs={"cmd": f"code-server --bind-addr 0.0.0.0:8080 --disable-workspace-trust --auth none {os.getcwd()}"},
262
+ kwargs={"cmd": f"code-server --bind-addr 0.0.0.0:6060 --disable-workspace-trust --auth none {os.getcwd()}"},
262
263
  )
263
264
  child_process.start()
264
265
  if child_process.pid is None:
265
266
  raise RuntimeError("Failed to start vscode server.")
267
+
266
268
  prepare_launch_json(ctx, child_process.pid)
267
269
 
268
270
  start_time = time.time()
flyte/_deploy.py CHANGED
@@ -153,7 +153,7 @@ async def _build_images(deployment: DeploymentPlan) -> ImageCache:
153
153
 
154
154
  elif env.image == "auto" and "auto" not in image_identifier_map:
155
155
  auto_image = Image.from_debian_base()
156
- image_identifier_map["auto"] = auto_image.uri
156
+ images.append(_build_image_bg(env_name, auto_image))
157
157
  final_images = await asyncio.gather(*images)
158
158
 
159
159
  for env_name, image_uri in final_images:
@@ -161,6 +161,8 @@ async def _build_images(deployment: DeploymentPlan) -> ImageCache:
161
161
  env = deployment.envs[env_name]
162
162
  if isinstance(env.image, Image):
163
163
  image_identifier_map[env.image.identifier] = image_uri
164
+ elif env.image == "auto":
165
+ image_identifier_map["auto"] = image_uri
164
166
 
165
167
  return ImageCache(image_lookup=image_identifier_map)
166
168
 
flyte/_environment.py CHANGED
@@ -2,16 +2,14 @@ from __future__ import annotations
2
2
 
3
3
  import re
4
4
  from dataclasses import dataclass, field
5
- from typing import TYPE_CHECKING, Any, Dict, List, Literal, Optional, Union
5
+ from typing import Any, Dict, List, Literal, Optional, Union
6
6
 
7
7
  import rich.repr
8
8
 
9
9
  from ._image import Image
10
+ from ._pod import PodTemplate
10
11
  from ._resources import Resources
11
- from ._secret import SecretRequest
12
-
13
- if TYPE_CHECKING:
14
- from kubernetes.client import V1PodTemplate
12
+ from ._secret import Secret, SecretRequest
15
13
 
16
14
  # Global registry to track all Environment instances in load order
17
15
  _ENVIRONMENT_REGISTRY: List[Environment] = []
@@ -44,7 +42,7 @@ class Environment:
44
42
 
45
43
  name: str
46
44
  depends_on: List[Environment] = field(default_factory=list)
47
- pod_template: Optional[Union[str, "V1PodTemplate"]] = None
45
+ pod_template: Optional[Union[str, PodTemplate]] = None
48
46
  description: Optional[str] = None
49
47
  secrets: Optional[SecretRequest] = None
50
48
  env_vars: Optional[Dict[str, str]] = None
@@ -54,6 +52,17 @@ class Environment:
54
52
  def __post_init__(self):
55
53
  if not is_snake_or_kebab_with_numbers(self.name):
56
54
  raise ValueError(f"Environment name '{self.name}' must be in snake_case or kebab-case format.")
55
+ if not isinstance(self.image, (Image, str)):
56
+ raise TypeError(f"Expected image to be of type str or Image, got {type(self.image)}")
57
+ if self.secrets and not isinstance(self.secrets, (str, Secret, List)):
58
+ raise TypeError(f"Expected secrets to be of type SecretRequest, got {type(self.secrets)}")
59
+ for dep in self.depends_on:
60
+ if not isinstance(dep, Environment):
61
+ raise TypeError(f"Expected depends_on to be of type List[Environment], got {type(dep)}")
62
+ if self.resources is not None and not isinstance(self.resources, Resources):
63
+ raise TypeError(f"Expected resources to be of type Resources, got {type(self.resources)}")
64
+ if self.env_vars is not None and not isinstance(self.env_vars, dict):
65
+ raise TypeError(f"Expected env_vars to be of type Dict[str, str], got {type(self.env_vars)}")
57
66
  # Automatically register this environment instance in load order
58
67
  _ENVIRONMENT_REGISTRY.append(self)
59
68
 
flyte/_hash.py CHANGED
@@ -1,4 +1,4 @@
1
- from typing import Callable, Generic, TypeVar
1
+ from typing import TypeVar
2
2
 
3
3
  T = TypeVar("T")
4
4
 
@@ -6,18 +6,3 @@ T = TypeVar("T")
6
6
  class HashOnReferenceMixin(object):
7
7
  def __hash__(self):
8
8
  return hash(id(self))
9
-
10
-
11
- class HashMethod(Generic[T]):
12
- """
13
- Flyte-specific object used to wrap the hash function for a specific type
14
- """
15
-
16
- def __init__(self, function: Callable[[T], str]):
17
- self._function = function
18
-
19
- def calculate(self, obj: T) -> str:
20
- """
21
- Calculate hash for `obj`.
22
- """
23
- return self._function(obj)
flyte/_image.py CHANGED
@@ -887,7 +887,12 @@ class Image:
887
887
  Use this method to create a new image with the specified uv.lock file layered on top of the current image
888
888
  Must have a corresponding pyproject.toml file in the same directory
889
889
  Cannot be used in conjunction with conda
890
- In the Union builders, using this will change the virtual env to /root/.venv
890
+
891
+ By default, this method copies the entire project into the image,
892
+ including files such as pyproject.toml, uv.lock, and the src/ directory.
893
+
894
+ If you prefer not to install the current project, you can pass the extra argument --no-install-project.
895
+ In this case, the image builder will only copy pyproject.toml and uv.lock into the image.
891
896
 
892
897
  :param pyproject_file: path to the pyproject.toml file, needs to have a corresponding uv.lock file
893
898
  :param uvlock: path to the uv.lock file, if not specified, will use the default uv.lock file in the same
flyte/_initialize.py CHANGED
@@ -248,23 +248,22 @@ async def init_from_config(
248
248
  import flyte.config as config
249
249
 
250
250
  cfg: config.Config
251
- if path_or_config is None or isinstance(path_or_config, str):
252
- # If a string is passed, treat it as a path to the config file
253
- if root_dir and path_or_config:
254
- cfg = config.auto(str(root_dir / path_or_config))
255
- elif path_or_config:
256
- if not Path(path_or_config).exists():
257
- raise InitializationError(
258
- "ConfigFileNotFoundError",
259
- "user",
260
- f"Configuration file '{path_or_config}' does not exist., current working directory is {Path.cwd()}",
261
- )
262
- cfg = config.auto(path_or_config)
251
+ if path_or_config is None:
252
+ # If no path is provided, use the default config file
253
+ cfg = config.auto()
254
+ elif isinstance(path_or_config, str):
255
+ if root_dir:
256
+ cfg_path = str(root_dir / path_or_config)
263
257
  else:
264
- # If no path is provided, use the default config file
265
- cfg = config.auto()
258
+ cfg_path = path_or_config
259
+ if not Path(cfg_path).exists():
260
+ raise InitializationError(
261
+ "ConfigFileNotFoundError",
262
+ "user",
263
+ f"Configuration file '{cfg_path}' does not exist., current working directory is {Path.cwd()}",
264
+ )
265
+ cfg = config.auto(cfg_path)
266
266
  else:
267
- # If a Config object is passed, use it directly
268
267
  cfg = path_or_config
269
268
 
270
269
  logger.debug(f"Flyte config initialized as {cfg}")
@@ -413,8 +413,7 @@ class RemoteController(Controller):
413
413
  else:
414
414
  logger.warning(f"Action {prev_action.action_id.name} failed, but no error was found, re-running trace!")
415
415
  elif prev_action.realized_outputs_uri is not None:
416
- outputs_file_path = io.outputs_path(prev_action.realized_outputs_uri)
417
- o = await io.load_outputs(outputs_file_path, max_bytes=MAX_TRACE_BYTES)
416
+ o = await io.load_outputs(prev_action.realized_outputs_uri, max_bytes=MAX_TRACE_BYTES)
418
417
  outputs = await convert.convert_outputs_to_native(_interface, o)
419
418
  return (
420
419
  TraceInfo(func_name, sub_action_id, _interface, inputs_uri, output=outputs),
@@ -439,15 +438,13 @@ class RemoteController(Controller):
439
438
  outputs_file_path: str = ""
440
439
 
441
440
  if info.interface.has_outputs():
442
- if info.output:
443
- outputs = await convert.convert_from_native_to_outputs(info.output, info.interface)
444
- outputs_file_path = io.outputs_path(sub_run_output_path)
445
- await io.upload_outputs(outputs, sub_run_output_path, max_bytes=MAX_TRACE_BYTES)
446
- elif info.error:
441
+ if info.error:
447
442
  err = convert.convert_from_native_to_error(info.error)
448
443
  await io.upload_error(err.err, sub_run_output_path)
449
444
  else:
450
- raise flyte.errors.RuntimeSystemError("BadTraceInfo", "Trace info does not have output or error")
445
+ outputs = await convert.convert_from_native_to_outputs(info.output, info.interface)
446
+ outputs_file_path = io.outputs_path(sub_run_output_path)
447
+ await io.upload_outputs(outputs, sub_run_output_path, max_bytes=MAX_TRACE_BYTES)
451
448
 
452
449
  typed_interface = transform_native_to_typed_interface(info.interface)
453
450
 
@@ -158,8 +158,8 @@ class Controller:
158
158
  self._thread.start()
159
159
 
160
160
  # Wait for the thread to be ready
161
- logger.info("Waiting for controller thread to be ready...")
162
161
  if not self._thread_ready.wait(timeout=self._thread_wait_timeout):
162
+ logger.warning("Controller thread did not finish within timeout")
163
163
  raise TimeoutError("Controller thread failed to start in time")
164
164
 
165
165
  if self._get_exception():
@@ -44,19 +44,19 @@ _F_IMG_ID = "_F_IMG_ID"
44
44
  FLYTE_DOCKER_BUILDER_CACHE_FROM = "FLYTE_DOCKER_BUILDER_CACHE_FROM"
45
45
  FLYTE_DOCKER_BUILDER_CACHE_TO = "FLYTE_DOCKER_BUILDER_CACHE_TO"
46
46
 
47
- UV_LOCK_INSTALL_TEMPLATE = Template("""\
48
- WORKDIR /root
47
+ UV_LOCK_WITHOUT_PROJECT_INSTALL_TEMPLATE = Template("""\
49
48
  RUN --mount=type=cache,sharing=locked,mode=0777,target=/root/.cache/uv,id=uv \
50
- --mount=type=bind,target=uv.lock,src=uv.lock \
51
- --mount=type=bind,target=pyproject.toml,src=pyproject.toml \
49
+ --mount=type=bind,target=uv.lock,src=$UV_LOCK_PATH \
50
+ --mount=type=bind,target=pyproject.toml,src=$PYPROJECT_PATH \
52
51
  $SECRET_MOUNT \
53
- uv sync $PIP_INSTALL_ARGS
54
- WORKDIR /
52
+ uv sync --active $PIP_INSTALL_ARGS
53
+ """)
55
54
 
56
- # Update PATH and UV_PYTHON to point to the venv created by uv sync
57
- ENV PATH="/root/.venv/bin:$$PATH" \
58
- VIRTUALENV=/root/.venv \
59
- UV_PYTHON=/root/.venv/bin/python
55
+ UV_LOCK_INSTALL_TEMPLATE = Template("""\
56
+ COPY $PYPROJECT_PATH $PYPROJECT_PATH
57
+ RUN --mount=type=cache,sharing=locked,mode=0777,target=/root/.cache/uv,id=uv \
58
+ $SECRET_MOUNT \
59
+ uv sync --active $PIP_INSTALL_ARGS --project $PYPROJECT_PATH
60
60
  """)
61
61
 
62
62
  UV_PACKAGE_INSTALL_COMMAND_TEMPLATE = Template("""\
@@ -90,7 +90,7 @@ RUN --mount=type=cache,sharing=locked,mode=0777,target=/root/.cache/uv,id=uv \
90
90
  # new template
91
91
  DOCKER_FILE_UV_BASE_TEMPLATE = Template("""\
92
92
  # syntax=docker/dockerfile:1.10
93
- FROM ghcr.io/astral-sh/uv:0.6.12 AS uv
93
+ FROM ghcr.io/astral-sh/uv:0.8.13 AS uv
94
94
  FROM $BASE_IMAGE
95
95
 
96
96
  USER root
@@ -177,6 +177,7 @@ class PythonWheelHandler:
177
177
  "/dist",
178
178
  "--no-deps",
179
179
  "--no-index",
180
+ "--reinstall",
180
181
  layer.package_name,
181
182
  ],
182
183
  ]
@@ -230,20 +231,27 @@ class AptPackagesHandler:
230
231
  class UVProjectHandler:
231
232
  @staticmethod
232
233
  async def handle(layer: UVProject, context_path: Path, dockerfile: str) -> str:
233
- # copy the two files
234
- shutil.copy(layer.pyproject, context_path)
235
- shutil.copy(layer.uvlock, context_path)
236
-
237
- # --locked: Assert that the `uv.lock` will remain unchanged
238
- # --no-dev: Omit the development dependency group
239
- # --no-install-project: Do not install the current project
240
- additional_pip_install_args = ["--locked", "--no-dev", "--no-install-project"]
241
234
  secret_mounts = _get_secret_mounts_layer(layer.secret_mounts)
242
- delta = UV_LOCK_INSTALL_TEMPLATE.substitute(
243
- PIP_INSTALL_ARGS=" ".join(additional_pip_install_args), SECRET_MOUNT=secret_mounts
244
- )
245
- dockerfile += delta
235
+ if layer.extra_index_urls and "--no-install-project" in layer.extra_index_urls:
236
+ # Only Copy pyproject.yaml and uv.lock.
237
+ pyproject_dst = copy_files_to_context(layer.pyproject, context_path)
238
+ uvlock_dst = copy_files_to_context(layer.uvlock, context_path)
239
+ delta = UV_LOCK_WITHOUT_PROJECT_INSTALL_TEMPLATE.substitute(
240
+ UV_LOCK_PATH=uvlock_dst.relative_to(context_path),
241
+ PYPROJECT_PATH=pyproject_dst.relative_to(context_path),
242
+ PIP_INSTALL_ARGS=" ".join(layer.get_pip_install_args()),
243
+ SECRET_MOUNT=secret_mounts,
244
+ )
245
+ else:
246
+ # Copy the entire project.
247
+ pyproject_dst = copy_files_to_context(layer.pyproject.parent, context_path)
248
+ delta = UV_LOCK_INSTALL_TEMPLATE.substitute(
249
+ PYPROJECT_PATH=pyproject_dst.relative_to(context_path),
250
+ PIP_INSTALL_ARGS=" ".join(layer.get_pip_install_args()),
251
+ SECRET_MOUNT=secret_mounts,
252
+ )
246
253
 
254
+ dockerfile += delta
247
255
  return dockerfile
248
256
 
249
257
 
@@ -1,5 +1,7 @@
1
+ import gzip
1
2
  import os
2
3
  import shutil
4
+ import tarfile
3
5
  import tempfile
4
6
  import typing
5
7
  from datetime import datetime, timezone
@@ -7,11 +9,13 @@ from pathlib import Path
7
9
  from typing import TYPE_CHECKING, Optional, Tuple, cast
8
10
  from uuid import uuid4
9
11
 
12
+ import aiofiles
10
13
  import click
11
14
 
12
15
  import flyte
13
16
  import flyte.errors
14
17
  from flyte import Image, remote
18
+ from flyte._code_bundle._utils import tar_strip_file_attributes
15
19
  from flyte._image import (
16
20
  AptPackages,
17
21
  Architecture,
@@ -130,7 +134,7 @@ class RemoteImageBuilder(ImageBuilder):
130
134
  auto_version="latest",
131
135
  )
132
136
  await flyte.with_runcontext(project=IMAGE_TASK_PROJECT, domain=IMAGE_TASK_DOMAIN).run.aio(
133
- entity, spec=spec, context=context, target_image=image_name
137
+ entity, target_image=image_name
134
138
  )
135
139
  except Exception as e:
136
140
  # Ignore the error if optimize is not enabled in the backend.
@@ -160,17 +164,32 @@ async def _validate_configuration(image: Image) -> Tuple[str, Optional[str]]:
160
164
 
161
165
  if any(context_path.iterdir()):
162
166
  # If there are files in the context directory, upload it
163
- archive = Path(shutil.make_archive(str(tmp_path / "context"), "xztar", context_path))
164
- st = archive.stat()
165
- if st.st_size > 5 * 1024 * 1024:
167
+ tar_path = tmp_path / "context.tar"
168
+ with tarfile.open(tar_path, "w", dereference=False) as tar:
169
+ files: typing.List[str] = os.listdir(context_path)
170
+ for ws_file in files:
171
+ tar.add(
172
+ os.path.join(context_path, ws_file),
173
+ recursive=True,
174
+ arcname=ws_file,
175
+ filter=tar_strip_file_attributes,
176
+ )
177
+ context_dst = Path(f"{tar_path!s}.gz")
178
+ with gzip.GzipFile(filename=context_dst, mode="wb", mtime=0) as gzipped:
179
+ async with aiofiles.open(tar_path, "rb") as tar_file:
180
+ content = await tar_file.read()
181
+ gzipped.write(content)
182
+
183
+ context_size = tar_path.stat().st_size
184
+ if context_size > 5 * 1024 * 1024:
166
185
  logger.warning(
167
186
  click.style(
168
- f"Context size is {st.st_size / (1024 * 1024):.2f} MB, which is larger than 5 MB. "
187
+ f"Context size is {context_size / (1024 * 1024):.2f} MB, which is larger than 5 MB. "
169
188
  "Upload and build speed will be impacted.",
170
189
  fg="yellow",
171
190
  )
172
191
  )
173
- _, context_url = await remote.upload_file.aio(archive)
192
+ _, context_url = await remote.upload_file.aio(context_dst)
174
193
  else:
175
194
  context_url = ""
176
195
 
@@ -248,12 +267,20 @@ def _get_layers_proto(image: Image, context_path: Path) -> "image_definition_pb2
248
267
  for line in layer.pyproject.read_text().splitlines():
249
268
  if "tool.uv.index" in line:
250
269
  raise ValueError("External sources are not supported in pyproject.toml")
251
- shutil.copy2(layer.pyproject, context_path / layer.pyproject.name)
270
+
271
+ if layer.extra_index_urls and "--no-install-project" in layer.extra_index_urls:
272
+ # Copy pyproject itself
273
+ pyproject_dst = copy_files_to_context(layer.pyproject, context_path)
274
+ else:
275
+ # Copy the entire project
276
+ pyproject_dst = copy_files_to_context(layer.pyproject.parent, context_path)
252
277
 
253
278
  uv_layer = image_definition_pb2.Layer(
254
279
  uv_project=image_definition_pb2.UVProject(
255
- pyproject=str(layer.pyproject.name),
256
- uvlock=str(layer.uvlock.name),
280
+ pyproject=str(pyproject_dst.relative_to(context_path)),
281
+ uvlock=str(copy_files_to_context(layer.uvlock, context_path).relative_to(context_path)),
282
+ options=pip_options,
283
+ secret_mounts=secret_mounts,
257
284
  )
258
285
  )
259
286
  layers.append(uv_layer)
@@ -303,7 +330,7 @@ def _get_fully_qualified_image_name(outputs: ActionOutputs) -> str:
303
330
 
304
331
  def _get_build_secrets_from_image(image: Image) -> Optional[typing.List[Secret]]:
305
332
  secrets = []
306
- DEFAULT_SECRET_DIR = Path("etc/flyte/secrets")
333
+ DEFAULT_SECRET_DIR = Path("/etc/flyte/secrets")
307
334
  for layer in image._layers:
308
335
  if isinstance(layer, (PipOption, Commands, AptPackages)) and layer.secret_mounts is not None:
309
336
  for secret_mount in layer.secret_mounts:
@@ -23,7 +23,8 @@ def copy_files_to_context(src: Path, context_path: Path) -> Path:
23
23
  dst_path = context_path / src
24
24
  dst_path.parent.mkdir(parents=True, exist_ok=True)
25
25
  if src.is_dir():
26
- shutil.copytree(src, dst_path, dirs_exist_ok=True)
26
+ # TODO: Add support dockerignore
27
+ shutil.copytree(src, dst_path, dirs_exist_ok=True, ignore=shutil.ignore_patterns(".idea", ".venv"))
27
28
  else:
28
29
  shutil.copy(src, dst_path)
29
30
  return dst_path
@@ -308,15 +308,82 @@ def generate_inputs_hash(serialized_inputs: str | bytes) -> str:
308
308
  return hash_data(serialized_inputs)
309
309
 
310
310
 
311
+ def generate_inputs_repr_for_literal(literal: literals_pb2.Literal) -> bytes:
312
+ """
313
+ Generate a byte representation for a single literal that is meant to be hashed as part of the cache key
314
+ computation for an Action. This function should just serialize the literal deterministically, but will
315
+ use an existing hash value if present in the Literal. This is trivial, except we need to handle nested literals
316
+ (inside collections and maps), that may have the hash property set.
317
+
318
+ :param literal: The literal to get a hashable representation for.
319
+ :return: byte representation of the literal that can be fed into a hash function.
320
+ """
321
+ # If the literal has a hash value, use that instead of serializing the full literal
322
+ if literal.hash:
323
+ return literal.hash.encode("utf-8")
324
+
325
+ if literal.HasField("collection"):
326
+ buf = bytearray()
327
+ for nested_literal in literal.collection.literals:
328
+ if nested_literal.hash:
329
+ buf += nested_literal.hash.encode("utf-8")
330
+ else:
331
+ buf += generate_inputs_repr_for_literal(nested_literal)
332
+
333
+ b = bytes(buf)
334
+ return b
335
+
336
+ elif literal.HasField("map"):
337
+ buf = bytearray()
338
+ # Sort keys to ensure deterministic ordering
339
+ for key in sorted(literal.map.literals.keys()):
340
+ nested_literal = literal.map.literals[key]
341
+ buf += key.encode("utf-8")
342
+ if nested_literal.hash:
343
+ buf += nested_literal.hash.encode("utf-8")
344
+ else:
345
+ buf += generate_inputs_repr_for_literal(nested_literal)
346
+
347
+ b = bytes(buf)
348
+ return b
349
+
350
+ # For all other cases (scalars, etc.), just serialize the literal normally
351
+ return literal.SerializeToString(deterministic=True)
352
+
353
+
354
+ def generate_inputs_hash_for_named_literals(inputs: list[run_definition_pb2.NamedLiteral]) -> str:
355
+ """
356
+ Generate a hash for the inputs using the new literal representation approach that respects
357
+ hash values already present in literals. This is used to uniquely identify the inputs for a task
358
+ when some literals may have precomputed hash values.
359
+
360
+ :param inputs: List of NamedLiteral inputs to hash.
361
+ :return: A base64-encoded string representation of the hash.
362
+ """
363
+ if not inputs:
364
+ return ""
365
+
366
+ # Build the byte representation by concatenating each literal's representation
367
+ combined_bytes = b""
368
+ for named_literal in inputs:
369
+ # Add the name to ensure order matters
370
+ name_bytes = named_literal.name.encode("utf-8")
371
+ literal_bytes = generate_inputs_repr_for_literal(named_literal.value)
372
+ # Combine name and literal bytes with a separator to avoid collisions
373
+ combined_bytes += name_bytes + b":" + literal_bytes + b";"
374
+
375
+ return hash_data(combined_bytes)
376
+
377
+
311
378
  def generate_inputs_hash_from_proto(inputs: run_definition_pb2.Inputs) -> str:
312
379
  """
313
380
  Generate a hash for the inputs. This is used to uniquely identify the inputs for a task.
314
381
  :param inputs: The inputs to hash.
315
382
  :return: A hexadecimal string representation of the hash.
316
383
  """
317
- if not inputs:
384
+ if not inputs or not inputs.literals:
318
385
  return ""
319
- return generate_inputs_hash(inputs.SerializeToString(deterministic=True))
386
+ return generate_inputs_hash_for_named_literals(list(inputs.literals))
320
387
 
321
388
 
322
389
  def generate_interface_hash(task_interface: interface_pb2.TypedInterface) -> str:
@@ -4,6 +4,7 @@ invoked within a context tree.
4
4
  """
5
5
 
6
6
  import pathlib
7
+ import time
7
8
  from typing import Any, Dict, List, Optional, Tuple
8
9
 
9
10
  import flyte.report
@@ -172,6 +173,8 @@ async def extract_download_run_upload(
172
173
  This method is invoked from the CLI (urun) and is used to run a task. This assumes that the context tree
173
174
  has already been created, and the task has been loaded. It also handles the loading of the task.
174
175
  """
176
+ t = time.time()
177
+ logger.warning(f"Task {action.name} started at {t}")
175
178
  outputs, err = await convert_and_run(
176
179
  task=task,
177
180
  input_path=input_path,
@@ -194,4 +197,4 @@ async def extract_download_run_upload(
194
197
  logger.info(f"Task {task.name} completed successfully, no outputs")
195
198
  return
196
199
  await upload_outputs(outputs, output_path) if output_path else None
197
- logger.info(f"Task {task.name} completed successfully, uploaded outputs to {output_path}")
200
+ logger.warning(f"Task {task.name} completed successfully, uploaded outputs to {output_path} in {time.time() - t}s")