flyte 2.0.0b21__py3-none-any.whl → 2.0.0b23__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of flyte might be problematic; see the package registry's advisory page for more details.

Files changed (92)
  1. flyte/__init__.py +5 -0
  2. flyte/_bin/runtime.py +36 -6
  3. flyte/_cache/cache.py +4 -2
  4. flyte/_cache/local_cache.py +215 -0
  5. flyte/_code_bundle/bundle.py +1 -0
  6. flyte/_debug/constants.py +0 -1
  7. flyte/_debug/vscode.py +6 -1
  8. flyte/_deploy.py +204 -55
  9. flyte/_environment.py +5 -0
  10. flyte/_excepthook.py +1 -1
  11. flyte/_image.py +101 -68
  12. flyte/_initialize.py +30 -1
  13. flyte/_interface.py +3 -1
  14. flyte/_internal/controllers/_local_controller.py +64 -24
  15. flyte/_internal/controllers/remote/_action.py +4 -1
  16. flyte/_internal/controllers/remote/_controller.py +5 -2
  17. flyte/_internal/controllers/remote/_core.py +6 -3
  18. flyte/_internal/controllers/remote/_informer.py +1 -1
  19. flyte/_internal/imagebuild/docker_builder.py +95 -28
  20. flyte/_internal/imagebuild/image_builder.py +0 -5
  21. flyte/_internal/imagebuild/remote_builder.py +6 -1
  22. flyte/_internal/runtime/io.py +13 -1
  23. flyte/_internal/runtime/rusty.py +17 -2
  24. flyte/_internal/runtime/task_serde.py +15 -11
  25. flyte/_internal/runtime/taskrunner.py +1 -1
  26. flyte/_internal/runtime/trigger_serde.py +153 -0
  27. flyte/_keyring/file.py +2 -2
  28. flyte/_logging.py +1 -1
  29. flyte/_protos/common/identifier_pb2.py +19 -1
  30. flyte/_protos/common/identifier_pb2.pyi +22 -0
  31. flyte/_protos/workflow/common_pb2.py +14 -3
  32. flyte/_protos/workflow/common_pb2.pyi +49 -0
  33. flyte/_protos/workflow/queue_service_pb2.py +41 -35
  34. flyte/_protos/workflow/queue_service_pb2.pyi +26 -12
  35. flyte/_protos/workflow/queue_service_pb2_grpc.py +34 -0
  36. flyte/_protos/workflow/run_definition_pb2.py +38 -38
  37. flyte/_protos/workflow/run_definition_pb2.pyi +4 -2
  38. flyte/_protos/workflow/run_service_pb2.py +60 -50
  39. flyte/_protos/workflow/run_service_pb2.pyi +24 -6
  40. flyte/_protos/workflow/run_service_pb2_grpc.py +34 -0
  41. flyte/_protos/workflow/task_definition_pb2.py +15 -11
  42. flyte/_protos/workflow/task_definition_pb2.pyi +19 -2
  43. flyte/_protos/workflow/task_service_pb2.py +18 -17
  44. flyte/_protos/workflow/task_service_pb2.pyi +5 -2
  45. flyte/_protos/workflow/trigger_definition_pb2.py +66 -0
  46. flyte/_protos/workflow/trigger_definition_pb2.pyi +117 -0
  47. flyte/_protos/workflow/trigger_definition_pb2_grpc.py +4 -0
  48. flyte/_protos/workflow/trigger_service_pb2.py +96 -0
  49. flyte/_protos/workflow/trigger_service_pb2.pyi +110 -0
  50. flyte/_protos/workflow/trigger_service_pb2_grpc.py +281 -0
  51. flyte/_run.py +42 -15
  52. flyte/_task.py +35 -4
  53. flyte/_task_environment.py +61 -16
  54. flyte/_trigger.py +382 -0
  55. flyte/_version.py +3 -3
  56. flyte/cli/_abort.py +3 -3
  57. flyte/cli/_build.py +1 -3
  58. flyte/cli/_common.py +17 -4
  59. flyte/cli/_create.py +74 -0
  60. flyte/cli/_delete.py +23 -1
  61. flyte/cli/_deploy.py +16 -10
  62. flyte/cli/_get.py +75 -34
  63. flyte/cli/_params.py +4 -2
  64. flyte/cli/_run.py +25 -6
  65. flyte/cli/_update.py +36 -0
  66. flyte/cli/_user.py +17 -0
  67. flyte/cli/main.py +9 -1
  68. flyte/errors.py +9 -0
  69. flyte/io/_dir.py +513 -115
  70. flyte/io/_file.py +495 -135
  71. flyte/models.py +32 -0
  72. flyte/remote/__init__.py +6 -1
  73. flyte/remote/_action.py +9 -8
  74. flyte/remote/_client/_protocols.py +36 -2
  75. flyte/remote/_client/controlplane.py +19 -3
  76. flyte/remote/_run.py +42 -2
  77. flyte/remote/_task.py +14 -1
  78. flyte/remote/_trigger.py +308 -0
  79. flyte/remote/_user.py +33 -0
  80. flyte/storage/__init__.py +6 -1
  81. flyte/storage/_storage.py +119 -101
  82. flyte/types/_pickle.py +34 -7
  83. flyte/types/_type_engine.py +6 -0
  84. {flyte-2.0.0b21.data → flyte-2.0.0b23.data}/scripts/runtime.py +36 -6
  85. {flyte-2.0.0b21.dist-info → flyte-2.0.0b23.dist-info}/METADATA +3 -1
  86. {flyte-2.0.0b21.dist-info → flyte-2.0.0b23.dist-info}/RECORD +91 -79
  87. flyte/_protos/secret/secret_pb2_grpc_grpc.py +0 -198
  88. {flyte-2.0.0b21.data → flyte-2.0.0b23.data}/scripts/debug.py +0 -0
  89. {flyte-2.0.0b21.dist-info → flyte-2.0.0b23.dist-info}/WHEEL +0 -0
  90. {flyte-2.0.0b21.dist-info → flyte-2.0.0b23.dist-info}/entry_points.txt +0 -0
  91. {flyte-2.0.0b21.dist-info → flyte-2.0.0b23.dist-info}/licenses/LICENSE +0 -0
  92. {flyte-2.0.0b21.dist-info → flyte-2.0.0b23.dist-info}/top_level.txt +0 -0
@@ -22,6 +22,7 @@ from flyte._image import (
22
22
  Layer,
23
23
  PipOption,
24
24
  PipPackages,
25
+ PoetryProject,
25
26
  PythonWheels,
26
27
  Requirements,
27
28
  UVProject,
@@ -46,44 +47,71 @@ FLYTE_DOCKER_BUILDER_CACHE_TO = "FLYTE_DOCKER_BUILDER_CACHE_TO"
46
47
 
47
48
  UV_LOCK_WITHOUT_PROJECT_INSTALL_TEMPLATE = Template("""\
48
49
  RUN --mount=type=cache,sharing=locked,mode=0777,target=/root/.cache/uv,id=uv \
49
- --mount=type=bind,target=uv.lock,src=$UV_LOCK_PATH \
50
- --mount=type=bind,target=pyproject.toml,src=$PYPROJECT_PATH \
51
- $SECRET_MOUNT \
52
- uv sync --active $PIP_INSTALL_ARGS
50
+ --mount=type=bind,target=uv.lock,src=$UV_LOCK_PATH \
51
+ --mount=type=bind,target=pyproject.toml,src=$PYPROJECT_PATH \
52
+ $SECRET_MOUNT \
53
+ uv sync --active --inexact $PIP_INSTALL_ARGS
53
54
  """)
54
55
 
55
56
  UV_LOCK_INSTALL_TEMPLATE = Template("""\
56
- COPY $PYPROJECT_PATH $PYPROJECT_PATH
57
57
  RUN --mount=type=cache,sharing=locked,mode=0777,target=/root/.cache/uv,id=uv \
58
- $SECRET_MOUNT \
59
- uv sync --active $PIP_INSTALL_ARGS --project $PYPROJECT_PATH
58
+ --mount=type=bind,target=/root/.flyte/$PYPROJECT_PATH,src=$PYPROJECT_PATH,rw \
59
+ $SECRET_MOUNT \
60
+ uv sync --active --inexact --no-editable $PIP_INSTALL_ARGS --project /root/.flyte/$PYPROJECT_PATH
61
+ """)
62
+
63
+ POETRY_LOCK_WITHOUT_PROJECT_INSTALL_TEMPLATE = Template("""\
64
+ RUN --mount=type=cache,sharing=locked,mode=0777,target=/root/.cache/uv,id=uv \
65
+ uv pip install poetry
66
+
67
+ ENV POETRY_CACHE_DIR=/tmp/poetry_cache \
68
+ POETRY_VIRTUALENVS_IN_PROJECT=true
69
+
70
+ RUN --mount=type=cache,sharing=locked,mode=0777,target=/tmp/poetry_cache,id=poetry \
71
+ --mount=type=bind,target=poetry.lock,src=$POETRY_LOCK_PATH \
72
+ --mount=type=bind,target=pyproject.toml,src=$PYPROJECT_PATH \
73
+ $SECRET_MOUNT \
74
+ poetry install $POETRY_INSTALL_ARGS
75
+ """)
76
+
77
+ POETRY_LOCK_INSTALL_TEMPLATE = Template("""\
78
+ RUN --mount=type=cache,sharing=locked,mode=0777,target=/root/.cache/uv,id=uv \
79
+ uv pip install poetry
80
+
81
+ ENV POETRY_CACHE_DIR=/tmp/poetry_cache \
82
+ POETRY_VIRTUALENVS_IN_PROJECT=true
83
+
84
+ RUN --mount=type=cache,sharing=locked,mode=0777,target=/tmp/poetry_cache,id=poetry \
85
+ --mount=type=bind,target=/root/.flyte/$PYPROJECT_PATH,src=$PYPROJECT_PATH,rw \
86
+ $SECRET_MOUNT \
87
+ poetry install $POETRY_INSTALL_ARGS
60
88
  """)
61
89
 
62
90
  UV_PACKAGE_INSTALL_COMMAND_TEMPLATE = Template("""\
63
91
  RUN --mount=type=cache,sharing=locked,mode=0777,target=/root/.cache/uv,id=uv \
64
- $REQUIREMENTS_MOUNT \
65
- $SECRET_MOUNT \
66
- uv pip install --python $$UV_PYTHON $PIP_INSTALL_ARGS
92
+ $REQUIREMENTS_MOUNT \
93
+ $SECRET_MOUNT \
94
+ uv pip install --python $$UV_PYTHON $PIP_INSTALL_ARGS
67
95
  """)
68
96
 
69
97
  UV_WHEEL_INSTALL_COMMAND_TEMPLATE = Template("""\
70
98
  RUN --mount=type=cache,sharing=locked,mode=0777,target=/root/.cache/uv,id=wheel \
71
- --mount=source=/dist,target=/dist,type=bind \
72
- $SECRET_MOUNT \
73
- uv pip install --python $$UV_PYTHON $PIP_INSTALL_ARGS
99
+ --mount=source=/dist,target=/dist,type=bind \
100
+ $SECRET_MOUNT \
101
+ uv pip install --python $$UV_PYTHON $PIP_INSTALL_ARGS
74
102
  """)
75
103
 
76
104
  APT_INSTALL_COMMAND_TEMPLATE = Template("""\
77
105
  RUN --mount=type=cache,sharing=locked,mode=0777,target=/var/cache/apt,id=apt \
78
- $SECRET_MOUNT \
79
- apt-get update && apt-get install -y --no-install-recommends \
80
- $APT_PACKAGES
106
+ $SECRET_MOUNT \
107
+ apt-get update && apt-get install -y --no-install-recommends \
108
+ $APT_PACKAGES
81
109
  """)
82
110
 
83
111
  UV_PYTHON_INSTALL_COMMAND = Template("""\
84
112
  RUN --mount=type=cache,sharing=locked,mode=0777,target=/root/.cache/uv,id=uv \
85
- $SECRET_MOUNT \
86
- uv pip install $PIP_INSTALL_ARGS
113
+ $SECRET_MOUNT \
114
+ uv pip install $PIP_INSTALL_ARGS
87
115
  """)
88
116
 
89
117
  # uv pip install --python /root/env/bin/python
@@ -93,24 +121,29 @@ DOCKER_FILE_UV_BASE_TEMPLATE = Template("""\
93
121
  FROM ghcr.io/astral-sh/uv:0.8.13 AS uv
94
122
  FROM $BASE_IMAGE
95
123
 
124
+
96
125
  USER root
97
126
 
127
+
98
128
  # Copy in uv so that later commands don't have to mount it in
99
129
  COPY --from=uv /uv /usr/bin/uv
100
130
 
131
+
101
132
  # Configure default envs
102
133
  ENV UV_COMPILE_BYTECODE=1 \
103
- UV_LINK_MODE=copy \
104
- VIRTUALENV=/opt/venv \
105
- UV_PYTHON=/opt/venv/bin/python \
106
- PATH="/opt/venv/bin:$$PATH"
134
+ UV_LINK_MODE=copy \
135
+ VIRTUALENV=/opt/venv \
136
+ UV_PYTHON=/opt/venv/bin/python \
137
+ PATH="/opt/venv/bin:$$PATH"
138
+
107
139
 
108
140
  # Create a virtualenv with the user specified python version
109
141
  RUN uv venv $$VIRTUALENV --python=$PYTHON_VERSION
110
142
 
143
+
111
144
  # Adds nvidia just in case it exists
112
145
  ENV PATH="$$PATH:/usr/local/nvidia/bin:/usr/local/cuda/bin" \
113
- LD_LIBRARY_PATH="/usr/local/nvidia/lib64"
146
+ LD_LIBRARY_PATH="/usr/local/nvidia/lib64"
114
147
  """)
115
148
 
116
149
  # This gets added on to the end of the dockerfile
@@ -245,6 +278,9 @@ class UVProjectHandler:
245
278
  else:
246
279
  # Copy the entire project.
247
280
  pyproject_dst = copy_files_to_context(layer.pyproject.parent, context_path)
281
+ if layer.uvlock:
282
+ # Sometimes the uv.lock file is in a different folder, if it's specified, let's copy it there explicitly
283
+ shutil.copy(layer.uvlock, pyproject_dst)
248
284
  delta = UV_LOCK_INSTALL_TEMPLATE.substitute(
249
285
  PYPROJECT_PATH=pyproject_dst.relative_to(context_path),
250
286
  PIP_INSTALL_ARGS=" ".join(layer.get_pip_install_args()),
@@ -255,6 +291,32 @@ class UVProjectHandler:
255
291
  return dockerfile
256
292
 
257
293
 
294
+ class PoetryProjectHandler:
295
+ @staticmethod
296
+ async def handel(layer: PoetryProject, context_path: Path, dockerfile: str) -> str:
297
+ secret_mounts = _get_secret_mounts_layer(layer.secret_mounts)
298
+ if layer.extra_args and "--no-root" in layer.extra_args:
299
+ # Only Copy pyproject.yaml and poetry.lock.
300
+ pyproject_dst = copy_files_to_context(layer.pyproject, context_path)
301
+ poetry_lock_dst = copy_files_to_context(layer.poetry_lock, context_path)
302
+ delta = POETRY_LOCK_WITHOUT_PROJECT_INSTALL_TEMPLATE.substitute(
303
+ POETRY_LOCK_PATH=poetry_lock_dst.relative_to(context_path),
304
+ PYPROJECT_PATH=pyproject_dst.relative_to(context_path),
305
+ POETRY_INSTALL_ARGS=layer.extra_args or "",
306
+ SECRET_MOUNT=secret_mounts,
307
+ )
308
+ else:
309
+ # Copy the entire project.
310
+ pyproject_dst = copy_files_to_context(layer.pyproject.parent, context_path)
311
+ delta = POETRY_LOCK_INSTALL_TEMPLATE.substitute(
312
+ PYPROJECT_PATH=pyproject_dst.relative_to(context_path),
313
+ POETRY_INSTALL_ARGS=layer.extra_args or "",
314
+ SECRET_MOUNT=secret_mounts,
315
+ )
316
+ dockerfile += delta
317
+ return dockerfile
318
+
319
+
258
320
  class DockerIgnoreHandler:
259
321
  @staticmethod
260
322
  async def handle(layer: DockerIgnore, context_path: Path, _: str):
@@ -329,8 +391,9 @@ class CommandsHandler:
329
391
  @staticmethod
330
392
  async def handle(layer: Commands, _: Path, dockerfile: str) -> str:
331
393
  # Append raw commands to the dockerfile
394
+ secret_mounts = _get_secret_mounts_layer(layer.secret_mounts)
332
395
  for command in layer.commands:
333
- dockerfile += f"\nRUN {command}\n"
396
+ dockerfile += f"\nRUN {secret_mounts} {command}\n"
334
397
 
335
398
  return dockerfile
336
399
 
@@ -352,9 +415,8 @@ def _get_secret_commands(layers: typing.Tuple[Layer, ...]) -> typing.List[str]:
352
415
  secret = Secret(key=secret)
353
416
  secret_id = hash(secret)
354
417
  secret_env_key = "_".join([k.upper() for k in filter(None, (secret.group, secret.key))])
355
- secret_env = os.getenv(secret_env_key)
356
- if secret_env:
357
- return ["--secret", f"id={secret_id},env={secret_env}"]
418
+ if os.getenv(secret_env_key):
419
+ return ["--secret", f"id={secret_id},env={secret_env_key}"]
358
420
  secret_file_name = "_".join(list(filter(None, (secret.group, secret.key))))
359
421
  secret_file_path = f"/etc/secrets/{secret_file_name}"
360
422
  if not os.path.exists(secret_file_path):
@@ -362,7 +424,7 @@ def _get_secret_commands(layers: typing.Tuple[Layer, ...]) -> typing.List[str]:
362
424
  return ["--secret", f"id={secret_id},src={secret_file_path}"]
363
425
 
364
426
  for layer in layers:
365
- if isinstance(layer, (PipOption, AptPackages)):
427
+ if isinstance(layer, (PipOption, AptPackages, Commands)):
366
428
  if layer.secret_mounts:
367
429
  for secret_mount in layer.secret_mounts:
368
430
  commands.extend(_get_secret_command(secret_mount))
@@ -423,6 +485,10 @@ async def _process_layer(
423
485
  # Handle UV project
424
486
  dockerfile = await UVProjectHandler.handle(layer, context_path, dockerfile)
425
487
 
488
+ case PoetryProject():
489
+ # Handle Poetry project
490
+ dockerfile = await PoetryProjectHandler.handel(layer, context_path, dockerfile)
491
+
426
492
  case CopyConfig():
427
493
  # Handle local files and folders
428
494
  dockerfile = await CopyConfigHandler.handle(layer, context_path, dockerfile, docker_ignore_file_path)
@@ -569,6 +635,7 @@ class DockerImageBuilder(ImageBuilder):
569
635
  - start from the base image
570
636
  - use python to create a default venv and export variables
571
637
 
638
+
572
639
  Then for the layers
573
640
  - for each layer
574
641
  - find the appropriate layer handler
@@ -135,11 +135,6 @@ class ImageBuildEngine:
135
135
 
136
136
  ImageBuilderType = typing.Literal["local", "remote"]
137
137
 
138
- _SEEN_IMAGES: typing.ClassVar[typing.Dict[str, str]] = {
139
- # Set default for the auto container. See Image._identifier_override for more info.
140
- "auto": Image.from_debian_base().uri,
141
- }
142
-
143
138
  @staticmethod
144
139
  @alru_cache
145
140
  async def image_exists(image: Image) -> Optional[str]:
@@ -182,6 +182,11 @@ async def _validate_configuration(image: Image) -> Tuple[str, Optional[str]]:
182
182
  def _get_layers_proto(image: Image, context_path: Path) -> "image_definition_pb2.ImageSpec":
183
183
  from flyte._protos.imagebuilder import definition_pb2 as image_definition_pb2
184
184
 
185
+ if image.dockerfile is not None:
186
+ raise flyte.errors.ImageBuildError(
187
+ "Custom Dockerfile is not supported with remote image builder.You can use local image builder instead."
188
+ )
189
+
185
190
  layers = []
186
191
  for layer in image._layers:
187
192
  secret_mounts = None
@@ -251,7 +256,7 @@ def _get_layers_proto(image: Image, context_path: Path) -> "image_definition_pb2
251
256
  if "tool.uv.index" in line:
252
257
  raise ValueError("External sources are not supported in pyproject.toml")
253
258
 
254
- if layer.extra_index_urls and "--no-install-project" in layer.extra_index_urls:
259
+ if layer.extra_args and "--no-install-project" in layer.extra_args:
255
260
  # Copy pyproject itself
256
261
  pyproject_dst = copy_files_to_context(layer.pyproject, context_path)
257
262
  else:
@@ -9,6 +9,7 @@ from flyteidl.core import errors_pb2, execution_pb2
9
9
 
10
10
  import flyte.storage as storage
11
11
  from flyte._protos.workflow import run_definition_pb2
12
+ from flyte.models import PathRewrite
12
13
 
13
14
  from .convert import Inputs, Outputs, _clean_error_code
14
15
 
@@ -90,10 +91,11 @@ async def upload_error(err: execution_pb2.ExecutionError, output_prefix: str):
90
91
 
91
92
 
92
93
  # ------------------------------- DOWNLOAD Methods ------------------------------- #
93
- async def load_inputs(path: str, max_bytes: int = -1) -> Inputs:
94
+ async def load_inputs(path: str, max_bytes: int = -1, path_rewrite_config: PathRewrite | None = None) -> Inputs:
94
95
  """
95
96
  :param path: Input file to be downloaded
96
97
  :param max_bytes: Maximum number of bytes to read from the input file. Default is -1, which means no limit.
98
+ :param path_rewrite_config: If provided, rewrites paths in the input blobs according to the configuration.
97
99
  :return: Inputs object
98
100
  """
99
101
  lm = run_definition_pb2.Inputs()
@@ -115,6 +117,16 @@ async def load_inputs(path: str, max_bytes: int = -1) -> Inputs:
115
117
  proto_str = b"".join(proto_bytes)
116
118
 
117
119
  lm.ParseFromString(proto_str)
120
+
121
+ if path_rewrite_config is not None:
122
+ for inp in lm.literals:
123
+ if inp.value.HasField("scalar") and inp.value.scalar.HasField("blob"):
124
+ scalar_blob = inp.value.scalar.blob
125
+ if scalar_blob.uri.startswith(path_rewrite_config.old_prefix):
126
+ scalar_blob.uri = scalar_blob.uri.replace(
127
+ path_rewrite_config.old_prefix, path_rewrite_config.new_prefix, 1
128
+ )
129
+
118
130
  return Inputs(proto_inputs=lm)
119
131
 
120
132
 
@@ -11,7 +11,7 @@ from flyte._internal.runtime.entrypoints import download_code_bundle, load_pkl_t
11
11
  from flyte._internal.runtime.taskrunner import extract_download_run_upload
12
12
  from flyte._logging import logger
13
13
  from flyte._task import TaskTemplate
14
- from flyte.models import ActionID, Checkpoints, CodeBundle, RawDataPath
14
+ from flyte.models import ActionID, Checkpoints, CodeBundle, PathRewrite, RawDataPath
15
15
 
16
16
 
17
17
  async def download_tgz(destination: str, version: str, tgz: str) -> CodeBundle:
@@ -115,6 +115,7 @@ async def run_task(
115
115
  prev_checkpoint: str | None = None,
116
116
  code_bundle: CodeBundle | None = None,
117
117
  input_path: str | None = None,
118
+ path_rewrite_cfg: str | None = None,
118
119
  ):
119
120
  """
120
121
  Runs the task with the provided parameters.
@@ -134,6 +135,7 @@ async def run_task(
134
135
  :param controller: The controller to use for the task.
135
136
  :param code_bundle: Optional code bundle for the task.
136
137
  :param input_path: Optional input path for the task.
138
+ :param path_rewrite_cfg: Optional path rewrite configuration.
137
139
  :return: The loaded task template.
138
140
  """
139
141
  start_time = time.time()
@@ -144,6 +146,19 @@ async def run_task(
144
146
  f" at {time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(start_time))}"
145
147
  )
146
148
 
149
+ path_rewrite = PathRewrite.from_str(path_rewrite_cfg) if path_rewrite_cfg else None
150
+ if path_rewrite:
151
+ import flyte.storage as storage
152
+
153
+ if not await storage.exists(path_rewrite.new_prefix):
154
+ logger.error(
155
+ f"[rusty] Path rewrite failed for path {path_rewrite.new_prefix}, "
156
+ f"not found, reverting to original path {path_rewrite.old_prefix}"
157
+ )
158
+ path_rewrite = None
159
+ else:
160
+ logger.info(f"[rusty] Using path rewrite: {path_rewrite}")
161
+
147
162
  try:
148
163
  await contextual_run(
149
164
  extract_download_run_upload,
@@ -151,7 +166,7 @@ async def run_task(
151
166
  action=ActionID(name=name, org=org, project=project, domain=domain, run_name=run_name),
152
167
  version=version,
153
168
  controller=controller,
154
- raw_data_path=RawDataPath(path=raw_data_path),
169
+ raw_data_path=RawDataPath(path=raw_data_path, path_rewrite=path_rewrite),
155
170
  output_path=output_path,
156
171
  run_base_dir=run_base_dir,
157
172
  checkpoints=Checkpoints(prev_checkpoint_path=prev_checkpoint, checkpoint_path=checkpoint_path),
@@ -119,7 +119,7 @@ def get_proto_task(task: TaskTemplate, serialize_context: SerializationContext)
119
119
  version=serialize_context.version,
120
120
  )
121
121
 
122
- # TODO Add support for SQL, extra_config, custom
122
+ # TODO Add support for extra_config, custom
123
123
  extra_config: typing.Dict[str, str] = {}
124
124
 
125
125
  if task.pod_template and not isinstance(task.pod_template, str):
@@ -132,7 +132,7 @@ def get_proto_task(task: TaskTemplate, serialize_context: SerializationContext)
132
132
 
133
133
  custom = task.custom_config(serialize_context)
134
134
 
135
- sql = None
135
+ sql = task.sql(serialize_context)
136
136
 
137
137
  # -------------- CACHE HANDLING ----------------------
138
138
  task_cache = cache_from_request(task.cache)
@@ -170,7 +170,7 @@ def get_proto_task(task: TaskTemplate, serialize_context: SerializationContext)
170
170
  retries=get_proto_retry_strategy(task.retries),
171
171
  timeout=get_proto_timeout(task.timeout),
172
172
  pod_template_name=(task.pod_template if task.pod_template and isinstance(task.pod_template, str) else None),
173
- interruptible=task.interruptable,
173
+ interruptible=task.interruptible,
174
174
  generates_deck=wrappers_pb2.BoolValue(value=task.report),
175
175
  ),
176
176
  interface=transform_native_to_typed_interface(task.native_interface),
@@ -211,22 +211,26 @@ def _get_urun_container(
211
211
  # pr: under what conditions should this return None?
212
212
  if isinstance(task_template.image, str):
213
213
  raise flyte.errors.RuntimeSystemError("BadConfig", "Image is not a valid image")
214
- image_id = task_template.image.identifier
214
+
215
+ env_name = ""
216
+ if task_template.parent_env is not None:
217
+ task_env = task_template.parent_env()
218
+ if task_env is not None:
219
+ env_name = task_env.name
220
+ else:
221
+ raise flyte.errors.RuntimeSystemError("BadConfig", "Task template has no parent environment")
222
+
215
223
  if not serialize_context.image_cache:
216
224
  # This computes the image uri, computing hashes as necessary so can fail if done remotely.
217
225
  img_uri = task_template.image.uri
218
- elif serialize_context.image_cache and image_id not in serialize_context.image_cache.image_lookup:
226
+ elif serialize_context.image_cache and env_name not in serialize_context.image_cache.image_lookup:
219
227
  img_uri = task_template.image.uri
220
- from flyte._version import __version__
221
228
 
222
229
  logger.warning(
223
- f"Image {task_template.image} not found in the image cache: {serialize_context.image_cache.image_lookup}.\n"
224
- f"This typically occurs when the Flyte SDK version (`{__version__}`) used in the task environment "
225
- f"differs from the version used to compile or deploy it.\n"
226
- f"Ensure both environments use the same Flyte SDK version to avoid inconsistencies in image resolution."
230
+ f"Image {task_template.image} not found in the image cache: {serialize_context.image_cache.image_lookup}."
227
231
  )
228
232
  else:
229
- img_uri = serialize_context.image_cache.image_lookup[image_id]
233
+ img_uri = serialize_context.image_cache.image_lookup[env_name]
230
234
 
231
235
  return tasks_pb2.Container(
232
236
  image=img_uri,
@@ -144,7 +144,7 @@ async def convert_and_run(
144
144
  interactive_mode=interactive_mode,
145
145
  )
146
146
  with ctx.replace_task_context(tctx):
147
- inputs = await load_inputs(input_path) if input_path else inputs
147
+ inputs = await load_inputs(input_path, path_rewrite_config=raw_data_path.path_rewrite) if input_path else inputs
148
148
  inputs_kwargs = await convert_inputs_to_native(inputs, task.native_interface)
149
149
  out, err = await run_task(tctx=tctx, controller=controller, task=task, inputs=inputs_kwargs)
150
150
  if err is not None:
@@ -0,0 +1,153 @@
1
+ import asyncio
2
+ from typing import Union
3
+
4
+ from flyteidl.core import interface_pb2, literals_pb2
5
+ from google.protobuf import timestamp_pb2, wrappers_pb2
6
+
7
+ import flyte.types
8
+ from flyte import Cron, FixedRate, Trigger, TriggerTime
9
+ from flyte._protos.workflow import common_pb2, run_definition_pb2, trigger_definition_pb2
10
+
11
+
12
+ def _to_schedule(m: Union[Cron, FixedRate], kickoff_arg_name: str | None = None) -> common_pb2.Schedule:
13
+ if isinstance(m, Cron):
14
+ return common_pb2.Schedule(
15
+ cron_expression=m.expression,
16
+ kickoff_time_input_arg=kickoff_arg_name,
17
+ )
18
+ elif isinstance(m, FixedRate):
19
+ start_time = None
20
+ if m.start_time is not None:
21
+ start_time = timestamp_pb2.Timestamp()
22
+ start_time.FromDatetime(m.start_time)
23
+
24
+ return common_pb2.Schedule(
25
+ rate=common_pb2.FixedRate(
26
+ value=m.interval_minutes,
27
+ unit=common_pb2.FixedRateUnit.FIXED_RATE_UNIT_MINUTE,
28
+ start_time=start_time,
29
+ ),
30
+ kickoff_time_input_arg=kickoff_arg_name,
31
+ )
32
+
33
+
34
+ async def process_default_inputs(
35
+ default_inputs: dict,
36
+ task_name: str,
37
+ task_inputs: interface_pb2.VariableMap,
38
+ task_default_inputs: list[common_pb2.NamedParameter],
39
+ ) -> list[run_definition_pb2.NamedLiteral]:
40
+ """
41
+ Process default inputs and convert them to NamedLiteral objects.
42
+
43
+ Args:
44
+ default_inputs: Dictionary of default input values
45
+ task_name: Name of the task for error messages
46
+ task_inputs: Task input variable map
47
+ task_default_inputs: List of default parameters from task
48
+
49
+ Returns:
50
+ List of NamedLiteral objects
51
+ """
52
+ keys = []
53
+ literal_coros = []
54
+ for k, v in default_inputs.items():
55
+ if k not in task_inputs.variables:
56
+ raise ValueError(
57
+ f"Trigger default input '{k}' must be an input to the task, but not found in task {task_name}. "
58
+ f"Available inputs: {list(task_inputs.variables.keys())}"
59
+ )
60
+ else:
61
+ literal_coros.append(flyte.types.TypeEngine.to_literal(v, type(v), task_inputs.variables[k].type))
62
+ keys.append(k)
63
+
64
+ final_literals: list[literals_pb2.Literal] = await asyncio.gather(*literal_coros)
65
+
66
+ for p in task_default_inputs or []:
67
+ if p.name not in keys:
68
+ keys.append(p.name)
69
+ final_literals.append(p.parameter.default)
70
+
71
+ literals: list[run_definition_pb2.NamedLiteral] = []
72
+ for k, lit in zip(keys, final_literals):
73
+ literals.append(
74
+ run_definition_pb2.NamedLiteral(
75
+ name=k,
76
+ value=lit,
77
+ )
78
+ )
79
+
80
+ return literals
81
+
82
+
83
+ async def to_task_trigger(
84
+ t: Trigger,
85
+ task_name: str,
86
+ task_inputs: interface_pb2.VariableMap,
87
+ task_default_inputs: list[common_pb2.NamedParameter],
88
+ ) -> trigger_definition_pb2.TaskTrigger:
89
+ """
90
+ Converts a Trigger object to a TaskTrigger protobuf object.
91
+ Args:
92
+ t:
93
+ task_name:
94
+ task_inputs:
95
+ task_default_inputs:
96
+ Returns:
97
+
98
+ """
99
+ env = None
100
+ if t.env_vars:
101
+ env = run_definition_pb2.Envs()
102
+ for k, v in t.env_vars.items():
103
+ env.values.append(literals_pb2.KeyValuePair(key=k, value=v))
104
+
105
+ labels = run_definition_pb2.Labels(values=t.labels) if t.labels else None
106
+
107
+ annotations = run_definition_pb2.Annotations(values=t.annotations) if t.annotations else None
108
+
109
+ run_spec = run_definition_pb2.RunSpec(
110
+ overwrite_cache=t.overwrite_cache,
111
+ envs=env,
112
+ interruptible=wrappers_pb2.BoolValue(value=t.interruptible) if t.interruptible is not None else None,
113
+ cluster=t.queue,
114
+ labels=labels,
115
+ annotations=annotations,
116
+ )
117
+
118
+ kickoff_arg_name = None
119
+ default_inputs = {}
120
+ if t.inputs:
121
+ for k, v in t.inputs.items():
122
+ if v is TriggerTime:
123
+ kickoff_arg_name = k
124
+ else:
125
+ default_inputs[k] = v
126
+
127
+ # assert that default_inputs and the kickoff_arg_name are infact in the task inputs
128
+ if kickoff_arg_name is not None and kickoff_arg_name not in task_inputs.variables:
129
+ raise ValueError(
130
+ f"For a scheduled trigger, the TriggerTime input '{kickoff_arg_name}' "
131
+ f"must be an input to the task, but not found in task {task_name}. "
132
+ f"Available inputs: {list(task_inputs.variables.keys())}"
133
+ )
134
+
135
+ literals = await process_default_inputs(default_inputs, task_name, task_inputs, task_default_inputs)
136
+
137
+ automation = _to_schedule(
138
+ t.automation,
139
+ kickoff_arg_name=kickoff_arg_name,
140
+ )
141
+
142
+ return trigger_definition_pb2.TaskTrigger(
143
+ name=t.name,
144
+ spec=trigger_definition_pb2.TaskTriggerSpec(
145
+ active=t.auto_activate,
146
+ run_spec=run_spec,
147
+ inputs=run_definition_pb2.Inputs(literals=literals),
148
+ ),
149
+ automation_spec=common_pb2.TriggerAutomationSpec(
150
+ type=common_pb2.TriggerAutomationSpec.Type.TYPE_SCHEDULE,
151
+ schedule=automation,
152
+ ),
153
+ )
flyte/_keyring/file.py CHANGED
@@ -72,9 +72,9 @@ class SimplePlainTextKeyring(KeyringBackend):
72
72
 
73
73
  @property
74
74
  def file_path(self) -> Path:
75
- from flyte.config._reader import resolve_config_path
75
+ from flyte._initialize import get_common_config
76
76
 
77
- config_path = resolve_config_path()
77
+ config_path = get_common_config().source_config_path
78
78
  if config_path and str(config_path.parent) == ".flyte":
79
79
  # if the config is in a .flyte directory, use that as the path
80
80
  return config_path.parent / "keyring.cfg"
flyte/_logging.py CHANGED
@@ -71,7 +71,7 @@ def get_rich_handler(log_level: int) -> Optional[logging.Handler]:
71
71
 
72
72
  handler = RichHandler(
73
73
  tracebacks_suppress=[click],
74
- rich_tracebacks=True,
74
+ rich_tracebacks=False,
75
75
  omit_repeated_times=False,
76
76
  show_path=False,
77
77
  log_time_format="%H:%M:%S.%f",
@@ -14,7 +14,7 @@ _sym_db = _symbol_database.Default()
14
14
  from flyte._protos.validate.validate import validate_pb2 as validate_dot_validate__pb2
15
15
 
16
16
 
17
- DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x17\x63ommon/identifier.proto\x12\x0f\x63loudidl.common\x1a\x17validate/validate.proto\"~\n\x11ProjectIdentifier\x12+\n\x0corganization\x18\x01 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01R\x0corganization\x12\x1f\n\x06\x64omain\x18\x02 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01R\x06\x64omain\x12\x1b\n\x04name\x18\x03 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01R\x04name\"T\n\x11\x43lusterIdentifier\x12\"\n\x0corganization\x18\x01 \x01(\tR\x0corganization\x12\x1b\n\x04name\x18\x02 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01R\x04name\"O\n\x15\x43lusterPoolIdentifier\x12\"\n\x0corganization\x18\x01 \x01(\tR\x0corganization\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\"_\n\x17\x43lusterConfigIdentifier\x12+\n\x0corganization\x18\x01 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01R\x0corganization\x12\x17\n\x02id\x18\x02 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01R\x02id\"\x88\x01\n\x19\x43lusterNodepoolIdentifier\x12\"\n\x0corganization\x18\x01 \x01(\tR\x0corganization\x12*\n\x0c\x63luster_name\x18\x02 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01R\x0b\x63lusterName\x12\x1b\n\x04name\x18\x03 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01R\x04name\"3\n\x0eUserIdentifier\x12!\n\x07subject\x18\x01 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01R\x07subject\":\n\x15\x41pplicationIdentifier\x12!\n\x07subject\x18\x01 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01R\x07subject\"Q\n\x0eRoleIdentifier\x12\"\n\x0corganization\x18\x01 \x01(\tR\x0corganization\x12\x1b\n\x04name\x18\x02 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01R\x04name\"O\n\rOrgIdentifier\x12>\n\x04name\x18\x01 \x01(\tB*\xfa\x42\'r%\x10\x01\x18?2\x1f^[a-z0-9]([-a-z0-9]*[a-z0-9])?$R\x04name\"y\n\x18ManagedClusterIdentifier\x12\x1b\n\x04name\x18\x02 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01R\x04name\x12:\n\x03org\x18\x03 \x01(\x0b\x32\x1e.cloudidl.common.OrgIdentifierB\x08\xfa\x42\x05\x8a\x01\x02\x10\x01R\x03orgJ\x04\x08\x01\x10\x02\"S\n\x10PolicyIdentifier\x12\"\n\x0corganization\x18\x01 
\x01(\tR\x0corganization\x12\x1b\n\x04name\x18\x02 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01R\x04name\"\x93\x01\n\rRunIdentifier\x12\x1b\n\x03org\x18\x01 \x01(\tB\t\xfa\x42\x06r\x04\x10\x01\x18?R\x03org\x12#\n\x07project\x18\x02 \x01(\tB\t\xfa\x42\x06r\x04\x10\x01\x18?R\x07project\x12!\n\x06\x64omain\x18\x03 \x01(\tB\t\xfa\x42\x06r\x04\x10\x01\x18?R\x06\x64omain\x12\x1d\n\x04name\x18\x04 \x01(\tB\t\xfa\x42\x06r\x04\x10\x01\x18\x1eR\x04name\"m\n\x10\x41\x63tionIdentifier\x12:\n\x03run\x18\x01 \x01(\x0b\x32\x1e.cloudidl.common.RunIdentifierB\x08\xfa\x42\x05\x8a\x01\x02\x10\x01R\x03run\x12\x1d\n\x04name\x18\x02 \x01(\tB\t\xfa\x42\x06r\x04\x10\x01\x18\x1eR\x04name\"\x86\x01\n\x17\x41\x63tionAttemptIdentifier\x12H\n\taction_id\x18\x01 \x01(\x0b\x32!.cloudidl.common.ActionIdentifierB\x08\xfa\x42\x05\x8a\x01\x02\x10\x01R\x08\x61\x63tionId\x12!\n\x07\x61ttempt\x18\x02 \x01(\rB\x07\xfa\x42\x04*\x02 \x00R\x07\x61ttemptB\xb0\x01\n\x13\x63om.cloudidl.commonB\x0fIdentifierProtoH\x02P\x01Z)github.com/unionai/cloud/gen/pb-go/common\xa2\x02\x03\x43\x43X\xaa\x02\x0f\x43loudidl.Common\xca\x02\x0f\x43loudidl\\Common\xe2\x02\x1b\x43loudidl\\Common\\GPBMetadata\xea\x02\x10\x43loudidl::Commonb\x06proto3')
17
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x17\x63ommon/identifier.proto\x12\x0f\x63loudidl.common\x1a\x17validate/validate.proto\"~\n\x11ProjectIdentifier\x12+\n\x0corganization\x18\x01 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01R\x0corganization\x12\x1f\n\x06\x64omain\x18\x02 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01R\x06\x64omain\x12\x1b\n\x04name\x18\x03 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01R\x04name\"T\n\x11\x43lusterIdentifier\x12\"\n\x0corganization\x18\x01 \x01(\tR\x0corganization\x12\x1b\n\x04name\x18\x02 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01R\x04name\"O\n\x15\x43lusterPoolIdentifier\x12\"\n\x0corganization\x18\x01 \x01(\tR\x0corganization\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\"_\n\x17\x43lusterConfigIdentifier\x12+\n\x0corganization\x18\x01 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01R\x0corganization\x12\x17\n\x02id\x18\x02 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01R\x02id\"\x88\x01\n\x19\x43lusterNodepoolIdentifier\x12\"\n\x0corganization\x18\x01 \x01(\tR\x0corganization\x12*\n\x0c\x63luster_name\x18\x02 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01R\x0b\x63lusterName\x12\x1b\n\x04name\x18\x03 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01R\x04name\"3\n\x0eUserIdentifier\x12!\n\x07subject\x18\x01 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01R\x07subject\":\n\x15\x41pplicationIdentifier\x12!\n\x07subject\x18\x01 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01R\x07subject\"Q\n\x0eRoleIdentifier\x12\"\n\x0corganization\x18\x01 \x01(\tR\x0corganization\x12\x1b\n\x04name\x18\x02 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01R\x04name\"O\n\rOrgIdentifier\x12>\n\x04name\x18\x01 \x01(\tB*\xfa\x42\'r%\x10\x01\x18?2\x1f^[a-z0-9]([-a-z0-9]*[a-z0-9])?$R\x04name\"y\n\x18ManagedClusterIdentifier\x12\x1b\n\x04name\x18\x02 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01R\x04name\x12:\n\x03org\x18\x03 \x01(\x0b\x32\x1e.cloudidl.common.OrgIdentifierB\x08\xfa\x42\x05\x8a\x01\x02\x10\x01R\x03orgJ\x04\x08\x01\x10\x02\"S\n\x10PolicyIdentifier\x12\"\n\x0corganization\x18\x01 
\x01(\tR\x0corganization\x12\x1b\n\x04name\x18\x02 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01R\x04name\"\x93\x01\n\rRunIdentifier\x12\x1b\n\x03org\x18\x01 \x01(\tB\t\xfa\x42\x06r\x04\x10\x01\x18?R\x03org\x12#\n\x07project\x18\x02 \x01(\tB\t\xfa\x42\x06r\x04\x10\x01\x18?R\x07project\x12!\n\x06\x64omain\x18\x03 \x01(\tB\t\xfa\x42\x06r\x04\x10\x01\x18?R\x06\x64omain\x12\x1d\n\x04name\x18\x04 \x01(\tB\t\xfa\x42\x06r\x04\x10\x01\x18\x1eR\x04name\"m\n\x10\x41\x63tionIdentifier\x12:\n\x03run\x18\x01 \x01(\x0b\x32\x1e.cloudidl.common.RunIdentifierB\x08\xfa\x42\x05\x8a\x01\x02\x10\x01R\x03run\x12\x1d\n\x04name\x18\x02 \x01(\tB\t\xfa\x42\x06r\x04\x10\x01\x18\x1eR\x04name\"\x86\x01\n\x17\x41\x63tionAttemptIdentifier\x12H\n\taction_id\x18\x01 \x01(\x0b\x32!.cloudidl.common.ActionIdentifierB\x08\xfa\x42\x05\x8a\x01\x02\x10\x01R\x08\x61\x63tionId\x12!\n\x07\x61ttempt\x18\x02 \x01(\rB\x07\xfa\x42\x04*\x02 \x00R\x07\x61ttempt\"\xbb\x01\n\x0bTriggerName\x12\x1b\n\x03org\x18\x01 \x01(\tB\t\xfa\x42\x06r\x04\x10\x01\x18?R\x03org\x12#\n\x07project\x18\x02 \x01(\tB\t\xfa\x42\x06r\x04\x10\x01\x18?R\x07project\x12!\n\x06\x64omain\x18\x03 \x01(\tB\t\xfa\x42\x06r\x04\x10\x01\x18?R\x06\x64omain\x12\x1e\n\x04name\x18\x04 \x01(\tB\n\xfa\x42\x07r\x05\x10\x01\x18\xff\x01R\x04name\x12\'\n\ttask_name\x18\x05 \x01(\tB\n\xfa\x42\x07r\x05\x10\x01\x18\xff\x01R\x08taskName\"t\n\x11TriggerIdentifier\x12:\n\x04name\x18\x01 \x01(\x0b\x32\x1c.cloudidl.common.TriggerNameB\x08\xfa\x42\x05\x8a\x01\x02\x10\x01R\x04name\x12#\n\x08revision\x18\x02 \x01(\x04\x42\x07\xfa\x42\x04\x32\x02 \x00R\x08revisionB\xb0\x01\n\x13\x63om.cloudidl.commonB\x0fIdentifierProtoH\x02P\x01Z)github.com/unionai/cloud/gen/pb-go/common\xa2\x02\x03\x43\x43X\xaa\x02\x0f\x43loudidl.Common\xca\x02\x0f\x43loudidl\\Common\xe2\x02\x1b\x43loudidl\\Common\\GPBMetadata\xea\x02\x10\x43loudidl::Commonb\x06proto3')
18
18
 
19
19
  _globals = globals()
20
20
  _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
@@ -68,6 +68,20 @@ if _descriptor._USE_C_DESCRIPTORS == False:
68
68
  _ACTIONATTEMPTIDENTIFIER.fields_by_name['action_id']._serialized_options = b'\372B\005\212\001\002\020\001'
69
69
  _ACTIONATTEMPTIDENTIFIER.fields_by_name['attempt']._options = None
70
70
  _ACTIONATTEMPTIDENTIFIER.fields_by_name['attempt']._serialized_options = b'\372B\004*\002 \000'
71
+ _TRIGGERNAME.fields_by_name['org']._options = None
72
+ _TRIGGERNAME.fields_by_name['org']._serialized_options = b'\372B\006r\004\020\001\030?'
73
+ _TRIGGERNAME.fields_by_name['project']._options = None
74
+ _TRIGGERNAME.fields_by_name['project']._serialized_options = b'\372B\006r\004\020\001\030?'
75
+ _TRIGGERNAME.fields_by_name['domain']._options = None
76
+ _TRIGGERNAME.fields_by_name['domain']._serialized_options = b'\372B\006r\004\020\001\030?'
77
+ _TRIGGERNAME.fields_by_name['name']._options = None
78
+ _TRIGGERNAME.fields_by_name['name']._serialized_options = b'\372B\007r\005\020\001\030\377\001'
79
+ _TRIGGERNAME.fields_by_name['task_name']._options = None
80
+ _TRIGGERNAME.fields_by_name['task_name']._serialized_options = b'\372B\007r\005\020\001\030\377\001'
81
+ _TRIGGERIDENTIFIER.fields_by_name['name']._options = None
82
+ _TRIGGERIDENTIFIER.fields_by_name['name']._serialized_options = b'\372B\005\212\001\002\020\001'
83
+ _TRIGGERIDENTIFIER.fields_by_name['revision']._options = None
84
+ _TRIGGERIDENTIFIER.fields_by_name['revision']._serialized_options = b'\372B\0042\002 \000'
71
85
  _globals['_PROJECTIDENTIFIER']._serialized_start=69
72
86
  _globals['_PROJECTIDENTIFIER']._serialized_end=195
73
87
  _globals['_CLUSTERIDENTIFIER']._serialized_start=197
@@ -96,4 +110,8 @@ if _descriptor._USE_C_DESCRIPTORS == False:
96
110
  _globals['_ACTIONIDENTIFIER']._serialized_end=1344
97
111
  _globals['_ACTIONATTEMPTIDENTIFIER']._serialized_start=1347
98
112
  _globals['_ACTIONATTEMPTIDENTIFIER']._serialized_end=1481
113
+ _globals['_TRIGGERNAME']._serialized_start=1484
114
+ _globals['_TRIGGERNAME']._serialized_end=1671
115
+ _globals['_TRIGGERIDENTIFIER']._serialized_start=1673
116
+ _globals['_TRIGGERIDENTIFIER']._serialized_end=1789
99
117
  # @@protoc_insertion_point(module_scope)