truefoundry 0.9.2rc1__py3-none-any.whl → 0.9.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- truefoundry/common/constants.py +2 -0
- truefoundry/deploy/_autogen/models.py +68 -15
- truefoundry/deploy/builder/__init__.py +2 -1
- truefoundry/deploy/builder/builders/__init__.py +2 -0
- truefoundry/deploy/builder/builders/tfy_spark_buildpack/__init__.py +76 -0
- truefoundry/deploy/builder/builders/tfy_spark_buildpack/dockerfile_template.py +193 -0
- truefoundry/deploy/builder/builders/tfy_spark_buildpack/execute_notebook.py +21 -0
- truefoundry/deploy/cli/commands/apply_command.py +17 -2
- truefoundry/deploy/cli/commands/deploy_command.py +15 -1
- truefoundry/deploy/lib/clients/servicefoundry_client.py +8 -3
- truefoundry/deploy/lib/dao/apply.py +23 -5
- truefoundry/deploy/lib/diff_utils.py +92 -0
- truefoundry/deploy/lib/model/entity.py +4 -0
- truefoundry/deploy/v2/lib/deploy.py +2 -0
- truefoundry/deploy/v2/lib/deployable_patched_models.py +69 -4
- {truefoundry-0.9.2rc1.dist-info → truefoundry-0.9.4.dist-info}/METADATA +1 -1
- {truefoundry-0.9.2rc1.dist-info → truefoundry-0.9.4.dist-info}/RECORD +19 -15
- {truefoundry-0.9.2rc1.dist-info → truefoundry-0.9.4.dist-info}/WHEEL +0 -0
- {truefoundry-0.9.2rc1.dist-info → truefoundry-0.9.4.dist-info}/entry_points.txt +0 -0
truefoundry/common/constants.py
CHANGED
@@ -65,6 +65,8 @@ class TrueFoundrySdkEnv(BaseSettings):
     # Fo customizing the python image used for building via PythonBuild
     TFY_PYTHONBUILD_PYTHON_IMAGE_REPO: str = "public.ecr.aws/docker/library/python"
 
+    TFY_SPARK_BUILD_SPARK_IMAGE_REPO: str = "apache/spark"
+
     # For local development, this enables futher configuration via _TFYServersConfig
     TFY_CLI_LOCAL_DEV_MODE: bool = False
 
truefoundry/deploy/_autogen/models.py
CHANGED
@@ -1,6 +1,6 @@
 # generated by datamodel-codegen:
 # filename: application.json
-# timestamp: 2025-
+# timestamp: 2025-06-08T14:28:34+00:00
 
 from __future__ import annotations
 
@@ -104,8 +104,8 @@ class AsyncProcessorSidecar(BaseModel):
 
 class Autoshutdown(BaseModel):
     wait_time: conint(ge=0) = Field(
-
-        description="The period to wait after the last received request before scaling the replicas to 0",
+        900,
+        description="The period to wait after the last received request before scaling the replicas to 0. This value should be high enough to allow for the replicas of the service to come up to avoid premature scaling down.",
     )
 
 
@@ -285,8 +285,8 @@ class GitSource(BaseModel):
     repo_url: constr(regex=r"^(https?://)\S+$") = Field(
         ..., description="The repository URL."
     )
-    ref:
-    branch_name: Optional[
+    ref: constr(regex=r"^\S+$") = Field(..., description="The commit SHA.")
+    branch_name: Optional[constr(regex=r"^\S+$")] = Field(
         None,
         description="Selecting branch will select latest commit SHA of the branch.",
     )
@@ -346,11 +346,11 @@ class Image(BaseModel):
     """
 
     type: Literal["image"] = Field(..., description="")
-    image_uri: constr(regex=r"^\S
+    image_uri: constr(regex=r"^\S+$") = Field(
         ...,
         description="The image URI. Specify the name of the image and the tag.\nIf the image is in Dockerhub, you can skip registry-url (for e.g. `tensorflow/tensorflow`).\nYou can use an image from a private registry using Advanced fields",
     )
-    docker_registry: Optional[
+    docker_registry: Optional[constr(regex=r"^\S+$")] = Field(
         None,
         description="FQN of the container registry. If you can't find your registry here,\nadd it through the [Integrations](/integrations?tab=docker-registry) page",
     )
@@ -840,8 +840,40 @@ class SlackWebhook(BaseModel):
     )
 
 
+class SparkBuildSpec(BaseModel):
+    """
+    Describes that we are using python to build a container image with a specific python version and pip packages installed.
+    """
+
+    type: Literal["tfy-spark-buildpack"] = Field(..., description="")
+    spark_version: str = Field(
+        "3.5.2",
+        description="Spark version should match the spark version installed in the image.",
+    )
+    python_version: Optional[constr(regex=r"^\d+(\.\d+){1,2}([\-\.a-z0-9]+)?$")] = (
+        Field(
+            None,
+            description="Python version to run your application. Should be one of the tags listed on [Official Python Docker Page](https://hub.docker.com/_/python)",
+        )
+    )
+    build_context_path: str = Field(
+        "./", description="Build path relative to project root path."
+    )
+    requirements_path: Optional[str] = Field(
+        None,
+        description="Path to `requirements.txt` relative to\n`Path to build context`",
+    )
+    pip_packages: Optional[List[str]] = Field(
+        None,
+        description='Define pip package requirements.\nIn Python/YAML E.g. ["fastapi>=0.90,<1.0", "uvicorn"]',
+    )
+    apt_packages: Optional[List[str]] = Field(
+        None,
+        description='Debian packages to install via `apt get`.\nIn Python/YAML E.g. ["git", "ffmpeg", "htop"]',
+    )
+
+
 class SparkDriverConfig(BaseModel):
-    ui_endpoint: Endpoint
     resources: Optional[Resources] = None
 
 
@@ -870,11 +902,11 @@ class SparkImage(BaseModel):
         "3.5.2",
         description="Spark version should match the spark version installed in the image.",
     )
-    image_uri: constr(regex=r"^\S
+    image_uri: constr(regex=r"^\S+$") = Field(
         ...,
         description="The image URI. Specify the name of the image and the tag.\nIf the image is in Dockerhub, you can skip registry-url (for e.g. `tensorflow/tensorflow`).\nYou can use an image from a private registry using Advanced fields",
     )
-    docker_registry: Optional[
+    docker_registry: Optional[constr(regex=r"^\S+$")] = Field(
         None,
         description="FQN of the container registry. If you can't find your registry here,\nadd it through the [Integrations](/integrations?tab=docker-registry) page",
     )
@@ -896,7 +928,8 @@ class SparkJobJavaEntrypoint(BaseModel):
 class SparkJobPythonEntrypoint(BaseModel):
     type: Literal["python"] = Field(..., description="")
     main_application_file: str = Field(
-        ...,
+        ...,
+        description="The main application file to be executed by the spark job. Relative path in case of git repository.",
     )
     arguments: Optional[str] = Field(
         None, description="Arguments to be passed to the main application file."
@@ -1113,7 +1146,7 @@ class Build(BaseModel):
     """
 
     type: Literal["build"] = Field(..., description="")
-    docker_registry: Optional[
+    docker_registry: Optional[constr(regex=r"^\S+$")] = Field(
         None,
         description="FQN of the container registry. If you can't find your registry here,\nadd it through the [Integrations](/integrations?tab=docker-registry) page",
     )
@@ -1399,6 +1432,23 @@ class SSHServer(BaseWorkbenchInput):
     )
 
 
+class SparkBuild(BaseModel):
+    """
+    Describes that we are building a new image based on the spec
+    """
+
+    type: Literal["build"] = Field(..., description="")
+    docker_registry: Optional[constr(regex=r"^\S+$")] = Field(
+        None,
+        description="FQN of the container registry. If you can't find your registry here,\nadd it through the [Integrations](/integrations?tab=docker-registry) page",
+    )
+    build_source: Union[GitSource, LocalSource] = Field(..., description="")
+    build_spec: Union[SparkBuildSpec, PythonBuild] = Field(
+        ...,
+        description="Instructions to build a container image out of the build source",
+    )
+
+
 class SparkExecutorConfig(BaseModel):
     instances: Union[SparkExecutorFixedInstances, SparkExecutorDynamicScaling] = Field(
         {"type": "fixed", "count": 1}, description=""
@@ -1411,7 +1461,10 @@ class SparkJob(BaseModel):
     name: constr(regex=r"^[a-z](?:[a-z0-9]|-(?!-)){1,30}[a-z0-9]$") = Field(
         ..., description="Name of the job"
     )
-    image: SparkImage
+    image: Union[SparkImage, SparkBuild] = Field(
+        ...,
+        description="The image to use for driver and executors. Must have spark installed. Spark version must match the version in the image.",
+    )
     entrypoint: Union[
         SparkJobPythonEntrypoint, SparkJobScalaEntrypoint, SparkJobJavaEntrypoint
     ] = Field(..., description="")
@@ -1604,8 +1657,8 @@ class Job(BaseModel):
     trigger: Union[Manual, Schedule] = Field(
         {"type": "manual"}, description="Specify the trigger"
     )
-    trigger_on_deploy: bool = Field(
-
+    trigger_on_deploy: Optional[bool] = Field(
+        None, description="Trigger the job after deploy immediately"
     )
     params: Optional[List[Param]] = Field(
         None, description="Configure params and pass it to create different job runs"
truefoundry/deploy/builder/__init__.py
CHANGED
@@ -2,6 +2,7 @@ from typing import Any, Dict, List, Optional, Union
 
 from truefoundry.deploy._autogen.models import (
     DockerFileBuild,
+    SparkBuildSpec,
     PythonBuild,
     TaskDockerFileBuild,
     TaskPythonBuild,
@@ -20,6 +21,7 @@ class _BuildConfig(BaseModel):
         NotebookImageBuild,
         TaskPythonBuild,
         TaskDockerFileBuild,
+        SparkBuildSpec,
     ] = Field(discriminator="type")
 
 
@@ -37,7 +39,6 @@ def build(
         build_configuration_dict = build_configuration.dict()
         build_configuration_dict.update({"type": "dockerfile"})
        build_configuration = DockerFileBuild.parse_obj(build_configuration_dict)
-
     builder = get_builder(build_configuration.type)
     return builder(
         build_configuration=build_configuration,
truefoundry/deploy/builder/builders/__init__.py
CHANGED
@@ -4,12 +4,14 @@ from truefoundry.deploy.builder.builders import (
     dockerfile,
     tfy_notebook_buildpack,
     tfy_python_buildpack,
+    tfy_spark_buildpack,
 )
 
 BUILD_REGISTRY: Dict[str, Callable] = {
     "dockerfile": dockerfile.build,
     "tfy-python-buildpack": tfy_python_buildpack.build,
     "tfy-notebook-buildpack": tfy_notebook_buildpack.build,
+    "tfy-spark-buildpack": tfy_spark_buildpack.build,
 }
 
 __all__ = ["get_builder"]
truefoundry/deploy/builder/builders/tfy_spark_buildpack/__init__.py
ADDED
@@ -0,0 +1,76 @@
+import os
+import shutil
+from tempfile import TemporaryDirectory
+from typing import List, Optional
+
+from truefoundry.deploy._autogen.models import DockerFileBuild, SparkBuildSpec
+from truefoundry.deploy.builder.builders import dockerfile
+from truefoundry.deploy.builder.builders.tfy_spark_buildpack.dockerfile_template import (
+    generate_dockerfile_content,
+)
+from truefoundry.deploy.builder.utils import has_python_package_manager_conf_secret
+
+__all__ = ["generate_dockerfile_content", "build"]
+
+
+def _convert_to_dockerfile_build_config(
+    build_configuration: SparkBuildSpec,
+    dockerfile_path: str,
+    mount_python_package_manager_conf_secret: bool = False,
+) -> DockerFileBuild:
+    dockerfile_content = generate_dockerfile_content(
+        build_configuration=build_configuration,
+        mount_python_package_manager_conf_secret=mount_python_package_manager_conf_secret,
+    )
+    with open(dockerfile_path, "w", encoding="utf8") as fp:
+        fp.write(dockerfile_content)
+
+    return DockerFileBuild(
+        type="dockerfile",
+        dockerfile_path=dockerfile_path,
+        build_context_path=build_configuration.build_context_path,
+    )
+
+
+def build(
+    tag: str,
+    build_configuration: SparkBuildSpec,
+    extra_opts: Optional[List[str]] = None,
+):
+    if not build_configuration.spark_version:
+        raise ValueError(
+            "`spark_version` is required for `tfy-spark-buildpack` builder"
+        )
+    mount_python_package_manager_conf_secret = (
+        has_python_package_manager_conf_secret(extra_opts) if extra_opts else False
+    )
+
+    # Copy execute_notebook.py to the build context
+    execute_notebook_src = os.path.join(os.path.dirname(__file__), "execute_notebook.py")
+    execute_notebook_dst = os.path.join(build_configuration.build_context_path, "execute_notebook.py")
+
+    # Track if we copied the file to clean it up later
+    copied_execute_notebook = False
+    if not os.path.exists(execute_notebook_dst):
+        shutil.copy2(execute_notebook_src, execute_notebook_dst)
+        copied_execute_notebook = True
+
+    try:
+        with TemporaryDirectory() as local_dir:
+            docker_build_configuration = _convert_to_dockerfile_build_config(
+                build_configuration,
+                dockerfile_path=os.path.join(local_dir, "Dockerfile"),
+                mount_python_package_manager_conf_secret=mount_python_package_manager_conf_secret,
+            )
+            dockerfile.build(
+                tag=tag,
+                build_configuration=docker_build_configuration,
+                extra_opts=extra_opts,
+            )
+    finally:
+        # Clean up the copied file if we copied it
+        if copied_execute_notebook and os.path.exists(execute_notebook_dst):
+            try:
+                os.remove(execute_notebook_dst)
+            except OSError:
+                pass  # Ignore errors when cleaning up
truefoundry/deploy/builder/builders/tfy_spark_buildpack/dockerfile_template.py
ADDED
@@ -0,0 +1,193 @@
+import shlex
+from typing import Dict, List, Optional
+
+from mako.template import Template
+
+from truefoundry.common.constants import ENV_VARS, PythonPackageManager
+from truefoundry.deploy._autogen.models import SparkBuildSpec
+from truefoundry.deploy.builder.constants import (
+    PIP_CONF_BUILDKIT_SECRET_MOUNT,
+    PIP_CONF_SECRET_MOUNT_AS_ENV,
+    UV_CONF_BUILDKIT_SECRET_MOUNT,
+    UV_CONF_SECRET_MOUNT_AS_ENV,
+)
+from truefoundry.deploy.v2.lib.patched_models import (
+    _resolve_requirements_path,
+)
+
+# TODO (chiragjn): Switch to a non-root user inside the container
+
+DEFAULT_PYTHON_IMAGE_REPO = "apache/spark"
+
+_POST_PYTHON_INSTALL_TEMPLATE = """
+% if apt_install_command is not None:
+RUN ${apt_install_command}
+% endif
+% if requirements_path is not None:
+COPY ${requirements_path} ${requirements_destination_path}
+% endif
+% if python_packages_install_command is not None:
+RUN ${package_manager_config_secret_mount} ${python_packages_install_command}
+% endif
+COPY . /app
+WORKDIR /app
+USER spark
+"""
+
+DOCKERFILE_TEMPLATE = Template(
+    """
+FROM ${spark_image_repo}:${spark_version}
+USER root
+RUN apt update && \
+    DEBIAN_FRONTEND=noninteractive apt install -y --no-install-recommends git && \
+    rm -rf /var/lib/apt/lists/*
+COPY execute_notebook.py /app/execute_notebook.py
+"""
+    # + _POST_PYTHON_INSTALL_TEMPLATE
+)
+
+ADDITIONAL_PIP_PACKAGES = ['papermill']
+
+def generate_apt_install_command(apt_packages: Optional[List[str]]) -> Optional[str]:
+    packages_list = None
+    if apt_packages:
+        packages_list = " ".join(p.strip() for p in apt_packages if p.strip())
+    if not packages_list:
+        return None
+    apt_update_command = "apt update"
+    apt_install_command = f"DEBIAN_FRONTEND=noninteractive apt install -y --no-install-recommends {packages_list}"
+    clear_apt_lists_command = "rm -rf /var/lib/apt/lists/*"
+    return " && ".join(
+        [apt_update_command, apt_install_command, clear_apt_lists_command]
+    )
+
+
+def generate_pip_install_command(
+    requirements_path: Optional[str],
+    pip_packages: Optional[List[str]],
+    mount_pip_conf_secret: bool = False,
+) -> Optional[str]:
+    upgrade_pip_command = "python -m pip install -U pip setuptools wheel"
+    envs = []
+    if mount_pip_conf_secret:
+        envs.append(PIP_CONF_SECRET_MOUNT_AS_ENV)
+
+    command = ["python", "-m", "pip", "install", "--use-pep517", "--no-cache-dir"]
+    args = []
+    if requirements_path:
+        args.append("-r")
+        args.append(requirements_path)
+
+    if pip_packages:
+        args.extend(pip_packages)
+
+    if not args:
+        return None
+
+    final_pip_install_command = shlex.join(envs + command + args)
+    final_docker_run_command = " && ".join(
+        [upgrade_pip_command, final_pip_install_command]
+    )
+    return final_docker_run_command
+
+
+def generate_uv_pip_install_command(
+    requirements_path: Optional[str],
+    pip_packages: Optional[List[str]],
+    mount_uv_conf_secret: bool = False,
+) -> Optional[str]:
+    upgrade_pip_command = "python -m pip install -U pip setuptools wheel"
+    uv_mount = f"--mount=from={ENV_VARS.TFY_PYTHON_BUILD_UV_IMAGE_URI},source=/uv,target=/usr/local/bin/uv"
+    envs = [
+        "UV_LINK_MODE=copy",
+        "UV_PYTHON_DOWNLOADS=never",
+        "UV_INDEX_STRATEGY=unsafe-best-match",
+    ]
+    if mount_uv_conf_secret:
+        envs.append(UV_CONF_SECRET_MOUNT_AS_ENV)
+
+    command = ["uv", "pip", "install", "--no-cache-dir"]
+
+    args = []
+
+    if requirements_path:
+        args.append("-r")
+        args.append(requirements_path)
+
+    if pip_packages:
+        args.extend(pip_packages)
+
+    if not args:
+        return None
+
+    uv_pip_install_command = shlex.join(envs + command + args)
+    shell_commands = " && ".join([upgrade_pip_command, uv_pip_install_command])
+    final_docker_run_command = " ".join([uv_mount, shell_commands])
+
+    return final_docker_run_command
+
+
+def generate_dockerfile_content(
+    build_configuration: SparkBuildSpec,
+    package_manager: str = ENV_VARS.TFY_PYTHON_BUILD_PACKAGE_MANAGER,
+    mount_python_package_manager_conf_secret: bool = False,
+) -> str:
+    # TODO (chiragjn): Handle recursive references to other requirements files e.g. `-r requirements-gpu.txt`
+    requirements_path = _resolve_requirements_path(
+        build_context_path=build_configuration.build_context_path,
+        requirements_path=build_configuration.requirements_path,
+    )
+    requirements_destination_path = (
+        "/tmp/requirements.txt" if requirements_path else None
+    )
+    if not build_configuration.spark_version:
+        raise ValueError(
+            "`spark_version` is required for `tfy-spark-buildpack` builder"
+        )
+
+    # Handle pip packages - SparkBuildSpec pip_packages is Optional[List[str]], so we need to handle None
+    pip_packages = build_configuration.pip_packages or []
+
+    if package_manager == PythonPackageManager.PIP.value:
+        python_packages_install_command = generate_pip_install_command(
+            requirements_path=requirements_destination_path,
+            pip_packages=pip_packages + ADDITIONAL_PIP_PACKAGES,
+            mount_pip_conf_secret=mount_python_package_manager_conf_secret,
+        )
+    elif package_manager == PythonPackageManager.UV.value:
+        python_packages_install_command = generate_uv_pip_install_command(
+            requirements_path=requirements_destination_path,
+            pip_packages=pip_packages + ADDITIONAL_PIP_PACKAGES,
+            mount_uv_conf_secret=mount_python_package_manager_conf_secret,
+        )
+    else:
+        raise ValueError(f"Unsupported package manager: {package_manager}")
+
+    apt_install_command = generate_apt_install_command(
+        apt_packages=build_configuration.apt_packages
+    )
+    template_args = {
+        "spark_image_repo": ENV_VARS.TFY_SPARK_BUILD_SPARK_IMAGE_REPO,
+        "spark_version": build_configuration.spark_version,
+        "apt_install_command": apt_install_command,
+        "requirements_path": requirements_path,
+        "requirements_destination_path": requirements_destination_path,
+        "python_packages_install_command": python_packages_install_command,
+    }
+
+    if mount_python_package_manager_conf_secret:
+        if package_manager == PythonPackageManager.PIP.value:
+            template_args["package_manager_config_secret_mount"] = (
+                PIP_CONF_BUILDKIT_SECRET_MOUNT
+            )
+        elif package_manager == PythonPackageManager.UV.value:
+            template_args["package_manager_config_secret_mount"] = (
+                UV_CONF_BUILDKIT_SECRET_MOUNT
+            )
+        else:
+            raise ValueError(f"Unsupported package manager: {package_manager}")
+    else:
+        template_args["package_manager_config_secret_mount"] = ""
+
+    dockerfile_content = DOCKERFILE_TEMPLATE.render(**template_args)
+    return dockerfile_content
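The new buildpack modules above turn a `SparkBuildSpec` into an ordinary Dockerfile build. Note that in this version the `_POST_PYTHON_INSTALL_TEMPLATE` concatenation into `DOCKERFILE_TEMPLATE` is commented out, so the apt/pip install commands are computed but not yet emitted into the rendered Dockerfile. A minimal sketch of driving `generate_dockerfile_content` directly; the spec values below are illustrative, not taken from the package:

```
from truefoundry.deploy._autogen.models import SparkBuildSpec
from truefoundry.deploy.builder.builders.tfy_spark_buildpack.dockerfile_template import (
    generate_dockerfile_content,
)

# Illustrative values; only the field names come from the SparkBuildSpec model above.
spec = SparkBuildSpec(
    type="tfy-spark-buildpack",
    spark_version="3.5.2",
    pip_packages=["pandas"],  # hypothetical extra packages; papermill is always appended
    apt_packages=["git"],
)

# Renders a Dockerfile starting FROM <TFY_SPARK_BUILD_SPARK_IMAGE_REPO>:3.5.2
# (apache/spark by default, per constants.py above) and copying execute_notebook.py into /app.
print(generate_dockerfile_content(build_configuration=spec))
```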
truefoundry/deploy/builder/builders/tfy_spark_buildpack/execute_notebook.py
ADDED
@@ -0,0 +1,21 @@
+# This script is used as the main application file for spark applications
+# when the application to be run is a notebook, the actual notebook to be
+# executed is passed as an argument to this script.
+
+
+import papermill as pm
+import sys
+
+if __name__ == "__main__":
+
+    if len(sys.argv) != 2:
+        print("Usage: python execute_notebook.py <notebook_path>")
+        sys.exit(1)
+
+    notebook_path = sys.argv[1]
+
+    pm.execute_notebook(
+        notebook_path,
+        "output.ipynb",
+        parameters=dict(),
+    )
truefoundry/deploy/cli/commands/apply_command.py
CHANGED
@@ -37,13 +37,28 @@ from truefoundry.deploy.lib.model.entity import ApplyResult
     show_default=True,
     help="Simulate the process without actually applying the manifest",
 )
+@click.option(
+    "--show-diff",
+    "--show_diff",
+    is_flag=True,
+    show_default=True,
+    help="Print manifest differences when using --dry-run",
+)
 @handle_exception_wrapper
-def apply_command(
+def apply_command(
+    files: Tuple[str, ...], dry_run: bool = False, show_diff: bool = False
+):
+    # Validate that show_diff is only used with dry_run
+    if show_diff and not dry_run:
+        raise click.ClickException("--show-diff requires --dry-run")
+
     apply_results: List[ApplyResult] = []
     client = ServiceFoundryServiceClient()
     for file in files:
         with console.status(PROMPT_APPLYING_MANIFEST.format(file), spinner="dots"):
-            for apply_result in apply_lib.apply_manifest_file(
+            for apply_result in apply_lib.apply_manifest_file(
+                file, client, dry_run, show_diff
+            ):
                 if apply_result.success:
                     console.print(f"[green]\u2714 {apply_result.message}[/]")
                 else:
truefoundry/deploy/cli/commands/deploy_command.py
CHANGED
@@ -62,6 +62,14 @@ def _get_default_spec_file():
     default=False,
     help="Force create a new deployment by canceling any ongoing deployments",
 )
+@click.option(
+    "--trigger-on-deploy/--no-trigger-on-deploy",
+    "--trigger_on_deploy/--no_trigger_on_deploy",
+    is_flag=True,
+    show_default=True,
+    default=False,
+    help="Trigger a Job run after deployment succeeds. Has no effect for non Job type deployments",
+)
 @click.pass_context
 @handle_exception_wrapper
 def deploy_command(
@@ -70,6 +78,7 @@ def deploy_command(
     workspace_fqn: Optional[str],
     wait: bool,
     force: bool = False,
+    trigger_on_deploy: bool = False,
 ):
     if ctx.invoked_subcommand is not None:
         return
@@ -86,7 +95,12 @@ def deploy_command(
         application_definition = yaml.safe_load(f)
 
         application = Application.parse_obj(application_definition)
-        application.deploy(
+        application.deploy(
+            workspace_fqn=workspace_fqn,
+            wait=wait,
+            force=force,
+            trigger_on_deploy=trigger_on_deploy,
+        )
         sys.exit(0)
 
     click.echo(
truefoundry/deploy/lib/clients/servicefoundry_client.py
CHANGED
@@ -39,6 +39,7 @@ from truefoundry.deploy.lib.model.entity import (
     JobRun,
     LogBody,
     SocketEvent,
+    TFYApplyResponse,
     TriggerJobResult,
     Workspace,
     WorkspaceResources,
@@ -222,12 +223,14 @@ class ServiceFoundryServiceClient(BaseServiceFoundryServiceClient):
         workspace_id: str,
         application: autogen_models.Workflow,
         force: bool = False,
+        trigger_on_deploy: bool = False,
     ) -> Deployment:
         data = {
             "workspaceId": workspace_id,
             "name": application.name,
             "manifest": application.dict(exclude_none=True),
             "forceDeploy": force,
+            "triggerOnDeploy": trigger_on_deploy,
         }
         logger.debug(json.dumps(data))
         url = f"{self._api_server_url}/{VERSION_PREFIX}/deployment"
@@ -680,14 +683,16 @@ class ServiceFoundryServiceClient(BaseServiceFoundryServiceClient):
         return parse_obj_as(List[Deployment], response)
 
     @check_min_cli_version
-    def apply(
+    def apply(
+        self, manifest: Dict[str, Any], dry_run: bool = False
+    ) -> TFYApplyResponse:
         url = f"{self._api_server_url}/{VERSION_PREFIX}/apply"
         body = {"manifest": manifest, "dryRun": dry_run}
         response = session_with_retries().put(
             url, headers=self._get_headers(), json=body
         )
-        response_data = cast(Dict[str, Any], request_handling(response))
-        return response_data
+        response_data = cast(Dict[str, Any], request_handling(response) or {})
+        return TFYApplyResponse.parse_obj(response_data)
 
     @check_min_cli_version
     def delete(self, manifest: Dict[str, Any]) -> Dict[str, Any]:
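With `apply` now returning a typed `TFYApplyResponse` instead of a raw dict, callers can check `existing_manifest` directly. A hedged sketch; the manifest payload below is made up for illustration:

```
from truefoundry.deploy.lib.clients.servicefoundry_client import (
    ServiceFoundryServiceClient,
)

client = ServiceFoundryServiceClient()
# Hypothetical manifest dict; a real one would come from a parsed YAML spec.
manifest = {"type": "job", "name": "demo-job"}

response = client.apply(manifest, dry_run=True)
if response.existing_manifest:
    # The server already knows this resource; existing_manifest is parsed from
    # the "existingManifest" key of the API response (see entity.py below).
    print("resource exists, a diff against the new manifest is possible")
```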
truefoundry/deploy/lib/dao/apply.py
CHANGED
@@ -3,9 +3,11 @@ from typing import Any, Dict, Iterator, Optional
 
 import yaml
 
+from truefoundry.cli.console import console
 from truefoundry.deploy.lib.clients.servicefoundry_client import (
     ServiceFoundryServiceClient,
 )
+from truefoundry.deploy.lib.diff_utils import print_manifest_diff
 from truefoundry.deploy.lib.model.entity import ApplyResult, ManifestLike
 from truefoundry.pydantic_v1 import ValidationError
 
@@ -16,6 +18,7 @@ def _apply_manifest(
     filename: Optional[str] = None,
     index: Optional[int] = None,
     dry_run: bool = False,
+    show_diff: bool = False,
 ) -> ApplyResult:
     client = client or ServiceFoundryServiceClient()
 
@@ -26,7 +29,7 @@ def _apply_manifest(
         file_metadata += f" from file {filename}"
 
     try:
-
+        parsed_manifest = ManifestLike.parse_obj(manifest)
     except ValidationError as ex:
         return ApplyResult(
             success=False,
@@ -35,20 +38,30 @@ def _apply_manifest(
 
     prefix = "[Dry Run] " if dry_run else ""
     suffix = " (No changes were applied)" if dry_run else ""
+
     try:
-        client.apply(
+        api_response = client.apply(parsed_manifest.dict(), dry_run)
+
+        # Show diff for dry runs only when show_diff is enabled
+        if dry_run and show_diff and api_response.existing_manifest:
+            print_manifest_diff(
+                existing_manifest=api_response.existing_manifest,
+                new_manifest=parsed_manifest.dict(),
+                manifest_name=f"{parsed_manifest.name} ({parsed_manifest.type})",
+                console=console,
+            )
 
         return ApplyResult(
             success=True,
             message=(
-                f"{prefix}Successfully configured manifest {
+                f"{prefix}Successfully configured manifest {parsed_manifest.name} of type {parsed_manifest.type}.{suffix}"
             ),
         )
     except Exception as ex:
         return ApplyResult(
             success=False,
             message=(
-                f"{prefix}Failed to apply manifest {
+                f"{prefix}Failed to apply manifest {parsed_manifest.name} of type {parsed_manifest.type}. Error: {ex}.{suffix}"
            ),
         )
 
@@ -57,14 +70,18 @@ def apply_manifest(
     manifest: Dict[str, Any],
     client: Optional[ServiceFoundryServiceClient] = None,
     dry_run: bool = False,
+    show_diff: bool = False,
 ) -> ApplyResult:
-    return _apply_manifest(
+    return _apply_manifest(
+        manifest=manifest, client=client, dry_run=dry_run, show_diff=show_diff
+    )
 
 
 def apply_manifest_file(
     filepath: str,
     client: Optional[ServiceFoundryServiceClient] = None,
     dry_run: bool = False,
+    show_diff: bool = False,
 ) -> Iterator[ApplyResult]:
     client = client or ServiceFoundryServiceClient()
     filename = Path(filepath).name
@@ -92,4 +109,5 @@ def apply_manifest_file(
             filename=filename,
             index=index,
             dry_run=dry_run,
+            show_diff=show_diff,
         )
truefoundry/deploy/lib/diff_utils.py
ADDED
@@ -0,0 +1,92 @@
+import difflib
+from typing import Any, Dict, Optional
+
+import yaml
+from rich.console import Console
+from rich.markdown import Markdown
+from rich.panel import Panel
+
+
+def format_manifest_for_diff(manifest: Dict[str, Any]) -> str:
+    """
+    Format a manifest for diffing with consistent formatting.
+
+    Args:
+        manifest: The manifest dictionary to format
+
+    Returns:
+        A consistently formatted YAML string suitable for diffing
+    """
+    return yaml.dump(manifest, sort_keys=True, indent=2)
+
+
+def generate_manifest_diff(
+    existing_manifest: Dict[str, Any],
+    new_manifest: Dict[str, Any],
+    manifest_name: str = "manifest",
+) -> Optional[str]:
+    """
+    Generate a unified diff between existing and new manifests.
+
+    Args:
+        existing_manifest: The existing manifest of the resource
+        new_manifest: The new manifest being applied
+        manifest_name: Name of the manifest for diff headers
+
+    Returns:
+        Unified diff string if there are differences, None if no differences
+    """
+    # Format both manifests consistently
+    existing_formatted = format_manifest_for_diff(existing_manifest)
+    new_formatted = format_manifest_for_diff(new_manifest)
+
+    # Generate diff
+    existing_lines = existing_formatted.splitlines(keepends=True)
+    new_lines = new_formatted.splitlines(keepends=True)
+    diff_lines = list(
+        difflib.unified_diff(
+            existing_lines,
+            new_lines,
+            fromfile=f"existing/{manifest_name}",
+            tofile=f"new/{manifest_name}",
+        )
+    )
+
+    if not diff_lines:
+        return None
+
+    return "".join(diff_lines)
+
+
+def print_manifest_diff(
+    existing_manifest: Dict[str, Any],
+    new_manifest: Dict[str, Any],
+    manifest_name: str = "manifest",
+    console: Optional[Console] = None,
+) -> bool:
+    """
+    Generate and print a colored diff between manifests.
+
+    Args:
+        existing_manifest: The existing manifest of the resource
+        new_manifest: The new manifest being applied
+        manifest_name: Name of the manifest for diff headers
+        console: Optional Rich console instance to use for printing
+
+    Returns:
+        True if diff was printed, False if no diff
+    """
+    if console is None:
+        console = Console()
+
+    diff_text = generate_manifest_diff(existing_manifest, new_manifest, manifest_name)
+
+    if diff_text is None:
+        console.print(f"[green]No changes detected for {manifest_name}[/]")
+        return False
+
+    console.print(
+        Panel(Markdown(f"```diff\n{diff_text}\n```"), title=f"Diff for {manifest_name}")
+    )
+    console.print()
+    return True
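This is the helper that the new `--show-diff` flag (together with `--dry-run`) routes through in `dao/apply.py` above. A small standalone sketch with made-up manifests:

```
from truefoundry.deploy.lib.diff_utils import print_manifest_diff

# Hypothetical manifests, only to illustrate the output shape.
existing = {"type": "job", "name": "train", "trigger": {"type": "manual"}}
new = {"type": "job", "name": "train", "trigger": {"type": "schedule", "schedule": "0 * * * *"}}

# Prints a rich Panel containing a unified diff of the YAML-serialized manifests;
# returns False (after printing "No changes detected ...") when they are identical.
print_manifest_diff(existing_manifest=existing, new_manifest=new, manifest_name="train (job)")
```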
truefoundry/deploy/lib/model/entity.py
CHANGED
@@ -280,6 +280,10 @@ class CreateDockerRepositoryResponse(Base):
     repoName: str
 
 
+class TFYApplyResponse(BaseModel):
+    existing_manifest: Optional[Dict[str, Any]] = Field(None, alias="existingManifest")
+
+
 class ApplyResult(BaseModel):
     success: bool
     message: str
truefoundry/deploy/v2/lib/deploy.py
CHANGED
@@ -262,6 +262,7 @@ def deploy_component(
     workspace_fqn: Optional[str] = None,
     wait: bool = True,
     force: bool = False,
+    trigger_on_deploy: bool = False,
 ) -> Deployment:
     workspace_fqn = _resolve_workspace_fqn(
         component=component, workspace_fqn=workspace_fqn
@@ -284,6 +285,7 @@ def deploy_component(
         workspace_id=workspace_id,
         application=updated_component,
         force=force,
+        trigger_on_deploy=trigger_on_deploy,
     )
     logger.info(
         "🚀 Deployment started for application '%s'. Deployment FQN is '%s'.",
truefoundry/deploy/v2/lib/deployable_patched_models.py
CHANGED
@@ -1,10 +1,49 @@
-
+import warnings
+from typing import Any, Literal, Union
 
+from truefoundry.common.warnings import TrueFoundryDeprecationWarning
 from truefoundry.deploy._autogen import models
 from truefoundry.deploy.lib.model.entity import Deployment
 from truefoundry.deploy.v2.lib.deploy import deploy_component
 from truefoundry.deploy.v2.lib.patched_models import LocalSource
-from truefoundry.pydantic_v1 import BaseModel, Field, conint
+from truefoundry.pydantic_v1 import BaseModel, Field, conint, root_validator, validator
+
+_TRIGGER_ON_DEPLOY_DEPRECATION_MESSAGE = """
+Setting `trigger_on_deploy` in manifest has been deprecated and the field will be removed in future releases.
+
+Please remove it from the spec and instead use
+
+`trigger_on_deploy` argument on `.deploy`
+
+E.g.
+
+```
+job = Job(...)  # remove `trigger_on_deploy` from initialization
+job.deploy(..., trigger_on_deploy={arg_value})
+```
+
+OR
+
+`{flag}` option on `tfy deploy`
+
+E.g.
+
+```
+tfy deploy -f truefoundry.yaml {flag}
+```
+"""
+
+
+def _warn_if_trigger_on_deploy_used(_klass, v: Any) -> Any:
+    if v is not None:
+        # v is the value of trigger_on_deploy, which is also the arg_value for the message
+        flag = "--trigger-on-deploy" if v else "--no-trigger-on-deploy"
+        warnings.warn(
+            _TRIGGER_ON_DEPLOY_DEPRECATION_MESSAGE.format(arg_value=v, flag=flag),
+            TrueFoundryDeprecationWarning,
+            stacklevel=2,
+        )
+    return v
 
 
 class DeployablePatchedModelBase(BaseModel):
@@ -12,13 +51,18 @@ class DeployablePatchedModelBase(BaseModel):
         extra = "forbid"
 
     def deploy(
-        self,
+        self,
+        workspace_fqn: str,
+        wait: bool = True,
+        force: bool = False,
+        trigger_on_deploy: bool = False,
     ) -> Deployment:
         return deploy_component(
             component=self,
             workspace_fqn=workspace_fqn,
             wait=wait,
             force=force,
+            trigger_on_deploy=trigger_on_deploy,
         )
 
 
@@ -36,6 +80,10 @@ class Job(models.Job, DeployablePatchedModelBase):
     type: Literal["job"] = "job"
     resources: models.Resources = Field(default_factory=models.Resources)
 
+    @validator("trigger_on_deploy")
+    def _warn_if_trigger_on_deploy_used(cls, v: Any) -> Any:
+        return _warn_if_trigger_on_deploy_used(cls, v)
+
 
 class SparkJob(models.SparkJob, DeployablePatchedModelBase):
     type: Literal["spark-job"] = "spark-job"
@@ -118,8 +166,24 @@ class Application(models.Application, DeployablePatchedModelBase):
         models.SparkJob,
     ] = Field(..., description="", discriminator="type")
 
+    @root_validator(pre=True)
+    def _validate_spec(cls, values: Any) -> Any:
+        if isinstance(values, dict) and "__root__" in values:
+            root = values["__root__"]
+            if (
+                isinstance(root, dict)
+                and root.get("type") == "job"
+                and root.get("trigger_on_deploy") is not None
+            ):
+                _warn_if_trigger_on_deploy_used(cls, root.get("trigger_on_deploy"))
+        return values
+
     def deploy(
-        self,
+        self,
+        workspace_fqn: str,
+        wait: bool = True,
+        force: bool = False,
+        trigger_on_deploy: bool = False,
     ) -> Deployment:
         if isinstance(self.__root__, models.Workflow):
             from truefoundry.deploy.v2.lib.deploy_workflow import deploy_workflow
@@ -136,4 +200,5 @@ class Application(models.Application, DeployablePatchedModelBase):
             workspace_fqn=workspace_fqn,
             wait=wait,
             force=force,
+            trigger_on_deploy=trigger_on_deploy,
         )
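The deprecation message above already spells out the replacement: pass `trigger_on_deploy` to `.deploy(...)` (or use `--trigger-on-deploy` on `tfy deploy`) rather than setting it in the manifest. A hedged SDK sketch, assuming `Job` and `Image` are re-exported from `truefoundry.deploy` and using placeholder values:

```
from truefoundry.deploy import Image, Job  # assumed public import path

job = Job(
    name="demo-job",
    image=Image(type="image", image_uri="tensorflow/tensorflow"),  # placeholder image
)
# Deploy and request an immediate run, instead of putting trigger_on_deploy in the spec.
deployment = job.deploy(
    workspace_fqn="my-cluster:my-workspace",  # placeholder workspace FQN
    trigger_on_deploy=True,
)
```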
{truefoundry-0.9.2rc1.dist-info → truefoundry-0.9.4.dist-info}/RECORD
CHANGED
@@ -40,7 +40,7 @@ truefoundry/cli/display_util.py,sha256=9vzN3mbQqU6OhS7qRUiMRana4PTHa4sDTA0Hn7OVj
 truefoundry/cli/util.py,sha256=pezUfF2GC6ru7s8VeH2a7uvXTU0xN9ka7yLXkIgC3dY,4998
 truefoundry/common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 truefoundry/common/auth_service_client.py,sha256=N3YxKlx63r6cPZqbgb2lqBOPI69ShB7D7RCIq4FSCjc,7949
-truefoundry/common/constants.py,sha256=
+truefoundry/common/constants.py,sha256=3y3JuEJNQa7dFW1-yY-dgjB_tBoa_GwUNQxAsRLWiZ0,4547
 truefoundry/common/credential_file_manager.py,sha256=1yEk1Zm2xS4G0VDFwKSZ4w0VUrcPWQ1nJnoBaz9xyKA,4251
 truefoundry/common/credential_provider.py,sha256=_OhJ2XFlDaVsrUO-FyywxctcGGqDdC2pgcvwEKqQD0Q,4071
 truefoundry/common/entities.py,sha256=b4R6ss06-ygDS3C4Tqa_GOq5LFKDYbt7x4Mghnfz6yo,4007
@@ -54,23 +54,26 @@ truefoundry/common/utils.py,sha256=j3QP0uOsaGD_VmDDR68JTwoYE1okkAq6OqpVkzVf48Q,6
 truefoundry/common/warnings.py,sha256=rs6BHwk7imQYedo07iwh3TWEOywAR3Lqhj0AY4khByg,504
 truefoundry/deploy/__init__.py,sha256=6D22iiCgd5xlzBaG34q9Cx4rGgwf5qIAKQrOCgaCXYY,2746
 truefoundry/deploy/python_deploy_codegen.py,sha256=AainOFR20XvhNeztJkLPWGZ40lAT_nwc-ZmG77Kum4o,6525
-truefoundry/deploy/_autogen/models.py,sha256=
-truefoundry/deploy/builder/__init__.py,sha256=
+truefoundry/deploy/_autogen/models.py,sha256=mcd83zm8--repV2RFRsDNpmR9MgU7hW7VOpM0EDhQko,73878
+truefoundry/deploy/builder/__init__.py,sha256=jPSUJ75X_V-XIvLkG2Nl_wXTdhzy80LcbhMRlD3f5nE,4996
 truefoundry/deploy/builder/constants.py,sha256=amUkHoHvVKzGv0v_knfiioRuKiJM0V0xW0diERgWiI0,508
 truefoundry/deploy/builder/docker_service.py,sha256=sm7GWeIqyrKaZpxskdLejZlsxcZnM3BTDJr6orvPN4E,3948
 truefoundry/deploy/builder/utils.py,sha256=D68-bqM0NQx-Elg-56mtkENyVyg9faZ9tgTmBuo1Sjs,1076
-truefoundry/deploy/builder/builders/__init__.py,sha256=
+truefoundry/deploy/builder/builders/__init__.py,sha256=aomhWdR5L7uSM-GUalw9SnFHD2FQ_n-yFe4NH6nyNxw,715
 truefoundry/deploy/builder/builders/dockerfile.py,sha256=XMbMlPUTMPCyaHl7jJQY1ODtlRkpI61PcvgG6Ck5jNc,1522
 truefoundry/deploy/builder/builders/tfy_notebook_buildpack/__init__.py,sha256=RGWGqY8xOF7vycUPJd10N7ZzahWv24lO0anrOPtLuDU,1796
 truefoundry/deploy/builder/builders/tfy_notebook_buildpack/dockerfile_template.py,sha256=rQgdvKmAT9HArVW4TAG5yd2QTKRs3S5LJ9RQbc_EkHE,2518
 truefoundry/deploy/builder/builders/tfy_python_buildpack/__init__.py,sha256=_fjqHKn80qKi68SAMMALge7_A6e1sTsQWichw8uoGIw,2025
 truefoundry/deploy/builder/builders/tfy_python_buildpack/dockerfile_template.py,sha256=f4l3fH21E2b8W3-JotMKc0AdPcCxV7LRPxxYJa7z_UQ,9134
+truefoundry/deploy/builder/builders/tfy_spark_buildpack/__init__.py,sha256=jpDJIN0flNElgoe7_AtMkT2SFrp0Y-B3LZ8grOvTmYU,2890
+truefoundry/deploy/builder/builders/tfy_spark_buildpack/dockerfile_template.py,sha256=yOgDP0xB8LDidrEFrVRDawGsbm7tuI_IEFEdNWSEaR8,6810
+truefoundry/deploy/builder/builders/tfy_spark_buildpack/execute_notebook.py,sha256=9NEkWDcjczpLwuumYP34lBQ60RwX9ri5b-UE-kgUzD0,517
 truefoundry/deploy/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 truefoundry/deploy/cli/commands/__init__.py,sha256=qv818jxqSAygJ3h-6Ul8t-5VOgR_UrSgsVtNCl3e5G0,1408
-truefoundry/deploy/cli/commands/apply_command.py,sha256=
+truefoundry/deploy/cli/commands/apply_command.py,sha256=DmXmKVokkauyKIiJDtErTwbJ5_LvQeJbTQsG5BjyKpo,2427
 truefoundry/deploy/cli/commands/build_command.py,sha256=zJBywMatbpUlXx5O2aqpEVmPeBIJ9RNnG9abSc8C8CE,1234
 truefoundry/deploy/cli/commands/delete_command.py,sha256=i_lr_MocTEPKF2VwLe8B7oZWsgXK06EX_43_xdM5DIs,3875
-truefoundry/deploy/cli/commands/deploy_command.py,sha256=
+truefoundry/deploy/cli/commands/deploy_command.py,sha256=fN6yVXdSGD8xWyAj6KcwayCjA_sV5aKCpxLuNCrUl8U,4681
 truefoundry/deploy/cli/commands/deploy_init_command.py,sha256=g-jBfrEmhZ0TDWsyqPDn4K6q33EqJSGmBTt1eMYig-w,600
 truefoundry/deploy/cli/commands/get_command.py,sha256=bR8tAjQQhimzaTQ57L6BPJwcxQ_SGWCF5CqHDpxgG90,837
 truefoundry/deploy/cli/commands/k8s_exec_credential_command.py,sha256=EknpdufMAEnjSGMG7a-Jj7tkoiS5zmbJRREafb14Alw,2160
@@ -92,26 +95,27 @@ truefoundry/deploy/io/output_callback.py,sha256=_q79-dpFxnU762VPM9Ryy2gnuJnIotZ2
 truefoundry/deploy/io/rich_output_callback.py,sha256=m99RodkILXCgy_LJujEcojbpW1tL0H5Fjb0lqe6X_PQ,958
 truefoundry/deploy/lib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 truefoundry/deploy/lib/const.py,sha256=Wg0GDnfFu-g1fJr4lU80NH2ULw0R0dYjV7LnW-PbOeM,173
+truefoundry/deploy/lib/diff_utils.py,sha256=SszFgzBQ7GPwpgJKWjLDT-IQ_AcMB9mW8OP27Uj37HE,2627
 truefoundry/deploy/lib/logs_utils.py,sha256=SQxRv3jDDmgHdOUMhlMaAPGYskybnBUMpst7QU_i_sc,1469
 truefoundry/deploy/lib/messages.py,sha256=8424kj3kqCyDCX5Nr2WJZZ_UEutPoaSs_y2f9-O4yy8,1001
 truefoundry/deploy/lib/session.py,sha256=fLdgR6ZDp8-hFl5NTON4ngnWLsMzGxvKtfpDOOw_7lo,4963
 truefoundry/deploy/lib/util.py,sha256=J7r8San2wKo48A7-BlH2-OKTlBO67zlPjLEhMsL8os0,1059
 truefoundry/deploy/lib/win32.py,sha256=1RcvPTdlOAJ48rt8rCbE2Ufha2ztRqBAE9dueNXArrY,5009
 truefoundry/deploy/lib/clients/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-truefoundry/deploy/lib/clients/servicefoundry_client.py,sha256=
+truefoundry/deploy/lib/clients/servicefoundry_client.py,sha256=nSaaD91ONpDkRyOWHGv2VerPzdtn-Z3UF0iloj00VVU,27200
 truefoundry/deploy/lib/dao/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 truefoundry/deploy/lib/dao/application.py,sha256=oMszpueXPUfTUuN_XdKwoRjQyqAgWHhZ-10cbprCVdM,9226
-truefoundry/deploy/lib/dao/apply.py,sha256=
+truefoundry/deploy/lib/dao/apply.py,sha256=F7I8yp-IZir_6CL9NPE_KFD9rgicVJn2vcIv1a3MpuA,3771
 truefoundry/deploy/lib/dao/delete.py,sha256=uPL2psqWNw2O0oDikXJOlVxmG8n5d3Z0Ia9qZwqCn_8,2735
 truefoundry/deploy/lib/dao/version.py,sha256=AtdW_4O1DPUKdfv2qy6iUJsZ_95vM6z0AqeEy3WDKs8,1130
 truefoundry/deploy/lib/dao/workspace.py,sha256=6YvfCgWDzAULI3Q6JswyZmP1CwJ5rM-ANsIFkbQia0Q,2349
 truefoundry/deploy/lib/model/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-truefoundry/deploy/lib/model/entity.py,sha256=
+truefoundry/deploy/lib/model/entity.py,sha256=eBfA4trO0jUuDy0wifiu2rB_HryZrx5Kf-tRMwIQ_9g,8716
 truefoundry/deploy/v2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 truefoundry/deploy/v2/lib/__init__.py,sha256=WEiVMZXOVljzEE3tpGJil14liIn_PCDoACJ6b3tZ6sI,188
-truefoundry/deploy/v2/lib/deploy.py,sha256=
+truefoundry/deploy/v2/lib/deploy.py,sha256=HfSUdAS3gSpFAFtV0Mq9LscfpkaXqA2LHW4VXqk9Y0g,12707
 truefoundry/deploy/v2/lib/deploy_workflow.py,sha256=G5BzMIbap8pgDX1eY-TITruUxQdkKhYtBmRwLL6lDeY,14342
-truefoundry/deploy/v2/lib/deployable_patched_models.py,sha256=
+truefoundry/deploy/v2/lib/deployable_patched_models.py,sha256=mUi-OjPf7bc8rzfrPLdFb79LKuDq7F36RxL4V-AXebs,6830
 truefoundry/deploy/v2/lib/models.py,sha256=ogc1UYs1Z2nBdGSKCrde9sk8d0GxFKMkem99uqO5CmM,1148
 truefoundry/deploy/v2/lib/patched_models.py,sha256=8ib9Y7b4-DoEml2zCv3V7QIqh4tLJUjzPj1AWomwvag,14775
 truefoundry/deploy/v2/lib/source.py,sha256=d6-8_6Zn5koBglqrBrY6ZLG_7yyPuLdyEmK4iZTw6xY,9405
@@ -377,7 +381,7 @@ truefoundry/workflow/remote_filesystem/__init__.py,sha256=LQ95ViEjJ7Ts4JcCGOxMPs
 truefoundry/workflow/remote_filesystem/logger.py,sha256=em2l7D6sw7xTLDP0kQSLpgfRRCLpN14Qw85TN7ujQcE,1022
 truefoundry/workflow/remote_filesystem/tfy_signed_url_client.py,sha256=xcT0wQmQlgzcj0nP3tJopyFSVWT1uv3nhiTIuwfXYeg,12342
 truefoundry/workflow/remote_filesystem/tfy_signed_url_fs.py,sha256=nSGPZu0Gyd_jz0KsEE-7w_BmnTD8CVF1S8cUJoxaCbc,13305
-truefoundry-0.9.
-truefoundry-0.9.
-truefoundry-0.9.
-truefoundry-0.9.
+truefoundry-0.9.4.dist-info/METADATA,sha256=rNFh4XOOz7_58TIg68rG5jxyJkVOI4tPR0sEJYqIAfs,2504
+truefoundry-0.9.4.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+truefoundry-0.9.4.dist-info/entry_points.txt,sha256=xVjn7RMN-MW2-9f7YU-bBdlZSvvrwzhpX1zmmRmsNPU,98
+truefoundry-0.9.4.dist-info/RECORD,,
{truefoundry-0.9.2rc1.dist-info → truefoundry-0.9.4.dist-info}/WHEEL
File without changes
{truefoundry-0.9.2rc1.dist-info → truefoundry-0.9.4.dist-info}/entry_points.txt
File without changes