truefoundry 0.9.2rc1__py3-none-any.whl → 0.9.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- truefoundry/common/constants.py +3 -0
- truefoundry/deploy/__init__.py +3 -0
- truefoundry/deploy/_autogen/models.py +54 -18
- truefoundry/deploy/builder/__init__.py +2 -1
- truefoundry/deploy/builder/builders/__init__.py +2 -0
- truefoundry/deploy/builder/builders/tfy_spark_buildpack/__init__.py +79 -0
- truefoundry/deploy/builder/builders/tfy_spark_buildpack/dockerfile_template.py +172 -0
- truefoundry/deploy/builder/builders/tfy_spark_buildpack/tfy_execute_notebook.py +56 -0
- truefoundry/deploy/cli/commands/apply_command.py +17 -2
- truefoundry/deploy/cli/commands/deploy_command.py +15 -1
- truefoundry/deploy/lib/clients/servicefoundry_client.py +8 -3
- truefoundry/deploy/lib/dao/apply.py +23 -5
- truefoundry/deploy/lib/diff_utils.py +92 -0
- truefoundry/deploy/lib/model/entity.py +4 -0
- truefoundry/deploy/v2/lib/deploy.py +2 -0
- truefoundry/deploy/v2/lib/deployable_patched_models.py +69 -4
- truefoundry/deploy/v2/lib/patched_models.py +20 -0
- {truefoundry-0.9.2rc1.dist-info → truefoundry-0.9.3.dist-info}/METADATA +1 -1
- {truefoundry-0.9.2rc1.dist-info → truefoundry-0.9.3.dist-info}/RECORD +21 -17
- {truefoundry-0.9.2rc1.dist-info → truefoundry-0.9.3.dist-info}/WHEEL +0 -0
- {truefoundry-0.9.2rc1.dist-info → truefoundry-0.9.3.dist-info}/entry_points.txt +0 -0
truefoundry/common/constants.py
CHANGED
@@ -65,6 +65,9 @@ class TrueFoundrySdkEnv(BaseSettings):
     # Fo customizing the python image used for building via PythonBuild
     TFY_PYTHONBUILD_PYTHON_IMAGE_REPO: str = "public.ecr.aws/docker/library/python"

+    # TODO(gw): Use another image with more linient rate limits
+    TFY_SPARK_BUILD_SPARK_IMAGE_REPO: str = "public.ecr.aws/bitnami/spark"
+
     # For local development, this enables futher configuration via _TFYServersConfig
     TFY_CLI_LOCAL_DEV_MODE: bool = False

truefoundry/deploy/__init__.py
CHANGED
@@ -113,6 +113,9 @@ from truefoundry.deploy.v2.lib.patched_models import (
     ServiceAutoscaling,
     SlackBot,
     SlackWebhook,
+    SparkBuild,
+    SparkImage,
+    SparkImageBuild,
     SparkJobJavaEntrypoint,
     SparkJobPythonEntrypoint,
     SparkJobScalaEntrypoint,
truefoundry/deploy/_autogen/models.py
CHANGED

@@ -1,6 +1,6 @@
 # generated by datamodel-codegen:
 # filename: application.json
-# timestamp: 2025-
+# timestamp: 2025-06-09T12:01:27+00:00

 from __future__ import annotations


@@ -104,8 +104,8 @@ class AsyncProcessorSidecar(BaseModel):

 class Autoshutdown(BaseModel):
     wait_time: conint(ge=0) = Field(
-
-        description="The period to wait after the last received request before scaling the replicas to 0",
+        900,
+        description="The period to wait after the last received request before scaling the replicas to 0. This value should be high enough to allow for the replicas of the service to come up to avoid premature scaling down.",
     )


@@ -285,8 +285,8 @@ class GitSource(BaseModel):
     repo_url: constr(regex=r"^(https?://)\S+$") = Field(
         ..., description="The repository URL."
     )
-    ref:
-    branch_name: Optional[
+    ref: constr(regex=r"^\S+$") = Field(..., description="The commit SHA.")
+    branch_name: Optional[constr(regex=r"^\S+$")] = Field(
         None,
         description="Selecting branch will select latest commit SHA of the branch.",
     )

@@ -346,11 +346,11 @@ class Image(BaseModel):
     """

     type: Literal["image"] = Field(..., description="")
-    image_uri: constr(regex=r"^\S
+    image_uri: constr(regex=r"^\S+$") = Field(
         ...,
         description="The image URI. Specify the name of the image and the tag.\nIf the image is in Dockerhub, you can skip registry-url (for e.g. `tensorflow/tensorflow`).\nYou can use an image from a private registry using Advanced fields",
     )
-    docker_registry: Optional[
+    docker_registry: Optional[constr(regex=r"^\S+$")] = Field(
         None,
         description="FQN of the container registry. If you can't find your registry here,\nadd it through the [Integrations](/integrations?tab=docker-registry) page",
     )

@@ -643,7 +643,7 @@ class PythonBuild(BaseModel):
     )
     cuda_version: Optional[
         constr(
-            regex=r"^((\d+\.\d+(\.\d+)?-cudnn\d+-(runtime|devel)-ubuntu\d+\.\d+)|11\.0-cudnn8|11\.1-cudnn8|11\.2-cudnn8|11\.3-cudnn8|11\.4-cudnn8|11\.5-cudnn8|11\.6-cudnn8|11\.7-cudnn8|11\.8-cudnn8|12\.0-cudnn8|12\.1-cudnn8|12\.2-cudnn8)$"
+            regex=r"^((\d+\.\d+(\.\d+)?-cudnn\d+-(runtime|devel)-ubuntu\d+\.\d+)|11\.0-cudnn8|11\.1-cudnn8|11\.2-cudnn8|11\.3-cudnn8|11\.4-cudnn8|11\.5-cudnn8|11\.6-cudnn8|11\.7-cudnn8|11\.8-cudnn8|12\.0-cudnn8|12\.1-cudnn8|12\.2-cudnn8|12\.3-cudnn9|12\.4-cudnn9|12\.5-cudnn9|12\.6-cudnn9)$"
         )
     ] = Field(
         None,
@@ -840,8 +840,26 @@ class SlackWebhook(BaseModel):
     )


+class SparkBuild(BaseModel):
+    """
+    Describes that we are using python to build a container image with a specific python version and pip packages installed.
+    """
+
+    type: Literal["tfy-spark-buildpack"] = Field(..., description="")
+    spark_version: str = Field(
+        "3.5.2",
+        description="Spark version should match the spark version installed in the image.",
+    )
+    build_context_path: str = Field(
+        "./", description="Build path relative to project root path."
+    )
+    requirements_path: Optional[str] = Field(
+        None,
+        description="Path to `requirements.txt` relative to\n`Path to build context`",
+    )
+
+
 class SparkDriverConfig(BaseModel):
-    ui_endpoint: Endpoint
     resources: Optional[Resources] = None

@@ -865,19 +883,33 @@ class SparkImage(BaseModel):
     Describes that we are using a pre-built image stored in a Docker Image registry
     """

-    type: Literal["image"] = Field(..., description="")
+    type: Literal["spark-image"] = Field(..., description="")
     spark_version: str = Field(
         "3.5.2",
         description="Spark version should match the spark version installed in the image.",
     )
-    image_uri: constr(regex=r"^\S
+    image_uri: constr(regex=r"^\S+$") = Field(
         ...,
         description="The image URI. Specify the name of the image and the tag.\nIf the image is in Dockerhub, you can skip registry-url (for e.g. `tensorflow/tensorflow`).\nYou can use an image from a private registry using Advanced fields",
     )
-    docker_registry: Optional[
+    docker_registry: Optional[constr(regex=r"^\S+$")] = Field(
+        None,
+        description="FQN of the container registry. If you can't find your registry here,\nadd it through the [Integrations](/integrations?tab=docker-registry) page",
+    )
+
+
+class SparkImageBuild(BaseModel):
+    """
+    Describes that we are building a new image based on the spec
+    """
+
+    type: Literal["spark-image-build"] = Field(..., description="")
+    docker_registry: Optional[constr(regex=r"^\S+$")] = Field(
         None,
         description="FQN of the container registry. If you can't find your registry here,\nadd it through the [Integrations](/integrations?tab=docker-registry) page",
     )
+    build_source: GitSource
+    build_spec: SparkBuild


 class SparkJobJavaEntrypoint(BaseModel):
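The new `SparkImageBuild` variant pairs a `GitSource` with a `SparkBuild` spec, mirroring how the existing `Build` type combines a build source and a build spec. A minimal sketch of constructing it with the autogenerated models follows; the repository URL and commit SHA are placeholders, and the `type="git"` discriminator on `GitSource` is an assumption not shown in this diff:

    from truefoundry.deploy._autogen.models import GitSource, SparkBuild, SparkImageBuild

    # Illustrative values only; substitute a real repository and commit SHA.
    image = SparkImageBuild(
        type="spark-image-build",
        build_source=GitSource(
            type="git",  # assumed discriminator value for GitSource
            repo_url="https://github.com/example-org/spark-jobs",
            ref="0123456789abcdef0123456789abcdef01234567",
        ),
        build_spec=SparkBuild(
            type="tfy-spark-buildpack",
            spark_version="3.5.2",
            build_context_path="./",
            requirements_path="requirements.txt",
        ),
    )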
@@ -896,7 +928,8 @@ class SparkJobJavaEntrypoint(BaseModel):
 class SparkJobPythonEntrypoint(BaseModel):
     type: Literal["python"] = Field(..., description="")
     main_application_file: str = Field(
-        ...,
+        ...,
+        description="The main application file to be executed by the spark job. Relative path in case of git repository.",
     )
     arguments: Optional[str] = Field(
         None, description="Arguments to be passed to the main application file."

@@ -978,7 +1011,7 @@ class TaskPythonBuild(BaseModel):
     )
     cuda_version: Optional[
         constr(
-            regex=r"^((\d+\.\d+(\.\d+)?-cudnn\d+-(runtime|devel)-ubuntu\d+\.\d+)|11\.0-cudnn8|11\.1-cudnn8|11\.2-cudnn8|11\.3-cudnn8|11\.4-cudnn8|11\.5-cudnn8|11\.6-cudnn8|11\.7-cudnn8|11\.8-cudnn8|12\.0-cudnn8|12\.1-cudnn8|12\.2-cudnn8)$"
+            regex=r"^((\d+\.\d+(\.\d+)?-cudnn\d+-(runtime|devel)-ubuntu\d+\.\d+)|11\.0-cudnn8|11\.1-cudnn8|11\.2-cudnn8|11\.3-cudnn8|11\.4-cudnn8|11\.5-cudnn8|11\.6-cudnn8|11\.7-cudnn8|11\.8-cudnn8|12\.0-cudnn8|12\.1-cudnn8|12\.2-cudnn8|12\.3-cudnn9|12\.4-cudnn9|12\.5-cudnn9|12\.6-cudnn9)$"
         )
     ] = Field(
         None,

@@ -1113,7 +1146,7 @@ class Build(BaseModel):
     """

     type: Literal["build"] = Field(..., description="")
-    docker_registry: Optional[
+    docker_registry: Optional[constr(regex=r"^\S+$")] = Field(
         None,
         description="FQN of the container registry. If you can't find your registry here,\nadd it through the [Integrations](/integrations?tab=docker-registry) page",
     )
@@ -1411,7 +1444,10 @@ class SparkJob(BaseModel):
     name: constr(regex=r"^[a-z](?:[a-z0-9]|-(?!-)){1,30}[a-z0-9]$") = Field(
         ..., description="Name of the job"
     )
-    image: SparkImage
+    image: Union[SparkImage, SparkImageBuild] = Field(
+        ...,
+        description="The image to use for driver and executors. Must have spark installed. Spark version must match the version in the image.",
+    )
     entrypoint: Union[
         SparkJobPythonEntrypoint, SparkJobScalaEntrypoint, SparkJobJavaEntrypoint
     ] = Field(..., description="")
@@ -1604,8 +1640,8 @@ class Job(BaseModel):
     trigger: Union[Manual, Schedule] = Field(
         {"type": "manual"}, description="Specify the trigger"
     )
-    trigger_on_deploy: bool = Field(
-
+    trigger_on_deploy: Optional[bool] = Field(
+        None, description="Trigger the job after deploy immediately"
     )
     params: Optional[List[Param]] = Field(
         None, description="Configure params and pass it to create different job runs"
truefoundry/deploy/builder/__init__.py
CHANGED

@@ -3,6 +3,7 @@ from typing import Any, Dict, List, Optional, Union
 from truefoundry.deploy._autogen.models import (
     DockerFileBuild,
     PythonBuild,
+    SparkBuild,
     TaskDockerFileBuild,
     TaskPythonBuild,
 )

@@ -20,6 +21,7 @@ class _BuildConfig(BaseModel):
         NotebookImageBuild,
         TaskPythonBuild,
         TaskDockerFileBuild,
+        SparkBuild,
     ] = Field(discriminator="type")


@@ -37,7 +39,6 @@ def build(
         build_configuration_dict = build_configuration.dict()
         build_configuration_dict.update({"type": "dockerfile"})
         build_configuration = DockerFileBuild.parse_obj(build_configuration_dict)
-
     builder = get_builder(build_configuration.type)
     return builder(
         build_configuration=build_configuration,
truefoundry/deploy/builder/builders/__init__.py
CHANGED

@@ -4,12 +4,14 @@ from truefoundry.deploy.builder.builders import (
     dockerfile,
     tfy_notebook_buildpack,
     tfy_python_buildpack,
+    tfy_spark_buildpack,
 )

 BUILD_REGISTRY: Dict[str, Callable] = {
     "dockerfile": dockerfile.build,
     "tfy-python-buildpack": tfy_python_buildpack.build,
     "tfy-notebook-buildpack": tfy_notebook_buildpack.build,
+    "tfy-spark-buildpack": tfy_spark_buildpack.build,
 }

 __all__ = ["get_builder"]
truefoundry/deploy/builder/builders/tfy_spark_buildpack/__init__.py
ADDED

@@ -0,0 +1,79 @@
+import os
+import shutil
+from tempfile import TemporaryDirectory
+from typing import List, Optional
+
+from truefoundry.common.constants import PythonPackageManager
+from truefoundry.deploy._autogen.models import DockerFileBuild, SparkBuild
+from truefoundry.deploy.builder.builders import dockerfile
+from truefoundry.deploy.builder.builders.tfy_spark_buildpack.dockerfile_template import (
+    generate_dockerfile_content,
+)
+from truefoundry.deploy.builder.utils import has_python_package_manager_conf_secret
+
+__all__ = ["generate_dockerfile_content", "build"]
+
+
+def _convert_to_dockerfile_build_config(
+    build_configuration: SparkBuild,
+    dockerfile_path: str,
+    mount_python_package_manager_conf_secret: bool = False,
+) -> DockerFileBuild:
+    dockerfile_content = generate_dockerfile_content(
+        build_configuration=build_configuration,
+        mount_python_package_manager_conf_secret=mount_python_package_manager_conf_secret,
+        package_manager=PythonPackageManager.PIP.value,
+    )
+    with open(dockerfile_path, "w", encoding="utf8") as fp:
+        fp.write(dockerfile_content)
+
+    return DockerFileBuild(
+        type="dockerfile",
+        dockerfile_path=dockerfile_path,
+        build_context_path=build_configuration.build_context_path,
+    )
+
+
+def build(
+    tag: str,
+    build_configuration: SparkBuild,
+    extra_opts: Optional[List[str]] = None,
+):
+    mount_python_package_manager_conf_secret = (
+        has_python_package_manager_conf_secret(extra_opts) if extra_opts else False
+    )
+
+    # Copy tfy_execute_notebook.py to the build context
+    execute_notebook_src = os.path.join(
+        os.path.dirname(__file__), "tfy_execute_notebook.py"
+    )
+    execute_notebook_dst = os.path.join(
+        build_configuration.build_context_path, "tfy_execute_notebook.py"
+    )
+
+    # Verify the source file exists before copying
+    if not os.path.isfile(execute_notebook_src):
+        raise FileNotFoundError(f"Required file not found: {execute_notebook_src}")
+
+    # Always copy the file, overwrite if exists
+    shutil.copy2(execute_notebook_src, execute_notebook_dst)
+
+    try:
+        with TemporaryDirectory() as local_dir:
+            docker_build_configuration = _convert_to_dockerfile_build_config(
+                build_configuration,
+                dockerfile_path=os.path.join(local_dir, "Dockerfile"),
+                mount_python_package_manager_conf_secret=mount_python_package_manager_conf_secret,
+            )
+            dockerfile.build(
+                tag=tag,
+                build_configuration=docker_build_configuration,
+                extra_opts=extra_opts,
+            )
+    finally:
+        # Clean up the copied file if we copied it
+        if os.path.exists(execute_notebook_dst):
+            try:
+                os.remove(execute_notebook_dst)
+            except OSError:
+                pass  # Ignore errors when cleaning up
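In short, `build` copies `tfy_execute_notebook.py` into the build context, renders a Dockerfile from the `SparkBuild` spec, and hands off to the regular `dockerfile` builder. A hedged sketch of invoking it directly (the image tag is a placeholder and a working Docker/BuildKit environment is assumed):

    from truefoundry.deploy._autogen.models import SparkBuild
    from truefoundry.deploy.builder.builders import tfy_spark_buildpack

    # Illustrative invocation; tag and paths are made up.
    tfy_spark_buildpack.build(
        tag="registry.example.com/spark-jobs:dev",
        build_configuration=SparkBuild(
            type="tfy-spark-buildpack",
            spark_version="3.5.2",
            build_context_path="./",
            requirements_path="requirements.txt",
        ),
    )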
truefoundry/deploy/builder/builders/tfy_spark_buildpack/dockerfile_template.py
ADDED

@@ -0,0 +1,172 @@
+import shlex
+from typing import List, Optional
+
+from mako.template import Template
+
+from truefoundry.common.constants import ENV_VARS, PythonPackageManager
+from truefoundry.deploy._autogen.models import SparkBuild
+from truefoundry.deploy.builder.constants import (
+    PIP_CONF_BUILDKIT_SECRET_MOUNT,
+    PIP_CONF_SECRET_MOUNT_AS_ENV,
+    UV_CONF_BUILDKIT_SECRET_MOUNT,
+    UV_CONF_SECRET_MOUNT_AS_ENV,
+)
+from truefoundry.deploy.v2.lib.patched_models import (
+    _resolve_requirements_path,
+)
+
+# TODO (chiragjn): Switch to a non-root user inside the container
+
+_POST_PYTHON_INSTALL_TEMPLATE = """
+% if requirements_path is not None:
+COPY ${requirements_path} ${requirements_destination_path}
+% endif
+% if python_packages_install_command is not None:
+RUN ${package_manager_config_secret_mount} ${python_packages_install_command}
+% endif
+USER spark
+COPY . /app
+WORKDIR /app
+"""
+
+_POST_USER_TEMPLATE = """
+COPY tfy_execute_notebook.py /app/tfy_execute_notebook.py
+"""
+
+DOCKERFILE_TEMPLATE = Template(
+    """
+FROM ${spark_image_repo}:${spark_version}
+USER root
+RUN apt update && \
+    DEBIAN_FRONTEND=noninteractive apt install -y --no-install-recommends git && \
+    rm -rf /var/lib/apt/lists/*
+"""
+    + _POST_PYTHON_INSTALL_TEMPLATE
+    + _POST_USER_TEMPLATE
+)
+
+ADDITIONAL_PIP_PACKAGES = ["papermill>=2.6.0,<2.7.0"]
+
+
+def generate_pip_install_command(
+    requirements_path: Optional[str],
+    pip_packages: Optional[List[str]],
+    mount_pip_conf_secret: bool = False,
+) -> Optional[str]:
+    upgrade_pip_command = "python3 -m pip install -U pip setuptools wheel"
+    envs = []
+    if mount_pip_conf_secret:
+        envs.append(PIP_CONF_SECRET_MOUNT_AS_ENV)
+
+    command = ["python3", "-m", "pip", "install", "--use-pep517", "--no-cache-dir"]
+    args = []
+    if requirements_path:
+        args.append("-r")
+        args.append(requirements_path)
+
+    if pip_packages:
+        args.extend(pip_packages)
+
+    if not args:
+        return None
+
+    final_pip_install_command = shlex.join(envs + command + args)
+    final_docker_run_command = " && ".join(
+        [upgrade_pip_command, final_pip_install_command]
+    )
+    return final_docker_run_command
+
+
+def generate_uv_pip_install_command(
+    requirements_path: Optional[str],
+    pip_packages: Optional[List[str]],
+    mount_uv_conf_secret: bool = False,
+) -> Optional[str]:
+    upgrade_pip_command = "python3 -m pip install -U pip setuptools wheel"
+    uv_mount = f"--mount=from={ENV_VARS.TFY_PYTHON_BUILD_UV_IMAGE_URI},source=/uv,target=/usr/local/bin/uv"
+    envs = [
+        "UV_LINK_MODE=copy",
+        "UV_PYTHON_DOWNLOADS=never",
+        "UV_INDEX_STRATEGY=unsafe-best-match",
+    ]
+    if mount_uv_conf_secret:
+        envs.append(UV_CONF_SECRET_MOUNT_AS_ENV)
+
+    command = ["uv", "pip", "install", "--no-cache-dir"]
+
+    args = []
+
+    if requirements_path:
+        args.append("-r")
+        args.append(requirements_path)
+
+    if pip_packages:
+        args.extend(pip_packages)
+
+    if not args:
+        return None
+
+    uv_pip_install_command = shlex.join(envs + command + args)
+    shell_commands = " && ".join([upgrade_pip_command, uv_pip_install_command])
+    final_docker_run_command = " ".join([uv_mount, shell_commands])
+
+    return final_docker_run_command
+
+
+def generate_dockerfile_content(
+    build_configuration: SparkBuild,
+    package_manager: str = ENV_VARS.TFY_PYTHON_BUILD_PACKAGE_MANAGER,
+    mount_python_package_manager_conf_secret: bool = False,
+) -> str:
+    # TODO (chiragjn): Handle recursive references to other requirements files e.g. `-r requirements-gpu.txt`
+    requirements_path = _resolve_requirements_path(
+        build_context_path=build_configuration.build_context_path,
+        requirements_path=build_configuration.requirements_path,
+    )
+    requirements_destination_path = (
+        "/tmp/requirements.txt" if requirements_path else None
+    )
+    if not build_configuration.spark_version:
+        raise ValueError(
+            "`spark_version` is required for `tfy-spark-buildpack` builder"
+        )
+
+    if package_manager == PythonPackageManager.PIP.value:
+        python_packages_install_command = generate_pip_install_command(
+            requirements_path=requirements_destination_path,
+            pip_packages=ADDITIONAL_PIP_PACKAGES,
+            mount_pip_conf_secret=mount_python_package_manager_conf_secret,
+        )
+    elif package_manager == PythonPackageManager.UV.value:
+        python_packages_install_command = generate_uv_pip_install_command(
+            requirements_path=requirements_destination_path,
+            pip_packages=ADDITIONAL_PIP_PACKAGES,
+            mount_uv_conf_secret=mount_python_package_manager_conf_secret,
+        )
+    else:
+        raise ValueError(f"Unsupported package manager: {package_manager}")
+
+    template_args = {
+        "spark_image_repo": ENV_VARS.TFY_SPARK_BUILD_SPARK_IMAGE_REPO,
+        "spark_version": build_configuration.spark_version,
+        "requirements_path": requirements_path,
+        "requirements_destination_path": requirements_destination_path,
+        "python_packages_install_command": python_packages_install_command,
+    }
+
+    if mount_python_package_manager_conf_secret:
+        if package_manager == PythonPackageManager.PIP.value:
+            template_args["package_manager_config_secret_mount"] = (
+                PIP_CONF_BUILDKIT_SECRET_MOUNT
+            )
+        elif package_manager == PythonPackageManager.UV.value:
+            template_args["package_manager_config_secret_mount"] = (
+                UV_CONF_BUILDKIT_SECRET_MOUNT
+            )
+        else:
+            raise ValueError(f"Unsupported package manager: {package_manager}")
+    else:
+        template_args["package_manager_config_secret_mount"] = ""
+
+    dockerfile_content = DOCKERFILE_TEMPLATE.render(**template_args)
+    return dockerfile_content
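The rendered Dockerfile starts from the configured Spark base image, installs git, runs the pip/uv install command for the user's requirements plus papermill, then switches to the `spark` user and copies the build context (and `tfy_execute_notebook.py`) into `/app`. A small sketch of rendering it without running a build, assuming a `requirements.txt` exists under the build context; the base image comes from `TFY_SPARK_BUILD_SPARK_IMAGE_REPO`, which defaults to `public.ecr.aws/bitnami/spark`:

    from truefoundry.common.constants import PythonPackageManager
    from truefoundry.deploy._autogen.models import SparkBuild
    from truefoundry.deploy.builder.builders.tfy_spark_buildpack.dockerfile_template import (
        generate_dockerfile_content,
    )

    content = generate_dockerfile_content(
        build_configuration=SparkBuild(
            type="tfy-spark-buildpack",
            spark_version="3.5.2",
            build_context_path="./",
            requirements_path="requirements.txt",
        ),
        package_manager=PythonPackageManager.PIP.value,
    )
    # With the default repo, the output is expected to begin with:
    # FROM public.ecr.aws/bitnami/spark:3.5.2
    print(content)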
truefoundry/deploy/builder/builders/tfy_spark_buildpack/tfy_execute_notebook.py
ADDED

@@ -0,0 +1,56 @@
+# This script is used as the main application file for spark applications
+# when the application to be run is a notebook, the actual notebook to be
+# executed is passed as an argument to this script.
+
+
+import argparse
+import sys
+
+import papermill as pm
+
+
+def execute_notebook(notebook_path, output_path="output.ipynb", parameters=None):
+    """
+    Execute a Jupyter notebook using papermill.
+
+    Args:
+        notebook_path: Path to the input notebook
+        output_path: Path for the output notebook
+        parameters: Dictionary of parameters to pass to the notebook
+
+    Raises:
+        Exception: If notebook execution fails
+    """
+    if parameters is None:
+        parameters = {}
+
+    print(f"Starting execution of notebook: {notebook_path}")
+    pm.execute_notebook(
+        notebook_path,
+        output_path,
+        parameters=parameters,
+        # TODO(gw): Replace with kernel name for venv
+        kernel_name="python3",
+    )
+    print(f"Successfully executed notebook: {notebook_path}")
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(
+        description="Execute a Jupyter notebook using papermill for Spark applications"
+    )
+    parser.add_argument("notebook_path", help="Path to the notebook file to execute")
+
+    args = parser.parse_args()
+
+    # TODO(gw): Add support for passing parameters to the notebook
+    try:
+        execute_notebook(args.notebook_path)
+    except Exception as e:
+        print(f"Error executing notebook {args.notebook_path}: {e}")
+        print(
+            "Exiting with status code 1 to signal failure to parent process/orchestrator"
+        )
+        sys.exit(1)
+
+    # TODO(gw): Publish the output notebook to blob storage from where it could be rendered
truefoundry/deploy/cli/commands/apply_command.py
CHANGED

@@ -37,13 +37,28 @@ from truefoundry.deploy.lib.model.entity import ApplyResult
     show_default=True,
     help="Simulate the process without actually applying the manifest",
 )
+@click.option(
+    "--show-diff",
+    "--show_diff",
+    is_flag=True,
+    show_default=True,
+    help="Print manifest differences when using --dry-run",
+)
 @handle_exception_wrapper
-def apply_command(
+def apply_command(
+    files: Tuple[str, ...], dry_run: bool = False, show_diff: bool = False
+):
+    # Validate that show_diff is only used with dry_run
+    if show_diff and not dry_run:
+        raise click.ClickException("--show-diff requires --dry-run")
+
     apply_results: List[ApplyResult] = []
     client = ServiceFoundryServiceClient()
     for file in files:
         with console.status(PROMPT_APPLYING_MANIFEST.format(file), spinner="dots"):
-            for apply_result in apply_lib.apply_manifest_file(
+            for apply_result in apply_lib.apply_manifest_file(
+                file, client, dry_run, show_diff
+            ):
                 if apply_result.success:
                     console.print(f"[green]\u2714 {apply_result.message}[/]")
                 else:
|
|
|
62
62
|
default=False,
|
|
63
63
|
help="Force create a new deployment by canceling any ongoing deployments",
|
|
64
64
|
)
|
|
65
|
+
@click.option(
|
|
66
|
+
"--trigger-on-deploy/--no-trigger-on-deploy",
|
|
67
|
+
"--trigger_on_deploy/--no_trigger_on_deploy",
|
|
68
|
+
is_flag=True,
|
|
69
|
+
show_default=True,
|
|
70
|
+
default=False,
|
|
71
|
+
help="Trigger a Job run after deployment succeeds. Has no effect for non Job type deployments",
|
|
72
|
+
)
|
|
65
73
|
@click.pass_context
|
|
66
74
|
@handle_exception_wrapper
|
|
67
75
|
def deploy_command(
|
|
@@ -70,6 +78,7 @@ def deploy_command(
|
|
|
70
78
|
workspace_fqn: Optional[str],
|
|
71
79
|
wait: bool,
|
|
72
80
|
force: bool = False,
|
|
81
|
+
trigger_on_deploy: bool = False,
|
|
73
82
|
):
|
|
74
83
|
if ctx.invoked_subcommand is not None:
|
|
75
84
|
return
|
|
@@ -86,7 +95,12 @@ def deploy_command(
|
|
|
86
95
|
application_definition = yaml.safe_load(f)
|
|
87
96
|
|
|
88
97
|
application = Application.parse_obj(application_definition)
|
|
89
|
-
application.deploy(
|
|
98
|
+
application.deploy(
|
|
99
|
+
workspace_fqn=workspace_fqn,
|
|
100
|
+
wait=wait,
|
|
101
|
+
force=force,
|
|
102
|
+
trigger_on_deploy=trigger_on_deploy,
|
|
103
|
+
)
|
|
90
104
|
sys.exit(0)
|
|
91
105
|
|
|
92
106
|
click.echo(
|
|
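On the CLI this surfaces as `tfy deploy -f truefoundry.yaml --trigger-on-deploy` (or `--no-trigger-on-deploy`), the same invocation the deprecation message in deployable_patched_models.py further below points users to.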
truefoundry/deploy/lib/clients/servicefoundry_client.py
CHANGED

@@ -39,6 +39,7 @@ from truefoundry.deploy.lib.model.entity import (
     JobRun,
     LogBody,
     SocketEvent,
+    TFYApplyResponse,
     TriggerJobResult,
     Workspace,
     WorkspaceResources,

@@ -222,12 +223,14 @@ class ServiceFoundryServiceClient(BaseServiceFoundryServiceClient):
         workspace_id: str,
         application: autogen_models.Workflow,
         force: bool = False,
+        trigger_on_deploy: bool = False,
     ) -> Deployment:
         data = {
             "workspaceId": workspace_id,
             "name": application.name,
             "manifest": application.dict(exclude_none=True),
             "forceDeploy": force,
+            "triggerOnDeploy": trigger_on_deploy,
         }
         logger.debug(json.dumps(data))
         url = f"{self._api_server_url}/{VERSION_PREFIX}/deployment"

@@ -680,14 +683,16 @@ class ServiceFoundryServiceClient(BaseServiceFoundryServiceClient):
         return parse_obj_as(List[Deployment], response)

     @check_min_cli_version
-    def apply(
+    def apply(
+        self, manifest: Dict[str, Any], dry_run: bool = False
+    ) -> TFYApplyResponse:
         url = f"{self._api_server_url}/{VERSION_PREFIX}/apply"
         body = {"manifest": manifest, "dryRun": dry_run}
         response = session_with_retries().put(
             url, headers=self._get_headers(), json=body
         )
-        response_data = cast(Dict[str, Any], request_handling(response))
-        return response_data
+        response_data = cast(Dict[str, Any], request_handling(response) or {})
+        return TFYApplyResponse.parse_obj(response_data)

     @check_min_cli_version
     def delete(self, manifest: Dict[str, Any]) -> Dict[str, Any]:
|
|
|
3
3
|
|
|
4
4
|
import yaml
|
|
5
5
|
|
|
6
|
+
from truefoundry.cli.console import console
|
|
6
7
|
from truefoundry.deploy.lib.clients.servicefoundry_client import (
|
|
7
8
|
ServiceFoundryServiceClient,
|
|
8
9
|
)
|
|
10
|
+
from truefoundry.deploy.lib.diff_utils import print_manifest_diff
|
|
9
11
|
from truefoundry.deploy.lib.model.entity import ApplyResult, ManifestLike
|
|
10
12
|
from truefoundry.pydantic_v1 import ValidationError
|
|
11
13
|
|
|
@@ -16,6 +18,7 @@ def _apply_manifest(
|
|
|
16
18
|
filename: Optional[str] = None,
|
|
17
19
|
index: Optional[int] = None,
|
|
18
20
|
dry_run: bool = False,
|
|
21
|
+
show_diff: bool = False,
|
|
19
22
|
) -> ApplyResult:
|
|
20
23
|
client = client or ServiceFoundryServiceClient()
|
|
21
24
|
|
|
@@ -26,7 +29,7 @@ def _apply_manifest(
|
|
|
26
29
|
file_metadata += f" from file {filename}"
|
|
27
30
|
|
|
28
31
|
try:
|
|
29
|
-
|
|
32
|
+
parsed_manifest = ManifestLike.parse_obj(manifest)
|
|
30
33
|
except ValidationError as ex:
|
|
31
34
|
return ApplyResult(
|
|
32
35
|
success=False,
|
|
@@ -35,20 +38,30 @@ def _apply_manifest(
|
|
|
35
38
|
|
|
36
39
|
prefix = "[Dry Run] " if dry_run else ""
|
|
37
40
|
suffix = " (No changes were applied)" if dry_run else ""
|
|
41
|
+
|
|
38
42
|
try:
|
|
39
|
-
client.apply(
|
|
43
|
+
api_response = client.apply(parsed_manifest.dict(), dry_run)
|
|
44
|
+
|
|
45
|
+
# Show diff for dry runs only when show_diff is enabled
|
|
46
|
+
if dry_run and show_diff and api_response.existing_manifest:
|
|
47
|
+
print_manifest_diff(
|
|
48
|
+
existing_manifest=api_response.existing_manifest,
|
|
49
|
+
new_manifest=parsed_manifest.dict(),
|
|
50
|
+
manifest_name=f"{parsed_manifest.name} ({parsed_manifest.type})",
|
|
51
|
+
console=console,
|
|
52
|
+
)
|
|
40
53
|
|
|
41
54
|
return ApplyResult(
|
|
42
55
|
success=True,
|
|
43
56
|
message=(
|
|
44
|
-
f"{prefix}Successfully configured manifest {
|
|
57
|
+
f"{prefix}Successfully configured manifest {parsed_manifest.name} of type {parsed_manifest.type}.{suffix}"
|
|
45
58
|
),
|
|
46
59
|
)
|
|
47
60
|
except Exception as ex:
|
|
48
61
|
return ApplyResult(
|
|
49
62
|
success=False,
|
|
50
63
|
message=(
|
|
51
|
-
f"{prefix}Failed to apply manifest {
|
|
64
|
+
f"{prefix}Failed to apply manifest {parsed_manifest.name} of type {parsed_manifest.type}. Error: {ex}.{suffix}"
|
|
52
65
|
),
|
|
53
66
|
)
|
|
54
67
|
|
|
@@ -57,14 +70,18 @@ def apply_manifest(
|
|
|
57
70
|
manifest: Dict[str, Any],
|
|
58
71
|
client: Optional[ServiceFoundryServiceClient] = None,
|
|
59
72
|
dry_run: bool = False,
|
|
73
|
+
show_diff: bool = False,
|
|
60
74
|
) -> ApplyResult:
|
|
61
|
-
return _apply_manifest(
|
|
75
|
+
return _apply_manifest(
|
|
76
|
+
manifest=manifest, client=client, dry_run=dry_run, show_diff=show_diff
|
|
77
|
+
)
|
|
62
78
|
|
|
63
79
|
|
|
64
80
|
def apply_manifest_file(
|
|
65
81
|
filepath: str,
|
|
66
82
|
client: Optional[ServiceFoundryServiceClient] = None,
|
|
67
83
|
dry_run: bool = False,
|
|
84
|
+
show_diff: bool = False,
|
|
68
85
|
) -> Iterator[ApplyResult]:
|
|
69
86
|
client = client or ServiceFoundryServiceClient()
|
|
70
87
|
filename = Path(filepath).name
|
|
@@ -92,4 +109,5 @@ def apply_manifest_file(
|
|
|
92
109
|
filename=filename,
|
|
93
110
|
index=index,
|
|
94
111
|
dry_run=dry_run,
|
|
112
|
+
show_diff=show_diff,
|
|
95
113
|
)
|
|
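From library code the same flow is available without the CLI. A minimal sketch (the manifest dict is illustrative, and an authenticated TrueFoundry session is assumed since a ServiceFoundryServiceClient is created internally):

    from truefoundry.deploy.lib.dao.apply import apply_manifest

    result = apply_manifest(
        manifest={"type": "workspace", "name": "demo-ws"},  # illustrative manifest
        dry_run=True,
        show_diff=True,
    )
    print(result.success, result.message)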
truefoundry/deploy/lib/diff_utils.py
ADDED

@@ -0,0 +1,92 @@
+import difflib
+from typing import Any, Dict, Optional
+
+import yaml
+from rich.console import Console
+from rich.markdown import Markdown
+from rich.panel import Panel
+
+
+def format_manifest_for_diff(manifest: Dict[str, Any]) -> str:
+    """
+    Format a manifest for diffing with consistent formatting.
+
+    Args:
+        manifest: The manifest dictionary to format
+
+    Returns:
+        A consistently formatted YAML string suitable for diffing
+    """
+    return yaml.dump(manifest, sort_keys=True, indent=2)
+
+
+def generate_manifest_diff(
+    existing_manifest: Dict[str, Any],
+    new_manifest: Dict[str, Any],
+    manifest_name: str = "manifest",
+) -> Optional[str]:
+    """
+    Generate a unified diff between existing and new manifests.
+
+    Args:
+        existing_manifest: The existing manifest of the resource
+        new_manifest: The new manifest being applied
+        manifest_name: Name of the manifest for diff headers
+
+    Returns:
+        Unified diff string if there are differences, None if no differences
+    """
+    # Format both manifests consistently
+    existing_formatted = format_manifest_for_diff(existing_manifest)
+    new_formatted = format_manifest_for_diff(new_manifest)
+
+    # Generate diff
+    existing_lines = existing_formatted.splitlines(keepends=True)
+    new_lines = new_formatted.splitlines(keepends=True)
+    diff_lines = list(
+        difflib.unified_diff(
+            existing_lines,
+            new_lines,
+            fromfile=f"existing/{manifest_name}",
+            tofile=f"new/{manifest_name}",
+        )
+    )
+
+    if not diff_lines:
+        return None
+
+    return "".join(diff_lines)
+
+
+def print_manifest_diff(
+    existing_manifest: Dict[str, Any],
+    new_manifest: Dict[str, Any],
+    manifest_name: str = "manifest",
+    console: Optional[Console] = None,
+) -> bool:
+    """
+    Generate and print a colored diff between manifests.
+
+    Args:
+        existing_manifest: The existing manifest of the resource
+        new_manifest: The new manifest being applied
+        manifest_name: Name of the manifest for diff headers
+        console: Optional Rich console instance to use for printing
+
+    Returns:
+        True if diff was printed, False if no diff
+    """
+    if console is None:
+        console = Console()
+
+    diff_text = generate_manifest_diff(existing_manifest, new_manifest, manifest_name)
+
+    if diff_text is None:
+        console.print(f"[green]No changes detected for {manifest_name}[/]")
+        return False
+
+    console.print(
+        Panel(Markdown(f"```diff\n{diff_text}\n```"), title=f"Diff for {manifest_name}")
+    )
+    console.print()
+    return True
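The diff helpers operate on plain dicts, so they can be exercised in isolation; a short sketch with made-up manifest contents:

    from truefoundry.deploy.lib.diff_utils import generate_manifest_diff, print_manifest_diff

    old = {"name": "demo-job", "type": "job", "retries": 0}
    new = {"name": "demo-job", "type": "job", "retries": 2}

    # Unified diff of the YAML-dumped manifests, or None when they are identical.
    print(generate_manifest_diff(old, new, manifest_name="demo-job (job)"))

    # Same diff rendered in a Rich panel; returns False when there is nothing to show.
    print_manifest_diff(old, new, manifest_name="demo-job (job)")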
truefoundry/deploy/lib/model/entity.py
CHANGED

@@ -280,6 +280,10 @@ class CreateDockerRepositoryResponse(Base):
     repoName: str


+class TFYApplyResponse(BaseModel):
+    existing_manifest: Optional[Dict[str, Any]] = Field(None, alias="existingManifest")
+
+
 class ApplyResult(BaseModel):
     success: bool
     message: str
truefoundry/deploy/v2/lib/deploy.py
CHANGED

@@ -262,6 +262,7 @@ def deploy_component(
     workspace_fqn: Optional[str] = None,
     wait: bool = True,
     force: bool = False,
+    trigger_on_deploy: bool = False,
 ) -> Deployment:
     workspace_fqn = _resolve_workspace_fqn(
         component=component, workspace_fqn=workspace_fqn

@@ -284,6 +285,7 @@ def deploy_component(
         workspace_id=workspace_id,
         application=updated_component,
         force=force,
+        trigger_on_deploy=trigger_on_deploy,
     )
     logger.info(
         "🚀 Deployment started for application '%s'. Deployment FQN is '%s'.",
truefoundry/deploy/v2/lib/deployable_patched_models.py
CHANGED

@@ -1,10 +1,49 @@
-
+import warnings
+from typing import Any, Literal, Union

+from truefoundry.common.warnings import TrueFoundryDeprecationWarning
 from truefoundry.deploy._autogen import models
 from truefoundry.deploy.lib.model.entity import Deployment
 from truefoundry.deploy.v2.lib.deploy import deploy_component
 from truefoundry.deploy.v2.lib.patched_models import LocalSource
-from truefoundry.pydantic_v1 import BaseModel, Field, conint
+from truefoundry.pydantic_v1 import BaseModel, Field, conint, root_validator, validator
+
+_TRIGGER_ON_DEPLOY_DEPRECATION_MESSAGE = """
+Setting `trigger_on_deploy` in manifest has been deprecated and the field will be removed in future releases.
+
+Please remove it from the spec and instead use
+
+`trigger_on_deploy` argument on `.deploy`
+
+E.g.
+
+```
+job = Job(...)  # remove `trigger_on_deploy` from initialization
+job.deploy(..., trigger_on_deploy={arg_value})
+```
+
+OR
+
+`{flag}` option on `tfy deploy`
+
+E.g.
+
+```
+tfy deploy -f truefoundry.yaml {flag}
+```
+"""
+
+
+def _warn_if_trigger_on_deploy_used(_klass, v: Any) -> Any:
+    if v is not None:
+        # v is the value of trigger_on_deploy, which is also the arg_value for the message
+        flag = "--trigger-on-deploy" if v else "--no-trigger-on-deploy"
+        warnings.warn(
+            _TRIGGER_ON_DEPLOY_DEPRECATION_MESSAGE.format(arg_value=v, flag=flag),
+            TrueFoundryDeprecationWarning,
+            stacklevel=2,
+        )
+    return v


 class DeployablePatchedModelBase(BaseModel):

@@ -12,13 +51,18 @@ class DeployablePatchedModelBase(BaseModel):
         extra = "forbid"

     def deploy(
-        self,
+        self,
+        workspace_fqn: str,
+        wait: bool = True,
+        force: bool = False,
+        trigger_on_deploy: bool = False,
     ) -> Deployment:
         return deploy_component(
             component=self,
             workspace_fqn=workspace_fqn,
             wait=wait,
             force=force,
+            trigger_on_deploy=trigger_on_deploy,
         )


@@ -36,6 +80,10 @@ class Job(models.Job, DeployablePatchedModelBase):
     type: Literal["job"] = "job"
     resources: models.Resources = Field(default_factory=models.Resources)

+    @validator("trigger_on_deploy")
+    def _warn_if_trigger_on_deploy_used(cls, v: Any) -> Any:
+        return _warn_if_trigger_on_deploy_used(cls, v)
+

 class SparkJob(models.SparkJob, DeployablePatchedModelBase):
     type: Literal["spark-job"] = "spark-job"

@@ -118,8 +166,24 @@ class Application(models.Application, DeployablePatchedModelBase):
         models.SparkJob,
     ] = Field(..., description="", discriminator="type")

+    @root_validator(pre=True)
+    def _validate_spec(cls, values: Any) -> Any:
+        if isinstance(values, dict) and "__root__" in values:
+            root = values["__root__"]
+            if (
+                isinstance(root, dict)
+                and root.get("type") == "job"
+                and root.get("trigger_on_deploy") is not None
+            ):
+                _warn_if_trigger_on_deploy_used(cls, root.get("trigger_on_deploy"))
+        return values
+
     def deploy(
-        self,
+        self,
+        workspace_fqn: str,
+        wait: bool = True,
+        force: bool = False,
+        trigger_on_deploy: bool = False,
     ) -> Deployment:
         if isinstance(self.__root__, models.Workflow):
             from truefoundry.deploy.v2.lib.deploy_workflow import deploy_workflow

@@ -136,4 +200,5 @@ class Application(models.Application, DeployablePatchedModelBase):
             workspace_fqn=workspace_fqn,
             wait=wait,
             force=force,
+            trigger_on_deploy=trigger_on_deploy,
         )
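The net effect is that `trigger_on_deploy` is meant to be passed to `.deploy(...)` (or the CLI flag) rather than set on the Job spec, which now only raises a TrueFoundryDeprecationWarning. A hedged sketch of the recommended call, with the spec fields elided and the workspace FQN made up:

    from truefoundry.deploy import Job

    job = Job(
        name="nightly-report",
        # ... image, trigger, resources, etc. -- no trigger_on_deploy here
    )

    # Preferred over setting trigger_on_deploy in the manifest:
    job.deploy(workspace_fqn="my-cluster:my-workspace", trigger_on_deploy=True)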
truefoundry/deploy/v2/lib/patched_models.py
CHANGED

@@ -159,6 +159,26 @@ class PythonBuild(models.PythonBuild, PatchedModelBase):
         return values


+class SparkBuild(models.SparkBuild, PatchedModelBase):
+    type: Literal["tfy-spark-buildpack"] = "tfy-spark-buildpack"
+
+    @root_validator
+    def validate_values(cls, values):
+        _resolve_requirements_path(
+            build_context_path=values.get("build_context_path") or "./",
+            requirements_path=values.get("requirements_path"),
+        )
+        return values
+
+
+class SparkImageBuild(models.SparkImageBuild, PatchedModelBase):
+    type: Literal["spark-image-build"] = "spark-image-build"
+
+
+class SparkImage(models.SparkImage, PatchedModelBase):
+    type: Literal["spark-image"] = "spark-image"
+
+
 class RemoteSource(models.RemoteSource, PatchedModelBase):
     type: Literal["remote"] = "remote"

{truefoundry-0.9.2rc1.dist-info → truefoundry-0.9.3.dist-info}/RECORD
CHANGED

@@ -40,7 +40,7 @@ truefoundry/cli/display_util.py,sha256=9vzN3mbQqU6OhS7qRUiMRana4PTHa4sDTA0Hn7OVj
 truefoundry/cli/util.py,sha256=pezUfF2GC6ru7s8VeH2a7uvXTU0xN9ka7yLXkIgC3dY,4998
 truefoundry/common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 truefoundry/common/auth_service_client.py,sha256=N3YxKlx63r6cPZqbgb2lqBOPI69ShB7D7RCIq4FSCjc,7949
-truefoundry/common/constants.py,sha256=
+truefoundry/common/constants.py,sha256=nWd3Je71WmHEORRUTCupZy5fWADqEFftjYP6wiYhCIc,4627
 truefoundry/common/credential_file_manager.py,sha256=1yEk1Zm2xS4G0VDFwKSZ4w0VUrcPWQ1nJnoBaz9xyKA,4251
 truefoundry/common/credential_provider.py,sha256=_OhJ2XFlDaVsrUO-FyywxctcGGqDdC2pgcvwEKqQD0Q,4071
 truefoundry/common/entities.py,sha256=b4R6ss06-ygDS3C4Tqa_GOq5LFKDYbt7x4Mghnfz6yo,4007

@@ -52,25 +52,28 @@ truefoundry/common/storage_provider_utils.py,sha256=yURhMw8k0FLFvaviRHDiifhvc6Gn
 truefoundry/common/types.py,sha256=BMJFCsR1lPJAw66IQBSvLyV4I6o_x5oj78gVsUa9si8,188
 truefoundry/common/utils.py,sha256=j3QP0uOsaGD_VmDDR68JTwoYE1okkAq6OqpVkzVf48Q,6424
 truefoundry/common/warnings.py,sha256=rs6BHwk7imQYedo07iwh3TWEOywAR3Lqhj0AY4khByg,504
-truefoundry/deploy/__init__.py,sha256=
+truefoundry/deploy/__init__.py,sha256=2GNbI8IGJBotz_IKaqQ-DWYWZn_pSu7lN7aId15Gk7Q,2799
 truefoundry/deploy/python_deploy_codegen.py,sha256=AainOFR20XvhNeztJkLPWGZ40lAT_nwc-ZmG77Kum4o,6525
-truefoundry/deploy/_autogen/models.py,sha256=
-truefoundry/deploy/builder/__init__.py,sha256=
+truefoundry/deploy/_autogen/models.py,sha256=xt-DuaRDx5jeRwyGoQH2yyPZAep9Q2MHFW9XBuRzG8E,73161
+truefoundry/deploy/builder/__init__.py,sha256=kgvlkVkiWpMVdim81tIeLrdoACqrFDgwCqHdQVsCsMo,4988
 truefoundry/deploy/builder/constants.py,sha256=amUkHoHvVKzGv0v_knfiioRuKiJM0V0xW0diERgWiI0,508
 truefoundry/deploy/builder/docker_service.py,sha256=sm7GWeIqyrKaZpxskdLejZlsxcZnM3BTDJr6orvPN4E,3948
 truefoundry/deploy/builder/utils.py,sha256=D68-bqM0NQx-Elg-56mtkENyVyg9faZ9tgTmBuo1Sjs,1076
-truefoundry/deploy/builder/builders/__init__.py,sha256=
+truefoundry/deploy/builder/builders/__init__.py,sha256=aomhWdR5L7uSM-GUalw9SnFHD2FQ_n-yFe4NH6nyNxw,715
 truefoundry/deploy/builder/builders/dockerfile.py,sha256=XMbMlPUTMPCyaHl7jJQY1ODtlRkpI61PcvgG6Ck5jNc,1522
 truefoundry/deploy/builder/builders/tfy_notebook_buildpack/__init__.py,sha256=RGWGqY8xOF7vycUPJd10N7ZzahWv24lO0anrOPtLuDU,1796
 truefoundry/deploy/builder/builders/tfy_notebook_buildpack/dockerfile_template.py,sha256=rQgdvKmAT9HArVW4TAG5yd2QTKRs3S5LJ9RQbc_EkHE,2518
 truefoundry/deploy/builder/builders/tfy_python_buildpack/__init__.py,sha256=_fjqHKn80qKi68SAMMALge7_A6e1sTsQWichw8uoGIw,2025
 truefoundry/deploy/builder/builders/tfy_python_buildpack/dockerfile_template.py,sha256=f4l3fH21E2b8W3-JotMKc0AdPcCxV7LRPxxYJa7z_UQ,9134
+truefoundry/deploy/builder/builders/tfy_spark_buildpack/__init__.py,sha256=NEPlM6_vTVxp4ITa18B8DBbgYCn1q5d8be21lbgu5oY,2888
+truefoundry/deploy/builder/builders/tfy_spark_buildpack/dockerfile_template.py,sha256=r2EhmPWsHwAKxtMIJQRLQAqxmiMnCrkNCn6IbWvM-XU,5812
+truefoundry/deploy/builder/builders/tfy_spark_buildpack/tfy_execute_notebook.py,sha256=bpzrsAVJMaN4W0LqRFfMC4pw9QkfPvK2uc-jRGaAlrw,1733
 truefoundry/deploy/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 truefoundry/deploy/cli/commands/__init__.py,sha256=qv818jxqSAygJ3h-6Ul8t-5VOgR_UrSgsVtNCl3e5G0,1408
-truefoundry/deploy/cli/commands/apply_command.py,sha256=
+truefoundry/deploy/cli/commands/apply_command.py,sha256=DmXmKVokkauyKIiJDtErTwbJ5_LvQeJbTQsG5BjyKpo,2427
 truefoundry/deploy/cli/commands/build_command.py,sha256=zJBywMatbpUlXx5O2aqpEVmPeBIJ9RNnG9abSc8C8CE,1234
 truefoundry/deploy/cli/commands/delete_command.py,sha256=i_lr_MocTEPKF2VwLe8B7oZWsgXK06EX_43_xdM5DIs,3875
-truefoundry/deploy/cli/commands/deploy_command.py,sha256=
+truefoundry/deploy/cli/commands/deploy_command.py,sha256=fN6yVXdSGD8xWyAj6KcwayCjA_sV5aKCpxLuNCrUl8U,4681
 truefoundry/deploy/cli/commands/deploy_init_command.py,sha256=g-jBfrEmhZ0TDWsyqPDn4K6q33EqJSGmBTt1eMYig-w,600
 truefoundry/deploy/cli/commands/get_command.py,sha256=bR8tAjQQhimzaTQ57L6BPJwcxQ_SGWCF5CqHDpxgG90,837
 truefoundry/deploy/cli/commands/k8s_exec_credential_command.py,sha256=EknpdufMAEnjSGMG7a-Jj7tkoiS5zmbJRREafb14Alw,2160

@@ -92,28 +95,29 @@ truefoundry/deploy/io/output_callback.py,sha256=_q79-dpFxnU762VPM9Ryy2gnuJnIotZ2
 truefoundry/deploy/io/rich_output_callback.py,sha256=m99RodkILXCgy_LJujEcojbpW1tL0H5Fjb0lqe6X_PQ,958
 truefoundry/deploy/lib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 truefoundry/deploy/lib/const.py,sha256=Wg0GDnfFu-g1fJr4lU80NH2ULw0R0dYjV7LnW-PbOeM,173
+truefoundry/deploy/lib/diff_utils.py,sha256=SszFgzBQ7GPwpgJKWjLDT-IQ_AcMB9mW8OP27Uj37HE,2627
 truefoundry/deploy/lib/logs_utils.py,sha256=SQxRv3jDDmgHdOUMhlMaAPGYskybnBUMpst7QU_i_sc,1469
 truefoundry/deploy/lib/messages.py,sha256=8424kj3kqCyDCX5Nr2WJZZ_UEutPoaSs_y2f9-O4yy8,1001
 truefoundry/deploy/lib/session.py,sha256=fLdgR6ZDp8-hFl5NTON4ngnWLsMzGxvKtfpDOOw_7lo,4963
 truefoundry/deploy/lib/util.py,sha256=J7r8San2wKo48A7-BlH2-OKTlBO67zlPjLEhMsL8os0,1059
 truefoundry/deploy/lib/win32.py,sha256=1RcvPTdlOAJ48rt8rCbE2Ufha2ztRqBAE9dueNXArrY,5009
 truefoundry/deploy/lib/clients/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-truefoundry/deploy/lib/clients/servicefoundry_client.py,sha256=
+truefoundry/deploy/lib/clients/servicefoundry_client.py,sha256=nSaaD91ONpDkRyOWHGv2VerPzdtn-Z3UF0iloj00VVU,27200
 truefoundry/deploy/lib/dao/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 truefoundry/deploy/lib/dao/application.py,sha256=oMszpueXPUfTUuN_XdKwoRjQyqAgWHhZ-10cbprCVdM,9226
-truefoundry/deploy/lib/dao/apply.py,sha256=
+truefoundry/deploy/lib/dao/apply.py,sha256=F7I8yp-IZir_6CL9NPE_KFD9rgicVJn2vcIv1a3MpuA,3771
 truefoundry/deploy/lib/dao/delete.py,sha256=uPL2psqWNw2O0oDikXJOlVxmG8n5d3Z0Ia9qZwqCn_8,2735
 truefoundry/deploy/lib/dao/version.py,sha256=AtdW_4O1DPUKdfv2qy6iUJsZ_95vM6z0AqeEy3WDKs8,1130
 truefoundry/deploy/lib/dao/workspace.py,sha256=6YvfCgWDzAULI3Q6JswyZmP1CwJ5rM-ANsIFkbQia0Q,2349
 truefoundry/deploy/lib/model/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-truefoundry/deploy/lib/model/entity.py,sha256=
+truefoundry/deploy/lib/model/entity.py,sha256=eBfA4trO0jUuDy0wifiu2rB_HryZrx5Kf-tRMwIQ_9g,8716
 truefoundry/deploy/v2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 truefoundry/deploy/v2/lib/__init__.py,sha256=WEiVMZXOVljzEE3tpGJil14liIn_PCDoACJ6b3tZ6sI,188
-truefoundry/deploy/v2/lib/deploy.py,sha256=
+truefoundry/deploy/v2/lib/deploy.py,sha256=HfSUdAS3gSpFAFtV0Mq9LscfpkaXqA2LHW4VXqk9Y0g,12707
 truefoundry/deploy/v2/lib/deploy_workflow.py,sha256=G5BzMIbap8pgDX1eY-TITruUxQdkKhYtBmRwLL6lDeY,14342
-truefoundry/deploy/v2/lib/deployable_patched_models.py,sha256=
+truefoundry/deploy/v2/lib/deployable_patched_models.py,sha256=mUi-OjPf7bc8rzfrPLdFb79LKuDq7F36RxL4V-AXebs,6830
 truefoundry/deploy/v2/lib/models.py,sha256=ogc1UYs1Z2nBdGSKCrde9sk8d0GxFKMkem99uqO5CmM,1148
-truefoundry/deploy/v2/lib/patched_models.py,sha256=
+truefoundry/deploy/v2/lib/patched_models.py,sha256=VkfS7akbUzMA4q15lQUcAirdTsyVE1rfMeCmjXJC6Zk,15394
 truefoundry/deploy/v2/lib/source.py,sha256=d6-8_6Zn5koBglqrBrY6ZLG_7yyPuLdyEmK4iZTw6xY,9405
 truefoundry/ml/__init__.py,sha256=EEEHV7w58Krpo_W9Chd8Y3TdItfFO3LI6j6Izqc4-P8,2219
 truefoundry/ml/constants.py,sha256=vDq72d4C9FSWqr9MMdjgTF4TuyNFApvo_6RVsSeAjB4,2837

@@ -377,7 +381,7 @@ truefoundry/workflow/remote_filesystem/__init__.py,sha256=LQ95ViEjJ7Ts4JcCGOxMPs
 truefoundry/workflow/remote_filesystem/logger.py,sha256=em2l7D6sw7xTLDP0kQSLpgfRRCLpN14Qw85TN7ujQcE,1022
 truefoundry/workflow/remote_filesystem/tfy_signed_url_client.py,sha256=xcT0wQmQlgzcj0nP3tJopyFSVWT1uv3nhiTIuwfXYeg,12342
 truefoundry/workflow/remote_filesystem/tfy_signed_url_fs.py,sha256=nSGPZu0Gyd_jz0KsEE-7w_BmnTD8CVF1S8cUJoxaCbc,13305
-truefoundry-0.9.
-truefoundry-0.9.
-truefoundry-0.9.
-truefoundry-0.9.
+truefoundry-0.9.3.dist-info/METADATA,sha256=M8MsWn9958d-O70Q5n7eKZoZuEq1GXp4LlJLutm0ndI,2504
+truefoundry-0.9.3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+truefoundry-0.9.3.dist-info/entry_points.txt,sha256=xVjn7RMN-MW2-9f7YU-bBdlZSvvrwzhpX1zmmRmsNPU,98
+truefoundry-0.9.3.dist-info/RECORD,,
{truefoundry-0.9.2rc1.dist-info → truefoundry-0.9.3.dist-info}/WHEEL
File without changes

{truefoundry-0.9.2rc1.dist-info → truefoundry-0.9.3.dist-info}/entry_points.txt
File without changes