truefoundry 0.11.2__py3-none-any.whl → 0.11.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of truefoundry might be problematic.

truefoundry/__init__.py CHANGED
@@ -12,6 +12,7 @@ from truefoundry_sdk import (
     ToolSchema,
     UserMessage,
 )
+from truefoundry_sdk.client import TrueFoundry

 from truefoundry._client import client
 from truefoundry.common.warnings import (
@@ -39,6 +40,7 @@ __all__ = [
     "render_prompt",
     "suppress_truefoundry_deprecation_warnings",
     "SystemMessage",
+    "TrueFoundry",
     "ToolCall",
     "ToolMessage",
     "ToolSchema",
@@ -68,6 +68,8 @@ class TrueFoundrySdkEnv(BaseSettings):
     # TODO(gw): Use another image with more linient rate limits
     TFY_SPARK_BUILD_SPARK_IMAGE_REPO: str = "public.ecr.aws/bitnami/spark"

+    TFY_TASK_PYSPARK_BUILD_SPARK_IMAGE_REPO: str = "public.ecr.aws/bitnami/spark"
+
     # For local development, this enables futher configuration via _TFYServersConfig
     TFY_CLI_LOCAL_DEV_MODE: bool = False

@@ -120,6 +120,7 @@ from truefoundry.deploy.v2.lib.patched_models import (
     SparkJobPythonEntrypoint,
     SparkJobPythonNotebookEntrypoint,
     SparkJobScalaEntrypoint,
+    SparkJobScalaNotebookEntrypoint,
     SQSInputConfig,
     SQSOutputConfig,
     SQSQueueMetricConfig,
@@ -1,6 +1,6 @@
 # generated by datamodel-codegen:
 # filename: application.json
-# timestamp: 2025-06-18T21:24:37+00:00
+# timestamp: 2025-07-28T14:11:36+00:00

 from __future__ import annotations

@@ -103,7 +103,7 @@ class AsyncProcessorSidecar(BaseModel):


 class Autoshutdown(BaseModel):
-    wait_time: conint(ge=0) = Field(
+    wait_time: conint(ge=0, le=604800) = Field(
         900,
         description="The period to wait after the last received request before scaling the replicas to 0. This value should be high enough to allow for the replicas of the service to come up to avoid premature scaling down.",
     )
@@ -957,6 +957,14 @@ class SparkJobScalaEntrypoint(BaseModel):
     )


+class SparkJobScalaNotebookEntrypoint(BaseModel):
+    type: Literal["scala-notebook"] = Field(..., description="")
+    main_application_file: str = Field(
+        ...,
+        description="The main application file to be executed by the spark job. Relative path in case of git repository.",
+    )
+
+
 class StaticVolumeConfig(BaseModel):
     type: Literal["static"] = Field(..., description="Volume Type for the volume.")
     persistent_volume_name: str = Field(
@@ -989,6 +997,34 @@ class TaskDockerFileBuild(BaseModel):
     build_args: Optional[Dict[str, str]] = Field(None, description="")


+class TaskPySparkBuild(BaseModel):
+    """
+    Describes the configuration for the PySpark build for a task
+    """
+
+    type: Literal["task-pyspark-build"] = Field(..., description="")
+    spark_version: str = Field(
+        "3.5.2",
+        description="Spark version should match the spark version installed in the image.",
+    )
+    docker_registry: Optional[str] = Field(
+        None,
+        description="FQN of the container registry. If you can't find your registry here,\nadd it through the [Integrations](/integrations?tab=docker-registry) page",
+    )
+    requirements_path: Optional[str] = Field(
+        None,
+        description="Path to `requirements.txt` relative to\n`Path to build context`",
+    )
+    pip_packages: Optional[List[str]] = Field(
+        None,
+        description='Define pip package requirements.\nIn Python/YAML E.g. ["fastapi>=0.90,<1.0", "uvicorn"]',
+    )
+    apt_packages: Optional[List[str]] = Field(
+        None,
+        description='Debian packages to install via `apt get`.\nIn Python/YAML E.g. ["git", "ffmpeg", "htop"]',
+    )
+
+
 class TaskPythonBuild(BaseModel):
     """
     Describes the configuration for the python build for a task
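For orientation, a minimal sketch of constructing the new build config directly from the generated models (the values below are illustrative, not part of this diff):

    from truefoundry.deploy._autogen.models import TaskPySparkBuild

    # Hypothetical values; pyspark==<spark_version> is appended automatically by the
    # tfy_task_pyspark_buildpack builder added later in this diff.
    image = TaskPySparkBuild(
        type="task-pyspark-build",
        spark_version="3.5.2",
        pip_packages=["pandas>=2.0,<3.0"],
        apt_packages=["git"],
    )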
@@ -1200,7 +1236,7 @@ class ContainerTaskConfig(BaseModel):
         description="Configure environment variables to be injected in the task either as plain text or secrets. [Docs](https://docs.truefoundry.com/docs/env-variables)",
     )
     resources: Optional[Resources] = None
-    mounts: Optional[List[Union[SecretMount, StringDataMount, VolumeMount]]] = Field(
+    mounts: Optional[List[VolumeMount]] = Field(
         None, description="Configure data to be mounted to Workflow pod(s) as a volume."
     )
     service_account: Optional[str] = Field(None, description="")
@@ -1461,6 +1497,7 @@ class SparkJob(BaseModel):
         SparkJobScalaEntrypoint,
         SparkJobJavaEntrypoint,
         SparkJobPythonNotebookEntrypoint,
+        SparkJobScalaNotebookEntrypoint,
     ] = Field(..., description="")
     driver_config: SparkDriverConfig
     executor_config: SparkExecutorConfig
@@ -1567,15 +1604,6 @@ class FlyteLaunchPlan(BaseModel):
     closure: Any


-class FlyteTaskCustom(BaseModel):
-    truefoundry: Union[PythonTaskConfig, ContainerTaskConfig]
-
-
-class FlyteTaskTemplate(BaseModel):
-    id: FlyteTaskID
-    custom: FlyteTaskCustom
-
-
 class JobAlert(BaseModel):
     """
     Describes the configuration for the job alerts
@@ -1594,6 +1622,25 @@ class JobAlert(BaseModel):
     on_failure: bool = Field(True, description="Send an alert when the job fails")


+class PySparkTaskConfig(BaseModel):
+    type: Literal["pyspark-task-config"] = Field(..., description="")
+    image: TaskPySparkBuild
+    driver_config: SparkDriverConfig
+    executor_config: SparkExecutorConfig
+    spark_conf: Optional[Dict[str, Any]] = Field(
+        None,
+        description="Extra configuration properties to be passed to the spark job. [Docs](https://spark.apache.org/docs/latest/configuration.html)",
+    )
+    env: Optional[Dict[str, str]] = Field(
+        None,
+        description="Configure environment variables to be injected in the task either as plain text or secrets. [Docs](https://docs.truefoundry.com/docs/env-variables)",
+    )
+    mounts: Optional[List[Union[SecretMount, StringDataMount, VolumeMount]]] = Field(
+        None, description="Configure data to be mounted to Workflow pod(s) as a volume."
+    )
+    service_account: Optional[str] = Field(None, description="")
+
+
 class Service(BaseService):
     """
     Describes the configuration for the service
@@ -1630,9 +1677,13 @@ class AsyncService(BaseService):
     sidecar: Optional[AsyncProcessorSidecar] = None


-class FlyteTask(BaseModel):
-    template: FlyteTaskTemplate
-    description: Optional[Any] = None
+class FlyteTaskCustom(BaseModel):
+    truefoundry: Union[PythonTaskConfig, ContainerTaskConfig, PySparkTaskConfig]
+
+
+class FlyteTaskTemplate(BaseModel):
+    id: FlyteTaskID
+    custom: FlyteTaskCustom


 class Job(BaseModel):
@@ -1688,27 +1739,6 @@ class Job(BaseModel):
     )


-class Workflow(BaseModel):
-    """
-    Describes the configuration for the worflow
-    """
-
-    type: Literal["workflow"] = Field(..., description="")
-    name: constr(regex=r"^[a-z](?:[a-z0-9]|-(?!-)){1,30}[a-z0-9]$") = Field(
-        ..., description="Name of the workflow"
-    )
-    source: Union[LocalSource, RemoteSource] = Field(
-        ..., description="Source Code for the workflow, either local or remote"
-    )
-    workflow_file_path: str = Field(
-        ..., description="Path to the workflow file relative to the project root path"
-    )
-    flyte_entities: Optional[List[Union[FlyteTask, FlyteWorkflow, FlyteLaunchPlan]]] = (
-        Field(None, description="")
-    )
-    alerts: Optional[List[WorkflowAlert]] = Field(None, description="")
-
-
 class ApplicationSet(BaseModel):
     """
     Describes the configuration for the application set
@@ -1735,6 +1765,32 @@ class ApplicationSet(BaseModel):
     )


+class FlyteTask(BaseModel):
+    template: FlyteTaskTemplate
+    description: Optional[Any] = None
+
+
+class Workflow(BaseModel):
+    """
+    Describes the configuration for the worflow
+    """
+
+    type: Literal["workflow"] = Field(..., description="")
+    name: constr(regex=r"^[a-z](?:[a-z0-9]|-(?!-)){1,30}[a-z0-9]$") = Field(
+        ..., description="Name of the workflow"
+    )
+    source: Union[LocalSource, RemoteSource] = Field(
+        ..., description="Source Code for the workflow, either local or remote"
+    )
+    workflow_file_path: str = Field(
+        ..., description="Path to the workflow file relative to the project root path"
+    )
+    flyte_entities: Optional[List[Union[FlyteTask, FlyteWorkflow, FlyteLaunchPlan]]] = (
+        Field(None, description="")
+    )
+    alerts: Optional[List[WorkflowAlert]] = Field(None, description="")
+
+
 class Application(BaseModel):
     __root__: Union[
         Service,
@@ -5,6 +5,7 @@ from truefoundry.deploy._autogen.models import (
     PythonBuild,
     SparkBuild,
     TaskDockerFileBuild,
+    TaskPySparkBuild,
     TaskPythonBuild,
 )
 from truefoundry.deploy.builder.builders import get_builder
@@ -22,6 +23,7 @@ class _BuildConfig(BaseModel):
         TaskPythonBuild,
         TaskDockerFileBuild,
         SparkBuild,
+        TaskPySparkBuild,
     ] = Field(discriminator="type")


@@ -5,6 +5,7 @@ from truefoundry.deploy.builder.builders import (
     tfy_notebook_buildpack,
     tfy_python_buildpack,
     tfy_spark_buildpack,
+    tfy_task_pyspark_buildpack,
 )

 BUILD_REGISTRY: Dict[str, Callable] = {
@@ -12,6 +13,7 @@ BUILD_REGISTRY: Dict[str, Callable] = {
     "tfy-python-buildpack": tfy_python_buildpack.build,
     "tfy-notebook-buildpack": tfy_notebook_buildpack.build,
     "tfy-spark-buildpack": tfy_spark_buildpack.build,
+    "task-pyspark-build": tfy_task_pyspark_buildpack.build,
 }

 __all__ = ["get_builder"]
@@ -1,5 +1,4 @@
-import shlex
-from typing import Dict, List, Optional
+from typing import Dict

 from mako.template import Template

@@ -7,9 +6,12 @@ from truefoundry.common.constants import ENV_VARS, PythonPackageManager
 from truefoundry.deploy._autogen.models import PythonBuild
 from truefoundry.deploy.builder.constants import (
     PIP_CONF_BUILDKIT_SECRET_MOUNT,
-    PIP_CONF_SECRET_MOUNT_AS_ENV,
     UV_CONF_BUILDKIT_SECRET_MOUNT,
-    UV_CONF_SECRET_MOUNT_AS_ENV,
+)
+from truefoundry.deploy.builder.utils import (
+    generate_apt_install_command,
+    generate_pip_install_command,
+    generate_uv_pip_install_command,
 )
 from truefoundry.deploy.v2.lib.patched_models import (
     CUDAVersion,
@@ -82,85 +84,6 @@ CUDA_VERSION_TO_IMAGE_TAG: Dict[str, str] = {
 }


-def generate_apt_install_command(apt_packages: Optional[List[str]]) -> Optional[str]:
-    packages_list = None
-    if apt_packages:
-        packages_list = " ".join(p.strip() for p in apt_packages if p.strip())
-    if not packages_list:
-        return None
-    apt_update_command = "apt update"
-    apt_install_command = f"DEBIAN_FRONTEND=noninteractive apt install -y --no-install-recommends {packages_list}"
-    clear_apt_lists_command = "rm -rf /var/lib/apt/lists/*"
-    return " && ".join(
-        [apt_update_command, apt_install_command, clear_apt_lists_command]
-    )
-
-
-def generate_pip_install_command(
-    requirements_path: Optional[str],
-    pip_packages: Optional[List[str]],
-    mount_pip_conf_secret: bool = False,
-) -> Optional[str]:
-    upgrade_pip_command = "python -m pip install -U pip setuptools wheel"
-    envs = []
-    if mount_pip_conf_secret:
-        envs.append(PIP_CONF_SECRET_MOUNT_AS_ENV)
-
-    command = ["python", "-m", "pip", "install", "--use-pep517", "--no-cache-dir"]
-    args = []
-    if requirements_path:
-        args.append("-r")
-        args.append(requirements_path)
-
-    if pip_packages:
-        args.extend(pip_packages)
-
-    if not args:
-        return None
-
-    final_pip_install_command = shlex.join(envs + command + args)
-    final_docker_run_command = " && ".join(
-        [upgrade_pip_command, final_pip_install_command]
-    )
-    return final_docker_run_command
-
-
-def generate_uv_pip_install_command(
-    requirements_path: Optional[str],
-    pip_packages: Optional[List[str]],
-    mount_uv_conf_secret: bool = False,
-) -> Optional[str]:
-    upgrade_pip_command = "python -m pip install -U pip setuptools wheel"
-    uv_mount = f"--mount=from={ENV_VARS.TFY_PYTHON_BUILD_UV_IMAGE_URI},source=/uv,target=/usr/local/bin/uv"
-    envs = [
-        "UV_LINK_MODE=copy",
-        "UV_PYTHON_DOWNLOADS=never",
-        "UV_INDEX_STRATEGY=unsafe-best-match",
-    ]
-    if mount_uv_conf_secret:
-        envs.append(UV_CONF_SECRET_MOUNT_AS_ENV)
-
-    command = ["uv", "pip", "install", "--no-cache-dir"]
-
-    args = []
-
-    if requirements_path:
-        args.append("-r")
-        args.append(requirements_path)
-
-    if pip_packages:
-        args.extend(pip_packages)
-
-    if not args:
-        return None
-
-    uv_pip_install_command = shlex.join(envs + command + args)
-    shell_commands = " && ".join([upgrade_pip_command, uv_pip_install_command])
-    final_docker_run_command = " ".join([uv_mount, shell_commands])
-
-    return final_docker_run_command
-
-
 def generate_dockerfile_content(
     build_configuration: PythonBuild,
     package_manager: str = ENV_VARS.TFY_PYTHON_BUILD_PACKAGE_MANAGER,
@@ -1,15 +1,14 @@
-import shlex
-from typing import List, Optional
-
 from mako.template import Template

 from truefoundry.common.constants import ENV_VARS, PythonPackageManager
 from truefoundry.deploy._autogen.models import SparkBuild
 from truefoundry.deploy.builder.constants import (
     PIP_CONF_BUILDKIT_SECRET_MOUNT,
-    PIP_CONF_SECRET_MOUNT_AS_ENV,
     UV_CONF_BUILDKIT_SECRET_MOUNT,
-    UV_CONF_SECRET_MOUNT_AS_ENV,
+)
+from truefoundry.deploy.builder.utils import (
+    generate_pip_install_command,
+    generate_uv_pip_install_command,
 )
 from truefoundry.deploy.v2.lib.patched_models import (
     _resolve_requirements_path,
@@ -26,8 +25,7 @@ RUN ${package_manager_config_secret_mount} ${python_packages_install_command}
 % endif
 ENV PYTHONDONTWRITEBYTECODE=1
 ENV IPYTHONDIR=/tmp/.ipython
-RUN groupadd --system --gid 1001 spark && useradd --system --uid 1001 --gid spark --no-create-home spark
-USER spark
+USER 1001
 COPY . /app
 """

@@ -35,6 +33,20 @@ _POST_USER_TEMPLATE = """
 COPY tfy_execute_notebook.py /app/tfy_execute_notebook.py
 """

+_ALMOND_INSTALL_TEMPLATE = """
+ENV COURSIER_CACHE=/opt/coursier-cache
+RUN install_packages curl
+RUN curl -Lo coursier https://git.io/coursier-cli && \
+    chmod +x coursier && \
+    ./coursier launch almond:0.14.1 -- --install --global && \
+    chown -R 1001:0 /usr/local/share/jupyter && \
+    chown -R 1001:0 /opt/coursier-cache && \
+    rm -f coursier
+"""
+
+# Docker image size with almond - 1.26GB
+# Docker image size without almond - 1.1GB
+# Not much harm in packaging almond by default
 DOCKERFILE_TEMPLATE = Template(
     """
 FROM ${spark_image_repo}:${spark_version}
@@ -43,6 +55,7 @@ RUN apt update && \
     DEBIAN_FRONTEND=noninteractive apt install -y --no-install-recommends git && \
     rm -rf /var/lib/apt/lists/*
 """
+    + _ALMOND_INSTALL_TEMPLATE
     + _POST_PYTHON_INSTALL_TEMPLATE
     + _POST_USER_TEMPLATE
 )
@@ -55,71 +68,6 @@ ADDITIONAL_PIP_PACKAGES = [
 ]


-def generate_pip_install_command(
-    requirements_path: Optional[str],
-    pip_packages: Optional[List[str]],
-    mount_pip_conf_secret: bool = False,
-) -> Optional[str]:
-    upgrade_pip_command = "python3 -m pip install -U pip setuptools wheel"
-    envs = []
-    if mount_pip_conf_secret:
-        envs.append(PIP_CONF_SECRET_MOUNT_AS_ENV)
-
-    command = ["python3", "-m", "pip", "install", "--use-pep517", "--no-cache-dir"]
-    args = []
-    if requirements_path:
-        args.append("-r")
-        args.append(requirements_path)
-
-    if pip_packages:
-        args.extend(pip_packages)
-
-    if not args:
-        return None
-
-    final_pip_install_command = shlex.join(envs + command + args)
-    final_docker_run_command = " && ".join(
-        [upgrade_pip_command, final_pip_install_command]
-    )
-    return final_docker_run_command
-
-
-def generate_uv_pip_install_command(
-    requirements_path: Optional[str],
-    pip_packages: Optional[List[str]],
-    mount_uv_conf_secret: bool = False,
-) -> Optional[str]:
-    upgrade_pip_command = "python3 -m pip install -U pip setuptools wheel"
-    uv_mount = f"--mount=from={ENV_VARS.TFY_PYTHON_BUILD_UV_IMAGE_URI},source=/uv,target=/usr/local/bin/uv"
-    envs = [
-        "UV_LINK_MODE=copy",
-        "UV_PYTHON_DOWNLOADS=never",
-        "UV_INDEX_STRATEGY=unsafe-best-match",
-    ]
-    if mount_uv_conf_secret:
-        envs.append(UV_CONF_SECRET_MOUNT_AS_ENV)
-
-    command = ["uv", "pip", "install", "--no-cache-dir"]
-
-    args = []
-
-    if requirements_path:
-        args.append("-r")
-        args.append(requirements_path)
-
-    if pip_packages:
-        args.extend(pip_packages)
-
-    if not args:
-        return None
-
-    uv_pip_install_command = shlex.join(envs + command + args)
-    shell_commands = " && ".join([upgrade_pip_command, uv_pip_install_command])
-    final_docker_run_command = " ".join([uv_mount, shell_commands])
-
-    return final_docker_run_command
-
-
 def generate_dockerfile_content(
     build_configuration: SparkBuild,
     package_manager: str = ENV_VARS.TFY_PYTHON_BUILD_PACKAGE_MANAGER,
@@ -86,12 +86,24 @@ def execute_notebook(notebook_path, output_path="/tmp/output.ipynb", parameters=
         parameters = {}

     print(f"Starting execution of notebook: {notebook_path}")
+    notebook_type = os.environ.get("TFY_NOTEBOOK_TYPE", "").lower()
+    kernel_mapping = {"python": "python3", "scala": "scala"}
+
+    if notebook_type not in kernel_mapping:
+        supported_types = ", ".join(kernel_mapping.keys())
+        raise ValueError(
+            f"Unsupported notebook type: '{notebook_type}'. "
+            f"Supported types: [{supported_types}]"
+        )
+
+    kernel_name = kernel_mapping[notebook_type]
+
     pm.execute_notebook(
         input_path=notebook_path,
         output_path=output_path,
         parameters=parameters,
         # TODO(gw): Replace with kernel name for venv
-        kernel_name="python3",
+        kernel_name=kernel_name,
         # Log cell by cell execution output
         # TODO(gw): Output logs to a file instead, so that they aren't merged with the container's logs
         log_output=True,
@@ -107,6 +119,7 @@ def validate_env_vars():
         "TFY_NOTEBOOK_OUTPUT_S3_BUCKET",
         "SPARK_APPLICATION_EVENT_LOG_JWT_TOKEN",
         "TFY_NOTEBOOK_OUTPUT_S3_SECRET_KEY",
+        "TFY_NOTEBOOK_TYPE",
     ]
     unset_keys = [key for key in keys if not os.environ.get(key)]
     if unset_keys:
@@ -0,0 +1,52 @@
+import os
+from tempfile import TemporaryDirectory
+from typing import List, Optional
+
+from truefoundry.deploy._autogen.models import DockerFileBuild, TaskPySparkBuild
+from truefoundry.deploy.builder.builders import dockerfile
+from truefoundry.deploy.builder.builders.tfy_task_pyspark_buildpack.dockerfile_template import (
+    generate_dockerfile_content,
+)
+from truefoundry.deploy.builder.utils import has_python_package_manager_conf_secret
+
+__all__ = ["generate_dockerfile_content", "build"]
+
+
+def _convert_to_dockerfile_build_config(
+    build_configuration: TaskPySparkBuild,
+    dockerfile_path: str,
+    mount_python_package_manager_conf_secret: bool = False,
+) -> DockerFileBuild:
+    dockerfile_content = generate_dockerfile_content(
+        build_configuration=build_configuration,
+        mount_python_package_manager_conf_secret=mount_python_package_manager_conf_secret,
+    )
+    with open(dockerfile_path, "w", encoding="utf8") as fp:
+        fp.write(dockerfile_content)
+
+    return DockerFileBuild(
+        type="dockerfile",
+        dockerfile_path=dockerfile_path,
+    )
+
+
+def build(
+    tag: str,
+    build_configuration: TaskPySparkBuild,
+    extra_opts: Optional[List[str]] = None,
+):
+    mount_python_package_manager_conf_secret = (
+        has_python_package_manager_conf_secret(extra_opts) if extra_opts else False
+    )
+
+    with TemporaryDirectory() as local_dir:
+        docker_build_configuration = _convert_to_dockerfile_build_config(
+            build_configuration,
+            dockerfile_path=os.path.join(local_dir, "Dockerfile"),
+            mount_python_package_manager_conf_secret=mount_python_package_manager_conf_secret,
+        )
+        dockerfile.build(
+            tag=tag,
+            build_configuration=docker_build_configuration,
+            extra_opts=extra_opts,
+        )
@@ -0,0 +1,121 @@
+from mako.template import Template
+
+from truefoundry.common.constants import ENV_VARS, PythonPackageManager
+from truefoundry.deploy._autogen.models import TaskPySparkBuild
+from truefoundry.deploy.builder.constants import (
+    PIP_CONF_BUILDKIT_SECRET_MOUNT,
+    UV_CONF_BUILDKIT_SECRET_MOUNT,
+)
+from truefoundry.deploy.builder.utils import (
+    generate_apt_install_command,
+    generate_pip_install_command,
+    generate_uv_pip_install_command,
+)
+from truefoundry.deploy.v2.lib.patched_models import (
+    _resolve_requirements_path,
+)
+
+# TODO[GW]: Switch to a non-root user inside the container
+_POST_PYTHON_INSTALL_TEMPLATE = """
+% if apt_install_command is not None:
+RUN ${apt_install_command}
+% endif
+% if requirements_path is not None:
+COPY ${requirements_path} ${requirements_destination_path}
+% endif
+% if python_packages_install_command is not None:
+RUN ${package_manager_config_secret_mount} ${python_packages_install_command}
+% endif
+COPY . /app
+WORKDIR /app
+"""
+
+# TODO[GW]: Check if the entrypoint for the image needs to change
+# Using /opt/venv/ because flyte seems to be using it and this doesn't look configurable
+# TODO[GW]: Double check this^
+DOCKERFILE_TEMPLATE = Template(
+    """
+FROM ${spark_image_repo}:${spark_version}
+ENV PATH=/opt/venv/bin:$PATH
+USER root
+RUN mkdir -p /var/lib/apt/lists/partial && \
+    apt update && \
+    DEBIAN_FRONTEND=noninteractive apt install -y --no-install-recommends git && \
+    python -m venv /opt/venv/ && \
+    rm -rf /var/lib/apt/lists/*
+"""
+    + _POST_PYTHON_INSTALL_TEMPLATE
+)
+
+
+def get_additional_pip_packages(build_configuration: TaskPySparkBuild):
+    return [
+        f"pyspark=={build_configuration.spark_version}",
+    ]
+
+
+def generate_dockerfile_content(
+    build_configuration: TaskPySparkBuild,
+    package_manager: str = ENV_VARS.TFY_PYTHON_BUILD_PACKAGE_MANAGER,
+    mount_python_package_manager_conf_secret: bool = False,
+) -> str:
+    # TODO (chiragjn): Handle recursive references to other requirements files e.g. `-r requirements-gpu.txt`
+    requirements_path = _resolve_requirements_path(
+        build_context_path="",
+        requirements_path=build_configuration.requirements_path,
+    )
+    requirements_destination_path = (
+        "/tmp/requirements.txt" if requirements_path else None
+    )
+    # if not build_configuration.python_version:
+    #     raise ValueError(
+    #         "`python_version` is required for `tfy-python-buildpack` builder"
+    #     )
+    pip_packages = get_additional_pip_packages(build_configuration) + (
+        build_configuration.pip_packages or []
+    )
+    if package_manager == PythonPackageManager.PIP.value:
+        python_packages_install_command = generate_pip_install_command(
+            requirements_path=requirements_destination_path,
+            pip_packages=pip_packages,
+            mount_pip_conf_secret=mount_python_package_manager_conf_secret,
+        )
+    elif package_manager == PythonPackageManager.UV.value:
+        python_packages_install_command = generate_uv_pip_install_command(
+            requirements_path=requirements_destination_path,
+            pip_packages=pip_packages,
+            mount_uv_conf_secret=mount_python_package_manager_conf_secret,
+        )
+    else:
+        raise ValueError(f"Unsupported package manager: {package_manager}")
+
+    apt_install_command = generate_apt_install_command(
+        apt_packages=build_configuration.apt_packages
+    )
+    template_args = {
+        "spark_image_repo": ENV_VARS.TFY_TASK_PYSPARK_BUILD_SPARK_IMAGE_REPO,
+        "spark_version": build_configuration.spark_version,
+        "apt_install_command": apt_install_command,
+        "requirements_path": requirements_path,
+        "requirements_destination_path": requirements_destination_path,
+        "python_packages_install_command": python_packages_install_command,
+    }
+
+    if mount_python_package_manager_conf_secret:
+        if package_manager == PythonPackageManager.PIP.value:
+            template_args["package_manager_config_secret_mount"] = (
+                PIP_CONF_BUILDKIT_SECRET_MOUNT
+            )
+        elif package_manager == PythonPackageManager.UV.value:
+            template_args["package_manager_config_secret_mount"] = (
+                UV_CONF_BUILDKIT_SECRET_MOUNT
+            )
+        else:
+            raise ValueError(f"Unsupported package manager: {package_manager}")
+    else:
+        template_args["package_manager_config_secret_mount"] = ""
+
+    template = DOCKERFILE_TEMPLATE
+
+    dockerfile_content = template.render(**template_args)
+    return dockerfile_content
@@ -1,8 +1,12 @@
+import shlex
 from typing import List, Optional

+from truefoundry.common.constants import ENV_VARS
 from truefoundry.deploy.builder.constants import (
     BUILDKIT_SECRET_MOUNT_PIP_CONF_ID,
     BUILDKIT_SECRET_MOUNT_UV_CONF_ID,
+    PIP_CONF_SECRET_MOUNT_AS_ENV,
+    UV_CONF_SECRET_MOUNT_AS_ENV,
 )


@@ -35,3 +39,82 @@ def has_python_package_manager_conf_secret(docker_build_extra_args: List[str]) -
     ):
         return True
     return False
+
+
+def generate_pip_install_command(
+    requirements_path: Optional[str],
+    pip_packages: Optional[List[str]],
+    mount_pip_conf_secret: bool = False,
+) -> Optional[str]:
+    upgrade_pip_command = "python -m pip install -U pip setuptools wheel"
+    envs = []
+    if mount_pip_conf_secret:
+        envs.append(PIP_CONF_SECRET_MOUNT_AS_ENV)
+
+    command = ["python", "-m", "pip", "install", "--use-pep517", "--no-cache-dir"]
+    args = []
+    if requirements_path:
+        args.append("-r")
+        args.append(requirements_path)
+
+    if pip_packages:
+        args.extend(pip_packages)
+
+    if not args:
+        return None
+
+    final_pip_install_command = shlex.join(envs + command + args)
+    final_docker_run_command = " && ".join(
+        [upgrade_pip_command, final_pip_install_command]
+    )
+    return final_docker_run_command
+
+
+def generate_uv_pip_install_command(
+    requirements_path: Optional[str],
+    pip_packages: Optional[List[str]],
+    mount_uv_conf_secret: bool = False,
+) -> Optional[str]:
+    upgrade_pip_command = "python -m pip install -U pip setuptools wheel"
+    uv_mount = f"--mount=from={ENV_VARS.TFY_PYTHON_BUILD_UV_IMAGE_URI},source=/uv,target=/usr/local/bin/uv"
+    envs = [
+        "UV_LINK_MODE=copy",
+        "UV_PYTHON_DOWNLOADS=never",
+        "UV_INDEX_STRATEGY=unsafe-best-match",
+    ]
+    if mount_uv_conf_secret:
+        envs.append(UV_CONF_SECRET_MOUNT_AS_ENV)
+
+    command = ["uv", "pip", "install", "--no-cache-dir"]
+
+    args = []
+
+    if requirements_path:
+        args.append("-r")
+        args.append(requirements_path)
+
+    if pip_packages:
+        args.extend(pip_packages)
+
+    if not args:
+        return None
+
+    uv_pip_install_command = shlex.join(envs + command + args)
+    shell_commands = " && ".join([upgrade_pip_command, uv_pip_install_command])
+    final_docker_run_command = " ".join([uv_mount, shell_commands])
+
+    return final_docker_run_command
+
+
+def generate_apt_install_command(apt_packages: Optional[List[str]]) -> Optional[str]:
+    packages_list = None
+    if apt_packages:
+        packages_list = " ".join(p.strip() for p in apt_packages if p.strip())
+    if not packages_list:
+        return None
+    apt_update_command = "apt update"
+    apt_install_command = f"DEBIAN_FRONTEND=noninteractive apt install -y --no-install-recommends {packages_list}"
+    clear_apt_lists_command = "rm -rf /var/lib/apt/lists/*"
+    return " && ".join(
+        [apt_update_command, apt_install_command, clear_apt_lists_command]
+    )
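These helpers are now shared from truefoundry.deploy.builder.utils by the python, spark, and task-pyspark buildpacks. As a quick worked example of the shell string they produce (illustrative input, derived from the function body above):

    from truefoundry.deploy.builder.utils import generate_apt_install_command

    print(generate_apt_install_command(["git", "ffmpeg"]))
    # apt update && DEBIAN_FRONTEND=noninteractive apt install -y --no-install-recommends git ffmpeg && rm -rf /var/lib/apt/lists/*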
@@ -35,8 +35,20 @@ def trigger_command():
         nargs=-1,
         required=False,
     )
+    @click.option(
+        "--run-name-alias",
+        "--run_name_alias",
+        type=click.STRING,
+        required=False,
+        help="Alias for the job run name.",
+    )
     @handle_exception_wrapper
-    def trigger_job(application_fqn: str, command: Optional[Sequence[str]], params):
+    def trigger_job(
+        application_fqn: str,
+        params,
+        command: Optional[Sequence[str]],
+        run_name_alias: Optional[str],
+    ):
         """
         Trigger a Job on TrueFoundry asynchronously

@@ -54,6 +66,10 @@ def trigger_job(application_fqn: str, command: Optional[Sequence[str]], params):
         Passing params:

         [b]tfy trigger job --application-fqn "my-cluster:my-workspace:my-job" -- --param1_name param1_value --param2_name param2_value ...[/]
+        \n
+
+        passing run_name_alias:
+        [b]tfy trigger job --application-fqn "my-cluster:my-workspace:my-job" --run_name_alias "my_run_alias"[/]
         """
         if params:
             params_dict = {}
@@ -78,7 +94,10 @@ def trigger_job(application_fqn: str, command: Optional[Sequence[str]], params):
                 params_dict[key] = value

         application.trigger_job(
-            application_fqn=application_fqn, command=command, params=params
+            application_fqn=application_fqn,
+            command=command,
+            params=params,
+            run_name_alias=run_name_alias,
         )


@@ -578,6 +578,7 @@ class ServiceFoundryServiceClient(BaseServiceFoundryServiceClient):
     def trigger_job(
         self,
         deployment_id: str,
+        run_name_alias: Optional[str] = None,
        command: Optional[str] = None,
         params: Optional[Dict[str, str]] = None,
     ) -> TriggerJobResult:
@@ -585,11 +586,14 @@ class ServiceFoundryServiceClient(BaseServiceFoundryServiceClient):
         body = {
             "deploymentId": deployment_id,
             "input": {},
+            "metadata": {},
         }
         if command:
             body["input"]["command"] = command
         if params:
             body["input"]["params"] = params
+        if run_name_alias:
+            body["metadata"]["job_run_name_alias"] = run_name_alias
         response = session_with_retries().post(
             url, json=body, headers=self._get_headers()
         )
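With a run_name_alias supplied, the request body assembled above would look roughly like this (all values are hypothetical placeholders):

    body = {
        "deploymentId": "<deployment-id>",
        "input": {"params": {"param1_name": "param1_value"}},
        "metadata": {"job_run_name_alias": "nightly-backfill"},
    }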
@@ -117,6 +117,7 @@ def trigger_job(
     application_fqn: str,
     command: Optional[Union[str, Sequence[str]]] = None,
     params: Optional[Dict[str, str]] = None,
+    run_name_alias: Optional[str] = None,
 ) -> TriggerJobResult:
     """
     Trigger a Job on TrueFoundry platform
@@ -178,6 +179,7 @@ def trigger_job(
         deployment_id=application_info.activeDeploymentId,
         command=command_str if command_str else None,
         params=params if params else None,
+        run_name_alias=run_name_alias.strip() if run_name_alias else None,
     )
     jobRunName = result.jobRunName
     previous_runs_url = f"{client.tfy_host.strip('/')}/deployments/{application_info.id}?tab=previousRuns"
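A minimal sketch of triggering a job with the new alias from Python, assuming the module above is truefoundry/deploy/lib/dao/application.py (as the RECORD changes suggest) and that its trigger_job is the entrypoint exposed to users; the FQN and values are placeholders:

    from truefoundry.deploy.lib.dao.application import trigger_job

    trigger_job(
        application_fqn="my-cluster:my-workspace:my-job",
        params={"param1_name": "param1_value"},
        run_name_alias="my_run_alias",
    )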
@@ -526,3 +526,42 @@ class SparkJobPythonNotebookEntrypoint(
     models.SparkJobPythonNotebookEntrypoint, PatchedModelBase
 ):
     type: Literal["python-notebook"] = "python-notebook"
+
+
+class SparkJobScalaNotebookEntrypoint(
+    models.SparkJobScalaNotebookEntrypoint, PatchedModelBase
+):
+    type: Literal["scala-notebook"] = "scala-notebook"
+
+
+class PySparkTaskConfig(models.PySparkTaskConfig, PatchedModelBase):
+    type: Literal["pyspark-task-config"] = "pyspark-task-config"
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        try:
+            import truefoundry.workflow.spark_task as _  # noqa: F401
+        except ImportError as e:
+            raise ImportError(
+                "truefoundry.workflow.spark_task is not installed. Please install it with `pip install truefoundry[workflow,spark]`"
+            ) from e
+
+
+class SparkDriverConfig(models.SparkDriverConfig, PatchedModelBase):
+    type: Literal["spark-driver-config"] = "spark-driver-config"
+
+
+class SparkExecutorConfig(models.SparkExecutorConfig, PatchedModelBase):
+    type: Literal["spark-executor-config"] = "spark-executor-config"
+
+
+class SparkExecutorFixedInstances(models.SparkExecutorFixedInstances, PatchedModelBase):
+    type: Literal["fixed"] = "fixed"
+
+
+class SparkExecutorDynamicScaling(models.SparkExecutorDynamicScaling, PatchedModelBase):
+    type: Literal["dynamic"] = "dynamic"
+
+
+class TaskPySparkBuild(models.TaskPySparkBuild, PatchedModelBase):
+    type: Literal["task-pyspark-build"] = "task-pyspark-build"
15
15
  from truefoundry.common.constants import ENV_VARS
16
16
  from truefoundry.deploy.v2.lib.patched_models import (
17
17
  ContainerTaskConfig,
18
+ PySparkTaskConfig,
18
19
  PythonTaskConfig,
19
20
  TaskDockerFileBuild,
20
21
  TaskPythonBuild,
@@ -41,6 +42,7 @@ __all__ = [
41
42
  "ExecutionConfig",
42
43
  "FlyteFile",
43
44
  "FlyteError",
45
+ "PySparkTaskConfig",
44
46
  ]
45
47
 
46
48
 
@@ -0,0 +1,91 @@
+import os
+import shutil
+from typing import Any, Callable, Dict, Optional
+
+from flytekit import FlyteContextManager, PythonFunctionTask, lazy_module
+from flytekit.configuration import SerializationSettings
+from flytekit.core.context_manager import ExecutionParameters
+from flytekit.extend import ExecutionState, TaskPlugins
+from flytekit.extend.backend.base_agent import AsyncAgentExecutorMixin
+
+from truefoundry.deploy.v2.lib.patched_models import PySparkTaskConfig
+
+pyspark_sql = lazy_module("pyspark.sql")
+SparkSession = pyspark_sql.SparkSession
+
+
+class TfySparkFunctionTask(
+    AsyncAgentExecutorMixin, PythonFunctionTask[PySparkTaskConfig]
+):
+    """
+    Actual Plugin that transforms the local python code for execution within a spark context
+    """
+
+    _SPARK_TASK_TYPE = "spark"
+
+    def __init__(
+        self,
+        task_config: PySparkTaskConfig,
+        task_function: Callable,
+        **kwargs,
+    ):
+        self.sess: Optional[SparkSession] = None  # type: ignore
+
+        task_type = self._SPARK_TASK_TYPE
+
+        super(TfySparkFunctionTask, self).__init__(
+            task_config=task_config,
+            task_type=task_type,
+            task_function=task_function,
+            **kwargs,
+        )
+
+    def get_custom(self, settings: SerializationSettings) -> Dict[str, Any]:
+        return {"truefoundry": self._task_config.dict()}
+
+    def pre_execute(self, user_params: ExecutionParameters) -> ExecutionParameters:
+        import pyspark as _pyspark
+
+        ctx = FlyteContextManager.current_context()
+        sess_builder = _pyspark.sql.SparkSession.builder.appName(
+            f"FlyteSpark: {user_params.execution_id}"
+        )
+        if not (
+            ctx.execution_state
+            and ctx.execution_state.mode == ExecutionState.Mode.TASK_EXECUTION
+        ):
+            # If either of above cases is not true, then we are in local execution of this task
+            # Add system spark-conf for local/notebook based execution.
+            spark_conf = _pyspark.SparkConf()
+            spark_conf.set("spark.driver.bindAddress", "127.0.0.1")
+            for k, v in self.task_config.spark_conf.items():
+                spark_conf.set(k, v)
+            # In local execution, propagate PYTHONPATH to executors too. This makes the spark
+            # execution hermetic to the execution environment. For example, it allows running
+            # Spark applications using Bazel, without major changes.
+            if "PYTHONPATH" in os.environ:
+                spark_conf.setExecutorEnv("PYTHONPATH", os.environ["PYTHONPATH"])
+            sess_builder = sess_builder.config(conf=spark_conf)
+
+        self.sess = sess_builder.getOrCreate()
+
+        if (
+            ctx.serialization_settings
+            and ctx.serialization_settings.fast_serialization_settings
+            and ctx.serialization_settings.fast_serialization_settings.enabled
+            and ctx.execution_state
+            and ctx.execution_state.mode == ExecutionState.Mode.TASK_EXECUTION
+        ):
+            file_name = "flyte_wf"
+            file_format = "zip"
+            shutil.make_archive(file_name, file_format, os.getcwd())
+            self.sess.sparkContext.addPyFile(f"{file_name}.{file_format}")
+
+        return user_params.builder().add_attr("SPARK_SESSION", self.sess).build()
+
+    def execute(self, **kwargs) -> Any:
+        return PythonFunctionTask.execute(self, **kwargs)
+
+
+# Inject the Spark plugin into flytekits dynamic plugin loading system
+TaskPlugins.register_pythontask_plugin(PySparkTaskConfig, TfySparkFunctionTask)
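Because the plugin is registered against PySparkTaskConfig, any function task declared with that config is dispatched through TfySparkFunctionTask, and the SparkSession created in pre_execute is exposed on the Flyte execution context under the SPARK_SESSION attribute. A rough sketch of how a task body would pick it up (the task function itself and its data are illustrative, not part of this diff):

    import flytekit

    def count_rows() -> int:
        # Inside a task executed by TfySparkFunctionTask, the session registered in
        # pre_execute above is available on the current execution context.
        spark = flytekit.current_context().SPARK_SESSION
        df = spark.createDataFrame([(1, "a"), (2, "b")], ["id", "value"])
        return df.count()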
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: truefoundry
-Version: 0.11.2
+Version: 0.11.3
 Summary: TrueFoundry CLI
 Author-email: TrueFoundry Team <abhishek@truefoundry.com>
 Requires-Python: <3.14,>=3.8.1
@@ -30,14 +30,17 @@ Requires-Dist: requirements-parser<0.12.0,>=0.11.0
 Requires-Dist: rich-click<2.0.0,>=1.2.1
 Requires-Dist: rich<14.0.0,>=13.7.1
 Requires-Dist: tqdm<5.0.0,>=4.0.0
-Requires-Dist: truefoundry-sdk<0.2.0,>=0.1.7
+Requires-Dist: truefoundry-sdk<0.2.0,>=0.1.9
 Requires-Dist: typing-extensions>=4.0
 Requires-Dist: urllib3<3,>=1.26.18
 Requires-Dist: yq<4.0.0,>=3.1.0
 Provides-Extra: ai
 Requires-Dist: mcp==1.9.4; (python_version >= '3.10') and extra == 'ai'
+Provides-Extra: spark
+Requires-Dist: flytekit==1.15.3; (python_version >= '3.9' and python_version <= '3.12') and extra == 'spark'
+Requires-Dist: flytekitplugins-spark==1.15.3; (python_version >= '3.9' and python_version <= '3.12') and extra == 'spark'
 Provides-Extra: workflow
-Requires-Dist: flytekit==1.15.3; (python_version >= '3.9' and python_version < '3.13') and extra == 'workflow'
+Requires-Dist: flytekit==1.15.3; (python_version >= '3.9' and python_version <= '3.12') and extra == 'workflow'
 Description-Content-Type: text/markdown

 # TrueFoundry
@@ -1,4 +1,4 @@
-truefoundry/__init__.py,sha256=z9iNNI3mqVWkvzBXMRu096jXaYloYSN6rnlTbfmKcJE,1056
+truefoundry/__init__.py,sha256=xCzLSYqNASD0k3bJXR3Hranqf-9TIlD4gnHnQIUh_M8,1122
 truefoundry/_client.py,sha256=Y3qHi_Lg4Sx6GNvsjAHIoAfFr8PJnqgCrXmpNAI3ECg,1417
 truefoundry/logger.py,sha256=u-YCNjg5HBwE70uQcpjIG64Ghos-K2ulTWaxC03BSj4,714
 truefoundry/pydantic_v1.py,sha256=jSuhGtz0Mbk1qYu8jJ1AcnIDK4oxUsdhALc4spqstmM,345
@@ -40,7 +40,7 @@ truefoundry/cli/display_util.py,sha256=9vzN3mbQqU6OhS7qRUiMRana4PTHa4sDTA0Hn7OVj
 truefoundry/cli/util.py,sha256=kEjC20-n_jwxZV9jq-78CxDk4xAySxAoYIXTxZfJzLM,5423
 truefoundry/common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 truefoundry/common/auth_service_client.py,sha256=N3YxKlx63r6cPZqbgb2lqBOPI69ShB7D7RCIq4FSCjc,7949
-truefoundry/common/constants.py,sha256=nWd3Je71WmHEORRUTCupZy5fWADqEFftjYP6wiYhCIc,4627
+truefoundry/common/constants.py,sha256=3GH-KgEuBp-Gmaq_lreudKCKKPVPdEsGME7xdvIOpBA,4710
 truefoundry/common/credential_file_manager.py,sha256=1yEk1Zm2xS4G0VDFwKSZ4w0VUrcPWQ1nJnoBaz9xyKA,4251
 truefoundry/common/credential_provider.py,sha256=_OhJ2XFlDaVsrUO-FyywxctcGGqDdC2pgcvwEKqQD0Q,4071
 truefoundry/common/entities.py,sha256=b4R6ss06-ygDS3C4Tqa_GOq5LFKDYbt7x4Mghnfz6yo,4007
@@ -52,22 +52,24 @@ truefoundry/common/storage_provider_utils.py,sha256=yURhMw8k0FLFvaviRHDiifhvc6Gn
 truefoundry/common/types.py,sha256=BMJFCsR1lPJAw66IQBSvLyV4I6o_x5oj78gVsUa9si8,188
 truefoundry/common/utils.py,sha256=P0FuAadoJGdpieUORLSN-PiFnkyoGO-K2cS4OPITBWg,6714
 truefoundry/common/warnings.py,sha256=xDMhR_-ZGC40Ycaj6nlFb5MYPexn8WbKCHd4FlflTXQ,705
-truefoundry/deploy/__init__.py,sha256=PVbGPU9S3-dTFn5LvLwaEnfsp2RrGT9iiM7_15kOV84,2837
+truefoundry/deploy/__init__.py,sha256=sP-6Nv-_uV2o3knWcNSGV07j_Hkq0lfUkfZffBg-Hfo,2874
 truefoundry/deploy/python_deploy_codegen.py,sha256=k19_m5DGsUyjOUCSKwIVP8vDna2sq01tHABsUfoVpW4,8019
-truefoundry/deploy/_autogen/models.py,sha256=8j_y0Yp8k8Sjj7iVtZDHeuxq9kDvD0xI8-iFnbf0370,73571
-truefoundry/deploy/builder/__init__.py,sha256=kgvlkVkiWpMVdim81tIeLrdoACqrFDgwCqHdQVsCsMo,4988
+truefoundry/deploy/_autogen/models.py,sha256=oOBwFb7qumBz1XXllns8wL02-NVvYq5iOOGZlEP3BzU,75893
+truefoundry/deploy/builder/__init__.py,sha256=VR07ZB7ziONEBbVgg1JdRTWY7t4qJjJTMhc2VodXYdA,5036
 truefoundry/deploy/builder/constants.py,sha256=amUkHoHvVKzGv0v_knfiioRuKiJM0V0xW0diERgWiI0,508
 truefoundry/deploy/builder/docker_service.py,sha256=sm7GWeIqyrKaZpxskdLejZlsxcZnM3BTDJr6orvPN4E,3948
-truefoundry/deploy/builder/utils.py,sha256=D68-bqM0NQx-Elg-56mtkENyVyg9faZ9tgTmBuo1Sjs,1076
-truefoundry/deploy/builder/builders/__init__.py,sha256=aomhWdR5L7uSM-GUalw9SnFHD2FQ_n-yFe4NH6nyNxw,715
+truefoundry/deploy/builder/utils.py,sha256=4TO0f3qMFGfFoBK0two1P59jgxlNjUoZYHYRgStcovM,3694
+truefoundry/deploy/builder/builders/__init__.py,sha256=Gp9NODR1E7mUjadhzIe3zzO43bBfHPeNcEDryYF2uo0,807
 truefoundry/deploy/builder/builders/dockerfile.py,sha256=XMbMlPUTMPCyaHl7jJQY1ODtlRkpI61PcvgG6Ck5jNc,1522
 truefoundry/deploy/builder/builders/tfy_notebook_buildpack/__init__.py,sha256=RGWGqY8xOF7vycUPJd10N7ZzahWv24lO0anrOPtLuDU,1796
 truefoundry/deploy/builder/builders/tfy_notebook_buildpack/dockerfile_template.py,sha256=rQgdvKmAT9HArVW4TAG5yd2QTKRs3S5LJ9RQbc_EkHE,2518
 truefoundry/deploy/builder/builders/tfy_python_buildpack/__init__.py,sha256=_fjqHKn80qKi68SAMMALge7_A6e1sTsQWichw8uoGIw,2025
-truefoundry/deploy/builder/builders/tfy_python_buildpack/dockerfile_template.py,sha256=f4l3fH21E2b8W3-JotMKc0AdPcCxV7LRPxxYJa7z_UQ,9134
+truefoundry/deploy/builder/builders/tfy_python_buildpack/dockerfile_template.py,sha256=Kj-ICGFTpDj86v6Juohz7q2TNYpcGIeKBW5HADG7SGE,6704
 truefoundry/deploy/builder/builders/tfy_spark_buildpack/__init__.py,sha256=NEPlM6_vTVxp4ITa18B8DBbgYCn1q5d8be21lbgu5oY,2888
-truefoundry/deploy/builder/builders/tfy_spark_buildpack/dockerfile_template.py,sha256=2zohUaW8Yw_QREHlpRW7Pooomt19HJh44fHjlsiDmwM,6064
-truefoundry/deploy/builder/builders/tfy_spark_buildpack/tfy_execute_notebook.py,sha256=QvawKw30dcHROJ05XQU2KgwH3gtUmEGSkuLxiuPNJ2c,5899
+truefoundry/deploy/builder/builders/tfy_spark_buildpack/dockerfile_template.py,sha256=nMJJfxjy8R7BZK89KicerQQwKLspUSJ3kerWZI3hFxk,4571
+truefoundry/deploy/builder/builders/tfy_spark_buildpack/tfy_execute_notebook.py,sha256=-D37Zjy2SBt3RHxonPEpR1_LR0W7vTSM1kQ1S-fdK-I,6363
+truefoundry/deploy/builder/builders/tfy_task_pyspark_buildpack/__init__.py,sha256=ynnjrFDg1__REd_x-npxxj-5zmFo46z_Ntz7GZ9-DHI,1819
+truefoundry/deploy/builder/builders/tfy_task_pyspark_buildpack/dockerfile_template.py,sha256=cjyPCLXJ8x5blaKbTK4XQ-4tO7DJqe5I9Fw2EJoLmtk,4555
 truefoundry/deploy/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 truefoundry/deploy/cli/commands/__init__.py,sha256=qv818jxqSAygJ3h-6Ul8t-5VOgR_UrSgsVtNCl3e5G0,1408
 truefoundry/deploy/cli/commands/apply_command.py,sha256=DmXmKVokkauyKIiJDtErTwbJ5_LvQeJbTQsG5BjyKpo,2427
@@ -84,7 +86,7 @@ truefoundry/deploy/cli/commands/logs_command.py,sha256=osl2z5VaIceB9sYa6GtwsuyAP
 truefoundry/deploy/cli/commands/patch_application_command.py,sha256=aRTHu2OmxQd7j9iE0RavsFCkCILp0rGh4DJO51Oij5I,2591
 truefoundry/deploy/cli/commands/patch_command.py,sha256=wA95khMO9uVz8SaJlgYMUwaX7HagtchjyxXXATq83Bk,1665
 truefoundry/deploy/cli/commands/terminate_comand.py,sha256=UKhOdbAej8ubX3q44vpLrOotAcvH4vHpRZJQrRf_AfM,1077
-truefoundry/deploy/cli/commands/trigger_command.py,sha256=_qSl-AShepZpbGUGTfLfJGd74VJJ_wd3eXYt2DfxIFo,4716
+truefoundry/deploy/cli/commands/trigger_command.py,sha256=-FZy_XnFsexH5SOWzDc4Dj9fTwmdjene_EaLjJPmG2c,5119
 truefoundry/deploy/cli/commands/utils.py,sha256=mIMYbHuAxnT0yz_0PU8LDC9sAZPU_xURZFMOrGoasuc,3694
 truefoundry/deploy/core/__init__.py,sha256=j61bMWj4BkWihdssKMSFhieo7afJDtpc7qO7zk1rDB4,140
 truefoundry/deploy/core/login.py,sha256=N2VrW3nlBzoyoYulkipxwQvCpjBhi3sfsmhxK1ktWhg,236
@@ -102,9 +104,9 @@ truefoundry/deploy/lib/session.py,sha256=fLdgR6ZDp8-hFl5NTON4ngnWLsMzGxvKtfpDOOw
 truefoundry/deploy/lib/util.py,sha256=J7r8San2wKo48A7-BlH2-OKTlBO67zlPjLEhMsL8os0,1059
 truefoundry/deploy/lib/win32.py,sha256=1RcvPTdlOAJ48rt8rCbE2Ufha2ztRqBAE9dueNXArrY,5009
 truefoundry/deploy/lib/clients/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-truefoundry/deploy/lib/clients/servicefoundry_client.py,sha256=nSaaD91ONpDkRyOWHGv2VerPzdtn-Z3UF0iloj00VVU,27200
+truefoundry/deploy/lib/clients/servicefoundry_client.py,sha256=JIj0Rs5PVZzXeh2QubLaVjgMJiUkfHrIMTtZMpgBmiA,27369
 truefoundry/deploy/lib/dao/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-truefoundry/deploy/lib/dao/application.py,sha256=oMszpueXPUfTUuN_XdKwoRjQyqAgWHhZ-10cbprCVdM,9226
+truefoundry/deploy/lib/dao/application.py,sha256=wRM4b3Mr4XCm838s8YjXlPpkKaSFbGgMyliEZRJra2k,9343
 truefoundry/deploy/lib/dao/apply.py,sha256=F7I8yp-IZir_6CL9NPE_KFD9rgicVJn2vcIv1a3MpuA,3771
 truefoundry/deploy/lib/dao/delete.py,sha256=uPL2psqWNw2O0oDikXJOlVxmG8n5d3Z0Ia9qZwqCn_8,2735
 truefoundry/deploy/lib/dao/version.py,sha256=AtdW_4O1DPUKdfv2qy6iUJsZ_95vM6z0AqeEy3WDKs8,1130
@@ -117,7 +119,7 @@ truefoundry/deploy/v2/lib/deploy.py,sha256=Ltm7cpIW14IbmEsR3EAIeWQUch2Z6HLej7heu
 truefoundry/deploy/v2/lib/deploy_workflow.py,sha256=G5BzMIbap8pgDX1eY-TITruUxQdkKhYtBmRwLL6lDeY,14342
 truefoundry/deploy/v2/lib/deployable_patched_models.py,sha256=mUi-OjPf7bc8rzfrPLdFb79LKuDq7F36RxL4V-AXebs,6830
 truefoundry/deploy/v2/lib/models.py,sha256=ogc1UYs1Z2nBdGSKCrde9sk8d0GxFKMkem99uqO5CmM,1148
-truefoundry/deploy/v2/lib/patched_models.py,sha256=oNsOr5ojVn2XHjATD3VLuuO6w_ljDL99siHXy6y3Y0g,15558
+truefoundry/deploy/v2/lib/patched_models.py,sha256=bsznDLcUH5GcW8SUEvHETJqoFGlYYJ0j-tyGIqnRraw,16911
 truefoundry/deploy/v2/lib/source.py,sha256=d6-8_6Zn5koBglqrBrY6ZLG_7yyPuLdyEmK4iZTw6xY,9405
 truefoundry/ml/__init__.py,sha256=EEEHV7w58Krpo_W9Chd8Y3TdItfFO3LI6j6Izqc4-P8,2219
 truefoundry/ml/constants.py,sha256=vDq72d4C9FSWqr9MMdjgTF4TuyNFApvo_6RVsSeAjB4,2837
@@ -370,17 +372,18 @@ truefoundry/ml/log_types/image/constants.py,sha256=wLtGEOA4T5fZHSlOXPuNDLX3lpbCt
 truefoundry/ml/log_types/image/image.py,sha256=sa0tBHdyluC8bELXY16E0HgFrUDnDBxHrteix4BFXcs,12479
 truefoundry/ml/log_types/image/image_normalizer.py,sha256=vrzfuSpVGgIxw_Q2sbFe7kQ_JpAndX0bMwC7wtfi41g,3104
 truefoundry/ml/log_types/image/types.py,sha256=inFQlyAyDvZtfliFpENirNCm1XO9beyZ8DNn97DoDKs,1568
-truefoundry/workflow/__init__.py,sha256=8wjsorcOGzCAWGqLRbAUf8eyezxpnB4NvXHX_rdO7ks,1656
+truefoundry/workflow/__init__.py,sha256=tscHelUxCnzkZBVdeEHtW7HjTsUzwMxIQQ_iAIAL0tM,1704
 truefoundry/workflow/container_task.py,sha256=8arieePsX4__OnG337hOtCiNgJwtKJJCsZcmFmCBJtk,402
 truefoundry/workflow/map_task.py,sha256=f9vcAPRQy0Ttw6bvdZBKUVJMSm4eGQrbE1GHWhepHIU,1864
 truefoundry/workflow/python_task.py,sha256=SRXRLC4vdBqGjhkwuaY39LEWN6iPCpJAuW17URRdWTY,1128
+truefoundry/workflow/spark_task.py,sha256=qHVxFD_sQqiK34zwOq9Idbme_ut6Z151QwfaWT4IiMA,3550
 truefoundry/workflow/task.py,sha256=34m55mALXx6ko9o5HkK6FDtMajdvJzBhOsHwDM2RcBA,1779
 truefoundry/workflow/workflow.py,sha256=OjKBwEArxTzNDpfJWgnIqkXDQrYQRLXjheRwpOCu3LE,4861
 truefoundry/workflow/remote_filesystem/__init__.py,sha256=LQ95ViEjJ7Ts4JcCGOxMPs7NZmQdZ4bTiq6qXtsjUhE,206
 truefoundry/workflow/remote_filesystem/logger.py,sha256=em2l7D6sw7xTLDP0kQSLpgfRRCLpN14Qw85TN7ujQcE,1022
 truefoundry/workflow/remote_filesystem/tfy_signed_url_client.py,sha256=xcT0wQmQlgzcj0nP3tJopyFSVWT1uv3nhiTIuwfXYeg,12342
 truefoundry/workflow/remote_filesystem/tfy_signed_url_fs.py,sha256=nSGPZu0Gyd_jz0KsEE-7w_BmnTD8CVF1S8cUJoxaCbc,13305
-truefoundry-0.11.2.dist-info/METADATA,sha256=7n9RCZ9gGKy-q0nVRgja-FIyJEBcgo4eXtbrCPpfIFk,2505
-truefoundry-0.11.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-truefoundry-0.11.2.dist-info/entry_points.txt,sha256=xVjn7RMN-MW2-9f7YU-bBdlZSvvrwzhpX1zmmRmsNPU,98
-truefoundry-0.11.2.dist-info/RECORD,,
+truefoundry-0.11.3.dist-info/METADATA,sha256=IkAQz7euSz52XyM6XyCJREpVOiT0EnVKHjmqKArbl_0,2759
+truefoundry-0.11.3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+truefoundry-0.11.3.dist-info/entry_points.txt,sha256=xVjn7RMN-MW2-9f7YU-bBdlZSvvrwzhpX1zmmRmsNPU,98
+truefoundry-0.11.3.dist-info/RECORD,,