truefoundry-0.9.4-py3-none-any.whl → truefoundry-0.10.1rc1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of truefoundry might be problematic.

@@ -65,7 +65,8 @@ class TrueFoundrySdkEnv(BaseSettings):
     # For customizing the python image used for building via PythonBuild
     TFY_PYTHONBUILD_PYTHON_IMAGE_REPO: str = "public.ecr.aws/docker/library/python"
 
-    TFY_SPARK_BUILD_SPARK_IMAGE_REPO: str = "apache/spark"
+    # TODO(gw): Use another image with more lenient rate limits
+    TFY_SPARK_BUILD_SPARK_IMAGE_REPO: str = "public.ecr.aws/bitnami/spark"
 
     # For local development, this enables further configuration via _TFYServersConfig
     TFY_CLI_LOCAL_DEV_MODE: bool = False
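
Since TrueFoundrySdkEnv is a pydantic BaseSettings class, the new default can presumably be overridden per environment without a code change. A minimal sketch, assuming standard BaseSettings env-var resolution and a hypothetical private mirror:

import os

# Hypothetical mirror URL; BaseSettings reads the field from the environment,
# so this stands in for the public.ecr.aws/bitnami/spark default.
os.environ["TFY_SPARK_BUILD_SPARK_IMAGE_REPO"] = "registry.example.com/mirror/spark"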
@@ -113,6 +113,9 @@ from truefoundry.deploy.v2.lib.patched_models import (
     ServiceAutoscaling,
     SlackBot,
     SlackWebhook,
+    SparkBuild,
+    SparkImage,
+    SparkImageBuild,
     SparkJobJavaEntrypoint,
     SparkJobPythonEntrypoint,
     SparkJobScalaEntrypoint,
@@ -1,6 +1,6 @@
 # generated by datamodel-codegen:
 #   filename: application.json
-#   timestamp: 2025-06-08T14:28:34+00:00
+#   timestamp: 2025-06-09T12:01:27+00:00
 
 from __future__ import annotations
 
@@ -643,7 +643,7 @@ class PythonBuild(BaseModel):
     )
     cuda_version: Optional[
         constr(
-            regex=r"^((\d+\.\d+(\.\d+)?-cudnn\d+-(runtime|devel)-ubuntu\d+\.\d+)|11\.0-cudnn8|11\.1-cudnn8|11\.2-cudnn8|11\.3-cudnn8|11\.4-cudnn8|11\.5-cudnn8|11\.6-cudnn8|11\.7-cudnn8|11\.8-cudnn8|12\.0-cudnn8|12\.1-cudnn8|12\.2-cudnn8)$"
+            regex=r"^((\d+\.\d+(\.\d+)?-cudnn\d+-(runtime|devel)-ubuntu\d+\.\d+)|11\.0-cudnn8|11\.1-cudnn8|11\.2-cudnn8|11\.3-cudnn8|11\.4-cudnn8|11\.5-cudnn8|11\.6-cudnn8|11\.7-cudnn8|11\.8-cudnn8|12\.0-cudnn8|12\.1-cudnn8|12\.2-cudnn8|12\.3-cudnn9|12\.4-cudnn9|12\.5-cudnn9|12\.6-cudnn9)$"
         )
     ] = Field(
         None,
@@ -840,7 +840,7 @@ class SlackWebhook(BaseModel):
     )
 
 
-class SparkBuildSpec(BaseModel):
+class SparkBuild(BaseModel):
     """
     Describes that we are using python to build a container image with a specific python version and pip packages installed.
     """
@@ -850,12 +850,6 @@ class SparkBuildSpec(BaseModel):
         "3.5.2",
         description="Spark version should match the spark version installed in the image.",
     )
-    python_version: Optional[constr(regex=r"^\d+(\.\d+){1,2}([\-\.a-z0-9]+)?$")] = (
-        Field(
-            None,
-            description="Python version to run your application. Should be one of the tags listed on [Official Python Docker Page](https://hub.docker.com/_/python)",
-        )
-    )
     build_context_path: str = Field(
         "./", description="Build path relative to project root path."
     )
@@ -863,14 +857,6 @@ class SparkBuildSpec(BaseModel):
         None,
         description="Path to `requirements.txt` relative to\n`Path to build context`",
     )
-    pip_packages: Optional[List[str]] = Field(
-        None,
-        description='Define pip package requirements.\nIn Python/YAML E.g. ["fastapi>=0.90,<1.0", "uvicorn"]',
-    )
-    apt_packages: Optional[List[str]] = Field(
-        None,
-        description='Debian packages to install via `apt get`.\nIn Python/YAML E.g. ["git", "ffmpeg", "htop"]',
-    )
 
 
 class SparkDriverConfig(BaseModel):
@@ -897,7 +883,7 @@ class SparkImage(BaseModel):
     Describes that we are using a pre-built image stored in a Docker Image registry
     """
 
-    type: Literal["image"] = Field(..., description="")
+    type: Literal["spark-image"] = Field(..., description="")
     spark_version: str = Field(
         "3.5.2",
         description="Spark version should match the spark version installed in the image.",
@@ -912,6 +898,20 @@ class SparkImage(BaseModel):
     )
 
 
+class SparkImageBuild(BaseModel):
+    """
+    Describes that we are building a new image based on the spec
+    """
+
+    type: Literal["spark-image-build"] = Field(..., description="")
+    docker_registry: Optional[constr(regex=r"^\S+$")] = Field(
+        None,
+        description="FQN of the container registry. If you can't find your registry here,\nadd it through the [Integrations](/integrations?tab=docker-registry) page",
+    )
+    build_source: GitSource
+    build_spec: SparkBuild
+
+
 class SparkJobJavaEntrypoint(BaseModel):
     type: Literal["java"] = Field(..., description="")
     main_application_file: str = Field(
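
For context, SparkImageBuild is the new "build an image for the Spark job" variant. A rough sketch of an equivalent spec as a plain dict; the GitSource fields (repo_url, ref) are assumptions for illustration, not taken from this diff:

spark_job_image = {
    "type": "spark-image-build",
    "build_source": {
        "type": "git",  # hypothetical GitSource shape
        "repo_url": "https://github.com/example-org/spark-jobs",
        "ref": "main",
    },
    "build_spec": {
        "type": "tfy-spark-buildpack",  # discriminator pinned by the patched SparkBuild model later in this diff
        "spark_version": "3.5.2",
        "build_context_path": "./",
        "requirements_path": "requirements.txt",
    },
}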
@@ -1011,7 +1011,7 @@ class TaskPythonBuild(BaseModel):
     )
     cuda_version: Optional[
         constr(
-            regex=r"^((\d+\.\d+(\.\d+)?-cudnn\d+-(runtime|devel)-ubuntu\d+\.\d+)|11\.0-cudnn8|11\.1-cudnn8|11\.2-cudnn8|11\.3-cudnn8|11\.4-cudnn8|11\.5-cudnn8|11\.6-cudnn8|11\.7-cudnn8|11\.8-cudnn8|12\.0-cudnn8|12\.1-cudnn8|12\.2-cudnn8)$"
+            regex=r"^((\d+\.\d+(\.\d+)?-cudnn\d+-(runtime|devel)-ubuntu\d+\.\d+)|11\.0-cudnn8|11\.1-cudnn8|11\.2-cudnn8|11\.3-cudnn8|11\.4-cudnn8|11\.5-cudnn8|11\.6-cudnn8|11\.7-cudnn8|11\.8-cudnn8|12\.0-cudnn8|12\.1-cudnn8|12\.2-cudnn8|12\.3-cudnn9|12\.4-cudnn9|12\.5-cudnn9|12\.6-cudnn9)$"
         )
     ] = Field(
         None,
@@ -1432,23 +1432,6 @@ class SSHServer(BaseWorkbenchInput):
     )
 
 
-class SparkBuild(BaseModel):
-    """
-    Describes that we are building a new image based on the spec
-    """
-
-    type: Literal["build"] = Field(..., description="")
-    docker_registry: Optional[constr(regex=r"^\S+$")] = Field(
-        None,
-        description="FQN of the container registry. If you can't find your registry here,\nadd it through the [Integrations](/integrations?tab=docker-registry) page",
-    )
-    build_source: Union[GitSource, LocalSource] = Field(..., description="")
-    build_spec: Union[SparkBuildSpec, PythonBuild] = Field(
-        ...,
-        description="Instructions to build a container image out of the build source",
-    )
-
-
 class SparkExecutorConfig(BaseModel):
     instances: Union[SparkExecutorFixedInstances, SparkExecutorDynamicScaling] = Field(
         {"type": "fixed", "count": 1}, description=""
@@ -1461,7 +1444,7 @@ class SparkJob(BaseModel):
     name: constr(regex=r"^[a-z](?:[a-z0-9]|-(?!-)){1,30}[a-z0-9]$") = Field(
         ..., description="Name of the job"
     )
-    image: Union[SparkImage, SparkBuild] = Field(
+    image: Union[SparkImage, SparkImageBuild] = Field(
         ...,
         description="The image to use for driver and executors. Must have spark installed. Spark version must match the version in the image.",
     )
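
The renamed union members keep distinct type literals, which is what lets pydantic's discriminated-union support pick the correct model while parsing. A self-contained sketch of the mechanism with simplified stand-in models (image_uri is a made-up field, not the package's):

from typing import Literal, Union

from pydantic import BaseModel, Field


class SparkImage(BaseModel):
    type: Literal["spark-image"]
    image_uri: str  # stand-in field for illustration


class SparkImageBuild(BaseModel):
    type: Literal["spark-image-build"]


class SparkJob(BaseModel):
    image: Union[SparkImage, SparkImageBuild] = Field(..., discriminator="type")


job = SparkJob(image={"type": "spark-image", "image_uri": "apache/spark:3.5.2"})
assert isinstance(job.image, SparkImage)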
@@ -2,8 +2,8 @@ from typing import Any, Dict, List, Optional, Union
 
 from truefoundry.deploy._autogen.models import (
     DockerFileBuild,
-    SparkBuildSpec,
     PythonBuild,
+    SparkBuild,
     TaskDockerFileBuild,
     TaskPythonBuild,
 )
@@ -21,7 +21,7 @@ class _BuildConfig(BaseModel):
         NotebookImageBuild,
         TaskPythonBuild,
         TaskDockerFileBuild,
-        SparkBuildSpec,
+        SparkBuild,
     ] = Field(discriminator="type")
 
 
@@ -3,7 +3,8 @@ import shutil
 from tempfile import TemporaryDirectory
 from typing import List, Optional
 
-from truefoundry.deploy._autogen.models import DockerFileBuild, SparkBuildSpec
+from truefoundry.common.constants import PythonPackageManager
+from truefoundry.deploy._autogen.models import DockerFileBuild, SparkBuild
 from truefoundry.deploy.builder.builders import dockerfile
 from truefoundry.deploy.builder.builders.tfy_spark_buildpack.dockerfile_template import (
     generate_dockerfile_content,
@@ -14,13 +15,14 @@ __all__ = ["generate_dockerfile_content", "build"]
 
 
 def _convert_to_dockerfile_build_config(
-    build_configuration: SparkBuildSpec,
+    build_configuration: SparkBuild,
     dockerfile_path: str,
     mount_python_package_manager_conf_secret: bool = False,
 ) -> DockerFileBuild:
     dockerfile_content = generate_dockerfile_content(
         build_configuration=build_configuration,
         mount_python_package_manager_conf_secret=mount_python_package_manager_conf_secret,
+        package_manager=PythonPackageManager.PIP.value,
     )
     with open(dockerfile_path, "w", encoding="utf8") as fp:
         fp.write(dockerfile_content)
@@ -34,27 +36,28 @@ def _convert_to_dockerfile_build_config(
 
 def build(
     tag: str,
-    build_configuration: SparkBuildSpec,
+    build_configuration: SparkBuild,
     extra_opts: Optional[List[str]] = None,
 ):
-    if not build_configuration.spark_version:
-        raise ValueError(
-            "`spark_version` is required for `tfy-spark-buildpack` builder"
-        )
     mount_python_package_manager_conf_secret = (
         has_python_package_manager_conf_secret(extra_opts) if extra_opts else False
     )
-
-    # Copy execute_notebook.py to the build context
-    execute_notebook_src = os.path.join(os.path.dirname(__file__), "execute_notebook.py")
-    execute_notebook_dst = os.path.join(build_configuration.build_context_path, "execute_notebook.py")
-
-    # Track if we copied the file to clean it up later
-    copied_execute_notebook = False
-    if not os.path.exists(execute_notebook_dst):
-        shutil.copy2(execute_notebook_src, execute_notebook_dst)
-        copied_execute_notebook = True
-
+
+    # Copy tfy_execute_notebook.py to the build context
+    execute_notebook_src = os.path.join(
+        os.path.dirname(__file__), "tfy_execute_notebook.py"
+    )
+    execute_notebook_dst = os.path.join(
+        build_configuration.build_context_path, "tfy_execute_notebook.py"
+    )
+
+    # Verify the source file exists before copying
+    if not os.path.isfile(execute_notebook_src):
+        raise FileNotFoundError(f"Required file not found: {execute_notebook_src}")
+
+    # Always copy the file, overwrite if exists
+    shutil.copy2(execute_notebook_src, execute_notebook_dst)
+
    try:
        with TemporaryDirectory() as local_dir:
            docker_build_configuration = _convert_to_dockerfile_build_config(
@@ -69,7 +72,7 @@ def build(
             )
     finally:
         # Clean up the copied file if we copied it
-        if copied_execute_notebook and os.path.exists(execute_notebook_dst):
+        if os.path.exists(execute_notebook_dst):
             try:
                 os.remove(execute_notebook_dst)
             except OSError:
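
The build now always stages the runner script into the context and always removes it afterwards. The same stage-then-clean-up idea could be packaged as a context manager; a sketch under those assumptions, not the package's actual helper:

import os
import shutil
from contextlib import contextmanager


@contextmanager
def staged_file(src: str, context_dir: str):
    # Copy src into the build context, overwriting any existing copy,
    # and remove it again once the caller is done.
    if not os.path.isfile(src):
        raise FileNotFoundError(f"Required file not found: {src}")
    dst = os.path.join(context_dir, os.path.basename(src))
    shutil.copy2(src, dst)
    try:
        yield dst
    finally:
        if os.path.exists(dst):
            os.remove(dst)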
@@ -1,10 +1,10 @@
 import shlex
-from typing import Dict, List, Optional
+from typing import List, Optional
 
 from mako.template import Template
 
 from truefoundry.common.constants import ENV_VARS, PythonPackageManager
-from truefoundry.deploy._autogen.models import SparkBuildSpec
+from truefoundry.deploy._autogen.models import SparkBuild
 from truefoundry.deploy.builder.constants import (
     PIP_CONF_BUILDKIT_SECRET_MOUNT,
     PIP_CONF_SECRET_MOUNT_AS_ENV,
@@ -17,21 +17,19 @@ from truefoundry.deploy.v2.lib.patched_models import (
 
 # TODO (chiragjn): Switch to a non-root user inside the container
 
-DEFAULT_PYTHON_IMAGE_REPO = "apache/spark"
-
 _POST_PYTHON_INSTALL_TEMPLATE = """
-% if apt_install_command is not None:
-RUN ${apt_install_command}
-% endif
 % if requirements_path is not None:
 COPY ${requirements_path} ${requirements_destination_path}
 % endif
 % if python_packages_install_command is not None:
 RUN ${package_manager_config_secret_mount} ${python_packages_install_command}
 % endif
-COPY . /app
-WORKDIR /app
 USER spark
+COPY . /app
+"""
+
+_POST_USER_TEMPLATE = """
+COPY tfy_execute_notebook.py /app/tfy_execute_notebook.py
 """
 
 DOCKERFILE_TEMPLATE = Template(
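
The % if guards are mako conditionals, so template arguments left as None simply drop their lines from the rendered Dockerfile. A quick illustration with a made-up install command:

from mako.template import Template

tpl = Template(
    """% if python_packages_install_command is not None:
RUN ${python_packages_install_command}
% endif
USER spark
COPY . /app
"""
)
# Renders the RUN line; passing python_packages_install_command=None would omit it.
print(tpl.render(python_packages_install_command="python3 -m pip install papermill"))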
@@ -41,25 +39,12 @@ USER root
 RUN apt update && \
     DEBIAN_FRONTEND=noninteractive apt install -y --no-install-recommends git && \
     rm -rf /var/lib/apt/lists/*
-COPY execute_notebook.py /app/execute_notebook.py
 """
-# + _POST_PYTHON_INSTALL_TEMPLATE
+    + _POST_PYTHON_INSTALL_TEMPLATE
+    + _POST_USER_TEMPLATE
 )
 
-ADDITIONAL_PIP_PACKAGES = ['papermill']
-
-def generate_apt_install_command(apt_packages: Optional[List[str]]) -> Optional[str]:
-    packages_list = None
-    if apt_packages:
-        packages_list = " ".join(p.strip() for p in apt_packages if p.strip())
-    if not packages_list:
-        return None
-    apt_update_command = "apt update"
-    apt_install_command = f"DEBIAN_FRONTEND=noninteractive apt install -y --no-install-recommends {packages_list}"
-    clear_apt_lists_command = "rm -rf /var/lib/apt/lists/*"
-    return " && ".join(
-        [apt_update_command, apt_install_command, clear_apt_lists_command]
-    )
+ADDITIONAL_PIP_PACKAGES = ["papermill>=2.6.0,<2.7.0", "ipykernel>=6.0.0,<7.0.0"]
 
 
 def generate_pip_install_command(
@@ -67,12 +52,12 @@ def generate_pip_install_command(
     pip_packages: Optional[List[str]],
     mount_pip_conf_secret: bool = False,
 ) -> Optional[str]:
-    upgrade_pip_command = "python -m pip install -U pip setuptools wheel"
+    upgrade_pip_command = "python3 -m pip install -U pip setuptools wheel"
     envs = []
     if mount_pip_conf_secret:
         envs.append(PIP_CONF_SECRET_MOUNT_AS_ENV)
 
-    command = ["python", "-m", "pip", "install", "--use-pep517", "--no-cache-dir"]
+    command = ["python3", "-m", "pip", "install", "--use-pep517", "--no-cache-dir"]
     args = []
     if requirements_path:
         args.append("-r")
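
The generated command chains the pip upgrade and the install into a single RUN layer. A rough standalone rendering of the same shape (output format inferred from the code above, not copied verbatim):

import shlex
from typing import List, Optional


def sketch_pip_install_command(requirements_path: Optional[str], pip_packages: List[str]) -> str:
    # Mirrors the structure above: upgrade pip, then install the requirements
    # file and the pinned extra packages in one command.
    upgrade = "python3 -m pip install -U pip setuptools wheel"
    install = ["python3", "-m", "pip", "install", "--use-pep517", "--no-cache-dir"]
    if requirements_path:
        install += ["-r", requirements_path]
    install += [shlex.quote(p) for p in pip_packages]
    return " && ".join([upgrade, " ".join(install)])


print(sketch_pip_install_command("requirements.txt", ["papermill>=2.6.0,<2.7.0"]))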
@@ -96,7 +81,7 @@ def generate_uv_pip_install_command(
     pip_packages: Optional[List[str]],
     mount_uv_conf_secret: bool = False,
 ) -> Optional[str]:
-    upgrade_pip_command = "python -m pip install -U pip setuptools wheel"
+    upgrade_pip_command = "python3 -m pip install -U pip setuptools wheel"
     uv_mount = f"--mount=from={ENV_VARS.TFY_PYTHON_BUILD_UV_IMAGE_URI},source=/uv,target=/usr/local/bin/uv"
     envs = [
         "UV_LINK_MODE=copy",
@@ -128,7 +113,7 @@ def generate_uv_pip_install_command(
 
 
 def generate_dockerfile_content(
-    build_configuration: SparkBuildSpec,
+    build_configuration: SparkBuild,
     package_manager: str = ENV_VARS.TFY_PYTHON_BUILD_PACKAGE_MANAGER,
     mount_python_package_manager_conf_secret: bool = False,
 ) -> str:
@@ -144,32 +129,25 @@ def generate_dockerfile_content(
         raise ValueError(
             "`spark_version` is required for `tfy-spark-buildpack` builder"
         )
-
-    # Handle pip packages - SparkBuildSpec pip_packages is Optional[List[str]], so we need to handle None
-    pip_packages = build_configuration.pip_packages or []
-
+
     if package_manager == PythonPackageManager.PIP.value:
         python_packages_install_command = generate_pip_install_command(
             requirements_path=requirements_destination_path,
-            pip_packages=pip_packages + ADDITIONAL_PIP_PACKAGES,
+            pip_packages=ADDITIONAL_PIP_PACKAGES,
             mount_pip_conf_secret=mount_python_package_manager_conf_secret,
         )
     elif package_manager == PythonPackageManager.UV.value:
         python_packages_install_command = generate_uv_pip_install_command(
             requirements_path=requirements_destination_path,
-            pip_packages=pip_packages + ADDITIONAL_PIP_PACKAGES,
+            pip_packages=ADDITIONAL_PIP_PACKAGES,
             mount_uv_conf_secret=mount_python_package_manager_conf_secret,
         )
     else:
         raise ValueError(f"Unsupported package manager: {package_manager}")
 
-    apt_install_command = generate_apt_install_command(
-        apt_packages=build_configuration.apt_packages
-    )
     template_args = {
         "spark_image_repo": ENV_VARS.TFY_SPARK_BUILD_SPARK_IMAGE_REPO,
         "spark_version": build_configuration.spark_version,
-        "apt_install_command": apt_install_command,
         "requirements_path": requirements_path,
         "requirements_destination_path": requirements_destination_path,
         "python_packages_install_command": python_packages_install_command,
@@ -0,0 +1,56 @@
+# This script is used as the main application file for spark applications
+# when the application to be run is a notebook, the actual notebook to be
+# executed is passed as an argument to this script.
+
+
+import argparse
+import sys
+
+import papermill as pm
+
+
+def execute_notebook(notebook_path, output_path="/tmp/output.ipynb", parameters=None):
+    """
+    Execute a Jupyter notebook using papermill.
+
+    Args:
+        notebook_path: Path to the input notebook
+        output_path: Path for the output notebook
+        parameters: Dictionary of parameters to pass to the notebook
+
+    Raises:
+        Exception: If notebook execution fails
+    """
+    if parameters is None:
+        parameters = {}
+
+    print(f"Starting execution of notebook: {notebook_path}")
+    pm.execute_notebook(
+        notebook_path,
+        output_path,
+        parameters=parameters,
+        # TODO(gw): Replace with kernel name for venv
+        kernel_name="python3",
+    )
+    print(f"Successfully executed notebook: {notebook_path}")
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(
+        description="Execute a Jupyter notebook using papermill for Spark applications"
+    )
+    parser.add_argument("notebook_path", help="Path to the notebook file to execute")
+
+    args = parser.parse_args()
+
+    # TODO(gw): Add support for passing parameters to the notebook
+    try:
+        execute_notebook(args.notebook_path)
+    except Exception as e:
+        print(f"Error executing notebook {args.notebook_path}: {e}")
+        print(
+            "Exiting with status code 1 to signal failure to parent process/orchestrator"
+        )
+        sys.exit(1)
+
+    # TODO(gw): Publish the output notebook to blob storage from where it could be rendered
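
papermill's parameters hook is what the TODO above targets; once wired through, the call would presumably look like this (input path and parameter names are hypothetical):

import papermill as pm

pm.execute_notebook(
    "analysis.ipynb",  # hypothetical input notebook
    "/tmp/output.ipynb",
    parameters={"run_date": "2025-06-09"},  # injected into the notebook's parameters cell
    kernel_name="python3",
)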
@@ -159,6 +159,26 @@ class PythonBuild(models.PythonBuild, PatchedModelBase):
         return values
 
 
+class SparkBuild(models.SparkBuild, PatchedModelBase):
+    type: Literal["tfy-spark-buildpack"] = "tfy-spark-buildpack"
+
+    @root_validator
+    def validate_values(cls, values):
+        _resolve_requirements_path(
+            build_context_path=values.get("build_context_path") or "./",
+            requirements_path=values.get("requirements_path"),
+        )
+        return values
+
+
+class SparkImageBuild(models.SparkImageBuild, PatchedModelBase):
+    type: Literal["spark-image-build"] = "spark-image-build"
+
+
+class SparkImage(models.SparkImage, PatchedModelBase):
+    type: Literal["spark-image"] = "spark-image"
+
+
 class RemoteSource(models.RemoteSource, PatchedModelBase):
     type: Literal["remote"] = "remote"
 
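
The patched subclasses pin each type literal with a default value, so specs can omit the discriminator when the model is constructed directly. A minimal sketch of that pydantic v1 pattern:

from typing import Literal

from pydantic import BaseModel


class SparkImage(BaseModel):
    type: Literal["spark-image"] = "spark-image"


print(SparkImage().type)  # -> "spark-image", no explicit type needed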
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: truefoundry
-Version: 0.9.4
+Version: 0.10.1rc1
 Summary: TrueFoundry CLI
 Author-email: TrueFoundry Team <abhishek@truefoundry.com>
 Requires-Python: <3.14,>=3.8.1
@@ -40,7 +40,7 @@ truefoundry/cli/display_util.py,sha256=9vzN3mbQqU6OhS7qRUiMRana4PTHa4sDTA0Hn7OVj
 truefoundry/cli/util.py,sha256=pezUfF2GC6ru7s8VeH2a7uvXTU0xN9ka7yLXkIgC3dY,4998
 truefoundry/common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 truefoundry/common/auth_service_client.py,sha256=N3YxKlx63r6cPZqbgb2lqBOPI69ShB7D7RCIq4FSCjc,7949
-truefoundry/common/constants.py,sha256=3y3JuEJNQa7dFW1-yY-dgjB_tBoa_GwUNQxAsRLWiZ0,4547
+truefoundry/common/constants.py,sha256=nWd3Je71WmHEORRUTCupZy5fWADqEFftjYP6wiYhCIc,4627
 truefoundry/common/credential_file_manager.py,sha256=1yEk1Zm2xS4G0VDFwKSZ4w0VUrcPWQ1nJnoBaz9xyKA,4251
 truefoundry/common/credential_provider.py,sha256=_OhJ2XFlDaVsrUO-FyywxctcGGqDdC2pgcvwEKqQD0Q,4071
 truefoundry/common/entities.py,sha256=b4R6ss06-ygDS3C4Tqa_GOq5LFKDYbt7x4Mghnfz6yo,4007
@@ -52,10 +52,10 @@ truefoundry/common/storage_provider_utils.py,sha256=yURhMw8k0FLFvaviRHDiifhvc6Gn
 truefoundry/common/types.py,sha256=BMJFCsR1lPJAw66IQBSvLyV4I6o_x5oj78gVsUa9si8,188
 truefoundry/common/utils.py,sha256=j3QP0uOsaGD_VmDDR68JTwoYE1okkAq6OqpVkzVf48Q,6424
 truefoundry/common/warnings.py,sha256=rs6BHwk7imQYedo07iwh3TWEOywAR3Lqhj0AY4khByg,504
-truefoundry/deploy/__init__.py,sha256=6D22iiCgd5xlzBaG34q9Cx4rGgwf5qIAKQrOCgaCXYY,2746
+truefoundry/deploy/__init__.py,sha256=2GNbI8IGJBotz_IKaqQ-DWYWZn_pSu7lN7aId15Gk7Q,2799
 truefoundry/deploy/python_deploy_codegen.py,sha256=AainOFR20XvhNeztJkLPWGZ40lAT_nwc-ZmG77Kum4o,6525
-truefoundry/deploy/_autogen/models.py,sha256=mcd83zm8--repV2RFRsDNpmR9MgU7hW7VOpM0EDhQko,73878
-truefoundry/deploy/builder/__init__.py,sha256=jPSUJ75X_V-XIvLkG2Nl_wXTdhzy80LcbhMRlD3f5nE,4996
+truefoundry/deploy/_autogen/models.py,sha256=xt-DuaRDx5jeRwyGoQH2yyPZAep9Q2MHFW9XBuRzG8E,73161
+truefoundry/deploy/builder/__init__.py,sha256=kgvlkVkiWpMVdim81tIeLrdoACqrFDgwCqHdQVsCsMo,4988
 truefoundry/deploy/builder/constants.py,sha256=amUkHoHvVKzGv0v_knfiioRuKiJM0V0xW0diERgWiI0,508
 truefoundry/deploy/builder/docker_service.py,sha256=sm7GWeIqyrKaZpxskdLejZlsxcZnM3BTDJr6orvPN4E,3948
 truefoundry/deploy/builder/utils.py,sha256=D68-bqM0NQx-Elg-56mtkENyVyg9faZ9tgTmBuo1Sjs,1076
@@ -65,9 +65,9 @@ truefoundry/deploy/builder/builders/tfy_notebook_buildpack/__init__.py,sha256=RG
 truefoundry/deploy/builder/builders/tfy_notebook_buildpack/dockerfile_template.py,sha256=rQgdvKmAT9HArVW4TAG5yd2QTKRs3S5LJ9RQbc_EkHE,2518
 truefoundry/deploy/builder/builders/tfy_python_buildpack/__init__.py,sha256=_fjqHKn80qKi68SAMMALge7_A6e1sTsQWichw8uoGIw,2025
 truefoundry/deploy/builder/builders/tfy_python_buildpack/dockerfile_template.py,sha256=f4l3fH21E2b8W3-JotMKc0AdPcCxV7LRPxxYJa7z_UQ,9134
-truefoundry/deploy/builder/builders/tfy_spark_buildpack/__init__.py,sha256=jpDJIN0flNElgoe7_AtMkT2SFrp0Y-B3LZ8grOvTmYU,2890
-truefoundry/deploy/builder/builders/tfy_spark_buildpack/dockerfile_template.py,sha256=yOgDP0xB8LDidrEFrVRDawGsbm7tuI_IEFEdNWSEaR8,6810
-truefoundry/deploy/builder/builders/tfy_spark_buildpack/execute_notebook.py,sha256=9NEkWDcjczpLwuumYP34lBQ60RwX9ri5b-UE-kgUzD0,517
+truefoundry/deploy/builder/builders/tfy_spark_buildpack/__init__.py,sha256=NEPlM6_vTVxp4ITa18B8DBbgYCn1q5d8be21lbgu5oY,2888
+truefoundry/deploy/builder/builders/tfy_spark_buildpack/dockerfile_template.py,sha256=czPBD29Q8Ua9NQNA-EOgx8hrafA22fiE14ZNB6wyLVc,5826
+truefoundry/deploy/builder/builders/tfy_spark_buildpack/tfy_execute_notebook.py,sha256=Orib09GBfAwQukt5wV_jyxL-fw_dWsuIqQ5TyTST188,1738
 truefoundry/deploy/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 truefoundry/deploy/cli/commands/__init__.py,sha256=qv818jxqSAygJ3h-6Ul8t-5VOgR_UrSgsVtNCl3e5G0,1408
 truefoundry/deploy/cli/commands/apply_command.py,sha256=DmXmKVokkauyKIiJDtErTwbJ5_LvQeJbTQsG5BjyKpo,2427
@@ -117,7 +117,7 @@ truefoundry/deploy/v2/lib/deploy.py,sha256=HfSUdAS3gSpFAFtV0Mq9LscfpkaXqA2LHW4VX
 truefoundry/deploy/v2/lib/deploy_workflow.py,sha256=G5BzMIbap8pgDX1eY-TITruUxQdkKhYtBmRwLL6lDeY,14342
 truefoundry/deploy/v2/lib/deployable_patched_models.py,sha256=mUi-OjPf7bc8rzfrPLdFb79LKuDq7F36RxL4V-AXebs,6830
 truefoundry/deploy/v2/lib/models.py,sha256=ogc1UYs1Z2nBdGSKCrde9sk8d0GxFKMkem99uqO5CmM,1148
-truefoundry/deploy/v2/lib/patched_models.py,sha256=8ib9Y7b4-DoEml2zCv3V7QIqh4tLJUjzPj1AWomwvag,14775
+truefoundry/deploy/v2/lib/patched_models.py,sha256=VkfS7akbUzMA4q15lQUcAirdTsyVE1rfMeCmjXJC6Zk,15394
 truefoundry/deploy/v2/lib/source.py,sha256=d6-8_6Zn5koBglqrBrY6ZLG_7yyPuLdyEmK4iZTw6xY,9405
 truefoundry/ml/__init__.py,sha256=EEEHV7w58Krpo_W9Chd8Y3TdItfFO3LI6j6Izqc4-P8,2219
 truefoundry/ml/constants.py,sha256=vDq72d4C9FSWqr9MMdjgTF4TuyNFApvo_6RVsSeAjB4,2837
@@ -381,7 +381,7 @@ truefoundry/workflow/remote_filesystem/__init__.py,sha256=LQ95ViEjJ7Ts4JcCGOxMPs
 truefoundry/workflow/remote_filesystem/logger.py,sha256=em2l7D6sw7xTLDP0kQSLpgfRRCLpN14Qw85TN7ujQcE,1022
 truefoundry/workflow/remote_filesystem/tfy_signed_url_client.py,sha256=xcT0wQmQlgzcj0nP3tJopyFSVWT1uv3nhiTIuwfXYeg,12342
 truefoundry/workflow/remote_filesystem/tfy_signed_url_fs.py,sha256=nSGPZu0Gyd_jz0KsEE-7w_BmnTD8CVF1S8cUJoxaCbc,13305
-truefoundry-0.9.4.dist-info/METADATA,sha256=rNFh4XOOz7_58TIg68rG5jxyJkVOI4tPR0sEJYqIAfs,2504
-truefoundry-0.9.4.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-truefoundry-0.9.4.dist-info/entry_points.txt,sha256=xVjn7RMN-MW2-9f7YU-bBdlZSvvrwzhpX1zmmRmsNPU,98
-truefoundry-0.9.4.dist-info/RECORD,,
+truefoundry-0.10.1rc1.dist-info/METADATA,sha256=kPyFmtEe0M1iet-IELLot33EbKYjqHx9wQljCzt5oJw,2508
+truefoundry-0.10.1rc1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+truefoundry-0.10.1rc1.dist-info/entry_points.txt,sha256=xVjn7RMN-MW2-9f7YU-bBdlZSvvrwzhpX1zmmRmsNPU,98
+truefoundry-0.10.1rc1.dist-info/RECORD,,
@@ -1,21 +0,0 @@
-# This script is used as the main application file for spark applications
-# when the application to be run is a notebook, the actual notebook to be
-# executed is passed as an argument to this script.
-
-
-import papermill as pm
-import sys
-
-if __name__ == "__main__":
-
-    if len(sys.argv) != 2:
-        print("Usage: python execute_notebook.py <notebook_path>")
-        sys.exit(1)
-
-    notebook_path = sys.argv[1]
-
-    pm.execute_notebook(
-        notebook_path,
-        "output.ipynb",
-        parameters=dict(),
-    )