truefoundry 0.2.0rc5.tar.gz → 0.2.0rc7.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of truefoundry might be problematic.
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/PKG-INFO +4 -4
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/pyproject.toml +4 -3
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/autodeploy/agents/project_identifier.py +5 -5
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/autodeploy/agents/tester.py +2 -2
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/autodeploy/cli.py +46 -34
- truefoundry-0.2.0rc7/truefoundry/autodeploy/constants.py +22 -0
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/autodeploy/tools/ask.py +1 -1
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/autodeploy/tools/commit.py +1 -1
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/autodeploy/tools/docker_run.py +1 -1
- truefoundry-0.2.0rc7/truefoundry/cli/__main__.py +37 -0
- truefoundry-0.2.0rc7/truefoundry/deploy/cli/__init__.py +0 -0
- truefoundry-0.2.0rc7/truefoundry/deploy/cli/cli.py +99 -0
- truefoundry-0.2.0rc7/truefoundry/deploy/cli/deploy.py +184 -0
- truefoundry-0.2.0rc5/truefoundry/autodeploy/constants.py +0 -12
- truefoundry-0.2.0rc5/truefoundry/cli/__main__.py +0 -60
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/README.md +0 -0
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/__init__.py +0 -0
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/autodeploy/__init__.py +0 -0
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/autodeploy/agents/__init__.py +0 -0
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/autodeploy/agents/base.py +0 -0
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/autodeploy/agents/developer.py +0 -0
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/autodeploy/exception.py +0 -0
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/autodeploy/logger.py +0 -0
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/autodeploy/tools/__init__.py +0 -0
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/autodeploy/tools/base.py +0 -0
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/autodeploy/tools/docker_build.py +0 -0
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/autodeploy/tools/file_type_counts.py +0 -0
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/autodeploy/tools/list_files.py +0 -0
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/autodeploy/tools/read_file.py +0 -0
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/autodeploy/tools/send_request.py +0 -0
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/autodeploy/tools/write_file.py +0 -0
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/autodeploy/utils/diff.py +0 -0
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/autodeploy/utils/pydantic_compat.py +0 -0
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/cli/__init__.py +0 -0
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/deploy/__init__.py +0 -0
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/langchain/__init__.py +0 -0
- {truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/ml/__init__.py +0 -0
{truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/PKG-INFO
RENAMED
@@ -1,12 +1,11 @@
 Metadata-Version: 2.1
 Name: truefoundry
-Version: 0.2.0rc5
+Version: 0.2.0rc7
 Summary: Truefoundry CLI
 Author: Abhishek Choudhary
 Author-email: abhichoudhary06@gmail.com
-Requires-Python: >=3.8,<3.13
+Requires-Python: >=3.8.1,<3.13
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
@@ -15,13 +14,14 @@ Provides-Extra: ml
 Requires-Dist: docker (>=7.0.0,<8.0.0)
 Requires-Dist: gitignorefile (>=1.1.2,<2.0.0)
 Requires-Dist: gitpython (>=3.1.43,<4.0.0)
+Requires-Dist: inquirer (>=3.2.4,<4.0.0)
 Requires-Dist: mlfoundry (==0.10.9) ; extra == "ml"
 Requires-Dist: openai (>=1.16.2,<2.0.0)
 Requires-Dist: pydantic (>=1.10.0,<3)
 Requires-Dist: python-dotenv (>=1.0.1,<2.0.0)
 Requires-Dist: requests (>=2.31.0,<3.0.0)
 Requires-Dist: rich (>=13.7.1,<14.0.0)
-Requires-Dist: servicefoundry (==0.10.
+Requires-Dist: servicefoundry (==0.10.10)
 Description-Content-Type: text/markdown
 
 # Truefoundry
{truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/pyproject.toml
RENAMED
@@ -1,13 +1,13 @@
 [tool.poetry]
 name = "truefoundry"
-version = "0.2.0rc5"
+version = "0.2.0rc7"
 description = "Truefoundry CLI"
 authors = ["Abhishek Choudhary <abhichoudhary06@gmail.com>"]
 readme = "README.md"
 
 [tool.poetry.dependencies]
-python = "^3.8,<3.13"
-servicefoundry = "0.10.
+python = "^3.8.1,<3.13"
+servicefoundry = "0.10.10"
 mlfoundry = { version = "0.10.9", optional = true }
 openai = "^1.16.2"
 docker = "^7.0.0"
@@ -17,6 +17,7 @@ requests = "^2.31.0"
 python-dotenv = "^1.0.1"
 gitignorefile = "^1.1.2"
 gitpython = "^3.1.43"
+inquirer = "^3.2.4"
 
 [tool.poetry.extras]
 ml = ["mlfoundry"]
{truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/autodeploy/agents/project_identifier.py
RENAMED
@@ -94,20 +94,20 @@ pip, poetry, yarn, go.mod, cargo.toml, npm, setup.py.
     def render(self, console: Console):
         if self.primary_programming_language is not None:
             console.print(
-                f"[bold magenta]
+                f"[bold magenta]TrueFoundry:[/] Identified a project using [bold cyan]{self.primary_programming_language}[/]."
             )
             console.print(
-                f"[bold magenta]
+                f"[bold magenta]TrueFoundry:[/] Framework Identified: [bold cyan]{'Not applicable' if self.framework is None else self.framework}[/]"
             )
             console.print(
-                f"[bold magenta]
+                f"[bold magenta]TrueFoundry:[/] Dependency Manager Identified: [bold cyan]{'Not applicable' if self.dependency_manager is None else self.dependency_manager}[/]"
             )
         else:
             console.print(
-                "[bold magenta]
+                "[bold magenta]TrueFoundry:[/] Unable to identify any programming language in the project."
             )
         console.print(
-            f"[bold magenta]
+            f"[bold magenta]TrueFoundry:[/] [italic]{self.justification}[/]"
         )
 
     def __init__(self, project_root_path: str, openai_client: OpenAI):
{truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/autodeploy/agents/tester.py
RENAMED
@@ -53,10 +53,10 @@ Why was the testing a failure or successful?
 
     def render(self, console: Console):
         console.print(
-            f"[bold cyan]
+            f"[bold cyan]TrueFoundry:[/] The given project has been {'[bold green]successfully built[/]' if self.successful else '[bold red]failed to build[/]'}"
         )
         console.print(
-            f"[bold magenta]
+            f"[bold magenta]TrueFoundry:[/] [italic]{self.justification}[/]"
         )
         if not self.successful:
             console.print(f"[cyan]logs:[/] {self.logs}")
{truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/autodeploy/cli.py
RENAMED
@@ -6,6 +6,7 @@ from typing import Dict, Optional
 
 import click
 import docker
+import inquirer
 from dotenv import dotenv_values
 
 from truefoundry.autodeploy.exception import GitBinaryNotFoundException
@@ -17,6 +18,7 @@ except ImportError as ex:
     raise GitBinaryNotFoundException from ex
 
 import requests
+from click.exceptions import ClickException
 from openai import OpenAI
 from rich.console import Console
 from rich.prompt import Prompt
@@ -32,6 +34,8 @@ from truefoundry.autodeploy.agents.project_identifier import (
 )
 from truefoundry.autodeploy.agents.tester import Tester
 from truefoundry.autodeploy.constants import (
+    ABOUT_AUTODEPLOY,
+    AUTODEPLOY_INTRO_MESSAGE,
     AUTODEPLOY_OPENAI_API_KEY,
     AUTODEPLOY_OPENAI_BASE_URL,
     AUTODEPLOY_TFY_BASE_URL,
@@ -42,12 +46,12 @@ from truefoundry.autodeploy.tools.docker_run import DockerRun, DockerRunLog
 
 
 def _get_openai_client() -> OpenAI:
-    session = ServiceFoundrySession()
     if AUTODEPLOY_OPENAI_BASE_URL is not None and AUTODEPLOY_OPENAI_API_KEY is not None:
         return OpenAI(
             api_key=AUTODEPLOY_OPENAI_API_KEY, base_url=AUTODEPLOY_OPENAI_BASE_URL
         )
     try:
+        session = ServiceFoundrySession()
         resp = requests.get(
             f"{AUTODEPLOY_TFY_BASE_URL}/api/svc/v1/llm-gateway/access-details",
             headers={
@@ -58,9 +62,11 @@ def _get_openai_client() -> OpenAI:
         resp = resp.json()
         return OpenAI(api_key=resp["jwtToken"], base_url=resp["inferenceBaseURL"])
     except requests.exceptions.HTTPError as http_error:
-        raise
-        "
+        raise ClickException(
+            f"An error occurred while connecting to the Truefoundry server.\nThe server responded with status code {http_error.response.status_code}."
         ) from http_error
+    except Exception as e:
+        raise ClickException(message=str(e)) from e
 
 
 def deploy_component(
@@ -119,21 +125,21 @@ def _check_repo(project_root_path: str, console: Console):
             sys.exit(1)
         current_active_branch = repo.active_branch.name
         console.print(
-            f"[bold magenta]
+            f"[bold magenta]TrueFoundry:[/] Current branch [green]{current_active_branch!r}[/]"
         )
         branch_name = Prompt.ask(
-            "[bold magenta]
+            "[bold magenta]TrueFoundry:[/] Enter a branch name if you want to checkout to a new branch. "
            f"Press enter to continue on [green]{current_active_branch!r}[/]",
            console=console,
         )
         if branch_name:
             repo.git.checkout("-b", branch_name)
             console.print(
-                f"[bold magenta]
+                f"[bold magenta]TrueFoundry:[/] Switched to branch: [green]{repo.active_branch}[/]"
             )
         else:
             console.print(
-                f"[bold magenta]
+                f"[bold magenta]TrueFoundry:[/] Continuing on [green]{current_active_branch!r}[/]"
             )
 
     except InvalidGitRepositoryError:
@@ -163,22 +169,22 @@ def _update_status(event, status: Status):
         event, (Developer.Request, ProjectIdentifier.Response, Tester.Response)
     ):
         status.update(
-            "[bold magenta]
+            "[bold magenta]TrueFoundry[/] is currently building the project. Please wait..."
         )
 
     if isinstance(event, ProjectIdentifier.Request):
         status.update(
-            "[bold magenta]
+            "[bold magenta]TrueFoundry[/] is currently identifying the project..."
         )
 
     if isinstance(event, (Tester.Request, DockerRun.Response)):
         status.update(
-            "[bold magenta]
+            "[bold magenta]TrueFoundry[/] is currently running tests on the project..."
         )
 
     if isinstance(event, DockerRunLog):
         status.update(
-            "[bold cyan]Running:[/] [bold magenta]
+            "[bold cyan]Running:[/] [bold magenta]TrueFoundry[/] is executing the Docker container. Press [yellow]control-c[/] to stop waiting for additional logs..."
         )
 
 
@@ -201,26 +207,24 @@ def _get_docker(console: Console) -> docker.DockerClient:
         sys.exit(1)
 
 
-def cli(project_root_path: str, deploy: bool):
+def cli(project_root_path: str, deploy: bool, workspace_fqn: str = None):
     console = Console()
     openai_client = _get_openai_client()
     docker_client = _get_docker(console)
     project_root_path = os.path.abspath(project_root_path)
+    console.print(ABOUT_AUTODEPLOY)
+    console.print(AUTODEPLOY_INTRO_MESSAGE)
     console.print(
-        "[bold
+        "[bold reverse]You will need to have Docker and Git installed on your machine for this to work[/]"
     )
     if AUTODEPLOY_OPENAI_BASE_URL is not None and AUTODEPLOY_OPENAI_API_KEY is not None:
         console.print(
             "[bold green]OpenAI credentials found in environment variables.[/]"
         )
         console.print(
-            "[bold reverse red]DISCLAIMER:[/] The contents of your project will be sent to OpenAI.",
             "This operation will use tokens from your provided OpenAI account and may incur costs.",
         )
     else:
-        console.print(
-            "[bold reverse red]DISCLAIMER:[/] The contents of the project will be sent to OpenAI."
-        )
         console.print(
             "[dim]To use your own LLM, set the environment variables [dim italic green]AUTODEPLOY_OPENAI_BASE_URL[/],[/]",
             "[dim][dim italic green]AUTODEPLOY_OPENAI_API_KEY[/], and [dim italic green]AUTODEPLOY_MODEL_NAME[/] for URL, API key, and LLM model name respectively.[/]",
@@ -228,41 +232,52 @@ def cli(project_root_path: str, deploy: bool):
     console.print(
         "[bold cyan]Note:[/] All changes will be committed to a new branch. Please ensure you have a repository."
     )
+    console.print("[bright_green]Let's get started[/]")
     _check_repo(project_root_path=project_root_path, console=console)
 
-
-
-
-
-
-
-
-
+    choices = {
+        "Service: An application that runs continuously. Example: web servers, workers polling a job queue, etc.": "SERVICE",
+        "Job: An application that runs once and then stops. Example: Training an ML model, running a script, etc.": "JOB",
+    }
+    component = inquirer.prompt(
+        [
+            inquirer.List(
+                "component",
+                message="TrueFoundry: Is your project a",
+                choices=choices.keys(),
+            )
+        ]
+    )["component"]
+    component_type = ComponentType[choices[component]]
     while True:
         name = Prompt.ask(
-            "[bold magenta]
+            "[bold magenta]TrueFoundry:[/] Name of deployment",
             console=console,
             default=_get_default_project_name(project_root_path),
         )
         if not re.match(r"^[a-z][a-z0-9\-]{1,30}[a-z0-9]$", name):
             console.print(
-                "[bold magenta]
+                "[bold magenta]TrueFoundry:[/] The name should be between 2-30 alphaneumaric"
                 " characters and '-'. The first character should not be a digit."
             )
         else:
             break
     command = Prompt.ask(
-        "[bold magenta]
+        "[bold magenta]TrueFoundry:[/] Command to run the application",
         console=console,
         show_default=False,
         default=None,
     )
 
     env_path = Prompt.ask(
-        "[bold magenta]
+        "[bold magenta]TrueFoundry:[/] Enter .env file location for environment variables, "
         "or press [green]Enter[/] to skip.",
         console=console,
     )
+    if workspace_fqn is None:
+        workspace_fqn = Prompt.ask(
+            "[bold magenta]TrueFoundry:[/] Enter the Workspace FQN where you would like to deploy, [dim]Ex: cluster-name:workspace-name[/]"
+        )
     while True:
         try:
             env = _parse_env(project_root_path, env_path) if env_path else {}
@@ -270,13 +285,13 @@
         except FileNotFoundError:
             console.print("[red]Invalid location for .env[/]")
             env_path = Prompt.ask(
-                "[bold magenta]
+                "[bold magenta]TrueFoundry:[/]Please provide the correct path,"
                 "or press [green]Enter[/] to skip.",
                 console=console,
             )
             continue
     status = console.status(
-        "[bold cyan]Starting up:[/] [bold magenta]
+        "[bold cyan]Starting up:[/] [bold magenta]TrueFoundry[/] is initializing. Please wait..."
     )
     with status:
         developer = Developer(
@@ -299,9 +314,6 @@
                 break
 
     if deploy:
-        workspace_fqn = Prompt.ask(
-            "Enter the Workspace FQN where you would like to deploy"
-        )
         deploy_component(
             workspace_fqn=workspace_fqn,
             project_root_path=project_root_path,
truefoundry-0.2.0rc7/truefoundry/autodeploy/constants.py
ADDED
@@ -0,0 +1,22 @@
+import os
+
+DEBUG = os.getenv("AUTODEPLOY_DEBUG", "")
+
+AUTODEPLOY_TFY_BASE_URL = os.getenv(
+    "AUTODEPLOY_TFY_BASE_URL", "https://app.truefoundry.com"
+).strip("/")
+AUTODEPLOY_OPENAI_BASE_URL = os.environ.get("AUTODEPLOY_OPENAI_BASE_URL")
+AUTODEPLOY_OPENAI_API_KEY = os.environ.get("AUTODEPLOY_OPENAI_API_KEY")
+AUTODEPLOY_MODEL_NAME = os.environ.get(
+    "AUTODEPLOY_MODEL_NAME", "auto-deploy-openai/gpt-4-turbo-2024-04-09"
+)
+AUTODEPLOY_INTRO_MESSAGE = """Truefoundry will first check for a [blue]Dockerfile[/] in your project.
+If it's not present, Truefoundry will generate one for you.
+Then, it will attempt to build a Docker image on your machine.
+If any issues are encountered during this process, Truefoundry will attempt to automatically fix them.
+Finally, it will run the application to verify that everything is set up correctly.
+"""
+ABOUT_AUTODEPLOY = """To deploy your project, we will generate the deployment configuration using AI.
+We will analyze your codebase using our AI agent and make the required changes so that we can build and deploy the code.
+We will confirm all the changes with you.
+"""
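The new constants module resolves all AUTODEPLOY_* settings from environment variables at import time. As a rough illustration (not part of the diff), here is a minimal sketch of pointing autodeploy at your own OpenAI-compatible endpoint, following the CLI hint above; the URL, API key, and model name below are placeholders:

    # Minimal sketch: override the default LLM gateway with your own endpoint.
    # The endpoint URL, API key, and model name are placeholder values.
    import os

    os.environ["AUTODEPLOY_OPENAI_BASE_URL"] = "https://my-llm-gateway.example.com/v1"
    os.environ["AUTODEPLOY_OPENAI_API_KEY"] = "sk-placeholder"
    os.environ["AUTODEPLOY_MODEL_NAME"] = "gpt-4-turbo-2024-04-09"

    # constants.py reads these variables when it is imported, so set them
    # before importing any truefoundry.autodeploy modules.
    from truefoundry.autodeploy import constants

    print(constants.AUTODEPLOY_OPENAI_BASE_URL)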
{truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/autodeploy/tools/ask.py
RENAMED
@@ -12,7 +12,7 @@ class AskQuestion(Event):
     question: str
 
     def render(self, console: Console) -> str:
-        console.print(f"[bold magenta]
+        console.print(f"[bold magenta]TrueFoundry:[/] {self.question}")
         response = console.input("[bold green]You:[/] ")
         return response
 
{truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/autodeploy/tools/commit.py
RENAMED
@@ -27,7 +27,7 @@ class CommitConfirmation(Event):
     commit_message: str
 
     def render(self, console: Console) -> Optional["Commit.Response"]:
-        console.print("[bold magenta]
+        console.print("[bold magenta]TrueFoundry[/] wants to make a commit,", end=" ")
         console.print(f"with Commit Message: [green]{self.commit_message}[/]")
         console.print("[yellow]Displaying changes to be made by the patch[/]")
         console.print(Padding.indent(renderable=LLMDiff(self.patch), level=2))
{truefoundry-0.2.0rc5 → truefoundry-0.2.0rc7}/truefoundry/autodeploy/tools/docker_run.py
RENAMED
@@ -48,7 +48,7 @@ The values are the ports to open on the host""",
 
     def render(self, console: Console):
         console.print(
-            f"[bold magenta]
+            f"[bold magenta]TrueFoundry[/] is executing the Docker container. Image Tag: [bold green]{self.image_tag}[/], Exposed Port: [bold green]{str(self.ports) if self.ports is not None else 'Not exposed'}[/], Command: [bold green]{self.command}[/]"
         )
 
     class Response(ResponseEvent):
truefoundry-0.2.0rc7/truefoundry/cli/__main__.py
ADDED
@@ -0,0 +1,37 @@
+import sys
+
+import click
+
+from truefoundry.deploy.cli.cli import create_truefoundry_cli
+
+MLFOUNDRY_INSTALLED = True
+
+
+try:
+    from mlfoundry.cli.commands import download
+except ImportError:
+    MLFOUNDRY_INSTALLED = False
+
+
+@click.group()
+def ml():
+    """MlFoundry CLI"""
+
+
+def main():
+    # Exit the interpreter by raising SystemExit(status).
+    # If the status is omitted or None, it defaults to zero (i.e., success).
+    # If the status is an integer, it will be used as the system exit status.
+    # If it is another kind of object, it will be printed and the system exit status will be one (i.e., failure).
+    try:
+        cli = create_truefoundry_cli()
+        if MLFOUNDRY_INSTALLED:
+            ml.add_command(download)
+            cli.add_command(ml)
+    except Exception as e:
+        raise click.exceptions.UsageError(message=str(e)) from e
+    sys.exit(cli())
+
+
+if __name__ == "__main__":
+    main()

truefoundry-0.2.0rc7/truefoundry/deploy/cli/__init__.py
ADDED (empty file, no content changes to show)
truefoundry-0.2.0rc7/truefoundry/deploy/cli/cli.py
ADDED
@@ -0,0 +1,99 @@
+import logging
+import sys
+
+import rich_click as click
+from servicefoundry import logger
+from servicefoundry.cli.commands import (
+    deploy_patch_v2_command,
+    get_login_command,
+    get_logout_command,
+    get_patch_command,
+)
+from servicefoundry.cli.config import CliConfig
+from servicefoundry.cli.const import GROUP_CLS
+from servicefoundry.cli.util import setup_rich_click
+from servicefoundry.lib.util import is_debug_env_set, is_experimental_env_set
+from servicefoundry.version import __version__
+
+from truefoundry.deploy.cli.deploy import deploy_v2_command
+
+click.rich_click.USE_RICH_MARKUP = True
+
+
+def _add_experimental_commands(cli):
+    pass
+
+
+def create_truefoundry_cli():
+    """Generates CLI by combining all subcommands into a main CLI and returns in
+
+    Returns:
+        function: main CLI functions will all added sub-commands
+    """
+    cli = truefoundry_cli
+    cli.add_command(get_login_command())
+    # cli.add_command(get_get_command())
+    # cli.add_command(get_list_command())
+    # cli.add_command(get_delete_command())
+    # cli.add_command(get_create_command())
+    # cli.add_command(get_redeploy_command())
+    cli.add_command(get_logout_command())
+    # cli.add_command(get_build_command())
+    cli.add_command(deploy_v2_command)
+    cli.add_command(deploy_patch_v2_command)
+    # cli.add_command(get_build_logs_command())
+    # cli.add_command(get_logs_command())
+    # cli.add_command(get_trigger_command())
+    # cli.add_command(get_terminate_command())
+
+    if not (sys.platform.startswith("win32") or sys.platform.startswith("cygwin")):
+        cli.add_command(get_patch_command())
+
+    if is_experimental_env_set():
+        _add_experimental_commands(cli)
+    return cli
+
+
+CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])  # noqa: C408
+
+
+@click.group(
+    cls=GROUP_CLS, context_settings=CONTEXT_SETTINGS, invoke_without_command=True
+)
+@click.option(
+    "--json",
+    is_flag=True,
+    help="Output entities in json format instead of formatted tables",
+)
+@click.option(
+    "--debug",
+    is_flag=True,
+    default=is_debug_env_set,
+    help="Set logging level to Debug. Can also be set using environment variable. E.g. SFY_DEBUG=1",
+)
+@click.version_option(__version__)
+@click.pass_context
+def truefoundry_cli(ctx, json, debug):
+    """
+    Truefoundry provides an easy way to deploy your services, jobs and models.
+    \b
+
+    To start, login to your Truefoundry account with [i]tfy login[/]
+
+    Then start deploying with [i]tfy deploy[/]
+
+    And more: [link=https://docs.truefoundry.com/docs]https://docs.truefoundry.com/docs[/]
+
+    """
+    setup_rich_click()
+    # TODO (chiragjn): Change this to -o json|yaml|table|pager
+    CliConfig.set("json", json)
+    if ctx.invoked_subcommand is None:
+        click.echo(ctx.get_help())
+    log_level = logging.INFO
+    # no info logs while outputting json
+    if json:
+        log_level = logging.ERROR
+    if debug:
+        log_level = logging.DEBUG
+    logger.add_cli_handler(level=log_level)
truefoundry-0.2.0rc7/truefoundry/deploy/cli/deploy.py
ADDED
@@ -0,0 +1,184 @@
+import json
+import os
+import sys
+
+import rich_click as click
+import yaml
+from click import UsageError
+from click.exceptions import ClickException
+from servicefoundry.cli.const import GROUP_CLS
+from servicefoundry.lib.dao import application as application_lib
+
+from truefoundry.autodeploy.exception import GitBinaryNotFoundException
+
+GIT_BINARY = True
+
+try:
+    from truefoundry.autodeploy.cli import cli as autodeploy_cli
+except GitBinaryNotFoundException:
+    GIT_BINARY = False
+
+default_file = "default"
+
+
+def _get_yaml_file():
+    files = ["truefoundry.yaml", "servicefoundry.yaml"]
+    for file in files:
+        if os.path.exists(file):
+            return file
+    return None
+
+
+@click.group(
+    name="deploy",
+    cls=GROUP_CLS,
+    invoke_without_command=True,
+    help="Deploy application to Truefoundry",
+)
+@click.option(
+    "-f",
+    "--file",
+    type=click.STRING,
+    default=default_file,
+    help="Path to truefoundry.yaml file",
+    show_default=False,
+)
+@click.option(
+    "-w",
+    "--workspace-fqn",
+    "--workspace_fqn",
+    required=True,
+    help="FQN of the Workspace to deploy to",
+)
+@click.option(
+    "--wait/--no-wait",
+    "--wait/--no_wait",
+    is_flag=True,
+    show_default=True,
+    default=True,
+    help="Wait and tail the deployment progress",
+)
+def deploy_v2_command(file: str, workspace_fqn: str, wait: bool):
+    from servicefoundry.lib.auth.servicefoundry_session import ServiceFoundrySession
+    from servicefoundry.v2.lib.deployable_patched_models import Application
+
+    try:
+        _ = ServiceFoundrySession()
+    except Exception as e:
+        raise ClickException(message=str(e)) from e
+
+    if file != default_file and not os.path.exists(file):
+        raise UsageError(
+            f"The file {file} does not exist. Please check the file path and try again."
+        )
+
+    file = _get_yaml_file()
+
+    if file is None:
+        click.echo(
+            click.style(
+                "We did not find any truefoundry.yaml or servicefoundry.yaml at the root path.",
+                fg="red",
+            ),
+            color=True,
+        )
+
+        if not sys.stdout.isatty():
+            click.echo(
+                click.style(
+                    'Please create a truefoundry.yaml or pass the file name with "--file file_name"',
+                    fg="yellow",
+                ),
+                color=True,
+            )
+            sys.exit(1)
+
+        click.echo(
+            click.style(
+                'We will be using TrueFoundry AI to build your project.\nIf you wish to proceed without TrueFoundry AI,\nyou need to either have a truefoundry.yaml file in your project root or\npass the path to a yaml file using the "--file file_name" option.',
+                fg="yellow",
+            ),
+        )
+
+        if GIT_BINARY:
+            autodeploy_cli(
+                project_root_path=".", deploy=True, workspace_fqn=workspace_fqn
+            )
+        else:
+            raise UsageError(
+                "We cannot find the 'git' command. We use Git to track changes made while automatically building your project. Please install Git to use this feature or manually create a 'truefoundry.yaml' file."
+            )
+    else:
+        with open(file, "r") as f:
+            application_definition = yaml.safe_load(f)
+
+        application = Application.parse_obj(application_definition)
+        application.deploy(workspace_fqn=workspace_fqn, wait=wait)
+
+
+@click.group(
+    name="patch-application",
+    cls=GROUP_CLS,
+    invoke_without_command=True,
+    help="Deploy application with patches to Truefoundry",
+)
+@click.option(
+    "-f",
+    "--patch-file",
+    "--patch_file",
+    type=click.Path(exists=True, dir_okay=False, resolve_path=True),
+    help="Path to yaml patch file",
+    show_default=True,
+)
+@click.option(
+    "-p",
+    "--patch",
+    type=click.STRING,
+    help="Patch in JSON format provided as a string.",
+    show_default=True,
+)
+@click.option(
+    "-a",
+    "--application_fqn",
+    "--application-fqn",
+    type=click.STRING,
+    required=True,
+    help="FQN of the Application to patch and deploy",
+)
+@click.option(
+    "--wait/--no-wait",
+    "--wait/--no_wait",
+    is_flag=True,
+    show_default=True,
+    default=True,
+    help="Wait and tail the deployment progress",
+)
+def deploy_patch_v2_command(
+    patch_file: str, application_fqn: str, patch: str, wait: bool
+):
+    from servicefoundry.v2.lib.deployable_patched_models import Application
+
+    manifest_patch_obj = None
+    if not patch_file and not patch:
+        raise Exception("You need to either provide --file or --patch.")
+    elif patch and patch_file:
+        raise Exception("You can only provide one of --file and --patch")
+    elif patch:
+        try:
+            manifest_patch_obj = json.loads(patch)
+        except json.decoder.JSONDecodeError as e:
+            raise Exception("Invalid JSON provided as --patch") from e
+    elif patch_file:
+        with open(patch_file, "r") as f:
+            manifest_patch_obj = yaml.safe_load(f)
+
+    if not manifest_patch_obj or not isinstance(manifest_patch_obj, dict):
+        raise Exception("Invalid patch, aborting deployment.")
+
+    tfy_application = application_lib.get_application(application_fqn=application_fqn)
+    patched_application_obj = application_lib.get_patched_application_definition(
+        application=tfy_application, manifest_patch=manifest_patch_obj
+    )
+
+    application = Application.parse_obj(patched_application_obj)
+    application.deploy(workspace_fqn=tfy_application.workspace.fqn, wait=wait)
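For reference (not part of the diff), a minimal sketch of what the new deploy command does once a YAML definition is found, assuming an existing truefoundry.yaml and an already logged-in session; the workspace FQN below is a placeholder:

    # Minimal sketch: programmatic equivalent of deploy_v2_command when a YAML file exists.
    import yaml
    from servicefoundry.v2.lib.deployable_patched_models import Application

    # truefoundry.yaml is assumed to exist at the project root.
    with open("truefoundry.yaml", "r") as f:
        application_definition = yaml.safe_load(f)

    application = Application.parse_obj(application_definition)
    # Workspace FQN format per the prompt above: "cluster-name:workspace-name" (placeholder value).
    application.deploy(workspace_fqn="my-cluster:my-workspace", wait=True)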
truefoundry-0.2.0rc5/truefoundry/autodeploy/constants.py
REMOVED
@@ -1,12 +0,0 @@
-import os
-
-DEBUG = os.getenv("AUTODEPLOY_DEBUG", "")
-
-AUTODEPLOY_TFY_BASE_URL = os.getenv(
-    "AUTODEPLOY_TFY_BASE_URL", "https://app.devtest.truefoundry.tech"
-).strip("/")
-AUTODEPLOY_OPENAI_BASE_URL = os.environ.get("AUTODEPLOY_OPENAI_BASE_URL")
-AUTODEPLOY_OPENAI_API_KEY = os.environ.get("AUTODEPLOY_OPENAI_API_KEY")
-AUTODEPLOY_MODEL_NAME = os.environ.get(
-    "AUTODEPLOY_MODEL_NAME", "auto-deploy-openai/gpt-4-turbo-2024-04-09"
-)
truefoundry-0.2.0rc5/truefoundry/cli/__main__.py
REMOVED
@@ -1,60 +0,0 @@
-import sys
-
-import click
-from servicefoundry.cli import create_servicefoundry_cli
-from servicefoundry.cli.const import COMMAND_CLS
-
-from truefoundry.autodeploy.exception import GitBinaryNotFoundException
-
-AUTODEPLOY_INSTALLED = True
-MLFOUNDRY_INSTALLED = True
-GIT_BINARY = True
-
-try:
-    from mlfoundry.cli.commands import download
-except ImportError:
-    MLFOUNDRY_INSTALLED = False
-
-try:
-    from truefoundry.autodeploy.cli import autodeploy_cli
-except ImportError:
-    AUTODEPLOY_INSTALLED = False
-except GitBinaryNotFoundException:
-    GIT_BINARY = False
-
-
-@click.group()
-def ml():
-    """MlFoundry CLI"""
-
-
-@click.command(name="auto-deploy", cls=COMMAND_CLS)
-def handle_git_error():
-    """
-    Build and deploy projects using Truefoundry
-    """
-    raise click.UsageError(
-        "The 'git' command could not be found. Please ensure Git is available in your system to run auto-deploy."
-    )
-
-
-def main():
-    # Exit the interpreter by raising SystemExit(status).
-    # If the status is omitted or None, it defaults to zero (i.e., success).
-    # If the status is an integer, it will be used as the system exit status.
-    # If it is another kind of object, it will be printed and the system exit status will be one (i.e., failure).
-    cli = create_servicefoundry_cli()
-    if MLFOUNDRY_INSTALLED:
-        ml.add_command(download)
-        cli.add_command(ml)
-    if AUTODEPLOY_INSTALLED:
-        if GIT_BINARY:
-            cli.add_command(autodeploy_cli)
-        else:
-            cli.add_command(handle_git_error)
-
-    sys.exit(cli())
-
-
-if __name__ == "__main__":
-    main()
All remaining files listed in the summary above (README.md and the various __init__.py, agent, tool, and utility modules marked +0 -0) were renamed from truefoundry-0.2.0rc5 to truefoundry-0.2.0rc7 with no content changes.