truefoundry-0.1.2-py3-none-any.whl → truefoundry-0.2.0rc1-py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as published to a supported public registry. It is provided for informational purposes only.
Potentially problematic release: this version of truefoundry has been flagged as potentially problematic; see the registry's advisory for details.
- truefoundry/autodeploy/__init__.py +0 -0
- truefoundry/autodeploy/agents/__init__.py +0 -0
- truefoundry/autodeploy/agents/base.py +180 -0
- truefoundry/autodeploy/agents/developer.py +113 -0
- truefoundry/autodeploy/agents/project_identifier.py +124 -0
- truefoundry/autodeploy/agents/tester.py +74 -0
- truefoundry/autodeploy/cli.py +312 -0
- truefoundry/autodeploy/constants.py +12 -0
- truefoundry/autodeploy/exception.py +2 -0
- truefoundry/autodeploy/logger.py +13 -0
- truefoundry/autodeploy/tools/__init__.py +26 -0
- truefoundry/autodeploy/tools/ask.py +32 -0
- truefoundry/autodeploy/tools/base.py +31 -0
- truefoundry/autodeploy/tools/commit.py +138 -0
- truefoundry/autodeploy/tools/docker_build.py +109 -0
- truefoundry/autodeploy/tools/docker_run.py +149 -0
- truefoundry/autodeploy/tools/file_type_counts.py +79 -0
- truefoundry/autodeploy/tools/list_files.py +82 -0
- truefoundry/autodeploy/tools/read_file.py +66 -0
- truefoundry/autodeploy/tools/send_request.py +54 -0
- truefoundry/autodeploy/tools/write_file.py +101 -0
- truefoundry/autodeploy/utils/diff.py +157 -0
- truefoundry/cli/__main__.py +29 -0
- {truefoundry-0.1.2.dist-info → truefoundry-0.2.0rc1.dist-info}/METADATA +10 -1
- truefoundry-0.2.0rc1.dist-info/RECORD +32 -0
- truefoundry-0.1.2.dist-info/RECORD +0 -10
- {truefoundry-0.1.2.dist-info → truefoundry-0.2.0rc1.dist-info}/WHEEL +0 -0
- {truefoundry-0.1.2.dist-info → truefoundry-0.2.0rc1.dist-info}/entry_points.txt +0 -0

truefoundry/autodeploy/cli.py
@@ -0,0 +1,312 @@
+import logging
+import os
+import re
+import sys
+from typing import Dict, Optional
+
+import click
+import docker
+from dotenv import dotenv_values
+
+from truefoundry.autodeploy.exception import GitBinaryNotFoundException
+
+try:
+    from git import GitCommandError, Repo
+    from git.exc import InvalidGitRepositoryError
+except ImportError as ex:
+    raise GitBinaryNotFoundException from ex
+
+import requests
+from openai import OpenAI
+from rich.console import Console
+from rich.prompt import Prompt
+from rich.status import Status
+from servicefoundry import Build, DockerFileBuild, Job, LocalSource, Port, Service
+from servicefoundry.cli.const import COMMAND_CLS
+from servicefoundry.lib.auth.servicefoundry_session import ServiceFoundrySession
+
+from truefoundry.autodeploy.agents.developer import Developer
+from truefoundry.autodeploy.agents.project_identifier import (
+    ComponentType,
+    ProjectIdentifier,
+)
+from truefoundry.autodeploy.agents.tester import Tester
+from truefoundry.autodeploy.constants import (
+    AUTODEPLOY_OPENAI_API_KEY,
+    AUTODEPLOY_OPENAI_BASE_URL,
+    AUTODEPLOY_TFY_BASE_URL,
+)
+from truefoundry.autodeploy.tools.ask import AskQuestion
+from truefoundry.autodeploy.tools.commit import CommitConfirmation
+from truefoundry.autodeploy.tools.docker_run import DockerRun, DockerRunLog
+
+
+def get_openai_client() -> OpenAI:
+    session = ServiceFoundrySession()
+    if AUTODEPLOY_OPENAI_BASE_URL is not None and AUTODEPLOY_OPENAI_API_KEY is not None:
+        return OpenAI(
+            api_key=AUTODEPLOY_OPENAI_API_KEY, base_url=AUTODEPLOY_OPENAI_BASE_URL
+        )
+    try:
+        resp = requests.get(
+            f"{AUTODEPLOY_TFY_BASE_URL}/api/svc/v1/llm-gateway/access-details",
+            headers={
+                "Authorization": f"Bearer {session.access_token}",
+            },
+        )
+        resp.raise_for_status()
+        resp = resp.json()
+        return OpenAI(api_key=resp["jwtToken"], base_url=resp["inferenceBaseURL"])
+    except requests.exceptions.HTTPError as http_error:
+        raise Exception(
+            "Error occured while connecting to servicefoundry server"
+        ) from http_error
+
+
+def deploy_component(
+    workspace_fqn: str,
+    project_root_path: str,
+    dockerfile_path: str,
+    component_type: ComponentType,
+    name: str,
+    env: Dict,
+    command: Optional[str] = None,
+    port: Optional[int] = None,
+):
+    logging.basicConfig(level=logging.INFO)
+
+    if not os.path.exists(os.path.join(project_root_path, dockerfile_path)):
+        raise FileNotFoundError("Dockerfile not found in the project.")
+
+    image = Build(
+        build_spec=DockerFileBuild(
+            dockerfile_path=dockerfile_path,
+            command=command,
+        ),
+        build_source=LocalSource(project_root_path=project_root_path),
+    )
+    if component_type == ComponentType.SERVICE:
+        if port is None:
+            raise ValueError("Port is required for deploying service")
+        app = Service(
+            name=name,
+            image=image,
+            ports=[Port(port=port, expose=False)],
+            env=env,
+        )
+    else:
+        app = Job(name=name, image=image, env=env)
+    app.deploy(workspace_fqn=workspace_fqn)
+
+
+def _parse_env(project_root_path: str, env_path: str) -> Dict:
+    if not os.path.isabs(env_path):
+        env_path = os.path.join(project_root_path, env_path)
+
+    if os.path.exists(env_path):
+        return dotenv_values(env_path)
+
+    raise FileNotFoundError(f"Invalid path {env_path!r}")
+
+
+def _check_repo(project_root_path: str, console: Console):
+    try:
+        repo = Repo(project_root_path)
+        if repo.is_dirty():
+            console.print(
+                "[bold red]Error:[/] The repository has uncommitted changes. Please commit or stash them before proceeding."
+            )
+            sys.exit(1)
+        current_active_branch = repo.active_branch.name
+        console.print(
+            f"[bold magenta]TFY-Agent:[/] Current branch [green]{current_active_branch!r}[/]"
+        )
+        branch_name = Prompt.ask(
+            "[bold magenta]TFY-Agent:[/] Enter a branch name if you want to checkout to a new branch. "
+            f"Press enter to continue on [green]{current_active_branch!r}[/]",
+            console=console,
+        )
+        if branch_name:
+            repo.git.checkout("-b", branch_name)
+            console.print(
+                f"[bold magenta]TFY-Agent:[/] Switched to branch: [green]{repo.active_branch}[/]"
+            )
+        else:
+            console.print(
+                f"[bold magenta]TFY-Agent:[/] Continuing on [green]{current_active_branch!r}[/]"
+            )
+
+    except InvalidGitRepositoryError:
+        console.print(
+            "[red]Error:[/] This operation can only be performed inside a Git repository."
+        )
+        sys.exit(1)
+
+    except GitCommandError as gce:
+        console.print(
+            f"Command execution failed due to the following error:[red]{gce.stderr}[/]".replace(
+                "\n stderr:", ""
+            )
+        )
+        console.print(
+            "[bold red]Error:[/] Unable to switch to the new branch. It's possible that this branch already exists."
+        )
+        sys.exit(1)
+
+
+def _update_status(event, status: Status):
+    if isinstance(event, (AskQuestion, CommitConfirmation)):
+        status.stop()
+
+    if isinstance(
+        event, (Developer.Request, ProjectIdentifier.Response, Tester.Response)
+    ):
+        status.update(
+            "[bold magenta]TFY-Agent[/] is currently building the project. Please wait..."
+        )
+
+    if isinstance(event, ProjectIdentifier.Request):
+        status.update(
+            "[bold magenta]TFY-Agent[/] is currently identifying the project..."
+        )
+
+    if isinstance(event, (Tester.Request, DockerRun.Response)):
+        status.update(
+            "[bold magenta]TFY-Agent[/] is currently running tests on the project..."
+        )
+
+    if isinstance(event, DockerRunLog):
+        status.update(
+            "[bold cyan]Running:[/] [bold magenta]TFY-Agent[/] is executing the Docker container. Press [yellow]control-c[/] to stop waiting for additional logs..."
+        )
+
+
+def _get_default_project_name(project_root_path: str):
+    path = os.path.abspath(project_root_path).rstrip(os.path.sep)
+    name = path.split(os.path.sep)[-1].lower()
+    name = re.sub(r"[^a-z0-9]", "-", name)
+    name = "-".join(n for n in name.split("-") if n)[:30]
+    return name
+
+
+def cli(project_root_path: str, deploy: bool):
+    openai_client = get_openai_client()
+    docker_client = docker.from_env()
+    project_root_path = os.path.abspath(project_root_path)
+    console = Console()
+    console.print(
+        "[bold magenta]TFY-Agent[/]: A tool for building and deploying [magenta]Jobs/Services[/] to the Truefoundry platform."
+    )
+    console.print(
+        "[bold reverse red]DISCLAIMER:[/] The [bold magenta]TFY-Agent[/] may use LLM resources outside of your organization. Please proceed only if you accept this."
+    )
+    console.print("Let's get started! Please answer the following questions:")
+    console.print(
+        "[bold cyan]Note:[/] All changes will be committed to a new branch. Please ensure you have a repository."
+    )
+    _check_repo(project_root_path=project_root_path, console=console)
+
+    component_type = ComponentType[
+        Prompt.ask(
+            "[bold magenta]TFY-Agent:[/] Is your project a Service? Or a Job?",
+            choices=[k.value.lower() for k in ComponentType],
+            console=console,
+            default="service",
+        ).upper()
+    ]
+    while True:
+        name = Prompt.ask(
+            "[bold magenta]TFY-Agent:[/] Name of deployment",
+            console=console,
+            default=_get_default_project_name(project_root_path),
+        )
+        if not re.match(r"^[a-z][a-z0-9\-]{1,30}[a-z0-9]$", name):
+            console.print(
+                "[bold magenta]TFY-Agent:[/] The name should be between 2-30 alphaneumaric"
+                " characters and '-'. The first character should not be a digit."
+            )
+        else:
+            break
+    command = Prompt.ask(
+        "[bold magenta]TFY-Agent:[/] Command to run the application",
+        console=console,
+        show_default=False,
+        default=None,
+    )
+
+    env_path = Prompt.ask(
+        "[bold magenta]TFY-Agent:[/] Enter .env file location for environment variables, "
+        "or press [green]Enter[/] to skip.",
+        console=console,
+    )
+    while True:
+        try:
+            env = _parse_env(project_root_path, env_path) if env_path else {}
+            break
+        except FileNotFoundError:
+            console.print("[red]Invalid location for .env[/]")
+            env_path = Prompt.ask(
+                "[bold magenta]TFY-Agent:[/]Please provide the correct path,"
+                "or press [green]Enter[/] to skip.",
+                console=console,
+            )
+            continue
+    status = console.status(
+        "[bold cyan]Starting up:[/] [bold magenta]TFY-Agent[/] is initializing. Please wait..."
+    )
+    with status:
+        developer = Developer(
+            project_root_path=project_root_path,
+            openai_client=openai_client,
+            docker_client=docker_client,
+            environment=env,
+        )
+        developer_run = developer.run(developer.Request(command=command, name=name))
+        inp = None
+        response = None
+        while True:
+            try:
+                status.start()
+                event = developer_run.send(inp)
+                _update_status(event=event, status=status)
+                inp = event.render(console)
+            except StopIteration as ex:
+                response = ex.value
+                break
+
+    if deploy:
+        workspace_fqn = Prompt.ask(
+            "Enter the Workspace FQN where you would like to deploy"
+        )
+        deploy_component(
+            workspace_fqn=workspace_fqn,
+            project_root_path=project_root_path,
+            dockerfile_path=response.dockerfile_path,
+            name=name,
+            component_type=component_type,
+            env=env,
+            command=response.command,
+            port=response.port,
+        )
+
+
+@click.command(name="auto-deploy", cls=COMMAND_CLS)
+@click.option(
+    "--path", type=click.STRING, required=True, help="The root path of the project"
+)
+@click.option(
+    "--deploy",
+    type=click.BOOL,
+    is_flag=True,
+    default=False,
+    show_default=True,
+    help="Deploy the project after successfully building it.",
+)
+def autodeploy_cli(path: str, deploy: bool):
+    """
+    Build and deploy projects using Truefoundry
+    """
+    cli(
+        project_root_path=path,
+        deploy=deploy,
+    )
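
For orientation (not part of the diff): the module above registers a click command named auto-deploy, with --path and --deploy options, that drives the interactive agent loop. A minimal sketch of exercising that command in isolation with click's test runner, assuming this release and its git, docker, and openai dependencies are installed and the user is logged in; the command prompts interactively on stdin.

# Illustrative sketch only; not part of the released package.
from click.testing import CliRunner

from truefoundry.autodeploy.cli import autodeploy_cli

runner = CliRunner()
# Build the project at the given path; add "--deploy" to also deploy after a successful build.
result = runner.invoke(autodeploy_cli, ["--path", "."])
print(result.exit_code)
print(result.output)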

truefoundry/autodeploy/constants.py
@@ -0,0 +1,12 @@
+import os
+
+DEBUG = os.getenv("AUTODEPLOY_DEBUG", "")
+
+AUTODEPLOY_TFY_BASE_URL = os.getenv(
+    "AUTODEPLOY_TFY_BASE_URL", "https://app.devtest.truefoundry.tech"
+).strip("/")
+AUTODEPLOY_OPENAI_BASE_URL = os.environ.get("AUTODEPLOY_OPENAI_BASE_URL")
+AUTODEPLOY_OPENAI_API_KEY = os.environ.get("AUTODEPLOY_OPENAI_API_KEY")
+AUTODEPLOY_MODEL_NAME = os.environ.get(
+    "AUTODEPLOY_MODEL_NAME", "auto-deploy-openai/gpt-4-turbo-2024-04-09"
+)
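
As a side note (not part of the diff): these constants are read from environment variables once, at import time, so overrides must be in the environment before truefoundry.autodeploy.constants is first imported. A small sketch, using a hypothetical base URL:

# Illustrative sketch only; the base URL below is hypothetical.
import os

os.environ["AUTODEPLOY_TFY_BASE_URL"] = "https://app.example-org.truefoundry.tech/"
os.environ["AUTODEPLOY_DEBUG"] = "1"  # any non-empty value switches the autodeploy logger to DEBUG

from truefoundry.autodeploy import constants

print(constants.AUTODEPLOY_TFY_BASE_URL)  # surrounding "/" characters are stripped
print(constants.AUTODEPLOY_MODEL_NAME)    # defaults to "auto-deploy-openai/gpt-4-turbo-2024-04-09"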

truefoundry/autodeploy/logger.py
@@ -0,0 +1,13 @@
+import logging
+
+from rich.logging import RichHandler
+
+from truefoundry.autodeploy.constants import DEBUG
+
+logger = logging.getLogger("autodeploy")
+
+level = logging.DEBUG if DEBUG else logging.NOTSET
+handler = RichHandler(level=level, show_path=False)
+handler.setLevel(level)
+logger.addHandler(handler)
+logger.setLevel(level)

truefoundry/autodeploy/tools/__init__.py
@@ -0,0 +1,26 @@
+from truefoundry.autodeploy.tools.ask import Ask
+from truefoundry.autodeploy.tools.base import Event, RequestEvent, ResponseEvent, Tool
+from truefoundry.autodeploy.tools.commit import Commit
+from truefoundry.autodeploy.tools.docker_build import DockerBuild
+from truefoundry.autodeploy.tools.docker_run import DockerRun
+from truefoundry.autodeploy.tools.file_type_counts import FileTypeCounts
+from truefoundry.autodeploy.tools.list_files import ListFiles
+from truefoundry.autodeploy.tools.read_file import ReadFile
+from truefoundry.autodeploy.tools.send_request import SendRequest
+from truefoundry.autodeploy.tools.write_file import WriteFile
+
+__all__ = [
+    "Ask",
+    "Tool",
+    "DockerBuild",
+    "DockerRun",
+    "FileTypeCounts",
+    "ListFiles",
+    "ReadFile",
+    "SendRequest",
+    "WriteFile",
+    "Commit",
+    "RequestEvent",
+    "ResponseEvent",
+    "Event",
+]

truefoundry/autodeploy/tools/ask.py
@@ -0,0 +1,32 @@
+from __future__ import annotations
+
+from typing import Iterable
+
+from rich.console import Console
+
+from truefoundry.autodeploy.tools.base import Event, RequestEvent, ResponseEvent, Tool
+
+
+class AskQuestion(Event):
+    question: str
+
+    def render(self, console: Console) -> str:
+        console.print(f"[bold magenta]TFY-Agent:[/] {self.question}")
+        response = console.input("[bold green]You:[/] ")
+        return response
+
+
+class Ask(Tool):
+    description = """
+    Ask a question to the user.
+    """
+
+    class Request(RequestEvent):
+        question: str
+
+    class Response(ResponseEvent):
+        response: str
+
+    def run(self, request: Ask.Request) -> Iterable[Event]:
+        response = yield AskQuestion(**request.model_dump())
+        return Ask.Response(response=response)
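
For context (not part of the diff): tools follow a generator protocol, seen in cli.py above, where the caller renders each yielded event and sends the result back in until StopIteration carries the final Response. A minimal sketch driving Ask by hand:

# Illustrative sketch only; drives the Ask tool's generator manually.
from rich.console import Console

from truefoundry.autodeploy.tools.ask import Ask

console = Console()
tool = Ask()
gen = tool.run(Ask.Request(question="Which port does the app listen on?"))
inp = None
while True:
    try:
        event = gen.send(inp)        # the first send must be None to start the generator
        inp = event.render(console)  # AskQuestion.render prompts the user and returns the answer
    except StopIteration as ex:
        response = ex.value          # Ask.Response(response=...)
        break
print(response.response)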

truefoundry/autodeploy/tools/base.py
@@ -0,0 +1,31 @@
+from __future__ import annotations
+
+from typing import Any, ClassVar, Generator, Optional, Protocol, Type
+
+from pydantic import BaseModel
+from rich.console import Console
+
+
+class Event(BaseModel):
+    def render(self, _: Console) -> Optional[Any]: ...
+
+
+class RequestEvent(Event): ...
+
+
+class ResponseEvent(Event): ...
+
+
+class Tool(Protocol):
+    description: ClassVar[str]
+    Request: ClassVar[Type[RequestEvent]]
+    Response: ClassVar[Type[ResponseEvent]]
+
+    def run(self, request: RequestEvent) -> Generator[Event, Any, ResponseEvent]: ...
+
+
+class Message(Event):
+    message: Any
+
+    def render(self, console: Console):
+        console.print(self.message)
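
To illustrate the contract defined above (not part of the diff): a tool only needs a description, nested Request/Response events, and a run generator that yields Events and returns a ResponseEvent. A sketch of a hypothetical tool conforming to the protocol:

# Hypothetical example tool; not part of the released package.
from typing import Any, Generator

from truefoundry.autodeploy.tools.base import (
    Event,
    Message,
    RequestEvent,
    ResponseEvent,
    Tool,
)


class Echo(Tool):
    description = """
    Echo the given text back to the caller.
    """

    class Request(RequestEvent):
        text: str

    class Response(ResponseEvent):
        text: str

    def run(self, request: "Echo.Request") -> Generator[Event, Any, ResponseEvent]:
        # Yield a renderable Message event, then finish with the Response.
        yield Message(message=f"[bold magenta]Echo:[/] {request.text}")
        return Echo.Response(text=request.text)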

truefoundry/autodeploy/tools/commit.py
@@ -0,0 +1,138 @@
+from __future__ import annotations
+
+import os
+import tempfile
+from typing import Any, Generator, Optional
+
+from git import Repo
+from pydantic import Field
+from rich.console import Console
+from rich.padding import Padding
+from rich.prompt import Confirm
+
+from truefoundry.autodeploy.logger import logger
+from truefoundry.autodeploy.tools.base import (
+    Event,
+    Message,
+    RequestEvent,
+    ResponseEvent,
+    Tool,
+)
+from truefoundry.autodeploy.utils.diff import LLMDiff
+
+
+class CommitConfirmation(Event):
+    patch: str
+    commit_message: str
+
+    def render(self, console: Console) -> Optional["Commit.Response"]:
+        console.print("[bold magenta]TFY Agent[/] wants to make a commit,", end=" ")
+        console.print(f"with Commit Message: [green]{self.commit_message}[/]")
+        console.print("[yellow]Displaying changes to be made by the patch[/]")
+        console.print(Padding.indent(renderable=LLMDiff(self.patch), level=2))
+
+        response = Confirm.ask(
+            "Apply patch?",
+        )
+        if not response:
+            description = console.input(
+                "You chose to cancel. Can you provide a reason why? [green]>> "
+            )
+            return Commit.Response(
+                cancellation_reason=description, error="Operation cancelled by user."
+            )
+
+
+class Commit(Tool):
+    description = """
+    Git commit.
+    """
+
+    class Request(RequestEvent):
+        patch: str = Field(
+            ...,
+            description="""
+            Patch file content.
+
+            The format starts with the same two-line header as the context format, except that the original file is preceded by "---" and the new file is preceded by "+++". Following this is one change hunk that contain the line differences in the file. Addition lines are preceded by a plus sign, and deletion lines are preceded by a minus sign.
+            A hunk begins with range information and is immediately followed with the line additions, line deletions. The range information is surrounded by double at signs, and combines onto a single line what appears on two lines in the context format (above). The format of the range information line is as follows:
+            @@ -l,s +l,s @@ optional section heading
+            The hunk range information contains two hunk ranges. The range for the hunk of the original file is preceded by a minus symbol, and the range for the new file is preceded by a plus symbol. Each hunk range is of the format l,s where l is the starting line number and s is the number of lines the change hunk applies to for each respective file. In many versions of GNU diff, each range can omit the comma and trailing value s, in which case s defaults to 1. Note that the only really interesting value is the l line number of the first range; all the other values can be computed from the diff.
+            The hunk range for the original should be the sum of deletion (including changed) hunk lines. The hunk range for the new file should be a sum of addition (including changed) hunk lines. If hunk size information does not correspond with the number of lines in the hunk, then the diff could be considered invalid and be rejected.
+            If a line is modified, it is represented as a deletion and addition. Example:
+            -check this dokument. On
+            +check this document. On
+
+            Example:
+            --- a/path/to/original
+            +++ b/path/to/new
+            @@ -8,13 +14,8 @@
+            -This paragraph contains
+            -text that is outdated.
+            -It will be deleted in the
+            -near future.
+            -
+            It is important to spell
+            -check this dokument. On
+            +check this document. On
+
+
+            A hunk should always contain some modifications.
+            Always include a new line in the end of the patch.
+            There can be only one hunk for a single file in a patch.
+            Minimize number of lines that are unchanged in a hunk.
+            Within hunk, you cannot ignore lines that are not changing.
+            Focus on producing smaller focused hunks.
+            To produce smaller hunks, you can split changes in multiple commits.
+            Do not use context lines in the hunk.
+
+            This will be applied using the `git apply --recount --unidiff-zero` command.
+            """,
+        )
+        commit_message: str = Field(
+            ...,
+            description="""
+            The commit message should be describing the patch and the reason behind the patch.
+            The patch should have any changes that is not described in the commit message.
+            """,
+        )
+
+    class Response(ResponseEvent):
+        cancellation_reason: Optional[str] = Field(
+            None, description="Operation cancelled by user"
+        )
+        error: Optional[str] = Field(
+            None,
+            description="Error while applying patch.",
+        )
+
+    def __init__(self, project_root_path: str):
+        self.project_root_path = project_root_path
+        self.repo = Repo(path=self.project_root_path, search_parent_directories=False)
+
+    def run(
+        self,
+        request: Commit.Request,
+    ) -> Generator[Event, Any, ResponseEvent]:
+        fp = tempfile.NamedTemporaryFile(mode="w", delete=False)
+        try:
+            interaction_response = yield CommitConfirmation(**request.model_dump())
+            if isinstance(interaction_response, Commit.Response):
+                return interaction_response
+            fp.write(request.patch)
+            fp.close()
+            self.repo.git.apply(["--recount", "--unidiff-zero", fp.name], index=True)
+            self.repo.index.commit(message=request.commit_message)
+            yield Message(
+                message=f"[bold green]Success:[/] Changes committed with the message: '{request.commit_message}'"
+            )
+            return Commit.Response()
+        except Exception as ex:
+            logger.exception("")
+            yield Message(
+                message="[red]Alert:[/] Commit failed. Attempting to retry..."
+            )
+            return Commit.Response(error=str(ex))
+        finally:
+            if os.path.exists(fp.name):
+                os.remove(fp.name)
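
For context (not part of the diff): the Request.patch description above asks the model for zero-context unified diffs that `git apply --recount --unidiff-zero` can consume. A sketch of driving the tool by hand with a tiny patch of that shape; the file name and contents are hypothetical, and the file must already exist in the repository:

# Illustrative sketch only; the patch contents and paths are hypothetical.
from rich.console import Console

from truefoundry.autodeploy.tools.commit import Commit

patch = (
    "--- a/README.md\n"
    "+++ b/README.md\n"
    "@@ -1,1 +1,1 @@\n"
    "-my-service\n"
    "+my-service (auto-deploy branch)\n"
)
console = Console()
tool = Commit(project_root_path=".")
gen = tool.run(Commit.Request(patch=patch, commit_message="Update README title"))
inp = None
while True:
    try:
        event = gen.send(inp)        # a CommitConfirmation event first, then a success Message
        inp = event.render(console)  # rendering asks the user whether to apply the patch
    except StopIteration as ex:
        result = ex.value            # Commit.Response; .error is set if `git apply` failed
        break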

truefoundry/autodeploy/tools/docker_build.py
@@ -0,0 +1,109 @@
+from __future__ import annotations
+
+import itertools
+import re
+from typing import Any, Generator, Iterable, Optional
+
+import docker
+from docker.models.images import BuildError, json_stream
+from pydantic import Field
+from rich.console import Console, ConsoleOptions, RenderResult
+from rich.padding import Padding
+from rich.text import Text
+
+from truefoundry.autodeploy.tools.base import (
+    Event,
+    Message,
+    RequestEvent,
+    ResponseEvent,
+    Tool,
+)
+
+
+class DockerBuildLog(Event):
+    log: str
+
+    def render(self, console: Console):
+        console.print(Padding.indent(renderable=Text.from_ansi(self.log), level=2))
+
+
+# vendored from
+# https://github.com/docker/docker-py/blob/9ad4bddc9ee23f3646f256280a21ef86274e39bc/docker/models/images.py#L220
+def _build(docker_client: docker.DockerClient, **kwargs) -> Iterable[DockerBuildLog]:
+    resp = docker_client.images.client.api.build(**kwargs)
+    if isinstance(resp, str):
+        return docker_client.images.get(resp)
+    last_event = None
+    image_id = None
+    result_stream, internal_stream = itertools.tee(json_stream(resp))
+    for chunk in internal_stream:
+        if "error" in chunk:
+            raise BuildError(chunk["error"], result_stream)
+        if "stream" in chunk:
+            yield DockerBuildLog(log=chunk["stream"])
+            match = re.search(
+                r"(^Successfully built |sha256:)([0-9a-f]+)$", chunk["stream"]
+            )
+            if match:
+                image_id = match.group(2)
+        last_event = chunk
+    if image_id:
+        return None
+    raise BuildError(last_event or "Unknown", result_stream)
+
+
+class DockerBuild(Tool):
+    description = """
+    Build a docker image.
+    """
+
+    class Request(RequestEvent):
+        dockerfile_path: str = Field(
+            ...,
+            pattern=r"^[a-zA-Z0-9\.]{1}.*$",
+            description="Dockerfile path. ",
+        )
+        image_tag: str = Field(..., description="image tag")
+
+    class Response(ResponseEvent):
+        error: Optional[str] = Field(None, description="Error raised while building")
+        build_logs: Optional[str] = Field(None, description="Build logs")
+
+        def __rich_console__(
+            self, console: Console, options: ConsoleOptions
+        ) -> RenderResult:
+            none_text = "[italic magenta]None[/]"
+            error_text = (
+                f"[green]'{self.error}'[/]" if self.error is not None else none_text
+            )
+            yield Text.from_markup("[bold magenta]Response[/](")
+            if self.build_logs is not None:
+                yield Text.from_markup(' [yellow]build_logs[/]= "')
+                yield Text.from_ansi(self.build_logs)
+                yield Text.from_markup('"')
+            else:
+                yield Text.from_markup(f" [yellow]build_logs[/]={none_text}")
+            yield Text.from_markup(f" [yellow]error[/]={error_text}\n)")
+
+    def __init__(self, project_root_path: str, docker_client: docker.DockerClient):
+        self.project_root_path = project_root_path
+        self.docker_client = docker_client
+
+    def run(self, request: DockerBuild.Request) -> Generator[Event, Any, ResponseEvent]:
+        yield Message(message="[bold cyan]Processing:[/] Building Docker image...")
+        yield Message(message="[bold yellow]Docker build logs:[/]")
+        try:
+            for message in _build(
+                self.docker_client,
+                path=self.project_root_path,
+                tag=request.image_tag,
+            ):
+                yield message
+            return DockerBuild.Response()
+        except BuildError as ex:
+            logs = ""
+            for log_line in ex.build_log:
+                logs += log_line.get("stream", "")
+            return DockerBuild.Response(error=str(ex), build_logs=logs[-800:])
+        except (docker.errors.APIError, docker.errors.DockerException) as ex:
+            return DockerBuild.Response(error=str(ex), build_logs="")
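
For context (not part of the diff): DockerBuild streams Message and DockerBuildLog events while the image builds and finishes with a Response whose error/build_logs fields the agent can inspect. A minimal sketch, assuming a reachable local Docker daemon and a project with a Dockerfile in the current directory; the image tag is hypothetical:

# Illustrative sketch only; assumes a reachable Docker daemon.
import docker
from rich.console import Console

from truefoundry.autodeploy.tools.docker_build import DockerBuild

console = Console()
tool = DockerBuild(project_root_path=".", docker_client=docker.from_env())
gen = tool.run(
    DockerBuild.Request(dockerfile_path="Dockerfile", image_tag="autodeploy-example:latest")
)
inp = None
while True:
    try:
        event = gen.send(inp)        # Message and DockerBuildLog events stream the build output
        inp = event.render(console)
    except StopIteration as ex:
        result = ex.value            # DockerBuild.Response; .error/.build_logs are set on failure
        break
console.print(result)                # uses Response.__rich_console__ for pretty output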