truefoundry 0.9.1__py3-none-any.whl → 0.9.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of truefoundry might be problematic. Click here for more details.
- truefoundry/_ask/__init__.py +0 -0
- truefoundry/{deploy/cli/commands/ask_command.py → _ask/cli.py} +7 -11
- truefoundry/{deploy/lib/clients/ask_client.py → _ask/client.py} +314 -199
- truefoundry/_ask/llm_utils.py +344 -0
- truefoundry/cli/__main__.py +1 -1
- truefoundry/cli/display_util.py +1 -63
- truefoundry/cli/util.py +32 -1
- truefoundry/common/constants.py +5 -6
- truefoundry/deploy/_autogen/models.py +3 -4
- truefoundry/deploy/cli/commands/__init__.py +16 -1
- truefoundry/deploy/cli/commands/deploy_command.py +15 -1
- truefoundry/deploy/cli/commands/kubeconfig_command.py +1 -2
- truefoundry/deploy/cli/commands/utils.py +3 -33
- truefoundry/deploy/lib/clients/servicefoundry_client.py +2 -0
- truefoundry/deploy/v2/lib/deploy.py +2 -0
- truefoundry/deploy/v2/lib/deployable_patched_models.py +71 -7
- {truefoundry-0.9.1.dist-info → truefoundry-0.9.2.dist-info}/METADATA +3 -2
- {truefoundry-0.9.1.dist-info → truefoundry-0.9.2.dist-info}/RECORD +20 -18
- {truefoundry-0.9.1.dist-info → truefoundry-0.9.2.dist-info}/WHEEL +0 -0
- {truefoundry-0.9.1.dist-info → truefoundry-0.9.2.dist-info}/entry_points.txt +0 -0
|
@@ -0,0 +1,344 @@
|
|
|
1
|
+
import copy
|
|
2
|
+
import datetime
|
|
3
|
+
import json
|
|
4
|
+
import re
|
|
5
|
+
import warnings
|
|
6
|
+
from abc import ABC, abstractmethod
|
|
7
|
+
from collections.abc import Iterable
|
|
8
|
+
from dataclasses import dataclass
|
|
9
|
+
from typing import Any, List, Literal, Optional, cast
|
|
10
|
+
|
|
11
|
+
from openai.types.chat import (
|
|
12
|
+
ChatCompletionMessageParam,
|
|
13
|
+
ChatCompletionMessageToolCall,
|
|
14
|
+
ChatCompletionMessageToolCallParam,
|
|
15
|
+
ChatCompletionToolParam,
|
|
16
|
+
)
|
|
17
|
+
from rich.console import Console, Group
|
|
18
|
+
from rich.markdown import Markdown
|
|
19
|
+
from rich.panel import Panel
|
|
20
|
+
from rich.text import Text
|
|
21
|
+
|
|
22
|
+
# Taken from https://github.com/pydantic/pydantic-ai/blob/222bec41e28fb96b49e71609c43b683d97d1dc97/pydantic_ai_slim/pydantic_ai/models/gemini.py#L791
|
|
23
|
+
|
|
24
|
+
JsonSchema = dict[str, Any]
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
@dataclass(init=False)
class WalkJsonSchema(ABC):
    """Walks a JSON schema, applying transformations to it at each level.

    Note: We may eventually want to rework tools to build the JSON schema from the type directly, using a subclass of
    pydantic.json_schema.GenerateJsonSchema, rather than making use of this machinery.
    """

    def __init__(
        self,
        schema: JsonSchema,
        *,
        prefer_inlined_defs: bool = False,
        simplify_nullable_unions: bool = False,
    ):
        # schema: root JSON schema to transform; `walk()` deep-copies it, so the
        #     original is never mutated.
        # prefer_inlined_defs: when True, `$ref`s into `$defs` are inlined where
        #     possible instead of keeping a `$defs` table.
        # simplify_nullable_unions: when True, a two-member union of
        #     [X, {"type": "null"}] is collapsed into X with `nullable: True`.
        self.schema = schema
        self.prefer_inlined_defs = prefer_inlined_defs
        self.simplify_nullable_unions = simplify_nullable_unions

        self.defs: dict[str, JsonSchema] = self.schema.get("$defs", {})
        # Stack of $defs keys currently being inlined; used to detect cycles.
        self.refs_stack: list[str] = []
        # Keys of defs that are (transitively) self-referential; these cannot be inlined.
        self.recursive_refs = set[str]()

    @abstractmethod
    def transform(self, schema: JsonSchema) -> JsonSchema:
        """Make changes to the schema."""
        return schema

    def walk(self) -> JsonSchema:
        """Return a transformed deep copy of the schema passed to ``__init__``."""
        schema = copy.deepcopy(self.schema)

        # First, handle everything but $defs:
        schema.pop("$defs", None)
        handled = self._handle(schema)

        if not self.prefer_inlined_defs and self.defs:
            # Keep the $defs table, transforming each definition in place.
            handled["$defs"] = {k: self._handle(v) for k, v in self.defs.items()}

        elif self.recursive_refs:  # pragma: no cover
            # If we are preferring inlined defs and there are recursive refs, we _have_ to use a $defs+$ref structure
            # We try to use whatever the original root key was, but if it is already in use,
            # we modify it to avoid collisions.
            defs = {key: self.defs[key] for key in self.recursive_refs}
            root_ref = self.schema.get("$ref")
            root_key = None if root_ref is None else re.sub(r"^#/\$defs/", "", root_ref)
            if root_key is None:
                root_key = self.schema.get("title", "root")
                while root_key in defs:
                    # Modify the root key until it is not already in use
                    root_key = f"{root_key}_root"

            defs[root_key] = handled
            return {"$defs": defs, "$ref": f"#/$defs/{root_key}"}

        return handled

    def _handle(self, schema: JsonSchema) -> JsonSchema:
        """Transform one schema node, recursing into children by structure type."""
        # Number of $refs inlined at this level, so refs_stack can be unwound on exit.
        nested_refs = 0
        if self.prefer_inlined_defs:
            # Follow chains of $refs, replacing the node with its definition.
            while ref := schema.get("$ref"):
                key = re.sub(r"^#/\$defs/", "", ref)
                if key in self.refs_stack:
                    self.recursive_refs.add(key)
                    break  # recursive ref can't be unpacked
                self.refs_stack.append(key)
                nested_refs += 1

                def_schema = self.defs.get(key)
                if def_schema is None:  # pragma: no cover
                    raise ValueError(f"Could not find $ref definition for {key}")
                schema = def_schema

        # Handle the schema based on its type / structure
        type_ = schema.get("type")
        if type_ == "object":
            schema = self._handle_object(schema)
        elif type_ == "array":
            schema = self._handle_array(schema)
        elif type_ is None:
            # No explicit type: the node may be a union (anyOf / oneOf).
            schema = self._handle_union(schema, "anyOf")
            schema = self._handle_union(schema, "oneOf")

        # Apply the base transform
        schema = self.transform(schema)

        if nested_refs > 0:
            # Unwind the refs we pushed while inlining at this level.
            self.refs_stack = self.refs_stack[:-nested_refs]

        return schema

    def _handle_object(self, schema: JsonSchema) -> JsonSchema:
        """Recurse into properties, additionalProperties and patternProperties."""
        if properties := schema.get("properties"):
            handled_properties = {}
            for key, value in properties.items():
                handled_properties[key] = self._handle(value)
            schema["properties"] = handled_properties

        if (additional_properties := schema.get("additionalProperties")) is not None:
            if isinstance(additional_properties, bool):
                # Boolean form (allow/forbid extras) has no sub-schema to recurse into.
                schema["additionalProperties"] = additional_properties
            else:
                schema["additionalProperties"] = self._handle(additional_properties)

        if (pattern_properties := schema.get("patternProperties")) is not None:
            handled_pattern_properties = {}
            for key, value in pattern_properties.items():
                handled_pattern_properties[key] = self._handle(value)
            schema["patternProperties"] = handled_pattern_properties

        return schema

    def _handle_array(self, schema: JsonSchema) -> JsonSchema:
        """Recurse into prefixItems and items."""
        if prefix_items := schema.get("prefixItems"):
            schema["prefixItems"] = [self._handle(item) for item in prefix_items]

        if items := schema.get("items"):
            schema["items"] = self._handle(items)

        return schema

    def _handle_union(
        self, schema: JsonSchema, union_kind: Literal["anyOf", "oneOf"]
    ) -> JsonSchema:
        """Recurse into union members, optionally simplifying nullable unions."""
        members = schema.get(union_kind)
        if not members:
            return schema

        handled = [self._handle(member) for member in members]

        # convert nullable unions to nullable types
        if self.simplify_nullable_unions:
            handled = self._simplify_nullable_union(handled)

        if len(handled) == 1:
            # In this case, no need to retain the union
            return handled[0]

        # If we have keys besides the union kind (such as title or discriminator), keep them without modifications
        schema = schema.copy()
        schema[union_kind] = handled
        return schema

    @staticmethod
    def _simplify_nullable_union(cases: list[JsonSchema]) -> list[JsonSchema]:
        # TODO: Should we move this to relevant subclasses? Or is it worth keeping here to make reuse easier?
        if len(cases) == 2 and {"type": "null"} in cases:
            # Find the non-null schema
            non_null_schema = next(
                (item for item in cases if item != {"type": "null"}),
                None,
            )
            if non_null_schema:
                # Create a new schema based on the non-null part, mark as nullable
                new_schema = copy.deepcopy(non_null_schema)
                new_schema["nullable"] = True
                return [new_schema]
            else:  # pragma: no cover
                # they are both null, so just return one of them
                return [cases[0]]

        return cases  # pragma: no cover
|
|
188
|
+
|
|
189
|
+
|
|
190
|
+
class _GeminiJsonSchema(WalkJsonSchema):
    """Transforms the JSON Schema from Pydantic to be suitable for Gemini.

    Gemini which [supports](https://ai.google.dev/gemini-api/docs/function-calling#function_declarations)
    a subset of OpenAPI v3.0.3.

    Specifically:
    * gemini doesn't allow the `title` keyword to be set
    * gemini doesn't allow `$defs` — we need to inline the definitions where possible
    """

    def __init__(self, schema: JsonSchema):
        super().__init__(
            schema, prefer_inlined_defs=True, simplify_nullable_unions=True
        )

    def transform(self, schema: JsonSchema) -> JsonSchema:  # noqa: C901
        # Note: we need to remove `additionalProperties: False` since it is currently mishandled by Gemini
        additional_properties = schema.pop(
            "additionalProperties", None
        )  # popped here; re-added to `original_schema` below so the warning shows the full node
        if additional_properties:
            original_schema = {**schema, "additionalProperties": additional_properties}
            warnings.warn(
                "`additionalProperties` is not supported by Gemini; it will be removed from the tool JSON schema."
                f" Full schema: {self.schema}\n\n"
                f"Source of additionalProperties within the full schema: {original_schema}\n\n"
                "If this came from a field with a type like `dict[str, MyType]`, that field will always be empty.\n\n"
                "If Google's APIs are updated to support this properly, please create an issue on the PydanticAI GitHub"
                " and we will fix this behavior.",
                UserWarning,
                stacklevel=2,
            )

        # Strip keywords Gemini's OpenAPI subset does not accept.
        schema.pop("title", None)
        schema.pop("default", None)
        schema.pop("$schema", None)
        if (const := schema.pop("const", None)) is not None:  # pragma: no cover
            # Gemini doesn't support const, but it does support enum with a single value
            schema["enum"] = [const]
        schema.pop("discriminator", None)
        schema.pop("examples", None)

        # TODO: Should we use the trick from pydantic_ai.models.openai._OpenAIJsonSchema
        # where we add notes about these properties to the field description?
        schema.pop("exclusiveMaximum", None)
        schema.pop("exclusiveMinimum", None)

        # Gemini only supports string enums, so we need to convert any enum values to strings.
        # Pydantic will take care of transforming the transformed string values to the correct type.
        if enum := schema.get("enum"):
            schema["type"] = "string"
            schema["enum"] = [str(val) for val in enum]

        type_ = schema.get("type")
        if "oneOf" in schema and "type" not in schema:  # pragma: no cover
            # This gets hit when we have a discriminated union
            # Gemini returns an API error in this case even though it says in its error message it shouldn't...
            # Changing the oneOf to an anyOf prevents the API error and I think is functionally equivalent
            schema["anyOf"] = schema.pop("oneOf")

        if type_ == "string" and (fmt := schema.pop("format", None)):
            # `format` is dropped; preserve the information inside the description instead.
            description = schema.get("description")
            if description:
                schema["description"] = f"{description} (format: {fmt})"
            else:
                schema["description"] = f"Format: {fmt}"

        if "$ref" in schema:
            # Any $ref surviving inlining is necessarily recursive (see WalkJsonSchema).
            raise ValueError(
                f"Recursive `$ref`s in JSON Schema are not supported by Gemini: {schema['$ref']}"
            )

        if "prefixItems" in schema:
            # prefixItems is not currently supported in Gemini, so we convert it to items for best compatibility
            prefix_items = schema.pop("prefixItems")
            items = schema.get("items")
            unique_items = [items] if items is not None else []
            for item in prefix_items:
                if item not in unique_items:
                    unique_items.append(item)
            if len(unique_items) > 1:  # pragma: no cover
                # Heterogeneous tuple: widen `items` to a union of the observed schemas.
                schema["items"] = {"anyOf": unique_items}
            elif len(unique_items) == 1:
                schema["items"] = unique_items[0]
            schema.setdefault("minItems", len(prefix_items))
            if items is None:
                # Without a general `items` schema the array length is fixed.
                schema.setdefault("maxItems", len(prefix_items))

        return schema
|
|
280
|
+
|
|
281
|
+
|
|
282
|
+
def translate_tools_for_gemini(
    tools: Optional[List[ChatCompletionToolParam]],
) -> Optional[List[ChatCompletionToolParam]]:
    """Return a deep copy of *tools* whose function parameter schemas have been
    rewritten for Gemini compatibility via ``_GeminiJsonSchema``.

    ``None`` is passed through unchanged, and the input list is never mutated.
    """
    if tools is None:
        return None

    translated = copy.deepcopy(tools)
    for entry in translated:
        function_spec = entry["function"]
        if "parameters" in function_spec:
            schema = function_spec["parameters"]
            function_spec["parameters"] = _GeminiJsonSchema(schema).walk()

    return translated
|
|
295
|
+
|
|
296
|
+
|
|
297
|
+
def log_chat_completion_message(
    message: ChatCompletionMessageParam, console: Console
) -> None:
    """Render a single chat-completion message to *console* inside a timestamped panel.

    JSON string content is pretty-printed as a fenced ``json`` block; any other
    content is rendered as Markdown. Each tool call is rendered as a fenced
    ``python`` block. A message with no content and no tool calls prints nothing.
    """
    timestamp = datetime.datetime.now().astimezone().strftime("%Y-%m-%d %H:%M:%S")
    code_theme = "github-dark"
    try:
        _content = message.get("content") or ""
        # json.loads raises TypeError for non-str content and ValueError for non-JSON text.
        message_content = f"```json\n{json.dumps(json.loads(_content), indent=2)}\n```"
    except (TypeError, ValueError):
        message_content = str(message.get("content") or "")

    tool_calls: Iterable[ChatCompletionMessageToolCallParam] = cast(
        list[ChatCompletionMessageToolCallParam],
        message.get("tool_calls", []),
    )

    rendered_content: list[Markdown | Text] = []

    if bool(message_content.strip()):
        rendered_content.append(Markdown(markup=message_content, code_theme=code_theme))

    for call in tool_calls:
        # Tool calls may arrive either as `ChatCompletionMessageToolCall` model
        # instances or as the plain dict (TypedDict) shape that the declared
        # `ChatCompletionMessageToolCallParam` type describes. The previous
        # `assert isinstance(...)` rejected the dict form and was stripped
        # entirely under `python -O`; handle both shapes explicitly instead.
        if isinstance(call, ChatCompletionMessageToolCall):
            name = call.function.name
            args = call.function.arguments
        else:
            function = call.get("function") or {}
            name = function.get("name", "")
            args = function.get("arguments", "")
        rendered_content.append(
            Text.from_markup(
                "[bold magenta]Tool Calls:[/bold magenta]\n",
                overflow="fold",
            )
        )
        rendered_content.append(
            Markdown(markup=f"```python\n▶ {name}({args})\n```", code_theme=code_theme)
        )

    if not rendered_content:
        return

    panel = Panel(
        Group(*rendered_content, fit=True),
        title=f"[bold blue]{timestamp}[/bold blue]",
        title_align="left",
        border_style="bright_blue",
        padding=(1, 2),
        expand=True,
        width=console.width,
    )
    console.print(panel)
|
truefoundry/cli/__main__.py
CHANGED
|
@@ -4,6 +4,7 @@ import sys
|
|
|
4
4
|
import rich_click as click
|
|
5
5
|
|
|
6
6
|
from truefoundry import logger
|
|
7
|
+
from truefoundry._ask.cli import get_ask_command
|
|
7
8
|
from truefoundry.cli.config import CliConfig
|
|
8
9
|
from truefoundry.cli.const import GROUP_CLS
|
|
9
10
|
from truefoundry.cli.util import setup_rich_click
|
|
@@ -11,7 +12,6 @@ from truefoundry.common.constants import TFY_DEBUG_ENV_KEY
|
|
|
11
12
|
from truefoundry.common.utils import is_internal_env_set
|
|
12
13
|
from truefoundry.deploy.cli.commands import (
|
|
13
14
|
get_apply_command,
|
|
14
|
-
get_ask_command,
|
|
15
15
|
get_build_command,
|
|
16
16
|
get_delete_command,
|
|
17
17
|
get_deploy_command,
|
truefoundry/cli/display_util.py
CHANGED
|
@@ -1,20 +1,9 @@
|
|
|
1
1
|
import datetime
|
|
2
2
|
import json
|
|
3
|
-
|
|
4
|
-
from typing import Optional, cast
|
|
5
|
-
|
|
6
|
-
from openai.types.chat import (
|
|
7
|
-
ChatCompletionMessageParam,
|
|
8
|
-
ChatCompletionMessageToolCall,
|
|
9
|
-
ChatCompletionMessageToolCallParam,
|
|
10
|
-
)
|
|
3
|
+
|
|
11
4
|
from rich import box
|
|
12
5
|
from rich import print_json as _rich_print_json
|
|
13
|
-
from rich.console import Console, Group
|
|
14
|
-
from rich.markdown import Markdown
|
|
15
|
-
from rich.panel import Panel
|
|
16
6
|
from rich.table import Table
|
|
17
|
-
from rich.text import Text
|
|
18
7
|
|
|
19
8
|
from truefoundry.cli.config import CliConfig
|
|
20
9
|
from truefoundry.cli.console import console
|
|
@@ -132,54 +121,3 @@ def print_entity_obj(title, entity):
|
|
|
132
121
|
for key in keys:
|
|
133
122
|
table.add_row(f"[bold]{stringify(key)}[/]", stringify(entity_data[key]))
|
|
134
123
|
console.print(table)
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
def log_chat_completion_message(
|
|
138
|
-
message: ChatCompletionMessageParam, console_: Optional[Console] = None
|
|
139
|
-
) -> None:
|
|
140
|
-
timestamp = datetime.datetime.now().astimezone().strftime("%Y-%m-%d %H:%M:%S")
|
|
141
|
-
code_theme = "github-dark"
|
|
142
|
-
target_console = console_ or console
|
|
143
|
-
try:
|
|
144
|
-
_content = message.get("content") or ""
|
|
145
|
-
message_content = f"```json\n{json.dumps(json.loads(_content), indent=2)}\n```"
|
|
146
|
-
except (TypeError, ValueError):
|
|
147
|
-
message_content = str(message.get("content") or "")
|
|
148
|
-
|
|
149
|
-
tool_calls: Iterable[ChatCompletionMessageToolCallParam] = cast(
|
|
150
|
-
list[ChatCompletionMessageToolCallParam],
|
|
151
|
-
message.get("tool_calls", []),
|
|
152
|
-
)
|
|
153
|
-
|
|
154
|
-
rendered_content: list[Markdown | Text] = []
|
|
155
|
-
|
|
156
|
-
if bool(message_content.strip()):
|
|
157
|
-
rendered_content.append(Markdown(markup=message_content, code_theme=code_theme))
|
|
158
|
-
|
|
159
|
-
for call in tool_calls:
|
|
160
|
-
assert isinstance(call, ChatCompletionMessageToolCall)
|
|
161
|
-
name = call.function.name
|
|
162
|
-
args = call.function.arguments
|
|
163
|
-
rendered_content.append(
|
|
164
|
-
Text.from_markup(
|
|
165
|
-
"[bold magenta]Tool Calls:[/bold magenta]\n",
|
|
166
|
-
overflow="fold",
|
|
167
|
-
)
|
|
168
|
-
)
|
|
169
|
-
rendered_content.append(
|
|
170
|
-
Markdown(markup=f"```python\n▶ {name}({args})\n```", code_theme=code_theme)
|
|
171
|
-
)
|
|
172
|
-
|
|
173
|
-
if not rendered_content:
|
|
174
|
-
return
|
|
175
|
-
|
|
176
|
-
panel = Panel(
|
|
177
|
-
Group(*rendered_content, fit=True),
|
|
178
|
-
title=f"[bold blue]{timestamp}[/bold blue]",
|
|
179
|
-
title_align="left",
|
|
180
|
-
border_style="bright_blue",
|
|
181
|
-
padding=(1, 2),
|
|
182
|
-
expand=True,
|
|
183
|
-
width=console.width,
|
|
184
|
-
)
|
|
185
|
-
target_console.print(panel)
|
truefoundry/cli/util.py
CHANGED
|
@@ -1,8 +1,9 @@
|
|
|
1
1
|
import functools
|
|
2
2
|
import sys
|
|
3
3
|
import zipfile
|
|
4
|
-
from typing import Dict
|
|
4
|
+
from typing import Dict, Optional
|
|
5
5
|
|
|
6
|
+
import questionary
|
|
6
7
|
import rich_click as click
|
|
7
8
|
from packaging.version import parse as parse_version
|
|
8
9
|
from requests.exceptions import ConnectionError, Timeout
|
|
@@ -135,3 +136,33 @@ def _prompt_if_no_value_and_supported(prompt: str, hide_input: bool = True):
|
|
|
135
136
|
kwargs = {"prompt": prompt, "hide_input": hide_input, "prompt_required": False}
|
|
136
137
|
|
|
137
138
|
return kwargs
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
def select_cluster(cluster: Optional[str] = None) -> str:
    """
    Resolve a cluster id: when *cluster* is given, validate it against the
    clusters visible to the caller and return it; otherwise prompt the user to
    pick one interactively.
    """
    # Local import — presumably deferred to avoid an import cycle at module load; confirm.
    from truefoundry.deploy.lib.clients.servicefoundry_client import (
        ServiceFoundryServiceClient,
    )

    available = ServiceFoundryServiceClient().list_clusters()
    if not available:
        raise click.ClickException("No clusters found in your account.")

    if cluster:
        known_ids = {c.id for c in available}
        if cluster not in known_ids:
            raise click.ClickException(
                f"Cluster {cluster} not found. Either it does not exist or you might not be authorized to access it"
            )
        return cluster

    # De-duplicate ids while preserving order, mirroring the dict-keyed choices.
    cluster_ids = list(dict.fromkeys(c.id for c in available))
    picked = questionary.select("Pick a Cluster:", choices=cluster_ids).ask()
    if not picked:
        raise click.ClickException("No cluster selected.")
    return picked
|
truefoundry/common/constants.py
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
import enum
|
|
2
2
|
import os
|
|
3
3
|
from pathlib import Path
|
|
4
|
-
from typing import Optional
|
|
4
|
+
from typing import Any, Dict, Optional
|
|
5
5
|
|
|
6
6
|
from truefoundry.pydantic_v1 import BaseSettings, Field, SecretStr
|
|
7
7
|
|
|
@@ -26,8 +26,6 @@ TFY_ASK_OPENAI_API_KEY_KEY = "TFY_ASK_OPENAI_API_KEY"
|
|
|
26
26
|
TFY_ASK_OPENAI_BASE_URL_KEY = "TFY_ASK_OPENAI_BASE_URL"
|
|
27
27
|
TFY_ASK_MODEL_NAME_KEY = "TFY_ASK_OPENAI_MODEL"
|
|
28
28
|
|
|
29
|
-
TFY_ASK_SYSTEM_PROMPT_NAME_KEY = "TFY_ASK_SYSTEM_PROMPT_NAME"
|
|
30
|
-
|
|
31
29
|
|
|
32
30
|
class PythonPackageManager(str, enum.Enum):
|
|
33
31
|
PIP = "pip"
|
|
@@ -85,10 +83,11 @@ class TrueFoundrySdkEnv(BaseSettings):
|
|
|
85
83
|
default=None, env=TFY_ASK_OPENAI_BASE_URL_KEY
|
|
86
84
|
)
|
|
87
85
|
TFY_ASK_MODEL_NAME: Optional[str] = Field(default=None, env=TFY_ASK_MODEL_NAME_KEY)
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
default="tfy-ask-k8s-prompt", env=TFY_ASK_SYSTEM_PROMPT_NAME_KEY
|
|
86
|
+
TFY_ASK_GENERATION_PARAMS: Dict[str, Any] = Field(
|
|
87
|
+
default_factory=lambda: {"temperature": 0.0, "top_p": 1, "max_tokens": 4096}
|
|
91
88
|
)
|
|
89
|
+
TFY_ASK_SYSTEM_PROMPT_NAME: str = Field(default="tfy-ask-k8s-prompt")
|
|
90
|
+
TFY_INTERNAL_ASK_CONFIG_OVERRIDE_FILE: Optional[str] = Field(default=None)
|
|
92
91
|
|
|
93
92
|
# This is a hack to fresh read the env vars because people can end up importing this file
|
|
94
93
|
# before setting the correct env vars. E.g. in notebook environments.
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
# generated by datamodel-codegen:
|
|
2
2
|
# filename: application.json
|
|
3
|
-
# timestamp: 2025-05-
|
|
3
|
+
# timestamp: 2025-05-27T08:27:32+00:00
|
|
4
4
|
|
|
5
5
|
from __future__ import annotations
|
|
6
6
|
|
|
@@ -841,7 +841,6 @@ class SlackWebhook(BaseModel):
|
|
|
841
841
|
|
|
842
842
|
|
|
843
843
|
class SparkDriverConfig(BaseModel):
|
|
844
|
-
ui_endpoint: Endpoint
|
|
845
844
|
resources: Optional[Resources] = None
|
|
846
845
|
|
|
847
846
|
|
|
@@ -1604,8 +1603,8 @@ class Job(BaseModel):
|
|
|
1604
1603
|
trigger: Union[Manual, Schedule] = Field(
|
|
1605
1604
|
{"type": "manual"}, description="Specify the trigger"
|
|
1606
1605
|
)
|
|
1607
|
-
trigger_on_deploy: bool = Field(
|
|
1608
|
-
|
|
1606
|
+
trigger_on_deploy: Optional[bool] = Field(
|
|
1607
|
+
None, description="Trigger the job after deploy immediately"
|
|
1609
1608
|
)
|
|
1610
1609
|
params: Optional[List[Param]] = Field(
|
|
1611
1610
|
None, description="Configure params and pass it to create different job runs"
|
|
@@ -1,5 +1,4 @@
|
|
|
1
1
|
from truefoundry.deploy.cli.commands.apply_command import get_apply_command
|
|
2
|
-
from truefoundry.deploy.cli.commands.ask_command import get_ask_command
|
|
3
2
|
from truefoundry.deploy.cli.commands.build_command import get_build_command
|
|
4
3
|
from truefoundry.deploy.cli.commands.delete_command import get_delete_command
|
|
5
4
|
from truefoundry.deploy.cli.commands.deploy_command import get_deploy_command
|
|
@@ -14,3 +13,19 @@ from truefoundry.deploy.cli.commands.patch_application_command import (
|
|
|
14
13
|
from truefoundry.deploy.cli.commands.patch_command import get_patch_command
|
|
15
14
|
from truefoundry.deploy.cli.commands.terminate_comand import get_terminate_command
|
|
16
15
|
from truefoundry.deploy.cli.commands.trigger_command import get_trigger_command
|
|
16
|
+
|
|
17
|
+
__all__ = [
|
|
18
|
+
"get_apply_command",
|
|
19
|
+
"get_build_command",
|
|
20
|
+
"get_delete_command",
|
|
21
|
+
"get_deploy_command",
|
|
22
|
+
"get_deploy_init_command",
|
|
23
|
+
"get_get_command",
|
|
24
|
+
"get_login_command",
|
|
25
|
+
"get_logout_command",
|
|
26
|
+
"get_logs_command",
|
|
27
|
+
"get_patch_application_command",
|
|
28
|
+
"get_patch_command",
|
|
29
|
+
"get_terminate_command",
|
|
30
|
+
"get_trigger_command",
|
|
31
|
+
]
|
|
@@ -62,6 +62,14 @@ def _get_default_spec_file():
|
|
|
62
62
|
default=False,
|
|
63
63
|
help="Force create a new deployment by canceling any ongoing deployments",
|
|
64
64
|
)
|
|
65
|
+
@click.option(
|
|
66
|
+
"--trigger-on-deploy/--no-trigger-on-deploy",
|
|
67
|
+
"--trigger_on_deploy/--no_trigger_on_deploy",
|
|
68
|
+
is_flag=True,
|
|
69
|
+
show_default=True,
|
|
70
|
+
default=False,
|
|
71
|
+
help="Trigger a Job run after deployment succeeds. Has no effect for non Job type deployments",
|
|
72
|
+
)
|
|
65
73
|
@click.pass_context
|
|
66
74
|
@handle_exception_wrapper
|
|
67
75
|
def deploy_command(
|
|
@@ -70,6 +78,7 @@ def deploy_command(
|
|
|
70
78
|
workspace_fqn: Optional[str],
|
|
71
79
|
wait: bool,
|
|
72
80
|
force: bool = False,
|
|
81
|
+
trigger_on_deploy: bool = False,
|
|
73
82
|
):
|
|
74
83
|
if ctx.invoked_subcommand is not None:
|
|
75
84
|
return
|
|
@@ -86,7 +95,12 @@ def deploy_command(
|
|
|
86
95
|
application_definition = yaml.safe_load(f)
|
|
87
96
|
|
|
88
97
|
application = Application.parse_obj(application_definition)
|
|
89
|
-
application.deploy(
|
|
98
|
+
application.deploy(
|
|
99
|
+
workspace_fqn=workspace_fqn,
|
|
100
|
+
wait=wait,
|
|
101
|
+
force=force,
|
|
102
|
+
trigger_on_deploy=trigger_on_deploy,
|
|
103
|
+
)
|
|
90
104
|
sys.exit(0)
|
|
91
105
|
|
|
92
106
|
click.echo(
|
|
@@ -5,7 +5,7 @@ import rich_click as click
|
|
|
5
5
|
from rich.console import Console
|
|
6
6
|
|
|
7
7
|
from truefoundry.cli.const import COMMAND_CLS
|
|
8
|
-
from truefoundry.cli.util import handle_exception_wrapper
|
|
8
|
+
from truefoundry.cli.util import handle_exception_wrapper, select_cluster
|
|
9
9
|
from truefoundry.common.session import Session
|
|
10
10
|
from truefoundry.deploy.cli.commands.utils import (
|
|
11
11
|
CONTEXT_NAME_FORMAT,
|
|
@@ -14,7 +14,6 @@ from truefoundry.deploy.cli.commands.utils import (
|
|
|
14
14
|
get_kubeconfig_content,
|
|
15
15
|
get_kubeconfig_path,
|
|
16
16
|
save_kubeconfig,
|
|
17
|
-
select_cluster,
|
|
18
17
|
)
|
|
19
18
|
|
|
20
19
|
console = Console()
|
|
@@ -2,8 +2,6 @@ import os
|
|
|
2
2
|
from pathlib import Path
|
|
3
3
|
from typing import Any, Dict, List, Optional
|
|
4
4
|
|
|
5
|
-
import questionary
|
|
6
|
-
import rich_click as click
|
|
7
5
|
import yaml
|
|
8
6
|
|
|
9
7
|
DEFAULT_KUBECONFIG_PATH: Path = Path.home() / ".kube" / "config"
|
|
@@ -114,35 +112,7 @@ def get_cluster_context(
|
|
|
114
112
|
|
|
115
113
|
|
|
116
114
|
def get_cluster_server_url(config: Dict[str, Any], cluster: str) -> Optional[str]:
|
|
117
|
-
|
|
118
|
-
if
|
|
119
|
-
return
|
|
115
|
+
cluster_context: Optional[Dict[str, Any]] = get_cluster_context(config, cluster)
|
|
116
|
+
if cluster_context:
|
|
117
|
+
return cluster_context["cluster"].get("server")
|
|
120
118
|
return None
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
def select_cluster(cluster: Optional[str] = None) -> str:
|
|
124
|
-
"""
|
|
125
|
-
Retrieve available clusters and either return the specified one after validation
|
|
126
|
-
or allow the user to interactively select from the list.
|
|
127
|
-
"""
|
|
128
|
-
from truefoundry.deploy.lib.clients.servicefoundry_client import (
|
|
129
|
-
ServiceFoundryServiceClient,
|
|
130
|
-
)
|
|
131
|
-
|
|
132
|
-
clusters = ServiceFoundryServiceClient().list_clusters()
|
|
133
|
-
|
|
134
|
-
if not clusters:
|
|
135
|
-
raise click.ClickException("No clusters found in your account.")
|
|
136
|
-
|
|
137
|
-
if cluster:
|
|
138
|
-
if not any(c.id == cluster for c in clusters):
|
|
139
|
-
raise click.ClickException(
|
|
140
|
-
f"Cluster {cluster} not found. Either it does not exist or you might not be autthorized to access it"
|
|
141
|
-
)
|
|
142
|
-
return cluster
|
|
143
|
-
|
|
144
|
-
choices = {cluster.id: cluster for cluster in clusters}
|
|
145
|
-
cluster = questionary.select("Pick a Cluster:", choices=list(choices.keys())).ask()
|
|
146
|
-
if not cluster:
|
|
147
|
-
raise click.ClickException("No cluster selected.")
|
|
148
|
-
return cluster
|
|
@@ -222,12 +222,14 @@ class ServiceFoundryServiceClient(BaseServiceFoundryServiceClient):
|
|
|
222
222
|
workspace_id: str,
|
|
223
223
|
application: autogen_models.Workflow,
|
|
224
224
|
force: bool = False,
|
|
225
|
+
trigger_on_deploy: bool = False,
|
|
225
226
|
) -> Deployment:
|
|
226
227
|
data = {
|
|
227
228
|
"workspaceId": workspace_id,
|
|
228
229
|
"name": application.name,
|
|
229
230
|
"manifest": application.dict(exclude_none=True),
|
|
230
231
|
"forceDeploy": force,
|
|
232
|
+
"triggerOnDeploy": trigger_on_deploy,
|
|
231
233
|
}
|
|
232
234
|
logger.debug(json.dumps(data))
|
|
233
235
|
url = f"{self._api_server_url}/{VERSION_PREFIX}/deployment"
|