truefoundry 0.9.0rc1__py3-none-any.whl → 0.9.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of truefoundry might be problematic. Click here for more details.

File without changes
@@ -8,7 +8,7 @@ from rich.console import Console
 
 from truefoundry.cli.config import CliConfig
 from truefoundry.cli.const import COMMAND_CLS
-from truefoundry.cli.util import handle_exception_wrapper
+from truefoundry.cli.util import handle_exception_wrapper, select_cluster
 from truefoundry.common.constants import (
     ENV_VARS,
     OPENAI_API_KEY_KEY,
@@ -19,7 +19,6 @@ from truefoundry.common.constants import (
 )
 from truefoundry.common.session import Session
 from truefoundry.common.utils import get_tfy_servers_config
-from truefoundry.deploy.cli.commands.utils import select_cluster
 
 console = Console()
 
@@ -85,14 +84,14 @@ def _get_openai_client() -> Tuple[AsyncOpenAI, str]:
         return client, openai_model
     else:
         llm_env_instruction = (
-            "[dim]No OpenAI API key found in env."
+            "No OpenAI API Key found in env."
             f"\n- To use your own OpenAI API compatible model for the ask command, set the env vars "
             f"[green]{TFY_ASK_OPENAI_BASE_URL_KEY}[/], "
             f"[green]{TFY_ASK_OPENAI_API_KEY_KEY}[/], and "
             f"[green]{TFY_ASK_MODEL_NAME_KEY}[/] (default: {default_model})."
-            f"\n- Alternatively, you can use the default OpenAI model by setting the env vars "
+            f"\n- Alternatively, you can use OpenAI directly by setting the env vars "
             f"[green]{OPENAI_API_KEY_KEY}[/], "
-            f"[green]{OPENAI_MODEL_KEY}[/] (default: {default_model}).[/dim]"
+            f"[green]{OPENAI_MODEL_KEY}[/] (default: {default_model})"
         )
         raise ValueError(llm_env_instruction)
 
@@ -111,7 +110,7 @@ def ask_command(ctx, cluster: str) -> None:
     """
     Ask questions related to your Cluster in TrueFoundry.
     """
-    from truefoundry.deploy.lib.clients.ask_client import ask_client_main
+    from truefoundry._ask.client import ask_client
 
     debug = CliConfig.debug
     if debug:
@@ -125,19 +124,16 @@ def ask_command(ctx, cluster: str) -> None:
         "Use this command to ask questions and troubleshoot issues in your Kubernetes cluster managed by the TrueFoundry Control Plane.\n"
         "It helps you investigate and identify potential problems across services, pods, deployments, and more.\n"
     )
-
+    openai_client, openai_model = _get_openai_client()
     if not cluster:
         console.print(
             "[dim]Tip: You can specify a cluster using the '--cluster' option, or select one interactively from the list.[/dim]\n"
         )
-
-    # Get the cluster id from the command line argument
     cluster = select_cluster(cluster)
-    openai_client, openai_model = _get_openai_client()
     tfy_servers_config = get_tfy_servers_config(session.tfy_host)
     mcp_server_url = f"{tfy_servers_config.servicefoundry_server_url}/v1/k8s-mcp"
     asyncio.run(
-        ask_client_main(
+        ask_client(
             cluster=cluster,
             server_url=mcp_server_url,
             token=session.access_token,