truefoundry 0.10.4rc3__py3-none-any.whl → 0.10.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of truefoundry might be problematic.

truefoundry/_ask/cli.py CHANGED
@@ -1,6 +1,6 @@
 import asyncio
 import logging
-from typing import Tuple
+from typing import Callable, Dict, Optional, Tuple, Union
 
 import rich_click as click
 from openai import AsyncOpenAI
@@ -11,8 +11,6 @@ from truefoundry.cli.const import COMMAND_CLS
 from truefoundry.cli.util import handle_exception_wrapper, select_cluster
 from truefoundry.common.constants import (
     ENV_VARS,
-    OPENAI_API_KEY_KEY,
-    OPENAI_MODEL_KEY,
     TFY_ASK_MODEL_NAME_KEY,
     TFY_ASK_OPENAI_API_KEY_KEY,
     TFY_ASK_OPENAI_BASE_URL_KEY,
@@ -23,77 +21,87 @@ from truefoundry.common.utils import get_tfy_servers_config
 console = Console()
 
 
-def _get_openai_client() -> Tuple[AsyncOpenAI, str]:
+class CustomAsyncOpenAI(AsyncOpenAI):
+    def __init__(
+        self, *, api_key: Optional[Union[str, Callable[[], str]]] = None, **kwargs
+    ):
+        self.__api_key_fn = None
+        if isinstance(api_key, str) or api_key is None:
+            _api_key = api_key
+        else:
+            self.__api_key_fn = api_key
+            _api_key = self.__api_key_fn()
+        super().__init__(api_key=_api_key, **kwargs)
+
+    @property
+    def auth_headers(self) -> Dict[str, str]:
+        if self.__api_key_fn is not None:
+            api_key = self.__api_key_fn()
+        else:
+            api_key = self.api_key
+        return {"Authorization": f"Bearer {api_key}"}
+
+
+def _get_openai_client(session: Session) -> Tuple[CustomAsyncOpenAI, str]:
     """
     Returns an AsyncOpenAI client using either user-provided credentials or TrueFoundry LLM gateway.
     """
-    console.print("")
-    default_model = "gpt-4o"
-    if ENV_VARS.TFY_ASK_OPENAI_BASE_URL and ENV_VARS.TFY_ASK_OPENAI_API_KEY:
+    if ENV_VARS.TFY_ASK_OPENAI_BASE_URL:
         console.print(
-            f"Found custom OpenAI API settings ([green]{TFY_ASK_OPENAI_BASE_URL_KEY}[/green], [green]{TFY_ASK_OPENAI_API_KEY_KEY}[/green]) in env"
+            f"Found custom OpenAI compatible API settings ([green]{TFY_ASK_OPENAI_BASE_URL_KEY}[/green]) in env"
         )
-        client = AsyncOpenAI(
-            base_url=ENV_VARS.TFY_ASK_OPENAI_BASE_URL,
-            api_key=ENV_VARS.TFY_ASK_OPENAI_API_KEY,
-        )
-        if ENV_VARS.TFY_ASK_MODEL_NAME:
-            openai_model = ENV_VARS.TFY_ASK_MODEL_NAME
+        if ENV_VARS.TFY_ASK_OPENAI_API_KEY:
             console.print(
-                f"Using custom OpenAI model from env [green]{TFY_ASK_MODEL_NAME_KEY}[/green]: [yellow]{openai_model}[/yellow]"
+                f"Found API key ([green]{TFY_ASK_OPENAI_API_KEY_KEY}[/green]) in env"
             )
+            api_key = ENV_VARS.TFY_ASK_OPENAI_API_KEY
         else:
-            openai_model = default_model
             console.print(
-                f"Using default OpenAI model: [yellow]{openai_model}[/yellow]"
-                f"\n[dim]Tip: To use a different model, set the env var "
-                f"[green]{TFY_ASK_MODEL_NAME_KEY}[/green] to the model name you want to use.[/dim]"
+                f"No API key found in env, using [yellow]EMPTY[/yellow] as API key"
+                f"\n[dim]Tip: To use a different API key, set the env var "
+                f"[green]{TFY_ASK_OPENAI_API_KEY_KEY}[/green] to the API key you want to use.[/dim]"
             )
+            api_key = "EMPTY"
+        base_url = ENV_VARS.TFY_ASK_OPENAI_BASE_URL
+        default_model = "gpt-4o"
+    else:
+        tfy_servers_config = get_tfy_servers_config(session.tfy_host)
+        base_url = f"{tfy_servers_config.servicefoundry_server_url}/v1/tfy-ai/proxy/api/inference/openai"
         console.print(
-            "[dim][yellow]This operation will use tokens from your model provider and may incur costs.[/yellow][/dim]"
+            f"Using TrueFoundry Managed AI."
+            f"\n[dim]Tip: To use your own OpenAI API compatible API for the ask command, set the following env vars"
+            f"\n * [green]{TFY_ASK_OPENAI_BASE_URL_KEY}[/] to the base URL of your OpenAI compatible API. E.g. [yellow]https://api.openai.com/v1[/yellow]"
+            f"\n * [green]{TFY_ASK_OPENAI_API_KEY_KEY}[/] to the API key of your OpenAI compatible API."
+            f"[/dim]"
         )
         console.print("")
-        return client, openai_model
-    elif ENV_VARS.OPENAI_API_KEY:
-        console.print(f"Found [green]{OPENAI_API_KEY_KEY}[/green] in env")
-        client = AsyncOpenAI(
-            api_key=ENV_VARS.OPENAI_API_KEY,
+
+        api_key = lambda: session.access_token  # noqa: E731
+        default_model = "tfy-ai-openai/gpt-4o"
+    client = CustomAsyncOpenAI(
+        base_url=base_url,
+        api_key=api_key,
+    )
+    if ENV_VARS.TFY_ASK_MODEL_NAME:
+        openai_model = ENV_VARS.TFY_ASK_MODEL_NAME
+        console.print(
+            f"Using custom model from env [green]{TFY_ASK_MODEL_NAME_KEY}[/green]: [yellow]{openai_model}[/yellow]"
         )
-        if ENV_VARS.OPENAI_MODEL:
-            openai_model = ENV_VARS.OPENAI_MODEL
-            console.print(
-                f"Using custom OpenAI model from env [green]{OPENAI_MODEL_KEY}[/green]: [yellow]{openai_model}[/yellow]"
-            )
-        else:
-            openai_model = default_model
-            console.print(
-                f"Using default OpenAI model: [yellow]{openai_model}[/yellow]"
-                f"\n[dim]Tip: To use a different OpenAI model, set the env var "
-                f"[green]{OPENAI_MODEL_KEY}[/green] to the model name you want to use.[/dim]"
-            )
+    else:
+        openai_model = default_model
         console.print(
-            f"[dim]Tip: To use your own OpenAI API compatible model for the ask command, set the env vars "
-            f"[green]{TFY_ASK_OPENAI_BASE_URL_KEY}[/], "
-            f"[green]{TFY_ASK_OPENAI_API_KEY_KEY}[/], and "
-            f"[green]{TFY_ASK_MODEL_NAME_KEY}[/].[/dim]"
+            f"Using default model: [yellow]{openai_model}[/yellow]"
+            f"\n[dim]Tip: To use a different model, set the env var "
+            f"[green]{TFY_ASK_MODEL_NAME_KEY}[/green] to the model name you want to use.[/dim]"
         )
+    console.print("")
+
+    if ENV_VARS.TFY_ASK_OPENAI_BASE_URL:
         console.print(
-            "[dim][yellow]This operation will use tokens from your OpenAI account and may incur costs.[/yellow][/dim]"
+            "[dim][yellow]This operation will use tokens from your model provider and may incur costs.[/yellow][/dim]"
         )
         console.print("")
-        return client, openai_model
-    else:
-        llm_env_instruction = (
-            "No OpenAI API Key found in env."
-            f"\n- To use your own OpenAI API compatible model for the ask command, set the env vars "
-            f"[green]{TFY_ASK_OPENAI_BASE_URL_KEY}[/], "
-            f"[green]{TFY_ASK_OPENAI_API_KEY_KEY}[/], and "
-            f"[green]{TFY_ASK_MODEL_NAME_KEY}[/] (default: {default_model})."
-            f"\n- Alternatively, you can use OpenAI directly by setting the env vars "
-            f"[green]{OPENAI_API_KEY_KEY}[/], "
-            f"[green]{OPENAI_MODEL_KEY}[/] (default: {default_model})"
-        )
-        raise ValueError(llm_env_instruction)
+    return client, openai_model
 
 
 @click.command(name="ask", cls=COMMAND_CLS)
@@ -124,7 +132,7 @@ def ask_command(ctx, cluster: str) -> None:
         "Use this command to ask questions and troubleshoot issues in your Kubernetes cluster managed by the TrueFoundry Control Plane.\n"
         "It helps you investigate and identify potential problems across services, pods, deployments, and more.\n"
     )
-    openai_client, openai_model = _get_openai_client()
+    openai_client, openai_model = _get_openai_client(session=session)
     if not cluster:
         console.print(
             "[dim]Tip: You can specify a cluster using the '--cluster' option, or select one interactively from the list.[/dim]\n"
truefoundry/cli/util.py CHANGED
@@ -109,7 +109,7 @@ def print_dict_as_table_panel(
 ):
     table = Table(show_header=False, box=None)
     table.add_column("Key", style=f"bold {key_color}", width=15)
-    table.add_column("Value")
+    table.add_column("Value", overflow="fold")
     for key, value in dct.items():
         table.add_row(key, value)
     console.print(
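
The `overflow="fold"` argument on the value column makes Rich wrap long values onto additional lines instead of truncating them. A small standalone sketch of the effect; the key/value content below is made up.

```python
from rich.console import Console
from rich.table import Table

console = Console(width=40)  # narrow console to force wrapping
table = Table(show_header=False, box=None)
table.add_column("Key", style="bold cyan", width=15)
table.add_column("Value", overflow="fold")  # fold long values across lines instead of cutting them off
table.add_row("cluster", "a-very-long-cluster-fqn-that-would-otherwise-be-truncated")
console.print(table)
```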
@@ -118,6 +118,7 @@ from truefoundry.deploy.v2.lib.patched_models import (
     SparkImageBuild,
     SparkJobJavaEntrypoint,
     SparkJobPythonEntrypoint,
+    SparkJobPythonNotebookEntrypoint,
     SparkJobScalaEntrypoint,
     SQSInputConfig,
     SQSOutputConfig,
@@ -1,6 +1,6 @@
 # generated by datamodel-codegen:
 #   filename: application.json
-#   timestamp: 2025-06-09T12:01:27+00:00
+#   timestamp: 2025-06-18T21:24:37+00:00
 
 from __future__ import annotations
 
@@ -908,7 +908,7 @@ class SparkImageBuild(BaseModel):
         None,
         description="FQN of the container registry. If you can't find your registry here,\nadd it through the [Integrations](/integrations?tab=docker-registry) page",
     )
-    build_source: GitSource
+    build_source: Union[GitSource, RemoteSource] = Field(..., description="")
     build_spec: SparkBuild
 
 
@@ -936,6 +936,14 @@ class SparkJobPythonEntrypoint(BaseModel):
     )
 
 
+class SparkJobPythonNotebookEntrypoint(BaseModel):
+    type: Literal["python-notebook"] = Field(..., description="")
+    main_application_file: str = Field(
+        ...,
+        description="The main application file to be executed by the spark job. Relative path in case of git repository.",
+    )
+
+
 class SparkJobScalaEntrypoint(BaseModel):
     type: Literal["scala"] = Field(..., description="")
     main_application_file: str = Field(
@@ -1449,7 +1457,10 @@ class SparkJob(BaseModel):
         description="The image to use for driver and executors. Must have spark installed. Spark version must match the version in the image.",
     )
     entrypoint: Union[
-        SparkJobPythonEntrypoint, SparkJobScalaEntrypoint, SparkJobJavaEntrypoint
+        SparkJobPythonEntrypoint,
+        SparkJobScalaEntrypoint,
+        SparkJobJavaEntrypoint,
+        SparkJobPythonNotebookEntrypoint,
     ] = Field(..., description="")
     driver_config: SparkDriverConfig
     executor_config: SparkExecutorConfig
@@ -2,7 +2,7 @@ import ast
 import io
 import json
 import re
-from typing import Any, Dict, List, Optional
+from typing import Any, Callable, Dict, List, Optional
 
 from rich.console import Console
 from rich.pretty import pprint
@@ -143,9 +143,14 @@ def _convert_deployment_config_to_python(workspace_fqn: str, application_spec: d
     return generated_code
 
 
+def _default_exclude_fn(model: BaseModel, name: str) -> bool:
+    return False
+
+
 def convert_deployment_config_to_python(
     workspace_fqn: str,
     application_spec: Dict[str, Any],
+    exclude_fn: Callable[[BaseModel, str], bool] = _default_exclude_fn,
     exclude_unset: bool = False,
     exclude_defaults: bool = False,
 ):
@@ -156,11 +161,13 @@ def convert_deployment_config_to_python(
         pairs = []
         for name, value in original_repr_args(self):
             if name is not None:
-                model_field = self.__fields__.get(name)
-                if model_field is None:
+                if exclude_fn(self, name):
                     continue
                 if exclude_unset and name not in self.__fields_set__:
                     continue
+                model_field = self.__fields__.get(name)
+                if model_field is None:
+                    continue
                 if (
                     exclude_defaults
                     and not getattr(model_field, "required", True)
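
With the new `exclude_fn` hook, callers of `convert_deployment_config_to_python` can drop arbitrary fields from the generated Python code before the unset/default filters run. A hedged sketch, assuming the function keeps the signature shown above and returns the generated source as a string; the exclusion rule, workspace FQN, and application spec below are placeholders.

```python
from truefoundry.deploy.python_deploy_codegen import convert_deployment_config_to_python
from truefoundry.pydantic_v1 import BaseModel  # pydantic v1 shim shipped with the package


def hide_private_fields(model: BaseModel, name: str) -> bool:
    # return True to exclude a field from the generated code; this rule is hypothetical
    return name.startswith("_")


code = convert_deployment_config_to_python(
    workspace_fqn="my-cluster:my-workspace",              # placeholder workspace FQN
    application_spec={"type": "service", "name": "svc"},  # placeholder application spec
    exclude_fn=hide_private_fields,
    exclude_unset=True,
)
print(code)
```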
@@ -520,3 +520,9 @@ class SparkJobPythonEntrypoint(models.SparkJobPythonEntrypoint, PatchedModelBase
 
 class SparkJobJavaEntrypoint(models.SparkJobJavaEntrypoint, PatchedModelBase):
     type: Literal["java"] = "java"
+
+
+class SparkJobPythonNotebookEntrypoint(
+    models.SparkJobPythonNotebookEntrypoint, PatchedModelBase
+):
+    type: Literal["python-notebook"] = "python-notebook"
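
The patched `SparkJobPythonNotebookEntrypoint` defaults `type` to `"python-notebook"`, so only `main_application_file` needs to be supplied when declaring a notebook-based Spark job. A hypothetical usage sketch; the import path assumes the class is re-exported from `truefoundry.deploy` alongside the other Spark entrypoints added to that module's imports above, and the notebook path is made up.

```python
from truefoundry.deploy import SparkJobPythonNotebookEntrypoint  # re-export assumed from this diff

entrypoint = SparkJobPythonNotebookEntrypoint(
    main_application_file="notebooks/etl_job.ipynb",  # relative path inside the build source
)
print(entrypoint.type)  # "python-notebook" (defaulted by the patched model)
```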
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: truefoundry
-Version: 0.10.4rc3
+Version: 0.10.5
 Summary: TrueFoundry CLI
 Author-email: TrueFoundry Team <abhishek@truefoundry.com>
 Requires-Python: <3.14,>=3.8.1
@@ -30,12 +30,12 @@ Requires-Dist: requirements-parser<0.12.0,>=0.11.0
 Requires-Dist: rich-click<2.0.0,>=1.2.1
 Requires-Dist: rich<14.0.0,>=13.7.1
 Requires-Dist: tqdm<5.0.0,>=4.0.0
-Requires-Dist: truefoundry-sdk<0.2.0,>=0.1.1
+Requires-Dist: truefoundry-sdk<0.2.0,>=0.1.3
 Requires-Dist: typing-extensions>=4.0
 Requires-Dist: urllib3<3,>=1.26.18
 Requires-Dist: yq<4.0.0,>=3.1.0
 Provides-Extra: ai
-Requires-Dist: mcp==1.9.1; (python_version >= '3.10') and extra == 'ai'
+Requires-Dist: mcp==1.9.4; (python_version >= '3.10') and extra == 'ai'
 Provides-Extra: workflow
 Requires-Dist: flytekit==1.15.3; (python_version >= '3.9' and python_version < '3.13') and extra == 'workflow'
 Description-Content-Type: text/markdown
@@ -4,7 +4,7 @@ truefoundry/logger.py,sha256=u-YCNjg5HBwE70uQcpjIG64Ghos-K2ulTWaxC03BSj4,714
 truefoundry/pydantic_v1.py,sha256=jSuhGtz0Mbk1qYu8jJ1AcnIDK4oxUsdhALc4spqstmM,345
 truefoundry/version.py,sha256=bqiT4Q-VWrTC6P4qfK43mez-Ppf-smWfrl6DcwV7mrw,137
 truefoundry/_ask/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-truefoundry/_ask/cli.py,sha256=zPaDvxhX2dITmPTtut2Iu6WAIaizrwR-U_dDZ6xv2io,5814
+truefoundry/_ask/cli.py,sha256=RDi1lwbUMYw0CnvaYG4o6o1phmnKjuggdQ5I8sllTlA,5812
 truefoundry/_ask/client.py,sha256=QWQRiDwmtIlLaZsyGcLZaQstYFzpmJeCRdATMapjL-8,18740
 truefoundry/_ask/llm_utils.py,sha256=ayjz7JtVu142lrm8t0cVoxLxUpx76b71y8R62z_WurY,13537
 truefoundry/autodeploy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -37,7 +37,7 @@ truefoundry/cli/config.py,sha256=f7z0_gmYZiNImB7Bxz0AnOlrxY2X4lFnX4jYW1I7NHQ,139
 truefoundry/cli/console.py,sha256=9-dMy4YPisCJQziRKTg8Qa0UJnOGl1soiUnJjsnLDvE,242
 truefoundry/cli/const.py,sha256=dVHPo1uAiDSSMXwXoT2mR5kNQjExT98QNVRz98Hz_Ts,510
 truefoundry/cli/display_util.py,sha256=9vzN3mbQqU6OhS7qRUiMRana4PTHa4sDTA0Hn7OVjCI,3108
-truefoundry/cli/util.py,sha256=pezUfF2GC6ru7s8VeH2a7uvXTU0xN9ka7yLXkIgC3dY,4998
+truefoundry/cli/util.py,sha256=sSKFZ5wGkForoRIiKD2gFlMyj9D1iinzdjQtMYJx8oU,5015
 truefoundry/common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 truefoundry/common/auth_service_client.py,sha256=N3YxKlx63r6cPZqbgb2lqBOPI69ShB7D7RCIq4FSCjc,7949
 truefoundry/common/constants.py,sha256=nWd3Je71WmHEORRUTCupZy5fWADqEFftjYP6wiYhCIc,4627
@@ -52,9 +52,9 @@ truefoundry/common/storage_provider_utils.py,sha256=yURhMw8k0FLFvaviRHDiifhvc6Gn
 truefoundry/common/types.py,sha256=BMJFCsR1lPJAw66IQBSvLyV4I6o_x5oj78gVsUa9si8,188
 truefoundry/common/utils.py,sha256=j3QP0uOsaGD_VmDDR68JTwoYE1okkAq6OqpVkzVf48Q,6424
 truefoundry/common/warnings.py,sha256=rs6BHwk7imQYedo07iwh3TWEOywAR3Lqhj0AY4khByg,504
-truefoundry/deploy/__init__.py,sha256=2GNbI8IGJBotz_IKaqQ-DWYWZn_pSu7lN7aId15Gk7Q,2799
-truefoundry/deploy/python_deploy_codegen.py,sha256=X6cSGQ9_9GxrgIlTLvBWMDz9QnU7hrxieMIutNJe_ng,7784
-truefoundry/deploy/_autogen/models.py,sha256=xt-DuaRDx5jeRwyGoQH2yyPZAep9Q2MHFW9XBuRzG8E,73161
+truefoundry/deploy/__init__.py,sha256=PVbGPU9S3-dTFn5LvLwaEnfsp2RrGT9iiM7_15kOV84,2837
+truefoundry/deploy/python_deploy_codegen.py,sha256=k19_m5DGsUyjOUCSKwIVP8vDna2sq01tHABsUfoVpW4,8019
+truefoundry/deploy/_autogen/models.py,sha256=8j_y0Yp8k8Sjj7iVtZDHeuxq9kDvD0xI8-iFnbf0370,73571
 truefoundry/deploy/builder/__init__.py,sha256=kgvlkVkiWpMVdim81tIeLrdoACqrFDgwCqHdQVsCsMo,4988
 truefoundry/deploy/builder/constants.py,sha256=amUkHoHvVKzGv0v_knfiioRuKiJM0V0xW0diERgWiI0,508
 truefoundry/deploy/builder/docker_service.py,sha256=sm7GWeIqyrKaZpxskdLejZlsxcZnM3BTDJr6orvPN4E,3948
@@ -117,7 +117,7 @@ truefoundry/deploy/v2/lib/deploy.py,sha256=HfSUdAS3gSpFAFtV0Mq9LscfpkaXqA2LHW4VX
 truefoundry/deploy/v2/lib/deploy_workflow.py,sha256=G5BzMIbap8pgDX1eY-TITruUxQdkKhYtBmRwLL6lDeY,14342
 truefoundry/deploy/v2/lib/deployable_patched_models.py,sha256=mUi-OjPf7bc8rzfrPLdFb79LKuDq7F36RxL4V-AXebs,6830
 truefoundry/deploy/v2/lib/models.py,sha256=ogc1UYs1Z2nBdGSKCrde9sk8d0GxFKMkem99uqO5CmM,1148
-truefoundry/deploy/v2/lib/patched_models.py,sha256=VkfS7akbUzMA4q15lQUcAirdTsyVE1rfMeCmjXJC6Zk,15394
+truefoundry/deploy/v2/lib/patched_models.py,sha256=oNsOr5ojVn2XHjATD3VLuuO6w_ljDL99siHXy6y3Y0g,15558
 truefoundry/deploy/v2/lib/source.py,sha256=d6-8_6Zn5koBglqrBrY6ZLG_7yyPuLdyEmK4iZTw6xY,9405
 truefoundry/ml/__init__.py,sha256=EEEHV7w58Krpo_W9Chd8Y3TdItfFO3LI6j6Izqc4-P8,2219
 truefoundry/ml/constants.py,sha256=vDq72d4C9FSWqr9MMdjgTF4TuyNFApvo_6RVsSeAjB4,2837
@@ -381,7 +381,7 @@ truefoundry/workflow/remote_filesystem/__init__.py,sha256=LQ95ViEjJ7Ts4JcCGOxMPs
 truefoundry/workflow/remote_filesystem/logger.py,sha256=em2l7D6sw7xTLDP0kQSLpgfRRCLpN14Qw85TN7ujQcE,1022
 truefoundry/workflow/remote_filesystem/tfy_signed_url_client.py,sha256=xcT0wQmQlgzcj0nP3tJopyFSVWT1uv3nhiTIuwfXYeg,12342
 truefoundry/workflow/remote_filesystem/tfy_signed_url_fs.py,sha256=nSGPZu0Gyd_jz0KsEE-7w_BmnTD8CVF1S8cUJoxaCbc,13305
-truefoundry-0.10.4rc3.dist-info/METADATA,sha256=DvsgKrey42e5PHmRKl107ZHQeOO-uqsRX-OG7tibhiI,2508
-truefoundry-0.10.4rc3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-truefoundry-0.10.4rc3.dist-info/entry_points.txt,sha256=xVjn7RMN-MW2-9f7YU-bBdlZSvvrwzhpX1zmmRmsNPU,98
-truefoundry-0.10.4rc3.dist-info/RECORD,,
+truefoundry-0.10.5.dist-info/METADATA,sha256=t8-jiTcLLtOLh7QQQwl_FXqgG0GJdttfh8b06Org1iE,2505
+truefoundry-0.10.5.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+truefoundry-0.10.5.dist-info/entry_points.txt,sha256=xVjn7RMN-MW2-9f7YU-bBdlZSvvrwzhpX1zmmRmsNPU,98
+truefoundry-0.10.5.dist-info/RECORD,,