typeagent-py 0.1.0__py3-none-any.whl → 0.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {typeagent_py-0.1.0.dist-info → typeagent_py-0.1.1.dist-info}/METADATA +1 -1
- {typeagent_py-0.1.0.dist-info → typeagent_py-0.1.1.dist-info}/RECORD +18 -5
- {typeagent_py-0.1.0.dist-info → typeagent_py-0.1.1.dist-info}/top_level.txt +1 -0
- typechat/__about__.py +4 -0
- typechat/__init__.py +25 -0
- typechat/_internal/__init__.py +0 -0
- typechat/_internal/interactive.py +37 -0
- typechat/_internal/model.py +184 -0
- typechat/_internal/result.py +21 -0
- typechat/_internal/translator.py +125 -0
- typechat/_internal/ts_conversion/__init__.py +37 -0
- typechat/_internal/ts_conversion/python_type_to_ts_nodes.py +447 -0
- typechat/_internal/ts_conversion/ts_node_to_string.py +96 -0
- typechat/_internal/ts_conversion/ts_type_nodes.py +78 -0
- typechat/_internal/validator.py +67 -0
- typechat/py.typed +0 -0
- {typeagent_py-0.1.0.dist-info → typeagent_py-0.1.1.dist-info}/WHEEL +0 -0
- {typeagent_py-0.1.0.dist-info → typeagent_py-0.1.1.dist-info}/licenses/LICENSE +0 -0
{typeagent_py-0.1.0.dist-info → typeagent_py-0.1.1.dist-info}/RECORD
@@ -48,8 +48,21 @@ typeagent/storage/sqlite/reltermsindex.py,sha256=VwmUH-awNZ5YeMZTuFVfKP-8G0WQQ1k
 typeagent/storage/sqlite/schema.py,sha256=c5-dff8wdIA37SegPOI-_h-w2eCPSnpnPQAC3vcNzYo,8061
 typeagent/storage/sqlite/semrefindex.py,sha256=eqHrQMyVdFS9HOXV1dLvp0bMs8JKoPQLmV46Cs0HQJM,5456
 typeagent/storage/sqlite/timestampindex.py,sha256=gnmmwgRKCwFi2iGzGJVe7Zz12rblB-5-5WZkqpDgySM,4764
-typeagent_py-0.1.
-
-
-
-
+typeagent_py-0.1.1.dist-info/licenses/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
+typechat/__about__.py,sha256=Qcvdg23chrwFVJ3cN1UPGeSSk37i-nAOPF2CQ6x1R4w,103
+typechat/__init__.py,sha256=cH0fRzr___j_COUgwiretNwsd3o_wY7hYqFQ5TkYefE,905
+typechat/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+typechat/_internal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+typechat/_internal/interactive.py,sha256=Z1kvHt88SnQw3CyfWbOXMXLB-JjrJE0n4o55mQcw3pI,1530
+typechat/_internal/model.py,sha256=zTjUg0kVTNYYj-xAU_dJDLyWEZA8GuekYTcr6CcIM2g,6975
+typechat/_internal/result.py,sha256=umzDE3PWSZmecGgVtig-Rb2vRw93fEcYb1Kazp5gv3o,511
+typechat/_internal/translator.py,sha256=VcUQR2zAd2YOQyO2OAiqwJe9pyDupAtuWZCfrOS4uLk,5149
+typechat/_internal/validator.py,sha256=RxIdA3mIhj9z66jTujVjqIC99ykEivWywAhqW4swAhw,2508
+typechat/_internal/ts_conversion/__init__.py,sha256=7fZHhPl1Km8TtvCDAJE_IathP0VrNjlJPS08J7xKIJg,1298
+typechat/_internal/ts_conversion/python_type_to_ts_nodes.py,sha256=cfI2VhZ3W1WERSQ3pabH7eg86wCgIAERQI_utdvaZfI,17909
+typechat/_internal/ts_conversion/ts_node_to_string.py,sha256=QnTVlmXLp7iozdyV4TEeaGT2bhHDHBGgMJGE3hk6ZEo,4260
+typechat/_internal/ts_conversion/ts_type_nodes.py,sha256=LWtX6k45Tuw48DYBYwy4k-nF2mqjzbHP3bpYKrW58r8,2053
+typeagent_py-0.1.1.dist-info/METADATA,sha256=uqdhYIaJpNKtD_YRUZrJglla7BHJdKg7-ObSLERZPBg,1002
+typeagent_py-0.1.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+typeagent_py-0.1.1.dist-info/top_level.txt,sha256=CvJe8hnRs8A7kg7LXtgnH6Uj5MsGftIb_aryk_aoE6M,19
+typeagent_py-0.1.1.dist-info/RECORD,,
typechat/__about__.py
ADDED
typechat/__init__.py
ADDED
@@ -0,0 +1,25 @@
# SPDX-FileCopyrightText: Microsoft Corporation
#
# SPDX-License-Identifier: MIT

from typechat._internal.model import PromptSection, TypeChatLanguageModel, create_language_model, create_openai_language_model, create_azure_openai_language_model
from typechat._internal.result import Failure, Result, Success
from typechat._internal.translator import TypeChatJsonTranslator
from typechat._internal.ts_conversion import python_type_to_typescript_schema
from typechat._internal.validator import TypeChatValidator
from typechat._internal.interactive import process_requests

__all__ = [
    "TypeChatLanguageModel",
    "TypeChatJsonTranslator",
    "TypeChatValidator",
    "Success",
    "Failure",
    "Result",
    "python_type_to_typescript_schema",
    "PromptSection",
    "create_language_model",
    "create_openai_language_model",
    "create_azure_openai_language_model",
    "process_requests",
]
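For orientation, here is a minimal sketch of how the surface exported by `typechat/__init__.py` fits together. It is not part of the diff; the `Sentiment` schema, the request text, and the reliance on the `OPENAI_*`/`AZURE_OPENAI_*` environment variables documented in `model.py` are illustrative.

```python
import asyncio
import os

from typing_extensions import Literal, TypedDict

import typechat


class Sentiment(TypedDict):
    """The sentiment of a piece of user input."""
    sentiment: Literal["negative", "neutral", "positive"]


async def main() -> None:
    # Picks OpenAI or Azure OpenAI based on OPENAI_API_KEY / AZURE_OPENAI_API_KEY.
    model = typechat.create_language_model(os.environ)
    validator = typechat.TypeChatValidator(Sentiment)
    translator = typechat.TypeChatJsonTranslator(model, validator, Sentiment)

    result = await translator.translate("TypeChat is pretty nice!")
    if isinstance(result, typechat.Success):
        print(result.value)          # e.g. {'sentiment': 'positive'}
    else:
        print("Error:", result.message)


asyncio.run(main())
```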
typechat/_internal/__init__.py
ADDED
File without changes
typechat/_internal/interactive.py
ADDED
@@ -0,0 +1,37 @@
from typing import Callable, Awaitable

async def process_requests(interactive_prompt: str, input_file_name: str | None, process_request: Callable[[str], Awaitable[None]]):
    """
    A request processor for interactive input or input from a text file. If an input file name is specified,
    the callback function is invoked for each line in file. Otherwise, the callback function is invoked for
    each line of interactive input until the user types "quit" or "exit".

    Args:
        interactive_prompt: Prompt to present to user.
        input_file_name: Input text file name, if any.
        process_request: Async callback function that is invoked for each interactive input or each line in text file.
    """
    if input_file_name is not None:
        with open(input_file_name, "r") as file:
            lines = filter(str.rstrip, file)
            for line in lines:
                if line.startswith("# "):
                    continue
                print(interactive_prompt + line)
                await process_request(line)
    else:
        try:
            # Use readline to enable input editing and history
            import readline  # type: ignore
        except ImportError:
            pass
        while True:
            try:
                line = input(interactive_prompt)
            except EOFError:
                print("\n")
                break
            if line.lower().strip() in ("quit", "exit"):
                break
            else:
                await process_request(line)
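A small sketch of driving `process_requests` (not part of the diff; the prompt string and handler are made up). Passing a file name instead of `None` replays requests from a text file, skipping lines that start with `"# "`.

```python
import asyncio

from typechat import process_requests


async def handle_request(line: str) -> None:
    # Stand-in for real work, e.g. translator.translate(line).
    print(f"received: {line!r}")


# Prompts on stdin until the user types "quit" or "exit".
asyncio.run(process_requests("~> ", None, handle_request))
```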
typechat/_internal/model.py
ADDED
@@ -0,0 +1,184 @@
import asyncio
from types import TracebackType
from typing_extensions import AsyncContextManager, Literal, Protocol, Self, TypedDict, cast, override

from typechat._internal.result import Failure, Result, Success

import httpx

class PromptSection(TypedDict):
    """
    Represents a section of an LLM prompt with an associated role. TypeChat uses the "user" role for
    prompts it generates and the "assistant" role for previous LLM responses (which will be part of
    the prompt in repair attempts). TypeChat currently doesn't use the "system" role.
    """
    role: Literal["system", "user", "assistant"]
    content: str

class TypeChatLanguageModel(Protocol):
    async def complete(self, prompt: str | list[PromptSection]) -> Result[str]:
        """
        Represents a AI language model that can complete prompts.

        TypeChat uses an implementation of this protocol to communicate
        with an AI service that can translate natural language requests to JSON
        instances according to a provided schema.
        The `create_language_model` function can create an instance.
        """
        ...

_TRANSIENT_ERROR_CODES = [
    429,
    500,
    502,
    503,
    504,
]

class HttpxLanguageModel(TypeChatLanguageModel, AsyncContextManager):
    url: str
    headers: dict[str, str]
    default_params: dict[str, str]
    # Specifies the maximum number of retry attempts.
    max_retry_attempts: int = 3
    # Specifies the delay before retrying in milliseconds.
    retry_pause_seconds: float = 1.0
    # Specifies how long a request should wait in seconds
    # before timing out with a Failure.
    timeout_seconds = 10
    _async_client: httpx.AsyncClient

    def __init__(self, url: str, headers: dict[str, str], default_params: dict[str, str]):
        super().__init__()
        self.url = url
        self.headers = headers
        self.default_params = default_params
        self._async_client = httpx.AsyncClient()

    @override
    async def complete(self, prompt: str | list[PromptSection]) -> Success[str] | Failure:
        headers = {
            "Content-Type": "application/json",
            **self.headers,
        }

        if isinstance(prompt, str):
            prompt = [{"role": "user", "content": prompt}]

        body = {
            **self.default_params,
            "messages": prompt,
            "temperature": 0.0,
            "n": 1,
        }
        retry_count = 0
        while True:
            try:
                response = await self._async_client.post(
                    self.url,
                    headers=headers,
                    json=body,
                    timeout=self.timeout_seconds
                )
                if response.is_success:
                    json_result = cast(
                        dict[Literal["choices"], list[dict[Literal["message"], PromptSection]]],
                        response.json()
                    )
                    return Success(json_result["choices"][0]["message"]["content"] or "")

                if response.status_code not in _TRANSIENT_ERROR_CODES or retry_count >= self.max_retry_attempts:
                    return Failure(f"REST API error {response.status_code}: {response.reason_phrase}")
            except Exception as e:
                if retry_count >= self.max_retry_attempts:
                    return Failure(str(e) or f"{repr(e)} raised from within internal TypeChat language model.")

            await asyncio.sleep(self.retry_pause_seconds)
            retry_count += 1

    @override
    async def __aenter__(self) -> Self:
        return self

    @override
    async def __aexit__(self, __exc_type: type[BaseException] | None, __exc_value: BaseException | None, __traceback: TracebackType | None) -> bool | None:
        await self._async_client.aclose()

    def __del__(self):
        try:
            asyncio.get_running_loop().create_task(self._async_client.aclose())
        except Exception:
            pass

def create_language_model(vals: dict[str, str | None]) -> HttpxLanguageModel:
    """
    Creates a language model encapsulation of an OpenAI or Azure OpenAI REST API endpoint
    chosen by a dictionary of variables (typically just `os.environ`).

    If an `OPENAI_API_KEY` environment variable exists, an OpenAI model is constructed.
    The `OPENAI_ENDPOINT` and `OPENAI_MODEL` environment variables must also be defined or an error will be raised.

    If an `AZURE_OPENAI_API_KEY` environment variable exists, an Azure OpenAI model is constructed.
    The `AZURE_OPENAI_ENDPOINT` environment variable must also be defined or an exception will be thrown.

    If none of these key variables are defined, an exception is thrown.
    @returns An instance of `TypeChatLanguageModel`.

    Args:
        vals: A dictionary of variables. Typically just `os.environ`.
    """

    def required_var(name: str) -> str:
        val = vals.get(name, None)
        if val is None:
            raise ValueError(f"Missing environment variable {name}.")
        return val

    if "OPENAI_API_KEY" in vals:
        api_key = required_var("OPENAI_API_KEY")
        model = required_var("OPENAI_MODEL")
        endpoint = vals.get("OPENAI_ENDPOINT", None) or "https://api.openai.com/v1/chat/completions"
        org = vals.get("OPENAI_ORG", None) or ""
        return create_openai_language_model(api_key, model, endpoint, org)

    elif "AZURE_OPENAI_API_KEY" in vals:
        api_key=required_var("AZURE_OPENAI_API_KEY")
        endpoint=required_var("AZURE_OPENAI_ENDPOINT")
        return create_azure_openai_language_model(api_key, endpoint)
    else:
        raise ValueError("Missing environment variables for OPENAI_API_KEY or AZURE_OPENAI_API_KEY.")

def create_openai_language_model(api_key: str, model: str, endpoint: str = "https://api.openai.com/v1/chat/completions", org: str = "") -> HttpxLanguageModel:
    """
    Creates a language model encapsulation of an OpenAI REST API endpoint.

    Args:
        api_key: The OpenAI API key.
        model: The OpenAI model name.
        endpoint: The OpenAI REST API endpoint.
        org: The OpenAI organization.
    """
    headers = {
        "Authorization": f"Bearer {api_key}",
        "OpenAI-Organization": org,
    }
    default_params = {
        "model": model,
    }
    return HttpxLanguageModel(url=endpoint, headers=headers, default_params=default_params)

def create_azure_openai_language_model(api_key: str, endpoint: str) -> HttpxLanguageModel:
    """
    Creates a language model encapsulation of an Azure OpenAI REST API endpoint.

    Args:
        api_key: The Azure OpenAI API key.
        endpoint: The Azure OpenAI REST API endpoint.
    """
    headers = {
        # Needed when using managed identity
        "Authorization": f"Bearer {api_key}",
        # Needed when using regular API key
        "api-key": api_key,
    }
    return HttpxLanguageModel(url=endpoint, headers=headers, default_params={})
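A sketch of using the HTTP model layer directly (not part of the diff). The API key and model name are placeholders, and since `HttpxLanguageModel` is an async context manager, `async with` closes the underlying `httpx` client.

```python
import asyncio

from typechat import Failure, create_openai_language_model


async def main() -> None:
    # Placeholder credentials/model name for illustration only.
    async with create_openai_language_model(api_key="sk-placeholder", model="gpt-4o") as model:
        result = await model.complete("Return a single JSON object with a `greeting` property.")
        if isinstance(result, Failure):
            print("Request failed:", result.message)   # transient 429/5xx responses are retried first
        else:
            print(result.value)


asyncio.run(main())
```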
typechat/_internal/result.py
ADDED
@@ -0,0 +1,21 @@
from dataclasses import dataclass
from typing_extensions import Generic, TypeAlias, TypeVar

T = TypeVar("T", covariant=True)

@dataclass
class Success(Generic[T]):
    "An object representing a successful operation with a result of type `T`."
    value: T


@dataclass
class Failure:
    "An object representing an operation that failed for the reason given in `message`."
    message: str


"""
An object representing a successful or failed operation of type `T`.
"""
Result: TypeAlias = Success[T] | Failure
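Since `Result[T]` is just `Success[T] | Failure` over two dataclasses, callers can branch with `isinstance` or structural `match`. A tiny sketch (not part of the diff):

```python
from typechat import Failure, Result, Success


def describe(result: Result[int]) -> str:
    match result:
        case Success(value):
            return f"ok: {value}"
        case Failure(message):
            return f"failed: {message}"


print(describe(Success(42)))       # ok: 42
print(describe(Failure("oops")))   # failed: oops
```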
typechat/_internal/translator.py
ADDED
@@ -0,0 +1,125 @@
from typing_extensions import Generic, TypeVar

import pydantic_core

from typechat._internal.model import PromptSection, TypeChatLanguageModel
from typechat._internal.result import Failure, Result, Success
from typechat._internal.ts_conversion import python_type_to_typescript_schema
from typechat._internal.validator import TypeChatValidator

T = TypeVar("T", covariant=True)

class TypeChatJsonTranslator(Generic[T]):
    """
    Represents an object that can translate natural language requests in JSON objects of the given type.
    """

    model: TypeChatLanguageModel
    validator: TypeChatValidator[T]
    target_type: type[T]
    type_name: str
    schema_str: str
    _max_repair_attempts = 1

    def __init__(
        self,
        model: TypeChatLanguageModel,
        validator: TypeChatValidator[T],
        target_type: type[T],
        *,  # keyword-only parameters follow
        _raise_on_schema_errors: bool = True,
    ):
        """
        Args:
            model: The associated `TypeChatLanguageModel`.
            validator: The associated `TypeChatValidator[T]`.
            target_type: A runtime type object describing `T` - the expected shape of JSON data.
        """
        super().__init__()
        self.model = model
        self.validator = validator
        self.target_type = target_type

        conversion_result = python_type_to_typescript_schema(target_type)

        if _raise_on_schema_errors and conversion_result.errors:
            error_text = "".join(f"\n- {error}" for error in conversion_result.errors)
            raise ValueError(f"Could not convert Python type to TypeScript schema: \n{error_text}")

        self.type_name = conversion_result.typescript_type_reference
        self.schema_str = conversion_result.typescript_schema_str

    async def translate(self, input: str, *, prompt_preamble: str | list[PromptSection] | None = None) -> Result[T]:
        """
        Translates a natural language request into an object of type `T`. If the JSON object returned by
        the language model fails to validate, repair attempts will be made up until `_max_repair_attempts`.
        The prompt for the subsequent attempts will include the diagnostics produced for the prior attempt.
        This often helps produce a valid instance.

        Args:
            input: A natural language request.
            prompt_preamble: An optional string or list of prompt sections to prepend to the generated prompt.\
                If a string is given, it is converted to a single "user" role prompt section.
        """

        messages: list[PromptSection] = []

        if prompt_preamble:
            if isinstance(prompt_preamble, str):
                prompt_preamble = [{"role": "user", "content": prompt_preamble}]
            messages.extend(prompt_preamble)

        messages.append({"role": "user", "content": self._create_request_prompt(input)})

        num_repairs_attempted = 0
        while True:
            completion_response = await self.model.complete(messages)
            if isinstance(completion_response, Failure):
                return completion_response

            text_response = completion_response.value
            first_curly = text_response.find("{")
            last_curly = text_response.rfind("}") + 1
            error_message: str
            if 0 <= first_curly < last_curly:
                trimmed_response = text_response[first_curly:last_curly]
                try:
                    parsed_response = pydantic_core.from_json(trimmed_response, allow_inf_nan=False, cache_strings=False)
                except ValueError as e:
                    error_message = f"Error: {e}\n\nAttempted to parse:\n\n{trimmed_response}"
                else:
                    result = self.validator.validate_object(parsed_response)
                    if isinstance(result, Success):
                        return result
                    error_message = result.message
            else:
                error_message = f"Response did not contain any text resembling JSON.\nResponse was\n\n{text_response}"
            if num_repairs_attempted >= self._max_repair_attempts:
                return Failure(error_message)
            num_repairs_attempted += 1
            messages.append({"role": "assistant", "content": text_response})
            messages.append({"role": "user", "content": self._create_repair_prompt(error_message)})

    def _create_request_prompt(self, intent: str) -> str:
        prompt = f"""
You are a service that translates user requests into JSON objects of type "{self.type_name}" according to the following TypeScript definitions:
```
{self.schema_str}
```
The following is a user request:
'''
{intent}
'''
The following is the user request translated into a JSON object with 2 spaces of indentation and no properties with the value undefined:
"""
        return prompt

    def _create_repair_prompt(self, validation_error: str) -> str:
        prompt = f"""
The above JSON object is invalid for the following reason:
'''
{validation_error}
'''
The following is a revised JSON object:
"""
        return prompt
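Because the translator only depends on the `TypeChatLanguageModel` protocol, it can be exercised offline with a canned model. A sketch (not part of the diff; the JSON payload and schema are made up):

```python
import asyncio

from typing_extensions import TypedDict

from typechat import PromptSection, Result, Success, TypeChatJsonTranslator, TypeChatValidator


class Item(TypedDict):
    name: str
    quantity: int


class CannedModel:
    """Satisfies the TypeChatLanguageModel protocol with a fixed response."""
    async def complete(self, prompt: str | list[PromptSection]) -> Result[str]:
        return Success('{"name": "widget", "quantity": 3}')


async def main() -> None:
    validator = TypeChatValidator(Item)
    translator = TypeChatJsonTranslator(CannedModel(), validator, Item)
    print(await translator.translate("three widgets"))
    # Success(value={'name': 'widget', 'quantity': 3})


asyncio.run(main())
```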
typechat/_internal/ts_conversion/__init__.py
ADDED
@@ -0,0 +1,37 @@
from dataclasses import dataclass
from typing_extensions import TypeAliasType

from typechat._internal.ts_conversion.python_type_to_ts_nodes import python_type_to_typescript_nodes
from typechat._internal.ts_conversion.ts_node_to_string import ts_declaration_to_str

__all__ = [
    "python_type_to_typescript_schema",
    "TypeScriptSchemaConversionResult",
]

@dataclass
class TypeScriptSchemaConversionResult:
    typescript_schema_str: str
    """The TypeScript declarations generated from the Python declarations."""

    typescript_type_reference: str
    """The TypeScript string representation of a given Python type."""

    errors: list[str]
    """Any errors that occurred during conversion."""

def python_type_to_typescript_schema(py_type: type | TypeAliasType) -> TypeScriptSchemaConversionResult:
    """Converts a Python type to a TypeScript schema."""

    node_conversion_result = python_type_to_typescript_nodes(py_type)

    decl_strs = map(ts_declaration_to_str, node_conversion_result.type_declarations)
    decl_strs = reversed(list(decl_strs))

    schema_str = "\n".join(decl_strs)

    return TypeScriptSchemaConversionResult(
        typescript_schema_str=schema_str,
        typescript_type_reference=py_type.__name__,
        errors=node_conversion_result.errors,
    )
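A sketch of the schema conversion entry point on a small `TypedDict` (not part of the diff; the printed schema below is approximate):

```python
from typing_extensions import NotRequired, TypedDict

from typechat import python_type_to_typescript_schema


class User(TypedDict):
    """A user record."""
    name: str
    age: NotRequired[int]


result = python_type_to_typescript_schema(User)
print(result.typescript_type_reference)   # User
print(result.errors)                      # expected to be [] here
print(result.typescript_schema_str)
# Roughly:
# // A user record.
# interface User {
#     name: string;
#     age?: number;
# }
```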
typechat/_internal/ts_conversion/python_type_to_ts_nodes.py
ADDED
@@ -0,0 +1,447 @@
from __future__ import annotations

from collections import OrderedDict
import inspect
import sys
import typing
import typing_extensions
from dataclasses import MISSING, Field, dataclass
from types import NoneType, UnionType
from typing_extensions import (
    Annotated,
    Any,
    ClassVar,
    Doc,
    Final,
    Generic,
    Literal,
    LiteralString,
    Never,
    NoReturn,
    NotRequired,
    Protocol,
    Required,
    TypeAlias,
    TypeAliasType,
    TypeGuard,
    TypeVar,
    Union,
    cast,
    get_args,
    get_origin,
    get_original_bases,
    get_type_hints,
    is_typeddict,
)

from typechat._internal.ts_conversion.ts_type_nodes import (
    AnyTypeReferenceNode,
    ArrayTypeNode,
    BooleanTypeReferenceNode,
    IdentifierNode,
    IndexSignatureDeclarationNode,
    InterfaceDeclarationNode,
    LiteralTypeNode,
    NeverTypeReferenceNode,
    NullTypeReferenceNode,
    NumberTypeReferenceNode,
    PropertyDeclarationNode,
    StringTypeReferenceNode,
    ThisTypeReferenceNode,
    TopLevelDeclarationNode,
    TupleTypeNode,
    TypeAliasDeclarationNode,
    TypeNode,
    TypeParameterDeclarationNode,
    TypeReferenceNode,
    UnionTypeNode,
)

class GenericDeclarationish(Protocol):
    __parameters__: list[TypeVar]
    __type_params__: list[TypeVar]  # NOTE: may not be present unless running in 3.12

class GenericAliasish(Protocol):
    __origin__: object
    __args__: tuple[object, ...]
    __name__: str


class Annotatedish(Protocol):
    # NOTE: `__origin__` here refers to `SomeType` in `Annnotated[SomeType, ...]`
    __origin__: object
    __metadata__: tuple[object, ...]

class Dataclassish(Protocol):
    __dataclass_fields__: dict[str, Field[Any]]

# type[TypedDict]
# https://github.com/microsoft/pyright/pull/6505#issuecomment-1834431725
class TypeOfTypedDict(Protocol):
    __total__: bool

if sys.version_info >= (3, 12) and typing.TypeAliasType is not typing_extensions.TypeAliasType:
    # Sometimes typing_extensions aliases TypeAliasType,
    # sometimes it's its own declaration.
    def is_type_alias_type(py_type: object) -> TypeGuard[TypeAliasType]:
        return isinstance(py_type, typing.TypeAliasType | typing_extensions.TypeAliasType)
else:
    def is_type_alias_type(py_type: object) -> TypeGuard[TypeAliasType]:
        return isinstance(py_type, typing_extensions.TypeAliasType)


def is_generic(py_type: object) -> TypeGuard[GenericAliasish]:
    return hasattr(py_type, "__origin__") and hasattr(py_type, "__args__")

def is_dataclass(py_type: object) -> TypeGuard[Dataclassish]:
    return hasattr(py_type, "__dataclass_fields__") and isinstance(cast(Any, py_type).__dataclass_fields__, dict)

TypeReferenceTarget: TypeAlias = type | TypeAliasType | TypeVar | GenericAliasish

def is_python_type_or_alias(origin: object) -> TypeGuard[type | TypeAliasType]:
    return isinstance(origin, type) or is_type_alias_type(origin)


_KNOWN_GENERIC_SPECIAL_FORMS: frozenset[Any] = frozenset(
    [
        Required,
        NotRequired,
        ClassVar,
        Final,
        Annotated,
        Generic,
    ]
)

_KNOWN_SPECIAL_BASES: frozenset[Any] = frozenset([
    typing.TypedDict,
    typing_extensions.TypedDict,
    Protocol,

    # In older versions of Python, `__orig_bases__` will not be defined on `TypedDict`s
    # derived from the built-in `typing` module (but they will from `typing_extensions`!).
    # So `get_original_bases` will fetch `__bases__` which will map `TypedDict` to a plain `dict`.
    dict,
])


@dataclass
class TypeScriptNodeTranslationResult:
    type_declarations: list[TopLevelDeclarationNode]
    errors: list[str]


# TODO: https://github.com/microsoft/pyright/issues/6587
_SELF_TYPE = getattr(typing_extensions, "Self")

_LIST_TYPES: set[object] = {
    list,
    set,
    frozenset,
    # TODO: https://github.com/microsoft/pyright/issues/6582
    # collections.abc.MutableSequence,
    # collections.abc.Sequence,
    # collections.abc.Set
}

# TODO: https://github.com/microsoft/pyright/issues/6582
# _DICT_TYPES: set[type] = {
#     dict,
#     collections.abc.MutableMapping,
#     collections.abc.Mapping
# }


def python_type_to_typescript_nodes(root_py_type: object) -> TypeScriptNodeTranslationResult:
    # TODO: handle conflicting names

    declared_types: OrderedDict[object, TopLevelDeclarationNode | None] = OrderedDict()
    undeclared_types: OrderedDict[object, object] = OrderedDict({root_py_type: root_py_type})  # just a set, really
    used_names: dict[str, type | TypeAliasType] = {}
    errors: list[str] = []

    def skip_annotations(py_type: object) -> object:
        origin = py_type
        while (origin := get_origin(py_type)) and origin in _KNOWN_GENERIC_SPECIAL_FORMS:
            type_arguments = get_args(py_type)
            if not type_arguments:
                errors.append(f"'{origin}' has been used without any type arguments.")
                return Any
            py_type = type_arguments[0]
            continue
        return py_type

    def convert_to_type_reference_node(py_type: TypeReferenceTarget) -> TypeNode:
        py_type_to_declare = py_type

        if is_generic(py_type):
            py_type_to_declare = get_origin(py_type)

        if py_type_to_declare not in declared_types:
            if is_python_type_or_alias(py_type_to_declare):
                undeclared_types[py_type_to_declare] = py_type_to_declare
            elif not isinstance(py_type, TypeVar):
                errors.append(f"Invalid usage of '{py_type}' as a type annotation.")
                return AnyTypeReferenceNode

        if is_generic(py_type):
            return generic_alias_to_type_reference(py_type)

        return TypeReferenceNode(IdentifierNode(py_type.__name__))

    def generic_alias_to_type_reference(py_type: GenericAliasish) -> TypeReferenceNode:
        origin = get_origin(py_type)
        assert origin is not None
        name = origin.__name__
        type_arguments = list(map(convert_to_type_node, get_args(py_type)))
        return TypeReferenceNode(IdentifierNode(name), type_arguments)

    def convert_literal_type_arg_to_type_node(py_type: object) -> TypeNode:
        py_type = skip_annotations(py_type)
        match py_type:
            case str() | int() | float():  # no need to match bool, it's a subclass of int
                return LiteralTypeNode(py_type)
            case None:
                return NullTypeReferenceNode
            case _:
                errors.append(f"'{py_type}' cannot be used as a literal type.")
                return AnyTypeReferenceNode

    def convert_to_type_node(py_type: object) -> TypeNode:
        py_type = skip_annotations(py_type)

        if py_type is str or py_type is LiteralString:
            return StringTypeReferenceNode
        if py_type is int or py_type is float:
            return NumberTypeReferenceNode
        if py_type is bool:
            return BooleanTypeReferenceNode
        if py_type is Any or py_type is object:
            return AnyTypeReferenceNode
        if py_type is None or py_type is NoneType:
            return NullTypeReferenceNode
        if py_type is Never or py_type is NoReturn:
            return NeverTypeReferenceNode
        if py_type is _SELF_TYPE:
            return ThisTypeReferenceNode

        # TODO: consider handling bare 'tuple' (and list, etc.)
        # https://docs.python.org/3/library/typing.html#annotating-tuples
        # Using plain tuple as an annotation is equivalent to using tuple[Any, ...]:

        origin = get_origin(py_type)
        if origin is not None:
            if origin in _LIST_TYPES:
                (type_arg,) = get_type_argument_nodes(py_type, 1, AnyTypeReferenceNode)
                if isinstance(type_arg, UnionTypeNode):
                    return TypeReferenceNode(IdentifierNode("Array"), [type_arg])
                return ArrayTypeNode(type_arg)

            if origin is dict:
                # TODO
                # Currently, we naively assume all dicts are string-keyed
                # unless they're annotated with `int` or `float` (note: not `int | float`).
                key_type_arg, value_type_arg = get_type_argument_nodes(py_type, 2, AnyTypeReferenceNode)
                if key_type_arg is not NumberTypeReferenceNode:
                    key_type_arg = StringTypeReferenceNode
                return TypeReferenceNode(IdentifierNode("Record"), [key_type_arg, value_type_arg])

            if origin is tuple:
                # Note that when the type is `tuple[()]`,
                # `type_args` will be an empty tuple.
                # Which is nice, because we don't have to special-case anything!
                type_args = get_args(py_type)

                if Ellipsis in type_args:
                    if len(type_args) != 2:
                        errors.append(
                            f"The tuple type '{py_type}' is ill-formed. Tuples with an ellipsis can only take the form 'tuple[SomeType, ...]'."
                        )
                        return ArrayTypeNode(AnyTypeReferenceNode)

                    ellipsis_index = type_args.index(Ellipsis)
                    if ellipsis_index != 1:
                        errors.append(
                            f"The tuple type '{py_type}' is ill-formed because the ellipsis (...) cannot be the first element."
                        )
                        return ArrayTypeNode(AnyTypeReferenceNode)

                    return ArrayTypeNode(convert_to_type_node(type_args[0]))

                return TupleTypeNode([convert_to_type_node(py_type_arg) for py_type_arg in type_args])

            if origin is Union or origin is UnionType:
                type_node = [convert_to_type_node(py_type_arg) for py_type_arg in get_args(py_type)]
                assert len(type_node) > 1
                return UnionTypeNode(type_node)

            if origin is Literal:
                type_node = [convert_literal_type_arg_to_type_node(py_type_arg) for py_type_arg in get_args(py_type)]
                assert len(type_node) >= 1
                return UnionTypeNode(type_node)

            assert is_generic(py_type)
            return convert_to_type_reference_node(py_type)

        if is_python_type_or_alias(py_type):
            return convert_to_type_reference_node(py_type)

        if isinstance(py_type, TypeVar):
            return convert_to_type_reference_node(py_type)

        errors.append(f"'{py_type}' cannot be used as a type annotation.")
        return AnyTypeReferenceNode

    def declare_property(name: str, py_annotation: type | TypeAliasType, is_typeddict_attribute: bool, optionality_default: bool):
        """
        Declare a property for a given type.
        If 'optionality_default' is
        """
        current_annotation: object = py_annotation
        origin: object
        optional: bool | None = None
        comment: str | None = None
        while origin := get_origin(current_annotation):
            if origin is Annotated and comment is None:
                current_annotation = cast(Annotatedish, current_annotation)

                for metadata in current_annotation.__metadata__:
                    if isinstance(metadata, Doc):
                        comment = metadata.documentation
                        break
                    if isinstance(metadata, str):
                        comment = metadata
                        break

                current_annotation = current_annotation.__origin__

            elif origin is Required or origin is NotRequired:
                if not is_typeddict_attribute:
                    errors.append(f"Optionality cannot be specified with {origin} outside of TypedDicts.")

                if optional is None:
                    optional = origin is NotRequired
                else:
                    errors.append(f"{origin} cannot be used within another optionality annotation.")

                current_annotation = get_args(current_annotation)[0]
            else:
                break

        if optional is None:
            optional = optionality_default

        type_annotation = convert_to_type_node(skip_annotations(current_annotation))
        return PropertyDeclarationNode(name, optional, comment or "", type_annotation)

    def reserve_name(val: type | TypeAliasType):
        type_name = val.__name__
        if type_name in used_names:
            errors.append(f"Cannot create a schema using two types with the same name. {type_name} conflicts between {val} and {used_names[type_name]}")
        else:
            used_names[type_name] = val

    def declare_type(py_type: object):
        if (is_typeddict(py_type) or is_dataclass(py_type)) and isinstance(py_type, type):
            comment = py_type.__doc__ or ""

            if hasattr(py_type, "__type_params__") and cast(GenericDeclarationish, py_type).__type_params__:
                type_params = [
                    TypeParameterDeclarationNode(type_param.__name__)
                    for type_param in cast(GenericDeclarationish, py_type).__type_params__
                ]
            elif hasattr(py_type, "__parameters__") and cast(GenericDeclarationish, py_type).__parameters__:
                type_params = [
                    TypeParameterDeclarationNode(type_param.__name__)
                    for type_param in cast(GenericDeclarationish, py_type).__parameters__
                ]
            else:
                type_params = None

            annotated_members = get_type_hints(py_type, include_extras=True)

            raw_but_filtered_bases: list[type] = [
                base
                for base in get_original_bases(py_type)
                if not(base is object or base in _KNOWN_SPECIAL_BASES or get_origin(base) in _KNOWN_GENERIC_SPECIAL_FORMS)
            ]
            base_attributes: OrderedDict[str, set[object]] = OrderedDict()
            for base in raw_but_filtered_bases:
                for prop, type_hint in get_type_hints(get_origin(base) or base, include_extras=True).items():
                    base_attributes.setdefault(prop, set()).add(type_hint)
            bases = [convert_to_type_node(base) for base in raw_but_filtered_bases]

            properties: list[PropertyDeclarationNode | IndexSignatureDeclarationNode] = []
            if is_typeddict(py_type):
                for attr_name, type_hint in annotated_members.items():
                    if attribute_identical_in_all_bases(attr_name, type_hint, base_attributes):
                        continue

                    assume_optional = cast(TypeOfTypedDict, py_type).__total__ is False
                    prop = declare_property(attr_name, type_hint, is_typeddict_attribute=True, optionality_default=assume_optional)
                    properties.append(prop)
            else:
                # When a dataclass is created with no explicit docstring, @dataclass will
                # generate one for us; however, we don't want these in the default output.
                cleaned_signature = str(inspect.signature(py_type)).replace(" -> None", "")
                dataclass_doc = f"{py_type.__name__}{cleaned_signature}"
                if comment == dataclass_doc:
                    comment = ""

                for attr_name, field in cast(Dataclassish, py_type).__dataclass_fields__.items():
                    type_hint = annotated_members[attr_name]
                    optional = not(field.default is MISSING and field.default_factory is MISSING)
                    prop = declare_property(attr_name, type_hint, is_typeddict_attribute=False, optionality_default=optional)
                    properties.append(prop)

            reserve_name(py_type)
            return InterfaceDeclarationNode(py_type.__name__, type_params, comment, bases, properties)
        if isinstance(py_type, type):
            errors.append(f"{py_type.__name__} was not a TypedDict, dataclass, or type alias, and cannot be translated.")

            reserve_name(py_type)

            return InterfaceDeclarationNode(py_type.__name__, None, "", None, [])
        if is_type_alias_type(py_type):
            type_params = [TypeParameterDeclarationNode(type_param.__name__) for type_param in py_type.__type_params__]

            reserve_name(py_type)

            return TypeAliasDeclarationNode(
                py_type.__name__,
                type_params,
                f"Comment for {py_type.__name__}.",
                convert_to_type_node(py_type.__value__),
            )

        raise RuntimeError(f"Cannot declare type {py_type}.")

    def attribute_identical_in_all_bases(attr_name: str, type_hint: object, base_attributes: dict[str, set[object]]) -> bool:
        """
        We typically want to omit attributes with type hints that are
        identical to those declared in all base types.
        """
        return attr_name in base_attributes and len(base_attributes[attr_name]) == 1 and type_hint in base_attributes[attr_name]

    def get_type_argument_nodes(py_type: object, count: int, default: TypeNode) -> list[TypeNode]:
        py_type_args = get_args(py_type)
        result: list[TypeNode] = []
        if len(py_type_args) != count:
            errors.append(f"Expected '{count}' type arguments for '{py_type}'.")
        for i in range(count):
            if i < len(py_type_args):
                type_node = convert_to_type_node(py_type_args[i])
            else:
                type_node = default
            result.append(type_node)
        return result

    while undeclared_types:
        py_type = undeclared_types.popitem()[0]
        declared_types[py_type] = None
        declared_types[py_type] = declare_type(py_type)

    type_declarations = cast(list[TopLevelDeclarationNode], list(declared_types.values()))
    assert None not in type_declarations

    return TypeScriptNodeTranslationResult(type_declarations, errors)
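A sketch of the node-level converter and how `Annotated[..., Doc(...)]` metadata and referenced dataclasses are handled (not part of the diff; it reaches into `_internal` purely for illustration, and the rendered output is approximate):

```python
from dataclasses import dataclass

from typing_extensions import Annotated, Doc

from typechat._internal.ts_conversion.python_type_to_ts_nodes import python_type_to_typescript_nodes
from typechat._internal.ts_conversion.ts_node_to_string import ts_declaration_to_str


@dataclass
class Address:
    city: str


@dataclass
class Person:
    """A person in the address book."""
    name: Annotated[str, Doc("The person's full name.")]
    address: Address


result = python_type_to_typescript_nodes(Person)
# Person is declared first and Address is discovered through the reference;
# python_type_to_typescript_schema reverses the list so dependencies print first.
for declaration in reversed(result.type_declarations):
    print(ts_declaration_to_str(declaration))
print(result.errors)
```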
typechat/_internal/ts_conversion/ts_node_to_string.py
ADDED
@@ -0,0 +1,96 @@
import json
from typing_extensions import assert_never

from typechat._internal.ts_conversion.ts_type_nodes import (
    ArrayTypeNode,
    IdentifierNode,
    IndexSignatureDeclarationNode,
    InterfaceDeclarationNode,
    LiteralTypeNode,
    NullTypeReferenceNode,
    PropertyDeclarationNode,
    TopLevelDeclarationNode,
    TupleTypeNode,
    TypeAliasDeclarationNode,
    TypeNode,
    TypeReferenceNode,
    UnionTypeNode,
)


def comment_to_str(comment_text: str, indentation: str) -> str:
    comment_text = comment_text.strip()
    if not comment_text:
        return ""
    lines = [line.strip() for line in comment_text.splitlines()]

    return "\n".join([f"{indentation}// {line}" for line in lines]) + "\n"


def ts_type_to_str(type_node: TypeNode) -> str:
    match type_node:
        case TypeReferenceNode(name, type_arguments):
            assert isinstance(name, IdentifierNode)
            if type_arguments is None:
                return name.text
            return f"{name.text}<{', '.join([ts_type_to_str(arg) for arg in type_arguments])}>"
        case ArrayTypeNode(element_type):
            assert type(element_type) is not UnionTypeNode
            # if type(element_type) is UnionTypeNode:
            #     return f"Array<{ts_type_to_str(element_type)}>"
            return f"{ts_type_to_str(element_type)}[]"
        case TupleTypeNode(element_types):
            return f"[{', '.join([ts_type_to_str(element_type) for element_type in element_types])}]"
        case UnionTypeNode(types):
            # Remove duplicates, but try to preserve order of types,
            # and put null at the end if it's present.
            str_set: set[str] = set()
            type_strs: list[str] = []
            nullable = False
            for type_node in types:
                if type_node is NullTypeReferenceNode:
                    nullable = True
                    continue
                type_str = ts_type_to_str(type_node)
                if type_str not in str_set:
                    str_set.add(type_str)
                    type_strs.append(type_str)
            if nullable:
                type_strs.append("null")
            return " | ".join(type_strs)
        case LiteralTypeNode(value):
            return json.dumps(value)
        # case _:
        #     raise NotImplementedError(f"Unhandled type {type(type_node)}")
    assert_never(type_node)

def object_member_to_str(member: PropertyDeclarationNode | IndexSignatureDeclarationNode) -> str:
    match member:
        case PropertyDeclarationNode(name, is_optional, comment, annotation):
            comment = comment_to_str(comment, "    ")
            if not name.isidentifier():
                name = json.dumps(name)
            return f"{comment}    {name}{'?' if is_optional else ''}: {ts_type_to_str(annotation)};"
        case IndexSignatureDeclarationNode(key_type, value_type):
            return f"[key: {ts_type_to_str(key_type)}]: {ts_type_to_str(value_type)};"
        # case _:
        #     raise NotImplementedError(f"Unhandled member type {type(member)}")
    assert_never(member)


def ts_declaration_to_str(declaration: TopLevelDeclarationNode) -> str:
    match declaration:
        case InterfaceDeclarationNode(name, type_parameters, comment, base_types, members):
            comment = comment_to_str(comment, "")
            type_param_str = f"<{', '.join([param.name for param in type_parameters])}>" if type_parameters else ""
            base_type_str = (
                f" extends {', '.join([ts_type_to_str(base_type) for base_type in base_types])}" if base_types else ""
            )
            members_str = "\n".join([f"{object_member_to_str(member)}" for member in members]) + "\n" if members else ""
            return f"{comment}interface {name}{type_param_str}{base_type_str} {{\n{members_str}}}\n"
        case TypeAliasDeclarationNode(name, type_parameters, comment, target):
            type_param_str = f"<{', '.join([param.name for param in type_parameters])}>" if type_parameters else ""
            return f"type {name}{type_param_str} = {ts_type_to_str(target)}\n"
        # case _:
        #     raise NotImplementedError(f"Unhandled declaration type {type(declaration)}")
    assert_never(declaration)
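The printer is plain data in, string out; a hand-built example (not part of the diff):

```python
from typechat._internal.ts_conversion.ts_node_to_string import ts_declaration_to_str
from typechat._internal.ts_conversion.ts_type_nodes import (
    IdentifierNode,
    InterfaceDeclarationNode,
    NullTypeReferenceNode,
    NumberTypeReferenceNode,
    PropertyDeclarationNode,
    StringTypeReferenceNode,
    UnionTypeNode,
)

order = InterfaceDeclarationNode(
    name="Order",
    type_parameters=None,
    comment="A customer order.",
    base_types=None,
    members=[
        PropertyDeclarationNode("id", False, "", StringTypeReferenceNode),
        PropertyDeclarationNode("discount", True, "Percentage, if any.", UnionTypeNode([NumberTypeReferenceNode, NullTypeReferenceNode])),
    ],
)
print(ts_declaration_to_str(order))
# // A customer order.
# interface Order {
#     id: string;
#     // Percentage, if any.
#     discount?: number | null;
# }
```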
typechat/_internal/ts_conversion/ts_type_nodes.py
ADDED
@@ -0,0 +1,78 @@
from __future__ import annotations

from dataclasses import dataclass
from typing_extensions import TypeAlias

TypeNode: TypeAlias = "TypeReferenceNode | UnionTypeNode | LiteralTypeNode | ArrayTypeNode | TupleTypeNode"

@dataclass
class IdentifierNode:
    text: str

@dataclass
class QualifiedNameNode:
    left: QualifiedNameNode | IdentifierNode
    right: IdentifierNode

@dataclass
class TypeReferenceNode:
    name: QualifiedNameNode | IdentifierNode
    type_arguments: list[TypeNode] | None = None

@dataclass
class UnionTypeNode:
    types: list[TypeNode]

@dataclass
class LiteralTypeNode:
    value: str | int | float | bool

@dataclass
class ArrayTypeNode:
    element_type: TypeNode

@dataclass
class TupleTypeNode:
    element_types: list[TypeNode]

@dataclass
class InterfaceDeclarationNode:
    name: str
    type_parameters: list[TypeParameterDeclarationNode] | None
    comment: str
    base_types: list[TypeNode] | None
    members: list[PropertyDeclarationNode | IndexSignatureDeclarationNode]

@dataclass
class TypeParameterDeclarationNode:
    name: str
    constraint: TypeNode | None = None

@dataclass
class PropertyDeclarationNode:
    name: str
    is_optional: bool
    comment: str
    type: TypeNode

@dataclass
class IndexSignatureDeclarationNode:
    key_type: TypeNode
    value_type: TypeNode

@dataclass
class TypeAliasDeclarationNode:
    name: str
    type_parameters: list[TypeParameterDeclarationNode] | None
    comment: str
    type: TypeNode

TopLevelDeclarationNode: TypeAlias = "InterfaceDeclarationNode | TypeAliasDeclarationNode"

StringTypeReferenceNode = TypeReferenceNode(IdentifierNode("string"))
NumberTypeReferenceNode = TypeReferenceNode(IdentifierNode("number"))
BooleanTypeReferenceNode = TypeReferenceNode(IdentifierNode("boolean"))
AnyTypeReferenceNode = TypeReferenceNode(IdentifierNode("any"))
NullTypeReferenceNode = TypeReferenceNode(IdentifierNode("null"))
NeverTypeReferenceNode = TypeReferenceNode(IdentifierNode("never"))
ThisTypeReferenceNode = TypeReferenceNode(IdentifierNode("this"))
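These dataclasses form a minimal TypeScript AST; for example, `type Id = string | number` would be represented as below (a sketch, not part of the diff):

```python
from typechat._internal.ts_conversion.ts_type_nodes import (
    NumberTypeReferenceNode,
    StringTypeReferenceNode,
    TypeAliasDeclarationNode,
    UnionTypeNode,
)

id_alias = TypeAliasDeclarationNode(
    name="Id",
    type_parameters=None,
    comment="",
    type=UnionTypeNode([StringTypeReferenceNode, NumberTypeReferenceNode]),
)
# ts_declaration_to_str(id_alias) renders this as: type Id = string | number
print(id_alias)
```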
typechat/_internal/validator.py
ADDED
@@ -0,0 +1,67 @@
import json
from typing_extensions import Generic, TypeVar

import pydantic
import pydantic_core

from typechat._internal.result import Failure, Result, Success

T = TypeVar("T", covariant=True)

class TypeChatValidator(Generic[T]):
    """
    Validates an object against a given Python type.
    """

    _adapted_type: pydantic.TypeAdapter[T]

    def __init__(self, py_type: type[T]):
        """
        Args:

            py_type: The schema type to validate against.
        """
        super().__init__()
        self._adapted_type = pydantic.TypeAdapter(py_type)

    def validate_object(self, obj: object) -> Result[T]:
        """
        Validates the given Python object according to the associated schema type.

        Returns a `Success[T]` object containing the object if validation was successful.
        Otherwise, returns a `Failure` object with a `message` property describing the error.
        """
        try:
            # TODO: Switch to `validate_python` when validation modes are exposed.
            # https://github.com/pydantic/pydantic-core/issues/712
            # We'd prefer to keep `validate_object` as the core method and
            # allow translators to concern themselves with the JSON instead.
            # However, under Pydantic's `strict` mode, a `dict` isn't considered compatible
            # with a dataclass. So for now, jump back to JSON and validate the string.
            json_str = pydantic_core.to_json(obj)
            typed_dict = self._adapted_type.validate_json(json_str, strict=True)
            return Success(typed_dict)
        except pydantic.ValidationError as validation_error:
            return _handle_error(validation_error)


def _handle_error(validation_error: pydantic.ValidationError) -> Failure:
    error_strings: list[str] = []
    for error in validation_error.errors(include_url=False):
        error_string = ""
        loc_path = error["loc"]
        if loc_path:
            error_string += f"Validation path `{'.'.join(map(str, loc_path))}` "
        else:
            error_string += "Root validation "
        input = error["input"]
        error_string += f"failed for value `{json.dumps(input)}` because:\n  {error['msg']}"
        error_strings.append(error_string)

    if len(error_strings) > 1:
        failure_message = "Several possible issues may have occurred with the given data.\n\n"
    else:
        failure_message = ""
    failure_message += "\n".join(error_strings)

    return Failure(failure_message)
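A sketch of the validator in isolation (not part of the diff; the exact failure message may differ):

```python
from typing_extensions import TypedDict

from typechat import Failure, TypeChatValidator


class Point(TypedDict):
    x: int
    y: int


validator = TypeChatValidator(Point)

print(validator.validate_object({"x": 1, "y": 2}))
# Success(value={'x': 1, 'y': 2})

bad = validator.validate_object({"x": 1, "y": "two"})
assert isinstance(bad, Failure)
print(bad.message)
# e.g. Validation path `y` failed for value `"two"` because:
#   Input should be a valid integer
```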
typechat/py.typed
ADDED
File without changes

{typeagent_py-0.1.0.dist-info → typeagent_py-0.1.1.dist-info}/WHEEL
File without changes

{typeagent_py-0.1.0.dist-info → typeagent_py-0.1.1.dist-info}/licenses/LICENSE
File without changes