lightningrod-ai 0.1.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lightningrod/__init__.py +66 -0
- lightningrod/_display.py +204 -0
- lightningrod/_errors.py +67 -0
- lightningrod/_generated/__init__.py +8 -0
- lightningrod/_generated/api/__init__.py +1 -0
- lightningrod/_generated/api/datasets/__init__.py +1 -0
- lightningrod/_generated/api/datasets/create_dataset_datasets_post.py +133 -0
- lightningrod/_generated/api/datasets/get_dataset_datasets_dataset_id_get.py +168 -0
- lightningrod/_generated/api/datasets/get_dataset_samples_datasets_dataset_id_samples_get.py +209 -0
- lightningrod/_generated/api/datasets/upload_samples_datasets_dataset_id_samples_post.py +190 -0
- lightningrod/_generated/api/file_sets/__init__.py +1 -0
- lightningrod/_generated/api/file_sets/add_file_to_set_filesets_file_set_id_files_post.py +190 -0
- lightningrod/_generated/api/file_sets/create_file_set_filesets_post.py +174 -0
- lightningrod/_generated/api/file_sets/get_file_set_filesets_file_set_id_get.py +168 -0
- lightningrod/_generated/api/file_sets/list_file_sets_filesets_get.py +173 -0
- lightningrod/_generated/api/file_sets/list_files_in_set_filesets_file_set_id_files_get.py +209 -0
- lightningrod/_generated/api/files/__init__.py +1 -0
- lightningrod/_generated/api/files/create_file_upload_files_post.py +174 -0
- lightningrod/_generated/api/open_ai_compatible/__init__.py +1 -0
- lightningrod/_generated/api/open_ai_compatible/chat_completions_openai_chat_completions_post.py +174 -0
- lightningrod/_generated/api/organizations/__init__.py +1 -0
- lightningrod/_generated/api/organizations/get_balance_organizations_balance_get.py +131 -0
- lightningrod/_generated/api/samples/__init__.py +1 -0
- lightningrod/_generated/api/samples/validate_sample_samples_validate_post.py +174 -0
- lightningrod/_generated/api/transform_jobs/__init__.py +1 -0
- lightningrod/_generated/api/transform_jobs/cost_estimation_transform_jobs_cost_estimation_post.py +174 -0
- lightningrod/_generated/api/transform_jobs/create_transform_job_transform_jobs_post.py +174 -0
- lightningrod/_generated/api/transform_jobs/get_transform_job_metrics_transform_jobs_job_id_metrics_get.py +172 -0
- lightningrod/_generated/api/transform_jobs/get_transform_job_transform_jobs_job_id_get.py +168 -0
- lightningrod/_generated/client.py +268 -0
- lightningrod/_generated/errors.py +16 -0
- lightningrod/_generated/models/__init__.py +147 -0
- lightningrod/_generated/models/answer_type.py +129 -0
- lightningrod/_generated/models/answer_type_enum.py +11 -0
- lightningrod/_generated/models/balance_response.py +61 -0
- lightningrod/_generated/models/chat_completion_request.py +216 -0
- lightningrod/_generated/models/chat_completion_response.py +146 -0
- lightningrod/_generated/models/chat_message.py +69 -0
- lightningrod/_generated/models/choice.py +97 -0
- lightningrod/_generated/models/create_dataset_response.py +61 -0
- lightningrod/_generated/models/create_file_set_file_request.py +101 -0
- lightningrod/_generated/models/create_file_set_file_request_metadata_type_0.py +46 -0
- lightningrod/_generated/models/create_file_set_request.py +83 -0
- lightningrod/_generated/models/create_file_upload_request.py +91 -0
- lightningrod/_generated/models/create_file_upload_response.py +165 -0
- lightningrod/_generated/models/create_file_upload_response_metadata_type_0.py +46 -0
- lightningrod/_generated/models/create_transform_job_request.py +312 -0
- lightningrod/_generated/models/dataset_metadata.py +69 -0
- lightningrod/_generated/models/estimate_cost_request.py +243 -0
- lightningrod/_generated/models/estimate_cost_response.py +117 -0
- lightningrod/_generated/models/event_usage_summary.py +80 -0
- lightningrod/_generated/models/file_set.py +128 -0
- lightningrod/_generated/models/file_set_file.py +203 -0
- lightningrod/_generated/models/file_set_file_metadata_type_0.py +57 -0
- lightningrod/_generated/models/file_set_query_seed_generator.py +136 -0
- lightningrod/_generated/models/file_set_seed_generator.py +126 -0
- lightningrod/_generated/models/filter_criteria.py +83 -0
- lightningrod/_generated/models/forward_looking_question.py +130 -0
- lightningrod/_generated/models/forward_looking_question_generator.py +217 -0
- lightningrod/_generated/models/gdelt_seed_generator.py +103 -0
- lightningrod/_generated/models/http_validation_error.py +79 -0
- lightningrod/_generated/models/job_usage.py +185 -0
- lightningrod/_generated/models/job_usage_by_step_type_0.py +59 -0
- lightningrod/_generated/models/label.py +143 -0
- lightningrod/_generated/models/list_file_set_files_response.py +113 -0
- lightningrod/_generated/models/list_file_sets_response.py +75 -0
- lightningrod/_generated/models/llm_model_usage_summary.py +98 -0
- lightningrod/_generated/models/mock_transform_config.py +243 -0
- lightningrod/_generated/models/mock_transform_config_metadata_additions.py +46 -0
- lightningrod/_generated/models/model_config.py +316 -0
- lightningrod/_generated/models/model_source_type.py +16 -0
- lightningrod/_generated/models/news_context.py +82 -0
- lightningrod/_generated/models/news_context_generator.py +127 -0
- lightningrod/_generated/models/news_seed_generator.py +220 -0
- lightningrod/_generated/models/paginated_samples_response.py +113 -0
- lightningrod/_generated/models/pipeline_metrics_response.py +99 -0
- lightningrod/_generated/models/question.py +74 -0
- lightningrod/_generated/models/question_and_label_generator.py +217 -0
- lightningrod/_generated/models/question_generator.py +217 -0
- lightningrod/_generated/models/question_pipeline.py +417 -0
- lightningrod/_generated/models/question_renderer.py +123 -0
- lightningrod/_generated/models/rag_context.py +82 -0
- lightningrod/_generated/models/response_message.py +69 -0
- lightningrod/_generated/models/rollout.py +130 -0
- lightningrod/_generated/models/rollout_generator.py +139 -0
- lightningrod/_generated/models/rollout_parsed_output_type_0.py +46 -0
- lightningrod/_generated/models/sample.py +323 -0
- lightningrod/_generated/models/sample_meta.py +46 -0
- lightningrod/_generated/models/seed.py +135 -0
- lightningrod/_generated/models/step_cost_breakdown.py +109 -0
- lightningrod/_generated/models/transform_job.py +268 -0
- lightningrod/_generated/models/transform_job_status.py +11 -0
- lightningrod/_generated/models/transform_step_metrics_response.py +131 -0
- lightningrod/_generated/models/transform_type.py +25 -0
- lightningrod/_generated/models/upload_samples_request.py +75 -0
- lightningrod/_generated/models/upload_samples_response.py +69 -0
- lightningrod/_generated/models/usage.py +77 -0
- lightningrod/_generated/models/usage_summary.py +102 -0
- lightningrod/_generated/models/usage_summary_events.py +59 -0
- lightningrod/_generated/models/usage_summary_llm_by_model.py +59 -0
- lightningrod/_generated/models/validate_sample_response.py +69 -0
- lightningrod/_generated/models/validation_error.py +90 -0
- lightningrod/_generated/models/web_search_labeler.py +120 -0
- lightningrod/_generated/py.typed +1 -0
- lightningrod/_generated/types.py +54 -0
- lightningrod/client.py +48 -0
- lightningrod/datasets/__init__.py +4 -0
- lightningrod/datasets/client.py +174 -0
- lightningrod/datasets/dataset.py +255 -0
- lightningrod/files/__init__.py +0 -0
- lightningrod/files/client.py +58 -0
- lightningrod/filesets/__init__.py +0 -0
- lightningrod/filesets/client.py +106 -0
- lightningrod/organization/__init__.py +0 -0
- lightningrod/organization/client.py +17 -0
- lightningrod/py.typed +0 -0
- lightningrod/transforms/__init__.py +0 -0
- lightningrod/transforms/client.py +154 -0
- lightningrod_ai-0.1.6.dist-info/METADATA +122 -0
- lightningrod_ai-0.1.6.dist-info/RECORD +123 -0
- lightningrod_ai-0.1.6.dist-info/WHEEL +5 -0
- lightningrod_ai-0.1.6.dist-info/licenses/LICENSE +23 -0
- lightningrod_ai-0.1.6.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,216 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from collections.abc import Mapping
|
|
4
|
+
from typing import TYPE_CHECKING, Any, TypeVar, cast
|
|
5
|
+
|
|
6
|
+
from attrs import define as _attrs_define
|
|
7
|
+
from attrs import field as _attrs_field
|
|
8
|
+
|
|
9
|
+
from ..types import UNSET, Unset
|
|
10
|
+
|
|
11
|
+
if TYPE_CHECKING:
|
|
12
|
+
from ..models.chat_message import ChatMessage
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
T = TypeVar("T", bound="ChatCompletionRequest")
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
@_attrs_define
class ChatCompletionRequest:
    """Request body for the OpenAI-compatible chat-completions endpoint.

    Attributes:
        model (str): ID of the model to use
        messages (list[ChatMessage]): A list of messages comprising the conversation so far
        temperature (float | None | Unset): Sampling temperature between 0 and 2
        max_tokens (int | None | Unset): Maximum number of tokens to generate
        top_p (float | None | Unset): Nucleus sampling parameter
        stream (bool | None | Unset): Whether to stream back partial progress Default: False.
        n (int | None | Unset): Number of chat completion choices to generate Default: 1.
        stop (list[str] | None | str | Unset): Up to 4 sequences where the API will stop generating
    """

    model: str
    messages: list[ChatMessage]
    temperature: float | None | Unset = UNSET
    max_tokens: int | None | Unset = UNSET
    top_p: float | None | Unset = UNSET
    stream: bool | None | Unset = False
    n: int | None | Unset = 1
    stop: list[str] | None | str | Unset = UNSET
    # Keys present on the wire that are not modeled as attributes above;
    # round-tripped by to_dict()/from_dict() and exposed via the mapping dunders.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict.

        Required keys ("model", "messages") are always emitted. Optional
        fields are emitted only when they are not ``Unset``; an explicit
        ``None`` is passed through (serialized as JSON null).
        """
        model = self.model

        # Each message serializes itself; the request holds only the dicts.
        messages = []
        for messages_item_data in self.messages:
            messages_item = messages_item_data.to_dict()
            messages.append(messages_item)

        # For every optional field below: Unset means "omit from payload";
        # any other value (including None) is forwarded unchanged.
        temperature: float | None | Unset
        if isinstance(self.temperature, Unset):
            temperature = UNSET
        else:
            temperature = self.temperature

        max_tokens: int | None | Unset
        if isinstance(self.max_tokens, Unset):
            max_tokens = UNSET
        else:
            max_tokens = self.max_tokens

        top_p: float | None | Unset
        if isinstance(self.top_p, Unset):
            top_p = UNSET
        else:
            top_p = self.top_p

        stream: bool | None | Unset
        if isinstance(self.stream, Unset):
            stream = UNSET
        else:
            stream = self.stream

        n: int | None | Unset
        if isinstance(self.n, Unset):
            n = UNSET
        else:
            n = self.n

        # "stop" may be a list of strings or a single string; both branches
        # forward the value as-is (the list branch exists for type narrowing).
        stop: list[str] | None | str | Unset
        if isinstance(self.stop, Unset):
            stop = UNSET
        elif isinstance(self.stop, list):
            stop = self.stop

        else:
            stop = self.stop

        # Unmodeled extra keys go in first so the modeled fields win on clash.
        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "model": model,
                "messages": messages,
            }
        )
        if temperature is not UNSET:
            field_dict["temperature"] = temperature
        if max_tokens is not UNSET:
            field_dict["max_tokens"] = max_tokens
        if top_p is not UNSET:
            field_dict["top_p"] = top_p
        if stream is not UNSET:
            field_dict["stream"] = stream
        if n is not UNSET:
            field_dict["n"] = n
        if stop is not UNSET:
            field_dict["stop"] = stop

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a mapping (e.g. decoded JSON).

        Missing optional keys become ``Unset``. Keys not consumed below are
        preserved on ``additional_properties``.

        Raises:
            KeyError: if a required key ("model", "messages") is absent.
        """
        # Imported here (not at module top) to avoid a circular import between
        # generated model modules.
        from ..models.chat_message import ChatMessage

        d = dict(src_dict)
        model = d.pop("model")

        messages = []
        _messages = d.pop("messages")
        for messages_item_data in _messages:
            messages_item = ChatMessage.from_dict(messages_item_data)

            messages.append(messages_item)

        # The _parse_* helpers pass None and Unset through untouched; the
        # trailing cast is a typing no-op at runtime.
        def _parse_temperature(data: object) -> float | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(float | None | Unset, data)

        temperature = _parse_temperature(d.pop("temperature", UNSET))

        def _parse_max_tokens(data: object) -> int | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(int | None | Unset, data)

        max_tokens = _parse_max_tokens(d.pop("max_tokens", UNSET))

        def _parse_top_p(data: object) -> float | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(float | None | Unset, data)

        top_p = _parse_top_p(d.pop("top_p", UNSET))

        def _parse_stream(data: object) -> bool | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(bool | None | Unset, data)

        stream = _parse_stream(d.pop("stream", UNSET))

        def _parse_n(data: object) -> int | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(int | None | Unset, data)

        n = _parse_n(d.pop("n", UNSET))

        # "stop" union: a list is narrowed to list[str]; any non-list value
        # (a single string) falls through the except and is returned as-is.
        def _parse_stop(data: object) -> list[str] | None | str | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, list):
                    raise TypeError()
                stop_type_1 = cast(list[str], data)

                return stop_type_1
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(list[str] | None | str | Unset, data)

        stop = _parse_stop(d.pop("stop", UNSET))

        chat_completion_request = cls(
            model=model,
            messages=messages,
            temperature=temperature,
            max_tokens=max_tokens,
            top_p=top_p,
            stream=stream,
            n=n,
            stop=stop,
        )

        # Whatever keys remain after the pops above are unmodeled extras.
        chat_completion_request.additional_properties = d
        return chat_completion_request

    @property
    def additional_keys(self) -> list[str]:
        """Names of the unmodeled extra keys carried by this instance."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
|
|
@@ -0,0 +1,146 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from collections.abc import Mapping
|
|
4
|
+
from typing import TYPE_CHECKING, Any, Literal, TypeVar, cast
|
|
5
|
+
|
|
6
|
+
from attrs import define as _attrs_define
|
|
7
|
+
from attrs import field as _attrs_field
|
|
8
|
+
|
|
9
|
+
from ..types import UNSET, Unset
|
|
10
|
+
|
|
11
|
+
if TYPE_CHECKING:
|
|
12
|
+
from ..models.choice import Choice
|
|
13
|
+
from ..models.usage import Usage
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
T = TypeVar("T", bound="ChatCompletionResponse")
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
@_attrs_define
class ChatCompletionResponse:
    """Response body for the OpenAI-compatible chat-completions endpoint.

    Attributes:
        id (str): A unique identifier for the chat completion
        created (int): Unix timestamp of when the completion was created
        model (str): The model used for the chat completion
        choices (list[Choice]): A list of chat completion choices
        object_ (Literal['chat.completion'] | Unset): The object type Default: 'chat.completion'.
        usage (None | Unset | Usage): Usage statistics for the completion request
    """

    id: str
    created: int
    model: str
    choices: list[Choice]
    # "object" on the wire; the trailing underscore avoids shadowing the builtin.
    object_: Literal["chat.completion"] | Unset = "chat.completion"
    usage: None | Unset | Usage = UNSET
    # Keys present on the wire that are not modeled as attributes above;
    # round-tripped by to_dict()/from_dict() and exposed via the mapping dunders.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict; Unset fields are omitted."""
        # Imported here (not at module top) to avoid a circular import between
        # generated model modules.
        from ..models.usage import Usage

        id = self.id

        created = self.created

        model = self.model

        choices = []
        for choices_item_data in self.choices:
            choices_item = choices_item_data.to_dict()
            choices.append(choices_item)

        object_ = self.object_

        # usage union: Unset -> omit, Usage -> nested dict, None -> null.
        usage: dict[str, Any] | None | Unset
        if isinstance(self.usage, Unset):
            usage = UNSET
        elif isinstance(self.usage, Usage):
            usage = self.usage.to_dict()
        else:
            usage = self.usage

        # Unmodeled extra keys go in first so the modeled fields win on clash.
        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "id": id,
                "created": created,
                "model": model,
                "choices": choices,
            }
        )
        if object_ is not UNSET:
            field_dict["object"] = object_
        if usage is not UNSET:
            field_dict["usage"] = usage

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a mapping (e.g. decoded JSON).

        Raises:
            KeyError: if a required key ("id", "created", "model", "choices")
                is absent.
            ValueError: if "object" is present but is not the constant
                'chat.completion'.
        """
        from ..models.choice import Choice
        from ..models.usage import Usage

        d = dict(src_dict)
        id = d.pop("id")

        created = d.pop("created")

        model = d.pop("model")

        choices = []
        _choices = d.pop("choices")
        for choices_item_data in _choices:
            choices_item = Choice.from_dict(choices_item_data)

            choices.append(choices_item)

        # The schema declares "object" as a const; reject any other value.
        object_ = cast(Literal["chat.completion"] | Unset, d.pop("object", UNSET))
        if object_ != "chat.completion" and not isinstance(object_, Unset):
            raise ValueError(f"object must match const 'chat.completion', got '{object_}'")

        # usage union: None/Unset pass through; a dict is parsed as Usage;
        # anything else falls through the except and is returned unchanged.
        def _parse_usage(data: object) -> None | Unset | Usage:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                usage_type_0 = Usage.from_dict(data)

                return usage_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(None | Unset | Usage, data)

        usage = _parse_usage(d.pop("usage", UNSET))

        chat_completion_response = cls(
            id=id,
            created=created,
            model=model,
            choices=choices,
            object_=object_,
            usage=usage,
        )

        # Whatever keys remain after the pops above are unmodeled extras.
        chat_completion_response.additional_properties = d
        return chat_completion_response

    @property
    def additional_keys(self) -> list[str]:
        """Names of the unmodeled extra keys carried by this instance."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from collections.abc import Mapping
|
|
4
|
+
from typing import Any, TypeVar
|
|
5
|
+
|
|
6
|
+
from attrs import define as _attrs_define
|
|
7
|
+
from attrs import field as _attrs_field
|
|
8
|
+
|
|
9
|
+
T = TypeVar("T", bound="ChatMessage")
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
@_attrs_define
class ChatMessage:
    """A single message in a chat conversation.

    Attributes:
        role (str): The role of the message author (system, user, or assistant)
        content (str): The content of the message
    """

    role: str
    content: str
    # Unmodeled keys from the wire, preserved across serialization round trips.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict; extra keys first, modeled fields last."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "role": self.role,
                "content": self.content,
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build a ChatMessage from a mapping; leftover keys become extras."""
        payload = dict(src_dict)
        message = cls(
            role=payload.pop("role"),
            content=payload.pop("content"),
        )
        message.additional_properties = payload
        return message

    @property
    def additional_keys(self) -> list[str]:
        """Names of the unmodeled extra keys carried by this instance."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
|
|
@@ -0,0 +1,97 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from collections.abc import Mapping
|
|
4
|
+
from typing import TYPE_CHECKING, Any, TypeVar, cast
|
|
5
|
+
|
|
6
|
+
from attrs import define as _attrs_define
|
|
7
|
+
from attrs import field as _attrs_field
|
|
8
|
+
|
|
9
|
+
from ..types import UNSET, Unset
|
|
10
|
+
|
|
11
|
+
if TYPE_CHECKING:
|
|
12
|
+
from ..models.response_message import ResponseMessage
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
T = TypeVar("T", bound="Choice")
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
@_attrs_define
class Choice:
    """One completion choice returned by the chat endpoint.

    Attributes:
        index (int): The index of this choice
        message (ResponseMessage):
        finish_reason (None | str | Unset): The reason the model stopped generating tokens
    """

    index: int
    message: ResponseMessage
    finish_reason: None | str | Unset = UNSET
    # Unmodeled keys from the wire, preserved across serialization round trips.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict; finish_reason is omitted when Unset."""
        rendered_message = self.message.to_dict()

        reason: None | str | Unset
        reason = UNSET if isinstance(self.finish_reason, Unset) else self.finish_reason

        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "index": self.index,
                "message": rendered_message,
            }
        )
        if reason is not UNSET:
            serialized["finish_reason"] = reason

        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build a Choice from a mapping; leftover keys become extras."""
        # Deferred import avoids a circular import between model modules.
        from ..models.response_message import ResponseMessage

        payload = dict(src_dict)

        raw_reason = payload.pop("finish_reason", UNSET)
        if raw_reason is None or isinstance(raw_reason, Unset):
            reason = raw_reason
        else:
            # Runtime no-op; narrows the type for checkers only.
            reason = cast(None | str | Unset, raw_reason)

        choice = cls(
            index=payload.pop("index"),
            message=ResponseMessage.from_dict(payload.pop("message")),
            finish_reason=reason,
        )
        choice.additional_properties = payload
        return choice

    @property
    def additional_keys(self) -> list[str]:
        """Names of the unmodeled extra keys carried by this instance."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from collections.abc import Mapping
|
|
4
|
+
from typing import Any, TypeVar
|
|
5
|
+
|
|
6
|
+
from attrs import define as _attrs_define
|
|
7
|
+
from attrs import field as _attrs_field
|
|
8
|
+
|
|
9
|
+
T = TypeVar("T", bound="CreateDatasetResponse")
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
@_attrs_define
class CreateDatasetResponse:
    """Response payload returned when a dataset is created.

    Attributes:
        id (str):
    """

    id: str
    # Unmodeled keys from the wire, preserved across serialization round trips.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict; extra keys first, then "id"."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update({"id": self.id})
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a mapping; leftover keys become extras."""
        payload = dict(src_dict)
        response = cls(id=payload.pop("id"))
        response.additional_properties = payload
        return response

    @property
    def additional_keys(self) -> list[str]:
        """Names of the unmodeled extra keys carried by this instance."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
|
|
@@ -0,0 +1,101 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from collections.abc import Mapping
|
|
4
|
+
from typing import TYPE_CHECKING, Any, TypeVar, cast
|
|
5
|
+
|
|
6
|
+
from attrs import define as _attrs_define
|
|
7
|
+
from attrs import field as _attrs_field
|
|
8
|
+
|
|
9
|
+
from ..types import UNSET, Unset
|
|
10
|
+
|
|
11
|
+
if TYPE_CHECKING:
|
|
12
|
+
from ..models.create_file_set_file_request_metadata_type_0 import CreateFileSetFileRequestMetadataType0
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
T = TypeVar("T", bound="CreateFileSetFileRequest")
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
@_attrs_define
class CreateFileSetFileRequest:
    """Request body for attaching a file to a file set.

    Attributes:
        file_id (str): ID of the file
        metadata (CreateFileSetFileRequestMetadataType0 | None | Unset): Optional file-level metadata
    """

    file_id: str
    metadata: CreateFileSetFileRequestMetadataType0 | None | Unset = UNSET
    # Keys present on the wire that are not modeled as attributes above;
    # round-tripped by to_dict()/from_dict() and exposed via the mapping dunders.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict; metadata is omitted when Unset."""
        # Imported here (not at module top) to avoid a circular import between
        # generated model modules.
        from ..models.create_file_set_file_request_metadata_type_0 import CreateFileSetFileRequestMetadataType0

        file_id = self.file_id

        # metadata union: Unset -> omit, model -> nested dict, None -> null.
        metadata: dict[str, Any] | None | Unset
        if isinstance(self.metadata, Unset):
            metadata = UNSET
        elif isinstance(self.metadata, CreateFileSetFileRequestMetadataType0):
            metadata = self.metadata.to_dict()
        else:
            metadata = self.metadata

        # Unmodeled extra keys go in first so the modeled fields win on clash.
        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "file_id": file_id,
            }
        )
        if metadata is not UNSET:
            field_dict["metadata"] = metadata

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a mapping (e.g. decoded JSON).

        Raises:
            KeyError: if the required "file_id" key is absent.
        """
        from ..models.create_file_set_file_request_metadata_type_0 import CreateFileSetFileRequestMetadataType0

        d = dict(src_dict)
        file_id = d.pop("file_id")

        # metadata union: None/Unset pass through; a dict is parsed as the
        # metadata model; anything else falls through the except unchanged.
        def _parse_metadata(data: object) -> CreateFileSetFileRequestMetadataType0 | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                metadata_type_0 = CreateFileSetFileRequestMetadataType0.from_dict(data)

                return metadata_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(CreateFileSetFileRequestMetadataType0 | None | Unset, data)

        metadata = _parse_metadata(d.pop("metadata", UNSET))

        create_file_set_file_request = cls(
            file_id=file_id,
            metadata=metadata,
        )

        # Whatever keys remain after the pops above are unmodeled extras.
        create_file_set_file_request.additional_properties = d
        return create_file_set_file_request

    @property
    def additional_keys(self) -> list[str]:
        """Names of the unmodeled extra keys carried by this instance."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
|