retab 0.0.35__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (111)
  1. retab-0.0.35.dist-info/METADATA +417 -0
  2. retab-0.0.35.dist-info/RECORD +111 -0
  3. retab-0.0.35.dist-info/WHEEL +5 -0
  4. retab-0.0.35.dist-info/top_level.txt +1 -0
  5. uiform/__init__.py +4 -0
  6. uiform/_resource.py +28 -0
  7. uiform/_utils/__init__.py +0 -0
  8. uiform/_utils/ai_models.py +100 -0
  9. uiform/_utils/benchmarking copy.py +588 -0
  10. uiform/_utils/benchmarking.py +485 -0
  11. uiform/_utils/chat.py +332 -0
  12. uiform/_utils/display.py +443 -0
  13. uiform/_utils/json_schema.py +2161 -0
  14. uiform/_utils/mime.py +168 -0
  15. uiform/_utils/responses.py +163 -0
  16. uiform/_utils/stream_context_managers.py +52 -0
  17. uiform/_utils/usage/__init__.py +0 -0
  18. uiform/_utils/usage/usage.py +300 -0
  19. uiform/client.py +701 -0
  20. uiform/py.typed +0 -0
  21. uiform/resources/__init__.py +0 -0
  22. uiform/resources/consensus/__init__.py +3 -0
  23. uiform/resources/consensus/client.py +114 -0
  24. uiform/resources/consensus/completions.py +252 -0
  25. uiform/resources/consensus/completions_stream.py +278 -0
  26. uiform/resources/consensus/responses.py +325 -0
  27. uiform/resources/consensus/responses_stream.py +373 -0
  28. uiform/resources/deployments/__init__.py +9 -0
  29. uiform/resources/deployments/client.py +78 -0
  30. uiform/resources/deployments/endpoints.py +322 -0
  31. uiform/resources/deployments/links.py +452 -0
  32. uiform/resources/deployments/logs.py +211 -0
  33. uiform/resources/deployments/mailboxes.py +496 -0
  34. uiform/resources/deployments/outlook.py +531 -0
  35. uiform/resources/deployments/tests.py +158 -0
  36. uiform/resources/documents/__init__.py +3 -0
  37. uiform/resources/documents/client.py +255 -0
  38. uiform/resources/documents/extractions.py +441 -0
  39. uiform/resources/evals.py +812 -0
  40. uiform/resources/files.py +24 -0
  41. uiform/resources/finetuning.py +62 -0
  42. uiform/resources/jsonlUtils.py +1046 -0
  43. uiform/resources/models.py +45 -0
  44. uiform/resources/openai_example.py +22 -0
  45. uiform/resources/processors/__init__.py +3 -0
  46. uiform/resources/processors/automations/__init__.py +9 -0
  47. uiform/resources/processors/automations/client.py +78 -0
  48. uiform/resources/processors/automations/endpoints.py +317 -0
  49. uiform/resources/processors/automations/links.py +356 -0
  50. uiform/resources/processors/automations/logs.py +211 -0
  51. uiform/resources/processors/automations/mailboxes.py +435 -0
  52. uiform/resources/processors/automations/outlook.py +444 -0
  53. uiform/resources/processors/automations/tests.py +158 -0
  54. uiform/resources/processors/client.py +474 -0
  55. uiform/resources/prompt_optimization.py +76 -0
  56. uiform/resources/schemas.py +369 -0
  57. uiform/resources/secrets/__init__.py +9 -0
  58. uiform/resources/secrets/client.py +20 -0
  59. uiform/resources/secrets/external_api_keys.py +109 -0
  60. uiform/resources/secrets/webhook.py +62 -0
  61. uiform/resources/usage.py +271 -0
  62. uiform/types/__init__.py +0 -0
  63. uiform/types/ai_models.py +645 -0
  64. uiform/types/automations/__init__.py +0 -0
  65. uiform/types/automations/cron.py +58 -0
  66. uiform/types/automations/endpoints.py +21 -0
  67. uiform/types/automations/links.py +28 -0
  68. uiform/types/automations/mailboxes.py +60 -0
  69. uiform/types/automations/outlook.py +68 -0
  70. uiform/types/automations/webhooks.py +21 -0
  71. uiform/types/chat.py +8 -0
  72. uiform/types/completions.py +93 -0
  73. uiform/types/consensus.py +10 -0
  74. uiform/types/db/__init__.py +0 -0
  75. uiform/types/db/annotations.py +24 -0
  76. uiform/types/db/files.py +36 -0
  77. uiform/types/deployments/__init__.py +0 -0
  78. uiform/types/deployments/cron.py +59 -0
  79. uiform/types/deployments/endpoints.py +28 -0
  80. uiform/types/deployments/links.py +36 -0
  81. uiform/types/deployments/mailboxes.py +67 -0
  82. uiform/types/deployments/outlook.py +76 -0
  83. uiform/types/deployments/webhooks.py +21 -0
  84. uiform/types/documents/__init__.py +0 -0
  85. uiform/types/documents/correct_orientation.py +13 -0
  86. uiform/types/documents/create_messages.py +226 -0
  87. uiform/types/documents/extractions.py +297 -0
  88. uiform/types/evals.py +207 -0
  89. uiform/types/events.py +76 -0
  90. uiform/types/extractions.py +85 -0
  91. uiform/types/jobs/__init__.py +0 -0
  92. uiform/types/jobs/base.py +150 -0
  93. uiform/types/jobs/batch_annotation.py +22 -0
  94. uiform/types/jobs/evaluation.py +133 -0
  95. uiform/types/jobs/finetune.py +6 -0
  96. uiform/types/jobs/prompt_optimization.py +41 -0
  97. uiform/types/jobs/webcrawl.py +6 -0
  98. uiform/types/logs.py +231 -0
  99. uiform/types/mime.py +257 -0
  100. uiform/types/modalities.py +68 -0
  101. uiform/types/pagination.py +6 -0
  102. uiform/types/schemas/__init__.py +0 -0
  103. uiform/types/schemas/enhance.py +53 -0
  104. uiform/types/schemas/evaluate.py +55 -0
  105. uiform/types/schemas/generate.py +32 -0
  106. uiform/types/schemas/layout.py +58 -0
  107. uiform/types/schemas/object.py +631 -0
  108. uiform/types/schemas/templates.py +107 -0
  109. uiform/types/secrets/__init__.py +0 -0
  110. uiform/types/secrets/external_api_keys.py +22 -0
  111. uiform/types/standards.py +39 -0
uiform/py.typed ADDED
File without changes
File without changes
"""Public entry point for the consensus resource.

Re-exports the synchronous and asynchronous client wrappers so callers can
import them directly from this subpackage.
"""

from .client import AsyncConsensus, Consensus

# Explicit public API of this subpackage.
__all__ = ["Consensus", "AsyncConsensus"]
@@ -0,0 +1,114 @@
1
+ from typing import Any, Dict, List, Literal, Optional
2
+
3
+ from pydantic import BaseModel, Field
4
+
5
+ from ..._resource import AsyncAPIResource, SyncAPIResource
6
+ from ...types.standards import PreparedRequest
7
+ from .completions import AsyncCompletions, Completions
8
+ from .responses import AsyncResponses, Responses
9
+ from ...types.consensus import ReconciliationResponse
10
+
11
class BaseConsensusMixin:
    """Shared request-building logic for the sync and async consensus clients."""

    def _prepare_reconcile(
        self,
        list_dicts: List[Dict[str, Any]],
        reference_schema: Optional[Dict[str, Any]] = None,
        mode: Literal["direct", "aligned"] = "direct",
        idempotency_key: str | None = None,
    ) -> PreparedRequest:
        """Build the POST request for the /v1/consensus/reconcile endpoint."""
        payload: Dict[str, Any] = {"list_dicts": list_dicts, "mode": mode}

        # The schema is optional; omit the key entirely when not supplied.
        if reference_schema is not None:
            payload["reference_schema"] = reference_schema

        return PreparedRequest(
            method="POST",
            url="/v1/consensus/reconcile",
            data=payload,
            idempotency_key=idempotency_key,
        )
34
+
35
class Consensus(SyncAPIResource, BaseConsensusMixin):
    """Consensus API wrapper for synchronous operations"""

    def __init__(self, client: Any) -> None:
        super().__init__(client=client)
        self.completions = Completions(client=client)
        self.responses = Responses(client=client)

    def reconcile(
        self,
        list_dicts: List[Dict[str, Any]],
        reference_schema: Optional[Dict[str, Any]] = None,
        mode: Literal["direct", "aligned"] = "direct",
        idempotency_key: str | None = None,
    ) -> ReconciliationResponse:
        """Reconcile multiple dictionaries into a single unified consensus dictionary.

        Args:
            list_dicts: Dictionaries to reconcile.
            reference_schema: Optional schema the dictionaries are validated against.
            mode: Consensus computation mode ("direct" or "aligned").
            idempotency_key: Optional idempotency key for the request.

        Returns:
            ReconciliationResponse: The consensus dictionary and consensus likelihoods.

        Raises:
            UiformAPIError: If the API request fails.
        """
        prepared = self._prepare_reconcile(
            list_dicts=list_dicts,
            reference_schema=reference_schema,
            mode=mode,
            idempotency_key=idempotency_key,
        )
        raw = self._client._prepared_request(prepared)
        return ReconciliationResponse.model_validate(raw)
73
+
74
+
75
class AsyncConsensus(AsyncAPIResource, BaseConsensusMixin):
    """Consensus API wrapper for asynchronous operations"""

    def __init__(self, client: Any) -> None:
        super().__init__(client=client)
        self.completions = AsyncCompletions(client=client)
        self.responses = AsyncResponses(client=client)

    async def reconcile(
        self,
        list_dicts: List[Dict[str, Any]],
        reference_schema: Optional[Dict[str, Any]] = None,
        mode: Literal["direct", "aligned"] = "direct",
        idempotency_key: str | None = None,
    ) -> ReconciliationResponse:
        """Reconcile multiple dictionaries into a single unified consensus dictionary asynchronously.

        Args:
            list_dicts: Dictionaries to reconcile.
            reference_schema: Optional schema the dictionaries are validated against.
            mode: Consensus computation mode ("direct" or "aligned").
            idempotency_key: Optional idempotency key for the request.

        Returns:
            ReconciliationResponse: The consensus dictionary and consensus likelihoods.

        Raises:
            UiformAPIError: If the API request fails.
        """
        prepared = self._prepare_reconcile(
            list_dicts=list_dicts,
            reference_schema=reference_schema,
            mode=mode,
            idempotency_key=idempotency_key,
        )
        raw = await self._client._prepared_request(prepared)
        return ReconciliationResponse.model_validate(raw)
@@ -0,0 +1,252 @@
1
+ import json
2
+ from pathlib import Path
3
+ from typing import Any, AsyncGenerator, Generator
4
+
5
+ from openai.types.chat.chat_completion_reasoning_effort import ChatCompletionReasoningEffort
6
+ from openai.types.chat.parsed_chat_completion import ParsedChatCompletionMessage
7
+ from openai.types.shared_params.response_format_json_schema import ResponseFormatJSONSchema
8
+ #from openai.lib._parsing import ResponseFormatT
9
+ from pydantic import BaseModel as ResponseFormatT
10
+
11
+
12
+ from ..._resource import AsyncAPIResource, SyncAPIResource
13
+ from ..._utils.ai_models import assert_valid_model_extraction
14
+ from ..._utils.json_schema import load_json_schema, unflatten_dict
15
+ from ..._utils.stream_context_managers import as_async_context_manager, as_context_manager
16
+ from ...types.chat import ChatCompletionUiformMessage
17
+ from ...types.completions import UiChatCompletionsRequest
18
+ from ...types.documents.extractions import UiParsedChatCompletion, UiParsedChatCompletionChunk, UiParsedChoice
19
+ from ...types.standards import PreparedRequest
20
+ from ...types.schemas.object import Schema
21
+
22
+
23
+
24
class BaseCompletionsMixin:
    """Shared request preparation for the sync and async completions wrappers."""

    def _build_completions_request(
        self,
        schema_obj: Schema,
        messages: list[ChatCompletionUiformMessage],
        model: str,
        temperature: float,
        reasoning_effort: ChatCompletionReasoningEffort,
        stream: bool,
        n_consensus: int,
        idempotency_key: str | None,
    ) -> PreparedRequest:
        """Assemble, validate, and wrap the wire payload for POST /v1/completions.

        Factored out of ``prepare_parse`` and ``prepare_create``, which
        previously duplicated this payload construction line for line.
        """
        data = {
            "messages": messages,
            "response_format": {
                "type": "json_schema",
                "json_schema": {
                    "name": schema_obj.id,
                    "schema": schema_obj.inference_json_schema,
                    "strict": True,
                },
            },
            "model": model,
            "temperature": temperature,
            "stream": stream,
            "reasoning_effort": reasoning_effort,
            "n_consensus": n_consensus,
        }

        # Validate request data (raises a pydantic ValidationError if invalid).
        ui_chat_completions_request = UiChatCompletionsRequest.model_validate(data)

        return PreparedRequest(method="POST", url="/v1/completions", data=ui_chat_completions_request.model_dump(), idempotency_key=idempotency_key)

    def prepare_parse(
        self,
        response_format: type[ResponseFormatT],
        messages: list[ChatCompletionUiformMessage],
        model: str,
        temperature: float,
        reasoning_effort: ChatCompletionReasoningEffort,
        stream: bool,
        n_consensus: int,
        idempotency_key: str | None = None,
    ) -> PreparedRequest:
        """Build a /v1/completions request from a pydantic model class.

        The model class's JSON schema is extracted and sent as a strict
        ``json_schema`` response format.
        """
        assert_valid_model_extraction(model)

        json_schema = response_format.model_json_schema()
        schema_obj = Schema(json_schema=json_schema)

        return self._build_completions_request(
            schema_obj=schema_obj,
            messages=messages,
            model=model,
            temperature=temperature,
            reasoning_effort=reasoning_effort,
            stream=stream,
            n_consensus=n_consensus,
            idempotency_key=idempotency_key,
        )

    def prepare_create(
        self,
        response_format: ResponseFormatJSONSchema,
        messages: list[ChatCompletionUiformMessage],
        model: str,
        temperature: float,
        reasoning_effort: ChatCompletionReasoningEffort,
        stream: bool,
        n_consensus: int,
        idempotency_key: str | None = None,
    ) -> PreparedRequest:
        """Build a /v1/completions request from an explicit JSON-schema response format."""
        # Consistency fix: prepare_parse validated the model but prepare_create
        # did not; both paths now reject unsupported extraction models early.
        assert_valid_model_extraction(model)

        json_schema = response_format["json_schema"].get("schema")
        assert isinstance(json_schema, dict), f"json_schema must be a dictionary, got {type(json_schema)}"

        schema_obj = Schema(json_schema=json_schema)

        return self._build_completions_request(
            schema_obj=schema_obj,
            messages=messages,
            model=model,
            temperature=temperature,
            reasoning_effort=reasoning_effort,
            stream=stream,
            n_consensus=n_consensus,
            idempotency_key=idempotency_key,
        )
105
+
106
+
107
class Completions(SyncAPIResource, BaseCompletionsMixin):
    """Multi-provider Completions API wrapper"""

    def create(
        self,
        response_format: ResponseFormatJSONSchema,
        messages: list[ChatCompletionUiformMessage],
        model: str = "gpt-4o-2024-08-06",
        temperature: float = 0,
        reasoning_effort: ChatCompletionReasoningEffort = "medium",
        n_consensus: int = 1,
        idempotency_key: str | None = None,
        stream: bool = False,
    ) -> UiParsedChatCompletion:
        """Create a completion using the UiForm API.

        Args:
            response_format: Explicit JSON-schema response format for the completion.
            messages: List of chat messages to send.
            model: The AI model to use for processing.
            temperature: Model temperature setting (0-1).
            reasoning_effort: Effort level for the model to reason about the input data.
            n_consensus: Number of consensus models to use.
            idempotency_key: Optional idempotency key for the request.
            stream: Whether the server should stream the response.

        Returns:
            UiParsedChatCompletion: Validated response from the API.
        """
        prepared = self.prepare_create(
            response_format=response_format,
            messages=messages,
            model=model,
            temperature=temperature,
            reasoning_effort=reasoning_effort,
            stream=stream,
            n_consensus=n_consensus,
            idempotency_key=idempotency_key,
        )
        raw = self._client._prepared_request(prepared)
        return UiParsedChatCompletion.model_validate(raw)

    def parse(
        self,
        response_format: type[ResponseFormatT],
        messages: list[ChatCompletionUiformMessage],
        model: str = "gpt-4o-2024-08-06",
        temperature: float = 0,
        reasoning_effort: ChatCompletionReasoningEffort = "medium",
        n_consensus: int = 1,
        idempotency_key: str | None = None,
    ) -> UiParsedChatCompletion:
        """Parse messages into structured data matching the provided schema.

        Args:
            response_format: Pydantic model class defining the expected data structure.
            messages: List of chat messages to parse.
            model: The AI model to use for processing.
            temperature: Model temperature setting (0-1).
            reasoning_effort: Effort level for the model to reason about the input data.
            n_consensus: Number of consensus models to use.
            idempotency_key: Optional idempotency key for the request.

        Returns:
            UiParsedChatCompletion: Parsed response from the API.
        """
        prepared = self.prepare_parse(
            response_format=response_format,
            messages=messages,
            model=model,
            temperature=temperature,
            reasoning_effort=reasoning_effort,
            stream=False,
            n_consensus=n_consensus,
            idempotency_key=idempotency_key,
        )
        raw = self._client._prepared_request(prepared)
        return UiParsedChatCompletion.model_validate(raw)
179
+
180
+
181
class AsyncCompletions(AsyncAPIResource, BaseCompletionsMixin):
    """Multi-provider Completions API wrapper for asynchronous usage."""

    async def create(
        self,
        response_format: ResponseFormatJSONSchema,
        messages: list[ChatCompletionUiformMessage],
        model: str = "gpt-4o-2024-08-06",
        temperature: float = 0,
        reasoning_effort: ChatCompletionReasoningEffort = "medium",
        n_consensus: int = 1,
        idempotency_key: str | None = None,
        stream: bool = False,
    ) -> UiParsedChatCompletion:
        """Create a completion using the UiForm API asynchronously.

        Args:
            response_format: Explicit JSON-schema response format for the completion.
            messages: List of chat messages to send.
            model: The AI model to use for processing.
            temperature: Model temperature setting (0-1).
            reasoning_effort: Effort level for the model to reason about the input data.
            n_consensus: Number of consensus models to use.
            idempotency_key: Optional idempotency key for the request.
            stream: Whether the server should stream the response.

        Returns:
            UiParsedChatCompletion: Validated response from the API.
        """
        prepared = self.prepare_create(
            response_format=response_format,
            messages=messages,
            model=model,
            temperature=temperature,
            reasoning_effort=reasoning_effort,
            stream=stream,
            n_consensus=n_consensus,
            idempotency_key=idempotency_key,
        )
        raw = await self._client._prepared_request(prepared)
        return UiParsedChatCompletion.model_validate(raw)

    async def parse(
        self,
        response_format: type[ResponseFormatT],
        messages: list[ChatCompletionUiformMessage],
        model: str = "gpt-4o-2024-08-06",
        temperature: float = 0,
        reasoning_effort: ChatCompletionReasoningEffort = "medium",
        n_consensus: int = 1,
        idempotency_key: str | None = None,
    ) -> UiParsedChatCompletion:
        """Parse messages into structured data asynchronously.

        Args:
            response_format: Pydantic model class defining the expected data structure.
            messages: List of chat messages to parse.
            model: The AI model to use.
            temperature: Model temperature setting (0-1).
            reasoning_effort: Effort level for the model to reason about the input data.
            n_consensus: Number of consensus models to use for extraction.
            idempotency_key: Optional idempotency key for the request.

        Returns:
            UiParsedChatCompletion: Parsed response from the API.
        """
        prepared = self.prepare_parse(
            response_format=response_format,
            messages=messages,
            model=model,
            temperature=temperature,
            reasoning_effort=reasoning_effort,
            stream=False,
            n_consensus=n_consensus,
            idempotency_key=idempotency_key,
        )
        raw = await self._client._prepared_request(prepared)
        return UiParsedChatCompletion.model_validate(raw)
@@ -0,0 +1,278 @@
1
+ import json
2
+ from pathlib import Path
3
+ from typing import Any, AsyncGenerator, Generator
4
+
5
+ from openai.types.chat.chat_completion_reasoning_effort import ChatCompletionReasoningEffort
6
+ from openai.types.chat.parsed_chat_completion import ParsedChatCompletionMessage
7
+ from openai.types.shared_params.response_format_json_schema import ResponseFormatJSONSchema
8
+ #from openai.lib._parsing import ResponseFormatT
9
+ from pydantic import BaseModel as ResponseFormatT
10
+
11
+
12
+ from ..._resource import AsyncAPIResource, SyncAPIResource
13
+ from ..._utils.ai_models import assert_valid_model_extraction
14
+ from ..._utils.json_schema import load_json_schema, unflatten_dict
15
+ from ..._utils.stream_context_managers import as_async_context_manager, as_context_manager
16
+ from ...types.chat import ChatCompletionUiformMessage
17
+ from ...types.completions import UiChatCompletionsRequest
18
+ from ...types.documents.extractions import UiParsedChatCompletion, UiParsedChatCompletionChunk, UiParsedChoice
19
+ from ...types.standards import PreparedRequest
20
+ from ...types.schemas.object import Schema
21
+
22
+
23
+
24
class BaseCompletionsMixin:
    """Shared request preparation for the sync and async streaming completions wrappers."""

    def _build_completions_request(
        self,
        schema_obj: Schema,
        messages: list[ChatCompletionUiformMessage],
        model: str,
        temperature: float,
        reasoning_effort: ChatCompletionReasoningEffort,
        stream: bool,
        n_consensus: int,
        idempotency_key: str | None,
    ) -> PreparedRequest:
        """Assemble, validate, and wrap the wire payload for POST /v1/completions.

        Factored out of ``prepare_parse`` and ``prepare_create``, which
        previously duplicated this payload construction line for line.
        """
        data = {
            "messages": messages,
            "response_format": {
                "type": "json_schema",
                "json_schema": {
                    "name": schema_obj.id,
                    "schema": schema_obj.inference_json_schema,
                    "strict": True,
                },
            },
            "model": model,
            "temperature": temperature,
            "stream": stream,
            "reasoning_effort": reasoning_effort,
            "n_consensus": n_consensus,
        }

        # Validate request data (raises a pydantic ValidationError if invalid).
        ui_chat_completions_request = UiChatCompletionsRequest.model_validate(data)

        return PreparedRequest(method="POST", url="/v1/completions", data=ui_chat_completions_request.model_dump(), idempotency_key=idempotency_key)

    def prepare_parse(
        self,
        response_format: type[ResponseFormatT],
        messages: list[ChatCompletionUiformMessage],
        model: str,
        temperature: float,
        reasoning_effort: ChatCompletionReasoningEffort,
        stream: bool,
        n_consensus: int,
        idempotency_key: str | None = None,
    ) -> PreparedRequest:
        """Build a /v1/completions request from a pydantic model class.

        The model class's JSON schema is extracted and sent as a strict
        ``json_schema`` response format.
        """
        assert_valid_model_extraction(model)

        json_schema = response_format.model_json_schema()
        schema_obj = Schema(json_schema=json_schema)

        return self._build_completions_request(
            schema_obj=schema_obj,
            messages=messages,
            model=model,
            temperature=temperature,
            reasoning_effort=reasoning_effort,
            stream=stream,
            n_consensus=n_consensus,
            idempotency_key=idempotency_key,
        )

    def prepare_create(
        self,
        response_format: ResponseFormatJSONSchema,
        messages: list[ChatCompletionUiformMessage],
        model: str,
        temperature: float,
        reasoning_effort: ChatCompletionReasoningEffort,
        stream: bool,
        n_consensus: int,
        idempotency_key: str | None = None,
    ) -> PreparedRequest:
        """Build a /v1/completions request from an explicit JSON-schema response format."""
        # Consistency fix: prepare_parse validated the model but prepare_create
        # did not; both paths now reject unsupported extraction models early.
        assert_valid_model_extraction(model)

        json_schema = response_format["json_schema"].get("schema")
        assert isinstance(json_schema, dict), f"json_schema must be a dictionary, got {type(json_schema)}"

        schema_obj = Schema(json_schema=json_schema)

        return self._build_completions_request(
            schema_obj=schema_obj,
            messages=messages,
            model=model,
            temperature=temperature,
            reasoning_effort=reasoning_effort,
            stream=stream,
            n_consensus=n_consensus,
            idempotency_key=idempotency_key,
        )
105
+
106
+
107
class Completions(SyncAPIResource, BaseCompletionsMixin):
    """Multi-provider Completions API wrapper"""

    @as_context_manager
    def stream(
        self,
        response_format: type[ResponseFormatT],
        messages: list[ChatCompletionUiformMessage],
        model: str = "gpt-4o-2024-08-06",
        temperature: float = 0,
        reasoning_effort: ChatCompletionReasoningEffort = "medium",
        n_consensus: int = 1,
        idempotency_key: str | None = None,
    ) -> Generator[UiParsedChatCompletion, None, None]:
        """
        Process messages using the UiForm API with streaming enabled.

        Args:
            response_format: JSON schema defining the expected data structure
            messages: List of chat messages to parse
            model: The AI model to use for processing
            temperature: Model temperature setting (0-1)
            reasoning_effort: The effort level for the model to reason about the input data
            n_consensus: Number of consensus models to use for extraction
            idempotency_key: Idempotency key for request

        Returns:
            Generator[UiParsedChatCompletion]: Stream of parsed responses

        Usage:
        ```python
        with uiform.completions.stream(json_schema, messages, model, temperature, reasoning_effort) as stream:
            for response in stream:
                print(response)
        ```
        """
        request = self.prepare_parse(
            response_format=response_format,
            messages=messages,
            model=model,
            temperature=temperature,
            reasoning_effort=reasoning_effort,
            stream=True,
            n_consensus=n_consensus,
            idempotency_key=idempotency_key,
        )

        # Request the stream and return a context manager
        ui_parsed_chat_completion_cum_chunk: UiParsedChatCompletionChunk | None = None
        # Initialize the UiParsedChatCompletion object that accumulates the
        # streamed state and is re-yielded after each chunk.
        ui_parsed_completion: UiParsedChatCompletion = UiParsedChatCompletion(
            id="",
            created=0,
            model="",
            object="chat.completion",
            likelihoods={},
            choices=[
                UiParsedChoice(
                    index=0,
                    message=ParsedChatCompletionMessage(content="", role="assistant"),
                    finish_reason=None,
                    logprobs=None,
                )
            ],
        )
        for chunk_json in self._client._prepared_request_stream(request):
            # Skip keep-alive / empty frames.
            if not chunk_json:
                continue
            # Fold this chunk into the running accumulator; chunk_accumulator
            # merges the new deltas with everything received so far.
            ui_parsed_chat_completion_cum_chunk = UiParsedChatCompletionChunk.model_validate(chunk_json).chunk_accumulator(ui_parsed_chat_completion_cum_chunk)
            # Basic stuff: copy over the chunk's identity fields.
            ui_parsed_completion.id = ui_parsed_chat_completion_cum_chunk.id
            ui_parsed_completion.created = ui_parsed_chat_completion_cum_chunk.created
            ui_parsed_completion.model = ui_parsed_chat_completion_cum_chunk.model

            # Update the ui_parsed_completion object: rebuild nested dicts from
            # the accumulated flat key/value deltas.
            ui_parsed_completion.likelihoods = unflatten_dict(ui_parsed_chat_completion_cum_chunk.choices[0].delta.flat_likelihoods)
            parsed = unflatten_dict(ui_parsed_chat_completion_cum_chunk.choices[0].delta.flat_parsed)
            ui_parsed_completion.choices[0].message.content = json.dumps(parsed)
            ui_parsed_completion.choices[0].message.parsed = parsed

            # NOTE(review): the same mutable completion object is yielded on every
            # iteration — consumers that keep a reference will see later mutations.
            yield ui_parsed_completion

        # change the finish_reason to stop on the final emission
        ui_parsed_completion.choices[0].finish_reason = "stop"
        yield ui_parsed_completion
191
+
192
+
193
class AsyncCompletions(AsyncAPIResource, BaseCompletionsMixin):
    """Multi-provider Completions API wrapper for asynchronous usage."""

    @as_async_context_manager
    async def stream(
        self,
        response_format: type[ResponseFormatT],
        messages: list[ChatCompletionUiformMessage],
        model: str = "gpt-4o-2024-08-06",
        temperature: float = 0,
        reasoning_effort: ChatCompletionReasoningEffort = "medium",
        n_consensus: int = 1,
        idempotency_key: str | None = None,
    ) -> AsyncGenerator[UiParsedChatCompletion, None]:
        """
        Parse messages using the UiForm API asynchronously with streaming.

        Args:
            response_format: JSON schema defining the expected data structure
            messages: List of chat messages to parse
            model: The AI model to use
            temperature: Model temperature setting (0-1)
            reasoning_effort: The effort level for the model to reason about the input data
            n_consensus: Number of consensus models to use for extraction
            idempotency_key: Idempotency key for request

        Returns:
            AsyncGenerator[UiParsedChatCompletion]: Stream of parsed responses

        Usage:
        ```python
        async with uiform.completions.stream(json_schema, messages, model, temperature, reasoning_effort, n_consensus) as stream:
            async for response in stream:
                print(response)
        ```
        """
        request = self.prepare_parse(
            response_format=response_format,
            messages=messages,
            model=model,
            temperature=temperature,
            reasoning_effort=reasoning_effort,
            stream=True,
            n_consensus=n_consensus,
            idempotency_key=idempotency_key,
        )

        # Request the stream and return a context manager
        ui_parsed_chat_completion_cum_chunk: UiParsedChatCompletionChunk | None = None
        # Initialize the UiParsedChatCompletion object that accumulates the
        # streamed state and is re-yielded after each chunk.
        ui_parsed_completion: UiParsedChatCompletion = UiParsedChatCompletion(
            id="",
            created=0,
            model="",
            object="chat.completion",
            likelihoods={},
            choices=[
                UiParsedChoice(
                    index=0,
                    message=ParsedChatCompletionMessage(content="", role="assistant"),
                    finish_reason=None,
                    logprobs=None,
                )
            ],
        )
        async for chunk_json in self._client._prepared_request_stream(request):
            # Skip keep-alive / empty frames.
            if not chunk_json:
                continue
            # Fold this chunk into the running accumulator; chunk_accumulator
            # merges the new deltas with everything received so far.
            ui_parsed_chat_completion_cum_chunk = UiParsedChatCompletionChunk.model_validate(chunk_json).chunk_accumulator(ui_parsed_chat_completion_cum_chunk)
            # Basic stuff: copy over the chunk's identity fields.
            ui_parsed_completion.id = ui_parsed_chat_completion_cum_chunk.id
            ui_parsed_completion.created = ui_parsed_chat_completion_cum_chunk.created
            ui_parsed_completion.model = ui_parsed_chat_completion_cum_chunk.model

            # Update the ui_parsed_completion object: rebuild nested dicts from
            # the accumulated flat key/value deltas.
            ui_parsed_completion.likelihoods = unflatten_dict(ui_parsed_chat_completion_cum_chunk.choices[0].delta.flat_likelihoods)
            parsed = unflatten_dict(ui_parsed_chat_completion_cum_chunk.choices[0].delta.flat_parsed)
            ui_parsed_completion.choices[0].message.content = json.dumps(parsed)
            ui_parsed_completion.choices[0].message.parsed = parsed

            # NOTE(review): the same mutable completion object is yielded on every
            # iteration — consumers that keep a reference will see later mutations.
            yield ui_parsed_completion

        # change the finish_reason to stop on the final emission
        ui_parsed_completion.choices[0].finish_reason = "stop"
        yield ui_parsed_completion