unique_sdk: 0.10.18-py3-none-any.whl → 0.10.20-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,15 +1,29 @@
 from typing import (
+    TYPE_CHECKING,
     Any,
     ClassVar,
-    Dict,
     List,
     Literal,
+    NotRequired,
     Optional,
-    TypedDict,
+    Union,
+    Unpack,
     cast,
 )
 
-from typing_extensions import NotRequired, Unpack
+from typing_extensions import TypedDict
+
+# Avoid introducing a dependency on the openai sdk as it's only used for type hints
+if TYPE_CHECKING:
+    from openai.types.responses import (
+        ResponseIncludable,
+        ResponseInputParam,
+        ResponseOutputItem,
+        ResponseTextConfigParam,
+        ToolParam,
+        response_create_params,
+    )
+    from openai.types.shared_params import Metadata, Reasoning
 
 from unique_sdk._api_resource import APIResource
 from unique_sdk._request_options import RequestOptions
@@ -35,63 +49,49 @@ class Integrated(APIResource["Integrated"]):
         content: str
         name: Optional[str]
 
-    class CreateStream(RequestOptions):
-        model: NotRequired[
-            Literal[
-                "AZURE_GPT_4_0613",
-                "AZURE_GPT_4_32K_0613",
-            ]
-        ]
-        timeout: NotRequired["int"]
-        messages: List["Integrated.ChatCompletionRequestMessage"]
+    class CommonIntegratedParams(RequestOptions):
+        model: NotRequired[str]
         searchContext: NotRequired[List["Integrated.SearchResult"]]
         chatId: str
         assistantId: str
         assistantMessageId: str
         userMessageId: str
         startText: NotRequired["str"]
-        debugInfo: NotRequired[Dict[str, Any]]
+        debugInfo: NotRequired[dict[str, Any]]
+
+    class CreateStream(CommonIntegratedParams):
+        timeout: NotRequired["int"]
+        messages: List["Integrated.ChatCompletionRequestMessage"]
 
     # For further details about the responses parameters, see the OpenAI API documentation.
-    class CreateStreamResponseParams(TypedDict):
-        debugInfo: Optional[Dict[str, Any]] = None
-        input: Any
-        model: str
-        searchContext: Optional[List["Integrated.SearchResult"]] = None
-        chatId: str
-        assistantMessageId: str
-        userMessageId: str
-        startText: str | None = None
-        include: Optional[
-            list[
-                Literal[
-                    "computer_call_output.output.image_url",
-                    "file_search_call.results",
-                    "message.input_image.image_url",
-                    "reasoning.encrypted_content",
-                ]
-            ]
-        ] = None
-        instructions: str | None = None
-        max_output_tokens: int | None = None
-        metadata: Optional[Dict[str, str]] = None
-        parallel_tool_calls: float | None = None
-        temperature: float | None = None
-        text: Any
-        tool_choice: Any
-        tools: Any
-        top_p: float | None = None
-        reasoning: Any
+    # Note that other parameters from openai.resources.responses.Response.create can be passed
+    class CreateStreamResponsesOpenaiParams(TypedDict):
+        include: NotRequired[list["ResponseIncludable"] | None]
+        instructions: NotRequired[str | None]
+        max_output_tokens: NotRequired[int | None]
+        metadata: NotRequired[Union["Metadata", None]]
+        parallel_tool_calls: NotRequired[bool | None]
+        temperature: NotRequired[float | None]
+        text: NotRequired["ResponseTextConfigParam"]
+        tool_choice: NotRequired["response_create_params.ToolChoice"]
+        tools: NotRequired[list["ToolParam"]]
+        top_p: NotRequired[float | None]
+        reasoning: NotRequired["Reasoning"]
+
+    class CreateStreamResponsesParams(CommonIntegratedParams):
+        input: Union[str, "ResponseInputParam"]
+        options: NotRequired["Integrated.CreateStreamResponsesOpenaiParams"]
 
     class ToolCall(TypedDict):
         id: str
-        name: str | None = None
-        arguments: str | None = None
+        name: str | None
+        arguments: str | None
 
     class ResponsesStreamResult(TypedDict):
         id: str
         message: Message
         toolCalls: List["Integrated.ToolCall"]
+        output: list["ResponseOutputItem"]
 
     @classmethod
     def chat_stream_completion(
86
85
  class ToolCall(TypedDict):
87
86
  id: str
88
- name: str | None = None
89
- arguments: str | None = None
87
+ name: str | None
88
+ arguments: str | None
90
89
 
91
90
  class ResponsesStreamResult(TypedDict):
92
91
  id: str
93
92
  message: Message
94
93
  toolCalls: List["Integrated.ToolCall"]
94
+ output: list["ResponseOutputItem"]
95
95
 
96
96
  @classmethod
97
97
  def chat_stream_completion(
@@ -146,7 +146,7 @@ class Integrated(APIResource["Integrated"]):
         cls,
         user_id: str,
         company_id: str,
-        **params: Unpack["Integrated.CreateStreamResponseParams"],
+        **params: Unpack["Integrated.CreateStreamResponsesParams"],
     ) -> "Integrated.ResponsesStreamResult":
         """
         Executes a call to the language model and streams to the chat in real-time.
@@ -154,7 +154,7 @@ class Integrated(APIResource["Integrated"]):
        In the form of [sourceX]. The reference documents must be given as a list in searchContext.
        """
        return cast(
-            "Integrated.Responses",
+            "Integrated.ResponsesStreamResult",
            cls._static_request(
                "post",
                "/integrated/chat/stream-responses",
@@ -169,7 +169,7 @@ class Integrated(APIResource["Integrated"]):
         cls,
         user_id: str,
         company_id: str,
-        **params: Unpack["Integrated.CreateStreamResponseParams"],
+        **params: Unpack["Integrated.CreateStreamResponsesParams"],
     ) -> "Integrated.ResponsesStreamResult":
         """
         Executes a call to the language model and streams to the chat in real-time.
@@ -177,7 +177,7 @@ class Integrated(APIResource["Integrated"]):
        In the form of [sourceX]. The reference documents must be given as a list in searchContext.
        """
        return cast(
-            "Integrated.Responses",
+            "Integrated.ResponsesStreamResult",
            cls._static_request(
                "post",
                "/integrated/chat/stream-responses",
@@ -1,7 +1,8 @@
 import asyncio
-from typing import List
+from typing import List, Literal
 
 from unique_sdk.api_resources._content import Content
+from unique_sdk.api_resources._message import Message
 from unique_sdk.api_resources._space import Space
 from unique_sdk.utils.file_io import upload_file
 
@@ -16,6 +17,7 @@ async def send_message_and_wait_for_completion(
     chat_id: str = None,
     poll_interval: float = 1.0,
     max_wait: float = 60.0,
+    stop_condition: Literal["stoppedStreamingAt", "completedAt"] = "stoppedStreamingAt",
 ) -> "Space.Message":
     """
     Sends a prompt asynchronously and polls for completion. (until stoppedStreamingAt is not None)
@@ -27,6 +29,7 @@ async def send_message_and_wait_for_completion(
         text: The prompt text.
         poll_interval: Seconds between polls.
         max_wait: Maximum seconds to wait for completion.
+        stop_condition: Defines when to expect a response back, when the assistant stop streaming or when it completes the message. (default: "stoppedStreamingAt")
         **kwargs: Additional parameters for the prompt.
 
     Returns:
@@ -42,11 +45,21 @@ async def send_message_and_wait_for_completion(
         scopeRules=scope_rules,
     )
     chat_id = response.get("chatId")
+    message_id = response.get("id")
 
     max_attempts = int(max_wait // poll_interval)
     for _ in range(max_attempts):
         answer = Space.get_latest_message(user_id, company_id, chat_id)
-        if answer.get("stoppedStreamingAt") is not None:
+        if answer.get(stop_condition) is not None:
+            try:
+                user_message = Message.retrieve(
+                    user_id, company_id, message_id, chatId=chat_id
+                )
+                debug_info = user_message.get("debugInfo")
+                answer["debugInfo"] = debug_info
+            except Exception as e:
+                print(f"Failed to load debug info from user message: {e}")
+
             return answer
         await asyncio.sleep(poll_interval)
 
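
A short sketch of driving the updated helper end to end. The IDs and prompt are placeholders; `assistant_id` and `text` come from the parameter list documented in the README excerpt further down.

```python
# Hypothetical usage sketch for the 0.10.19 polling changes.
import asyncio

from unique_sdk.utils.chat_in_space import send_message_and_wait_for_completion


async def main() -> None:
    answer = await send_message_and_wait_for_completion(
        user_id="user_123",            # placeholder IDs
        company_id="company_123",
        assistant_id="assistant_123",
        text="What changed in the latest release?",
        stop_condition="completedAt",  # new option; defaults to "stoppedStreamingAt"
    )
    # Since 0.10.19 the helper also copies debugInfo from the originating
    # user message onto the returned answer (when it can be retrieved).
    print(answer.get("debugInfo"))


asyncio.run(main())
```
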
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: unique_sdk
-Version: 0.10.18
+Version: 0.10.20
 Summary:
 License: MIT
 Author: Martin Fadler
@@ -10,6 +10,8 @@ Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Provides-Extra: openai
+Requires-Dist: openai (>=1.105.0,<2.0.0) ; extra == "openai"
 Requires-Dist: requests (>=2.32.3,<3.0.0)
 Requires-Dist: typing-extensions (>=4.9.0,<5.0.0)
 Description-Content-Type: text/markdown
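
The new `openai` dependency is declared as an optional extra, and the package only imports it under `TYPE_CHECKING` (see the first hunk), so plain installs are unaffected. A sketch of how downstream code might mirror that guard; the `build_input` function is illustrative only, not part of the SDK.

```python
# Illustrative pattern only: the openai types resolve for type checkers when the
# optional dependency is installed, while plain installs never import openai at runtime.
from typing import TYPE_CHECKING, Union

if TYPE_CHECKING:
    from openai.types.responses import ResponseInputParam


def build_input(prompt: str) -> Union[str, "ResponseInputParam"]:
    # The forward reference stays a string at runtime, so no openai import is needed.
    return prompt
```
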
@@ -1466,11 +1468,13 @@ The script sends a prompt asynchronously and continuously polls for completion,
 - `chat_id`: The ID of the chat where the message should be sent. If omitted, a new chat will be created.
 - `poll_interval`: The number of seconds to wait between polling attempts (default: `1` second).
 - `max_wait`: The maximum number of seconds to wait for the message to complete (default: `60` seconds).
+- `stop_condition`: Defines when to expect a response back, when the assistant stop streaming or when it completes the message. (default: "stoppedStreamingAt")
 
 The script ensures you can flexibly interact with spaces in new or ongoing chats, with fine-grained control over tools, context, and polling behavior.
 
 ```python
-latest_message = await unique_sdk.utils.chat_in_space.send_message_and_wait_for_completion(
+from unique_sdk.utils.chat_in_space import send_message_and_wait_for_completion
+latest_message = await send_message_and_wait_for_completion(
     user_id=user_id,
     company_id=company_id,
     assistant_id=assistant_id,
@@ -1497,6 +1501,7 @@ latest_message = await unique_sdk.utils.chat_in_space.send_message_and_wait_for_
             }
         ]
     },
+    stop_condition = "completedAt" # If not specified, stoppedStreamingAt will be set by default
 )
 ```
 
@@ -1572,8 +1577,16 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [0.10.20] - 2025-09-04
+- Update Responses API types
+
+## [0.10.19] - 2025-09-02
+- Improve `send_message_and_wait_for_completion`:
+  - Add option to select stop_condition `["stoppedStreamingAt", "completedAt"]`.
+  - Load `debugInfo` from `last_user_message` for better developer experience.
+
 ## [0.10.18] - 2025-09-02
-- Temporarily remove support for udpate and delete files by filePath.
+- Temporarily remove support for update and delete files by filePath.
 
 ## [0.10.17] - 2025-09-01
 - Add function to update a file
@@ -21,7 +21,7 @@ unique_sdk/api_resources/_content.py,sha256=Vi-wZ-T5f-OqBGXkA3B9dALoFHer5F8LAQlc
 unique_sdk/api_resources/_embedding.py,sha256=C6qak7cCUBMBINfPhgH8taCJZ9n6w1MUElqDJJ8dG10,1281
 unique_sdk/api_resources/_event.py,sha256=bpWF9vstdoAWbUzr-iiGP713ceP0zPk77GJXiImf9zg,374
 unique_sdk/api_resources/_folder.py,sha256=mIyWaxJtIHlDLPFZ0FY1U9b3dmtmIcjDEbgOZtLA-DI,12871
-unique_sdk/api_resources/_integrated.py,sha256=z_DrftwjgVCi10QQqRYnG5_-95kD7Kfjogbb-dmnJuA,5854
+unique_sdk/api_resources/_integrated.py,sha256=O8e673z-RB7FRFMQYn_YEuHijebr5W7KJxkUnymbBZk,6164
 unique_sdk/api_resources/_mcp.py,sha256=zKh0dyn0QnkKk57N2zlGVN_GQoxEp5T2CS38vVm6jQY,3341
 unique_sdk/api_resources/_message.py,sha256=gEDIzg3METZU2k7m69meAuf0IWmZxnYOjbBKPRMwPYE,7688
 unique_sdk/api_resources/_message_assessment.py,sha256=SSfx6eW7zb_GKe8cFJzCqW-t-_eWEXxKP5cnIb0DhIc,2276
@@ -32,11 +32,11 @@ unique_sdk/api_resources/_search_string.py,sha256=4Idw6exgZdA8qksz9WkiA68k1hTU-7
 unique_sdk/api_resources/_short_term_memory.py,sha256=vPRN-Y0WPx74E6y-A3LocGc0TxJdzT-xGL66WzZwKRg,2820
 unique_sdk/api_resources/_space.py,sha256=6789zLwkoZqrEESiTJIBVaNi8kAKAZnqR0KMmW1AzgI,4905
 unique_sdk/utils/chat_history.py,sha256=5UqL9hF1O9pV7skbNOlEibF5rHdYsmG3m5-YEPUowOs,3037
-unique_sdk/utils/chat_in_space.py,sha256=3NeBjOu7p43V_6PrjwxyaTkgknUS10KE4QRuTlFDU_4,5232
+unique_sdk/utils/chat_in_space.py,sha256=NrH9e2lvXtj_oePG0RWUqFoTanMblF8-VgtnVfszPS8,5949
 unique_sdk/utils/file_io.py,sha256=YY8B7VJcTLOPmCXByiOfNerXGlAtjCC5EVNmAbQJ3dQ,4306
 unique_sdk/utils/sources.py,sha256=DoxxhMLcLhmDfNarjXa41H4JD2GSSDywr71hiC-4pYc,4952
 unique_sdk/utils/token.py,sha256=AzKuAA1AwBtnvSFxGcsHLpxXr_wWE5Mj4jYBbOz2ljA,1740
-unique_sdk-0.10.18.dist-info/LICENSE,sha256=EJCWoHgrXVBUb47PnjeV4MFIEOR71MAdCOIgv61J-4k,1065
-unique_sdk-0.10.18.dist-info/METADATA,sha256=aduy4luaw6fs5a2fInb5ws8VfGb_glx3CG1RrgFrnmM,53900
-unique_sdk-0.10.18.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
-unique_sdk-0.10.18.dist-info/RECORD,,
+unique_sdk-0.10.20.dist-info/LICENSE,sha256=EJCWoHgrXVBUb47PnjeV4MFIEOR71MAdCOIgv61J-4k,1065
+unique_sdk-0.10.20.dist-info/METADATA,sha256=mbVWlT-JFrSvRV5W7AjWT3a1HoZvlnvbReiu_f1HMpI,54588
+unique_sdk-0.10.20.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+unique_sdk-0.10.20.dist-info/RECORD,,