openai-sdk-helpers 0.0.8__py3-none-any.whl → 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. openai_sdk_helpers/__init__.py +90 -2
  2. openai_sdk_helpers/agent/__init__.py +8 -4
  3. openai_sdk_helpers/agent/base.py +80 -45
  4. openai_sdk_helpers/agent/config.py +6 -4
  5. openai_sdk_helpers/agent/{project_manager.py → coordination.py} +29 -45
  6. openai_sdk_helpers/agent/prompt_utils.py +7 -1
  7. openai_sdk_helpers/agent/runner.py +67 -141
  8. openai_sdk_helpers/agent/search/__init__.py +33 -0
  9. openai_sdk_helpers/agent/search/base.py +297 -0
  10. openai_sdk_helpers/agent/{vector_search.py → search/vector.py} +89 -157
  11. openai_sdk_helpers/agent/{web_search.py → search/web.py} +77 -156
  12. openai_sdk_helpers/agent/summarizer.py +29 -8
  13. openai_sdk_helpers/agent/translator.py +40 -13
  14. openai_sdk_helpers/agent/validation.py +32 -8
  15. openai_sdk_helpers/async_utils.py +132 -0
  16. openai_sdk_helpers/config.py +101 -65
  17. openai_sdk_helpers/context_manager.py +241 -0
  18. openai_sdk_helpers/enums/__init__.py +9 -1
  19. openai_sdk_helpers/enums/base.py +67 -8
  20. openai_sdk_helpers/environment.py +33 -6
  21. openai_sdk_helpers/errors.py +133 -0
  22. openai_sdk_helpers/logging_config.py +105 -0
  23. openai_sdk_helpers/prompt/__init__.py +10 -71
  24. openai_sdk_helpers/prompt/base.py +222 -0
  25. openai_sdk_helpers/response/__init__.py +38 -3
  26. openai_sdk_helpers/response/base.py +363 -210
  27. openai_sdk_helpers/response/config.py +318 -0
  28. openai_sdk_helpers/response/messages.py +56 -40
  29. openai_sdk_helpers/response/runner.py +77 -33
  30. openai_sdk_helpers/response/tool_call.py +62 -27
  31. openai_sdk_helpers/response/vector_store.py +27 -14
  32. openai_sdk_helpers/retry.py +175 -0
  33. openai_sdk_helpers/streamlit_app/__init__.py +19 -2
  34. openai_sdk_helpers/streamlit_app/app.py +114 -39
  35. openai_sdk_helpers/streamlit_app/config.py +502 -0
  36. openai_sdk_helpers/streamlit_app/streamlit_web_search.py +5 -6
  37. openai_sdk_helpers/structure/__init__.py +72 -3
  38. openai_sdk_helpers/structure/agent_blueprint.py +82 -19
  39. openai_sdk_helpers/structure/base.py +208 -93
  40. openai_sdk_helpers/structure/plan/__init__.py +29 -1
  41. openai_sdk_helpers/structure/plan/enum.py +41 -5
  42. openai_sdk_helpers/structure/plan/helpers.py +172 -0
  43. openai_sdk_helpers/structure/plan/plan.py +109 -49
  44. openai_sdk_helpers/structure/plan/task.py +38 -6
  45. openai_sdk_helpers/structure/plan/types.py +15 -0
  46. openai_sdk_helpers/structure/prompt.py +21 -2
  47. openai_sdk_helpers/structure/responses.py +52 -11
  48. openai_sdk_helpers/structure/summary.py +55 -7
  49. openai_sdk_helpers/structure/validation.py +34 -6
  50. openai_sdk_helpers/structure/vector_search.py +132 -18
  51. openai_sdk_helpers/structure/web_search.py +125 -13
  52. openai_sdk_helpers/tools.py +193 -0
  53. openai_sdk_helpers/types.py +57 -0
  54. openai_sdk_helpers/utils/__init__.py +34 -1
  55. openai_sdk_helpers/utils/core.py +296 -34
  56. openai_sdk_helpers/validation.py +302 -0
  57. openai_sdk_helpers/vector_storage/__init__.py +21 -1
  58. openai_sdk_helpers/vector_storage/cleanup.py +25 -13
  59. openai_sdk_helpers/vector_storage/storage.py +123 -64
  60. openai_sdk_helpers/vector_storage/types.py +20 -19
  61. openai_sdk_helpers-0.1.0.dist-info/METADATA +550 -0
  62. openai_sdk_helpers-0.1.0.dist-info/RECORD +69 -0
  63. openai_sdk_helpers/streamlit_app/configuration.py +0 -324
  64. openai_sdk_helpers-0.0.8.dist-info/METADATA +0 -194
  65. openai_sdk_helpers-0.0.8.dist-info/RECORD +0 -55
  66. {openai_sdk_helpers-0.0.8.dist-info → openai_sdk_helpers-0.1.0.dist-info}/WHEEL +0 -0
  67. {openai_sdk_helpers-0.0.8.dist-info → openai_sdk_helpers-0.1.0.dist-info}/licenses/LICENSE +0 -0
@@ -1,10 +1,15 @@
1
- """Streamlit chat application driven by a developer configuration."""
1
+ """Configuration-driven Streamlit chat application.
2
+
3
+ This module implements a complete Streamlit chat interface that loads its
4
+ configuration from a Python module. It handles conversation state, message
5
+ rendering, response execution, and resource cleanup.
6
+ """
2
7
 
3
8
  from __future__ import annotations
4
9
 
5
10
  import json
6
11
  from pathlib import Path
7
- from typing import Any, Dict, List
12
+ from typing import Any
8
13
 
9
14
  import streamlit as st
10
15
  from dotenv import load_dotenv
@@ -17,21 +22,29 @@ from openai_sdk_helpers.streamlit_app import (
17
22
  _load_configuration,
18
23
  )
19
24
  from openai_sdk_helpers.structure.base import BaseStructure
20
- from openai_sdk_helpers.utils import ensure_list, coerce_jsonable, log
25
+ from openai_sdk_helpers.utils import coerce_jsonable, ensure_list, log
21
26
 
22
27
 
23
28
  def _extract_assistant_text(response: BaseResponse[Any]) -> str:
24
- """Return the latest assistant message as a friendly string.
29
+ """Extract the latest assistant message as readable text.
30
+
31
+ Searches the response's message history for the most recent assistant
32
+ or tool message and extracts displayable text content.
25
33
 
26
34
  Parameters
27
35
  ----------
28
36
  response : BaseResponse[Any]
29
- Active response session containing message history.
37
+ Active response session with message history.
30
38
 
31
39
  Returns
32
40
  -------
33
41
  str
34
- Concatenated assistant text, or an empty string when unavailable.
42
+ Concatenated assistant text, or empty string if unavailable.
43
+
44
+ Examples
45
+ --------
46
+ >>> text = _extract_assistant_text(response)
47
+ >>> print(text)
35
48
  """
36
49
  message = response.get_last_assistant_message() or response.get_last_tool_message()
37
50
  if message is None:
@@ -41,7 +54,7 @@ def _extract_assistant_text(response: BaseResponse[Any]) -> str:
41
54
  if content is None:
42
55
  return ""
43
56
 
44
- text_parts: List[str] = []
57
+ text_parts: list[str] = []
45
58
  for part in ensure_list(content):
46
59
  text_value = getattr(getattr(part, "text", None), "value", None)
47
60
  if text_value:
@@ -52,19 +65,28 @@ def _extract_assistant_text(response: BaseResponse[Any]) -> str:
52
65
 
53
66
 
54
67
  def _render_summary(result: Any, response: BaseResponse[Any]) -> str:
55
- """Generate the assistant-facing summary shown in the transcript.
68
+ """Generate display text for the chat transcript.
69
+
70
+ Converts the response result into a human-readable format suitable
71
+ for display in the Streamlit chat interface. Handles structured
72
+ outputs, dictionaries, and raw text.
56
73
 
57
74
  Parameters
58
75
  ----------
59
76
  result : Any
60
- Parsed result returned from ``BaseResponse.run_sync``.
77
+ Parsed result from BaseResponse.run_sync.
61
78
  response : BaseResponse[Any]
62
- Response instance containing the latest assistant message.
79
+ Response instance containing message history.
63
80
 
64
81
  Returns
65
82
  -------
66
83
  str
67
84
  Display-ready summary text for the chat transcript.
85
+
86
+ Notes
87
+ -----
88
+ Falls back to extracting assistant text from message history if
89
+ the result cannot be formatted directly.
68
90
  """
69
91
  if isinstance(result, BaseStructure):
70
92
  return result.print()
@@ -79,20 +101,29 @@ def _render_summary(result: Any, response: BaseResponse[Any]) -> str:
79
101
  return "No response returned."
80
102
 
81
103
 
82
- def _build_raw_output(result: Any, response: BaseResponse[Any]) -> Dict[str, Any]:
83
- """Assemble the raw payload shown under the expandable transcript section.
104
+ def _build_raw_output(result: Any, response: BaseResponse[Any]) -> dict[str, Any]:
105
+ """Assemble raw JSON payload for the expandable transcript section.
106
+
107
+ Creates a structured dictionary containing both the parsed result
108
+ and the complete conversation history for debugging and inspection.
84
109
 
85
110
  Parameters
86
111
  ----------
87
112
  result : Any
88
- Parsed result returned from the response instance.
113
+ Parsed result from the response execution.
89
114
  response : BaseResponse[Any]
90
- Response session containing message history.
115
+ Response session with complete message history.
91
116
 
92
117
  Returns
93
118
  -------
94
119
  dict[str, Any]
95
- Mapping that includes parsed data and raw conversation messages.
120
+ Mapping with 'parsed' data and 'conversation' messages.
121
+
122
+ Examples
123
+ --------
124
+ >>> raw = _build_raw_output(result, response)
125
+ >>> raw.keys()
126
+ dict_keys(['parsed', 'conversation'])
96
127
  """
97
128
  return {
98
129
  "parsed": coerce_jsonable(result),
@@ -101,22 +132,31 @@ def _build_raw_output(result: Any, response: BaseResponse[Any]) -> Dict[str, Any
101
132
 
102
133
 
103
134
  def _get_response_instance(config: StreamlitAppConfig) -> BaseResponse[Any]:
104
- """Instantiate and cache the configured :class:`BaseResponse`.
135
+ """Instantiate and cache the configured BaseResponse.
136
+
137
+ Creates a new response instance from the configuration if not already
138
+ cached in session state. Applies vector store attachments and cleanup
139
+ settings based on configuration.
105
140
 
106
141
  Parameters
107
142
  ----------
108
143
  config : StreamlitAppConfig
109
- Loaded configuration containing the response definition.
144
+ Loaded configuration with response handler definition.
110
145
 
111
146
  Returns
112
147
  -------
113
148
  BaseResponse[Any]
114
- Active response instance for the current session.
149
+ Active response instance for the current Streamlit session.
115
150
 
116
151
  Raises
117
152
  ------
118
153
  TypeError
119
- If the configured ``response`` cannot produce ``BaseResponse``.
154
+ If the configured response cannot produce a BaseResponse.
155
+
156
+ Notes
157
+ -----
158
+ The response instance is cached in st.session_state['response_instance']
159
+ to maintain conversation state across Streamlit reruns.
120
160
  """
121
161
  if "response_instance" in st.session_state:
122
162
  cached = st.session_state["response_instance"]
@@ -138,17 +178,21 @@ def _get_response_instance(config: StreamlitAppConfig) -> BaseResponse[Any]:
138
178
 
139
179
 
140
180
  def _reset_chat(close_response: bool = True) -> None:
141
- """Clear the conversation and optionally close the response session.
181
+ """Clear conversation and optionally close the response session.
182
+
183
+ Saves the current conversation to disk, closes the response to clean
184
+ up resources, and clears the chat history from session state.
142
185
 
143
186
  Parameters
144
187
  ----------
145
- close_response : bool, default=True
146
- Whether to call ``close`` on the cached response instance.
147
-
148
- Returns
149
- -------
150
- None
151
- This function mutates ``st.session_state`` in-place.
188
+ close_response : bool, default True
189
+ Whether to call close() on the cached response instance,
190
+ triggering resource cleanup.
191
+
192
+ Notes
193
+ -----
194
+ This function mutates st.session_state in-place, clearing the
195
+ chat_history and response_instance keys.
152
196
  """
153
197
  response = st.session_state.get("response_instance")
154
198
  if close_response and isinstance(response, BaseResponse):
@@ -160,12 +204,15 @@ def _reset_chat(close_response: bool = True) -> None:
160
204
 
161
205
 
162
206
  def _init_session_state() -> None:
163
- """Prepare Streamlit session state containers.
207
+ """Initialize Streamlit session state for chat functionality.
164
208
 
165
- Returns
166
- -------
167
- None
168
- This function initializes chat-related session keys when absent.
209
+ Creates the chat_history list in session state if it doesn't exist,
210
+ enabling conversation persistence across Streamlit reruns.
211
+
212
+ Notes
213
+ -----
214
+ This function should be called early in the app lifecycle to ensure
215
+ session state is properly initialized before rendering chat UI.
169
216
  """
170
217
  if "chat_history" not in st.session_state:
171
218
  st.session_state["chat_history"] = []
@@ -174,10 +221,14 @@ def _init_session_state() -> None:
174
221
  def _render_chat_history() -> None:
175
222
  """Display the conversation transcript from session state.
176
223
 
177
- Returns
178
- -------
179
- None
180
- Renders chat messages in the current Streamlit session.
224
+ Iterates through chat_history in session state and renders each
225
+ message with appropriate formatting. Assistant messages include
226
+ an expandable raw output section.
227
+
228
+ Notes
229
+ -----
230
+ Uses Streamlit's chat_message context manager for role-based
231
+ message styling.
181
232
  """
182
233
  for message in st.session_state.get("chat_history", []):
183
234
  role = message.get("role", "assistant")
@@ -193,14 +244,24 @@ def _render_chat_history() -> None:
193
244
 
194
245
 
195
246
  def _handle_user_message(prompt: str, config: StreamlitAppConfig) -> None:
196
- """Append a user prompt and stream the assistant reply into the transcript.
247
+ """Process user input and generate assistant response.
248
+
249
+ Appends the user message to chat history, executes the response
250
+ handler, and adds the assistant's reply to the conversation.
251
+ Handles errors gracefully by displaying them in the UI.
197
252
 
198
253
  Parameters
199
254
  ----------
200
255
  prompt : str
201
256
  User-entered text to send to the assistant.
202
257
  config : StreamlitAppConfig
203
- Loaded configuration containing the response definition.
258
+ Loaded configuration with response handler definition.
259
+
260
+ Notes
261
+ -----
262
+ Errors during response execution are caught and displayed in the
263
+ chat transcript rather than crashing the application. The function
264
+ triggers a Streamlit rerun after successful response generation.
204
265
  """
205
266
  st.session_state["chat_history"].append({"role": "user", "content": prompt})
206
267
  try:
@@ -230,12 +291,26 @@ def _handle_user_message(prompt: str, config: StreamlitAppConfig) -> None:
230
291
 
231
292
 
232
293
  def main(config_path: Path) -> None:
233
- """Run the config-driven Streamlit chat app.
294
+ """Run the configuration-driven Streamlit chat application.
295
+
296
+ Entry point for the Streamlit app that loads configuration, sets up
297
+ the UI, manages session state, and handles user interactions.
234
298
 
235
299
  Parameters
236
300
  ----------
237
301
  config_path : Path
238
302
  Filesystem location of the configuration module.
303
+
304
+ Notes
305
+ -----
306
+ This function should be called as the entry point for the Streamlit
307
+ application. It handles the complete application lifecycle including
308
+ configuration loading, UI rendering, and chat interactions.
309
+
310
+ Examples
311
+ --------
312
+ >>> from pathlib import Path
313
+ >>> main(Path("./my_config.py"))
239
314
  """
240
315
  config = _load_configuration(config_path)
241
316
  st.set_page_config(page_title=config.display_title, layout="wide")