openai-sdk-helpers 0.4.3__py3-none-any.whl → 0.5.0__py3-none-any.whl

This diff compares publicly released package versions as they appear in their respective public registries; it is provided for informational purposes only.
Files changed (47)
  1. openai_sdk_helpers/__init__.py +41 -7
  2. openai_sdk_helpers/agent/__init__.py +1 -2
  3. openai_sdk_helpers/agent/base.py +89 -173
  4. openai_sdk_helpers/agent/configuration.py +12 -20
  5. openai_sdk_helpers/agent/coordinator.py +14 -17
  6. openai_sdk_helpers/agent/runner.py +3 -45
  7. openai_sdk_helpers/agent/search/base.py +49 -71
  8. openai_sdk_helpers/agent/search/vector.py +82 -110
  9. openai_sdk_helpers/agent/search/web.py +103 -81
  10. openai_sdk_helpers/agent/summarizer.py +20 -28
  11. openai_sdk_helpers/agent/translator.py +17 -23
  12. openai_sdk_helpers/agent/validator.py +17 -23
  13. openai_sdk_helpers/errors.py +9 -0
  14. openai_sdk_helpers/extract/__init__.py +23 -0
  15. openai_sdk_helpers/extract/extractor.py +157 -0
  16. openai_sdk_helpers/extract/generator.py +476 -0
  17. openai_sdk_helpers/prompt/extractor_config_agent_instructions.jinja +6 -0
  18. openai_sdk_helpers/prompt/extractor_config_generator.jinja +37 -0
  19. openai_sdk_helpers/prompt/extractor_config_generator_instructions.jinja +9 -0
  20. openai_sdk_helpers/prompt/extractor_prompt_optimizer_agent_instructions.jinja +4 -0
  21. openai_sdk_helpers/prompt/extractor_prompt_optimizer_request.jinja +11 -0
  22. openai_sdk_helpers/response/__init__.py +2 -6
  23. openai_sdk_helpers/response/base.py +85 -94
  24. openai_sdk_helpers/response/configuration.py +39 -14
  25. openai_sdk_helpers/response/files.py +2 -0
  26. openai_sdk_helpers/response/runner.py +1 -48
  27. openai_sdk_helpers/response/tool_call.py +0 -141
  28. openai_sdk_helpers/response/vector_store.py +8 -5
  29. openai_sdk_helpers/streamlit_app/app.py +1 -1
  30. openai_sdk_helpers/structure/__init__.py +16 -0
  31. openai_sdk_helpers/structure/base.py +239 -278
  32. openai_sdk_helpers/structure/extraction.py +1228 -0
  33. openai_sdk_helpers/structure/plan/plan.py +0 -20
  34. openai_sdk_helpers/structure/plan/task.py +0 -33
  35. openai_sdk_helpers/structure/prompt.py +16 -0
  36. openai_sdk_helpers/structure/responses.py +2 -2
  37. openai_sdk_helpers/structure/web_search.py +0 -10
  38. openai_sdk_helpers/tools.py +346 -99
  39. openai_sdk_helpers/utils/__init__.py +7 -0
  40. openai_sdk_helpers/utils/json/base_model.py +315 -32
  41. openai_sdk_helpers/utils/langextract.py +194 -0
  42. {openai_sdk_helpers-0.4.3.dist-info → openai_sdk_helpers-0.5.0.dist-info}/METADATA +18 -4
  43. {openai_sdk_helpers-0.4.3.dist-info → openai_sdk_helpers-0.5.0.dist-info}/RECORD +46 -37
  44. openai_sdk_helpers/streamlit_app/streamlit_web_search.py +0 -75
  45. {openai_sdk_helpers-0.4.3.dist-info → openai_sdk_helpers-0.5.0.dist-info}/WHEEL +0 -0
  46. {openai_sdk_helpers-0.4.3.dist-info → openai_sdk_helpers-0.5.0.dist-info}/entry_points.txt +0 -0
  47. {openai_sdk_helpers-0.4.3.dist-info → openai_sdk_helpers-0.5.0.dist-info}/licenses/LICENSE +0 -0
openai_sdk_helpers/agent/coordinator.py

@@ -16,7 +16,6 @@ from ..utils import ensure_directory, log
 from .base import AgentBase
 from .configuration import AgentConfiguration

-
 PromptFn = Callable[[str], PromptStructure]
 BuildPlanFn = Callable[[str], PlanStructure]
 ExecutePlanFn = Callable[[PlanStructure], List[str]]
@@ -42,10 +41,10 @@ class CoordinatorAgent(AgentBase):
         Name of the parent module for data organization.
     configuration : AgentConfiguration or None, default=None
         Optional agent configuration describing prompts and metadata.
-    prompt_dir : Path or None, default=None
-        Optional directory holding prompt templates.
-    default_model : str or None, default=None
-        Optional fallback model identifier.
+    template_path : Path or None, default=None
+        Optional template file path for prompt rendering.
+    model : str or None, default=None
+        Model identifier to use for coordinator operations.

     Methods
     -------
@@ -85,8 +84,8 @@ class CoordinatorAgent(AgentBase):
         module_data_path: Path,
         name: str,
         configuration: Optional[AgentConfiguration] = None,
-        prompt_dir: Optional[Path] = None,
-        default_model: Optional[str] = None,
+        template_path: Optional[Path] = None,
+        model: Optional[str] = None,
     ) -> None:
         """Initialize the project manager with injected workflow helpers.

@@ -106,10 +105,10 @@ class CoordinatorAgent(AgentBase):
             Name of the parent module for data organization.
         configuration : AgentConfiguration or None, default=None
             Optional agent configuration describing prompts and metadata.
-        prompt_dir : Path or None, default=None
-            Optional directory holding prompt templates.
-        default_model : str or None, default=None
-            Optional fallback model identifier.
+        template_path : Path or None, default=None
+            Optional template file path for prompt rendering.
+        model : str or None, default=None
+            Model identifier to use for coordinator operations.

         Raises
         ------
@@ -129,15 +128,13 @@ class CoordinatorAgent(AgentBase):
         """
         if configuration is None:
             configuration = AgentConfiguration(
-                name="coordinator_agent",
+                name=__class__.__name__,
                 instructions="Coordinate agents for planning and summarization.",
                 description="Coordinates agents for planning and summarization.",
+                template_path=template_path,
+                model=model,
             )
-        super().__init__(
-            configuration=configuration,
-            prompt_dir=prompt_dir,
-            default_model=default_model,
-        )
+        super().__init__(configuration=configuration)
         self._prompt_fn = prompt_fn
         self._build_plan_fn = build_plan_fn
         self._execute_plan_fn = execute_plan_fn
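
Note on the coordinator hunks above: CoordinatorAgent no longer accepts prompt_dir or default_model; the template path and model now travel on the AgentConfiguration handed to AgentBase. A minimal sketch of the default configuration built in 0.5.0, using only keywords visible in the diff (the template file path is a hypothetical placeholder):

    from pathlib import Path

    from openai_sdk_helpers.agent.configuration import AgentConfiguration

    # Mirrors the default built inside CoordinatorAgent.__init__ in 0.5.0:
    # template_path and model ride on the configuration, not on the constructor.
    configuration = AgentConfiguration(
        name="CoordinatorAgent",
        instructions="Coordinate agents for planning and summarization.",
        description="Coordinates agents for planning and summarization.",
        template_path=Path("prompt/coordinator.jinja"),  # hypothetical template file
        model="gpt-4o-mini",
    )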
openai_sdk_helpers/agent/runner.py

@@ -2,14 +2,14 @@

 These helpers provide a consistent interface around the lower-level functions in
 the ``agent.base`` module, allowing callers to execute agents with consistent
-signatures whether they need asynchronous, synchronous, or streamed results.
+signatures whether they need asynchronous or synchronous results.
 """

 from __future__ import annotations

 from typing import Any, Dict, Optional

-from agents import Agent, RunResult, RunResultStreaming, Runner, Session
+from agents import Agent, RunResult, Runner, Session

 from openai_sdk_helpers.utils.async_utils import run_coroutine_with_fallback
 from ..structure.base import StructureBase
@@ -109,46 +109,4 @@ def run_sync(
     return result


-def run_streamed(
-    agent: Agent,
-    input: str,
-    *,
-    context: Optional[Dict[str, Any]] = None,
-    output_structure: Optional[type[StructureBase]] = None,
-    session: Optional[Session] = None,
-) -> RunResultStreaming | StructureBase:
-    """Stream agent execution results.
-
-    Parameters
-    ----------
-    agent : Agent
-        Configured agent to execute.
-    input : str
-        Prompt or query string for the agent.
-    context : dict or None, default=None
-        Optional context dictionary passed to the agent.
-    output_structure : type[StructureBase] or None, default=None
-        Optional type used to cast the final output.
-    session : Session or None, default=None
-        Optional session for maintaining conversation history.
-
-    Returns
-    -------
-    RunResultStreaming
-        Streaming output wrapper from the agent execution.
-
-    Examples
-    --------
-    >>> from agents import Agent
-    >>> agent = Agent(name="test", instructions="test", model="gpt-4o-mini")
-    >>> result = run_streamed(agent, "Explain AI")  # doctest: +SKIP
-    >>> for chunk in result.stream_text():  # doctest: +SKIP
-    ...     print(chunk, end="")
-    """
-    result = Runner.run_streamed(agent, input, context=context, session=session)
-    if output_structure is not None:
-        return result.final_output_as(output_structure)
-    return result
-
-
-__all__ = ["run_sync", "run_async", "run_streamed"]
+__all__ = ["run_sync", "run_async"]
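
Note on the runner hunks above: run_streamed is removed in 0.5.0, so only run_sync and run_async remain exported. A rough usage sketch, assuming run_sync keeps the keyword interface the removed run_streamed helper had (agent and input, plus optional context, output_structure, and session):

    from agents import Agent

    from openai_sdk_helpers.agent.runner import run_sync

    agent = Agent(name="demo", instructions="Answer briefly.", model="gpt-4o-mini")
    # Blocking call; callers that previously consumed streamed chunks now receive
    # the finished RunResult (pass output_structure=... to cast the final output).
    result = run_sync(agent, "Explain AI")
    print(result.final_output)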
openai_sdk_helpers/agent/search/base.py

@@ -10,7 +10,7 @@ from __future__ import annotations
 import asyncio
 from abc import ABC, abstractmethod
 from pathlib import Path
-from typing import Generic, List, Optional, TypeVar, Union
+from typing import Any, Generic, List, Optional, TypeVar, Union

 from ..base import AgentBase
 from ..configuration import AgentConfiguration
@@ -33,10 +33,10 @@ class SearchPlanner(AgentBase, Generic[PlanType]):

     Parameters
     ----------
-    prompt_dir : Path, optional
-        Directory containing prompt templates.
-    default_model : str, optional
-        Default model identifier to use when not defined in configuration.
+    template_path : Path | str | None, optional
+        Template file path for prompt rendering.
+    model : str | None, optional
+        Model identifier to use when not defined in configuration.

     Methods
     -------
@@ -48,35 +48,39 @@ class SearchPlanner(AgentBase, Generic[PlanType]):
     Raises
     ------
     ValueError
-        If the default model is not provided.
+        If the configuration omits a model identifier.

     Examples
     --------
     >>> class MyPlanner(SearchPlanner):
-    ...     def _configure_agent(self):
+    ...     def _configure_agent(self, template_path=None, model=None):
     ...         return AgentConfiguration(
     ...             name="my_planner",
     ...             description="Plans searches",
     ...             output_structure=WebSearchPlanStructure,
     ...         )
-    >>> planner = MyPlanner(default_model="gpt-4o-mini")
+    >>> planner = MyPlanner(model="gpt-4o-mini")
     """

     def __init__(
         self,
-        prompt_dir: Optional[Path] = None,
-        default_model: Optional[str] = None,
+        template_path: Path | str | None = None,
+        model: str | None = None,
+        **kwargs: Any,
     ) -> None:
         """Initialize the planner agent."""
-        configuration = self._configure_agent()
-        super().__init__(
-            configuration=configuration,
-            prompt_dir=prompt_dir,
-            default_model=default_model,
+        configuration = self._configure_agent(
+            template_path=template_path, model=model, **kwargs
         )
+        super().__init__(configuration=configuration)

     @abstractmethod
-    def _configure_agent(self) -> AgentConfiguration:
+    def _configure_agent(
+        self,
+        template_path: Path | str | None = None,
+        model: str | None = None,
+        **kwargs: Any,
+    ) -> AgentConfiguration:
         """Return configuration for this planner.

         Returns
@@ -124,10 +128,10 @@ class SearchToolAgent(AgentBase, Generic[ItemType, ResultType, PlanType]):

     Parameters
     ----------
-    prompt_dir : Path, optional
-        Directory containing prompt templates.
-    default_model : str, optional
-        Default model identifier to use when not defined in configuration.
+    template_path : Path | str | None, optional
+        Template file path for prompt rendering.
+    model : str | None, optional
+        Model identifier to use when not defined in configuration.
     max_concurrent_searches : int, default=10
         Maximum number of concurrent search operations.

@@ -143,12 +147,12 @@ class SearchToolAgent(AgentBase, Generic[ItemType, ResultType, PlanType]):
     Raises
     ------
     ValueError
-        If the default model is not provided.
+        If the configuration omits a model identifier.

     Examples
     --------
     >>> class MyTool(SearchToolAgent):
-    ...     def _configure_agent(self):
+    ...     def _configure_agent(self, *, template_path: Path | str | None = None, model: str | None = None):
     ...         return AgentConfiguration(
     ...             name="my_tool",
     ...             description="Executes searches",
@@ -156,27 +160,34 @@ class SearchToolAgent(AgentBase, Generic[ItemType, ResultType, PlanType]):
     ...         )
     ...     async def run_search(self, item):
     ...         return "result"
-    >>> tool = MyTool(default_model="gpt-4o-mini")
+    >>> tool = MyTool(model="gpt-4o-mini")
     """

     def __init__(
         self,
         *,
-        prompt_dir: Optional[Path] = None,
-        default_model: Optional[str] = None,
+        template_path: Path | str | None = None,
+        model: str | None = None,
         max_concurrent_searches: int = 10,
+        **kwargs: Any,
     ) -> None:
         """Initialize the search tool agent."""
         self._max_concurrent_searches = max_concurrent_searches
-        configuration = self._configure_agent()
-        super().__init__(
-            configuration=configuration,
-            prompt_dir=prompt_dir,
-            default_model=default_model,
+        configuration = self._configure_agent(
+            template_path=template_path,
+            model=model,
+            **kwargs,
         )
+        super().__init__(configuration=configuration)

     @abstractmethod
-    def _configure_agent(self) -> AgentConfiguration:
+    def _configure_agent(
+        self,
+        *,
+        template_path: Path | str | None = None,
+        model: str | None = None,
+        **kwargs: Any,
+    ) -> AgentConfiguration:
         """Return configuration for this tool agent.

         Returns
@@ -247,10 +258,10 @@ class SearchWriter(AgentBase, Generic[ReportType]):

     Parameters
     ----------
-    prompt_dir : Path, optional
-        Directory containing prompt templates.
-    default_model : str, optional
-        Default model identifier to use when not defined in configuration.
+    template_path : Path | str | None, optional
+        Template file path for prompt rendering.
+    model : str | None, optional
+        Model identifier to use when not defined in configuration.

     Methods
     -------
@@ -262,53 +273,20 @@ class SearchWriter(AgentBase, Generic[ReportType]):
     Raises
     ------
     ValueError
-        If the default model is not provided.
+        If the configuration omits a model identifier.

     Examples
     --------
     >>> class MyWriter(SearchWriter):
-    ...     def _configure_agent(self):
+    ...     def _configure_agent(self, template_path=None, model=None):
     ...         return AgentConfiguration(
     ...             name="my_writer",
     ...             description="Writes reports",
     ...             output_structure=WebSearchReportStructure,
     ...         )
-    >>> writer = MyWriter(default_model="gpt-4o-mini")
+    >>> writer = MyWriter(model="gpt-4o-mini")
     """

-    def __init__(
-        self,
-        prompt_dir: Optional[Path] = None,
-        default_model: Optional[str] = None,
-    ) -> None:
-        """Initialize the writer agent."""
-        configuration = self._configure_agent()
-        super().__init__(
-            configuration=configuration,
-            prompt_dir=prompt_dir,
-            default_model=default_model,
-        )
-
-    @abstractmethod
-    def _configure_agent(self) -> AgentConfiguration:
-        """Return configuration for this writer.
-
-        Returns
-        -------
-        AgentConfiguration
-            Configuration with name, description, and output_structure set.
-
-        Examples
-        --------
-        >>> configuration = AgentConfiguration(
-        ...     name="web_writer",
-        ...     description="Write web search report",
-        ...     output_structure=WebSearchReportStructure,
-        ... )
-        >>> return configuration
-        """
-        pass
-
     async def run_agent(
         self,
         query: str,
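
Note on the search base-class hunks above: SearchPlanner, SearchToolAgent, and SearchWriter now expect _configure_agent to accept template_path, model, and **kwargs and fold them into the returned configuration, while the constructors simply forward those keywords. A subclass sketch under that assumption (import paths are inferred from the file list; the plan structure mirrors the vector module's own imports):

    from pathlib import Path
    from typing import Any

    from openai_sdk_helpers.agent.configuration import AgentConfiguration
    from openai_sdk_helpers.agent.search.base import SearchPlanner
    from openai_sdk_helpers.structure.vector_search import VectorSearchPlanStructure


    class MyPlanner(SearchPlanner[VectorSearchPlanStructure]):
        def _configure_agent(
            self,
            template_path: Path | str | None = None,
            model: str | None = None,
            **kwargs: Any,
        ) -> AgentConfiguration:
            # Forward template_path and model so the base class resolves them from
            # the configuration rather than from separate constructor arguments.
            return AgentConfiguration(
                name="my_planner",
                description="Plans searches",
                output_structure=VectorSearchPlanStructure,
                template_path=template_path,
                model=model,
            )


    planner = MyPlanner(model="gpt-4o-mini")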
openai_sdk_helpers/agent/search/vector.py

@@ -3,13 +3,12 @@
 from __future__ import annotations

 from pathlib import Path
-from typing import Any, Callable, Dict, List, Optional
+from typing import Any, Callable, List, Optional

 from agents import custom_span, gen_trace_id, trace
 from agents.model_settings import ModelSettings

 from ...environment import DEFAULT_PROMPT_DIR
-from ...structure.prompt import PromptStructure
 from ...structure.vector_search import (
     VectorSearchItemStructure,
     VectorSearchItemResultStructure,
@@ -18,11 +17,11 @@ from ...structure.vector_search import (
     VectorSearchPlanStructure,
     VectorSearchReportStructure,
 )
-from ...tools import tool_handler_factory
 from ...vector_storage import VectorStorage
 from ..configuration import AgentConfiguration
 from ..utils import run_coroutine_agent_sync
 from .base import SearchPlanner, SearchToolAgent, SearchWriter
+from ..base import AgentBase

 MAX_CONCURRENT_SEARCHES = 10

@@ -32,11 +31,10 @@ class VectorAgentPlanner(SearchPlanner[VectorSearchPlanStructure]):

     Parameters
     ----------
-    prompt_dir : Path or None, default=None
-        Directory containing prompt templates. Defaults to the packaged
-        ``prompt`` directory when not provided.
-    default_model : str or None, default=None
-        Default model identifier to use when not defined in configuration.
+    template_path : Path | str | None, optional
+        Template file path for prompt rendering.
+    model : str | None, optional
+        Model identifier to use when not defined in configuration.

     Methods
     -------
@@ -46,21 +44,19 @@ class VectorAgentPlanner(SearchPlanner[VectorSearchPlanStructure]):
     Raises
     ------
     ValueError
-        If the default model is not provided.
+        If the configuration omits a model identifier.

     Examples
     --------
-    >>> planner = VectorSearchPlanner(default_model="gpt-4o-mini")
+    >>> planner = VectorSearchPlanner(model="gpt-4o-mini")
     """

-    def __init__(
-        self, prompt_dir: Optional[Path] = None, default_model: Optional[str] = None
-    ) -> None:
-        """Initialize the planner agent."""
-        prompt_directory = prompt_dir or DEFAULT_PROMPT_DIR
-        super().__init__(prompt_dir=prompt_directory, default_model=default_model)
-
-    def _configure_agent(self) -> AgentConfiguration:
+    def _configure_agent(
+        self,
+        template_path: Path | str | None = None,
+        model: str | None = None,
+        **kwargs: Any,
+    ) -> AgentConfiguration:
         """Return configuration for the vector planner agent.

         Returns
@@ -74,6 +70,8 @@ class VectorAgentPlanner(SearchPlanner[VectorSearchPlanStructure]):
             description="Plan vector searches based on a user query.",
             output_structure=VectorSearchPlanStructure,
             model_settings=ModelSettings(tool_choice="none"),
+            template_path=template_path,
+            model=model,
         )


@@ -88,12 +86,11 @@ class VectorSearchTool(

     Parameters
     ----------
-    prompt_dir : Path or None, default=None
-        Directory containing prompt templates. Defaults to the packaged
-        ``prompt`` directory when not provided.
-    default_model : str or None, default=None
-        Default model identifier to use when not defined in configuration.
-    store_name : str or None, default=None
+    template_path : Path | str | None, optional
+        Template file path for prompt rendering.
+    model : str | None, optional
+        Model identifier to use when not defined in configuration.
+    store_name : str
         Name of the vector store to query.
     max_concurrent_searches : int, default=MAX_CONCURRENT_SEARCHES
         Maximum number of concurrent vector search tasks to run.
@@ -113,37 +110,20 @@ class VectorSearchTool(
     Raises
     ------
     ValueError
-        If the default model is not provided.
+        If the configuration omits a model identifier.

     Examples
     --------
-    >>> tool = VectorSearchTool(default_model="gpt-4o-mini", store_name="my_store")
+    >>> tool = VectorSearchTool(model="gpt-4o-mini", store_name="my_store")
     """

-    def __init__(
+    def _configure_agent(
         self,
         *,
-        prompt_dir: Optional[Path] = None,
-        default_model: Optional[str] = None,
-        store_name: Optional[str] = None,
-        max_concurrent_searches: int = MAX_CONCURRENT_SEARCHES,
-        vector_storage: Optional[VectorStorage] = None,
-        vector_storage_factory: Optional[Callable[[str], VectorStorage]] = None,
-    ) -> None:
-        """Initialize the search tool agent."""
-        prompt_directory = prompt_dir or DEFAULT_PROMPT_DIR
-        self._vector_storage: Optional[VectorStorage] = None
-        self._store_name = store_name or "editorial"
-        self._vector_storage_factory = vector_storage_factory
-        if vector_storage is not None:
-            self._vector_storage = vector_storage
-        super().__init__(
-            prompt_dir=prompt_directory,
-            default_model=default_model,
-            max_concurrent_searches=max_concurrent_searches,
-        )
-
-    def _configure_agent(self) -> AgentConfiguration:
+        template_path: Path | str | None = None,
+        model: str | None = None,
+        **kwargs: Any,
+    ) -> AgentConfiguration:
         """Return configuration for the vector search tool agent.

         Returns
@@ -151,6 +131,8 @@ class VectorSearchTool(
         AgentConfiguration
             Configuration with name, description, and input type.
         """
+        if self._store_name is None:
+            raise ValueError("store_name must be provided to configure the agent.")
         return AgentConfiguration(
             name="vector_search",
             instructions="Agent instructions",
@@ -158,6 +140,28 @@ class VectorSearchTool(
             input_structure=VectorSearchPlanStructure,
             output_structure=VectorSearchItemResultsStructure,
             model_settings=ModelSettings(tool_choice="none"),
+            template_path=template_path,
+            model=model,
+        )
+
+    def __init__(
+        self,
+        *,
+        store_name: str,
+        template_path: Path | str | None = None,
+        model: str | None = None,
+        max_concurrent_searches: int = MAX_CONCURRENT_SEARCHES,
+        vector_storage: Optional[VectorStorage] = None,
+        vector_storage_factory: Optional[Callable[[str], VectorStorage]] = None,
+    ) -> None:
+        """Initialize the vector search tool agent."""
+        self._vector_storage = vector_storage
+        self._vector_storage_factory = vector_storage_factory
+        self._store_name = store_name
+        super().__init__(
+            template_path=template_path,
+            model=model,
+            max_concurrent_searches=max_concurrent_searches,
         )

     def _get_vector_storage(self) -> VectorStorage:
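
Note on the VectorSearchTool hunks above: store_name is now a required keyword (the old fallback to "editorial" is gone), and template_path/model flow into the configuration built by _configure_agent. A call sketch based on the new __init__ shown above:

    from openai_sdk_helpers.agent.search.vector import VectorSearchTool

    tool = VectorSearchTool(
        store_name="my_store",  # required; no default store any more
        model="gpt-4o-mini",
        max_concurrent_searches=5,
    )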
@@ -209,11 +213,10 @@ class VectorSearchWriter(SearchWriter[VectorSearchReportStructure]):

     Parameters
     ----------
-    prompt_dir : Path or None, default=None
-        Directory containing prompt templates. Defaults to the packaged
-        ``prompt`` directory when not provided.
-    default_model : str or None, default=None
-        Default model identifier to use when not defined in configuration.
+    template_path : Path | str | None, optional
+        Template file path for prompt rendering.
+    model : str | None, optional
+        Model identifier to use when not defined in configuration.

     Methods
     -------
@@ -223,21 +226,32 @@ class VectorSearchWriter(SearchWriter[VectorSearchReportStructure]):
     Raises
     ------
     ValueError
-        If the default model is not provided.
+        If the configuration omits a model identifier.

     Examples
     --------
-    >>> writer = VectorSearchWriter(default_model="gpt-4o-mini")
+    >>> writer = VectorSearchWriter(model="gpt-4o-mini")
     """

     def __init__(
-        self, prompt_dir: Optional[Path] = None, default_model: Optional[str] = None
+        self,
+        template_path: Path | str | None = None,
+        model: str | None = None,
+        **kwargs: Any,
     ) -> None:
         """Initialize the writer agent."""
-        prompt_directory = prompt_dir or DEFAULT_PROMPT_DIR
-        super().__init__(prompt_dir=prompt_directory, default_model=default_model)
+        configuration = self._configure_agent(
+            template_path=template_path, model=model, **kwargs
+        )
+        super().__init__(configuration=configuration)

-    def _configure_agent(self) -> AgentConfiguration:
+    def _configure_agent(
+        self,
+        *,
+        template_path: Path | str | None = None,
+        model: str | None = None,
+        **kwargs: Any,
+    ) -> AgentConfiguration:
         """Return configuration for the vector writer agent.

         Returns
@@ -251,10 +265,12 @@ class VectorSearchWriter(SearchWriter[VectorSearchReportStructure]):
             description="Write a report based on search results.",
             output_structure=VectorSearchReportStructure,
             model_settings=ModelSettings(tool_choice="none"),
+            template_path=template_path,
+            model=model,
         )


-class VectorAgentSearch:
+class VectorAgentSearch(AgentBase):
     """Manage the complete vector search workflow.

     This high-level agent orchestrates a multi-step research process that plans
@@ -316,7 +332,7 @@ class VectorAgentSearch:
     Raises
     ------
     ValueError
-        If the default model is not provided.
+        If the model identifier is not provided.
     """

     def __init__(
@@ -353,18 +369,16 @@ class VectorAgentSearch:
         trace_id = gen_trace_id()
         with trace("VectorSearch trace", trace_id=trace_id):
             planner = VectorAgentPlanner(
-                prompt_dir=self._prompt_dir, default_model=self._default_model
+                template_path=self._prompt_dir, model=self._default_model
             )
             tool = VectorSearchTool(
-                prompt_dir=self._prompt_dir,
-                default_model=self._default_model,
-                store_name=self._vector_store_name,
+                template_path=self._prompt_dir,
+                model=self._default_model,
                 max_concurrent_searches=self._max_concurrent_searches,
-                vector_storage=self._vector_storage,
-                vector_storage_factory=self._vector_storage_factory,
+                store_name=self._vector_store_name,
             )
             writer = VectorSearchWriter(
-                prompt_dir=self._prompt_dir, default_model=self._default_model
+                template_path=self._prompt_dir, model=self._default_model
             )
             with custom_span("vector_search.plan"):
                 search_plan = await planner.run_agent(query=search_query)
@@ -399,48 +413,6 @@ class VectorAgentSearch:
         """
         return run_coroutine_agent_sync(self.run_agent(search_query))

-    def as_response_tool(
-        self,
-        *,
-        tool_name: str = "vector_search",
-        tool_description: str = "Run the vector search workflow.",
-    ) -> tuple[dict[str, Callable[..., Any]], dict[str, Any]]:
-        """Return a Responses API tool handler and definition.
-
-        Parameters
-        ----------
-        vector_store_name : str
-            Name of the vector store to use for the response tool.
-        tool_name : str, default="vector_search"
-            Name to use for the response tool.
-        tool_description : str, default="Run the vector search workflow."
-            Description for the response tool.
-
-        Returns
-        -------
-        tuple[dict[str, Callable[..., Any]], dict[str, Any]]
-            Tool handler mapping and tool definition for Responses API usage.
-        """
-        search = VectorAgentSearch(
-            prompt_dir=self._prompt_dir,
-            default_model=self._default_model,
-            vector_store_name=self._vector_store_name,
-            max_concurrent_searches=self._max_concurrent_searches,
-            vector_storage=self._vector_storage,
-            vector_storage_factory=self._vector_storage_factory,
-        )
-
-        def _run_search(prompt: str) -> VectorSearchStructure:
-            return run_coroutine_agent_sync(search.run_agent(search_query=prompt))
-
-        tool_handler = {
-            tool_name: tool_handler_factory(_run_search, input_model=PromptStructure)
-        }
-        tool_definition = PromptStructure.response_tool_definition(
-            tool_name, tool_description=tool_description
-        )
-        return tool_handler, tool_definition
-

 __all__ = [
     "MAX_CONCURRENT_SEARCHES",