qtype 0.0.16-py3-none-any.whl → 0.1.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- qtype/application/commons/tools.py +1 -1
- qtype/application/converters/tools_from_api.py +5 -5
- qtype/application/converters/tools_from_module.py +2 -2
- qtype/application/converters/types.py +14 -43
- qtype/application/documentation.py +1 -1
- qtype/application/facade.py +94 -73
- qtype/base/types.py +227 -7
- qtype/cli.py +4 -0
- qtype/commands/convert.py +20 -8
- qtype/commands/generate.py +19 -27
- qtype/commands/run.py +73 -36
- qtype/commands/serve.py +74 -54
- qtype/commands/validate.py +34 -8
- qtype/commands/visualize.py +46 -22
- qtype/dsl/__init__.py +6 -5
- qtype/dsl/custom_types.py +1 -1
- qtype/dsl/domain_types.py +65 -5
- qtype/dsl/linker.py +384 -0
- qtype/dsl/loader.py +315 -0
- qtype/dsl/model.py +612 -363
- qtype/dsl/parser.py +200 -0
- qtype/dsl/types.py +50 -0
- qtype/interpreter/api.py +57 -136
- qtype/interpreter/auth/aws.py +19 -9
- qtype/interpreter/auth/generic.py +93 -16
- qtype/interpreter/base/base_step_executor.py +436 -0
- qtype/interpreter/base/batch_step_executor.py +171 -0
- qtype/interpreter/base/exceptions.py +50 -0
- qtype/interpreter/base/executor_context.py +74 -0
- qtype/interpreter/base/factory.py +117 -0
- qtype/interpreter/base/progress_tracker.py +110 -0
- qtype/interpreter/base/secrets.py +339 -0
- qtype/interpreter/base/step_cache.py +74 -0
- qtype/interpreter/base/stream_emitter.py +469 -0
- qtype/interpreter/conversions.py +462 -22
- qtype/interpreter/converters.py +77 -0
- qtype/interpreter/endpoints.py +355 -0
- qtype/interpreter/executors/agent_executor.py +242 -0
- qtype/interpreter/executors/aggregate_executor.py +93 -0
- qtype/interpreter/executors/decoder_executor.py +163 -0
- qtype/interpreter/executors/doc_to_text_executor.py +112 -0
- qtype/interpreter/executors/document_embedder_executor.py +107 -0
- qtype/interpreter/executors/document_search_executor.py +122 -0
- qtype/interpreter/executors/document_source_executor.py +118 -0
- qtype/interpreter/executors/document_splitter_executor.py +105 -0
- qtype/interpreter/executors/echo_executor.py +63 -0
- qtype/interpreter/executors/field_extractor_executor.py +160 -0
- qtype/interpreter/executors/file_source_executor.py +101 -0
- qtype/interpreter/executors/file_writer_executor.py +110 -0
- qtype/interpreter/executors/index_upsert_executor.py +228 -0
- qtype/interpreter/executors/invoke_embedding_executor.py +92 -0
- qtype/interpreter/executors/invoke_flow_executor.py +51 -0
- qtype/interpreter/executors/invoke_tool_executor.py +358 -0
- qtype/interpreter/executors/llm_inference_executor.py +272 -0
- qtype/interpreter/executors/prompt_template_executor.py +78 -0
- qtype/interpreter/executors/sql_source_executor.py +106 -0
- qtype/interpreter/executors/vector_search_executor.py +91 -0
- qtype/interpreter/flow.py +159 -22
- qtype/interpreter/metadata_api.py +115 -0
- qtype/interpreter/resource_cache.py +5 -4
- qtype/interpreter/rich_progress.py +225 -0
- qtype/interpreter/stream/chat/__init__.py +15 -0
- qtype/interpreter/stream/chat/converter.py +391 -0
- qtype/interpreter/{chat → stream/chat}/file_conversions.py +2 -2
- qtype/interpreter/stream/chat/ui_request_to_domain_type.py +140 -0
- qtype/interpreter/stream/chat/vercel.py +609 -0
- qtype/interpreter/stream/utils/__init__.py +15 -0
- qtype/interpreter/stream/utils/build_vercel_ai_formatter.py +74 -0
- qtype/interpreter/stream/utils/callback_to_stream.py +66 -0
- qtype/interpreter/stream/utils/create_streaming_response.py +18 -0
- qtype/interpreter/stream/utils/default_chat_extract_text.py +20 -0
- qtype/interpreter/stream/utils/error_streaming_response.py +20 -0
- qtype/interpreter/telemetry.py +135 -8
- qtype/interpreter/tools/__init__.py +5 -0
- qtype/interpreter/tools/function_tool_helper.py +265 -0
- qtype/interpreter/types.py +330 -0
- qtype/interpreter/typing.py +83 -89
- qtype/interpreter/ui/404/index.html +1 -1
- qtype/interpreter/ui/404.html +1 -1
- qtype/interpreter/ui/_next/static/{nUaw6_IwRwPqkzwe5s725 → 20HoJN6otZ_LyHLHpCPE6}/_buildManifest.js +1 -1
- qtype/interpreter/ui/_next/static/chunks/{393-8fd474427f8e19ce.js → 434-b2112d19f25c44ff.js} +3 -3
- qtype/interpreter/ui/_next/static/chunks/app/page-8c67d16ac90d23cb.js +1 -0
- qtype/interpreter/ui/_next/static/chunks/ba12c10f-546f2714ff8abc66.js +1 -0
- qtype/interpreter/ui/_next/static/css/8a8d1269e362fef7.css +3 -0
- qtype/interpreter/ui/icon.png +0 -0
- qtype/interpreter/ui/index.html +1 -1
- qtype/interpreter/ui/index.txt +4 -4
- qtype/semantic/checker.py +583 -0
- qtype/semantic/generate.py +262 -83
- qtype/semantic/loader.py +95 -0
- qtype/semantic/model.py +436 -159
- qtype/semantic/resolver.py +63 -19
- qtype/semantic/visualize.py +28 -31
- {qtype-0.0.16.dist-info → qtype-0.1.1.dist-info}/METADATA +16 -3
- qtype-0.1.1.dist-info/RECORD +135 -0
- qtype/dsl/base_types.py +0 -38
- qtype/dsl/validator.py +0 -465
- qtype/interpreter/batch/__init__.py +0 -0
- qtype/interpreter/batch/file_sink_source.py +0 -162
- qtype/interpreter/batch/flow.py +0 -95
- qtype/interpreter/batch/sql_source.py +0 -92
- qtype/interpreter/batch/step.py +0 -74
- qtype/interpreter/batch/types.py +0 -41
- qtype/interpreter/batch/utils.py +0 -178
- qtype/interpreter/chat/chat_api.py +0 -237
- qtype/interpreter/chat/vercel.py +0 -314
- qtype/interpreter/exceptions.py +0 -10
- qtype/interpreter/step.py +0 -67
- qtype/interpreter/steps/__init__.py +0 -0
- qtype/interpreter/steps/agent.py +0 -114
- qtype/interpreter/steps/condition.py +0 -36
- qtype/interpreter/steps/decoder.py +0 -88
- qtype/interpreter/steps/llm_inference.py +0 -171
- qtype/interpreter/steps/prompt_template.py +0 -54
- qtype/interpreter/steps/search.py +0 -24
- qtype/interpreter/steps/tool.py +0 -219
- qtype/interpreter/streaming_helpers.py +0 -123
- qtype/interpreter/ui/_next/static/chunks/app/page-7e26b6156cfb55d3.js +0 -1
- qtype/interpreter/ui/_next/static/chunks/ba12c10f-22556063851a6df2.js +0 -1
- qtype/interpreter/ui/_next/static/css/b40532b0db09cce3.css +0 -3
- qtype/interpreter/ui/favicon.ico +0 -0
- qtype/loader.py +0 -390
- qtype-0.0.16.dist-info/RECORD +0 -106
- /qtype/interpreter/ui/_next/static/{nUaw6_IwRwPqkzwe5s725 → 20HoJN6otZ_LyHLHpCPE6}/_ssgManifest.js +0 -0
- {qtype-0.0.16.dist-info → qtype-0.1.1.dist-info}/WHEEL +0 -0
- {qtype-0.0.16.dist-info → qtype-0.1.1.dist-info}/entry_points.txt +0 -0
- {qtype-0.0.16.dist-info → qtype-0.1.1.dist-info}/licenses/LICENSE +0 -0
- {qtype-0.0.16.dist-info → qtype-0.1.1.dist-info}/top_level.txt +0 -0
qtype/interpreter/base/stream_emitter.py
@@ -0,0 +1,469 @@
+"""
+Context managers for emitting streaming events during step execution.
+
+This module provides a clean, idiomatic Python API for executors to emit
+streaming events without directly handling the StreamEvent types.
+
+Usage Example:
+    ```python
+    class MyExecutor(StepExecutor):
+        async def process_message(self, message: FlowMessage):
+            emitter = self.stream_emitter
+
+            # Status update
+            await emitter.status("Processing started...")
+
+            # Text streaming
+            async with emitter.text_stream("response-1") as streamer:
+                async for chunk in some_generator():
+                    await streamer.delta(chunk)
+
+            # Tool execution
+            async with emitter.tool_execution(
+                tool_call_id="tool-1",
+                tool_name="search",
+                tool_input={"query": "test"}
+            ) as tool_ctx:
+                result = await execute_tool()
+                await tool_ctx.complete(result)
+
+            yield message
+    ```
+"""
+
+from __future__ import annotations
+
+from typing import Any
+
+from qtype.interpreter.types import (
+    ErrorEvent,
+    ReasoningStreamDeltaEvent,
+    ReasoningStreamEndEvent,
+    ReasoningStreamStartEvent,
+    StatusEvent,
+    StepEndEvent,
+    StepStartEvent,
+    StreamingCallback,
+    TextStreamDeltaEvent,
+    TextStreamEndEvent,
+    TextStreamStartEvent,
+    ToolExecutionEndEvent,
+    ToolExecutionErrorEvent,
+    ToolExecutionStartEvent,
+)
+from qtype.semantic.model import Step
+
+
+class TextStreamContext:
+    """
+    Async context manager for text streaming.
+
+    Automatically emits TextStreamStartEvent on entry and TextStreamEndEvent
+    on exit. Provides delta() method for emitting text chunks.
+
+    Example:
+        ```python
+        async with emitter.text_stream("llm-response") as streamer:
+            async for chunk in llm_client.stream():
+                await streamer.delta(chunk.text)
+        ```
+    """
+
+    def __init__(
+        self,
+        step: Step,
+        stream_id: str,
+        on_stream_event: StreamingCallback | None,
+    ):
+        self.step = step
+        self.stream_id = stream_id
+        self.on_stream_event = on_stream_event
+
+    async def __aenter__(self) -> TextStreamContext:
+        """Emit TextStreamStartEvent when entering context."""
+        if self.on_stream_event:
+            await self.on_stream_event(
+                TextStreamStartEvent(step=self.step, stream_id=self.stream_id)
+            )
+        return self
+
+    async def __aexit__(
+        self,
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: Any,
+    ) -> bool:
+        """Emit TextStreamEndEvent when exiting context."""
+        if self.on_stream_event:
+            await self.on_stream_event(
+                TextStreamEndEvent(step=self.step, stream_id=self.stream_id)
+            )
+        return False
+
+    async def delta(self, text: str) -> None:
+        """
+        Emit a text delta chunk.
+
+        Args:
+            text: The incremental text content to append to the stream
+        """
+        if self.on_stream_event:
+            await self.on_stream_event(
+                TextStreamDeltaEvent(
+                    step=self.step,
+                    stream_id=self.stream_id,
+                    delta=text,
+                )
+            )
+
+
+class ReasoningStreamContext:
+    """
+    Async context manager for reasoning streaming.
+
+    Automatically emits ReasoningStreamStartEvent on entry and
+    ReasoningStreamEndEvent on exit. Provides delta() method for emitting
+    reasoning chunks.
+
+    Example:
+        ```python
+        async with emitter.reasoning_stream("agent-reasoning") as streamer:
+            async for chunk in agent.stream_reasoning():
+                await streamer.delta(chunk.text)
+        ```
+    """
+
+    def __init__(
+        self,
+        step: Step,
+        stream_id: str,
+        on_stream_event: StreamingCallback | None,
+    ):
+        self.step = step
+        self.stream_id = stream_id
+        self.on_stream_event = on_stream_event
+
+    async def __aenter__(self) -> ReasoningStreamContext:
+        """Emit ReasoningStreamStartEvent when entering context."""
+        if self.on_stream_event:
+            await self.on_stream_event(
+                ReasoningStreamStartEvent(
+                    step=self.step, stream_id=self.stream_id
+                )
+            )
+        return self
+
+    async def __aexit__(
+        self,
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: Any,
+    ) -> bool:
+        """Emit ReasoningStreamEndEvent when exiting context."""
+        if self.on_stream_event:
+            await self.on_stream_event(
+                ReasoningStreamEndEvent(
+                    step=self.step, stream_id=self.stream_id
+                )
+            )
+        return False
+
+    async def delta(self, text: str) -> None:
+        """
+        Emit a reasoning delta chunk.
+
+        Args:
+            text: The incremental reasoning content to append to the stream
+        """
+        if self.on_stream_event:
+            await self.on_stream_event(
+                ReasoningStreamDeltaEvent(
+                    step=self.step,
+                    stream_id=self.stream_id,
+                    delta=text,
+                )
+            )
+
+
+class StepBoundaryContext:
+    """
+    Async context manager for step boundaries.
+
+    Automatically emits StepStartEvent on entry and StepEndEvent on exit.
+    Use this to group related events together visually in the UI.
+
+    Example:
+        ```python
+        async with emitter.step_boundary():
+            await emitter.status("Step 1: Loading data...")
+            # ... do work ...
+            await emitter.status("Step 1: Complete")
+        ```
+    """
+
+    def __init__(
+        self,
+        step: Step,
+        on_stream_event: StreamingCallback | None,
+    ):
+        self.step = step
+        self.on_stream_event = on_stream_event
+
+    async def __aenter__(self) -> StepBoundaryContext:
+        """Emit StepStartEvent when entering context."""
+        if self.on_stream_event:
+            await self.on_stream_event(StepStartEvent(step=self.step))
+        return self
+
+    async def __aexit__(
+        self,
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: Any,
+    ) -> bool:
+        """Emit StepEndEvent when exiting context."""
+        if self.on_stream_event:
+            await self.on_stream_event(StepEndEvent(step=self.step))
+        return False
+
+
+class ToolExecutionContext:
+    """
+    Async context manager for tool execution.
+
+    Automatically emits ToolExecutionStartEvent on entry. On exit, if an
+    exception occurred, emits ToolExecutionErrorEvent. Otherwise, you must
+    call complete() or error() explicitly.
+
+    Example:
+        ```python
+        async with emitter.tool_execution(
+            tool_call_id="tool-1",
+            tool_name="search",
+            tool_input={"query": "test"}
+        ) as tool_ctx:
+            result = await execute_tool()
+            await tool_ctx.complete(result)
+        ```
+    """
+
+    def __init__(
+        self,
+        step: Step,
+        tool_call_id: str,
+        tool_name: str,
+        tool_input: dict[str, Any],
+        on_stream_event: StreamingCallback | None,
+    ):
+        self.step = step
+        self.tool_call_id = tool_call_id
+        self.tool_name = tool_name
+        self.tool_input = tool_input
+        self.on_stream_event = on_stream_event
+        self._completed = False
+
+    async def __aenter__(self) -> ToolExecutionContext:
+        """Emit ToolExecutionStartEvent when entering context."""
+        if self.on_stream_event:
+            await self.on_stream_event(
+                ToolExecutionStartEvent(
+                    step=self.step,
+                    tool_call_id=self.tool_call_id,
+                    tool_name=self.tool_name,
+                    tool_input=self.tool_input,
+                )
+            )
+        return self
+
+    async def __aexit__(
+        self,
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: Any,
+    ) -> bool:
+        """
+        Emit ToolExecutionErrorEvent if exception occurred.
+
+        If no exception and complete()/error() wasn't called, this is a
+        programming error but we don't raise to avoid masking other issues.
+        """
+        if exc_type is not None and self.on_stream_event:
+            await self.on_stream_event(
+                ToolExecutionErrorEvent(
+                    step=self.step,
+                    tool_call_id=self.tool_call_id,
+                    error_message=str(exc_val),
+                )
+            )
+            self._completed = True
+        return False
+
+    async def complete(self, output: Any) -> None:
+        """
+        Mark tool execution as complete with successful output.
+
+        Args:
+            output: The result returned by the tool
+        """
+        if self._completed:
+            return
+        if self.on_stream_event:
+            await self.on_stream_event(
+                ToolExecutionEndEvent(
+                    step=self.step,
+                    tool_call_id=self.tool_call_id,
+                    tool_output=output,
+                )
+            )
+        self._completed = True
+
+    async def error(self, error_message: str) -> None:
+        """
+        Mark tool execution as failed.
+
+        Args:
+            error_message: Description of the error that occurred
+        """
+        if self._completed:
+            return
+        if self.on_stream_event:
+            await self.on_stream_event(
+                ToolExecutionErrorEvent(
+                    step=self.step,
+                    tool_call_id=self.tool_call_id,
+                    error_message=error_message,
+                )
+            )
+        self._completed = True
+
+
+class StreamEmitter:
+    """
+    Factory for creating streaming context managers.
+
+    This class is instantiated once per StepExecutor and provides factory
+    methods for creating context managers and convenience methods for
+    one-shot events.
+
+    The executor can access this via self.stream_emitter.
+
+    Example:
+        ```python
+        class MyExecutor(StepExecutor):
+            async def process_message(self, message: FlowMessage):
+                # One-shot status
+                await self.stream_emitter.status("Processing...")
+
+                # Text streaming
+                async with self.stream_emitter.text_stream("id") as s:
+                    await s.delta("Hello")
+
+                # Step boundary
+                async with self.stream_emitter.step_boundary():
+                    await self.stream_emitter.status("Step content")
+
+                # Tool execution
+                async with self.stream_emitter.tool_execution(
+                    "tool-1", "search", {"q": "test"}
+                ) as tool:
+                    result = await run_tool()
+                    await tool.complete(result)
+
+                yield message
+        ```
+    """
+
+    def __init__(
+        self,
+        step: Step,
+        on_stream_event: StreamingCallback | None,
+    ):
+        self.step = step
+        self.on_stream_event = on_stream_event
+
+    def text_stream(self, stream_id: str) -> TextStreamContext:
+        """
+        Create a context manager for text streaming.
+
+        Args:
+            stream_id: Unique identifier for this text stream
+
+        Returns:
+            Context manager that emits start/delta/end events
+        """
+        return TextStreamContext(self.step, stream_id, self.on_stream_event)
+
+    def reasoning_stream(self, stream_id: str) -> ReasoningStreamContext:
+        """
+        Create a context manager for reasoning streaming.
+
+        Args:
+            stream_id: Unique identifier for this reasoning stream
+
+        Returns:
+            Context manager that emits start/delta/end events for reasoning
+        """
+        return ReasoningStreamContext(
+            self.step, stream_id, self.on_stream_event
+        )
+
+    def step_boundary(self) -> StepBoundaryContext:
+        """
+        Create a context manager for step boundaries.
+
+        Returns:
+            Context manager that emits step start/end events
+        """
+        return StepBoundaryContext(self.step, self.on_stream_event)
+
+    def tool_execution(
+        self,
+        tool_call_id: str,
+        tool_name: str,
+        tool_input: dict[str, Any],
+    ) -> ToolExecutionContext:
+        """
+        Create a context manager for tool execution.
+
+        Args:
+            tool_call_id: Unique identifier for this tool call
+            tool_name: Name of the tool being executed
+            tool_input: Input parameters for the tool
+
+        Returns:
+            Context manager that emits tool execution events
+        """
+        return ToolExecutionContext(
+            self.step,
+            tool_call_id,
+            tool_name,
+            tool_input,
+            self.on_stream_event,
+        )
+
+    async def status(self, message: str) -> None:
+        """
+        Emit a complete status message.
+
+        This is a convenience method for simple status updates that don't
+        require streaming.
+
+        Args:
+            message: The status message to display
+        """
+        if self.on_stream_event:
+            await self.on_stream_event(
+                StatusEvent(step=self.step, message=message)
+            )
+
+    async def error(self, error_message: str) -> None:
+        """
+        Emit an error event.
+
+        Args:
+            error_message: Description of the error
+        """
+        if self.on_stream_event:
+            await self.on_stream_event(
+                ErrorEvent(step=self.step, error_message=error_message)
+            )