dv-pipecat-ai 0.0.82.dev857__py3-none-any.whl → 0.0.85.dev837__py3-none-any.whl
This diff covers the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
Note: this version of dv-pipecat-ai has been flagged as a potentially problematic release.
- {dv_pipecat_ai-0.0.82.dev857.dist-info → dv_pipecat_ai-0.0.85.dev837.dist-info}/METADATA +98 -130
- {dv_pipecat_ai-0.0.82.dev857.dist-info → dv_pipecat_ai-0.0.85.dev837.dist-info}/RECORD +192 -140
- pipecat/adapters/base_llm_adapter.py +38 -1
- pipecat/adapters/services/anthropic_adapter.py +9 -14
- pipecat/adapters/services/aws_nova_sonic_adapter.py +120 -5
- pipecat/adapters/services/bedrock_adapter.py +236 -13
- pipecat/adapters/services/gemini_adapter.py +12 -8
- pipecat/adapters/services/open_ai_adapter.py +19 -7
- pipecat/adapters/services/open_ai_realtime_adapter.py +5 -0
- pipecat/audio/dtmf/dtmf-0.wav +0 -0
- pipecat/audio/dtmf/dtmf-1.wav +0 -0
- pipecat/audio/dtmf/dtmf-2.wav +0 -0
- pipecat/audio/dtmf/dtmf-3.wav +0 -0
- pipecat/audio/dtmf/dtmf-4.wav +0 -0
- pipecat/audio/dtmf/dtmf-5.wav +0 -0
- pipecat/audio/dtmf/dtmf-6.wav +0 -0
- pipecat/audio/dtmf/dtmf-7.wav +0 -0
- pipecat/audio/dtmf/dtmf-8.wav +0 -0
- pipecat/audio/dtmf/dtmf-9.wav +0 -0
- pipecat/audio/dtmf/dtmf-pound.wav +0 -0
- pipecat/audio/dtmf/dtmf-star.wav +0 -0
- pipecat/audio/filters/krisp_viva_filter.py +193 -0
- pipecat/audio/filters/noisereduce_filter.py +15 -0
- pipecat/audio/turn/base_turn_analyzer.py +9 -1
- pipecat/audio/turn/smart_turn/base_smart_turn.py +14 -8
- pipecat/audio/turn/smart_turn/data/__init__.py +0 -0
- pipecat/audio/turn/smart_turn/data/smart-turn-v3.0.onnx +0 -0
- pipecat/audio/turn/smart_turn/http_smart_turn.py +6 -2
- pipecat/audio/turn/smart_turn/local_smart_turn.py +1 -1
- pipecat/audio/turn/smart_turn/local_smart_turn_v2.py +1 -1
- pipecat/audio/turn/smart_turn/local_smart_turn_v3.py +124 -0
- pipecat/audio/vad/data/README.md +10 -0
- pipecat/audio/vad/data/silero_vad_v2.onnx +0 -0
- pipecat/audio/vad/silero.py +9 -3
- pipecat/audio/vad/vad_analyzer.py +13 -1
- pipecat/extensions/voicemail/voicemail_detector.py +5 -5
- pipecat/frames/frames.py +277 -86
- pipecat/observers/loggers/debug_log_observer.py +3 -3
- pipecat/observers/loggers/llm_log_observer.py +7 -3
- pipecat/observers/loggers/user_bot_latency_log_observer.py +22 -10
- pipecat/pipeline/runner.py +18 -6
- pipecat/pipeline/service_switcher.py +64 -36
- pipecat/pipeline/task.py +125 -79
- pipecat/pipeline/tts_switcher.py +30 -0
- pipecat/processors/aggregators/dtmf_aggregator.py +2 -3
- pipecat/processors/aggregators/{gated_openai_llm_context.py → gated_llm_context.py} +9 -9
- pipecat/processors/aggregators/gated_open_ai_llm_context.py +12 -0
- pipecat/processors/aggregators/llm_context.py +40 -2
- pipecat/processors/aggregators/llm_response.py +32 -15
- pipecat/processors/aggregators/llm_response_universal.py +19 -15
- pipecat/processors/aggregators/user_response.py +6 -6
- pipecat/processors/aggregators/vision_image_frame.py +24 -2
- pipecat/processors/audio/audio_buffer_processor.py +43 -8
- pipecat/processors/dtmf_aggregator.py +174 -77
- pipecat/processors/filters/stt_mute_filter.py +17 -0
- pipecat/processors/frame_processor.py +110 -24
- pipecat/processors/frameworks/langchain.py +8 -2
- pipecat/processors/frameworks/rtvi.py +210 -68
- pipecat/processors/frameworks/strands_agents.py +170 -0
- pipecat/processors/logger.py +2 -2
- pipecat/processors/transcript_processor.py +26 -5
- pipecat/processors/user_idle_processor.py +35 -11
- pipecat/runner/daily.py +59 -20
- pipecat/runner/run.py +395 -93
- pipecat/runner/types.py +6 -4
- pipecat/runner/utils.py +51 -10
- pipecat/serializers/__init__.py +5 -1
- pipecat/serializers/asterisk.py +16 -2
- pipecat/serializers/convox.py +41 -4
- pipecat/serializers/custom.py +257 -0
- pipecat/serializers/exotel.py +5 -5
- pipecat/serializers/livekit.py +20 -0
- pipecat/serializers/plivo.py +5 -5
- pipecat/serializers/protobuf.py +6 -5
- pipecat/serializers/telnyx.py +2 -2
- pipecat/serializers/twilio.py +43 -23
- pipecat/serializers/vi.py +324 -0
- pipecat/services/ai_service.py +2 -6
- pipecat/services/anthropic/llm.py +2 -25
- pipecat/services/assemblyai/models.py +6 -0
- pipecat/services/assemblyai/stt.py +13 -5
- pipecat/services/asyncai/tts.py +5 -3
- pipecat/services/aws/__init__.py +1 -0
- pipecat/services/aws/llm.py +147 -105
- pipecat/services/aws/nova_sonic/__init__.py +0 -0
- pipecat/services/aws/nova_sonic/context.py +436 -0
- pipecat/services/aws/nova_sonic/frames.py +25 -0
- pipecat/services/aws/nova_sonic/llm.py +1265 -0
- pipecat/services/aws/stt.py +3 -3
- pipecat/services/aws_nova_sonic/__init__.py +19 -1
- pipecat/services/aws_nova_sonic/aws.py +11 -1151
- pipecat/services/aws_nova_sonic/context.py +8 -354
- pipecat/services/aws_nova_sonic/frames.py +13 -17
- pipecat/services/azure/llm.py +51 -1
- pipecat/services/azure/realtime/__init__.py +0 -0
- pipecat/services/azure/realtime/llm.py +65 -0
- pipecat/services/azure/stt.py +15 -0
- pipecat/services/cartesia/stt.py +77 -70
- pipecat/services/cartesia/tts.py +80 -13
- pipecat/services/deepgram/__init__.py +1 -0
- pipecat/services/deepgram/flux/__init__.py +0 -0
- pipecat/services/deepgram/flux/stt.py +640 -0
- pipecat/services/elevenlabs/__init__.py +4 -1
- pipecat/services/elevenlabs/stt.py +339 -0
- pipecat/services/elevenlabs/tts.py +87 -46
- pipecat/services/fish/tts.py +5 -2
- pipecat/services/gemini_multimodal_live/events.py +38 -524
- pipecat/services/gemini_multimodal_live/file_api.py +23 -173
- pipecat/services/gemini_multimodal_live/gemini.py +41 -1403
- pipecat/services/gladia/stt.py +56 -72
- pipecat/services/google/__init__.py +1 -0
- pipecat/services/google/gemini_live/__init__.py +3 -0
- pipecat/services/google/gemini_live/file_api.py +189 -0
- pipecat/services/google/gemini_live/llm.py +1582 -0
- pipecat/services/google/gemini_live/llm_vertex.py +184 -0
- pipecat/services/google/llm.py +15 -11
- pipecat/services/google/llm_openai.py +3 -3
- pipecat/services/google/llm_vertex.py +86 -16
- pipecat/services/google/stt.py +4 -0
- pipecat/services/google/tts.py +7 -3
- pipecat/services/heygen/api.py +2 -0
- pipecat/services/heygen/client.py +8 -4
- pipecat/services/heygen/video.py +2 -0
- pipecat/services/hume/__init__.py +5 -0
- pipecat/services/hume/tts.py +220 -0
- pipecat/services/inworld/tts.py +6 -6
- pipecat/services/llm_service.py +15 -5
- pipecat/services/lmnt/tts.py +4 -2
- pipecat/services/mcp_service.py +4 -2
- pipecat/services/mem0/memory.py +6 -5
- pipecat/services/mistral/llm.py +29 -8
- pipecat/services/moondream/vision.py +42 -16
- pipecat/services/neuphonic/tts.py +5 -2
- pipecat/services/openai/__init__.py +1 -0
- pipecat/services/openai/base_llm.py +27 -20
- pipecat/services/openai/realtime/__init__.py +0 -0
- pipecat/services/openai/realtime/context.py +272 -0
- pipecat/services/openai/realtime/events.py +1106 -0
- pipecat/services/openai/realtime/frames.py +37 -0
- pipecat/services/openai/realtime/llm.py +829 -0
- pipecat/services/openai/tts.py +49 -10
- pipecat/services/openai_realtime/__init__.py +27 -0
- pipecat/services/openai_realtime/azure.py +21 -0
- pipecat/services/openai_realtime/context.py +21 -0
- pipecat/services/openai_realtime/events.py +21 -0
- pipecat/services/openai_realtime/frames.py +21 -0
- pipecat/services/openai_realtime_beta/azure.py +16 -0
- pipecat/services/openai_realtime_beta/openai.py +17 -5
- pipecat/services/piper/tts.py +7 -9
- pipecat/services/playht/tts.py +34 -4
- pipecat/services/rime/tts.py +12 -12
- pipecat/services/riva/stt.py +3 -1
- pipecat/services/salesforce/__init__.py +9 -0
- pipecat/services/salesforce/llm.py +700 -0
- pipecat/services/sarvam/__init__.py +7 -0
- pipecat/services/sarvam/stt.py +540 -0
- pipecat/services/sarvam/tts.py +97 -13
- pipecat/services/simli/video.py +2 -2
- pipecat/services/speechmatics/stt.py +22 -10
- pipecat/services/stt_service.py +47 -0
- pipecat/services/tavus/video.py +2 -2
- pipecat/services/tts_service.py +75 -22
- pipecat/services/vision_service.py +7 -6
- pipecat/services/vistaar/llm.py +51 -9
- pipecat/tests/utils.py +4 -4
- pipecat/transcriptions/language.py +41 -1
- pipecat/transports/base_input.py +13 -34
- pipecat/transports/base_output.py +140 -104
- pipecat/transports/daily/transport.py +199 -26
- pipecat/transports/heygen/__init__.py +0 -0
- pipecat/transports/heygen/transport.py +381 -0
- pipecat/transports/livekit/transport.py +228 -63
- pipecat/transports/local/audio.py +6 -1
- pipecat/transports/local/tk.py +11 -2
- pipecat/transports/network/fastapi_websocket.py +1 -1
- pipecat/transports/smallwebrtc/connection.py +103 -19
- pipecat/transports/smallwebrtc/request_handler.py +246 -0
- pipecat/transports/smallwebrtc/transport.py +65 -23
- pipecat/transports/tavus/transport.py +23 -12
- pipecat/transports/websocket/client.py +41 -5
- pipecat/transports/websocket/fastapi.py +21 -11
- pipecat/transports/websocket/server.py +14 -7
- pipecat/transports/whatsapp/api.py +8 -0
- pipecat/transports/whatsapp/client.py +47 -0
- pipecat/utils/base_object.py +54 -22
- pipecat/utils/redis.py +58 -0
- pipecat/utils/string.py +13 -1
- pipecat/utils/tracing/service_decorators.py +21 -21
- pipecat/serializers/genesys.py +0 -95
- pipecat/services/google/test-google-chirp.py +0 -45
- pipecat/services/openai.py +0 -698
- {dv_pipecat_ai-0.0.82.dev857.dist-info → dv_pipecat_ai-0.0.85.dev837.dist-info}/WHEEL +0 -0
- {dv_pipecat_ai-0.0.82.dev857.dist-info → dv_pipecat_ai-0.0.85.dev837.dist-info}/licenses/LICENSE +0 -0
- {dv_pipecat_ai-0.0.82.dev857.dist-info → dv_pipecat_ai-0.0.85.dev837.dist-info}/top_level.txt +0 -0
- /pipecat/services/{aws_nova_sonic → aws/nova_sonic}/ready.wav +0 -0
pipecat/pipeline/runner.py
CHANGED
@@ -70,11 +70,15 @@ class PipelineRunner(BaseObject):
         """
         logger.debug(f"Runner {self} started running {task}", call_id=task._conversation_id)
         self._tasks[task.name] = task
-
+
+        # PipelineTask handles asyncio.CancelledError to shutdown the pipeline
+        # properly and re-raises it in case there's more cleanup to do.
         try:
+            params = PipelineTaskParams(loop=self._loop)
             await task.run(params)
         except asyncio.CancelledError:
-
+            pass
+
         del self._tasks[task.name]

         # Cleanup base object.
@@ -106,13 +110,21 @@ class PipelineRunner(BaseObject):

     def _setup_sigint(self):
         """Set up signal handlers for graceful shutdown."""
-
-
+        try:
+            loop = asyncio.get_running_loop()
+            loop.add_signal_handler(signal.SIGINT, lambda *args: self._sig_handler())
+        except NotImplementedError:
+            # Windows fallback
+            signal.signal(signal.SIGINT, lambda s, f: self._sig_handler())

     def _setup_sigterm(self):
         """Set up signal handlers for graceful shutdown."""
-
-
+        try:
+            loop = asyncio.get_running_loop()
+            loop.add_signal_handler(signal.SIGTERM, lambda *args: self._sig_handler())
+        except NotImplementedError:
+            # Windows fallback
+            signal.signal(signal.SIGTERM, lambda s, f: self._sig_handler())

     def _sig_handler(self):
         """Handle interrupt signals by cancelling all tasks."""
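
Note: loop.add_signal_handler() raises NotImplementedError on Windows event loops, which is why the new code falls back to the signal module. A minimal standalone sketch of the same pattern (illustrative only, not taken from the package):

import asyncio
import signal


def install_sigint_handler(handler) -> None:
    """Install a SIGINT handler, preferring the running event loop when possible."""
    try:
        loop = asyncio.get_running_loop()
        # Preferred path: the loop schedules the callback safely between tasks.
        loop.add_signal_handler(signal.SIGINT, handler)
    except NotImplementedError:
        # Windows fallback: register a plain signal-module handler instead.
        signal.signal(signal.SIGINT, lambda signum, frame: handler())
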
pipecat/pipeline/service_switcher.py
CHANGED
@@ -6,9 +6,15 @@

 """Service switcher for switching between different services at runtime, with different switching strategies."""

+from dataclasses import dataclass
 from typing import Any, Generic, List, Optional, Type, TypeVar

-from pipecat.frames.frames import
+from pipecat.frames.frames import (
+    ControlFrame,
+    Frame,
+    ManuallySwitchServiceFrame,
+    ServiceSwitcherFrame,
+)
 from pipecat.pipeline.parallel_pipeline import ParallelPipeline
 from pipecat.processors.filters.function_filter import FunctionFilter
 from pipecat.processors.frame_processor import FrameDirection, FrameProcessor
@@ -22,19 +28,6 @@ class ServiceSwitcherStrategy:
         self.services = services
         self.active_service: Optional[FrameProcessor] = None

-    def is_active(self, service: FrameProcessor) -> bool:
-        """Determine if the given service is the currently active one.
-
-        This method should be overridden by subclasses to implement specific logic.
-
-        Args:
-            service: The service to check.
-
-        Returns:
-            True if the given service is the active one, False otherwise.
-        """
-        raise NotImplementedError("Subclasses must implement this method.")
-
     def handle_frame(self, frame: ServiceSwitcherFrame, direction: FrameDirection):
         """Handle a frame that controls service switching.

@@ -60,17 +53,6 @@ class ServiceSwitcherStrategyManual(ServiceSwitcherStrategy):
         super().__init__(services)
         self.active_service = services[0] if services else None

-    def is_active(self, service: FrameProcessor) -> bool:
-        """Check if the given service is the currently active one.
-
-        Args:
-            service: The service to check.
-
-        Returns:
-            True if the given service is the active one, False otherwise.
-        """
-        return service == self.active_service
-
     def handle_frame(self, frame: ServiceSwitcherFrame, direction: FrameDirection):
         """Handle a frame that controls service switching.

@@ -79,20 +61,21 @@ class ServiceSwitcherStrategyManual(ServiceSwitcherStrategy):
             direction: The direction of the frame (upstream or downstream).
         """
         if isinstance(frame, ManuallySwitchServiceFrame):
-            self.
+            self._set_active_if_available(frame.service)
         else:
             raise ValueError(f"Unsupported frame type: {type(frame)}")

-    def
-        """Set the active service to the given one.
+    def _set_active_if_available(self, service: FrameProcessor):
+        """Set the active service to the given one, if it is in the list of available services.
+
+        If it's not in the list, the request is ignored, as it may have been
+        intended for another ServiceSwitcher in the pipeline.

         Args:
             service: The service to set as active.
         """
         if service in self.services:
             self.active_service = service
-        else:
-            raise ValueError(f"Service {service} is not in the list of available services.")


 StrategyType = TypeVar("StrategyType", bound=ServiceSwitcherStrategy)
@@ -108,6 +91,43 @@ class ServiceSwitcher(ParallelPipeline, Generic[StrategyType]):
         self.services = services
         self.strategy = strategy

+    class ServiceSwitcherFilter(FunctionFilter):
+        """An internal filter that allows frames to pass through to the wrapped service only if it's the active service."""
+
+        def __init__(
+            self,
+            wrapped_service: FrameProcessor,
+            active_service: FrameProcessor,
+            direction: FrameDirection,
+        ):
+            """Initialize the service switcher filter with a strategy and direction."""
+
+            async def filter(_: Frame) -> bool:
+                return self._wrapped_service == self._active_service
+
+            super().__init__(filter, direction)
+            self._wrapped_service = wrapped_service
+            self._active_service = active_service
+
+        async def process_frame(self, frame, direction):
+            """Process a frame through the filter, handling special internal filter-updating frames."""
+            if isinstance(frame, ServiceSwitcher.ServiceSwitcherFilterFrame):
+                self._active_service = frame.active_service
+                # Two ServiceSwitcherFilters "sandwich" a service. Push the
+                # frame only to update the other side of the sandwich, but
+                # otherwise don't let it leave the sandwich.
+                if direction == self._direction:
+                    await self.push_frame(frame, direction)
+                return
+
+            await super().process_frame(frame, direction)
+
+    @dataclass
+    class ServiceSwitcherFilterFrame(ControlFrame):
+        """An internal frame used by ServiceSwitcher to filter frames based on active service."""
+
+        active_service: FrameProcessor
+
     @staticmethod
     def _make_pipeline_definitions(
         services: List[FrameProcessor], strategy: ServiceSwitcherStrategy
@@ -121,14 +141,18 @@ class ServiceSwitcher(ParallelPipeline, Generic[StrategyType]):
     def _make_pipeline_definition(
         service: FrameProcessor, strategy: ServiceSwitcherStrategy
     ) -> Any:
-        async def filter(frame) -> bool:
-            _ = frame
-            return strategy.is_active(service)
-
         return [
-
+            ServiceSwitcher.ServiceSwitcherFilter(
+                wrapped_service=service,
+                active_service=strategy.active_service,
+                direction=FrameDirection.DOWNSTREAM,
+            ),
             service,
-
+            ServiceSwitcher.ServiceSwitcherFilter(
+                wrapped_service=service,
+                active_service=strategy.active_service,
+                direction=FrameDirection.UPSTREAM,
+            ),
         ]

     async def process_frame(self, frame: Frame, direction: FrameDirection):
@@ -142,3 +166,7 @@ class ServiceSwitcher(ParallelPipeline, Generic[StrategyType]):

         if isinstance(frame, ServiceSwitcherFrame):
             self.strategy.handle_frame(frame, direction)
+            service_switcher_filter_frame = ServiceSwitcher.ServiceSwitcherFilterFrame(
+                active_service=self.strategy.active_service
+            )
+            await super().process_frame(service_switcher_filter_frame, direction)
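
For context, switching is driven by pushing a ManuallySwitchServiceFrame through the pipeline. A hedged sketch, assuming the frame's field is named service (as frame.service above suggests) and that task is an existing PipelineTask:

from pipecat.frames.frames import ManuallySwitchServiceFrame


async def switch_to(task, service):
    # Any ServiceSwitcher that owns `service` activates it; switchers that
    # don't own it ignore the frame (see _set_active_if_available above).
    await task.queue_frame(ManuallySwitchServiceFrame(service=service))
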
pipecat/pipeline/task.py
CHANGED
@@ -13,8 +13,7 @@ including heartbeats, idle detection, and observer integration.

 import asyncio
 import time
-from
-from typing import Any, AsyncIterable, Deque, Dict, Iterable, List, Optional, Tuple, Type
+from typing import Any, AsyncIterable, Dict, Iterable, List, Optional, Tuple, Type

 from loguru import logger
 from pydantic import BaseModel, ConfigDict, Field
@@ -31,7 +30,8 @@ from pipecat.frames.frames import (
     ErrorFrame,
     Frame,
     HeartbeatFrame,
-
+    InterruptionFrame,
+    InterruptionTaskFrame,
     MetricsFrame,
     StartFrame,
     StopFrame,
@@ -113,9 +113,32 @@ class PipelineTask(BasePipelineTask):
     - on_frame_reached_downstream: Called when downstream frames reach the sink
     - on_idle_timeout: Called when pipeline is idle beyond timeout threshold
     - on_pipeline_started: Called when pipeline starts with StartFrame
-    - on_pipeline_stopped: Called when pipeline stops with StopFrame
-
-
+    - on_pipeline_stopped: [deprecated] Called when pipeline stops with StopFrame
+
+      .. deprecated:: 0.0.86
+         Use `on_pipeline_finished` instead.
+
+    - on_pipeline_ended: [deprecated] Called when pipeline ends with EndFrame
+
+      .. deprecated:: 0.0.86
+         Use `on_pipeline_finished` instead.
+
+    - on_pipeline_cancelled: [deprecated] Called when pipeline is cancelled with CancelFrame
+
+      .. deprecated:: 0.0.86
+         Use `on_pipeline_finished` instead.
+
+    - on_pipeline_finished: Called after the pipeline has reached any terminal state.
+      This includes:
+
+      - StopFrame: pipeline was stopped (processors keep connections open)
+      - EndFrame: pipeline ended normally
+      - CancelFrame: pipeline was cancelled
+
+      Use this event for cleanup, logging, or post-processing tasks. Users can inspect
+      the frame if they need to handle specific cases.
+
+    - on_pipeline_error: Called when an error occurs with ErrorFrame

     Example::

@@ -126,6 +149,18 @@ class PipelineTask(BasePipelineTask):
         @task.event_handler("on_idle_timeout")
         async def on_pipeline_idle_timeout(task):
             ...
+
+        @task.event_handler("on_pipeline_started")
+        async def on_pipeline_started(task, frame):
+            ...
+
+        @task.event_handler("on_pipeline_finished")
+        async def on_pipeline_finished(task, frame):
+            ...
+
+        @task.event_handler("on_pipeline_error")
+        async def on_pipeline_error(task, frame):
+            ...
     """

     def __init__(
@@ -234,6 +269,9 @@ class PipelineTask(BasePipelineTask):
         # StopFrame) has been received at the end of the pipeline.
         self._pipeline_end_event = asyncio.Event()

+        # This event is set when the pipeline truly finishes.
+        self._pipeline_finished_event = asyncio.Event()
+
         # This is the final pipeline. It is composed of a source processor,
         # followed by the user pipeline, and ending with a sink processor. The
         # source allows us to receive and react to upstream frames, and the sink
@@ -262,6 +300,8 @@ class PipelineTask(BasePipelineTask):
         self._register_event_handler("on_pipeline_stopped")
         self._register_event_handler("on_pipeline_ended")
         self._register_event_handler("on_pipeline_cancelled")
+        self._register_event_handler("on_pipeline_finished")
+        self._register_event_handler("on_pipeline_error")

     @property
     def params(self) -> PipelineParams:
@@ -290,6 +330,27 @@ class PipelineTask(BasePipelineTask):
         """
         return self._turn_trace_observer

+    def event_handler(self, event_name: str):
+        """Decorator for registering event handlers.
+
+        Args:
+            event_name: The name of the event to handle.
+
+        Returns:
+            The decorator function that registers the handler.
+        """
+        if event_name in ["on_pipeline_stopped", "on_pipeline_ended", "on_pipeline_cancelled"]:
+            import warnings
+
+            with warnings.catch_warnings():
+                warnings.simplefilter("always")
+                warnings.warn(
+                    f"Event '{event_name}' is deprecated, use 'on_pipeline_finished' instead.",
+                    DeprecationWarning,
+                )
+
+        return super().event_handler(event_name)
+
     def add_observer(self, observer: BaseObserver):
         """Add an observer to monitor pipeline execution.

@@ -343,12 +404,9 @@ class PipelineTask(BasePipelineTask):
         await self.queue_frame(EndFrame())

     async def cancel(self):
-        """
-
-
-        waiting for completion.
-        """
-        await self._cancel()
+        """Request the running pipeline to cancel."""
+        if not self._finished:
+            await self._cancel()

     async def run(self, params: PipelineTaskParams):
         """Start and manage the pipeline execution until completion or cancellation.
@@ -358,51 +416,38 @@ class PipelineTask(BasePipelineTask):
         """
         if self.has_finished():
             return
-        cleanup_pipeline = True
-        try:
-            # Setup processors.
-            await self._setup(params)

-
-
-            # controlled source processor).
-            push_task = await self._create_tasks()
-            await push_task
+        # Setup processors.
+        await self._setup(params)

-
-
+        # Create all main tasks and wait for the main push task. This is the
+        # task that pushes frames to the very beginning of our pipeline (i.e. to
+        # our controlled source processor).
+        await self._create_tasks()

-
-
+        try:
+            # Wait for pipeline to finish.
+            await self._wait_for_pipeline_finished()
         except asyncio.CancelledError:
-
-            #
+            logger.debug(f"Pipeline task {self} got cancelled from outside...")
+            # We have been cancelled from outside, let's just cancel everything.
+            await self._cancel()
+            # Wait again for pipeline to finish. This time we have really
+            # cancelled, so it should really finish.
+            await self._wait_for_pipeline_finished()
+            # Re-raise in case there's more cleanup to do.
             raise
         finally:
             # We can reach this point for different reasons:
             #
-            # 1. The task has finished
-            # 2. By
-
-
-
-
-
-
-            # `self._cancelled` is true.
-            #
-            # Case (3) will raise the exception above (because we are cancelling
-            # the asyncio task). This will be then captured by the
-            # `PipelineRunner` which will call `PipelineTask.cancel()` and
-            # therefore becoming case (2).
-            if self._finished or self._cancelled:
-                logger.debug(f"Pipeline task {self} is finishing cleanup...")
-                await self._cancel_tasks()
-                await self._cleanup(cleanup_pipeline)
-                if self._check_dangling_tasks:
-                    self._print_dangling_tasks()
-                self._finished = True
-                logger.debug(f"Pipeline task {self} has finished")
+            # 1. The pipeline task has finished (try case).
+            # 2. By an asyncio task cancellation (except case).
+            logger.debug(f"Pipeline task {self} is finishing...")
+            await self._cancel_tasks()
+            if self._check_dangling_tasks:
+                self._print_dangling_tasks()
+            self._finished = True
+            logger.debug(f"Pipeline task {self} has finished")

     async def queue_frame(self, frame: Frame):
         """Queue a single frame to be pushed down the pipeline.
@@ -430,19 +475,7 @@
         if not self._cancelled:
             logger.debug(f"Canceling pipeline task {self}", call_id=self._conversation_id)
             self._cancelled = True
-
-            # Make sure everything is cleaned up downstream. This is sent
-            # out-of-band from the main streaming task which is what we want since
-            # we want to cancel right away.
-            await self._pipeline.queue_frame(cancel_frame)
-            # Wait for CancelFrame to make it through the pipeline.
-            await self._wait_for_pipeline_end(cancel_frame)
-            # Only cancel the push task, we don't want to be able to process any
-            # other frame after cancel. Everything else will be cancelled in
-            # run().
-            if self._process_push_task:
-                await self._task_manager.cancel_task(self._process_push_task)
-                self._process_push_task = None
+            await self.queue_frame(CancelFrame())

     async def _create_tasks(self):
         """Create and start all pipeline processing tasks."""
@@ -532,6 +565,7 @@
             )
         finally:
             await self._call_event_handler("on_pipeline_cancelled", frame)
+            await self._call_event_handler("on_pipeline_finished", frame)

         logger.debug(f"{self}: Closing. Waiting for {frame} to reach the end of the pipeline...")

@@ -543,6 +577,17 @@

         self._pipeline_end_event.clear()

+        # We are really done.
+        self._pipeline_finished_event.set()
+
+    async def _wait_for_pipeline_finished(self):
+        await self._pipeline_finished_event.wait()
+        self._pipeline_finished_event.clear()
+        # Make sure we wait for the main task to complete.
+        if self._process_push_task:
+            await self._process_push_task
+            self._process_push_task = None
+
     async def _setup(self, params: PipelineTaskParams):
         """Set up the pipeline task and all processors."""
         mgr_params = TaskManagerParams(loop=params.loop)
@@ -627,22 +672,31 @@

         if isinstance(frame, EndTaskFrame):
             # Tell the task we should end nicely.
+            logger.debug(f"{self}: received end task frame {frame}")
             await self.queue_frame(EndFrame())
         elif isinstance(frame, CancelTaskFrame):
             # Tell the task we should end right away.
+            logger.debug(f"{self}: received cancel task frame {frame}")
             await self.queue_frame(CancelFrame())
         elif isinstance(frame, StopTaskFrame):
             # Tell the task we should stop nicely.
+            logger.debug(f"{self}: received stop task frame {frame}")
             await self.queue_frame(StopFrame())
+        elif isinstance(frame, InterruptionTaskFrame):
+            # Tell the task we should interrupt the pipeline. Note that we are
+            # bypassing the push queue and directly queue into the
+            # pipeline. This is in case the push task is blocked waiting for a
+            # pipeline-ending frame to finish traversing the pipeline.
+            logger.debug(f"{self}: received interruption task frame {frame}")
+            await self._pipeline.queue_frame(InterruptionFrame())
         elif isinstance(frame, ErrorFrame):
+            await self._call_event_handler("on_pipeline_error", frame)
             if frame.fatal:
                 logger.error(f"A fatal error occurred: {frame}")
                 # Cancel all tasks downstream.
                 await self.queue_frame(CancelFrame())
-                # Tell the task we should stop.
-                await self.queue_frame(StopTaskFrame())
             else:
-                logger.warning(f"Something went wrong: {frame}")
+                logger.warning(f"{self}: Something went wrong: {frame}")

     async def _sink_push_frame(self, frame: Frame, direction: FrameDirection):
         """Process frames coming downstream from the pipeline.
@@ -669,9 +723,11 @@
             self._pipeline_start_event.set()
         elif isinstance(frame, EndFrame):
             await self._call_event_handler("on_pipeline_ended", frame)
+            await self._call_event_handler("on_pipeline_finished", frame)
             self._pipeline_end_event.set()
         elif isinstance(frame, StopFrame):
             await self._call_event_handler("on_pipeline_stopped", frame)
+            await self._call_event_handler("on_pipeline_finished", frame)
             self._pipeline_end_event.set()
         elif isinstance(frame, CancelFrame):
             self._pipeline_end_event.set()
@@ -718,7 +774,6 @@
         """
         running = True
         last_frame_time = 0
-        frame_buffer = deque(maxlen=10)  # Store last 10 frames

         while running:
             try:
@@ -726,9 +781,6 @@
                     self._idle_queue.get(), timeout=self._idle_timeout_secs
                 )

-                if not isinstance(frame, InputAudioRawFrame):
-                    frame_buffer.append(frame)
-
                 if isinstance(frame, StartFrame) or isinstance(frame, self._idle_timeout_frames):
                     # If we find a StartFrame or one of the frames that prevents a
                     # time out we update the time.
@@ -739,7 +791,7 @@
                     # valid frames.
                     diff_time = time.time() - last_frame_time
                     if diff_time >= self._idle_timeout_secs:
-                        running = await self._idle_timeout_detected(
+                        running = await self._idle_timeout_detected()
                         # Reset `last_frame_time` so we don't trigger another
                         # immediate idle timeout if we are not cancelling. For
                        # example, we might want to force the bot to say goodbye
@@ -749,14 +801,11 @@
                 self._idle_queue.task_done()

             except asyncio.TimeoutError:
-                running = await self._idle_timeout_detected(
+                running = await self._idle_timeout_detected()

-    async def _idle_timeout_detected(self
+    async def _idle_timeout_detected(self) -> bool:
         """Handle idle timeout detection and optional cancellation.

-        Args:
-            last_frames: Recent frames received before timeout for debugging.
-
         Returns:
             Whether the pipeline task should continue running.
         """
@@ -764,10 +813,7 @@
         if self._cancelled:
             return True

-        logger.warning("Idle timeout detected.
-        for i, frame in enumerate(last_frames, 1):
-            logger.warning(f"Frame {i}: {frame}")
-
+        logger.warning("Idle timeout detected.")
         await self._call_event_handler("on_idle_timeout")
         if self._cancel_on_idle_timeout:
             logger.warning(
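
With the deprecations above, the three terminal events collapse into on_pipeline_finished, and the old per-event handlers now emit a DeprecationWarning. A short migration sketch, assuming an existing PipelineTask named task:

from pipecat.frames.frames import CancelFrame, EndFrame, StopFrame


@task.event_handler("on_pipeline_finished")
async def on_pipeline_finished(task, frame):
    # A single handler replaces on_pipeline_ended / on_pipeline_stopped /
    # on_pipeline_cancelled; inspect the frame when the cases need to differ.
    if isinstance(frame, CancelFrame):
        ...  # pipeline was cancelled
    elif isinstance(frame, (EndFrame, StopFrame)):
        ...  # pipeline ended normally or was stopped
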
pipecat/pipeline/tts_switcher.py
ADDED
@@ -0,0 +1,30 @@
+#
+# Copyright (c) 2025, Daily
+#
+# SPDX-License-Identifier: BSD 2-Clause License
+#
+
+"""TTS switcher for switching between different TTS services at runtime, with different switching strategies."""
+
+from typing import List, Optional, Type
+
+from pipecat.pipeline.service_switcher import ServiceSwitcher, StrategyType
+from pipecat.services.tts_service import TTSService
+
+
+class TTSSwitcher(ServiceSwitcher[StrategyType]):
+    """A pipeline that switches between different TTS services at runtime."""
+
+    def __init__(self, tts_services: List[TTSService], strategy_type: Type[StrategyType]):
+        """Initialize the TTS switcher with a list of TTS services and a switching strategy."""
+        super().__init__(tts_services, strategy_type)
+
+    @property
+    def tts_services(self) -> List[TTSService]:
+        """Get the list of TTS services managed by this switcher."""
+        return self.services
+
+    @property
+    def active_tts(self) -> Optional[TTSService]:
+        """Get the currently active TTS service, if any."""
+        return self.strategy.active_service
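
A construction sketch for the new TTSSwitcher; primary_tts and backup_tts are placeholders for two TTSService instances, and the switcher is assumed to take the place of a single TTS service in the pipeline:

from pipecat.pipeline.service_switcher import ServiceSwitcherStrategyManual
from pipecat.pipeline.tts_switcher import TTSSwitcher

tts_switcher = TTSSwitcher(
    tts_services=[primary_tts, backup_tts],
    strategy_type=ServiceSwitcherStrategyManual,
)

# The first service in the list starts out active (per ServiceSwitcherStrategyManual);
# tts_switcher.active_tts reports which service currently receives frames.
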
pipecat/processors/aggregators/dtmf_aggregator.py
CHANGED
@@ -16,7 +16,6 @@ from typing import Optional

 from pipecat.audio.dtmf.types import KeypadEntry
 from pipecat.frames.frames import (
-    BotInterruptionFrame,
     CancelFrame,
     EndFrame,
     Frame,
@@ -24,7 +23,7 @@ from pipecat.frames.frames import (
     StartFrame,
     TranscriptionFrame,
 )
-from pipecat.processors.frame_processor import FrameDirection, FrameProcessor
+from pipecat.processors.frame_processor import FrameDirection, FrameProcessor
 from pipecat.utils.time import time_now_iso8601


@@ -105,7 +104,7 @@ class DTMFAggregator(FrameProcessor):

         # For first digit, schedule interruption.
         if is_first_digit:
-            await self.
+            await self.push_interruption_task_frame_and_wait()

         # Check for immediate flush conditions
         if frame.button == self._termination_digit:
pipecat/processors/aggregators/{gated_openai_llm_context.py → gated_llm_context.py}
RENAMED
@@ -4,20 +4,20 @@
 # SPDX-License-Identifier: BSD 2-Clause License
 #

-"""Gated
+"""Gated LLM context aggregator for controlled message flow."""

-from pipecat.frames.frames import CancelFrame, EndFrame, Frame, StartFrame
+from pipecat.frames.frames import CancelFrame, EndFrame, Frame, LLMContextFrame, StartFrame
 from pipecat.processors.aggregators.openai_llm_context import OpenAILLMContextFrame
 from pipecat.processors.frame_processor import FrameDirection, FrameProcessor
 from pipecat.sync.base_notifier import BaseNotifier


-class
-    """Aggregator that gates
+class GatedLLMContextAggregator(FrameProcessor):
+    """Aggregator that gates LLM context frames until notified.

-    This aggregator captures
-
-
+    This aggregator captures LLM context frames and holds them until a notifier
+    signals that they can be released. This is useful for controlling the flow
+    of context frames based on external conditions or timing.
     """

     def __init__(self, *, notifier: BaseNotifier, start_open: bool = False, **kwargs):
@@ -35,7 +35,7 @@ class GatedOpenAILLMContextAggregator(FrameProcessor):
         self._gate_task = None

     async def process_frame(self, frame: Frame, direction: FrameDirection):
-        """Process incoming frames, gating
+        """Process incoming frames, gating LLM context frames.

         Args:
             frame: The frame to process.
@@ -49,7 +49,7 @@ class GatedOpenAILLMContextAggregator(FrameProcessor):
         if isinstance(frame, (EndFrame, CancelFrame)):
             await self._stop()
             await self.push_frame(frame)
-        elif isinstance(frame, OpenAILLMContextFrame):
+        elif isinstance(frame, (LLMContextFrame, OpenAILLMContextFrame)):
             if self._start_open:
                 self._start_open = False
                 await self.push_frame(frame, direction)
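
A brief wiring sketch for the renamed aggregator. The EventNotifier import path is an assumption (pipecat ships notifier implementations of BaseNotifier under pipecat.sync); any BaseNotifier works:

from pipecat.processors.aggregators.gated_llm_context import GatedLLMContextAggregator
from pipecat.sync.event_notifier import EventNotifier  # assumed BaseNotifier implementation

gate_notifier = EventNotifier()
gated_context = GatedLLMContextAggregator(notifier=gate_notifier, start_open=False)

# LLMContextFrame / OpenAILLMContextFrame frames are held inside the aggregator
# until the notifier fires, e.g. `await gate_notifier.notify()` elsewhere.
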
pipecat/processors/aggregators/gated_open_ai_llm_context.py
ADDED
@@ -0,0 +1,12 @@
+#
+# Copyright (c) 2025, Daily
+#
+# SPDX-License-Identifier: BSD 2-Clause License
+#
+
+"""Gated OpenAI LLM context aggregator for controlled message flow."""
+
+from pipecat.processors.aggregators.gated_llm_context import GatedLLMContextAggregator
+
+# Alias for backward compatibility with the previous name
+GatedOpenAILLMContextAggregator = GatedLLMContextAggregator
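
The shim keeps imports of the old name working, for example:

from pipecat.processors.aggregators.gated_llm_context import GatedLLMContextAggregator
from pipecat.processors.aggregators.gated_open_ai_llm_context import (
    GatedOpenAILLMContextAggregator,
)

# The old name is now just an alias for the renamed class.
assert GatedOpenAILLMContextAggregator is GatedLLMContextAggregator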