copilotkit 0.1.44__tar.gz → 0.2.0a0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (28) hide show
  1. copilotkit-0.2.0a0/PKG-INFO +219 -0
  2. copilotkit-0.2.0a0/README.md +193 -0
  3. {copilotkit-0.1.44 → copilotkit-0.2.0a0}/copilotkit/crewai/__init__.py +8 -0
  4. {copilotkit-0.1.44 → copilotkit-0.2.0a0}/copilotkit/crewai/crewai_agent.py +2 -2
  5. copilotkit-0.2.0a0/copilotkit/crewai/crewai_event_listener.py +242 -0
  6. {copilotkit-0.1.44 → copilotkit-0.2.0a0}/copilotkit/crewai/crewai_sdk.py +75 -54
  7. copilotkit-0.2.0a0/copilotkit/crewai/imports.py +130 -0
  8. copilotkit-0.2.0a0/copilotkit/crewai/utils.py +166 -0
  9. {copilotkit-0.1.44 → copilotkit-0.2.0a0}/pyproject.toml +2 -2
  10. copilotkit-0.1.44/PKG-INFO +0 -89
  11. copilotkit-0.1.44/README.md +0 -63
  12. {copilotkit-0.1.44 → copilotkit-0.2.0a0}/copilotkit/__init__.py +0 -0
  13. {copilotkit-0.1.44 → copilotkit-0.2.0a0}/copilotkit/action.py +0 -0
  14. {copilotkit-0.1.44 → copilotkit-0.2.0a0}/copilotkit/agent.py +0 -0
  15. {copilotkit-0.1.44 → copilotkit-0.2.0a0}/copilotkit/exc.py +0 -0
  16. {copilotkit-0.1.44 → copilotkit-0.2.0a0}/copilotkit/html.py +0 -0
  17. {copilotkit-0.1.44 → copilotkit-0.2.0a0}/copilotkit/integrations/__init__.py +0 -0
  18. {copilotkit-0.1.44 → copilotkit-0.2.0a0}/copilotkit/integrations/fastapi.py +0 -0
  19. {copilotkit-0.1.44 → copilotkit-0.2.0a0}/copilotkit/langchain.py +0 -0
  20. {copilotkit-0.1.44 → copilotkit-0.2.0a0}/copilotkit/langgraph.py +0 -0
  21. {copilotkit-0.1.44 → copilotkit-0.2.0a0}/copilotkit/langgraph_agent.py +0 -0
  22. {copilotkit-0.1.44 → copilotkit-0.2.0a0}/copilotkit/logging.py +0 -0
  23. {copilotkit-0.1.44 → copilotkit-0.2.0a0}/copilotkit/parameter.py +0 -0
  24. {copilotkit-0.1.44 → copilotkit-0.2.0a0}/copilotkit/protocol.py +0 -0
  25. {copilotkit-0.1.44 → copilotkit-0.2.0a0}/copilotkit/runloop.py +0 -0
  26. {copilotkit-0.1.44 → copilotkit-0.2.0a0}/copilotkit/sdk.py +0 -0
  27. {copilotkit-0.1.44 → copilotkit-0.2.0a0}/copilotkit/types.py +0 -0
  28. {copilotkit-0.1.44 → copilotkit-0.2.0a0}/copilotkit/utils.py +0 -0
@@ -0,0 +1,219 @@
1
+ Metadata-Version: 2.3
2
+ Name: copilotkit
3
+ Version: 0.2.0a0
4
+ Summary: CopilotKit python SDK
5
+ License: MIT
6
+ Keywords: copilot,copilotkit,langgraph,langchain,ai,langsmith,langserve
7
+ Author: Markus Ecker
8
+ Author-email: markus.ecker@gmail.com
9
+ Requires-Python: >=3.10,<3.13
10
+ Classifier: License :: OSI Approved :: MIT License
11
+ Classifier: Programming Language :: Python :: 3
12
+ Classifier: Programming Language :: Python :: 3.10
13
+ Classifier: Programming Language :: Python :: 3.11
14
+ Classifier: Programming Language :: Python :: 3.12
15
+ Provides-Extra: crewai
16
+ Requires-Dist: crewai (==0.114.0) ; extra == "crewai"
17
+ Requires-Dist: fastapi (>=0.115.0,<0.116.0)
18
+ Requires-Dist: langchain (>=0.3.3,<0.4.0)
19
+ Requires-Dist: langgraph (>=0.2.50)
20
+ Requires-Dist: partialjson (>=0.0.8,<0.0.9)
21
+ Requires-Dist: toml (>=0.10.2,<0.11.0)
22
+ Project-URL: Homepage, https://copilotkit.ai
23
+ Project-URL: Repository, https://github.com/CopilotKit/CopilotKit/tree/main/sdk-python
24
+ Description-Content-Type: text/markdown
25
+
26
+ # CopilotKit Python SDK
27
+
28
+ [![PyPI version](https://badge.fury.io/py/copilotkit.svg)](https://badge.fury.io/py/copilotkit)
29
+ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
30
+
31
+ The official Python SDK for CopilotKit - build AI copilots and agents into your applications.
32
+
33
+ ## Features
34
+
35
+ - 🚀 Integration with LangGraph and LangChain
36
+ - 🔄 Support for stateful conversations
37
+ - 🔌 FastAPI integration for serving endpoints
38
+ - 🤝 Optional CrewAI integration (requires CrewAI 0.114.0)
39
+
40
+ ## Version 0.2.0 Breaking Changes
41
+
42
+ ⚠️ As of version 0.2.0, the CopilotKit Python SDK pins its optional CrewAI dependency to version 0.114.0 exactly.
43
+ If you're using CrewAI with CopilotKit, you must update your CrewAI installation to match this version.
44
+
45
+ ## Installation
46
+
47
+ ```bash
48
+ pip install copilotkit
49
+ ```
50
+
51
+ With CrewAI support:
52
+
53
+ ```bash
54
+ pip install "copilotkit[crewai]"
55
+ ```
56
+
57
+ ## Quick Start
58
+
59
+ ```python
60
+ from copilotkit import CopilotKitRemoteEndpoint, Action
61
+ from copilotkit.integrations.fastapi import add_fastapi_endpoint
62
+ from fastapi import FastAPI
63
+
64
+ # Define an action handler function
65
+ def greet_user_handler(name: str):
66
+ return f"Hello, {name}!"
67
+
68
+ # Initialize the SDK endpoint
69
+ endpoint = CopilotKitRemoteEndpoint(
70
+ actions=[
71
+ Action(
72
+ name="greet_user",
73
+ handler=greet_user_handler,
74
+ description="Greet the user",
75
+ parameters=[
76
+ {
77
+ "name": "name",
78
+ "type": "string",
79
+ "description": "The name of the user"
80
+ }
81
+ ]
82
+ )
83
+ ]
84
+ )
85
+
86
+ # Create FastAPI app and add CopilotKit endpoint
87
+ app = FastAPI()
88
+ add_fastapi_endpoint(app, endpoint, "/copilotkit")
89
+ ```
90
+
91
+ ## LangGraph Integration
92
+
93
+ CopilotKit provides seamless integration with LangGraph for building sophisticated agent workflows.
94
+
95
+ ### Using LangGraphAgent
96
+
97
+ ```python
98
+ from copilotkit import CopilotKitRemoteEndpoint, LangGraphAgent
99
+ from langgraph.graph import StateGraph, MessagesState
100
+
101
+ # Create your LangGraph
102
+ def create_graph():
103
+ workflow = StateGraph(MessagesState)
104
+ # ... define your graph nodes and edges
105
+ return workflow.compile()
106
+
107
+ # Initialize the SDK with a LangGraph agent
108
+ endpoint = CopilotKitRemoteEndpoint(
109
+ agents=[
110
+ LangGraphAgent(
111
+ name="email_agent",
112
+ description="This agent sends emails",
113
+ graph=create_graph(),
114
+ )
115
+ ]
116
+ )
117
+ ```
118
+
119
+ ### Customizing LangGraph Behavior
120
+
121
+ CopilotKit provides utilities to customize how your LangGraph agents work:
122
+
123
+ ```python
124
+ from copilotkit.langgraph import copilotkit_customize_config, copilotkit_emit_state
125
+
126
+ # Customize configuration
127
+ config = copilotkit_customize_config(
128
+ base_config=None, # or your existing config
129
+ emit_messages=True,
130
+ emit_tool_calls=True,
131
+ emit_intermediate_state=[
132
+ {
133
+ "state_key": "progress",
134
+ "tool": "ProgressTool",
135
+ "tool_argument": "steps"
136
+ },
137
+ ]
138
+ )
139
+
140
+ # Emit intermediate state
141
+ async def long_running_node(state):
142
+ for i in range(10):
143
+ await some_operation(i)
144
+ await copilotkit_emit_state(config, {"progress": i})
145
+ return state
146
+ ```
147
+
148
+ ## CrewAI Integration
149
+
150
+ CopilotKit provides integration with CrewAI for building multi-agent systems.
151
+
152
+ ### Intercepting CopilotKit Actions in CrewAI Flows
153
+
154
+ When building flows that use CopilotKit actions, you need to properly intercept any tool calls that should be handled by the frontend:
155
+
156
+ ```python
157
+ from copilotkit.crewai import create_copilotkit_tool_handlers, check_for_intercepted_actions
158
+
159
+ @router(start_flow)
160
+ async def chat(self):
161
+ # Your existing code...
162
+
163
+ # 1. Create wrapped tool handlers
164
+ wrapped_handlers = create_copilotkit_tool_handlers(
165
+ original_handlers=tool_handlers, # Your original tool handlers
166
+ copilotkit_actions=self.state.copilotkit.actions, # From CopilotKitState
167
+ state=self.state # Flow state for flagging
168
+ )
169
+
170
+ # 2. Use wrapped handlers with LLM call
171
+ response = llm.call(
172
+ messages=[...],
173
+ tools=[
174
+ *self.state.copilotkit.actions, # Include CopilotKit actions
175
+ YOUR_OTHER_TOOLS...
176
+ ],
177
+ available_functions=wrapped_handlers # Use wrapped handlers
178
+ )
179
+
180
+ # 3. Create response message as usual
181
+ message = {"role": "assistant", "content": response}
182
+ self.state.messages.append(message)
183
+
184
+ # 4. Check if a CopilotKit action was intercepted
185
+ if check_for_intercepted_actions(self.state):
186
+ # Return to frontend to handle the action
187
+ return "route_end"
188
+
189
+ # Continue with normal flow...
190
+ ```
191
+
192
+ This pattern ensures that:
193
+
194
+ 1. CopilotKit actions are properly identified
195
+ 2. The frontend gets control when needed
196
+ 3. Other tools continue to work normally
197
+
198
+ ## Documentation
199
+
200
+ For detailed documentation and examples, visit [copilotkit.ai](https://copilotkit.ai)
201
+
202
+ ## Contributing
203
+
204
+ We welcome contributions! Please see our [Contributing Guidelines](https://github.com/CopilotKit/CopilotKit/blob/main/CONTRIBUTING.md) for details.
205
+
206
+ ## License
207
+
208
+ This project is licensed under the MIT License - see the [LICENSE](https://github.com/CopilotKit/CopilotKit/blob/main/LICENSE) file for details.
209
+
210
+ ## Support
211
+
212
+ - 📚 [Documentation](https://docs.copilotkit.ai)
213
+ - 💬 [Discord Community](https://discord.gg/6dffbvGU)
214
+ - 🐛 [Issue Tracker](https://github.com/CopilotKit/CopilotKit/issues)
215
+
216
+ ---
217
+
218
+ Built with ❤️ by the CopilotKit team
219
+
@@ -0,0 +1,193 @@
1
+ # CopilotKit Python SDK
2
+
3
+ [![PyPI version](https://badge.fury.io/py/copilotkit.svg)](https://badge.fury.io/py/copilotkit)
4
+ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
5
+
6
+ The official Python SDK for CopilotKit - build AI copilots and agents into your applications.
7
+
8
+ ## Features
9
+
10
+ - 🚀 Integration with LangGraph and LangChain
11
+ - 🔄 Support for stateful conversations
12
+ - 🔌 FastAPI integration for serving endpoints
13
+ - 🤝 Optional CrewAI integration (requires CrewAI 0.114.0)
14
+
15
+ ## Version 0.2.0 Breaking Changes
16
+
17
+ ⚠️ As of version 0.2.0, the CopilotKit Python SDK pins its optional CrewAI dependency to version 0.114.0 exactly.
18
+ If you're using CrewAI with CopilotKit, you must update your CrewAI installation to match this version.
19
+
20
+ ## Installation
21
+
22
+ ```bash
23
+ pip install copilotkit
24
+ ```
25
+
26
+ With CrewAI support:
27
+
28
+ ```bash
29
+ pip install "copilotkit[crewai]"
30
+ ```
31
+
32
+ ## Quick Start
33
+
34
+ ```python
35
+ from copilotkit import CopilotKitRemoteEndpoint, Action
36
+ from copilotkit.integrations.fastapi import add_fastapi_endpoint
37
+ from fastapi import FastAPI
38
+
39
+ # Define an action handler function
40
+ def greet_user_handler(name: str):
41
+ return f"Hello, {name}!"
42
+
43
+ # Initialize the SDK endpoint
44
+ endpoint = CopilotKitRemoteEndpoint(
45
+ actions=[
46
+ Action(
47
+ name="greet_user",
48
+ handler=greet_user_handler,
49
+ description="Greet the user",
50
+ parameters=[
51
+ {
52
+ "name": "name",
53
+ "type": "string",
54
+ "description": "The name of the user"
55
+ }
56
+ ]
57
+ )
58
+ ]
59
+ )
60
+
61
+ # Create FastAPI app and add CopilotKit endpoint
62
+ app = FastAPI()
63
+ add_fastapi_endpoint(app, endpoint, "/copilotkit")
64
+ ```
65
+
66
+ ## LangGraph Integration
67
+
68
+ CopilotKit provides seamless integration with LangGraph for building sophisticated agent workflows.
69
+
70
+ ### Using LangGraphAgent
71
+
72
+ ```python
73
+ from copilotkit import CopilotKitRemoteEndpoint, LangGraphAgent
74
+ from langgraph.graph import StateGraph, MessagesState
75
+
76
+ # Create your LangGraph
77
+ def create_graph():
78
+ workflow = StateGraph(MessagesState)
79
+ # ... define your graph nodes and edges
80
+ return workflow.compile()
81
+
82
+ # Initialize the SDK with a LangGraph agent
83
+ endpoint = CopilotKitRemoteEndpoint(
84
+ agents=[
85
+ LangGraphAgent(
86
+ name="email_agent",
87
+ description="This agent sends emails",
88
+ graph=create_graph(),
89
+ )
90
+ ]
91
+ )
92
+ ```
93
+
94
+ ### Customizing LangGraph Behavior
95
+
96
+ CopilotKit provides utilities to customize how your LangGraph agents work:
97
+
98
+ ```python
99
+ from copilotkit.langgraph import copilotkit_customize_config, copilotkit_emit_state
100
+
101
+ # Customize configuration
102
+ config = copilotkit_customize_config(
103
+ base_config=None, # or your existing config
104
+ emit_messages=True,
105
+ emit_tool_calls=True,
106
+ emit_intermediate_state=[
107
+ {
108
+ "state_key": "progress",
109
+ "tool": "ProgressTool",
110
+ "tool_argument": "steps"
111
+ },
112
+ ]
113
+ )
114
+
115
+ # Emit intermediate state
116
+ async def long_running_node(state):
117
+ for i in range(10):
118
+ await some_operation(i)
119
+ await copilotkit_emit_state(config, {"progress": i})
120
+ return state
121
+ ```
122
+
123
+ ## CrewAI Integration
124
+
125
+ CopilotKit provides integration with CrewAI for building multi-agent systems.
126
+
127
+ ### Intercepting CopilotKit Actions in CrewAI Flows
128
+
129
+ When building flows that use CopilotKit actions, you need to properly intercept any tool calls that should be handled by the frontend:
130
+
131
+ ```python
132
+ from copilotkit.crewai import create_copilotkit_tool_handlers, check_for_intercepted_actions
133
+
134
+ @router(start_flow)
135
+ async def chat(self):
136
+ # Your existing code...
137
+
138
+ # 1. Create wrapped tool handlers
139
+ wrapped_handlers = create_copilotkit_tool_handlers(
140
+ original_handlers=tool_handlers, # Your original tool handlers
141
+ copilotkit_actions=self.state.copilotkit.actions, # From CopilotKitState
142
+ state=self.state # Flow state for flagging
143
+ )
144
+
145
+ # 2. Use wrapped handlers with LLM call
146
+ response = llm.call(
147
+ messages=[...],
148
+ tools=[
149
+ *self.state.copilotkit.actions, # Include CopilotKit actions
150
+ YOUR_OTHER_TOOLS...
151
+ ],
152
+ available_functions=wrapped_handlers # Use wrapped handlers
153
+ )
154
+
155
+ # 3. Create response message as usual
156
+ message = {"role": "assistant", "content": response}
157
+ self.state.messages.append(message)
158
+
159
+ # 4. Check if a CopilotKit action was intercepted
160
+ if check_for_intercepted_actions(self.state):
161
+ # Return to frontend to handle the action
162
+ return "route_end"
163
+
164
+ # Continue with normal flow...
165
+ ```
166
+
167
+ This pattern ensures that:
168
+
169
+ 1. CopilotKit actions are properly identified
170
+ 2. The frontend gets control when needed
171
+ 3. Other tools continue to work normally
172
+
173
+ ## Documentation
174
+
175
+ For detailed documentation and examples, visit [copilotkit.ai](https://copilotkit.ai)
176
+
177
+ ## Contributing
178
+
179
+ We welcome contributions! Please see our [Contributing Guidelines](https://github.com/CopilotKit/CopilotKit/blob/main/CONTRIBUTING.md) for details.
180
+
181
+ ## License
182
+
183
+ This project is licensed under the MIT License - see the [LICENSE](https://github.com/CopilotKit/CopilotKit/blob/main/LICENSE) file for details.
184
+
185
+ ## Support
186
+
187
+ - 📚 [Documentation](https://docs.copilotkit.ai)
188
+ - 💬 [Discord Community](https://discord.gg/6dffbvGU)
189
+ - 🐛 [Issue Tracker](https://github.com/CopilotKit/CopilotKit/issues)
190
+
191
+ ---
192
+
193
+ Built with ❤️ by the CopilotKit team
@@ -12,6 +12,11 @@ from .crewai_sdk import (
12
12
  copilotkit_exit,
13
13
  copilotkit_predict_state,
14
14
  )
15
+ from .utils import (
16
+ create_copilotkit_tool_handlers,
17
+ check_for_intercepted_actions,
18
+ COPILOTKIT_ACTION_INTERCEPTED_MARKER
19
+ )
15
20
  __all__ = [
16
21
  "CrewAIAgent",
17
22
  "CopilotKitProperties",
@@ -22,4 +27,7 @@ __all__ = [
22
27
  "copilotkit_stream",
23
28
  "copilotkit_exit",
24
29
  "copilotkit_predict_state",
30
+ "create_copilotkit_tool_handlers",
31
+ "check_for_intercepted_actions",
32
+ "COPILOTKIT_ACTION_INTERCEPTED_MARKER",
25
33
  ]
@@ -5,9 +5,9 @@ CrewAI Agent
5
5
  import uuid
6
6
  import json
7
7
  from copy import deepcopy
8
- from pydantic import BaseModel
9
8
  from typing import Optional, List, Callable
10
9
  from typing_extensions import TypedDict, NotRequired, Any, Dict, cast
10
+ from pydantic import BaseModel
11
11
  from crewai import Crew, Flow
12
12
  from crewai.flow import start
13
13
  from crewai.cli.crew_chat import (
@@ -25,7 +25,7 @@ from copilotkit.protocol import (
25
25
  emit_runtime_events,
26
26
  agent_state_message,
27
27
  )
28
- from .crewai_sdk import (
28
+ from copilotkit.crewai.crewai_sdk import (
29
29
  copilotkit_messages_to_crewai_flow,
30
30
  crewai_flow_messages_to_copilotkit,
31
31
  crewai_flow_async_runner,
@@ -0,0 +1,242 @@
1
+ """
2
+ CrewAI Event Listener
3
+ """
4
+ import asyncio
5
+ import uuid
6
+ import contextvars
7
+ import inspect
8
+ import json
9
+ import logging
10
+ import threading
11
+ import types
12
+ from typing import Any, Dict, List, Optional, Tuple, Union, cast, TypedDict
13
+
14
+ # Import from our centralized imports module
15
+ from copilotkit.crewai.imports import (
16
+ BaseEventListener,
17
+ FlowStartedEvent,
18
+ MethodExecutionStartedEvent,
19
+ MethodExecutionFinishedEvent,
20
+ FlowFinishedEvent,
21
+ LLMCallStartedEvent,
22
+ LLMCallCompletedEvent,
23
+ LLMCallFailedEvent,
24
+ LLMStreamChunkEvent,
25
+ ToolUsageStartedEvent,
26
+ ToolUsageFinishedEvent,
27
+ ToolUsageErrorEvent,
28
+ )
29
+ from copilotkit.runloop import queue_put
30
+ from copilotkit.protocol import (
31
+ RuntimeEventTypes,
32
+ RunStarted,
33
+ RunFinished,
34
+ NodeStarted,
35
+ NodeFinished,
36
+ TextMessageStart,
37
+ TextMessageContent,
38
+ )
39
+
40
class LocalExecutionContext(TypedDict):
    """Per-flow bookkeeping used by the CopilotKit event listener.

    Tracks which streamed LLM message and which tool call, if any,
    are currently in flight for a single flow execution.
    """
    # Id of the LLM message currently being streamed, or None when idle.
    current_message_id: Optional[str]
    # Id of the tool call currently executing, or None when idle.
    current_tool_call_id: Optional[str]
46
+
47
+
48
# Module-level registry mapping a source id to its LocalExecutionContext
# while a run is active. Flow/LLM handlers key it by id(source); tool
# handlers key it by event.source_id — presumably the same value, but
# that is not visible here (TODO confirm the keys agree).
_id_to_execution_context = {}
49
+
50
class CopilotKitCrewAIEventListener(BaseEventListener):
    """
    CopilotKit CrewAI event listener.

    Subscribes to CrewAI's event bus and translates flow lifecycle, method
    execution, LLM and tool-usage events into CopilotKit runtime events,
    which are pushed onto the run loop queue via ``queue_put``.
    """

    # -- execution-context bookkeeping ------------------------------------

    def _get_execution_context(self, flow_id: int) -> LocalExecutionContext:
        """Return the execution context stored for *flow_id*.

        Raises ``KeyError`` if no context was registered for that id.
        """
        return _id_to_execution_context[flow_id]

    def _set_execution_context(self, flow_id: int, execution_context: LocalExecutionContext):
        """Store (or replace) the execution context for *flow_id*."""
        _id_to_execution_context[flow_id] = execution_context

    def _delete_execution_context(self, flow_id: int):
        """Drop the execution context stored for *flow_id*."""
        del _id_to_execution_context[flow_id]

    # -- flow lifecycle events --------------------------------------------

    async def aon_flow_started(self, source, event): # pylint: disable=unused-argument
        """Emit RUN_STARTED and initialize a fresh context for this flow."""
        self._set_execution_context(id(source), LocalExecutionContext(
            current_message_id=None,
            current_tool_call_id=None,
        ))
        await queue_put(RunStarted(
            type=RuntimeEventTypes.RUN_STARTED,
            state=source.state,
        ), priority=True)

    async def aon_method_execution_started(self, source, event):
        """Emit NODE_STARTED for a flow method that began executing."""
        await queue_put(NodeStarted(
            type=RuntimeEventTypes.NODE_STARTED,
            node_name=event.method_name,
            state=source.state,
        ), priority=True)

    async def aon_method_execution_finished(self, source, event):
        """Emit NODE_FINISHED for a flow method that completed."""
        await queue_put(NodeFinished(
            type=RuntimeEventTypes.NODE_FINISHED,
            node_name=event.method_name,
            state=source.state,
        ), priority=True)

    async def aon_flow_finished(self, source, event): # pylint: disable=unused-argument
        """Emit RUN_FINISHED and discard the flow's execution context."""
        self._delete_execution_context(id(source))
        await queue_put(RunFinished(
            type=RuntimeEventTypes.RUN_FINISHED,
            state=source.state,
        ), priority=True)

    # -- LLM call events ---------------------------------------------------

    async def aon_llm_call_started(self, source, event): # pylint: disable=unused-argument
        """Allocate a fresh message id so streamed chunks can be attributed."""
        # NOTE(review): no TEXT_MESSAGE_START event is emitted for this
        # message id anywhere in this listener (TextMessageStart is imported
        # but unused) — confirm whether the runtime requires one before the
        # first TEXT_MESSAGE_CONTENT.
        self._set_execution_context(id(source), LocalExecutionContext(
            current_message_id=str(uuid.uuid4()),
            current_tool_call_id=None,
        ))

    async def aon_llm_call_completed(self, source, event): # pylint: disable=unused-argument
        """Clear the current message id once the LLM call has completed."""
        self._set_execution_context(id(source), LocalExecutionContext(
            current_message_id=None,
            current_tool_call_id=None,
        ))

    async def aon_llm_call_failed(self, source, event): # pylint: disable=unused-argument
        """Clear the current message id after a failed LLM call."""
        self._set_execution_context(id(source), LocalExecutionContext(
            current_message_id=None,
            current_tool_call_id=None,
        ))

    async def aon_llm_stream_chunk(self, source, event):
        """Forward a streamed LLM text chunk as a TEXT_MESSAGE_CONTENT event."""
        execution_context = self._get_execution_context(id(source))
        if execution_context["current_message_id"] is None:
            # Chunk arrived outside a tracked LLM call; nothing to emit.
            return
        # Bug fix: the original implementation print()-ed this event to
        # stdout instead of queueing it, so streamed text never reached the
        # CopilotKit runtime. Queue it like every other handler does.
        await queue_put(TextMessageContent(
            type=RuntimeEventTypes.TEXT_MESSAGE_CONTENT,
            messageId=execution_context["current_message_id"],
            content=event.chunk,
        ))

    # -- event-bus wiring --------------------------------------------------

    def setup_listeners(self, crewai_event_bus):
        """Register synchronous bridge callbacks that schedule the async handlers.

        NOTE(review): ``asyncio.get_running_loop()`` assumes the event bus
        invokes these callbacks on a thread with a running event loop —
        confirm this holds for all CrewAI execution modes.
        """
        @crewai_event_bus.on(FlowStartedEvent)
        def on_flow_started(source, event):
            asyncio.get_running_loop().create_task(self.aon_flow_started(source, event))

        @crewai_event_bus.on(MethodExecutionStartedEvent)
        def on_method_execution_started(source, event):
            asyncio.get_running_loop().create_task(self.aon_method_execution_started(source, event))

        @crewai_event_bus.on(MethodExecutionFinishedEvent)
        def on_method_execution_finished(source, event):
            asyncio.get_running_loop().create_task(
                self.aon_method_execution_finished(source, event)
            )

        @crewai_event_bus.on(FlowFinishedEvent)
        def on_flow_finished(source, event):
            asyncio.get_running_loop().create_task(self.aon_flow_finished(source, event))

        @crewai_event_bus.on(LLMCallStartedEvent)
        def on_llm_call_started(source, event):
            asyncio.get_running_loop().create_task(self.aon_llm_call_started(source, event))

        @crewai_event_bus.on(LLMCallCompletedEvent)
        def on_llm_call_completed(source, event):
            asyncio.get_running_loop().create_task(self.aon_llm_call_completed(source, event))

        @crewai_event_bus.on(LLMCallFailedEvent)
        def on_llm_call_failed(source, event):
            asyncio.get_running_loop().create_task(self.aon_llm_call_failed(source, event))

        @crewai_event_bus.on(LLMStreamChunkEvent)
        def on_llm_stream_chunk(source, event):
            asyncio.get_running_loop().create_task(self.aon_llm_stream_chunk(source, event))

        @crewai_event_bus.on(ToolUsageStartedEvent)
        def on_tool_usage_started(source, event: ToolUsageStartedEvent): # pylint: disable=unused-argument
            asyncio.get_running_loop().create_task(self.on_tool_usage_started(event))

        @crewai_event_bus.on(ToolUsageFinishedEvent)
        def on_tool_usage_finished(source, event: ToolUsageFinishedEvent): # pylint: disable=unused-argument
            asyncio.get_running_loop().create_task(self.on_tool_usage_finished(event))

        @crewai_event_bus.on(ToolUsageErrorEvent)
        def on_tool_usage_error(source, event: ToolUsageErrorEvent): # pylint: disable=unused-argument
            asyncio.get_running_loop().create_task(self.on_tool_usage_error(event))

    # -- tool usage events -------------------------------------------------

    async def on_tool_usage_started(self, event: ToolUsageStartedEvent):
        """Record that a tool call is in flight for this event source.

        NOTE(review): tool events are keyed by ``event.source_id`` while
        flow/LLM events are keyed by ``id(source)``, and these entries are
        never removed by ``aon_flow_finished`` — confirm the keys agree and
        that entries cannot accumulate across runs.
        """
        self._set_execution_context(event.source_id, LocalExecutionContext(
            current_message_id=None,  # message ids are not used for tools
            current_tool_call_id=str(uuid.uuid4()) if hasattr(event, "run_id") else None,
        ))

    async def on_tool_usage_finished(self, event: ToolUsageFinishedEvent):
        """Clear the in-flight tool call after it finished."""
        self._clear_tool_context(event.source_id)

    async def on_tool_usage_error(self, event: ToolUsageErrorEvent):
        """Clear the in-flight tool call after it errored."""
        self._clear_tool_context(event.source_id)

    def _clear_tool_context(self, source_id):
        """Reset the context for *source_id* to 'no active tool call'."""
        self._set_execution_context(source_id, LocalExecutionContext(
            current_message_id=None,
            current_tool_call_id=None,
        ))