copilotkit 0.1.43a0__py3-none-any.whl → 0.2.0a0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- copilotkit/crewai/__init__.py +8 -0
- copilotkit/crewai/crewai_agent.py +14 -6
- copilotkit/crewai/crewai_event_listener.py +242 -0
- copilotkit/crewai/crewai_sdk.py +75 -54
- copilotkit/crewai/imports.py +130 -0
- copilotkit/crewai/utils.py +166 -0
- copilotkit/langgraph.py +2 -2
- copilotkit/langgraph_agent.py +10 -2
- copilotkit-0.2.0a0.dist-info/METADATA +219 -0
- {copilotkit-0.1.43a0.dist-info → copilotkit-0.2.0a0.dist-info}/RECORD +11 -8
- {copilotkit-0.1.43a0.dist-info → copilotkit-0.2.0a0.dist-info}/WHEEL +1 -1
- copilotkit-0.1.43a0.dist-info/METADATA +0 -90
copilotkit/crewai/__init__.py
CHANGED
@@ -12,6 +12,11 @@ from .crewai_sdk import (
     copilotkit_exit,
     copilotkit_predict_state,
 )
+from .utils import (
+    create_copilotkit_tool_handlers,
+    check_for_intercepted_actions,
+    COPILOTKIT_ACTION_INTERCEPTED_MARKER
+)
 __all__ = [
     "CrewAIAgent",
     "CopilotKitProperties",
@@ -22,4 +27,7 @@ __all__ = [
     "copilotkit_stream",
     "copilotkit_exit",
     "copilotkit_predict_state",
+    "create_copilotkit_tool_handlers",
+    "check_for_intercepted_actions",
+    "COPILOTKIT_ACTION_INTERCEPTED_MARKER",
 ]
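
The new re-exports make the interception helpers available from the package namespace. A minimal import sketch, assuming the package is installed with the `crewai` extra:

```python
# Sketch: importing the helpers re-exported by copilotkit/crewai/__init__.py above.
from copilotkit.crewai import (
    create_copilotkit_tool_handlers,
    check_for_intercepted_actions,
    COPILOTKIT_ACTION_INTERCEPTED_MARKER,
)

# COPILOTKIT_ACTION_INTERCEPTED_MARKER is the plain string constant that wrapped
# handlers return when a frontend-owned action is intercepted (see utils.py below).
print(COPILOTKIT_ACTION_INTERCEPTED_MARKER)
```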
copilotkit/crewai/crewai_agent.py
CHANGED

@@ -5,9 +5,9 @@ CrewAI Agent
 import uuid
 import json
 from copy import deepcopy
-from pydantic import BaseModel
 from typing import Optional, List, Callable
 from typing_extensions import TypedDict, NotRequired, Any, Dict, cast
+from pydantic import BaseModel
 from crewai import Crew, Flow
 from crewai.flow import start
 from crewai.cli.crew_chat import (
@@ -25,12 +25,13 @@ from copilotkit.protocol import (
     emit_runtime_events,
     agent_state_message,
 )
-from .crewai_sdk import (
+from copilotkit.crewai.crewai_sdk import (
     copilotkit_messages_to_crewai_flow,
     crewai_flow_messages_to_copilotkit,
     crewai_flow_async_runner,
     copilotkit_stream,
-    copilotkit_exit
+    copilotkit_exit,
+    logger
 )

 from copilotkit.runloop import copilotkit_run, CopilotKitRunExecution
@@ -296,14 +297,21 @@ class CrewAIAgent(Agent):
         if self.flow and self.flow._persistence: # pylint: disable=protected-access
             try:
                 stored_state = self.flow._persistence.load_state(thread_id) # pylint: disable=protected-access
+                messages = []
+                if "messages" in stored_state and stored_state["messages"]:
+                    try:
+                        messages = crewai_flow_messages_to_copilotkit(stored_state["messages"])
+                    except Exception as e: # pylint: disable=broad-except
+                        # If conversion fails, we'll return empty messages
+                        logger.warning(f"Failed to convert messages from stored state: {str(e)}")
                 return {
                     "threadId": thread_id,
                     "threadExists": True,
                     "state": stored_state,
-                    "messages":
+                    "messages": messages
                 }
-            except: # pylint: disable=
+            except Exception as e: # pylint: disable=broad-except
+                logger.warning(f"Failed to load state for thread {thread_id}: {str(e)}")

         return {
             "threadId": thread_id,
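
The thread-state change above now converts persisted flow messages with crewai_flow_messages_to_copilotkit and logs conversion or load failures instead of silently swallowing them. For orientation, a hedged sketch of registering a CrewAIAgent on an endpoint; the constructor keyword names are assumptions, since this hunk only shows the agent reading self.flow:

```python
# Sketch only: `name`, `description` and `flow` are assumed keyword arguments;
# the diff above only shows that the agent holds a `self.flow` with persistence.
from copilotkit import CopilotKitRemoteEndpoint
from copilotkit.crewai import CrewAIAgent
from crewai import Flow
from crewai.flow import start

class GreetingFlow(Flow):
    @start()
    def say_hello(self):
        return "hello"

endpoint = CopilotKitRemoteEndpoint(
    agents=[
        CrewAIAgent(
            name="greeting_agent",
            description="Greets the user",
            flow=GreetingFlow(),
        )
    ]
)
```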
copilotkit/crewai/crewai_event_listener.py
ADDED
@@ -0,0 +1,242 @@

"""
CrewAI Event Listener
"""
import asyncio
import uuid
import contextvars
import inspect
import json
import logging
import threading
import types
from typing import Any, Dict, List, Optional, Tuple, Union, cast, TypedDict

# Import from our centralized imports module
from copilotkit.crewai.imports import (
    BaseEventListener,
    FlowStartedEvent,
    MethodExecutionStartedEvent,
    MethodExecutionFinishedEvent,
    FlowFinishedEvent,
    LLMCallStartedEvent,
    LLMCallCompletedEvent,
    LLMCallFailedEvent,
    LLMStreamChunkEvent,
    ToolUsageStartedEvent,
    ToolUsageFinishedEvent,
    ToolUsageErrorEvent,
)
from copilotkit.runloop import queue_put
from copilotkit.protocol import (
    RuntimeEventTypes,
    RunStarted,
    RunFinished,
    NodeStarted,
    NodeFinished,
    TextMessageStart,
    TextMessageContent,
)

class LocalExecutionContext(TypedDict):
    """
    Local execution context
    """
    current_message_id: Optional[str]
    current_tool_call_id: Optional[str]


_id_to_execution_context = {}

class CopilotKitCrewAIEventListener(BaseEventListener):
    """
    CopilotKit CrewAI Event Listener
    """

    def _get_execution_context(self, flow_id: int) -> LocalExecutionContext:
        """
        Get the execution context for a given ID
        """
        return _id_to_execution_context[flow_id]

    def _set_execution_context(self, flow_id: int, execution_context: LocalExecutionContext):
        """
        Set the execution context for a given ID
        """
        _id_to_execution_context[flow_id] = execution_context

    def _delete_execution_context(self, flow_id: int):
        """
        Delete the execution context for a given ID
        """
        del _id_to_execution_context[flow_id]

    # Flow lifecycle events

    async def aon_flow_started(self, source, event): # pylint: disable=unused-argument
        """
        Handle a flow started event
        """
        self._set_execution_context(id(source), LocalExecutionContext(
            current_message_id=None,
            current_tool_call_id=None
        ))
        await queue_put(RunStarted(
            type=RuntimeEventTypes.RUN_STARTED,
            state=source.state
        ), priority=True)

    async def aon_method_execution_started(self, source, event):
        """
        Handle a method execution started event
        """
        await queue_put(NodeStarted(
            type=RuntimeEventTypes.NODE_STARTED,
            node_name=event.method_name,
            state=source.state
        ), priority=True)

    async def aon_method_execution_finished(self, source, event):
        """
        Handle a method execution finished event
        """
        await queue_put(NodeFinished(
            type=RuntimeEventTypes.NODE_FINISHED,
            node_name=event.method_name,
            state=source.state
        ), priority=True)

    async def aon_flow_finished(self, source, event): # pylint: disable=unused-argument
        """
        Handle a flow finished event
        """
        self._delete_execution_context(id(source))
        await queue_put(RunFinished(
            type=RuntimeEventTypes.RUN_FINISHED,
            state=source.state
        ), priority=True)

    # LLM call events

    async def aon_llm_call_started(self, source, event): # pylint: disable=unused-argument
        """
        Handle an LLM call started event
        """
        message_id = str(uuid.uuid4())
        self._set_execution_context(id(source), LocalExecutionContext(
            current_message_id=message_id,
            current_tool_call_id=None
        ))

    async def aon_llm_call_completed(self, source, event): # pylint: disable=unused-argument
        """
        Handle an LLM call completed event
        """
        self._set_execution_context(id(source), LocalExecutionContext(
            current_message_id=None,
            current_tool_call_id=None
        ))

    async def aon_llm_call_failed(self, source, event): # pylint: disable=unused-argument
        """
        Handle an LLM call failed event
        """
        self._set_execution_context(id(source), LocalExecutionContext(
            current_message_id=None,
            current_tool_call_id=None
        ))

    async def aon_llm_stream_chunk(self, source, event):
        """
        Handle an LLM stream chunk event
        """
        execution_context = self._get_execution_context(id(source))
        if execution_context["current_message_id"] is None:
            return
        print(TextMessageContent(
            type=RuntimeEventTypes.TEXT_MESSAGE_CONTENT,
            messageId=execution_context["current_message_id"],
            content=event.chunk
        ), flush=True)

    def setup_listeners(self, crewai_event_bus):
        """
        Setup listeners for CrewAI events
        """
        @crewai_event_bus.on(FlowStartedEvent)
        def on_flow_started(source, event):
            asyncio.get_running_loop().create_task(self.aon_flow_started(source, event))

        @crewai_event_bus.on(MethodExecutionStartedEvent)
        def on_method_execution_started(source, event):
            asyncio.get_running_loop().create_task(self.aon_method_execution_started(source, event))

        @crewai_event_bus.on(MethodExecutionFinishedEvent)
        def on_method_execution_finished(source, event):
            asyncio.get_running_loop().create_task(
                self.aon_method_execution_finished(source, event)
            )

        @crewai_event_bus.on(FlowFinishedEvent)
        def on_flow_finished(source, event):
            asyncio.get_running_loop().create_task(self.aon_flow_finished(source, event))

        @crewai_event_bus.on(LLMCallStartedEvent)
        def on_llm_call_started(source, event):
            asyncio.get_running_loop().create_task(self.aon_llm_call_started(source, event))

        @crewai_event_bus.on(LLMCallCompletedEvent)
        def on_llm_call_completed(source, event):
            asyncio.get_running_loop().create_task(self.aon_llm_call_completed(source, event))

        @crewai_event_bus.on(LLMCallFailedEvent)
        def on_llm_call_failed(source, event):
            asyncio.get_running_loop().create_task(self.aon_llm_call_failed(source, event))

        @crewai_event_bus.on(LLMStreamChunkEvent)
        def on_llm_stream_chunk(source, event):
            asyncio.get_running_loop().create_task(self.aon_llm_stream_chunk(source, event))

        @crewai_event_bus.on(ToolUsageStartedEvent)
        def on_tool_usage_started(source, event: ToolUsageStartedEvent):
            asyncio.get_running_loop().create_task(self.on_tool_usage_started(event))

        @crewai_event_bus.on(ToolUsageFinishedEvent)
        def on_tool_usage_finished(source, event: ToolUsageFinishedEvent):
            asyncio.get_running_loop().create_task(self.on_tool_usage_finished(event))

        @crewai_event_bus.on(ToolUsageErrorEvent)
        def on_tool_usage_error(source, event: ToolUsageErrorEvent):
            asyncio.get_running_loop().create_task(self.on_tool_usage_error(event))

    async def on_tool_usage_started(self, event: ToolUsageStartedEvent):
        """CrewAI's on_tool_usage_started event."""
        # Create a proper LocalExecutionContext
        execution_context = LocalExecutionContext(
            current_message_id=None,  # Not using message ID for tools
            current_tool_call_id=str(uuid.uuid4()) if hasattr(event, "run_id") else None
        )

        # Store the event in the execution context
        self._set_execution_context(event.source_id, execution_context)

    async def on_tool_usage_finished(self, event: ToolUsageFinishedEvent):
        """CrewAI's on_tool_usage_finished event."""
        # Create a proper LocalExecutionContext
        execution_context = LocalExecutionContext(
            current_message_id=None,  # Not using message ID for tools
            current_tool_call_id=None  # Tool call is finished
        )

        # Store the event in the execution context
        self._set_execution_context(event.source_id, execution_context)

    async def on_tool_usage_error(self, event: ToolUsageErrorEvent):
        """CrewAI's on_tool_usage_error event."""
        # Create a proper LocalExecutionContext
        execution_context = LocalExecutionContext(
            current_message_id=None,  # Not using message ID for tools
            current_tool_call_id=None  # Tool call has errored
        )

        # Store the event in the execution context
        self._set_execution_context(event.source_id, execution_context)
copilotkit/crewai/crewai_sdk.py
CHANGED
@@ -2,10 +2,13 @@
 CrewAI integration for CopilotKit
 """

+# new event listener docs
+# https://github.com/crewAIInc/crewAI/blob/main/docs/concepts/event-listener.mdx
+
+#
 import uuid
 import json
-import
-from typing_extensions import Any, Dict, List, Literal
+from typing_extensions import Any, Dict, List, Literal, Optional
 from pydantic import BaseModel, Field
 from litellm.types.utils import (
     ModelResponse,
@@ -16,23 +19,12 @@ from litellm.types.utils import (
 )
 from litellm.litellm_core_utils.streaming_handler import CustomStreamWrapper
 from crewai.flow.flow import FlowState, Flow
-from crewai.flow.flow_events import (
-    Event as CrewAIFlowEvent,
-    FlowStartedEvent,
-    MethodExecutionStartedEvent,
-    MethodExecutionFinishedEvent,
-    FlowFinishedEvent,
-)
 from copilotkit.types import Message
 from copilotkit.logging import get_logger
 from copilotkit.runloop import queue_put, get_context_execution
 from copilotkit.protocol import (
     RuntimeEventTypes,
-    RunStarted,
-    RunFinished,
     RunError,
-    NodeStarted,
-    NodeFinished,
     agent_state_message,
     text_message_start,
     text_message_content,
@@ -44,12 +36,40 @@ from copilotkit.protocol import (
     RuntimeMetaEventName,
     PredictStateConfig
 )
+from copilotkit.crewai.crewai_event_listener import CopilotKitCrewAIEventListener

 logger = get_logger(__name__)

+copilotkit_crewai_event_listener = CopilotKitCrewAIEventListener()
+
+
 class CopilotKitProperties(BaseModel):
-    """
+    """
+    CopilotKit properties for the state
+    """
+    actions: List[Dict[str, Any]] = []
+
+    # Properties for tool call interception
+    _intercepted_action: bool = False
+    _last_intercepted_action_name: Optional[str] = None
+    _last_intercepted_action_args: Optional[Dict[str, Any]] = None
+
+    # Allow arbitrary properties
+    model_config = {
+        "extra": "allow",
+        "arbitrary_types_allowed": True
+    }
+
+    def __setattr__(self, name, value):
+        """
+        Custom __setattr__ to handle dynamic properties even in strict Pydantic mode
+        """
+        try:
+            # Try standard setattr first
+            super().__setattr__(name, value)
+        except Exception:
+            # If that fails, store it in __dict__ directly
+            object.__setattr__(self, name, value)

 class CopilotKitState(FlowState):
     """CopilotKit state"""
@@ -62,47 +82,48 @@ async def crewai_flow_async_runner(flow: Flow, inputs: Dict[str, Any]):
     asyncio.run().
     """

+    # Note: In CrewAI 0.114.0+, these events are in utilities.events
+    # async def crewai_flow_event_subscriber(flow: Any, event: CrewAIFlowEvent):
+    #     if isinstance(event, FlowStartedEvent):
+    #         await queue_put(RunStarted(
+    #             type=RuntimeEventTypes.RUN_STARTED,
+    #             state=flow.state
+    #         ), priority=True)
+    #     elif isinstance(event, MethodExecutionStartedEvent):
+    #         await queue_put(NodeStarted(
+    #             type=RuntimeEventTypes.NODE_STARTED,
+    #             node_name=event.method_name,
+    #             state=flow.state
+    #         ), priority=True)
+    #     elif isinstance(event, MethodExecutionFinishedEvent):
+    #         await queue_put(NodeFinished(
+    #             type=RuntimeEventTypes.NODE_FINISHED,
+    #             node_name=event.method_name,
+    #             state=flow.state
+    #         ), priority=True)
+    #     elif isinstance(event, FlowFinishedEvent):
+    #         await queue_put(RunFinished(
+    #             type=RuntimeEventTypes.RUN_FINISHED,
+    #             state=flow.state
+    #         ), priority=True)
+
+    # def crewai_flow_event_subscriber_sync(flow: Any, event: CrewAIFlowEvent):
+    #     loop = asyncio.get_running_loop()
+    #     loop.call_soon(lambda: asyncio.create_task(crewai_flow_event_subscriber(flow, event)))
+
+    # flow.event_emitter.connect(crewai_flow_event_subscriber_sync)

     try:
-        flow.event_emitter.send(
-        )
-        flow._initialize_state(inputs) # pylint: disable=protected-access
-        await flow.kickoff_async()
+        # flow.event_emitter.send(
+        #     flow,
+        #     event=FlowStartedEvent(
+        #         type="flow_started",
+        #         flow_name=flow.__class__.__name__,
+        #     ),
+        # )
+
+        # flow._initialize_state(inputs) # pylint: disable=protected-access
+        await flow.kickoff_async(inputs=inputs)
     except Exception as e: # pylint: disable=broad-except
         await queue_put(RunError(
             type=RuntimeEventTypes.RUN_ERROR,
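
Two things are worth noting here: importing copilotkit.crewai.crewai_sdk now instantiates CopilotKitCrewAIEventListener at module level, which is how the new event listener gets wired up, and the reworked CopilotKitProperties tolerates extra attributes so the interception flags can be stashed on state.copilotkit. A hedged sketch of a flow state built on CopilotKitState; the `copilotkit` field name is inferred from utils.py reading state.copilotkit.actions rather than shown directly in this hunk:

```python
# Sketch, assuming CopilotKitState exposes a `copilotkit: CopilotKitProperties` field
# (utils.py below reads state.copilotkit.actions; the field name is inferred here).
from typing import Any, Dict, List
from copilotkit.crewai.crewai_sdk import CopilotKitState

class ChatFlowState(CopilotKitState):
    """Flow state for a chat flow; app fields live alongside the copilotkit properties."""
    messages: List[Dict[str, Any]] = []
    proposal: Dict[str, Any] = {}
```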
copilotkit/crewai/imports.py
ADDED
@@ -0,0 +1,130 @@

"""
Centralized imports for CrewAI integration.

This module handles importing from different versions of CrewAI to maintain backward compatibility.
"""
from typing import Any, Dict, Optional, Union
import importlib.util

# First check if the tool_usage_events module exists
tool_usage_events_spec = importlib.util.find_spec("crewai.tools.tool_usage_events")
has_tool_usage_events = tool_usage_events_spec is not None

# Try to import from the new locations first (crewai 0.114.0+)
try:
    # Check if the new events module exists
    from crewai.events import (
        BaseEventListener,
        FlowStartedEvent,
        MethodExecutionStartedEvent,
        MethodExecutionFinishedEvent,
        FlowFinishedEvent,
        LLMCallStartedEvent,
        LLMCallCompletedEvent,
        LLMCallFailedEvent,
        LLMStreamChunkEvent,
    )

    # Import tool events from appropriate location
    if has_tool_usage_events:
        from crewai.tools.tool_usage_events import (
            ToolUsageStartedEvent,
            ToolUsageFinishedEvent,
            ToolUsageErrorEvent,
        )
    else:
        # Try to get them from the same module
        from crewai.events import (
            ToolUsageStartedEvent,
            ToolUsageFinishedEvent,
            ToolUsageErrorEvent,
        )
# Fall back to older import locations (pre-0.114.0)
except ImportError:
    try:
        from crewai.utilities.events import (
            BaseEventListener,
            FlowStartedEvent,
            MethodExecutionStartedEvent,
            MethodExecutionFinishedEvent,
            FlowFinishedEvent,
            LLMCallStartedEvent,
            LLMCallCompletedEvent,
            LLMCallFailedEvent,
            LLMStreamChunkEvent,
        )

        # Also try to get tool events from the same module
        from crewai.utilities.events import (
            ToolUsageStartedEvent,
            ToolUsageFinishedEvent,
            ToolUsageErrorEvent,
        )
    except ImportError:
        # Define stub classes for environments without CrewAI
        class BaseEventStub:
            pass

        class BaseEventListener(BaseEventStub):
            async def aon_flow_started(self, event):
                pass

            async def aon_method_execution_started(self, event):
                pass

            async def aon_method_execution_finished(self, event):
                pass

            async def aon_flow_finished(self, event):
                pass

            async def aon_llm_call_started(self, event):
                pass

            async def aon_llm_call_completed(self, event):
                pass

            async def aon_llm_call_failed(self, event):
                pass

            async def aon_llm_stream_chunk(self, event):
                pass

        # Define stub event classes
        class EventStub(BaseEventStub):
            def __init__(self, **kwargs):
                for key, value in kwargs.items():
                    setattr(self, key, value)

        class FlowStartedEvent(EventStub):
            pass

        class MethodExecutionStartedEvent(EventStub):
            pass

        class MethodExecutionFinishedEvent(EventStub):
            pass

        class FlowFinishedEvent(EventStub):
            pass

        class LLMCallStartedEvent(EventStub):
            pass

        class LLMCallCompletedEvent(EventStub):
            pass

        class LLMCallFailedEvent(EventStub):
            pass

        class LLMStreamChunkEvent(EventStub):
            pass

        class ToolUsageStartedEvent(EventStub):
            pass

        class ToolUsageFinishedEvent(EventStub):
            pass

        class ToolUsageErrorEvent(EventStub):
            pass
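
Because every event symbol is resolved through this module, downstream code can stay agnostic about which CrewAI version (or none at all) is installed. A small sketch of the intended consumption pattern:

```python
# Consume event classes via copilotkit.crewai.imports rather than from crewai directly,
# so the 0.114.0+ layout, the pre-0.114.0 layout, and the no-CrewAI stubs all resolve
# to the same names.
from copilotkit.crewai.imports import LLMStreamChunkEvent, ToolUsageStartedEvent

def describe(event: object) -> str:
    if isinstance(event, LLMStreamChunkEvent):
        return "llm stream chunk"
    if isinstance(event, ToolUsageStartedEvent):
        return "tool usage started"
    return "other event"
```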
copilotkit/crewai/utils.py
ADDED
@@ -0,0 +1,166 @@

"""
Utilities for integrating CopilotKit with CrewAI
"""
from typing import Dict, List, Any, Callable
import uuid
import json
import logging

# Setup logger
logger = logging.getLogger("copilotkit.crewai.utils")

# Global variable to track interception when state modification fails
_COPILOTKIT_INTERCEPTED_ACTION = None

# Constants
COPILOTKIT_ACTION_INTERCEPTED_MARKER = "[[COPILOTKIT_ACTION_INTERCEPTED]]"


def create_copilotkit_tool_handlers(
    original_handlers: Dict[str, Callable],
    copilotkit_actions: List[Dict[str, Any]],
    state: Any
) -> Dict[str, Callable]:
    """
    Creates a wrapper around tool handlers that intercepts CopilotKit actions.

    Args:
        original_handlers: Dict of original tool handler functions
        copilotkit_actions: List of CopilotKit action definitions
        state: The flow state object where flagging will occur

    Returns:
        Dict of wrapped tool handler functions
    """
    # Extract action names from CopilotKit actions
    copilotkit_action_names = [
        action["function"]["name"]
        for action in copilotkit_actions
    ]

    # Store action definitions by name for parameter validation
    action_definitions = {}
    for action in copilotkit_actions:
        action_name = action["function"]["name"]
        action_definitions[action_name] = action

    logger.debug(f"Setting up interceptors for actions: {copilotkit_action_names}")

    def interceptor(name):
        def handle_tool_call(**args):
            global _COPILOTKIT_INTERCEPTED_ACTION

            if name in copilotkit_action_names:
                logger.debug(f"Intercepting call to action: {name}")

                # Check for required parameters
                if name in action_definitions:
                    action_def = action_definitions[name]
                    missing_required_params = []

                    # Extract required parameters from action definition
                    if "parameters" in action_def["function"]:
                        parameters = action_def["function"]["parameters"]
                        if "properties" in parameters:
                            for param_name, param_props in parameters["properties"].items():
                                # Check if parameter is required
                                is_required = False
                                if "required" in parameters and param_name in parameters["required"]:
                                    is_required = True

                                # Check if required parameter is missing
                                if is_required and (param_name not in args or args[param_name] is None):
                                    missing_required_params.append(param_name)

                    # If required parameters are missing, return a message asking for them
                    if missing_required_params:
                        logger.debug(f"Missing required parameters: {missing_required_params}")
                        missing_params_str = ", ".join(missing_required_params)
                        return f"I need the following information before I can proceed: {missing_params_str}. Can you please provide these details?"

                # 1. Always set the global variable first for maximum reliability
                _COPILOTKIT_INTERCEPTED_ACTION = {
                    "intercepted": True,
                    "name": name,
                    "args": args
                }

                # 2. Try to store it in the state as well for redundancy
                try:
                    # Try to store it directly in the copilotkit namespace
                    if hasattr(state.copilotkit, "__dict__"):
                        state.copilotkit.__dict__["_intercepted_action"] = True
                        state.copilotkit.__dict__["_last_intercepted_action_name"] = name
                        state.copilotkit.__dict__["_last_intercepted_action_args"] = args
                except Exception as e:
                    logger.debug(f"Failed to store intercepted state in copilotkit: {e}")

                # 3. Create a proper tool_call object and add it to the next message
                try:
                    # Generate a unique ID for the tool call
                    tool_call_id = str(uuid.uuid4())

                    # Create a properly formatted tool call object
                    tool_call = {
                        "id": tool_call_id,
                        "type": "function",
                        "function": {
                            "name": name,
                            "arguments": json.dumps(args)
                        }
                    }

                    # Store this tool call in state for the next message
                    if hasattr(state, "_next_message_tool_calls"):
                        state._next_message_tool_calls.append(tool_call)
                    else:
                        setattr(state, "_next_message_tool_calls", [tool_call])
                except Exception as e:
                    logger.debug(f"Failed to create tool call: {e}")

                # Return a special marker string that LLM response handling can detect
                # This should be a valid non-empty string so the LLM response processor doesn't break
                # but it should be replaced in the final message to the user
                return COPILOTKIT_ACTION_INTERCEPTED_MARKER

            # Otherwise, call the original handler
            if name in original_handlers:
                return original_handlers[name](**args)

            # Fallback if handler not found
            return f"No handler found for tool '{name}'"

        return handle_tool_call

    # Create a wrapped handler for each original handler
    return {name: interceptor(name) for name in set(list(original_handlers.keys()) + copilotkit_action_names)}


def check_for_intercepted_actions(state: Any) -> bool:
    """
    Checks if a CopilotKit action was intercepted during the last LLM call.
    If so, clears the flag and returns True.

    Args:
        state: The flow state object

    Returns:
        bool: True if a CopilotKit action was intercepted
    """
    global _COPILOTKIT_INTERCEPTED_ACTION

    # First try the copilotkit namespace approach
    try:
        if hasattr(state.copilotkit, "_intercepted_action") and state.copilotkit._intercepted_action:
            # Clear the flag but keep the action details
            state.copilotkit._intercepted_action = False
            return True
    except Exception:
        pass

    # Then try the global variable approach
    if _COPILOTKIT_INTERCEPTED_ACTION and _COPILOTKIT_INTERCEPTED_ACTION.get("intercepted", False):
        _COPILOTKIT_INTERCEPTED_ACTION["intercepted"] = False
        return True

    return False
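
Since an intercepted call returns COPILOTKIT_ACTION_INTERCEPTED_MARKER instead of a real tool result, the flow should scrub that marker before the assistant text reaches the user. A hedged sketch of one way to do that; the helper below is illustrative, not part of the package:

```python
# Hypothetical helper (not part of this package): strip the interception marker
# from an assistant message before appending it to the visible conversation.
from copilotkit.crewai import COPILOTKIT_ACTION_INTERCEPTED_MARKER

def scrub_intercepted_marker(content: str) -> str:
    if COPILOTKIT_ACTION_INTERCEPTED_MARKER in content:
        return content.replace(COPILOTKIT_ACTION_INTERCEPTED_MARKER, "").strip()
    return content
```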
copilotkit/langgraph.py
CHANGED
@@ -487,6 +487,6 @@ def copilotkit_interrupt(
         "__copilotkit_interrupt_value__": interrupt_values,
         "__copilotkit_messages__": [interrupt_message]
     })
-    answer = response.content
+    answer = response[-1].content

-    return answer,
+    return answer, response
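
With this fix copilotkit_interrupt returns both the latest answer text and the full list of interrupt messages instead of a dangling one-element tuple. A hedged usage sketch; the keyword argument is assumed, since the function signature is not part of this hunk:

```python
# Sketch only: `message=` is an assumed keyword; the hunk above only shows the return
# value changing to `(answer, response)`.
from copilotkit.langgraph import copilotkit_interrupt

def confirm_node(state):
    answer, interrupt_messages = copilotkit_interrupt(message="Do you want to send this email?")
    # `answer` is the content of the last interrupt message; `interrupt_messages`
    # is the full list produced by the interrupt.
    return {**state, "messages": state["messages"] + interrupt_messages}
```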
copilotkit/langgraph_agent.py
CHANGED
@@ -10,6 +10,7 @@ from langgraph.types import Command
 from langchain.load.dump import dumps as langchain_dumps
 from langchain.schema import BaseMessage, SystemMessage
 from langchain_core.runnables import RunnableConfig, ensure_config
+from langchain_core.messages import RemoveMessage

 from partialjson.json_parser import JSONParser

@@ -68,7 +69,13 @@ def langgraph_default_merge_state( # pylint: disable=unused-argument

     existing_messages = state.get("messages", [])
     existing_message_ids = {message.id for message in existing_messages}
+    message_ids = {message.id for message in messages}
+    removed_messages = []
+    if len(messages) < len(existing_messages):
+        # messages were removed
+        removed_messages = [RemoveMessage(id=existing_message.id) for existing_message in existing_messages if existing_message.id not in message_ids]
+
+    new_messages = removed_messages + [message for message in messages if message.id not in existing_message_ids]

     return {
         **state,
@@ -273,7 +280,8 @@ class LangGraphAgent(Agent):

         # An active interrupt event that runs through messages. Use latest message as response
         if self.active_interrupt_event and interrupt_from_meta_events is None:
+            # state["messages"] only includes the messages we need to add at this point, tool call+result if applicable, and user text
+            resume_input = Command(resume=state["messages"])

         if interrupt_from_meta_events and "response" in interrupt_from_meta_events:
             resume_input = Command(resume=interrupt_from_meta_events["response"])
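
The merge now emits RemoveMessage entries whenever the incoming message list is shorter than what the graph already holds, so deletions made on the frontend propagate into LangGraph state. A small self-contained illustration of that removal rule (not the full merge function, whose signature is not shown here):

```python
# Standalone illustration of the rule added above: existing messages whose ids are
# missing from the incoming list become RemoveMessage entries, and genuinely new
# messages are appended.
from langchain_core.messages import HumanMessage, RemoveMessage

existing = [HumanMessage(content="hi", id="m1"), HumanMessage(content="bye", id="m2")]
incoming = [HumanMessage(content="hi", id="m1")]  # the frontend deleted m2

incoming_ids = {m.id for m in incoming}
removed = (
    [RemoveMessage(id=m.id) for m in existing if m.id not in incoming_ids]
    if len(incoming) < len(existing)
    else []
)
existing_ids = {m.id for m in existing}
merged_delta = removed + [m for m in incoming if m.id not in existing_ids]
# merged_delta == [RemoveMessage(id="m2")]
```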
copilotkit-0.2.0a0.dist-info/METADATA
ADDED
@@ -0,0 +1,219 @@

Metadata-Version: 2.3
Name: copilotkit
Version: 0.2.0a0
Summary: CopilotKit python SDK
License: MIT
Keywords: copilot,copilotkit,langgraph,langchain,ai,langsmith,langserve
Author: Markus Ecker
Author-email: markus.ecker@gmail.com
Requires-Python: >=3.10,<3.13
Classifier: License :: OSI Approved :: MIT License
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Provides-Extra: crewai
Requires-Dist: crewai (==0.114.0) ; extra == "crewai"
Requires-Dist: fastapi (>=0.115.0,<0.116.0)
Requires-Dist: langchain (>=0.3.3,<0.4.0)
Requires-Dist: langgraph (>=0.2.50)
Requires-Dist: partialjson (>=0.0.8,<0.0.9)
Requires-Dist: toml (>=0.10.2,<0.11.0)
Project-URL: Homepage, https://copilotkit.ai
Project-URL: Repository, https://github.com/CopilotKit/CopilotKit/tree/main/sdk-python
Description-Content-Type: text/markdown

# CopilotKit Python SDK

[](https://badge.fury.io/py/copilotkit)
[](https://opensource.org/licenses/MIT)

The official Python SDK for CopilotKit - build AI copilots and agents into your applications.

## Features

- 🚀 Integration with LangGraph and LangChain
- 🔄 Support for stateful conversations
- 🔌 FastAPI integration for serving endpoints
- 🤝 Optional CrewAI integration (requires CrewAI 0.114.0+)

## Version 0.2.0 Breaking Changes

⚠️ As of version 0.2.0, CopilotKit Python SDK now requires CrewAI 0.114.0 or higher.
If you're using CrewAI with CopilotKit, you must update your CrewAI version.

## Installation

```bash
pip install copilotkit
```

With CrewAI support:

```bash
pip install "copilotkit[crewai]"
```

## Quick Start

```python
from copilotkit import CopilotKitRemoteEndpoint, Action
from copilotkit.integrations.fastapi import add_fastapi_endpoint
from fastapi import FastAPI

# Define an action handler function
def greet_user_handler(name: str):
    return f"Hello, {name}!"

# Initialize the SDK endpoint
endpoint = CopilotKitRemoteEndpoint(
    actions=[
        Action(
            name="greet_user",
            handler=greet_user_handler,
            description="Greet the user",
            parameters=[
                {
                    "name": "name",
                    "type": "string",
                    "description": "The name of the user"
                }
            ]
        )
    ]
)

# Create FastAPI app and add CopilotKit endpoint
app = FastAPI()
add_fastapi_endpoint(app, endpoint, "/copilotkit")
```

## LangGraph Integration

CopilotKit provides seamless integration with LangGraph for building sophisticated agent workflows.

### Using LangGraphAgent

```python
from copilotkit import CopilotKitRemoteEndpoint, LangGraphAgent
from langgraph.graph import StateGraph, MessagesState

# Create your LangGraph
def create_graph():
    workflow = StateGraph(MessagesState)
    # ... define your graph nodes and edges
    return workflow.compile()

# Initialize the SDK with a LangGraph agent
endpoint = CopilotKitRemoteEndpoint(
    agents=[
        LangGraphAgent(
            name="email_agent",
            description="This agent sends emails",
            graph=create_graph(),
        )
    ]
)
```

### Customizing LangGraph Behavior

CopilotKit provides utilities to customize how your LangGraph agents work:

```python
from copilotkit.langgraph import copilotkit_customize_config, copilotkit_emit_state

# Customize configuration
config = copilotkit_customize_config(
    base_config=None,  # or your existing config
    emit_messages=True,
    emit_tool_calls=True,
    emit_intermediate_state=[
        {
            "state_key": "progress",
            "tool": "ProgressTool",
            "tool_argument": "steps"
        },
    ]
)

# Emit intermediate state
async def long_running_node(state):
    for i in range(10):
        await some_operation(i)
        await copilotkit_emit_state(config, {"progress": i})
    return state
```

## CrewAI Integration

CopilotKit provides integration with CrewAI for building multi-agent systems.

### Intercepting CopilotKit Actions in CrewAI Flows

When building flows that use CopilotKit actions, you need to properly intercept any tool calls that should be handled by the frontend:

```python
from copilotkit.crewai import create_copilotkit_tool_handlers, check_for_intercepted_actions

@router(start_flow)
async def chat(self):
    # Your existing code...

    # 1. Create wrapped tool handlers
    wrapped_handlers = create_copilotkit_tool_handlers(
        original_handlers=tool_handlers,  # Your original tool handlers
        copilotkit_actions=self.state.copilotkit.actions,  # From CopilotKitState
        state=self.state  # Flow state for flagging
    )

    # 2. Use wrapped handlers with LLM call
    response = llm.call(
        messages=[...],
        tools=[
            *self.state.copilotkit.actions,  # Include CopilotKit actions
            YOUR_OTHER_TOOLS...
        ],
        available_functions=wrapped_handlers  # Use wrapped handlers
    )

    # 3. Create response message as usual
    message = {"role": "assistant", "content": response}
    self.state.messages.append(message)

    # 4. Check if a CopilotKit action was intercepted
    if check_for_intercepted_actions(self.state):
        # Return to frontend to handle the action
        return "route_end"

    # Continue with normal flow...
```

This pattern ensures that:

1. CopilotKit actions are properly identified
2. The frontend gets control when needed
3. Other tools continue to work normally

## Documentation

For detailed documentation and examples, visit [copilotkit.ai](https://copilotkit.ai)

## Contributing

We welcome contributions! Please see our [Contributing Guidelines](https://github.com/CopilotKit/CopilotKit/blob/main/CONTRIBUTING.md) for details.

## License

This project is licensed under the MIT License - see the [LICENSE](https://github.com/CopilotKit/CopilotKit/blob/main/LICENSE) file for details.

## Support

- 📚 [Documentation](https://docs.copilotkit.ai)
- 💬 [Discord Community](https://discord.gg/6dffbvGU)
- 🐛 [Issue Tracker](https://github.com/CopilotKit/CopilotKit/issues)

---

Built with ❤️ by the CopilotKit team
{copilotkit-0.1.43a0.dist-info → copilotkit-0.2.0a0.dist-info}/RECORD
CHANGED

@@ -1,16 +1,19 @@
 copilotkit/__init__.py,sha256=cmSvgm_jfy8hDu7VMN3DOzRIsEuICMhsJahplqJ-xwE,694
 copilotkit/action.py,sha256=bg_bPusEfN8TMQV1B3lVyMLbHyLn1E7yVYAvfE2_PXA,1691
 copilotkit/agent.py,sha256=HXDh4-PtdSFpLjp3g9nhO4VLPT9N4A1IdNTH6l4zh9w,1669
-copilotkit/crewai/__init__.py,sha256=
-copilotkit/crewai/crewai_agent.py,sha256=
-copilotkit/crewai/
+copilotkit/crewai/__init__.py,sha256=XRTpm5cj7GG-W6B39m2_w69fmCwc_dfTvI_dDQkuXNY,803
+copilotkit/crewai/crewai_agent.py,sha256=maoMnl0WIHUnFAK5ALUKD9z0GvjtfNGi31zi37LyyaE,15323
+copilotkit/crewai/crewai_event_listener.py,sha256=GiHQQmP7xLX3Yj4O-ocwVgbxJab-L__KNIlOvrf5SVQ,8643
+copilotkit/crewai/crewai_sdk.py,sha256=goBwTnu2cOFwTWboZESBFsaX3eEBALPgJK4mOcxoCAo,19074
+copilotkit/crewai/imports.py,sha256=dD3thCatrH63nYaGQf7cBwYoRnqGxQwcp5VoL_XBXsI,3991
+copilotkit/crewai/utils.py,sha256=yEerz8DvcobD4SgVIDsIhObhDYnyrLxT0VYFB3n1i2w,7134
 copilotkit/exc.py,sha256=xsYVU7RIO50eWQsYtBsreCB5ilyBuDUC4Xepki0uCQs,1032
 copilotkit/html.py,sha256=IHnY4cx89LuYuPPf8Tady2DXfr3MyRWilMHpZ7d8bFk,4357
 copilotkit/integrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 copilotkit/integrations/fastapi.py,sha256=SP6UFq2XQWmOWlNnFS-hqfQEl1nMfxSew_uoPpQblhY,10429
 copilotkit/langchain.py,sha256=QFIHnhhAzCKO16F6aFYh_gsCEfm-eaCUPZm-yMTczxU,752
-copilotkit/langgraph.py,sha256=
-copilotkit/langgraph_agent.py,sha256=
+copilotkit/langgraph.py,sha256=0gPW9i0oK9WcVuTzUb5Bd35HThJgdb-_jAjwkko0CC4,14831
+copilotkit/langgraph_agent.py,sha256=bvmsQvJnws-0wEd2CCgOksenaOIMfBvbBky7Hmz2oBA,21657
 copilotkit/logging.py,sha256=lpPEZbI6LMaQLzSgiQMYV4qHrNk-MuO2qZ3zcaHl1nA,499
 copilotkit/parameter.py,sha256=VRgpbJrRgHRRksHHAVUsREB6_S4Xr2W3Q5YGtgMKdBE,1805
 copilotkit/protocol.py,sha256=lgpc6f8tBnNW90xbq0JyI7g4LEENwL78iX4kQAO-C6Y,7641
@@ -18,6 +21,6 @@ copilotkit/runloop.py,sha256=9pBqWeu4QCRPOizz00se1d0_BvCX3ZIABzNeiXhdyNM,10641
 copilotkit/sdk.py,sha256=l4Sk-wUwTpgvSHP3O63FdM_df2xioy2SEwshbXdu__0,11525
 copilotkit/types.py,sha256=_YQluiHpJGGXYB4eWLk0EocDZp5C8rs3ZsMNFhmXEYU,972
 copilotkit/utils.py,sha256=rOAaaFBXgb3iv_cLH9S2steyGwHmnTjaxQTA53qdtQo,203
-copilotkit-0.
-copilotkit-0.
-copilotkit-0.
+copilotkit-0.2.0a0.dist-info/METADATA,sha256=hkqHvJk2m6X3WpaIhwzkpLb_sm9UBpzrFN8dr2NT-lg,6367
+copilotkit-0.2.0a0.dist-info/WHEEL,sha256=IYZQI976HJqqOpQU6PHkJ8fb3tMNBFjg-Cn-pwAbaFM,88
+copilotkit-0.2.0a0.dist-info/RECORD,,
copilotkit-0.1.43a0.dist-info/METADATA
REMOVED
@@ -1,90 +0,0 @@

Metadata-Version: 2.3
Name: copilotkit
Version: 0.1.43a0
Summary: CopilotKit python SDK
License: MIT
Keywords: copilot,copilotkit,langgraph,langchain,ai,langsmith,langserve
Author: Markus Ecker
Author-email: markus.ecker@gmail.com
Requires-Python: >=3.10,<3.14
Classifier: License :: OSI Approved :: MIT License
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Provides-Extra: crewai
Requires-Dist: crewai (==0.102.0) ; extra == "crewai"
Requires-Dist: fastapi (>=0.115.0,<0.116.0)
Requires-Dist: langchain (>=0.3.3,<0.4.0)
Requires-Dist: langgraph (>=0.2.50)
Requires-Dist: partialjson (>=0.0.8,<0.0.9)
Requires-Dist: toml (>=0.10.2,<0.11.0)
Project-URL: Homepage, https://copilotkit.ai
Project-URL: Repository, https://github.com/CopilotKit/CopilotKit/tree/main/sdk-python
Description-Content-Type: text/markdown

# CopilotKit Python SDK

[](https://badge.fury.io/py/copilotkit)
[](https://opensource.org/licenses/MIT)

The official Python SDK for CopilotKit - build AI copilots and agents into your applications.

## Features

- 🚀 Easy integration with LangGraph and LangChain
- 🔄 Built-in support for stateful conversations
- 🛠 Extensible agent framework
- 🔌 FastAPI-ready endpoints
- 🤝 Optional CrewAI integration

## Installation

```bash
pip install copilotkit
```

With CrewAI support:

```bash
pip install "copilotkit[crewai]"
```

## Quick Start

```python
from copilotkit import Copilot

# Initialize a copilot
copilot = Copilot()

# Add your tools and configure the copilot
copilot.add_tool(my_custom_tool)

# Run the copilot
response = copilot.run("Your task description here")
```

## Documentation

For detailed documentation and examples, visit [copilotkit.ai](https://copilotkit.ai)

## Contributing

We welcome contributions! Please see our [Contributing Guidelines](https://github.com/CopilotKit/CopilotKit/blob/main/CONTRIBUTING.md) for details.

## License

This project is licensed under the MIT License - see the [LICENSE](https://github.com/CopilotKit/CopilotKit/blob/main/LICENSE) file for details.

## Support

- 📚 [Documentation](https://docs.copilotkit.ai)
- 💬 [Discord Community](https://discord.gg/6dffbvGU)
- 🐛 [Issue Tracker](https://github.com/CopilotKit/CopilotKit/issues)

---

Built with ❤️ by the CopilotKit team