agentcrew-ai 0.8.13__py3-none-any.whl → 0.9.1__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- AgentCrew/__init__.py +1 -1
- AgentCrew/app.py +46 -634
- AgentCrew/main_docker.py +1 -30
- AgentCrew/modules/a2a/common/client/card_resolver.py +27 -8
- AgentCrew/modules/a2a/server.py +5 -0
- AgentCrew/modules/a2a/task_manager.py +1 -0
- AgentCrew/modules/agents/local_agent.py +2 -2
- AgentCrew/modules/chat/message/command_processor.py +33 -8
- AgentCrew/modules/chat/message/conversation.py +18 -1
- AgentCrew/modules/chat/message/handler.py +5 -1
- AgentCrew/modules/code_analysis/service.py +50 -7
- AgentCrew/modules/code_analysis/tool.py +9 -8
- AgentCrew/modules/console/completers.py +5 -1
- AgentCrew/modules/console/console_ui.py +23 -11
- AgentCrew/modules/console/conversation_browser/__init__.py +9 -0
- AgentCrew/modules/console/conversation_browser/browser.py +84 -0
- AgentCrew/modules/console/conversation_browser/browser_input_handler.py +279 -0
- AgentCrew/modules/console/{conversation_browser.py → conversation_browser/browser_ui.py} +249 -163
- AgentCrew/modules/console/conversation_handler.py +34 -1
- AgentCrew/modules/console/display_handlers.py +127 -7
- AgentCrew/modules/console/visual_mode/__init__.py +5 -0
- AgentCrew/modules/console/visual_mode/viewer.py +41 -0
- AgentCrew/modules/console/visual_mode/viewer_input_handler.py +315 -0
- AgentCrew/modules/console/visual_mode/viewer_ui.py +608 -0
- AgentCrew/modules/gui/components/command_handler.py +137 -29
- AgentCrew/modules/gui/components/menu_components.py +8 -7
- AgentCrew/modules/gui/themes/README.md +30 -14
- AgentCrew/modules/gui/themes/__init__.py +2 -1
- AgentCrew/modules/gui/themes/atom_light.yaml +1287 -0
- AgentCrew/modules/gui/themes/catppuccin.yaml +1276 -0
- AgentCrew/modules/gui/themes/dracula.yaml +1262 -0
- AgentCrew/modules/gui/themes/nord.yaml +1267 -0
- AgentCrew/modules/gui/themes/saigontech.yaml +1268 -0
- AgentCrew/modules/gui/themes/style_provider.py +78 -264
- AgentCrew/modules/gui/themes/theme_loader.py +379 -0
- AgentCrew/modules/gui/themes/unicorn.yaml +1276 -0
- AgentCrew/modules/gui/widgets/configs/global_settings.py +4 -4
- AgentCrew/modules/gui/widgets/history_sidebar.py +6 -1
- AgentCrew/modules/llm/constants.py +28 -9
- AgentCrew/modules/mcpclient/service.py +0 -1
- AgentCrew/modules/memory/base_service.py +13 -0
- AgentCrew/modules/memory/chroma_service.py +50 -0
- AgentCrew/setup.py +470 -0
- {agentcrew_ai-0.8.13.dist-info → agentcrew_ai-0.9.1.dist-info}/METADATA +1 -1
- {agentcrew_ai-0.8.13.dist-info → agentcrew_ai-0.9.1.dist-info}/RECORD +49 -40
- {agentcrew_ai-0.8.13.dist-info → agentcrew_ai-0.9.1.dist-info}/WHEEL +1 -1
- AgentCrew/modules/gui/themes/atom_light.py +0 -1365
- AgentCrew/modules/gui/themes/catppuccin.py +0 -1404
- AgentCrew/modules/gui/themes/dracula.py +0 -1372
- AgentCrew/modules/gui/themes/nord.py +0 -1365
- AgentCrew/modules/gui/themes/saigontech.py +0 -1359
- AgentCrew/modules/gui/themes/unicorn.py +0 -1372
- {agentcrew_ai-0.8.13.dist-info → agentcrew_ai-0.9.1.dist-info}/entry_points.txt +0 -0
- {agentcrew_ai-0.8.13.dist-info → agentcrew_ai-0.9.1.dist-info}/licenses/LICENSE +0 -0
- {agentcrew_ai-0.8.13.dist-info → agentcrew_ai-0.9.1.dist-info}/top_level.txt +0 -0
AgentCrew/modules/gui/widgets/configs/global_settings.py

```diff
@@ -125,10 +125,10 @@ class SettingsTab(QWidget):
         # Theme dropdown
         theme_label = QLabel("Theme:")
         self.theme_dropdown = QComboBox()
-
-
-        )
-        self.theme_dropdown.setCurrentText("
+        from AgentCrew.modules.gui.themes import ThemeLoader
+
+        self.theme_dropdown.addItems(sorted(ThemeLoader.get_available_themes()))
+        self.theme_dropdown.setCurrentText("catppuccin")
         self.theme_dropdown.setStyleSheet(style_provider.get_combo_box_style())
         global_settings_form_layout.addRow(theme_label, self.theme_dropdown)
 
```
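The theme refactor in this release replaces the per-theme Python modules (`atom_light.py`, `dracula.py`, …) with YAML files resolved through the new `ThemeLoader`. A minimal sketch of enumerating themes the way the settings tab above does; only `get_available_themes()` is attested in this diff, and the commented-out `load_theme` call is a hypothetical illustration:

```python
from AgentCrew.modules.gui.themes import ThemeLoader

# Attested by the hunk above: returns the theme names discovered from
# the bundled YAML files (e.g. "catppuccin", "dracula", "nord").
for name in sorted(ThemeLoader.get_available_themes()):
    print(name)

# Hypothetical: the call that would parse a theme's YAML into style data.
# The actual loading API lives in theme_loader.py, which this diff adds
# but does not show.
# theme = ThemeLoader.load_theme("catppuccin")
```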
AgentCrew/modules/gui/widgets/history_sidebar.py

```diff
@@ -23,6 +23,11 @@ from qtpy.QtWidgets import QApplication
 
 from AgentCrew.modules.gui.themes import StyleProvider
 
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from AgentCrew.modules.chat.message_handler import MessageHandler
+
 
 class ConversationSidebar(QWidget):
     """Sidebar widget showing conversation history"""
@@ -31,7 +36,7 @@ class ConversationSidebar(QWidget):
     error_occurred = Signal(str)
     new_conversation_requested = Signal()  # Add this new signal
 
-    def __init__(self, message_handler, parent=None):
+    def __init__(self, message_handler: MessageHandler, parent=None):
         super().__init__(parent)
         self.message_handler = message_handler
         # Store conversations locally to filter
```
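The `TYPE_CHECKING` guard added above is the standard pattern for type-only imports: the `MessageHandler` annotation becomes visible to static checkers without importing the chat module at runtime, sidestepping a potential GUI ↔ chat import cycle. A self-contained sketch of the pattern, with illustrative module names that are not from AgentCrew:

```python
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Evaluated only by static type checkers (mypy, pyright), never at
    # runtime, so importing here cannot create an import cycle.
    from heavy_module import HeavyClass  # illustrative name


def process(obj: "HeavyClass") -> None:
    # The quoted forward reference keeps this annotation valid at runtime
    # even though HeavyClass is never actually imported.
    print(obj)
```

Note that the unquoted annotation in the hunk above only works at runtime if the module also has `from __future__ import annotations` (not shown in this diff); otherwise Python evaluates the annotation at definition time and would raise `NameError`.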
AgentCrew/modules/llm/constants.py

```diff
@@ -334,6 +334,16 @@ _DEEPINFRA_MODELS = [
         input_token_price_1m=0.43,
         output_token_price_1m=1.75,
     ),
+    Model(
+        id="zai-org/GLM-4.7-Flash",
+        provider="deepinfra",
+        name="Zai GLM-4.7 Flash",
+        description="GLM-4.7-Flash is a 30B-A3B MoE model",
+        force_sample_params=SampleParam(temperature=0.7, top_p=1, top_k=40),
+        capabilities=["tool_use", "stream", "structured_output"],
+        input_token_price_1m=0.06,
+        output_token_price_1m=0.4,
+    ),
     Model(
         id="Qwen/Qwen3-32B",
         provider="deepinfra",
@@ -376,11 +386,11 @@ _DEEPINFRA_MODELS = [
         output_token_price_1m=2.18,
     ),
     Model(
-        id="moonshotai/Kimi-K2
+        id="moonshotai/Kimi-K2.5",
         provider="deepinfra",
-        name="Kimi K2
-        description="Kimi K2 is
-        capabilities=["tool_use", "stream", "structured_output"],
+        name="Kimi K2.5",
+        description="Kimi K2.5 is an open-source, native multimodal agentic model built through continual pretraining on approximately 15 trillion mixed visual and text tokens atop Kimi-K2-Base",
+        capabilities=["tool_use", "stream", "structured_output", "vision"],
         force_sample_params=SampleParam(temperature=0.6),
         input_token_price_1m=0.5,
         output_token_price_1m=2.0,
@@ -388,7 +398,7 @@ _DEEPINFRA_MODELS = [
     Model(
         id="MiniMaxAI/MiniMax-M2",
         provider="deepinfra",
-        name="
+        name="MiniMax-M2",
         description="MiniMax-M2 is a Mini model built for Max coding & agentic workflows with just 10 billion activated parameters",
         capabilities=["tool_use", "stream", "structured_output"],
         force_sample_params=SampleParam(temperature=0.6),
@@ -405,6 +415,7 @@ _GITHUB_COPILOT_MODELS = [
         description="",
         capabilities=["tool_use", "vision", "stream"],
         default=False,
+        max_context_token=200_000,
         input_token_price_1m=0.0,
         output_token_price_1m=0.0,
     ),
@@ -477,6 +488,7 @@ _GITHUB_COPILOT_MODELS = [
         description="",
         capabilities=["tool_use", "thinking", "vision", "stream", "structured_output"],
         default=False,
+        max_context_token=400_000,
         input_token_price_1m=0.0,
         output_token_price_1m=0.0,
     ),
@@ -487,6 +499,7 @@ _GITHUB_COPILOT_MODELS = [
         description="",
         capabilities=["tool_use", "thinking", "vision", "stream", "structured_output"],
         default=False,
+        max_context_token=400_000,
         input_token_price_1m=0.0,
         output_token_price_1m=0.0,
         endpoint="response",
@@ -510,6 +523,7 @@ _GITHUB_COPILOT_MODELS = [
         description="",
         capabilities=["tool_use", "vision", "stream", "thinking", "structured_output"],
         default=False,
+        max_context_token=264_000,
         input_token_price_1m=0.0,
         output_token_price_1m=0.0,
     ),
@@ -554,6 +568,7 @@ _GITHUB_COPILOT_MODELS = [
         description="",
         capabilities=["tool_use", "vision", "stream", "thinking", "structured_output"],
         default=False,
+        max_context_token=400_000,
         input_token_price_1m=0.0,
         output_token_price_1m=0.0,
         endpoint="response",
@@ -562,7 +577,7 @@ _GITHUB_COPILOT_MODELS = [
         id="oswe-vscode-prime",
         provider="github_copilot",
         name="Raptor mini",
-        max_context_token=
+        max_context_token=264_000,
         description="",
         capabilities=["tool_use", "vision", "stream", "structured_output"],
         default=True,
@@ -576,6 +591,7 @@ _GITHUB_COPILOT_MODELS = [
         description="",
         capabilities=["tool_use", "vision", "stream", "thinking"],
         default=False,
+        max_context_token=216_000,
         input_token_price_1m=0.0,
         output_token_price_1m=0.0,
     ),
@@ -584,8 +600,9 @@ _GITHUB_COPILOT_MODELS = [
         provider="github_copilot",
         name="Claude Sonnet 4.5",
         description="",
-        capabilities=["tool_use", "vision", "stream"],
+        capabilities=["tool_use", "vision", "stream", "thinking"],
         default=False,
+        max_context_token=200_000,
         input_token_price_1m=0.0,
         output_token_price_1m=0.0,
     ),
@@ -594,8 +611,9 @@ _GITHUB_COPILOT_MODELS = [
         provider="github_copilot",
         name="Claude Opus 4.1 (Preview)",
         description="",
-        capabilities=["tool_use", "vision", "stream"],
+        capabilities=["tool_use", "vision", "stream", "thinking"],
         default=False,
+        max_context_token=80_000,
         input_token_price_1m=0.0,
         output_token_price_1m=0.0,
     ),
@@ -604,8 +622,9 @@ _GITHUB_COPILOT_MODELS = [
         provider="github_copilot",
         name="Claude Opus 4.5 (Preview)",
         description="",
-        capabilities=["tool_use", "vision", "stream"],
+        capabilities=["tool_use", "vision", "stream", "thinking"],
         default=False,
+        max_context_token=200_000,
         input_token_price_1m=0.0,
         output_token_price_1m=0.0,
     ),
```
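The `input_token_price_1m` / `output_token_price_1m` fields throughout these model entries are USD prices per million tokens. A minimal sketch of the cost arithmetic they support; the `Model` below is a stand-in dataclass with just those two fields, not the real class from `AgentCrew.modules.llm`:

```python
from dataclasses import dataclass


@dataclass
class Model:  # stand-in with only the fields used below
    input_token_price_1m: float
    output_token_price_1m: float


def request_cost(model: Model, input_tokens: int, output_tokens: int) -> float:
    """Price fields are USD per 1M tokens, hence the 1_000_000 divisor."""
    return (
        input_tokens * model.input_token_price_1m
        + output_tokens * model.output_token_price_1m
    ) / 1_000_000


glm_flash = Model(input_token_price_1m=0.06, output_token_price_1m=0.4)
# 10k in / 2k out: 0.0006 + 0.0008 = $0.0014
print(f"${request_cost(glm_flash, 10_000, 2_000):.4f}")
```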
AgentCrew/modules/mcpclient/service.py

```diff
@@ -286,7 +286,6 @@ class MCPService:
             logger.warning(
                 f"MCPService: Failed to send ping to {server_id}: {e}"
             )
-            self.connected_servers[server_id] = False
         else:
             logger.warning(
                 f"MCPService: Session for {server_id} not available, stopping keep-alive"
```
AgentCrew/modules/memory/base_service.py

```diff
@@ -150,3 +150,16 @@ class BaseMemoryService(ABC):
             Dict with success status and information about the operation
         """
         pass
+
+    @abstractmethod
+    def delete_by_conversation_id(self, conversation_id: str) -> Dict[str, Any]:
+        """
+        Delete all memories associated with a specific conversation ID.
+
+        Args:
+            conversation_id: The conversation ID (session_id) to delete memories for
+
+        Returns:
+            Dict with success status and count of deleted memories
+        """
+        pass
```
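The new abstract method obliges every concrete memory backend to support conversation-scoped deletion. A hypothetical minimal backend, shown only to illustrate the shape of the contract; it is not part of AgentCrew and omits the other `BaseMemoryService` abstract methods:

```python
from typing import Any, Dict


class InMemoryMemoryService:
    """Hypothetical backend illustrating the new contract."""

    def __init__(self) -> None:
        self._store: Dict[str, Dict[str, Any]] = {}  # memory_id -> metadata

    def remember(self, memory_id: str, session_id: str, content: str) -> None:
        self._store[memory_id] = {"session_id": session_id, "content": content}

    def delete_by_conversation_id(self, conversation_id: str) -> Dict[str, Any]:
        ids = [
            mid
            for mid, meta in self._store.items()
            if meta.get("session_id") == conversation_id
        ]
        for mid in ids:
            del self._store[mid]
        return {"success": True, "message": f"removed {len(ids)}", "count": len(ids)}


svc = InMemoryMemoryService()
svc.remember("m1", "s1", "hello")
print(svc.delete_by_conversation_id("s1"))  # {'success': True, ..., 'count': 1}
```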
AgentCrew/modules/memory/chroma_service.py

```diff
@@ -655,6 +655,56 @@ class ChromaMemoryService(BaseMemoryService):
             "count": len(ids),
         }
 
+    def delete_by_conversation_id(self, conversation_id: str) -> Dict[str, Any]:
+        """
+        Delete all memories associated with a specific conversation ID.
+
+        Args:
+            conversation_id: The conversation ID (session_id) to delete memories for
+
+        Returns:
+            Dict with success status and count of deleted memories
+        """
+        try:
+            collection = self._initialize_collection()
+
+            results = collection.get(
+                where={"session_id": conversation_id},
+                include=["metadatas"],
+            )
+
+            if not results["ids"]:
+                return {
+                    "success": True,
+                    "message": f"No memories found for conversation {conversation_id}",
+                    "count": 0,
+                }
+
+            ids_to_remove = results["ids"]
+            collection.delete(ids=ids_to_remove)
+
+            if conversation_id in self.current_conversation_context:
+                del self.current_conversation_context[conversation_id]
+
+            logger.info(
+                f"Deleted {len(ids_to_remove)} memories for conversation {conversation_id}"
+            )
+
+            return {
+                "success": True,
+                "message": f"Successfully removed {len(ids_to_remove)} memories for conversation {conversation_id}",
+                "count": len(ids_to_remove),
+            }
+        except Exception as e:
+            logger.error(
+                f"Error deleting memories for conversation {conversation_id}: {e}"
+            )
+            return {
+                "success": False,
+                "message": f"Error deleting memories: {str(e)}",
+                "count": 0,
+            }
+
     def get_queue_status(self) -> Dict[str, Any]:
         """Get current queue status for monitoring."""
         return {
```