solace-agent-mesh 0.0.1__py3-none-any.whl → 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of solace-agent-mesh might be problematic. Click here for more details.
- solace_agent_mesh/__init__.py +0 -3
- solace_agent_mesh/agents/__init__.py +0 -0
- solace_agent_mesh/agents/base_agent_component.py +224 -0
- solace_agent_mesh/agents/global/__init__.py +0 -0
- solace_agent_mesh/agents/global/actions/__init__.py +0 -0
- solace_agent_mesh/agents/global/actions/agent_state_change.py +54 -0
- solace_agent_mesh/agents/global/actions/clear_history.py +32 -0
- solace_agent_mesh/agents/global/actions/convert_file_to_markdown.py +160 -0
- solace_agent_mesh/agents/global/actions/create_file.py +70 -0
- solace_agent_mesh/agents/global/actions/error_action.py +45 -0
- solace_agent_mesh/agents/global/actions/plantuml_diagram.py +93 -0
- solace_agent_mesh/agents/global/actions/plotly_graph.py +117 -0
- solace_agent_mesh/agents/global/actions/retrieve_file.py +51 -0
- solace_agent_mesh/agents/global/global_agent_component.py +38 -0
- solace_agent_mesh/agents/image_processing/__init__.py +0 -0
- solace_agent_mesh/agents/image_processing/actions/__init__.py +0 -0
- solace_agent_mesh/agents/image_processing/actions/create_image.py +75 -0
- solace_agent_mesh/agents/image_processing/actions/describe_image.py +115 -0
- solace_agent_mesh/agents/image_processing/image_processing_agent_component.py +23 -0
- solace_agent_mesh/agents/slack/__init__.py +1 -0
- solace_agent_mesh/agents/slack/actions/__init__.py +1 -0
- solace_agent_mesh/agents/slack/actions/post_message.py +177 -0
- solace_agent_mesh/agents/slack/slack_agent_component.py +59 -0
- solace_agent_mesh/agents/web_request/__init__.py +0 -0
- solace_agent_mesh/agents/web_request/actions/__init__.py +0 -0
- solace_agent_mesh/agents/web_request/actions/do_image_search.py +84 -0
- solace_agent_mesh/agents/web_request/actions/do_news_search.py +47 -0
- solace_agent_mesh/agents/web_request/actions/do_suggestion_search.py +34 -0
- solace_agent_mesh/agents/web_request/actions/do_web_request.py +134 -0
- solace_agent_mesh/agents/web_request/actions/download_file.py +69 -0
- solace_agent_mesh/agents/web_request/web_request_agent_component.py +33 -0
- solace_agent_mesh/cli/__init__.py +1 -0
- solace_agent_mesh/cli/commands/__init__.py +0 -0
- solace_agent_mesh/cli/commands/add/__init__.py +3 -0
- solace_agent_mesh/cli/commands/add/add.py +88 -0
- solace_agent_mesh/cli/commands/add/agent.py +110 -0
- solace_agent_mesh/cli/commands/add/copy_from_plugin.py +90 -0
- solace_agent_mesh/cli/commands/add/gateway.py +221 -0
- solace_agent_mesh/cli/commands/build.py +631 -0
- solace_agent_mesh/cli/commands/chat/__init__.py +3 -0
- solace_agent_mesh/cli/commands/chat/chat.py +361 -0
- solace_agent_mesh/cli/commands/config.py +29 -0
- solace_agent_mesh/cli/commands/init/__init__.py +3 -0
- solace_agent_mesh/cli/commands/init/ai_provider_step.py +76 -0
- solace_agent_mesh/cli/commands/init/broker_step.py +102 -0
- solace_agent_mesh/cli/commands/init/builtin_agent_step.py +88 -0
- solace_agent_mesh/cli/commands/init/check_if_already_done.py +13 -0
- solace_agent_mesh/cli/commands/init/create_config_file_step.py +52 -0
- solace_agent_mesh/cli/commands/init/create_other_project_files_step.py +96 -0
- solace_agent_mesh/cli/commands/init/file_service_step.py +73 -0
- solace_agent_mesh/cli/commands/init/init.py +114 -0
- solace_agent_mesh/cli/commands/init/project_structure_step.py +45 -0
- solace_agent_mesh/cli/commands/init/rest_api_step.py +50 -0
- solace_agent_mesh/cli/commands/init/web_ui_step.py +40 -0
- solace_agent_mesh/cli/commands/plugin/__init__.py +3 -0
- solace_agent_mesh/cli/commands/plugin/add.py +98 -0
- solace_agent_mesh/cli/commands/plugin/build.py +217 -0
- solace_agent_mesh/cli/commands/plugin/create.py +117 -0
- solace_agent_mesh/cli/commands/plugin/plugin.py +109 -0
- solace_agent_mesh/cli/commands/plugin/remove.py +71 -0
- solace_agent_mesh/cli/commands/run.py +68 -0
- solace_agent_mesh/cli/commands/visualizer.py +138 -0
- solace_agent_mesh/cli/config.py +81 -0
- solace_agent_mesh/cli/main.py +306 -0
- solace_agent_mesh/cli/utils.py +246 -0
- solace_agent_mesh/common/__init__.py +0 -0
- solace_agent_mesh/common/action.py +91 -0
- solace_agent_mesh/common/action_list.py +37 -0
- solace_agent_mesh/common/action_response.py +327 -0
- solace_agent_mesh/common/constants.py +3 -0
- solace_agent_mesh/common/mysql_database.py +40 -0
- solace_agent_mesh/common/postgres_database.py +79 -0
- solace_agent_mesh/common/prompt_templates.py +30 -0
- solace_agent_mesh/common/prompt_templates_unused_delete.py +161 -0
- solace_agent_mesh/common/stimulus_utils.py +152 -0
- solace_agent_mesh/common/time.py +24 -0
- solace_agent_mesh/common/utils.py +638 -0
- solace_agent_mesh/configs/agent_global.yaml +74 -0
- solace_agent_mesh/configs/agent_image_processing.yaml +82 -0
- solace_agent_mesh/configs/agent_slack.yaml +64 -0
- solace_agent_mesh/configs/agent_web_request.yaml +75 -0
- solace_agent_mesh/configs/conversation_to_file.yaml +56 -0
- solace_agent_mesh/configs/error_catcher.yaml +56 -0
- solace_agent_mesh/configs/monitor.yaml +0 -0
- solace_agent_mesh/configs/monitor_stim_and_errors_to_slack.yaml +106 -0
- solace_agent_mesh/configs/monitor_user_feedback.yaml +58 -0
- solace_agent_mesh/configs/orchestrator.yaml +241 -0
- solace_agent_mesh/configs/service_embedding.yaml +81 -0
- solace_agent_mesh/configs/service_llm.yaml +265 -0
- solace_agent_mesh/configs/visualize_websocket.yaml +55 -0
- solace_agent_mesh/gateway/__init__.py +0 -0
- solace_agent_mesh/gateway/components/__init__.py +0 -0
- solace_agent_mesh/gateway/components/gateway_base.py +41 -0
- solace_agent_mesh/gateway/components/gateway_input.py +265 -0
- solace_agent_mesh/gateway/components/gateway_output.py +289 -0
- solace_agent_mesh/gateway/identity/bamboohr_identity.py +18 -0
- solace_agent_mesh/gateway/identity/identity_base.py +10 -0
- solace_agent_mesh/gateway/identity/identity_provider.py +60 -0
- solace_agent_mesh/gateway/identity/no_identity.py +9 -0
- solace_agent_mesh/gateway/identity/passthru_identity.py +9 -0
- solace_agent_mesh/monitors/base_monitor_component.py +26 -0
- solace_agent_mesh/monitors/feedback/user_feedback_monitor.py +75 -0
- solace_agent_mesh/monitors/stim_and_errors/stim_and_error_monitor.py +560 -0
- solace_agent_mesh/orchestrator/__init__.py +0 -0
- solace_agent_mesh/orchestrator/action_manager.py +225 -0
- solace_agent_mesh/orchestrator/components/__init__.py +0 -0
- solace_agent_mesh/orchestrator/components/orchestrator_action_manager_timeout_component.py +54 -0
- solace_agent_mesh/orchestrator/components/orchestrator_action_response_component.py +179 -0
- solace_agent_mesh/orchestrator/components/orchestrator_register_component.py +107 -0
- solace_agent_mesh/orchestrator/components/orchestrator_stimulus_processor_component.py +477 -0
- solace_agent_mesh/orchestrator/components/orchestrator_streaming_output_component.py +246 -0
- solace_agent_mesh/orchestrator/orchestrator_main.py +166 -0
- solace_agent_mesh/orchestrator/orchestrator_prompt.py +410 -0
- solace_agent_mesh/services/__init__.py +0 -0
- solace_agent_mesh/services/authorization/providers/base_authorization_provider.py +56 -0
- solace_agent_mesh/services/bamboo_hr_service/__init__.py +3 -0
- solace_agent_mesh/services/bamboo_hr_service/bamboo_hr.py +182 -0
- solace_agent_mesh/services/common/__init__.py +4 -0
- solace_agent_mesh/services/common/auto_expiry.py +45 -0
- solace_agent_mesh/services/common/singleton.py +18 -0
- solace_agent_mesh/services/file_service/__init__.py +14 -0
- solace_agent_mesh/services/file_service/file_manager/__init__.py +0 -0
- solace_agent_mesh/services/file_service/file_manager/bucket_file_manager.py +149 -0
- solace_agent_mesh/services/file_service/file_manager/file_manager_base.py +162 -0
- solace_agent_mesh/services/file_service/file_manager/memory_file_manager.py +64 -0
- solace_agent_mesh/services/file_service/file_manager/volume_file_manager.py +106 -0
- solace_agent_mesh/services/file_service/file_service.py +432 -0
- solace_agent_mesh/services/file_service/file_service_constants.py +54 -0
- solace_agent_mesh/services/file_service/file_transformations.py +131 -0
- solace_agent_mesh/services/file_service/file_utils.py +322 -0
- solace_agent_mesh/services/file_service/transformers/__init__.py +5 -0
- solace_agent_mesh/services/history_service/__init__.py +3 -0
- solace_agent_mesh/services/history_service/history_providers/__init__.py +0 -0
- solace_agent_mesh/services/history_service/history_providers/base_history_provider.py +78 -0
- solace_agent_mesh/services/history_service/history_providers/memory_history_provider.py +167 -0
- solace_agent_mesh/services/history_service/history_providers/redis_history_provider.py +163 -0
- solace_agent_mesh/services/history_service/history_service.py +139 -0
- solace_agent_mesh/services/llm_service/components/llm_request_component.py +293 -0
- solace_agent_mesh/services/llm_service/components/llm_service_component_base.py +152 -0
- solace_agent_mesh/services/middleware_service/__init__.py +0 -0
- solace_agent_mesh/services/middleware_service/middleware_service.py +20 -0
- solace_agent_mesh/templates/action.py +38 -0
- solace_agent_mesh/templates/agent.py +29 -0
- solace_agent_mesh/templates/agent.yaml +70 -0
- solace_agent_mesh/templates/gateway-config-template.yaml +6 -0
- solace_agent_mesh/templates/gateway-default-config.yaml +28 -0
- solace_agent_mesh/templates/gateway-flows.yaml +81 -0
- solace_agent_mesh/templates/gateway-header.yaml +16 -0
- solace_agent_mesh/templates/gateway_base.py +15 -0
- solace_agent_mesh/templates/gateway_input.py +98 -0
- solace_agent_mesh/templates/gateway_output.py +71 -0
- solace_agent_mesh/templates/plugin-pyproject.toml +30 -0
- solace_agent_mesh/templates/rest-api-default-config.yaml +23 -0
- solace_agent_mesh/templates/rest-api-flows.yaml +80 -0
- solace_agent_mesh/templates/slack-default-config.yaml +9 -0
- solace_agent_mesh/templates/slack-flows.yaml +90 -0
- solace_agent_mesh/templates/solace-agent-mesh-default.yaml +77 -0
- solace_agent_mesh/templates/solace-agent-mesh-plugin-default.yaml +8 -0
- solace_agent_mesh/templates/web-default-config.yaml +5 -0
- solace_agent_mesh/templates/web-flows.yaml +86 -0
- solace_agent_mesh/tools/__init__.py +0 -0
- solace_agent_mesh/tools/components/__init__.py +0 -0
- solace_agent_mesh/tools/components/conversation_formatter.py +111 -0
- solace_agent_mesh/tools/components/file_resolver_component.py +58 -0
- solace_agent_mesh/tools/config/runtime_config.py +26 -0
- solace_agent_mesh-0.1.0.dist-info/METADATA +179 -0
- solace_agent_mesh-0.1.0.dist-info/RECORD +170 -0
- solace_agent_mesh-0.1.0.dist-info/entry_points.txt +3 -0
- solace_agent_mesh-0.0.1.dist-info/licenses/LICENSE.txt → solace_agent_mesh-0.1.0.dist-info/licenses/LICENSE +1 -2
- solace_agent_mesh-0.0.1.dist-info/METADATA +0 -51
- solace_agent_mesh-0.0.1.dist-info/RECORD +0 -5
- {solace_agent_mesh-0.0.1.dist-info → solace_agent_mesh-0.1.0.dist-info}/WHEEL +0 -0
|
@@ -0,0 +1,163 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import time
|
|
3
|
+
from .base_history_provider import BaseHistoryProvider
|
|
4
|
+
|
|
5
|
+
class RedisHistoryProvider(BaseHistoryProvider):
    """Session history provider backed by a Redis server.

    Per-session key layout:
      - ``session:{session_id}:history``: Redis list of JSON-encoded
        ``{"role": ..., "content": ...}`` entries.
      - ``session:{session_id}:files``: Redis list of JSON-encoded file dicts.
      - ``{session_id}``: Redis hash holding ``num_characters``, ``num_turns``
        and ``last_active_time`` metadata.

    NOTE(review): the client is created without ``decode_responses``, so
    values read back from Redis are bytes; the code below relies on that
    (``b"..."`` hash keys, ``key.decode()`` in get_all_sessions).
    """

    def __init__(self, config=None):
        """Create the provider and connect to Redis.

        :param config: Optional dict with ``redis_host``, ``redis_port`` and
            ``redis_db`` (defaults: localhost / 6379 / 0), plus the policy
            options handled by BaseHistoryProvider.
        :raises ImportError: If the optional ``redis`` package is missing.
        """
        super().__init__(config)
        try:
            import redis
        except ImportError as exc:
            raise ImportError(
                "Please install the redis package to use the RedisHistoryProvider.\n\t$ pip install redis"
            ) from exc

        self.redis_client = redis.Redis(
            host=self.config.get("redis_host", "localhost"),
            port=self.config.get("redis_port", 6379),
            db=self.config.get("redis_db", 0),
        )

    def _get_history_key(self, session_id: str) -> str:
        """Return the Redis key holding the session's history list."""
        return f"session:{session_id}:history"

    def _get_files_key(self, session_id: str) -> str:
        """Return the Redis key holding the session's file list."""
        return f"session:{session_id}:files"

    def store_history(self, session_id: str, role: str, content: str | dict):
        """Append an entry to the session history and enforce size limits.

        When ``enforce_alternate_message_roles`` is set and the new entry has
        the same role as the previous one, the two entries are merged instead
        of appended (string contents only).

        :param session_id: The session identifier.
        :param role: Role of the entry (e.g. "user", "assistant").
        :param content: Entry content; a string or a dict.
        """
        key = self._get_history_key(session_id)
        entry_json = json.dumps({"role": role, "content": content})

        # First write for this session: initialize the metadata hash.
        if not self.redis_client.exists(key):
            self.redis_client.hset(session_id, mapping={
                "num_characters": 0,
                "num_turns": 0,
                "last_active_time": time.time(),
            })

        # Get current stats.
        session_meta = self.redis_client.hgetall(session_id)
        num_characters = int(session_meta.get(b"num_characters", 0))
        num_turns = int(session_meta.get(b"num_turns", 0))

        # Merge consecutive same-role entries when requested. Bug fix: only
        # merge when both contents are strings -- the previous code ran
        # ``last_entry["content"] += content`` unconditionally, which raised
        # TypeError whenever either content was a dict.
        merged = False
        if self.enforce_alternate_message_roles and num_turns > 0:
            last_entry = json.loads(self.redis_client.lindex(key, -1))
            if (
                last_entry["role"] == role
                and isinstance(last_entry["content"], str)
                and isinstance(content, str)
            ):
                last_entry["content"] += content
                self.redis_client.lset(key, -1, json.dumps(last_entry))
                merged = True
        if not merged:
            self.redis_client.rpush(key, entry_json)
            num_turns += 1
        num_characters += len(str(content))

        # Enforce max_turns by trimming the oldest entry if needed.
        if self.max_turns and num_turns > self.max_turns:
            oldest_entry = json.loads(self.redis_client.lpop(key))
            num_characters -= len(str(oldest_entry["content"]))
            num_turns -= 1

        # Enforce max_characters by trimming oldest entries.
        if self.max_characters:
            while num_characters > self.max_characters and num_turns > 0:
                oldest_entry = json.loads(self.redis_client.lpop(key))
                num_characters -= len(str(oldest_entry["content"]))
                num_turns -= 1

        # Persist updated metadata and refresh the activity timestamp.
        self.redis_client.hset(session_id, mapping={
            "num_characters": num_characters,
            "num_turns": num_turns,
            "last_active_time": time.time(),
        })

    def get_history(self, session_id: str):
        """Return the full history as a list of entry dicts."""
        key = self._get_history_key(session_id)
        history = self.redis_client.lrange(key, 0, -1)

        # Decode JSON entries and return a list of dictionaries.
        return [json.loads(entry) for entry in history]

    def store_file(self, session_id: str, file: dict):
        """Store a file entry for the session, skipping duplicates.

        Duplicates are detected by comparing the "url" field against
        already-stored files.
        """
        key = self._get_files_key(session_id)
        file_entry = json.dumps(file)

        # Avoid duplicate files by checking existing URLs.
        existing_files = self.get_files(session_id)
        if any(f.get("url") == file.get("url") for f in existing_files):
            return

        # Add the file and refresh the activity timestamp.
        self.redis_client.rpush(key, file_entry)
        self.redis_client.hset(session_id, "last_active_time", time.time())

    def get_files(self, session_id: str):
        """Return non-expired file entries, pruning expired ones in place."""
        key = self._get_files_key(session_id)
        current_time = time.time()
        files = self.redis_client.lrange(key, 0, -1)

        valid_files = []
        for file_json in files:
            file = json.loads(file_json)
            expiration_timestamp = file.get("expiration_timestamp")

            # Remove expired files as a side effect of listing.
            if expiration_timestamp and current_time > expiration_timestamp:
                self.redis_client.lrem(key, 0, file_json)
            else:
                valid_files.append(file)

        return valid_files

    def clear_history(self, session_id: str, keep_levels=0):
        """Clear history (and files), optionally keeping the newest entries.

        :param session_id: The session identifier.
        :param keep_levels: Number of most recent history entries to keep;
            0 wipes the history, files and session metadata entirely.
        """
        history_key = self._get_history_key(session_id)
        files_key = self._get_files_key(session_id)

        if keep_levels > 0:
            # Keep the latest `keep_levels` entries.
            self.redis_client.ltrim(history_key, -keep_levels, -1)

            # Recalculate session metadata from what is left.
            remaining_entries = self.redis_client.lrange(history_key, 0, -1)
            num_characters = sum(
                len(str(json.loads(entry)["content"])) for entry in remaining_entries
            )
            num_turns = len(remaining_entries)

            self.redis_client.hset(session_id, mapping={
                "num_characters": num_characters,
                "num_turns": num_turns,
            })
        else:
            # Clear all history, files and metadata.
            self.redis_client.delete(history_key, files_key, session_id)

    def get_session_meta(self, session_id: str):
        """Retrieve the session metadata.

        :param session_id: The session identifier.
        :return: Dict with ``num_characters``, ``num_turns`` and
            ``last_active_time``, or None if the session does not exist.
        """
        if not self.redis_client.exists(session_id):
            return None
        session_meta = self.redis_client.hgetall(session_id)
        return {
            "num_characters": int(session_meta.get(b"num_characters", 0)),
            "num_turns": int(session_meta.get(b"num_turns", 0)),
            "last_active_time": float(session_meta.get(b"last_active_time", 0)),
        }

    def get_all_sessions(self) -> list[str]:
        """List all session ids that currently have a history list in Redis."""
        session_keys = self.redis_client.scan_iter("session:*:history")
        return [key.decode().split(":")[1] for key in session_keys]
|
|
@@ -0,0 +1,139 @@
|
|
|
1
|
+
import time
|
|
2
|
+
import importlib
|
|
3
|
+
from typing import Union
|
|
4
|
+
|
|
5
|
+
from solace_ai_connector.common.log import log
|
|
6
|
+
|
|
7
|
+
from ...common.time import ONE_HOUR, FIVE_MINUTES
|
|
8
|
+
from ..common import AutoExpiry, AutoExpirySingletonMeta
|
|
9
|
+
from .history_providers.memory_history_provider import MemoryHistoryProvider
|
|
10
|
+
from .history_providers.redis_history_provider import RedisHistoryProvider
|
|
11
|
+
from .history_providers.base_history_provider import BaseHistoryProvider
|
|
12
|
+
|
|
13
|
+
# Registry of built-in history provider implementations, keyed by the
# `type` value accepted in the service configuration.
HISTORY_PROVIDERS = {
    "redis": RedisHistoryProvider,
    "memory": MemoryHistoryProvider,
}


# Provider used when the configuration does not specify a `type`.
DEFAULT_PROVIDER = "memory"

# Default per-session limits applied when no `history_policy` is configured.
DEFAULT_MAX_TURNS = 40
DEFAULT_MAX_CHARACTERS = 50_000

# Baseline policy; user-supplied `history_policy` entries override these keys.
DEFAULT_HISTORY_POLICY = {
    "max_turns": DEFAULT_MAX_TURNS,
    "max_characters": DEFAULT_MAX_CHARACTERS,
    "enforce_alternate_message_roles": False,
}
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
# HistoryService class - Manages history storage and retrieval
|
|
32
|
+
class HistoryService(AutoExpiry, metaclass=AutoExpirySingletonMeta):
    """Facade over a pluggable history provider with automatic expiry.

    Chooses a built-in provider ("memory" or "redis") or dynamically loads a
    custom one from ``module_path``, then runs a background thread that
    clears sessions idle longer than ``time_to_live``.
    """

    # Set in __init__; all public methods delegate to this provider.
    history_provider: BaseHistoryProvider

    def __init__(self, config=None, identifier=None):
        """Initialize the service and start the auto-expiry thread.

        :param config: Service configuration (``type``, ``time_to_live``,
            ``expiration_check_interval``, ``history_policy``, ``module_path``).
        :param identifier: Optional identifier for this service instance.
        :raises ValueError: If the provider type is unknown and no
            ``module_path`` is configured.
        :raises ImportError: If a custom provider cannot be loaded.
        """
        self.identifier = identifier
        # Bug fix: the previous signature used a mutable default (config={})
        # shared across every call; use a None sentinel and normalize here.
        self.config = config if config is not None else {}
        self.provider_type = self.config.get("type", DEFAULT_PROVIDER)
        self.time_to_live = self.config.get("time_to_live", ONE_HOUR)
        self.expiration_check_interval = self.config.get(
            "expiration_check_interval", FIVE_MINUTES
        )

        if self.provider_type not in HISTORY_PROVIDERS and not self.config.get(
            "module_path"
        ):
            raise ValueError(
                f"Unsupported history provider type: {self.provider_type}. No module_path provided."
            )

        # Defaults first, so user-configured policy keys win.
        history_policy = {
            **DEFAULT_HISTORY_POLICY,
            **self.config.get("history_policy", {}),
        }
        if self.provider_type in HISTORY_PROVIDERS:
            # Load built-in history provider
            self.history_provider = HISTORY_PROVIDERS[self.provider_type](
                history_policy
            )
        else:
            try:
                # Load a provider class named `provider_type` from module_path.
                module_name = self.provider_type
                module_path = self.config.get("module_path")
                module = importlib.import_module(module_path, package=__package__)
                history_class = getattr(module, module_name)
                if not issubclass(history_class, BaseHistoryProvider):
                    raise ValueError(
                        f"History provider class {history_class} does not inherit from BaseHistoryProvider"
                    )
                self.history_provider = history_class(history_policy)
            except Exception as e:
                raise ImportError("Unable to load component: " + str(e)) from e

        # Start the background thread for auto-expiry
        self._start_auto_expiry_thread(self.expiration_check_interval)

    def _delete_expired_items(self):
        """Checks all history entries and deletes those that have exceeded max_time_to_live."""
        current_time = time.time()
        sessions = self.history_provider.get_all_sessions()
        for session_id in sessions:
            session = self.history_provider.get_session_meta(session_id)
            if not session:
                continue
            elapsed_time = current_time - session["last_active_time"]
            if elapsed_time > self.time_to_live:
                self.history_provider.clear_history(session_id)
                log.debug(f"History for session {session_id} has expired")

    def store_history(self, session_id: str, role: str, content: Union[str, dict]):
        """
        Store a new entry in the history. Empty content is silently ignored.

        :param session_id: The session identifier.
        :param role: The role of the entry to be stored in the history.
        :param content: The content of the entry to be stored in the history.
        """
        if not content:
            return
        return self.history_provider.store_history(session_id, role, content)

    def get_history(self, session_id: str) -> list:
        """
        Retrieve the entire history.

        :param session_id: The session identifier.
        :return: The complete history.
        """
        return self.history_provider.get_history(session_id)

    def store_file(self, session_id: str, file: dict):
        """
        Store a file in the history. Falsy file values are ignored.

        :param session_id: The session identifier.
        :param file: The file to be stored in the history.
        """
        if not file:
            return
        return self.history_provider.store_file(session_id, file)

    def get_files(self, session_id: str) -> list:
        """
        Retrieve the files for a session.

        :param session_id: The session identifier.
        :return: The files for the session.
        """
        return self.history_provider.get_files(session_id)

    def clear_history(self, session_id: str, keep_levels=0):
        """
        Clear the history and files, optionally keeping a specified number of recent entries.

        :param session_id: The session identifier.
        :param keep_levels: Number of most recent history entries to keep. Default is 0 (clear all).
        """
        return self.history_provider.clear_history(session_id, keep_levels)
|
|
@@ -0,0 +1,293 @@
|
|
|
1
|
+
"""LLM Request Component for performing LLM service requests."""
|
|
2
|
+
|
|
3
|
+
import uuid
|
|
4
|
+
from typing import Dict, Any
|
|
5
|
+
|
|
6
|
+
from solace_ai_connector.components.component_base import ComponentBase
|
|
7
|
+
from solace_ai_connector.common.log import log
|
|
8
|
+
from solace_ai_connector.common.message import Message
|
|
9
|
+
from solace_ai_connector.common.utils import ensure_slash_on_end
|
|
10
|
+
|
|
11
|
+
# Component metadata consumed by the solace-ai-connector framework:
# configuration parameters plus input/output payload schemas.
info = {
    "class_name": "LLMRequestComponent",
    "description": "Component that performs LLM service requests",
    "config_parameters": [
        {
            "name": "llm_service_topic",
            "required": True,
            "description": "The topic for the LLM service",
        },
        {
            "name": "stream_to_flow",
            "required": False,
            "description": (
                "Name the flow to stream the output to - this must be configured for "
                "llm_mode='stream'. This is mutually exclusive with stream_to_next_component."
            ),
            "default": "",
        },
        {
            "name": "stream_to_next_component",
            "required": False,
            "description": (
                "Whether to stream the output to the next component in the flow. "
                "This is mutually exclusive with stream_to_flow."
            ),
            "default": False,
        },
        {
            "name": "llm_mode",
            "required": False,
            # Bug fix: the description previously offered a nonexistent
            # 'sync' value; the recognized modes are 'none' (default) and
            # 'stream', matching the `llm_mode == "stream"` check in invoke().
            "description": (
                "The mode for streaming results: 'none' or 'stream'. 'stream' "
                "will just stream the results to the named flow. 'none' will "
                "wait for the full response."
            ),
            "default": "none",
        },
        {
            "name": "stream_batch_size",
            "required": False,
            "description": "The minimum number of words in a single streaming result.",
            "default": 15,
        },
    ],
    "input_schema": {
        "type": "object",
        "properties": {
            "messages": {
                "type": "array",
                "items": {
                    "type": "object",
                    "properties": {
                        "role": {
                            "type": "string",
                            "enum": ["system", "user", "assistant"],
                        },
                        "content": {"type": "string"},
                    },
                    "required": ["role", "content"],
                },
            },
            "source_info": {
                "type": "object",
                "properties": {
                    "type": {"type": "string"},
                },
                "required": ["type"],
            },
        },
        "required": ["messages"],
    },
    "output_schema": {
        "type": "object",
        "properties": {
            "content": {
                "type": "string",
                "description": "The generated response from the model",
            },
            "chunk": {
                "type": "string",
                "description": "The current chunk of the response",
            },
            "response_uuid": {
                "type": "string",
                "description": "The UUID of the response",
            },
            "first_chunk": {
                "type": "boolean",
                "description": "Whether this is the first chunk of the response",
            },
            "last_chunk": {
                "type": "boolean",
                "description": "Whether this is the last chunk of the response",
            },
            "streaming": {
                "type": "boolean",
                "description": "Whether this is a streaming response",
            },
        },
        "required": ["content"],
    },
}
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
class LLMRequestComponent(ComponentBase):
    """Component that performs LLM service requests.

    Publishes the supplied chat messages to the LLM service over the broker
    and either waits for the complete response (llm_mode != 'stream') or
    consumes a stream of chunks, forwarding word-batched partial results to
    a named flow or the next component while aggregating the final answer.
    """

    def __init__(self, child_info=None, **kwargs):
        # Subclasses may supply their own metadata dict via child_info;
        # otherwise the module-level `info` is used.
        super().__init__(child_info or info, **kwargs)
        self.init()

    def init(self):
        """Initialize the component with configuration parameters."""
        self.llm_service_topic = ensure_slash_on_end(
            self.get_config("llm_service_topic")
        )
        self.stream_to_flow = self.get_config("stream_to_flow")
        self.stream_to_next_component = self.get_config("stream_to_next_component")
        self.llm_mode = self.get_config("llm_mode")
        self.stream_batch_size = self.get_config("stream_batch_size")

        # The two streaming destinations are mutually exclusive.
        if self.stream_to_flow and self.stream_to_next_component:
            raise ValueError(
                "stream_to_flow and stream_to_next_component are mutually exclusive"
            )

        # All requests go through broker request/response, so it must be enabled.
        if not self.is_broker_request_response_enabled():
            raise ValueError(
                "LLM service topic is set, but the component does not "
                f"have its broker request/response enabled, {self.__class__.__name__}"
            )

    def invoke(self, message: Message, data: Dict[str, Any]) -> Dict[str, Any]:
        """
        Invoke the LLM service request.

        Dispatches to streaming or synchronous handling based on llm_mode.

        Args:
            message (Message): The input message.
            data (Dict[str, Any]): The input data containing the messages.

        Returns:
            Dict[str, Any]: The response from the LLM service.
        """
        messages = data.get("messages", [])
        source_info = data.get("source_info", {})
        llm_message = self._create_llm_message(message, messages, source_info)
        response_uuid = str(uuid.uuid4())

        try:
            if self.llm_mode == "stream":
                return self._handle_streaming(message, llm_message, response_uuid)
            else:
                return self._handle_sync(llm_message)
        except Exception as e:
            # Log with traceback, then propagate to the framework.
            log.error("Error invoking LLM service: %s", e, exc_info=True)
            raise

    def _handle_sync(self, llm_message: Message) -> Dict[str, Any]:
        """
        Handle synchronous LLM service request.

        Blocks until the full response arrives and returns its payload.

        Args:
            llm_message (Message): The message to send to the LLM service.

        Returns:
            Dict[str, Any]: The response from the LLM service.
        """
        response = self.do_broker_request_response(llm_message)
        return response.get_payload()

    def _handle_streaming(
        self, input_message: Message, llm_message: Message, response_uuid: str
    ) -> Dict[str, Any]:
        """
        Handle streaming LLM service request.

        Accumulates chunks from the broker stream, emitting a partial result
        whenever at least `stream_batch_size` words have been gathered (or on
        the final chunk), and returns the aggregated full response.

        Args:
            input_message (Message): The original input message.
            llm_message (Message): The message to send to the LLM service.
            response_uuid (str): The UUID for the response.

        Returns:
            Dict[str, Any]: The final response from the LLM service.
        """
        aggregate_result = ""
        current_batch = ""
        first_chunk = True

        for response_message, last_message in self.do_broker_request_response(
            llm_message,
            stream=True,
            streaming_complete_expression="input.payload:last_chunk",
        ):
            payload = response_message.get_payload()
            content = payload.get("chunk", "")
            aggregate_result += content
            current_batch += content

            # Flush when the batch reaches the word threshold, or always on
            # the last chunk (even if the batch is short or empty).
            if len(current_batch.split()) >= self.stream_batch_size or last_message:
                self._send_streaming_chunk(
                    input_message,
                    current_batch,
                    aggregate_result,
                    response_uuid,
                    first_chunk,
                    last_message,
                )
                current_batch = ""
                first_chunk = False

            if last_message:
                break

        return {
            "content": aggregate_result,
            "response_uuid": response_uuid,
            "streaming": True,
            "last_chunk": True,
        }

    def _create_llm_message(self, message: Message, messages: list, source_info: dict) -> Message:
        """
        Create a message for the LLM service request.

        The topic embeds stimulus/session/originator ids (defaulting to "x"
        or a fresh UUID when absent) so responses can be correlated.

        Args:
            message (Message): The original input message.
            messages (list): The list of messages to send to the LLM service.
            source_info (dict): Information about the caller to help monitoring LLM requests.

        Returns:
            Message: The created message for the LLM service.
        """
        # Copy so the source_info annotation does not mutate the caller's
        # user properties.
        user_properties = message.get_user_properties().copy()
        stimulus_uuid = user_properties.get("stimulus_uuid", str(uuid.uuid4()))
        session_id = user_properties.get("session_id", "x")
        originator_id = user_properties.get("originator_id", "x")
        user_properties["llm_request_source_info"] = source_info

        topic = f"{self.llm_service_topic}{stimulus_uuid}/{session_id}/{originator_id}"

        return Message(
            topic=topic,
            # NOTE(review): payload always requests streaming from the LLM
            # service, even when this component's llm_mode is not 'stream'
            # -- confirm this is intentional.
            payload={"messages": messages, "stream": True},
            user_properties=user_properties,
        )

    def _send_streaming_chunk(
        self,
        input_message: Message,
        chunk: str,
        aggregate_result: str,
        response_uuid: str,
        first_chunk: bool,
        last_chunk: bool,
    ):
        """
        Send a streaming chunk to the specified flow or next component.

        If neither stream_to_flow nor stream_to_next_component is configured,
        the chunk is silently dropped.

        Args:
            input_message (Message): The original input message.
            chunk (str): The current chunk of the response.
            aggregate_result (str): The aggregated result so far.
            response_uuid (str): The UUID of the response.
            first_chunk (bool): Whether this is the first chunk.
            last_chunk (bool): Whether this is the last chunk.
        """
        payload = {
            "chunk": chunk,
            "content": aggregate_result,
            "response_uuid": response_uuid,
            "first_chunk": first_chunk,
            "last_chunk": last_chunk,
            "streaming": True,
        }
        message = Message(
            payload=payload,
            user_properties=input_message.get_user_properties(),
        )

        if self.stream_to_flow:
            self.send_to_flow(self.stream_to_flow, message)
        elif self.stream_to_next_component:
            self.send_message(message)
|