solana-agent 20.1.5__tar.gz → 21.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. {solana_agent-20.1.5 → solana_agent-21.1.0}/PKG-INFO +9 -41
  2. {solana_agent-20.1.5 → solana_agent-21.1.0}/README.md +5 -37
  3. {solana_agent-20.1.5 → solana_agent-21.1.0}/pyproject.toml +5 -5
  4. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/adapters/llm_adapter.py +7 -1
  5. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/client/solana_agent.py +7 -5
  6. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/interfaces/client/client.py +5 -2
  7. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/interfaces/providers/llm.py +1 -0
  8. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/interfaces/services/agent.py +1 -0
  9. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/interfaces/services/query.py +1 -0
  10. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/services/agent.py +3 -0
  11. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/services/query.py +4 -0
  12. {solana_agent-20.1.5 → solana_agent-21.1.0}/LICENSE +0 -0
  13. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/__init__.py +0 -0
  14. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/adapters/__init__.py +0 -0
  15. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/adapters/mongodb_adapter.py +0 -0
  16. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/client/__init__.py +0 -0
  17. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/domains/__init__.py +0 -0
  18. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/domains/agent.py +0 -0
  19. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/domains/routing.py +0 -0
  20. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/factories/__init__.py +0 -0
  21. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/factories/agent_factory.py +0 -0
  22. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/interfaces/__init__.py +0 -0
  23. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/interfaces/plugins/plugins.py +0 -0
  24. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/interfaces/providers/data_storage.py +0 -0
  25. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/interfaces/providers/memory.py +0 -0
  26. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/interfaces/services/routing.py +0 -0
  27. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/plugins/__init__.py +0 -0
  28. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/plugins/manager.py +0 -0
  29. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/plugins/registry.py +0 -0
  30. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/plugins/tools/__init__.py +0 -0
  31. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/plugins/tools/auto_tool.py +0 -0
  32. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/repositories/__init__.py +0 -0
  33. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/repositories/memory.py +0 -0
  34. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/services/__init__.py +0 -0
  35. {solana_agent-20.1.5 → solana_agent-21.1.0}/solana_agent/services/routing.py +0 -0
--- solana_agent-20.1.5/PKG-INFO
+++ solana_agent-21.1.0/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: solana-agent
-Version: 20.1.5
+Version: 21.1.0
 Summary: Agentic IQ
 License: MIT
 Keywords: ai,openai,ai agents,agi
@@ -14,10 +14,10 @@ Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
-Requires-Dist: openai (>=1.68.2,<2.0.0)
-Requires-Dist: pydantic (>=2.11.1,<3.0.0)
+Requires-Dist: openai (>=1.70.0,<2.0.0)
+Requires-Dist: pydantic (>=2.11.2,<3.0.0)
 Requires-Dist: pymongo (>=4.11.3,<5.0.0)
-Requires-Dist: zep-cloud (>=2.8.0,<3.0.0)
+Requires-Dist: zep-cloud (>=2.9.0,<3.0.0)
 Requires-Dist: zep-python (>=2.0.2,<3.0.0)
 Project-URL: Documentation, https://docs.solana-agent.com
 Project-URL: Repository, https://github.com/truemagic-coder/solana-agent
@@ -44,6 +44,7 @@ Build your AI business in three lines of code!
 * Multi-Agent Swarm
 * Multi-Modal Streaming
 * Conversational Memory & History
+* Built-in Internet Search
 * Intelligent Routing
 * Business Alignment
 * Extensible Tooling
@@ -59,6 +60,7 @@ Build your AI business in three lines of code!
 * Designed for a multi-agent swarm
 * Seamless text and audio streaming with real-time multi-modal processing
 * Persistent memory that preserves context across all agent interactions
+* Quick built-in Internet Search to answer your queries
 * Streamlined message history for all agent interactions
 * Intelligent query routing to agents with optimal domain expertise or your own custom routing
 * Unified value system ensuring brand-aligned agent responses
@@ -250,42 +252,6 @@ Plugins like Solana Agent Kit (sakit) integrate automatically with Solana Agent.
 
 `pip install sakit`
 
-### Internet Search
-```python
-from solana_agent import SolanaAgent
-
-config = {
-    "openai": {
-        "api_key": "your-openai-api-key",
-    },
-    "tools": {
-        "search_internet": {
-            "api_key": "your-perplexity-key",  # Required
-            "citations": True,  # Optional, defaults to True
-            "model": "sonar"  # Optional, defaults to "sonar"
-        },
-    },
-    "agents": [
-        {
-            "name": "research_specialist",
-            "instructions": "You are an expert researcher who synthesizes complex information clearly.",
-            "specialization": "Research and knowledge synthesis",
-            "tools": ["search_internet"],
-        },
-        {
-            "name": "customer_support",
-            "instructions": "You provide friendly, helpful customer support responses.",
-            "specialization": "Customer inquiries",
-        }
-    ],
-}
-
-solana_agent = SolanaAgent(config=config)
-
-async for response in solana_agent.process("user123", "What are the latest AI developments?"):
-    print(response, end="")
-```
-
 ### MCP
 ```python
 from solana_agent import SolanaAgent
@@ -402,7 +368,7 @@ solana_agent = SolanaAgent(config=config)
 
 test_tool = TestTool()
 
-solana_agent.register_tool(test_tool)
+solana_agent.register_tool("customer_support", test_tool)
 
 async for response in solana_agent.process("user123", "What are the latest AI developments?"):
     print(response, end="")
@@ -410,6 +376,8 @@ async for response in solana_agent.process("user123", "What are the latest AI de
 
 ### Custom Prompt Injection at Runtime
 
+Useful for Knowledge Base answers and FAQs
+
 ```python
 from solana_agent import SolanaAgent
 
--- solana_agent-20.1.5/README.md
+++ solana_agent-21.1.0/README.md
@@ -19,6 +19,7 @@ Build your AI business in three lines of code!
 * Multi-Agent Swarm
 * Multi-Modal Streaming
 * Conversational Memory & History
+* Built-in Internet Search
 * Intelligent Routing
 * Business Alignment
 * Extensible Tooling
@@ -34,6 +35,7 @@ Build your AI business in three lines of code!
 * Designed for a multi-agent swarm
 * Seamless text and audio streaming with real-time multi-modal processing
 * Persistent memory that preserves context across all agent interactions
+* Quick built-in Internet Search to answer your queries
 * Streamlined message history for all agent interactions
 * Intelligent query routing to agents with optimal domain expertise or your own custom routing
 * Unified value system ensuring brand-aligned agent responses
@@ -225,42 +227,6 @@ Plugins like Solana Agent Kit (sakit) integrate automatically with Solana Agent.
 
 `pip install sakit`
 
-### Internet Search
-```python
-from solana_agent import SolanaAgent
-
-config = {
-    "openai": {
-        "api_key": "your-openai-api-key",
-    },
-    "tools": {
-        "search_internet": {
-            "api_key": "your-perplexity-key",  # Required
-            "citations": True,  # Optional, defaults to True
-            "model": "sonar"  # Optional, defaults to "sonar"
-        },
-    },
-    "agents": [
-        {
-            "name": "research_specialist",
-            "instructions": "You are an expert researcher who synthesizes complex information clearly.",
-            "specialization": "Research and knowledge synthesis",
-            "tools": ["search_internet"],
-        },
-        {
-            "name": "customer_support",
-            "instructions": "You provide friendly, helpful customer support responses.",
-            "specialization": "Customer inquiries",
-        }
-    ],
-}
-
-solana_agent = SolanaAgent(config=config)
-
-async for response in solana_agent.process("user123", "What are the latest AI developments?"):
-    print(response, end="")
-```
-
 ### MCP
 ```python
 from solana_agent import SolanaAgent
@@ -377,7 +343,7 @@ solana_agent = SolanaAgent(config=config)
 
 test_tool = TestTool()
 
-solana_agent.register_tool(test_tool)
+solana_agent.register_tool("customer_support", test_tool)
 
 async for response in solana_agent.process("user123", "What are the latest AI developments?"):
     print(response, end="")
@@ -385,6 +351,8 @@ async for response in solana_agent.process("user123", "What are the latest AI de
 
 ### Custom Prompt Injection at Runtime
 
+Useful for Knowledge Base answers and FAQs
+
 ```python
 from solana_agent import SolanaAgent
 
--- solana_agent-20.1.5/pyproject.toml
+++ solana_agent-21.1.0/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "solana-agent"
-version = "20.1.5"
+version = "21.1.0"
 description = "Agentic IQ"
 authors = ["Bevan Hunt <bevan@bevanhunt.com>"]
 license = "MIT"
@@ -23,15 +23,15 @@ python_paths = [".", "tests"]
 
 [tool.poetry.dependencies]
 python = ">=3.12,<4.0"
-openai = "^1.68.2"
-pydantic = "^2.11.1"
+openai = "^1.70.0"
+pydantic = "^2.11.2"
 pymongo = "^4.11.3"
-zep-cloud = "^2.8.0"
+zep-cloud = "^2.9.0"
 zep-python = "^2.0.2"
 
 [tool.poetry.group.dev.dependencies]
 pytest = "^8.3.5"
-pytest-cov = "^6.0.0"
+pytest-cov = "^6.1.0"
 pytest-asyncio = "^0.26.0"
 pytest-mock = "^3.14.0"
 pytest-github-actions-annotate-failures = "^0.3.0"
--- solana_agent-20.1.5/solana_agent/adapters/llm_adapter.py
+++ solana_agent-21.1.0/solana_agent/adapters/llm_adapter.py
@@ -20,6 +20,7 @@ class OpenAIAdapter(LLMProvider):
         self.client = OpenAI(api_key=api_key)
         self.parse_model = "gpt-4o-mini"
         self.text_model = "gpt-4o-mini"
+        self.search_model = "gpt-4o-mini-search-preview"
         self.transcription_model = "gpt-4o-mini-transcribe"
         self.tts_model = "tts-1"
 
@@ -105,6 +106,7 @@ class OpenAIAdapter(LLMProvider):
         self,
         prompt: str,
         system_prompt: str = "",
+        search: bool = False,
     ) -> AsyncGenerator[str, None]:  # pragma: no cover
         """Generate text from OpenAI models."""
         messages = []
@@ -114,11 +116,15 @@ class OpenAIAdapter(LLMProvider):
 
         messages.append({"role": "user", "content": prompt})
 
+        model = self.text_model
+        if search:
+            model = self.search_model
+
         # Prepare request parameters
         request_params = {
             "messages": messages,
             "stream": True,
-            "model": self.text_model,
+            "model": model,
         }
         try:
             response = self.client.chat.completions.create(**request_params)
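
The adapter change above picks `gpt-4o-mini-search-preview` whenever `search=True` and falls back to the regular text model otherwise. A minimal usage sketch of that flag, assuming the adapter is constructed with just an API key as the `__init__` excerpt suggests (the prompts are placeholders):

```python
import asyncio

from solana_agent.adapters.llm_adapter import OpenAIAdapter


async def main():
    # Assumed constructor shape based on the __init__ shown in the hunk above.
    adapter = OpenAIAdapter(api_key="your-openai-api-key")

    # search=False (the default) streams from text_model ("gpt-4o-mini");
    # search=True switches the request to search_model ("gpt-4o-mini-search-preview").
    async for chunk in adapter.generate_text(
        prompt="What are the latest AI developments?",
        system_prompt="You are a helpful researcher.",
        search=True,
    ):
        print(chunk, end="")


asyncio.run(main())
```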
--- solana_agent-20.1.5/solana_agent/client/solana_agent.py
+++ solana_agent-21.1.0/solana_agent/client/solana_agent.py
@@ -56,6 +56,7 @@ class SolanaAgent(SolanaAgentInterface):
             "flac", "mp3", "mp4", "mpeg", "mpga", "m4a", "ogg", "wav", "webm"
         ] = "mp4",
         router: Optional[RoutingInterface] = None,
+        use_openai_search: bool = True,
     ) -> AsyncGenerator[Union[str, bytes], None]:  # pragma: no cover
         """Process a user message and return the response stream.
 
@@ -69,6 +70,7 @@ class SolanaAgent(SolanaAgentInterface):
             audio_output_format: Audio output format
             audio_input_format: Audio input format
             router: Optional routing service for processing
+            use_openai_search: Flag to use OpenAI search
 
         Returns:
             Async generator yielding response chunks (text strings or audio bytes)
@@ -83,6 +85,7 @@ class SolanaAgent(SolanaAgentInterface):
             audio_input_format=audio_input_format,
             prompt=prompt,
             router=router,
+            use_openai_search=use_openai_search,
         ):
             yield chunk
 
@@ -118,11 +121,12 @@ class SolanaAgent(SolanaAgentInterface):
             user_id, page_num, page_size, sort_order
         )
 
-    def register_tool(self, tool: Tool) -> bool:
+    def register_tool(self, agent_name: str, tool: Tool) -> bool:
         """
         Register a tool with the agent system.
 
         Args:
+            agent_name: Name of the agent to register the tool with
            tool: Tool instance to register
 
         Returns:
@@ -131,8 +135,6 @@ class SolanaAgent(SolanaAgentInterface):
         success = self.query_service.agent_service.tool_registry.register_tool(
             tool)
         if success:
-            agents = self.query_service.agent_service.get_all_ai_agents()
-            for agent_name in agents:
-                self.query_service.agent_service.assign_tool_for_agent(
-                    agent_name, tool.name)
+            self.query_service.agent_service.assign_tool_for_agent(
+                agent_name, tool.name)
         return success
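
The `register_tool` change above is the behavioral core of this release's tool API: the tool is still registered once in the tool registry, but it is now assigned only to the agent named in the call rather than to every agent. A rough sketch of the new call pattern (the `config`, `TestTool`, and agent name are placeholders taken from the README example and are assumed to be defined as shown there):

```python
from solana_agent import SolanaAgent

# Assumes `config` defines a "customer_support" agent and `TestTool` is a Tool
# implementation, as in the README's "Custom Tool" example.
solana_agent = SolanaAgent(config=config)
test_tool = TestTool()

# 20.1.5 attached a registered tool to every agent:
#     solana_agent.register_tool(test_tool)
# 21.1.0 attaches it only to the named agent:
ok = solana_agent.register_tool("customer_support", test_tool)
assert ok  # register_tool still returns a bool indicating success
```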
--- solana_agent-20.1.5/solana_agent/interfaces/client/client.py
+++ solana_agent-21.1.0/solana_agent/interfaces/client/client.py
@@ -2,6 +2,7 @@ from abc import ABC, abstractmethod
 from typing import Any, AsyncGenerator, Dict, Literal, Optional, Union
 
 from solana_agent.interfaces.plugins.plugins import Tool
+from solana_agent.interfaces.services.routing import RoutingService as RoutingInterface
 
 
 class SolanaAgent(ABC):
@@ -12,6 +13,7 @@ class SolanaAgent(ABC):
         self,
         user_id: str,
         message: Union[str, bytes],
+        prompt: Optional[str] = None,
         output_format: Literal["text", "audio"] = "text",
         audio_voice: Literal["alloy", "ash", "ballad", "coral", "echo",
                              "fable", "onyx", "nova", "sage", "shimmer"] = "nova",
@@ -21,7 +23,8 @@ class SolanaAgent(ABC):
         audio_input_format: Literal[
             "flac", "mp3", "mp4", "mpeg", "mpga", "m4a", "ogg", "wav", "webm"
         ] = "mp4",
-        prompt: Optional[str] = None,
+        router: Optional[RoutingInterface] = None,
+        use_openai_search: bool = True,
     ) -> AsyncGenerator[Union[str, bytes], None]:
         """Process a user message and return the response stream."""
         pass
@@ -38,6 +41,6 @@ class SolanaAgent(ABC):
         pass
 
     @abstractmethod
-    def register_tool(self, tool: Tool) -> bool:
+    def register_tool(self, agent_name: str, tool: Tool) -> bool:
         """Register a tool in the registry."""
         pass
--- solana_agent-20.1.5/solana_agent/interfaces/providers/llm.py
+++ solana_agent-21.1.0/solana_agent/interfaces/providers/llm.py
@@ -15,6 +15,7 @@ class LLMProvider(ABC):
         self,
         prompt: str,
         system_prompt: str = "",
+        search: bool = False,
     ) -> AsyncGenerator[str, None]:
         """Generate text from the language model."""
         pass
--- solana_agent-20.1.5/solana_agent/interfaces/services/agent.py
+++ solana_agent-21.1.0/solana_agent/interfaces/services/agent.py
@@ -34,6 +34,7 @@ class AgentService(ABC):
             "flac", "mp3", "mp4", "mpeg", "mpga", "m4a", "ogg", "wav", "webm"
         ] = "mp4",
         prompt: Optional[str] = None,
+        use_openai_search: bool = True,
     ) -> AsyncGenerator[Union[str, bytes], None]:
         """Generate a response from an agent."""
         pass
--- solana_agent-20.1.5/solana_agent/interfaces/services/query.py
+++ solana_agent-21.1.0/solana_agent/interfaces/services/query.py
@@ -20,6 +20,7 @@ class QueryService(ABC):
             "flac", "mp3", "mp4", "mpeg", "mpga", "m4a", "ogg", "wav", "webm"
         ] = "mp4",
         prompt: Optional[str] = None,
+        use_openai_search: bool = True,
     ) -> AsyncGenerator[Union[str, bytes], None]:
         """Process the user request and generate a response."""
         pass
--- solana_agent-20.1.5/solana_agent/services/agent.py
+++ solana_agent-21.1.0/solana_agent/services/agent.py
@@ -176,6 +176,7 @@ class AgentService(AgentServiceInterface):
             "flac", "mp3", "mp4", "mpeg", "mpga", "m4a", "ogg", "wav", "webm"
         ] = "mp4",
         prompt: Optional[str] = None,
+        use_openai_search: bool = True,
     ) -> AsyncGenerator[Union[str, bytes], None]:  # pragma: no cover
         """Generate a response with support for text/audio input/output.
 
@@ -190,6 +191,7 @@ class AgentService(AgentServiceInterface):
             audio_output_format: Audio output format
             audio_input_format: Audio input format
             prompt: Optional prompt for the agent
+            use_openai_search: Flag to use OpenAI search
 
         Yields:
             Text chunks or audio bytes depending on output_format
@@ -237,6 +239,7 @@ class AgentService(AgentServiceInterface):
             async for chunk in self.llm_provider.generate_text(
                 prompt=query_text,
                 system_prompt=system_prompt,
+                search=use_openai_search,
             ):
                 # Check for JSON start
                 if chunk.strip().startswith("{") and not is_json:
--- solana_agent-20.1.5/solana_agent/services/query.py
+++ solana_agent-21.1.0/solana_agent/services/query.py
@@ -49,6 +49,7 @@ class QueryService(QueryServiceInterface):
         ] = "mp4",
         prompt: Optional[str] = None,
         router: Optional[RoutingServiceInterface] = None,
+        use_openai_search: bool = True,
     ) -> AsyncGenerator[Union[str, bytes], None]:  # pragma: no cover
         """Process the user request with appropriate agent.
 
@@ -62,6 +63,7 @@ class QueryService(QueryServiceInterface):
             audio_input_format: Audio input format
             prompt: Optional prompt for the agent
             router: Optional routing service for processing
+            use_openai_search: Flag to use OpenAI search
 
         Yields:
             Response chunks (text strings or audio bytes)
@@ -119,6 +121,7 @@ class QueryService(QueryServiceInterface):
                 audio_output_format=audio_output_format,
                 audio_instructions=audio_instructions,
                 prompt=prompt,
+                use_openai_search=use_openai_search,
             ):
                 yield audio_chunk
 
@@ -137,6 +140,7 @@ class QueryService(QueryServiceInterface):
                 memory_context=memory_context,
                 output_format="text",
                 prompt=prompt,
+                use_openai_search=use_openai_search,
             ):
                 yield chunk
                 full_text_response += chunk
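
Taken together, these hunks thread a single flag end to end: `SolanaAgent.process(use_openai_search=...)` passes it to `QueryService.process`, which forwards it to `AgentService.generate_response`, which finally calls `LLMProvider.generate_text(search=...)`. This is what replaces the removed "Internet Search" tool section in the README. A minimal end-to-end sketch, assuming the same config shape as the removed README example (API key and agent definition are placeholders):

```python
import asyncio

from solana_agent import SolanaAgent

# Placeholder config: no "search_internet" tool is configured, since 21.1.0
# handles search through the built-in use_openai_search flag instead.
config = {
    "openai": {"api_key": "your-openai-api-key"},
    "agents": [
        {
            "name": "research_specialist",
            "instructions": "You are an expert researcher who synthesizes complex information clearly.",
            "specialization": "Research and knowledge synthesis",
        },
    ],
}

solana_agent = SolanaAgent(config=config)


async def main():
    # use_openai_search defaults to True; pass False to stay on the plain text model.
    async for response in solana_agent.process(
        "user123",
        "What are the latest AI developments?",
        use_openai_search=True,
    ):
        print(response, end="")


asyncio.run(main())
```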