solana-agent 1.1.2__py3-none-any.whl → 1.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
solana_agent/ai.py CHANGED
@@ -2,8 +2,10 @@ import asyncio
  import datetime
  import json
  from typing import AsyncGenerator, Literal, Optional, Dict, Any, Callable
+ import uuid
+ import cohere
  from pydantic import BaseModel
- from motor.motor_asyncio import AsyncIOMotorClient
+ from pymongo import MongoClient
  from openai import OpenAI
  from openai import AssistantEventHandler
  from openai.types.beta.threads import TextDelta, Text
@@ -13,6 +15,7 @@ import requests
  from zep_cloud.client import AsyncZep
  from zep_cloud.client import Zep
  from zep_cloud.types import Message
+ from pinecone import Pinecone


  class EventHandler(AssistantEventHandler):
@@ -41,28 +44,37 @@ class ToolConfig(BaseModel):

  class MongoDatabase:
      def __init__(self, db_url: str, db_name: str):
-         self._client = AsyncIOMotorClient(db_url)
+         self._client = MongoClient(db_url)
          self.db = self._client[db_name]
          self._threads = self.db["threads"]
          self.messages = self.db["messages"]
+         self.kb = self.db["kb"]

-     async def save_thread_id(self, user_id: str, thread_id: str):
-         await self._threads.insert_one({"thread_id": thread_id, "user_id": user_id})
+     def save_thread_id(self, user_id: str, thread_id: str):
+         self._threads.insert_one({"thread_id": thread_id, "user_id": user_id})

-     async def get_thread_id(self, user_id: str) -> Optional[str]:
-         document = await self._threads.find_one({"user_id": user_id})
+     def get_thread_id(self, user_id: str) -> Optional[str]:
+         document = self._threads.find_one({"user_id": user_id})
          return document["thread_id"] if document else None

-     async def save_message(self, user_id: str, metadata: Dict[str, Any]):
+     def save_message(self, user_id: str, metadata: Dict[str, Any]):
          metadata["user_id"] = user_id
-         await self.messages.insert_one(metadata)
+         self.messages.insert_one(metadata)

-     async def delete_all_threads(self):
-         await self._threads.delete_many({})
+     def delete_all_threads(self):
+         self._threads.delete_many({})

-     async def clear_user_history(self, user_id: str):
-         await self.messages.delete_many({"user_id": user_id})
-         await self._threads.delete_one({"user_id": user_id})
+     def clear_user_history(self, user_id: str):
+         self.messages.delete_many({"user_id": user_id})
+         self._threads.delete_one({"user_id": user_id})
+
+     def add_document_to_kb(self, id: str, namespace: str, document: str):
+         storage = {}
+         storage["namespace"] = namespace
+         storage["reference"] = id
+         storage["document"] = document
+         storage["timestamp"] = datetime.datetime.now(datetime.timezone.utc)
+         self.kb.insert_one(storage)


  class AI:
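Because `MongoDatabase` now wraps the synchronous `pymongo.MongoClient` instead of motor's async client, its helpers are plain blocking methods and callers drop the `await`. A minimal usage sketch, assuming a locally running MongoDB instance; the connection URL, database name, and document values are illustrative only:

```python
from solana_agent.ai import MongoDatabase

# Placeholder connection URL and database name for illustration.
db = MongoDatabase("mongodb://localhost:27017", "solana_agent")

# No `await` after the motor -> pymongo switch; these calls block.
db.save_thread_id("user123", "thread_abc")
print(db.get_thread_id("user123"))  # -> "thread_abc"

# New in 1.2.0: raw KB documents are mirrored into the "kb" collection,
# keyed by the Pinecone vector id ("reference").
db.add_document_to_kb("doc-1", "global", "user123 has 4 cats")
```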
@@ -75,6 +87,10 @@ class AI:
          zep_api_key: str = None,
          perplexity_api_key: str = None,
          grok_api_key: str = None,
+         pinecone_api_key: str = None,
+         pinecone_index_name: str = None,
+         cohere_api_key: str = None,
+         cohere_model: Literal["rerank-v3.5"] = "rerank-v3.5",
          code_interpreter: bool = True,
          openai_assistant_model: Literal["gpt-4o-mini",
                                          "gpt-4o"] = "gpt-4o-mini",
@@ -92,6 +108,10 @@ class AI:
              zep_api_key (str, optional): API key for Zep memory integration. Defaults to None
              perplexity_api_key (str, optional): API key for Perplexity search. Defaults to None
              grok_api_key (str, optional): API key for X/Twitter search via Grok. Defaults to None
+             pinecone_api_key (str, optional): API key for Pinecone. Defaults to None
+             pinecone_index_name (str, optional): Name of the Pinecone index. Defaults to None
+             cohere_api_key (str, optional): API key for Cohere search. Defaults to None
+             cohere_model (Literal["rerank-v3.5"], optional): Cohere model for reranking. Defaults to "rerank-v3.5"
              code_interpreter (bool, optional): Enable code interpretation. Defaults to True
              openai_assistant_model (Literal["gpt-4o-mini", "gpt-4o"], optional): OpenAI model for assistant. Defaults to "gpt-4o-mini"
              openai_embedding_model (Literal["text-embedding-3-small", "text-embedding-3-large"], optional): OpenAI model for text embedding. Defaults to "text-embedding-3-small"
@@ -108,7 +128,7 @@ class AI:
          Notes:
              - Requires valid OpenAI API key for core functionality
              - Database instance for storing messages and threads
-             - Optional integrations for Zep, Perplexity and Grok
+             - Optional integrations for Zep, Perplexity, Pinecone, Cohere, and Grok
              - Supports code interpretation and custom tool functions
              - You must create the Pinecone index in the dashboard before using it
          """
@@ -127,6 +147,17 @@ class AI:
          self._sync_zep = Zep(api_key=zep_api_key) if zep_api_key else None
          self._perplexity_api_key = perplexity_api_key
          self._grok_api_key = grok_api_key
+         self._pinecone = (
+             Pinecone(api_key=pinecone_api_key) if pinecone_api_key else None
+         )
+         self._pinecone_index_name = pinecone_index_name if pinecone_index_name else None
+         self.kb = (
+             self._pinecone.Index(
+                 self._pinecone_index_name) if self._pinecone else None
+         )
+         self._co = cohere.ClientV2(
+             api_key=cohere_api_key) if cohere_api_key else None
+         self._co_model = cohere_model if cohere_api_key else None

      async def __aenter__(self):
          assistants = self._client.beta.assistants.list()
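For orientation, a hedged construction sketch showing only the options introduced in this release. It is not runnable as-is: the pre-existing required constructor arguments (OpenAI API key, database instance, and so on) are elided because they are unchanged by this diff, and the keys and index name shown are placeholders. The Pinecone index must already exist in the dashboard:

```python
from solana_agent.ai import AI

ai = AI(
    # ...existing required arguments (OpenAI API key, database instance, ...) omitted...
    pinecone_api_key="pc-...",          # optional: enables the Pinecone knowledge base
    pinecone_index_name="my-kb-index",  # placeholder; create this index in the Pinecone dashboard first
    cohere_api_key="co-...",            # optional: enables Cohere reranking of KB results
    cohere_model="rerank-v3.5",         # currently the only accepted value
)
```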
@@ -142,7 +173,7 @@ class AI:
                  tools=self._tools,
                  model=self._openai_assistant_model,
              ).id
-             await self._database.delete_all_threads()
+             self._database.delete_all_threads()

          return self

@@ -151,12 +182,12 @@ class AI:
          pass

      async def _create_thread(self, user_id: str) -> str:
-         thread_id = await self._database.get_thread_id(user_id)
+         thread_id = self._database.get_thread_id(user_id)

          if thread_id is None:
              thread = self._client.beta.threads.create()
              thread_id = thread.id
-             await self._database.save_thread_id(user_id, thread_id)
+             self._database.save_thread_id(user_id, thread_id)
          if self._zep:
              try:
                  await self._zep.user.add(user_id=user_id)
@@ -192,6 +223,125 @@ class AI:
          )
          return run.status

+     def search_kb(self, query: str, namespace: str = "global", limit: int = 3) -> str:
+         """Search the Pinecone knowledge base using OpenAI embeddings.
+
+         Args:
+             query (str): Search query to find relevant documents
+             namespace (str, optional): Namespace of the Pinecone index to search. Defaults to "global".
+             limit (int, optional): Maximum number of results to return. Defaults to 3.
+
+         Returns:
+             str: JSON string of matched documents or error message
+
+         Example:
+             ```python
+             results = ai.search_kb("machine learning basics")
+             # Returns: '["Document 1", "Document 2", ...]'
+             ```
+
+         Note:
+             - Requires a configured Pinecone index
+             - Uses OpenAI embeddings for semantic search
+             - Returns a JSON-serialized list of matched documents
+             - Returns an error message string if the search fails
+             - Optionally reranks results using the Cohere API
+         """
+         try:
+             response = self._client.embeddings.create(
+                 input=query,
+                 model=self._openai_embedding_model,
+             )
+             search_results = self.kb.query(
+                 vector=response.data[0].embedding,
+                 top_k=10,
+                 include_metadata=False,
+                 include_values=False,
+                 namespace=namespace,
+             )
+             matches = search_results.matches
+             ids = []
+             for match in matches:
+                 ids.append(match.id)
+             docs = []
+             for id in ids:
+                 document = self._database.kb.find_one({"reference": id})
+                 docs.append(document["document"])
+             if self._co:
+                 try:
+                     response = self._co.rerank(
+                         model=self._co_model,
+                         query=query,
+                         documents=docs,
+                         top_n=limit,
+                     )
+                     reranked_docs = response.results
+                     new_docs = []
+                     for doc in reranked_docs:
+                         new_docs.append(docs[doc.index])
+                     return json.dumps(new_docs)
+                 except Exception:
+                     return json.dumps(docs[:limit])
+             else:
+                 return json.dumps(docs[:limit])
+         except Exception as e:
+             return f"Failed to search KB. Error: {e}"
+
+     def add_document_to_kb(
+         self,
+         document: str,
+         id: str = uuid.uuid4().hex,
+         namespace: str = "global",
+     ):
+         """Add a document to the Pinecone knowledge base with OpenAI embeddings.
+
+         Args:
+             document (str): Document to add to the knowledge base
+             id (str, optional): Unique identifier for the document. Defaults to a random UUID.
+             namespace (str, optional): Namespace of the Pinecone index. Defaults to "global".
+
+         Example:
+             ```python
+             ai.add_document_to_kb("user123 has 4 cats")
+             ```
+
+         Note:
+             - Requires a configured Pinecone index
+             - Uses the OpenAI embeddings API
+         """
+         response = self._client.embeddings.create(
+             input=document,
+             model=self._openai_embedding_model,
+         )
+         self.kb.upsert(
+             vectors=[
+                 {
+                     "id": id,
+                     "values": response.data[0].embedding,
+                 }
+             ],
+             namespace=namespace,
+         )
+         self._database.add_document_to_kb(id, namespace, document)
+
+     def delete_document_from_kb(self, id: str, user_id: str = "global"):
+         """Delete a document from the Pinecone knowledge base.
+
+         Args:
+             id (str): Unique identifier for the document
+             user_id (str, optional): Namespace (user) the document was stored under. Defaults to "global".
+
+         Example:
+             ```python
+             ai.delete_document_from_kb("document_id", "user123")
+             ```
+
+         Note:
+             - Requires a configured Pinecone index
+         """
+         self.kb.delete(ids=[id], namespace=user_id)
+         self._database.kb.delete_one({"reference": id})
+
      # check time tool - has to be sync
      def check_time(self) -> str:
          """Get current UTC time formatted as a string.
@@ -209,7 +359,9 @@ class AI:
          This is a synchronous tool method required for OpenAI function calling.
          Always returns time in UTC timezone for consistency.
          """
-         return datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d %H:%M:%S %Z")
+         return datetime.datetime.now(datetime.timezone.utc).strftime(
+             "%Y-%m-%d %H:%M:%S %Z"
+         )

      # search facts tool - has to be sync
      def search_facts(
@@ -336,11 +488,13 @@ class AI:
          use_perplexity: bool = True,
          use_grok: bool = True,
          use_facts: bool = True,
+         use_kb: bool = True,
          perplexity_model: Literal[
              "sonar", "sonar-pro", "sonar-reasoning-pro", "sonar-reasoning"
          ] = "sonar",
          openai_model: Literal["o1", "o3-mini"] = "o3-mini",
          grok_model: Literal["grok-beta"] = "grok-beta",
+         namespace: str = "global",
      ) -> str:
          """Combine multiple data sources with AI reasoning to answer queries.

@@ -354,6 +508,7 @@ class AI:
              perplexity_model (Literal, optional): Perplexity model to use. Defaults to "sonar"
              openai_model (Literal, optional): OpenAI model for reasoning. Defaults to "o3-mini"
              grok_model (Literal, optional): Grok model for X search. Defaults to "grok-beta"
+             namespace (str, optional): Namespace of the Pinecone index to search. Defaults to "global"

          Returns:
              str: Reasoned response combining all enabled data sources or error message
@@ -373,6 +528,13 @@ class AI:
          Will gracefully handle missing or failed data sources.
          """
          try:
+             if use_kb:
+                 try:
+                     kb_results = self.search_kb(query, namespace)
+                 except Exception:
+                     kb_results = ""
+             else:
+                 kb_results = ""
              if use_facts:
                  try:
                      facts = self.search_facts(user_id, query)
@@ -405,7 +567,7 @@ class AI:
                      },
                      {
                          "role": "user",
-                         "content": f"Query: {query}, Facts: {facts}, Internet Search Results: {search_results}, X Search Results: {x_search_results}",
+                         "content": f"Query: {query}, Facts: {facts}, KB Results: {kb_results}, Internet Search Results: {search_results}, X Search Results: {x_search_results}",
                      },
                  ],
              )
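With the wiring above, knowledge-base hits now flow into the reasoning prompt alongside facts and web/X results. A hedged call sketch follows; the method name `reason` is an assumption, since only its parameters and body changes appear in this diff:

```python
# `reason` is an assumed name for the combined-reasoning tool modified in these hunks.
answer = ai.reason(
    user_id="user123",
    query="Summarize what the knowledge base says about Solana Agent.",
    use_kb=True,          # new in 1.2.0: include Pinecone KB results in the prompt
    namespace="user123",  # new in 1.2.0: which Pinecone namespace search_kb queries
)
print(answer)
```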
@@ -474,7 +636,7 @@ class AI:
          except Exception:
              pass
          try:
-             await self._database.clear_user_history(user_id)
+             self._database.clear_user_history(user_id)
          except Exception:
              pass
          try:
@@ -491,7 +653,7 @@ class AI:
              # Deletes the assistant conversation thread for a user
              ```
          """
-         thread_id = await self._database.get_thread_id(user_id)
+         thread_id = self._database.get_thread_id(user_id)
          await self._client.beta.threads.delete(thread_id=thread_id)

      async def delete_facts(self, user_id: str):
@@ -547,7 +709,7 @@ class AI:
          """
          self._accumulated_value_queue = asyncio.Queue()

-         thread_id = await self._database.get_thread_id(user_id)
+         thread_id = self._database.get_thread_id(user_id)

          if thread_id is None:
              thread_id = await self._create_thread(user_id)
@@ -602,7 +764,7 @@ class AI:
              "timestamp": datetime.datetime.now(datetime.timezone.utc),
          }

-         await self._database.save_message(user_id, metadata)
+         self._database.save_message(user_id, metadata)
          if self._zep:
              messages = [
                  Message(
@@ -668,7 +830,7 @@ class AI:
          # Reset the queue for each new conversation
          self._accumulated_value_queue = asyncio.Queue()

-         thread_id = await self._database.get_thread_id(user_id)
+         thread_id = self._database.get_thread_id(user_id)

          if thread_id is None:
              thread_id = await self._create_thread(user_id)
@@ -713,7 +875,7 @@ class AI:
              "timestamp": datetime.datetime.now(datetime.timezone.utc),
          }

-         await self._database.save_message(user_id, metadata)
+         self._database.save_message(user_id, metadata)

          if self._zep:
              messages = [
solana_agent-1.1.2.dist-info/METADATA → solana_agent-1.2.0.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: solana-agent
- Version: 1.1.2
+ Version: 1.2.0
  Summary: Build self-learning AI Agents
  License: MIT
  Keywords: ai,openai,ai agents
@@ -16,9 +16,11 @@ Classifier: Programming Language :: Python :: 3.12
  Classifier: Programming Language :: Python :: 3.13
  Classifier: Programming Language :: Python :: 3 :: Only
  Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
- Requires-Dist: motor (>=3.7.0,<4.0.0)
+ Requires-Dist: cohere (>=5.13.12,<6.0.0)
  Requires-Dist: openai (>=1.61.1,<2.0.0)
+ Requires-Dist: pinecone (>=6.0.1,<7.0.0)
  Requires-Dist: pydantic (>=2.10.6,<3.0.0)
+ Requires-Dist: pymongo (>=4.11.1,<5.0.0)
  Requires-Dist: requests (>=2.32.3,<3.0.0)
  Requires-Dist: zep-cloud (>=2.4.0,<3.0.0)
  Project-URL: Repository, https://github.com/truemagic-coder/solana-agent
@@ -42,6 +44,7 @@ Unlike traditional AI assistants that forget conversations after each session, S
  - 📈 **Continuous Learning**: Evolves with every new interaction
  - 🎯 **Context-Aware**: Recalls past interactions for more relevant responses
  - 🔄 **Self-Improving**: Builds knowledge and improves reasoning automatically
+ - 🧠 **Knowledge Base**: Add domain-specific knowledge for better reasoning
  - 🏢 **Enterprise-Ready**: Scales from personal to organization-wide deployment
  - 🛡️ **Secure**: Secure and private memory and data storage

@@ -62,6 +65,7 @@ Unlike traditional AI assistants that forget conversations after each session, S
  - X (Twitter) search using Grok
  - Conversational fact search powered by Zep
  - Conversational message history using MongoDB (on-prem or hosted)
+ - Knowledge Base using Pinecone with reranking by Cohere - available globally or user-specific
  - Comprehensive reasoning combining multiple data sources

  ## Why Choose Solana Agent Over LangChain?
@@ -72,6 +76,7 @@ Unlike traditional AI assistants that forget conversations after each session, S
  - Built-in episodic memory vs LangChain's basic memory types
  - Persistent cross-session knowledge retention
  - Automatic self-learning from conversations
+ - Knowledge Base to add domain specific knowledge

  🏢 **Enterprise Focus**
  - Production-ready out of the box in a few lines of code
solana_agent-1.2.0.dist-info/RECORD ADDED
@@ -0,0 +1,6 @@
+ solana_agent/__init__.py,sha256=zpfnWqANd3OHGWm7NCF5Y6m01BWG4NkNk8SK9Ex48nA,18
+ solana_agent/ai.py,sha256=Xi3_9bFhF6qsVNsYCSNJ11rXB3aoFsy-UPY2EgUffHI,36023
+ solana_agent-1.2.0.dist-info/LICENSE,sha256=BnSRc-NSFuyF2s496l_4EyrwAP6YimvxWcjPiJ0J7g4,1057
+ solana_agent-1.2.0.dist-info/METADATA,sha256=ZNkNWKcJ-nJWs7qmQ7693tGVlQ4-nSKz8doRan-QFx8,4587
+ solana_agent-1.2.0.dist-info/WHEEL,sha256=7dDg4QLnNKTvwIDR9Ac8jJaAmBC_owJrckbC0jjThyA,88
+ solana_agent-1.2.0.dist-info/RECORD,,
solana_agent-1.1.2.dist-info/WHEEL → solana_agent-1.2.0.dist-info/WHEEL RENAMED
@@ -1,4 +1,4 @@
  Wheel-Version: 1.0
- Generator: poetry-core 2.0.1
+ Generator: poetry-core 2.1.0
  Root-Is-Purelib: true
  Tag: py3-none-any
solana_agent-1.1.2.dist-info/RECORD DELETED
@@ -1,6 +0,0 @@
- solana_agent/__init__.py,sha256=zpfnWqANd3OHGWm7NCF5Y6m01BWG4NkNk8SK9Ex48nA,18
- solana_agent/ai.py,sha256=mM-oCkrUFO8Lp0dZoqWOlOU0f9lAKmjeuc3eFRWy9zo,29995
- solana_agent-1.1.2.dist-info/LICENSE,sha256=BnSRc-NSFuyF2s496l_4EyrwAP6YimvxWcjPiJ0J7g4,1057
- solana_agent-1.1.2.dist-info/METADATA,sha256=YNglp_2p5pn02MJjtzqAJUTjo_RyiKtp-zIZklJM3c0,4276
- solana_agent-1.1.2.dist-info/WHEEL,sha256=IYZQI976HJqqOpQU6PHkJ8fb3tMNBFjg-Cn-pwAbaFM,88
- solana_agent-1.1.2.dist-info/RECORD,,