solana-agent 1.3.1__py3-none-any.whl → 1.4.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
solana_agent/ai.py CHANGED
@@ -4,6 +4,7 @@ import json
  from typing import AsyncGenerator, Literal, Optional, Dict, Any, Callable
  import uuid
  import cohere
+ import pandas as pd
  from pydantic import BaseModel
  from pymongo import MongoClient
  from openai import OpenAI
@@ -109,13 +110,14 @@ class AI:
          pinecone_index_name: str = None,
          cohere_api_key: str = None,
          cohere_model: Literal["rerank-v3.5"] = "rerank-v3.5",
+         gemini_api_key: str = None,
          code_interpreter: bool = True,
          file_search: bool = True,
          openai_assistant_model: Literal["gpt-4o-mini",
                                          "gpt-4o"] = "gpt-4o-mini",
          openai_embedding_model: Literal[
              "text-embedding-3-small", "text-embedding-3-large"
-         ] = "text-embedding-3-small",
+         ] = "text-embedding-3-large",
      ):
          """Initialize a new AI assistant with memory and tool integration capabilities.

@@ -131,10 +133,11 @@ class AI:
              pinecone_index_name (str, optional): Name of the Pinecone index. Defaults to None
              cohere_api_key (str, optional): API key for Cohere search. Defaults to None
              cohere_model (Literal["rerank-v3.5"], optional): Cohere model for reranking. Defaults to "rerank-v3.5"
+             gemini_api_key (str, optional): API key for Gemini (used for CSV summarization). Defaults to None
              code_interpreter (bool, optional): Enable code interpretation. Defaults to True
              file_search (bool, optional): Enable file search tool. Defaults to True
              openai_assistant_model (Literal["gpt-4o-mini", "gpt-4o"], optional): OpenAI model for assistant. Defaults to "gpt-4o-mini"
-             openai_embedding_model (Literal["text-embedding-3-small", "text-embedding-3-large"], optional): OpenAI model for text embedding. Defaults to "text-embedding-3-small"
+             openai_embedding_model (Literal["text-embedding-3-small", "text-embedding-3-large"], optional): OpenAI model for text embedding. Defaults to "text-embedding-3-large"

          Example:
          ```python
@@ -148,7 +151,7 @@ class AI:
          Notes:
              - Requires valid OpenAI API key for core functionality
              - Database instance for storing messages and threads
-             - Optional integrations for Zep, Perplexity, Pinecone, Cohere, and Grok
+             - Optional integrations for Zep, Perplexity, Pinecone, Cohere, Gemini, and Grok
              - Supports code interpretation and custom tool functions
              - You must create the Pinecone index in the dashboard before using it
          """
@@ -179,6 +182,7 @@ class AI:
          self._sync_zep = Zep(api_key=zep_api_key) if zep_api_key else None
          self._perplexity_api_key = perplexity_api_key
          self._grok_api_key = grok_api_key
+         self._gemini_api_key = gemini_api_key
          self._pinecone = (
              Pinecone(api_key=pinecone_api_key) if pinecone_api_key else None
          )
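
For orientation, here is a minimal initialization sketch reflecting the constructor changes above. Only `gemini_api_key`, the new `text-embedding-3-large` default, and the Pinecone/Cohere/Zep/Perplexity/Grok parameters are confirmed by this diff; `openai_api_key` and `database` are assumed names inferred from the docstring notes and may differ from the actual signature.

```python
# Hypothetical sketch, not taken verbatim from the package source.
from pymongo import MongoClient
from solana_agent.ai import AI

ai = AI(
    openai_api_key="sk-...",                 # assumed parameter name (not shown in this diff)
    database=MongoClient()["solana_agent"],  # assumed: a MongoDB database handle, per the docstring notes
    gemini_api_key="AIza...",                # new in 1.4.1, used by the CSV summarization tools
    pinecone_api_key="pc-...",
    pinecone_index_name="kb",                # the index must already exist in the Pinecone dashboard
    cohere_api_key="co-...",
    openai_embedding_model="text-embedding-3-large",  # the new default in 1.4.1
)
```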
@@ -271,6 +275,135 @@ class AI:
          )
          return run.status

+     def csv_to_text(self, file, filename: str) -> str:
+         """Convert a CSV file to a Markdown table text format optimized for LLM ingestion.
+
+         Args:
+             file (BinaryIO): The CSV file to convert to text.
+             filename (str): The name of the CSV file.
+
+         Returns:
+             str: A Markdown formatted table representing the CSV data.
+
+         Example:
+             ```python
+             result = ai.csv_to_text(open("data.csv", "rb"), "data")
+             print(result)
+             # Returns a Markdown table such as:
+             # **Table: data**
+             #
+             # | Date | Product | Revenue |
+             # | ---------- | -------- | ------- |
+             # | 2024-01-01 | Widget A | $100 |
+             # | 2024-01-02 | Widget B | $200 |
+             ```
+
+         Note:
+             This is a synchronous tool method required for OpenAI function calling.
+             The output format preserves the table structure, allowing the LLM to understand column relationships and row data.
+         """
+         df = pd.read_csv(file)
+         # Create header and separator rows for the Markdown table
+         header = "| " + " | ".join(df.columns.astype(str)) + " |"
+         separator = "| " + " | ".join(["---"] * len(df.columns)) + " |"
+         # Render each dataframe row as a Markdown table row
+         rows = "\n".join("| " + " | ".join(map(str, row)) +
+                          " |" for row in df.values)
+         markdown_table = f"**Table: {filename}**\n\n{header}\n{separator}\n{rows}"
+         return markdown_table
+
+     # summarize tool - has to be sync
+     def summarize(
+         self,
+         text: str,
+         prompt: str = "Summarize the markdown table into a report, include important metrics and totals.",
+         model: Literal["gemini-2.0-flash",
+                        "gemini-1.5-pro"] = "gemini-1.5-pro",
+     ) -> str:
+         """Summarize text using Google's Gemini language model.
+
+         Args:
+             text (str): The text content to be summarized
+             prompt (str, optional): The prompt to use for summarization. Defaults to "Summarize the markdown table into a report, include important metrics and totals."
+             model (Literal["gemini-2.0-flash", "gemini-1.5-pro"], optional):
+                 Gemini model to use. Defaults to "gemini-1.5-pro"
+                 - gemini-2.0-flash: Faster, shorter summaries
+                 - gemini-1.5-pro: More detailed summaries
+
+         Returns:
+             str: Summarized text or error message if summarization fails
+
+         Example:
+             ```python
+             summary = ai.summarize("Long article text here...")
+             # Returns: "Concise summary of the article..."
+             ```
+
+         Note:
+             This is a synchronous tool method required for OpenAI function calling.
+             Requires a valid Gemini API key to be configured.
+         """
+         try:
+             client = OpenAI(
+                 api_key=self._gemini_api_key,
+                 base_url="https://generativelanguage.googleapis.com/v1beta/openai/",
+             )
+
+             completion = client.chat.completions.create(
+                 model=model,
+                 messages=[
+                     {
+                         "role": "system",
+                         "content": prompt,
+                     },
+                     {"role": "user", "content": text},
+                 ],
+             )
+
+             return completion.choices[0].message.content
+         except Exception as e:
+             return f"Failed to summarize text. Error: {e}"
+
+     def upload_csv_file_to_kb(
+         self,
+         file,
+         filename: str,
+         prompt: str = "Summarize the markdown table into a report, include important metrics and totals.",
+         namespace: str = "global",
+         model: Literal["gemini-2.0-flash",
+                        "gemini-1.5-pro"] = "gemini-1.5-pro",
+     ):
+         """Upload and process a CSV file into the knowledge base with AI summarization.
+
+         Args:
+             file (BinaryIO): The CSV file to upload and process
+             filename (str): The name of the CSV file
+             prompt (str, optional): Custom prompt for summarization. Defaults to "Summarize the markdown table into a report, include important metrics and totals."
+             namespace (str, optional): Knowledge base namespace. Defaults to "global".
+             model (Literal["gemini-2.0-flash", "gemini-1.5-pro"], optional):
+                 Gemini model for summarization. Defaults to "gemini-1.5-pro"
+
+         Example:
+             ```python
+             ai.upload_csv_file_to_kb(
+                 file=open("data.csv", "rb"),
+                 filename="data.csv",
+             )
+             ```
+
+         Note:
+             - Converts CSV to Markdown table format
+             - Uses Gemini AI to generate a summary
+             - Stores summary in Pinecone knowledge base
+             - Requires configured Pinecone index
+             - Supports custom prompts for targeted summaries
+         """
+         csv_text = self.csv_to_text(file, filename)
+         print(csv_text)
+         document = self.summarize(csv_text, prompt, model)
+         print(document)
+         self.add_document_to_kb(document=document, namespace=namespace)
+
      def delete_vector_store_and_files(self):
          """Delete the OpenAI vector store and files.

solana_agent-1.3.1.dist-info/METADATA → solana_agent-1.4.1.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: solana-agent
- Version: 1.3.1
+ Version: 1.4.1
  Summary: Build self-learning AI Agents
  License: MIT
  Keywords: ai,openai,ai agents
@@ -18,6 +18,7 @@ Classifier: Programming Language :: Python :: 3 :: Only
  Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
  Requires-Dist: cohere (>=5.13.12,<6.0.0)
  Requires-Dist: openai (>=1.63.2,<2.0.0)
+ Requires-Dist: pandas (>=2.2.3,<3.0.0)
  Requires-Dist: pinecone (>=6.0.1,<7.0.0)
  Requires-Dist: pydantic (>=2.10.6,<3.0.0)
  Requires-Dist: pymongo (>=4.11.1,<5.0.0)
@@ -65,8 +66,9 @@ Unlike traditional AI assistants that forget conversations after each session, S
  - X (Twitter) search using Grok
  - Conversational fact search powered by Zep
  - Conversational message history using MongoDB (on-prem or hosted)
- - Knowledge Base using Pinecone with reranking by Cohere - available globally or user-specific
- - File search using OpenAI
+ - Knowledge Base (KB) using Pinecone with reranking by Cohere - available globally or user-specific
+ - File uploading and search using OpenAI (e.g. for PDFs)
+ - CSV uploads summarized into reports by Gemini and stored in the Knowledge Base (KB)
  - Comprehensive reasoning combining multiple data sources

  ## Why Choose Solana Agent Over LangChain?
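
The CSV feature advertised above maps to the new `csv_to_text`, `summarize`, and `upload_csv_file_to_kb` methods added to `solana_agent/ai.py` in this release. A usage sketch, assuming `ai` is an `AI` instance configured with `gemini_api_key` and a Pinecone index; the file name and prompt are placeholders:

```python
# One call runs the whole pipeline: CSV -> Markdown table -> Gemini summary -> Pinecone KB.
with open("sales.csv", "rb") as f:
    ai.upload_csv_file_to_kb(
        file=f,
        filename="sales.csv",
        prompt="Summarize monthly revenue and flag any outliers.",
        namespace="global",        # a user-specific namespace keeps the summary out of the global KB
        model="gemini-2.0-flash",  # faster, shorter summaries than the gemini-1.5-pro default
    )

# The intermediate steps can also be called individually:
with open("sales.csv", "rb") as f:
    table = ai.csv_to_text(f, "sales")  # Markdown table string
report = ai.summarize(table)            # Gemini-generated report text
```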
solana_agent-1.4.1.dist-info/RECORD ADDED
@@ -0,0 +1,6 @@
+ solana_agent/__init__.py,sha256=zpfnWqANd3OHGWm7NCF5Y6m01BWG4NkNk8SK9Ex48nA,18
+ solana_agent/ai.py,sha256=BprYscIZfeTFyi2CY-E-OOy2fYnkX_vk9r8wd-4qezw,46848
+ solana_agent-1.4.1.dist-info/LICENSE,sha256=BnSRc-NSFuyF2s496l_4EyrwAP6YimvxWcjPiJ0J7g4,1057
+ solana_agent-1.4.1.dist-info/METADATA,sha256=LstvNC7XomRdUCA4QLDPT0gjUo764dB4aRdu7sx_7lo,4871
+ solana_agent-1.4.1.dist-info/WHEEL,sha256=XbeZDeTWKc1w7CSIyre5aMDU_-PohRwTQceYnisIYYY,88
+ solana_agent-1.4.1.dist-info/RECORD,,
solana_agent-1.3.1.dist-info/RECORD DELETED
@@ -1,6 +0,0 @@
- solana_agent/__init__.py,sha256=zpfnWqANd3OHGWm7NCF5Y6m01BWG4NkNk8SK9Ex48nA,18
- solana_agent/ai.py,sha256=RxbyG9UzvFjZSGTZ2SC7t6U0Pfw6uERaAisxGDqLsUw,41457
- solana_agent-1.3.1.dist-info/LICENSE,sha256=BnSRc-NSFuyF2s496l_4EyrwAP6YimvxWcjPiJ0J7g4,1057
- solana_agent-1.3.1.dist-info/METADATA,sha256=I6QyN8qmmeFzpNNlXN4ZquFJ3srdK5FuooKEOqmzTrw,4694
- solana_agent-1.3.1.dist-info/WHEEL,sha256=XbeZDeTWKc1w7CSIyre5aMDU_-PohRwTQceYnisIYYY,88
- solana_agent-1.3.1.dist-info/RECORD,,