agno 2.2.13__py3-none-any.whl → 2.3.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/agent/agent.py +197 -110
- agno/api/api.py +2 -0
- agno/db/base.py +26 -0
- agno/db/dynamo/dynamo.py +8 -0
- agno/db/dynamo/schemas.py +1 -0
- agno/db/firestore/firestore.py +8 -0
- agno/db/firestore/schemas.py +1 -0
- agno/db/gcs_json/gcs_json_db.py +8 -0
- agno/db/in_memory/in_memory_db.py +8 -1
- agno/db/json/json_db.py +8 -0
- agno/db/migrations/manager.py +199 -0
- agno/db/migrations/versions/__init__.py +0 -0
- agno/db/migrations/versions/v2_3_0.py +938 -0
- agno/db/mongo/async_mongo.py +16 -6
- agno/db/mongo/mongo.py +11 -0
- agno/db/mongo/schemas.py +3 -0
- agno/db/mongo/utils.py +17 -0
- agno/db/mysql/mysql.py +76 -3
- agno/db/mysql/schemas.py +20 -10
- agno/db/postgres/async_postgres.py +99 -25
- agno/db/postgres/postgres.py +75 -6
- agno/db/postgres/schemas.py +30 -20
- agno/db/redis/redis.py +15 -2
- agno/db/redis/schemas.py +4 -0
- agno/db/schemas/memory.py +13 -0
- agno/db/singlestore/schemas.py +11 -0
- agno/db/singlestore/singlestore.py +79 -5
- agno/db/sqlite/async_sqlite.py +97 -19
- agno/db/sqlite/schemas.py +10 -0
- agno/db/sqlite/sqlite.py +79 -2
- agno/db/surrealdb/surrealdb.py +8 -0
- agno/knowledge/chunking/semantic.py +7 -2
- agno/knowledge/embedder/nebius.py +1 -1
- agno/knowledge/knowledge.py +57 -86
- agno/knowledge/reader/csv_reader.py +7 -9
- agno/knowledge/reader/docx_reader.py +5 -5
- agno/knowledge/reader/field_labeled_csv_reader.py +16 -18
- agno/knowledge/reader/json_reader.py +5 -4
- agno/knowledge/reader/markdown_reader.py +8 -8
- agno/knowledge/reader/pdf_reader.py +11 -11
- agno/knowledge/reader/pptx_reader.py +5 -5
- agno/knowledge/reader/s3_reader.py +3 -3
- agno/knowledge/reader/text_reader.py +8 -8
- agno/knowledge/reader/web_search_reader.py +1 -48
- agno/knowledge/reader/website_reader.py +10 -10
- agno/models/anthropic/claude.py +319 -28
- agno/models/aws/claude.py +32 -0
- agno/models/azure/openai_chat.py +19 -10
- agno/models/base.py +612 -545
- agno/models/cerebras/cerebras.py +8 -11
- agno/models/cohere/chat.py +27 -1
- agno/models/google/gemini.py +39 -7
- agno/models/groq/groq.py +25 -11
- agno/models/meta/llama.py +20 -9
- agno/models/meta/llama_openai.py +3 -19
- agno/models/nebius/nebius.py +4 -4
- agno/models/openai/chat.py +30 -14
- agno/models/openai/responses.py +10 -13
- agno/models/response.py +1 -0
- agno/models/vertexai/claude.py +26 -0
- agno/os/app.py +8 -19
- agno/os/router.py +54 -0
- agno/os/routers/knowledge/knowledge.py +2 -2
- agno/os/schema.py +2 -2
- agno/session/agent.py +57 -92
- agno/session/summary.py +1 -1
- agno/session/team.py +62 -112
- agno/session/workflow.py +353 -57
- agno/team/team.py +227 -125
- agno/tools/models/nebius.py +5 -5
- agno/tools/models_labs.py +20 -10
- agno/tools/nano_banana.py +151 -0
- agno/tools/yfinance.py +12 -11
- agno/utils/http.py +111 -0
- agno/utils/media.py +11 -0
- agno/utils/models/claude.py +8 -0
- agno/utils/print_response/agent.py +33 -12
- agno/utils/print_response/team.py +22 -12
- agno/vectordb/couchbase/couchbase.py +6 -2
- agno/workflow/condition.py +13 -0
- agno/workflow/loop.py +13 -0
- agno/workflow/parallel.py +13 -0
- agno/workflow/router.py +13 -0
- agno/workflow/step.py +120 -20
- agno/workflow/steps.py +13 -0
- agno/workflow/workflow.py +76 -63
- {agno-2.2.13.dist-info → agno-2.3.1.dist-info}/METADATA +6 -2
- {agno-2.2.13.dist-info → agno-2.3.1.dist-info}/RECORD +91 -88
- agno/tools/googlesearch.py +0 -98
- {agno-2.2.13.dist-info → agno-2.3.1.dist-info}/WHEEL +0 -0
- {agno-2.2.13.dist-info → agno-2.3.1.dist-info}/licenses/LICENSE +0 -0
- {agno-2.2.13.dist-info → agno-2.3.1.dist-info}/top_level.txt +0 -0
agno/db/postgres/postgres.py
CHANGED
@@ -4,6 +4,7 @@ from typing import Any, Dict, List, Optional, Sequence, Tuple, Union
 from uuid import uuid4

 from agno.db.base import BaseDb, SessionType
+from agno.db.migrations.manager import MigrationManager
 from agno.db.postgres.schemas import get_table_schema_definition
 from agno.db.postgres.utils import (
     apply_sorting,
@@ -26,12 +27,12 @@ from agno.utils.log import log_debug, log_error, log_info, log_warning
 from agno.utils.string import generate_id

 try:
-    from sqlalchemy import Index, String, UniqueConstraint, func, update
+    from sqlalchemy import Index, String, UniqueConstraint, func, select, update
     from sqlalchemy.dialects import postgresql
     from sqlalchemy.engine import Engine, create_engine
     from sqlalchemy.orm import scoped_session, sessionmaker
     from sqlalchemy.schema import Column, MetaData, Table
-    from sqlalchemy.sql.expression import
+    from sqlalchemy.sql.expression import text
 except ImportError:
     raise ImportError("`sqlalchemy` not installed. Please install it using `pip install sqlalchemy`")

@@ -48,6 +49,7 @@ class PostgresDb(BaseDb):
         metrics_table: Optional[str] = None,
         eval_table: Optional[str] = None,
         knowledge_table: Optional[str] = None,
+        versions_table: Optional[str] = None,
         id: Optional[str] = None,
     ):
         """
@@ -68,6 +70,7 @@ class PostgresDb(BaseDb):
             eval_table (Optional[str]): Name of the table to store evaluation runs data.
             knowledge_table (Optional[str]): Name of the table to store knowledge content.
             culture_table (Optional[str]): Name of the table to store cultural knowledge.
+            versions_table (Optional[str]): Name of the table to store schema versions.
             id (Optional[str]): ID of the database.

         Raises:
@@ -97,6 +100,7 @@ class PostgresDb(BaseDb):
             eval_table=eval_table,
             knowledge_table=knowledge_table,
             culture_table=culture_table,
+            versions_table=versions_table,
         )

         self.db_schema: str = db_schema if db_schema is not None else "ai"
@@ -126,9 +130,15 @@ class PostgresDb(BaseDb):
             (self.metrics_table_name, "metrics"),
             (self.eval_table_name, "evals"),
             (self.knowledge_table_name, "knowledge"),
+            (self.versions_table_name, "versions"),
         ]

         for table_name, table_type in tables_to_create:
+            if table_name != self.versions_table_name:
+                # Also store the schema version for the created table
+                latest_schema_version = MigrationManager(self).latest_schema_version
+                self.upsert_schema_version(table_name=table_name, version=latest_schema_version.public)
+
             self._create_table(table_name=table_name, table_type=table_type, db_schema=self.db_schema)

     def _create_table(self, table_name: str, table_type: str, db_schema: str) -> Table:
@@ -271,6 +281,15 @@ class PostgresDb(BaseDb):
             )
             return self.culture_table

+        if table_type == "versions":
+            self.versions_table = self._get_or_create_table(
+                table_name=self.versions_table_name,
+                table_type="versions",
+                db_schema=self.db_schema,
+                create_table_if_not_found=create_table_if_not_found,
+            )
+            return self.versions_table
+
         raise ValueError(f"Unknown table type: {table_type}")

     def _get_or_create_table(
@@ -295,6 +314,11 @@ class PostgresDb(BaseDb):
             if not create_table_if_not_found:
                 return None

+            if table_name != self.versions_table_name:
+                # Also store the schema version for the created table
+                latest_schema_version = MigrationManager(self).latest_schema_version
+                self.upsert_schema_version(table_name=table_name, version=latest_schema_version.public)
+
             return self._create_table(table_name=table_name, table_type=table_type, db_schema=db_schema)

         if not is_valid_table(
@@ -313,8 +337,43 @@ class PostgresDb(BaseDb):
             log_error(f"Error loading existing table {db_schema}.{table_name}: {e}")
             raise

-
+    def get_latest_schema_version(self, table_name: str):
+        """Get the latest version of the database schema."""
+        table = self._get_table(table_type="versions", create_table_if_not_found=True)
+        if table is None:
+            return "2.0.0"
+        with self.Session() as sess:
+            stmt = select(table)
+            # Latest version for the given table
+            stmt = stmt.where(table.c.table_name == table_name)
+            stmt = stmt.order_by(table.c.version.desc()).limit(1)
+            result = sess.execute(stmt).fetchone()
+            if result is None:
+                return "2.0.0"
+            version_dict = dict(result._mapping)
+            return version_dict.get("version") or "2.0.0"
+
+    def upsert_schema_version(self, table_name: str, version: str) -> None:
+        """Upsert the schema version into the database."""
+        table = self._get_table(table_type="versions", create_table_if_not_found=True)
+        if table is None:
+            return
+        current_datetime = datetime.now().isoformat()
+        with self.Session() as sess, sess.begin():
+            stmt = postgresql.insert(table).values(
+                table_name=table_name,
+                version=version,
+                created_at=current_datetime,  # Store as ISO format string
+                updated_at=current_datetime,
+            )
+            # Update version if table_name already exists
+            stmt = stmt.on_conflict_do_update(
+                index_elements=["table_name"],
+                set_=dict(version=version, updated_at=current_datetime),
+            )
+            sess.execute(stmt)

+    # -- Session methods --
     def delete_session(self, session_id: str) -> bool:
         """
         Delete a session from the database.
@@ -1241,6 +1300,8 @@ class PostgresDb(BaseDb):
         if memory.memory_id is None:
             memory.memory_id = str(uuid4())

+        current_time = int(time.time())
+
         stmt = postgresql.insert(table).values(
             memory_id=memory.memory_id,
             memory=memory.memory,
@@ -1249,7 +1310,9 @@ class PostgresDb(BaseDb):
             agent_id=memory.agent_id,
             team_id=memory.team_id,
             topics=memory.topics,
-
+            feedback=memory.feedback,
+            created_at=memory.created_at,
+            updated_at=memory.created_at,
         )
         stmt = stmt.on_conflict_do_update(  # type: ignore
             index_elements=["memory_id"],
@@ -1259,7 +1322,10 @@ class PostgresDb(BaseDb):
                 input=memory.input,
                 agent_id=memory.agent_id,
                 team_id=memory.team_id,
-
+                feedback=memory.feedback,
+                updated_at=current_time,
+                # Preserve created_at on update - don't overwrite existing value
+                created_at=table.c.created_at,
             ),
         ).returning(table)

@@ -1313,6 +1379,7 @@ class PostgresDb(BaseDb):

             # Use preserved updated_at if flag is set (even if None), otherwise use current time
             updated_at = memory.updated_at if preserve_updated_at else current_time
+
             memory_records.append(
                 {
                     "memory_id": memory.memory_id,
@@ -1322,6 +1389,8 @@ class PostgresDb(BaseDb):
                     "agent_id": memory.agent_id,
                     "team_id": memory.team_id,
                     "topics": memory.topics,
+                    "feedback": memory.feedback,
+                    "created_at": memory.created_at,
                     "updated_at": updated_at,
                 }
             )
@@ -1333,7 +1402,7 @@
         update_columns = {
             col.name: insert_stmt.excluded[col.name]
             for col in table.columns
-            if col.name not in ["memory_id"]  # Don't update primary key
+            if col.name not in ["memory_id", "created_at"]  # Don't update primary key or created_at
         }
         stmt = insert_stmt.on_conflict_do_update(index_elements=["memory_id"], set_=update_columns).returning(
             table
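
Note: a minimal usage sketch of the schema-version tracking added above. The connection URL and table names are illustrative assumptions; the import path mirrors the file layout shown in this diff.

    from agno.db.postgres.postgres import PostgresDb  # path per agno/db/postgres/postgres.py

    db = PostgresDb(
        db_url="postgresql+psycopg://ai:ai@localhost:5432/ai",  # assumed DSN
        knowledge_table="agno_knowledge",        # assumed table name
        versions_table="agno_schema_versions",   # new in 2.3.x
    )

    # Record the schema version for a table, then read it back.
    db.upsert_schema_version(table_name="agno_knowledge", version="2.3.1")
    print(db.get_latest_schema_version(table_name="agno_knowledge"))  # "2.3.1", or "2.0.0" if never recorded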
agno/db/postgres/schemas.py
CHANGED
@@ -4,7 +4,7 @@ from typing import Any

 try:
     from sqlalchemy.dialects.postgresql import JSONB
-    from sqlalchemy.types import
+    from sqlalchemy.types import BigInteger, Boolean, Date, String, Text
 except ImportError:
     raise ImportError("`sqlalchemy` not installed. Please install it using `pip install sqlalchemy`")

@@ -15,13 +15,13 @@ SESSION_TABLE_SCHEMA = {
     "team_id": {"type": String, "nullable": True},
     "workflow_id": {"type": String, "nullable": True},
     "user_id": {"type": String, "nullable": True},
-    "session_data": {"type":
-    "agent_data": {"type":
-    "team_data": {"type":
-    "workflow_data": {"type":
-    "metadata": {"type":
-    "runs": {"type":
-    "summary": {"type":
+    "session_data": {"type": JSONB, "nullable": True},
+    "agent_data": {"type": JSONB, "nullable": True},
+    "team_data": {"type": JSONB, "nullable": True},
+    "workflow_data": {"type": JSONB, "nullable": True},
+    "metadata": {"type": JSONB, "nullable": True},
+    "runs": {"type": JSONB, "nullable": True},
+    "summary": {"type": JSONB, "nullable": True},
     "created_at": {"type": BigInteger, "nullable": False, "index": True},
     "updated_at": {"type": BigInteger, "nullable": True},
     "_unique_constraints": [
@@ -34,20 +34,22 @@ SESSION_TABLE_SCHEMA = {

 MEMORY_TABLE_SCHEMA = {
     "memory_id": {"type": String, "primary_key": True, "nullable": False},
-    "memory": {"type":
-    "
+    "memory": {"type": JSONB, "nullable": False},
+    "feedback": {"type": Text, "nullable": True},
+    "input": {"type": Text, "nullable": True},
     "agent_id": {"type": String, "nullable": True},
     "team_id": {"type": String, "nullable": True},
     "user_id": {"type": String, "nullable": True, "index": True},
-    "topics": {"type":
+    "topics": {"type": JSONB, "nullable": True},
+    "created_at": {"type": BigInteger, "nullable": False, "index": True},
     "updated_at": {"type": BigInteger, "nullable": True, "index": True},
 }

 EVAL_TABLE_SCHEMA = {
     "run_id": {"type": String, "primary_key": True, "nullable": False},
     "eval_type": {"type": String, "nullable": False},
-    "eval_data": {"type":
-    "eval_input": {"type":
+    "eval_data": {"type": JSONB, "nullable": False},
+    "eval_input": {"type": JSONB, "nullable": False},
     "name": {"type": String, "nullable": True},
     "agent_id": {"type": String, "nullable": True},
     "team_id": {"type": String, "nullable": True},
@@ -62,14 +64,14 @@ EVAL_TABLE_SCHEMA = {
 KNOWLEDGE_TABLE_SCHEMA = {
     "id": {"type": String, "primary_key": True, "nullable": False},
     "name": {"type": String, "nullable": False},
-    "description": {"type":
-    "metadata": {"type":
+    "description": {"type": Text, "nullable": False},
+    "metadata": {"type": JSONB, "nullable": True},
     "type": {"type": String, "nullable": True},
     "size": {"type": BigInteger, "nullable": True},
     "linked_to": {"type": String, "nullable": True},
     "access_count": {"type": BigInteger, "nullable": True},
     "status": {"type": String, "nullable": True},
-    "status_message": {"type":
+    "status_message": {"type": Text, "nullable": True},
     "created_at": {"type": BigInteger, "nullable": True},
     "updated_at": {"type": BigInteger, "nullable": True},
     "external_id": {"type": String, "nullable": True},
@@ -84,8 +86,8 @@ METRICS_TABLE_SCHEMA = {
     "team_sessions_count": {"type": BigInteger, "nullable": False, "default": 0},
     "workflow_sessions_count": {"type": BigInteger, "nullable": False, "default": 0},
     "users_count": {"type": BigInteger, "nullable": False, "default": 0},
-    "token_metrics": {"type":
-    "model_metrics": {"type":
+    "token_metrics": {"type": JSONB, "nullable": False, "default": {}},
+    "model_metrics": {"type": JSONB, "nullable": False, "default": {}},
     "date": {"type": Date, "nullable": False, "index": True},
     "aggregation_period": {"type": String, "nullable": False},
     "created_at": {"type": BigInteger, "nullable": False},
@@ -102,16 +104,23 @@ METRICS_TABLE_SCHEMA = {
 CULTURAL_KNOWLEDGE_TABLE_SCHEMA = {
     "id": {"type": String, "primary_key": True, "nullable": False},
     "name": {"type": String, "nullable": False, "index": True},
-    "summary": {"type":
+    "summary": {"type": Text, "nullable": True},
     "content": {"type": JSONB, "nullable": True},
     "metadata": {"type": JSONB, "nullable": True},
-    "input": {"type":
+    "input": {"type": Text, "nullable": True},
     "created_at": {"type": BigInteger, "nullable": True},
     "updated_at": {"type": BigInteger, "nullable": True},
     "agent_id": {"type": String, "nullable": True},
     "team_id": {"type": String, "nullable": True},
 }

+VERSIONS_TABLE_SCHEMA = {
+    "table_name": {"type": String, "nullable": False, "primary_key": True},
+    "version": {"type": String, "nullable": False},
+    "created_at": {"type": String, "nullable": False, "index": True},
+    "updated_at": {"type": String, "nullable": True},
+}
+

 def get_table_schema_definition(table_type: str) -> dict[str, Any]:
     """
@@ -130,6 +139,7 @@ def get_table_schema_definition(table_type: str) -> dict[str, Any]:
         "memories": MEMORY_TABLE_SCHEMA,
         "knowledge": KNOWLEDGE_TABLE_SCHEMA,
         "culture": CULTURAL_KNOWLEDGE_TABLE_SCHEMA,
+        "versions": VERSIONS_TABLE_SCHEMA,
     }

     schema = schemas.get(table_type, {})
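
Note: a short sketch of what the new "versions" table type resolves to; the function and the schema keys are the ones defined in this file.

    from agno.db.postgres.schemas import get_table_schema_definition

    versions_schema = get_table_schema_definition("versions")
    for column, definition in versions_schema.items():
        print(column, definition)
    # table_name -> String primary key, version -> String,
    # created_at -> String (indexed), updated_at -> String (nullable)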
agno/db/redis/redis.py
CHANGED
@@ -29,7 +29,7 @@ from agno.utils.log import log_debug, log_error, log_info
 from agno.utils.string import generate_id

 try:
-    from redis import Redis
+    from redis import Redis, RedisCluster
 except ImportError:
     raise ImportError("`redis` not installed. Please install it using `pip install redis`")

@@ -38,7 +38,7 @@ class RedisDb(BaseDb):
     def __init__(
         self,
         id: Optional[str] = None,
-        redis_client: Optional[Redis] = None,
+        redis_client: Optional[Union[Redis, RedisCluster]] = None,
         db_url: Optional[str] = None,
         db_prefix: str = "agno",
         expire: Optional[int] = None,
@@ -57,6 +57,8 @@ class RedisDb(BaseDb):
         2. Use the db_url
         3. Raise an error if neither is provided

+        db_url only supports single-node Redis connections, if you need Redis Cluster support, provide a redis_client.
+
         Args:
             id (Optional[str]): The ID of the database.
             redis_client (Optional[Redis]): Redis client instance to use. If not provided a new client will be created.
@@ -252,6 +254,14 @@ class RedisDb(BaseDb):
             log_error(f"Error getting all records for {table_type}: {e}")
             return []

+    def get_latest_schema_version(self):
+        """Get the latest version of the database schema."""
+        pass
+
+    def upsert_schema_version(self, version: str) -> None:
+        """Upsert the schema version into the database."""
+        pass
+
     # -- Session methods --

     def delete_session(self, session_id: str) -> bool:
@@ -903,6 +913,9 @@ class RedisDb(BaseDb):
             "memory_id": memory.memory_id,
             "memory": memory.memory,
             "topics": memory.topics,
+            "input": memory.input,
+            "feedback": memory.feedback,
+            "created_at": memory.created_at,
             "updated_at": int(time.time()),
         }

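
Note: a hedged sketch of the new Redis Cluster support. As the docstring addition above states, db_url only covers single-node Redis, so a cluster client is built explicitly and passed in; the host and port are assumptions.

    from redis import RedisCluster

    from agno.db.redis.redis import RedisDb  # path per agno/db/redis/redis.py

    cluster_client = RedisCluster(host="redis-cluster.internal", port=6379)
    db = RedisDb(redis_client=cluster_client, db_prefix="agno")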
agno/db/redis/schemas.py
CHANGED
@@ -27,6 +27,9 @@ USER_MEMORY_SCHEMA = {
     "team_id": {"type": "string"},
     "user_id": {"type": "string"},
     "topics": {"type": "json"},
+    "input": {"type": "string"},
+    "feedback": {"type": "string"},
+    "created_at": {"type": "integer"},
     "updated_at": {"type": "integer"},
 }

@@ -114,6 +117,7 @@ def get_table_schema_definition(table_type: str) -> dict[str, Any]:
         "metrics": METRICS_SCHEMA,
         "evals": EVAL_SCHEMA,
         "knowledge": KNOWLEDGE_SCHEMA,
+        "culture": CULTURAL_KNOWLEDGE_SCHEMA,
     }

     schema = schemas.get(table_type, {})
agno/db/schemas/memory.py
CHANGED
@@ -12,17 +12,24 @@ class UserMemory:
     topics: Optional[List[str]] = None
     user_id: Optional[str] = None
     input: Optional[str] = None
+    created_at: Optional[datetime] = None
     updated_at: Optional[datetime] = None
     feedback: Optional[str] = None

     agent_id: Optional[str] = None
     team_id: Optional[str] = None

+    def __post_init__(self) -> None:
+        """Automatically set created_at if not provided."""
+        if self.created_at is None:
+            self.created_at = datetime.now(timezone.utc)
+
     def to_dict(self) -> Dict[str, Any]:
         _dict = {
             "memory_id": self.memory_id,
             "memory": self.memory,
             "topics": self.topics,
+            "created_at": self.created_at.isoformat() if self.created_at else None,
             "updated_at": self.updated_at.isoformat() if self.updated_at else None,
             "input": self.input,
             "user_id": self.user_id,
@@ -36,6 +43,12 @@ class UserMemory:
     def from_dict(cls, data: Dict[str, Any]) -> "UserMemory":
         data = dict(data)

+        if created_at := data.get("created_at"):
+            if isinstance(created_at, (int, float)):
+                data["created_at"] = datetime.fromtimestamp(created_at, tz=timezone.utc)
+            else:
+                data["created_at"] = datetime.fromisoformat(created_at)
+
         # Convert updated_at to datetime
         if updated_at := data.get("updated_at"):
             if isinstance(updated_at, (int, float)):
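
Note: a small sketch of the new created_at handling on UserMemory: __post_init__ fills it with a timezone-aware UTC value, to_dict serializes it as ISO-8601, and from_dict accepts either an epoch number or an ISO string. The field values are illustrative, and it is assumed that memory_id and memory can be passed as keyword arguments and that from_dict constructs the instance from the remaining keys.

    from agno.db.schemas.memory import UserMemory

    m = UserMemory(memory_id="mem-123", memory="Prefers concise answers")
    print(m.created_at)               # set automatically to a UTC datetime
    print(m.to_dict()["created_at"])  # ISO-8601 string

    restored = UserMemory.from_dict({"memory_id": "mem-123", "memory": "Prefers concise answers", "created_at": 1735732800})
    print(restored.created_at)        # epoch seconds converted to a timezone-aware datetime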
agno/db/singlestore/schemas.py
CHANGED
@@ -39,6 +39,8 @@ USER_MEMORY_TABLE_SCHEMA = {
     "team_id": {"type": lambda: String(128), "nullable": True},
     "user_id": {"type": lambda: String(128), "nullable": True, "index": True},
     "topics": {"type": JSON, "nullable": True},
+    "feedback": {"type": Text, "nullable": True},
+    "created_at": {"type": BigInteger, "nullable": False, "index": True},
     "updated_at": {"type": BigInteger, "nullable": True, "index": True},
 }

@@ -106,6 +108,14 @@ CULTURAL_KNOWLEDGE_TABLE_SCHEMA = {
 }


+VERSIONS_TABLE_SCHEMA = {
+    "table_name": {"type": lambda: String(128), "nullable": False, "primary_key": True},
+    "version": {"type": lambda: String(10), "nullable": False},
+    "created_at": {"type": lambda: String(128), "nullable": False, "index": True},
+    "updated_at": {"type": lambda: String(128), "nullable": True},
+}
+
+
 def get_table_schema_definition(table_type: str) -> dict[str, Any]:
     """
     Get the expected schema definition for the given table.
@@ -121,6 +131,7 @@ def get_table_schema_definition(table_type: str) -> dict[str, Any]:
         "memories": USER_MEMORY_TABLE_SCHEMA,
         "knowledge": KNOWLEDGE_TABLE_SCHEMA,
         "culture": CULTURAL_KNOWLEDGE_TABLE_SCHEMA,
+        "versions": VERSIONS_TABLE_SCHEMA,
     }
     schema = schemas.get(table_type, {})

agno/db/singlestore/singlestore.py
CHANGED

@@ -5,6 +5,7 @@ from typing import Any, Dict, List, Optional, Tuple, Union
 from uuid import uuid4

 from agno.db.base import BaseDb, SessionType
+from agno.db.migrations.manager import MigrationManager
 from agno.db.schemas.culture import CulturalKnowledge
 from agno.db.schemas.evals import EvalFilterType, EvalRunRecord, EvalType
 from agno.db.schemas.knowledge import KnowledgeRow
@@ -27,12 +28,12 @@ from agno.utils.log import log_debug, log_error, log_info, log_warning
 from agno.utils.string import generate_id

 try:
-    from sqlalchemy import Index, UniqueConstraint, and_, func, update
+    from sqlalchemy import Index, UniqueConstraint, and_, func, select, update
     from sqlalchemy.dialects import mysql
     from sqlalchemy.engine import Engine, create_engine
     from sqlalchemy.orm import scoped_session, sessionmaker
     from sqlalchemy.schema import Column, MetaData, Table
-    from sqlalchemy.sql.expression import
+    from sqlalchemy.sql.expression import text
 except ImportError:
     raise ImportError("`sqlalchemy` not installed. Please install it using `pip install sqlalchemy`")

@@ -50,6 +51,7 @@ class SingleStoreDb(BaseDb):
         metrics_table: Optional[str] = None,
         eval_table: Optional[str] = None,
         knowledge_table: Optional[str] = None,
+        versions_table: Optional[str] = None,
     ):
         """
         Interface for interacting with a SingleStore database.
@@ -70,7 +72,7 @@ class SingleStoreDb(BaseDb):
             metrics_table (Optional[str]): Name of the table to store metrics.
             eval_table (Optional[str]): Name of the table to store evaluation runs data.
             knowledge_table (Optional[str]): Name of the table to store knowledge content.
-
+            versions_table (Optional[str]): Name of the table to store schema versions.
         Raises:
             ValueError: If neither db_url nor db_engine is provided.
             ValueError: If none of the tables are provided.
@@ -89,6 +91,7 @@ class SingleStoreDb(BaseDb):
             metrics_table=metrics_table,
             eval_table=eval_table,
             knowledge_table=knowledge_table,
+            versions_table=versions_table,
         )

         _engine: Optional[Engine] = db_engine
@@ -176,9 +179,15 @@ class SingleStoreDb(BaseDb):
             (self.metrics_table_name, "metrics"),
             (self.eval_table_name, "evals"),
             (self.knowledge_table_name, "knowledge"),
+            (self.versions_table_name, "versions"),
         ]

         for table_name, table_type in tables_to_create:
+            if table_name != self.versions_table_name:
+                # Also store the schema version for the created table
+                latest_schema_version = MigrationManager(self).latest_schema_version
+                self.upsert_schema_version(table_name=table_name, version=latest_schema_version.public)
+
             self._create_table(table_name=table_name, table_type=table_type, db_schema=self.db_schema)

     def _create_table(self, table_name: str, table_type: str, db_schema: Optional[str]) -> Table:
@@ -355,8 +364,53 @@ class SingleStoreDb(BaseDb):
             )
             return self.culture_table

+        if table_type == "versions":
+            self.versions_table = self._get_or_create_table(
+                table_name=self.versions_table_name,
+                table_type="versions",
+                db_schema=self.db_schema,
+                create_table_if_not_found=create_table_if_not_found,
+            )
+            return self.versions_table
+
         raise ValueError(f"Unknown table type: {table_type}")

+    def get_latest_schema_version(self, table_name: str) -> str:
+        """Get the latest version of the database schema."""
+        table = self._get_table(table_type="versions", create_table_if_not_found=True)
+        if table is None:
+            return "2.0.0"
+        with self.Session() as sess:
+            stmt = select(table)
+            # Latest version for the given table
+            stmt = stmt.where(table.c.table_name == table_name)
+            stmt = stmt.order_by(table.c.version.desc()).limit(1)
+            result = sess.execute(stmt).fetchone()
+            if result is None:
+                return "2.0.0"
+            version_dict = dict(result._mapping)
+            return version_dict.get("version") or "2.0.0"
+
+    def upsert_schema_version(self, table_name: str, version: str) -> None:
+        """Upsert the schema version into the database."""
+        table = self._get_table(table_type="versions", create_table_if_not_found=True)
+        if table is None:
+            return
+        current_datetime = datetime.now().isoformat()
+        with self.Session() as sess, sess.begin():
+            stmt = mysql.insert(table).values(
+                table_name=table_name,
+                version=version,
+                created_at=current_datetime,  # Store as ISO format string
+                updated_at=current_datetime,
+            )
+            # Update version if table_name already exists
+            stmt = stmt.on_duplicate_key_update(
+                version=version,
+                updated_at=current_datetime,
+            )
+            sess.execute(stmt)
+
     def _get_or_create_table(
         self,
         table_name: str,
@@ -382,6 +436,12 @@ class SingleStoreDb(BaseDb):
         if not table_is_available:
             if not create_table_if_not_found:
                 return None
+
+            # Also store the schema version for the created table
+            if table_name != self.versions_table_name:
+                latest_schema_version = MigrationManager(self).latest_schema_version
+                self.upsert_schema_version(table_name=table_name, version=latest_schema_version.public)
+
             return self._create_table(table_name=table_name, table_type=table_type, db_schema=db_schema)

         if not is_valid_table(
@@ -1339,6 +1399,8 @@ class SingleStoreDb(BaseDb):
         if memory.memory_id is None:
             memory.memory_id = str(uuid4())

+        current_time = int(time.time())
+
         stmt = mysql.insert(table).values(
             memory_id=memory.memory_id,
             memory=memory.memory,
@@ -1347,7 +1409,9 @@ class SingleStoreDb(BaseDb):
             agent_id=memory.agent_id,
             team_id=memory.team_id,
             topics=memory.topics,
-
+            feedback=memory.feedback,
+            created_at=memory.created_at,
+            updated_at=current_time,
         )
         stmt = stmt.on_duplicate_key_update(
             memory=stmt.inserted.memory,
@@ -1356,7 +1420,10 @@ class SingleStoreDb(BaseDb):
             user_id=stmt.inserted.user_id,
             agent_id=stmt.inserted.agent_id,
             team_id=stmt.inserted.team_id,
-
+            feedback=stmt.inserted.feedback,
+            updated_at=stmt.inserted.updated_at,
+            # Preserve created_at on update - don't overwrite existing value
+            created_at=table.c.created_at,
         )

         sess.execute(stmt)
@@ -1404,11 +1471,13 @@ class SingleStoreDb(BaseDb):
         # Prepare data for bulk insert
         memory_data = []
         current_time = int(time.time())
+
         for memory in memories:
             if memory.memory_id is None:
                 memory.memory_id = str(uuid4())
             # Use preserved updated_at if flag is set, otherwise use current time
             updated_at = memory.updated_at if preserve_updated_at else current_time
+
             memory_data.append(
                 {
                     "memory_id": memory.memory_id,
@@ -1418,6 +1487,8 @@ class SingleStoreDb(BaseDb):
                     "agent_id": memory.agent_id,
                     "team_id": memory.team_id,
                     "topics": memory.topics,
+                    "feedback": memory.feedback,
+                    "created_at": memory.created_at,
                     "updated_at": updated_at,
                 }
             )
@@ -1434,7 +1505,10 @@ class SingleStoreDb(BaseDb):
             user_id=stmt.inserted.user_id,
             agent_id=stmt.inserted.agent_id,
             team_id=stmt.inserted.team_id,
+            feedback=stmt.inserted.feedback,
             updated_at=stmt.inserted.updated_at,
+            # Preserve created_at on update
+            created_at=table.c.created_at,
         )
         sess.execute(stmt, memory_data)

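
Note: a sketch of how the versions table behaves for SingleStore. The table is keyed on table_name, so on_duplicate_key_update overwrites the stored version rather than inserting a second row; the db_url and table names below are assumptions.

    from agno.db.singlestore.singlestore import SingleStoreDb  # path per agno/db/singlestore/singlestore.py

    db = SingleStoreDb(
        db_url="mysql+pymysql://ai:ai@localhost:3306/ai",  # assumed DSN
        knowledge_table="agno_knowledge",                  # assumed table name
        versions_table="agno_schema_versions",
    )

    db.upsert_schema_version(table_name="agno_knowledge", version="2.3.0")
    db.upsert_schema_version(table_name="agno_knowledge", version="2.3.1")  # updates the existing row
    print(db.get_latest_schema_version(table_name="agno_knowledge"))        # "2.3.1"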