agno 2.3.2__py3-none-any.whl → 2.3.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/agent/agent.py +513 -185
- agno/compression/__init__.py +3 -0
- agno/compression/manager.py +176 -0
- agno/db/dynamo/dynamo.py +11 -0
- agno/db/firestore/firestore.py +5 -1
- agno/db/gcs_json/gcs_json_db.py +5 -2
- agno/db/in_memory/in_memory_db.py +5 -2
- agno/db/json/json_db.py +5 -1
- agno/db/migrations/manager.py +4 -4
- agno/db/mongo/async_mongo.py +158 -34
- agno/db/mongo/mongo.py +6 -2
- agno/db/mysql/mysql.py +48 -54
- agno/db/postgres/async_postgres.py +61 -51
- agno/db/postgres/postgres.py +42 -50
- agno/db/redis/redis.py +5 -0
- agno/db/redis/utils.py +5 -5
- agno/db/singlestore/singlestore.py +99 -108
- agno/db/sqlite/async_sqlite.py +29 -27
- agno/db/sqlite/sqlite.py +30 -26
- agno/knowledge/reader/pdf_reader.py +2 -2
- agno/knowledge/reader/tavily_reader.py +0 -1
- agno/memory/__init__.py +14 -1
- agno/memory/manager.py +217 -4
- agno/memory/strategies/__init__.py +15 -0
- agno/memory/strategies/base.py +67 -0
- agno/memory/strategies/summarize.py +196 -0
- agno/memory/strategies/types.py +37 -0
- agno/models/anthropic/claude.py +84 -80
- agno/models/aws/bedrock.py +38 -16
- agno/models/aws/claude.py +97 -277
- agno/models/azure/ai_foundry.py +8 -4
- agno/models/base.py +101 -14
- agno/models/cerebras/cerebras.py +18 -7
- agno/models/cerebras/cerebras_openai.py +4 -2
- agno/models/cohere/chat.py +8 -4
- agno/models/google/gemini.py +578 -20
- agno/models/groq/groq.py +18 -5
- agno/models/huggingface/huggingface.py +17 -6
- agno/models/ibm/watsonx.py +16 -6
- agno/models/litellm/chat.py +17 -7
- agno/models/message.py +19 -5
- agno/models/meta/llama.py +20 -4
- agno/models/mistral/mistral.py +8 -4
- agno/models/ollama/chat.py +17 -6
- agno/models/openai/chat.py +17 -6
- agno/models/openai/responses.py +23 -9
- agno/models/vertexai/claude.py +99 -5
- agno/os/interfaces/agui/router.py +1 -0
- agno/os/interfaces/agui/utils.py +97 -57
- agno/os/router.py +16 -0
- agno/os/routers/memory/memory.py +143 -0
- agno/os/routers/memory/schemas.py +26 -0
- agno/os/schema.py +21 -6
- agno/os/utils.py +134 -10
- agno/run/base.py +2 -1
- agno/run/workflow.py +1 -1
- agno/team/team.py +565 -219
- agno/tools/mcp/mcp.py +1 -1
- agno/utils/agent.py +119 -1
- agno/utils/models/ai_foundry.py +9 -2
- agno/utils/models/claude.py +12 -5
- agno/utils/models/cohere.py +9 -2
- agno/utils/models/llama.py +9 -2
- agno/utils/models/mistral.py +4 -2
- agno/utils/print_response/agent.py +37 -2
- agno/utils/print_response/team.py +52 -0
- agno/utils/tokens.py +41 -0
- agno/workflow/types.py +2 -2
- {agno-2.3.2.dist-info → agno-2.3.3.dist-info}/METADATA +45 -40
- {agno-2.3.2.dist-info → agno-2.3.3.dist-info}/RECORD +73 -66
- {agno-2.3.2.dist-info → agno-2.3.3.dist-info}/WHEEL +0 -0
- {agno-2.3.2.dist-info → agno-2.3.3.dist-info}/licenses/LICENSE +0 -0
- {agno-2.3.2.dist-info → agno-2.3.3.dist-info}/top_level.txt +0 -0
agno/db/postgres/async_postgres.py CHANGED

@@ -105,10 +105,13 @@ class AsyncPostgresDb(AsyncBaseDb):
         self.db_url: Optional[str] = db_url
         self.db_engine: AsyncEngine = _engine
         self.db_schema: str = db_schema if db_schema is not None else "ai"
-        self.metadata: MetaData = MetaData()
+        self.metadata: MetaData = MetaData(schema=self.db_schema)

         # Initialize database session factory
-        self.async_session_factory = async_sessionmaker(
+        self.async_session_factory = async_sessionmaker(
+            bind=self.db_engine,
+            expire_on_commit=False,
+        )

     # -- DB methods --
     async def table_exists(self, table_name: str) -> bool:
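The two `__init__`-level changes above bind the `MetaData` object to the configured schema and build the session factory with `expire_on_commit=False`. A minimal, self-contained sketch of that pattern (not agno's actual code; it assumes SQLAlchemy 2.x plus the aiosqlite driver, and uses a placeholder URL where AsyncPostgresDb would use a Postgres URL and schema="ai"):

```python
import asyncio

from sqlalchemy import MetaData, text
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine

# Placeholder URL for the sketch; the real class binds to its own AsyncEngine.
engine = create_async_engine("sqlite+aiosqlite:///:memory:")
metadata = MetaData()  # the diff passes schema=self.db_schema here

# expire_on_commit=False keeps returned objects usable after the transaction commits.
session_factory = async_sessionmaker(bind=engine, expire_on_commit=False)


async def main() -> None:
    async with session_factory() as session, session.begin():
        result = await session.execute(text("SELECT 1"))
        print(result.scalar())  # -> 1


asyncio.run(main())
```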
@@ -135,20 +138,15 @@ class AsyncPostgresDb(AsyncBaseDb):
         ]

         for table_name, table_type in tables_to_create:
-
-            latest_schema_version = MigrationManager(self).latest_schema_version
-            await self.upsert_schema_version(table_name=table_name, version=latest_schema_version.public)
-
-            await self._create_table(table_name=table_name, table_type=table_type, db_schema=self.db_schema)
+            await self._get_or_create_table(table_name=table_name, table_type=table_type)

-    async def _create_table(self, table_name: str, table_type: str
+    async def _create_table(self, table_name: str, table_type: str) -> Table:
         """
         Create a table with the appropriate schema based on the table type.

         Args:
             table_name (str): Name of the table to create
             table_type (str): Type of table (used to get schema definition)
-            db_schema (str): Database schema name

         Returns:
             Table: SQLAlchemy Table object

@@ -177,8 +175,7 @@ class AsyncPostgresDb(AsyncBaseDb):
                 columns.append(Column(*column_args, **column_kwargs))  # type: ignore

             # Create the table object
-
-            table = Table(table_name, table_metadata, *columns, schema=db_schema)
+            table = Table(table_name, self.metadata, *columns, schema=self.db_schema)

             # Add multi-column unique constraints with table-specific names
             for constraint in schema_unique_constraints:

@@ -192,11 +189,17 @@ class AsyncPostgresDb(AsyncBaseDb):
                     table.append_constraint(Index(idx_name, idx_col))

             async with self.async_session_factory() as sess, sess.begin():
-                await acreate_schema(session=sess, db_schema=db_schema)
+                await acreate_schema(session=sess, db_schema=self.db_schema)

                 # Create table
-
-
+                table_created = False
+                if not await self.table_exists(table_name):
+                    async with self.db_engine.begin() as conn:
+                        await conn.run_sync(table.create, checkfirst=True)
+                    log_debug(f"Successfully created table '{table_name}'")
+                    table_created = True
+                else:
+                    log_debug(f"Table '{self.db_schema}.{table_name}' already exists, skipping creation")

                 # Create indexes
                 for idx in table.indexes:
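The new create-table branch above bridges the synchronous `Table.create()` call onto the async engine via `run_sync` and relies on `checkfirst=True` so re-runs are a no-op. A rough standalone sketch of that pattern, assuming SQLAlchemy 2.x and the aiosqlite driver (the table name and URL are placeholders):

```python
import asyncio

from sqlalchemy import Column, Integer, MetaData, Table
from sqlalchemy.ext.asyncio import create_async_engine

engine = create_async_engine("sqlite+aiosqlite:///:memory:")
metadata = MetaData()
sessions = Table("agno_sessions", metadata, Column("id", Integer, primary_key=True))


async def main() -> None:
    async with engine.begin() as conn:
        # Table.create is synchronous, so it runs through run_sync on the async connection.
        await conn.run_sync(sessions.create, checkfirst=True)  # creates the table
        await conn.run_sync(sessions.create, checkfirst=True)  # already exists, skipped


asyncio.run(main())
```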
@@ -206,122 +209,126 @@ class AsyncPostgresDb(AsyncBaseDb):
                         exists_query = text(
                             "SELECT 1 FROM pg_indexes WHERE schemaname = :schema AND indexname = :index_name"
                         )
-                        result = await sess.execute(exists_query, {"schema": db_schema, "index_name": idx.name})
+                        result = await sess.execute(exists_query, {"schema": self.db_schema, "index_name": idx.name})
                         exists = result.scalar() is not None
                         if exists:
-                            log_debug(
+                            log_debug(
+                                f"Index {idx.name} already exists in {self.db_schema}.{table_name}, skipping creation"
+                            )
                             continue

                         async with self.db_engine.begin() as conn:
                             await conn.run_sync(idx.create)
-                        log_debug(f"Created index: {idx.name} for table {db_schema}.{table_name}")
+                        log_debug(f"Created index: {idx.name} for table {self.db_schema}.{table_name}")

                     except Exception as e:
                         log_error(f"Error creating index {idx.name}: {e}")

-
+            # Store the schema version for the created table
+            if table_name != self.versions_table_name and table_created:
+                # Also store the schema version for the created table
+                latest_schema_version = MigrationManager(self).latest_schema_version
+                await self.upsert_schema_version(table_name=table_name, version=latest_schema_version.public)
+                log_info(
+                    f"Successfully stored version {latest_schema_version.public} in database for table {table_name}"
+                )
+
             return table

         except Exception as e:
-            log_error(f"Could not create table {db_schema}.{table_name}: {e}")
+            log_error(f"Could not create table {self.db_schema}.{table_name}: {e}")
             raise

     async def _get_table(self, table_type: str) -> Table:
         if table_type == "sessions":
             if not hasattr(self, "session_table"):
                 self.session_table = await self._get_or_create_table(
-                    table_name=self.session_table_name, table_type="sessions"
+                    table_name=self.session_table_name, table_type="sessions"
                 )
             return self.session_table

         if table_type == "memories":
             if not hasattr(self, "memory_table"):
                 self.memory_table = await self._get_or_create_table(
-                    table_name=self.memory_table_name, table_type="memories"
+                    table_name=self.memory_table_name, table_type="memories"
                 )
             return self.memory_table

         if table_type == "metrics":
             if not hasattr(self, "metrics_table"):
                 self.metrics_table = await self._get_or_create_table(
-                    table_name=self.metrics_table_name, table_type="metrics"
+                    table_name=self.metrics_table_name, table_type="metrics"
                 )
             return self.metrics_table

         if table_type == "evals":
             if not hasattr(self, "eval_table"):
-                self.eval_table = await self._get_or_create_table(
-                    table_name=self.eval_table_name, table_type="evals", db_schema=self.db_schema
-                )
+                self.eval_table = await self._get_or_create_table(table_name=self.eval_table_name, table_type="evals")
             return self.eval_table

         if table_type == "knowledge":
             if not hasattr(self, "knowledge_table"):
                 self.knowledge_table = await self._get_or_create_table(
-                    table_name=self.knowledge_table_name, table_type="knowledge"
+                    table_name=self.knowledge_table_name, table_type="knowledge"
                 )
             return self.knowledge_table

         if table_type == "culture":
             if not hasattr(self, "culture_table"):
                 self.culture_table = await self._get_or_create_table(
-                    table_name=self.culture_table_name, table_type="culture"
+                    table_name=self.culture_table_name, table_type="culture"
                 )
             return self.culture_table

         if table_type == "versions":
             if not hasattr(self, "versions_table"):
                 self.versions_table = await self._get_or_create_table(
-                    table_name=self.versions_table_name, table_type="versions"
+                    table_name=self.versions_table_name, table_type="versions"
                 )
             return self.versions_table

         raise ValueError(f"Unknown table type: {table_type}")

-    async def _get_or_create_table(self, table_name: str, table_type: str
+    async def _get_or_create_table(self, table_name: str, table_type: str) -> Table:
         """
         Check if the table exists and is valid, else create it.

         Args:
             table_name (str): Name of the table to get or create
             table_type (str): Type of table (used to get schema definition)
-            db_schema (str): Database schema name

         Returns:
             Table: SQLAlchemy Table object representing the schema.
         """

         async with self.async_session_factory() as sess, sess.begin():
-            table_is_available = await ais_table_available(
+            table_is_available = await ais_table_available(
+                session=sess, table_name=table_name, db_schema=self.db_schema
+            )

             if not table_is_available:
-
-                # Also store the schema version for the created table
-                latest_schema_version = MigrationManager(self).latest_schema_version
-                await self.upsert_schema_version(table_name=table_name, version=latest_schema_version.public)
-
-                return await self._create_table(table_name=table_name, table_type=table_type, db_schema=db_schema)
+                return await self._create_table(table_name=table_name, table_type=table_type)

             if not await ais_valid_table(
                 db_engine=self.db_engine,
                 table_name=table_name,
                 table_type=table_type,
-                db_schema=db_schema,
+                db_schema=self.db_schema,
             ):
-                raise ValueError(f"Table {db_schema}.{table_name} has an invalid schema")
+                raise ValueError(f"Table {self.db_schema}.{table_name} has an invalid schema")

         try:
             async with self.db_engine.connect() as conn:

                 def create_table(connection):
-                    return Table(table_name, self.metadata, schema=db_schema, autoload_with=connection)
+                    return Table(table_name, self.metadata, schema=self.db_schema, autoload_with=connection)

                 table = await conn.run_sync(create_table)

             return table

         except Exception as e:
-            log_error(f"Error loading existing table {db_schema}.{table_name}: {e}")
+            log_error(f"Error loading existing table {self.db_schema}.{table_name}: {e}")
             raise

     async def get_latest_schema_version(self, table_name: str) -> str:

@@ -1212,13 +1219,14 @@ class AsyncPostgresDb(AsyncBaseDb):
             raise e

     async def get_user_memory_stats(
-        self, limit: Optional[int] = None, page: Optional[int] = None
+        self, limit: Optional[int] = None, page: Optional[int] = None, user_id: Optional[str] = None
     ) -> Tuple[List[Dict[str, Any]], int]:
         """Get user memories stats.

         Args:
             limit (Optional[int]): The maximum number of user stats to return.
             page (Optional[int]): The page number.
+            user_id (Optional[str]): User ID for filtering.

         Returns:
             Tuple[List[Dict[str, Any]], int]: A list of dictionaries containing user stats and total count.

@@ -1239,17 +1247,19 @@ class AsyncPostgresDb(AsyncBaseDb):
             table = await self._get_table(table_type="memories")

             async with self.async_session_factory() as sess, sess.begin():
-                stmt = (
-
-
-
-                        func.max(table.c.updated_at).label("last_memory_updated_at"),
-                    )
-                    .where(table.c.user_id.is_not(None))
-                    .group_by(table.c.user_id)
-                    .order_by(func.max(table.c.updated_at).desc())
+                stmt = select(
+                    table.c.user_id,
+                    func.count(table.c.memory_id).label("total_memories"),
+                    func.max(table.c.updated_at).label("last_memory_updated_at"),
                 )

+                if user_id is not None:
+                    stmt = stmt.where(table.c.user_id == user_id)
+                else:
+                    stmt = stmt.where(table.c.user_id.is_not(None))
+                stmt = stmt.group_by(table.c.user_id)
+                stmt = stmt.order_by(func.max(table.c.updated_at).desc())
+
                 count_stmt = select(func.count()).select_from(stmt.alias())
                 total_count = await sess.scalar(count_stmt) or 0
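Taken together, the async changes mean `get_user_memory_stats()` can now be scoped to a single user. A hypothetical usage sketch only; the import path is inferred from the file layout above, and the connection URL, schema, and user id are placeholders, with any additional AsyncPostgresDb setup assumed:

```python
import asyncio

from agno.db.postgres.async_postgres import AsyncPostgresDb


async def main() -> None:
    db = AsyncPostgresDb(db_url="postgresql+asyncpg://user:pass@localhost:5432/app", db_schema="ai")
    # New in 2.3.3: pass user_id to narrow the stats to one user instead of all users.
    stats, total = await db.get_user_memory_stats(limit=20, page=1, user_id="user-123")
    print(total, stats)


asyncio.run(main())
```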
agno/db/postgres/postgres.py CHANGED

@@ -104,10 +104,10 @@ class PostgresDb(BaseDb):
         )

         self.db_schema: str = db_schema if db_schema is not None else "ai"
-        self.metadata: MetaData = MetaData()
+        self.metadata: MetaData = MetaData(schema=self.db_schema)

         # Initialize database session
-        self.Session: scoped_session = scoped_session(sessionmaker(bind=self.db_engine))
+        self.Session: scoped_session = scoped_session(sessionmaker(bind=self.db_engine, expire_on_commit=False))

     # -- DB methods --
     def table_exists(self, table_name: str) -> bool:

@@ -134,21 +134,15 @@ class PostgresDb(BaseDb):
         ]

         for table_name, table_type in tables_to_create:
-
-            # Also store the schema version for the created table
-            latest_schema_version = MigrationManager(self).latest_schema_version
-            self.upsert_schema_version(table_name=table_name, version=latest_schema_version.public)
+            self._get_or_create_table(table_name=table_name, table_type=table_type, create_table_if_not_found=True)

-
-
-    def _create_table(self, table_name: str, table_type: str, db_schema: str) -> Table:
+    def _create_table(self, table_name: str, table_type: str) -> Table:
         """
         Create a table with the appropriate schema based on the table type.

         Args:
             table_name (str): Name of the table to create
             table_type (str): Type of table (used to get schema definition)
-            db_schema (str): Database schema name

         Returns:
             Table: SQLAlchemy Table object

@@ -177,8 +171,7 @@ class PostgresDb(BaseDb):
                 columns.append(Column(*column_args, **column_kwargs))  # type: ignore

             # Create the table object
-
-            table = Table(table_name, table_metadata, *columns, schema=db_schema)
+            table = Table(table_name, self.metadata, *columns, schema=self.db_schema)

             # Add multi-column unique constraints with table-specific names
             for constraint in schema_unique_constraints:

@@ -192,10 +185,16 @@ class PostgresDb(BaseDb):
                     table.append_constraint(Index(idx_name, idx_col))

             with self.Session() as sess, sess.begin():
-                create_schema(session=sess, db_schema=db_schema)
+                create_schema(session=sess, db_schema=self.db_schema)

                 # Create table
-
+                table_created = False
+                if not self.table_exists(table_name):
+                    table.create(self.db_engine, checkfirst=True)
+                    log_debug(f"Successfully created table '{table_name}'")
+                    table_created = True
+                else:
+                    log_debug(f"Table {self.db_schema}.{table_name} already exists, skipping creation")

                 # Create indexes
                 for idx in table.indexes:

@@ -206,24 +205,29 @@ class PostgresDb(BaseDb):
                             "SELECT 1 FROM pg_indexes WHERE schemaname = :schema AND indexname = :index_name"
                         )
                         exists = (
-                            sess.execute(exists_query, {"schema": db_schema, "index_name": idx.name}).scalar()
+                            sess.execute(exists_query, {"schema": self.db_schema, "index_name": idx.name}).scalar()
                             is not None
                         )
                         if exists:
-                            log_debug(
+                            log_debug(
+                                f"Index {idx.name} already exists in {self.db_schema}.{table_name}, skipping creation"
+                            )
                             continue

                         idx.create(self.db_engine)
-                        log_debug(f"Created index: {idx.name} for table {db_schema}.{table_name}")
+                        log_debug(f"Created index: {idx.name} for table {self.db_schema}.{table_name}")

                     except Exception as e:
                         log_error(f"Error creating index {idx.name}: {e}")

-
+            # Store the schema version for the created table
+            if table_name != self.versions_table_name and table_created:
+                latest_schema_version = MigrationManager(self).latest_schema_version
+                self.upsert_schema_version(table_name=table_name, version=latest_schema_version.public)
             return table

         except Exception as e:
-            log_error(f"Could not create table {db_schema}.{table_name}: {e}")
+            log_error(f"Could not create table {self.db_schema}.{table_name}: {e}")
             raise

     def _get_table(self, table_type: str, create_table_if_not_found: Optional[bool] = False) -> Optional[Table]:

@@ -231,7 +235,6 @@ class PostgresDb(BaseDb):
                 self.session_table = self._get_or_create_table(
                     table_name=self.session_table_name,
                     table_type="sessions",
-                    db_schema=self.db_schema,
                     create_table_if_not_found=create_table_if_not_found,
                 )
             return self.session_table

@@ -240,7 +243,6 @@ class PostgresDb(BaseDb):
                 self.memory_table = self._get_or_create_table(
                     table_name=self.memory_table_name,
                     table_type="memories",
-                    db_schema=self.db_schema,
                     create_table_if_not_found=create_table_if_not_found,
                 )
             return self.memory_table

@@ -249,7 +251,6 @@ class PostgresDb(BaseDb):
                 self.metrics_table = self._get_or_create_table(
                     table_name=self.metrics_table_name,
                     table_type="metrics",
-                    db_schema=self.db_schema,
                     create_table_if_not_found=create_table_if_not_found,
                 )
             return self.metrics_table

@@ -258,7 +259,6 @@ class PostgresDb(BaseDb):
                 self.eval_table = self._get_or_create_table(
                     table_name=self.eval_table_name,
                     table_type="evals",
-                    db_schema=self.db_schema,
                     create_table_if_not_found=create_table_if_not_found,
                 )
             return self.eval_table

@@ -267,7 +267,6 @@ class PostgresDb(BaseDb):
                 self.knowledge_table = self._get_or_create_table(
                     table_name=self.knowledge_table_name,
                     table_type="knowledge",
-                    db_schema=self.db_schema,
                     create_table_if_not_found=create_table_if_not_found,
                 )
             return self.knowledge_table

@@ -276,7 +275,6 @@ class PostgresDb(BaseDb):
                 self.culture_table = self._get_or_create_table(
                     table_name=self.culture_table_name,
                     table_type="culture",
-                    db_schema=self.db_schema,
                     create_table_if_not_found=create_table_if_not_found,
                 )
             return self.culture_table

@@ -285,7 +283,6 @@ class PostgresDb(BaseDb):
                 self.versions_table = self._get_or_create_table(
                     table_name=self.versions_table_name,
                     table_type="versions",
-                    db_schema=self.db_schema,
                     create_table_if_not_found=create_table_if_not_found,
                 )
             return self.versions_table

@@ -293,7 +290,7 @@ class PostgresDb(BaseDb):
         raise ValueError(f"Unknown table type: {table_type}")

     def _get_or_create_table(
-        self, table_name: str, table_type: str,
+        self, table_name: str, table_type: str, create_table_if_not_found: Optional[bool] = False
     ) -> Optional[Table]:
         """
         Check if the table exists and is valid, else create it.

@@ -301,40 +298,33 @@ class PostgresDb(BaseDb):
         Args:
             table_name (str): Name of the table to get or create
             table_type (str): Type of table (used to get schema definition)
-            db_schema (str): Database schema name

         Returns:
             Optional[Table]: SQLAlchemy Table object representing the schema.
         """

         with self.Session() as sess, sess.begin():
-            table_is_available = is_table_available(session=sess, table_name=table_name, db_schema=db_schema)
+            table_is_available = is_table_available(session=sess, table_name=table_name, db_schema=self.db_schema)

             if not table_is_available:
                 if not create_table_if_not_found:
                     return None
-
-                if table_name != self.versions_table_name:
-                    # Also store the schema version for the created table
-                    latest_schema_version = MigrationManager(self).latest_schema_version
-                    self.upsert_schema_version(table_name=table_name, version=latest_schema_version.public)
-
-                return self._create_table(table_name=table_name, table_type=table_type, db_schema=db_schema)
+                return self._create_table(table_name=table_name, table_type=table_type)

             if not is_valid_table(
                 db_engine=self.db_engine,
                 table_name=table_name,
                 table_type=table_type,
-                db_schema=db_schema,
+                db_schema=self.db_schema,
             ):
-                raise ValueError(f"Table {db_schema}.{table_name} has an invalid schema")
+                raise ValueError(f"Table {self.db_schema}.{table_name} has an invalid schema")

         try:
-            table = Table(table_name, self.metadata, schema=db_schema, autoload_with=self.db_engine)
+            table = Table(table_name, self.metadata, schema=self.db_schema, autoload_with=self.db_engine)
             return table

         except Exception as e:
-            log_error(f"Error loading existing table {db_schema}.{table_name}: {e}")
+            log_error(f"Error loading existing table {self.db_schema}.{table_name}: {e}")
             raise

     def get_latest_schema_version(self, table_name: str):

@@ -1208,13 +1198,14 @@ class PostgresDb(BaseDb):
             raise e

     def get_user_memory_stats(
-        self, limit: Optional[int] = None, page: Optional[int] = None
+        self, limit: Optional[int] = None, page: Optional[int] = None, user_id: Optional[str] = None
     ) -> Tuple[List[Dict[str, Any]], int]:
         """Get user memories stats.

         Args:
             limit (Optional[int]): The maximum number of user stats to return.
             page (Optional[int]): The page number.
+            user_id (Optional[str]): User ID for filtering.

         Returns:
             Tuple[List[Dict[str, Any]], int]: A list of dictionaries containing user stats and total count.

@@ -1237,16 +1228,17 @@ class PostgresDb(BaseDb):
             return [], 0

         with self.Session() as sess, sess.begin():
-            stmt = (
-
-
-
-                    func.max(table.c.updated_at).label("last_memory_updated_at"),
-                )
-                .where(table.c.user_id.is_not(None))
-                .group_by(table.c.user_id)
-                .order_by(func.max(table.c.updated_at).desc())
+            stmt = select(
+                table.c.user_id,
+                func.count(table.c.memory_id).label("total_memories"),
+                func.max(table.c.updated_at).label("last_memory_updated_at"),
             )
+            if user_id is not None:
+                stmt = stmt.where(table.c.user_id == user_id)
+            else:
+                stmt = stmt.where(table.c.user_id.is_not(None))
+            stmt = stmt.group_by(table.c.user_id)
+            stmt = stmt.order_by(func.max(table.c.updated_at).desc())

             count_stmt = select(func.count()).select_from(stmt.alias())
             total_count = sess.execute(count_stmt).scalar()
agno/db/redis/redis.py CHANGED

@@ -841,12 +841,14 @@ class RedisDb(BaseDb):
         self,
         limit: Optional[int] = None,
         page: Optional[int] = None,
+        user_id: Optional[str] = None,
     ) -> Tuple[List[Dict[str, Any]], int]:
         """Get user memory stats from Redis.

         Args:
             limit (Optional[int]): The maximum number of stats to return.
             page (Optional[int]): The page number to return.
+            user_id (Optional[str]): User ID for filtering.

         Returns:
             Tuple[List[Dict[str, Any]], int]: A tuple containing the list of stats and the total number of stats.

@@ -861,6 +863,9 @@
             user_stats = {}
             for memory in all_memories:
                 memory_user_id = memory.get("user_id")
+                # filter by user_id if provided
+                if user_id is not None and memory_user_id != user_id:
+                    continue
                 if memory_user_id is None:
                     continue

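RedisDb aggregates memory stats in Python rather than in the database, so the new `user_id` filter is just an extra skip inside the scan loop. A rough standalone sketch of that aggregation (record field names are assumed from the diff):

```python
from typing import Any, Dict, List, Optional


def aggregate_memory_stats(
    all_memories: List[Dict[str, Any]], user_id: Optional[str] = None
) -> List[Dict[str, Any]]:
    user_stats: Dict[str, Dict[str, Any]] = {}
    for memory in all_memories:
        memory_user_id = memory.get("user_id")
        # filter by user_id if provided (mirrors the new check in the diff)
        if user_id is not None and memory_user_id != user_id:
            continue
        if memory_user_id is None:
            continue
        stats = user_stats.setdefault(
            memory_user_id,
            {"user_id": memory_user_id, "total_memories": 0, "last_memory_updated_at": 0},
        )
        stats["total_memories"] += 1
        stats["last_memory_updated_at"] = max(stats["last_memory_updated_at"], memory.get("updated_at", 0))
    return list(user_stats.values())


print(aggregate_memory_stats(
    [{"user_id": "alice", "updated_at": 5}, {"user_id": "bob", "updated_at": 9}],
    user_id="alice",
))
```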
agno/db/redis/utils.py CHANGED

@@ -3,14 +3,14 @@
 import json
 import time
 from datetime import date, datetime, timedelta, timezone
-from typing import Any, Dict, List, Optional
+from typing import Any, Dict, List, Optional, Union
 from uuid import UUID

 from agno.db.schemas.culture import CulturalKnowledge
 from agno.utils.log import log_warning

 try:
-    from redis import Redis
+    from redis import Redis, RedisCluster
 except ImportError:
     raise ImportError("`redis` not installed. Please install it using `pip install redis`")

@@ -51,7 +51,7 @@ def generate_index_key(prefix: str, table_type: str, index_field: str, index_val
     return f"{prefix}:{table_type}:index:{index_field}:{index_value}"


-def get_all_keys_for_table(redis_client: Redis, prefix: str, table_type: str) -> List[str]:
+def get_all_keys_for_table(redis_client: Union[Redis, RedisCluster], prefix: str, table_type: str) -> List[str]:
     """Get all relevant keys for the given table type.

     Args:

@@ -130,7 +130,7 @@ def apply_filters(records: List[Dict[str, Any]], conditions: Dict[str, Any]) ->


 def create_index_entries(
-    redis_client: Redis,
+    redis_client: Union[Redis, RedisCluster],
     prefix: str,
     table_type: str,
     record_id: str,

@@ -144,7 +144,7 @@ def create_index_entries(


 def remove_index_entries(
-    redis_client: Redis,
+    redis_client: Union[Redis, RedisCluster],
     prefix: str,
     table_type: str,
     record_id: str,