agno 2.2.13__py3-none-any.whl → 2.3.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/agent/agent.py +197 -110
- agno/api/api.py +2 -0
- agno/db/base.py +26 -0
- agno/db/dynamo/dynamo.py +8 -0
- agno/db/dynamo/schemas.py +1 -0
- agno/db/firestore/firestore.py +8 -0
- agno/db/firestore/schemas.py +1 -0
- agno/db/gcs_json/gcs_json_db.py +8 -0
- agno/db/in_memory/in_memory_db.py +8 -1
- agno/db/json/json_db.py +8 -0
- agno/db/migrations/manager.py +199 -0
- agno/db/migrations/versions/__init__.py +0 -0
- agno/db/migrations/versions/v2_3_0.py +938 -0
- agno/db/mongo/async_mongo.py +16 -6
- agno/db/mongo/mongo.py +11 -0
- agno/db/mongo/schemas.py +3 -0
- agno/db/mongo/utils.py +17 -0
- agno/db/mysql/mysql.py +76 -3
- agno/db/mysql/schemas.py +20 -10
- agno/db/postgres/async_postgres.py +99 -25
- agno/db/postgres/postgres.py +75 -6
- agno/db/postgres/schemas.py +30 -20
- agno/db/redis/redis.py +15 -2
- agno/db/redis/schemas.py +4 -0
- agno/db/schemas/memory.py +13 -0
- agno/db/singlestore/schemas.py +11 -0
- agno/db/singlestore/singlestore.py +79 -5
- agno/db/sqlite/async_sqlite.py +97 -19
- agno/db/sqlite/schemas.py +10 -0
- agno/db/sqlite/sqlite.py +79 -2
- agno/db/surrealdb/surrealdb.py +8 -0
- agno/knowledge/chunking/semantic.py +7 -2
- agno/knowledge/embedder/nebius.py +1 -1
- agno/knowledge/knowledge.py +57 -86
- agno/knowledge/reader/csv_reader.py +7 -9
- agno/knowledge/reader/docx_reader.py +5 -5
- agno/knowledge/reader/field_labeled_csv_reader.py +16 -18
- agno/knowledge/reader/json_reader.py +5 -4
- agno/knowledge/reader/markdown_reader.py +8 -8
- agno/knowledge/reader/pdf_reader.py +11 -11
- agno/knowledge/reader/pptx_reader.py +5 -5
- agno/knowledge/reader/s3_reader.py +3 -3
- agno/knowledge/reader/text_reader.py +8 -8
- agno/knowledge/reader/web_search_reader.py +1 -48
- agno/knowledge/reader/website_reader.py +10 -10
- agno/models/anthropic/claude.py +319 -28
- agno/models/aws/claude.py +32 -0
- agno/models/azure/openai_chat.py +19 -10
- agno/models/base.py +612 -545
- agno/models/cerebras/cerebras.py +8 -11
- agno/models/cohere/chat.py +27 -1
- agno/models/google/gemini.py +39 -7
- agno/models/groq/groq.py +25 -11
- agno/models/meta/llama.py +20 -9
- agno/models/meta/llama_openai.py +3 -19
- agno/models/nebius/nebius.py +4 -4
- agno/models/openai/chat.py +30 -14
- agno/models/openai/responses.py +10 -13
- agno/models/response.py +1 -0
- agno/models/vertexai/claude.py +26 -0
- agno/os/app.py +8 -19
- agno/os/router.py +54 -0
- agno/os/routers/knowledge/knowledge.py +2 -2
- agno/os/schema.py +2 -2
- agno/session/agent.py +57 -92
- agno/session/summary.py +1 -1
- agno/session/team.py +62 -112
- agno/session/workflow.py +353 -57
- agno/team/team.py +227 -125
- agno/tools/models/nebius.py +5 -5
- agno/tools/models_labs.py +20 -10
- agno/tools/nano_banana.py +151 -0
- agno/tools/yfinance.py +12 -11
- agno/utils/http.py +111 -0
- agno/utils/media.py +11 -0
- agno/utils/models/claude.py +8 -0
- agno/utils/print_response/agent.py +33 -12
- agno/utils/print_response/team.py +22 -12
- agno/vectordb/couchbase/couchbase.py +6 -2
- agno/workflow/condition.py +13 -0
- agno/workflow/loop.py +13 -0
- agno/workflow/parallel.py +13 -0
- agno/workflow/router.py +13 -0
- agno/workflow/step.py +120 -20
- agno/workflow/steps.py +13 -0
- agno/workflow/workflow.py +76 -63
- {agno-2.2.13.dist-info → agno-2.3.1.dist-info}/METADATA +6 -2
- {agno-2.2.13.dist-info → agno-2.3.1.dist-info}/RECORD +91 -88
- agno/tools/googlesearch.py +0 -98
- {agno-2.2.13.dist-info → agno-2.3.1.dist-info}/WHEEL +0 -0
- {agno-2.2.13.dist-info → agno-2.3.1.dist-info}/licenses/LICENSE +0 -0
- {agno-2.2.13.dist-info → agno-2.3.1.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,938 @@
|
|
|
1
|
+
"""Migration v2.3.0: Schema updates for memories and PostgreSQL JSONB
|
|
2
|
+
|
|
3
|
+
Changes:
|
|
4
|
+
- Add created_at column to memories table (all databases)
|
|
5
|
+
- Add feedback column to memories table (all databases)
|
|
6
|
+
- Change JSON to JSONB for PostgreSQL
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
import time
|
|
10
|
+
from typing import Any, List, Tuple
|
|
11
|
+
|
|
12
|
+
from agno.db.base import AsyncBaseDb, BaseDb
|
|
13
|
+
from agno.utils.log import log_error, log_info, log_warning
|
|
14
|
+
|
|
15
|
+
try:
|
|
16
|
+
from sqlalchemy import text
|
|
17
|
+
except ImportError:
|
|
18
|
+
raise ImportError("`sqlalchemy` not installed. Please install it using `pip install sqlalchemy`")
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def up(db: BaseDb, table_type: str, table_name: str) -> bool:
    """
    Apply the v2.3.0 schema changes for the given table:
    - Add created_at, feedback columns to memories table
    - Convert JSON to JSONB for PostgreSQL
    - Change String to Text for long fields (PostgreSQL)
    - Add default values to metrics table (MySQL)

    Only the changes relevant to the given table_type are applied.

    Returns:
        bool: True if any migration was applied, False otherwise.
    """
    db_type = type(db).__name__

    # Dispatch table: each supported SQL backend has a dedicated routine.
    handlers = {
        "PostgresDb": _migrate_postgres,
        "MySQLDb": _migrate_mysql,
        "SqliteDb": _migrate_sqlite,
        "SingleStoreDb": _migrate_singlestore,
    }
    handler = handlers.get(db_type)
    if handler is None:
        # NoSQL / document stores have no fixed schema to migrate.
        log_info(f"{db_type} does not require schema migrations (NoSQL/document store)")
        return False

    try:
        return handler(db, table_type, table_name)
    except Exception as e:
        log_error(f"Error running migration v2.3.0 for {db_type} on table {table_name}: {e}")
        raise
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
async def async_up(db: AsyncBaseDb, table_type: str, table_name: str) -> bool:
    """
    Apply the v2.3.0 schema changes for the given table, asynchronously:
    - Add created_at, feedback columns to memories table
    - Convert JSON to JSONB for PostgreSQL
    - Change String to Text for long fields (PostgreSQL)
    - Add default values to metrics table (MySQL)

    Only the changes relevant to the given table_type are applied.

    Returns:
        bool: True if any migration was applied, False otherwise.
    """
    db_type = type(db).__name__

    # Dispatch table for the async backends that need schema migrations.
    handlers = {
        "AsyncPostgresDb": _migrate_async_postgres,
        "AsyncSqliteDb": _migrate_async_sqlite,
    }
    handler = handlers.get(db_type)
    if handler is None:
        # NoSQL / document stores have no fixed schema to migrate.
        log_info(f"{db_type} does not require schema migrations (NoSQL/document store)")
        return False

    try:
        return await handler(db, table_type, table_name)
    except Exception as e:
        log_error(f"Error running migration v2.3.0 for {db_type} on table {table_name}: {e}")
        raise
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
def down(db: BaseDb, table_type: str, table_name: str) -> bool:
    """
    Revert the v2.3.0 schema changes for the given table:
    - Remove created_at, feedback columns from memories table
    - Revert JSONB to JSON for PostgreSQL (if needed)

    Only the changes relevant to the given table_type are reverted.

    Returns:
        bool: True if any migration was reverted, False otherwise.
    """
    db_type = type(db).__name__

    # Dispatch table: each supported SQL backend has a dedicated revert routine.
    handlers = {
        "PostgresDb": _revert_postgres,
        "MySQLDb": _revert_mysql,
        "SqliteDb": _revert_sqlite,
        "SingleStoreDb": _revert_singlestore,
    }
    handler = handlers.get(db_type)
    if handler is None:
        log_info(f"Revert not implemented for {db_type}")
        return False

    try:
        return handler(db, table_type, table_name)
    except Exception as e:
        log_error(f"Error reverting migration v2.3.0 for {db_type} on table {table_name}: {e}")
        raise
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
async def async_down(db: AsyncBaseDb, table_type: str, table_name: str) -> bool:
    """
    Revert the v2.3.0 schema changes for the given table, asynchronously:
    - Remove created_at, feedback columns from memories table
    - Revert JSONB to JSON for PostgreSQL (if needed)

    Only the changes relevant to the given table_type are reverted.

    Returns:
        bool: True if any migration was reverted, False otherwise.
    """
    db_type = type(db).__name__

    # Dispatch table for the async backends with a revert routine.
    handlers = {
        "AsyncPostgresDb": _revert_async_postgres,
        "AsyncSqliteDb": _revert_async_sqlite,
    }
    handler = handlers.get(db_type)
    if handler is None:
        log_info(f"Revert not implemented for {db_type}")
        return False

    try:
        return await handler(db, table_type, table_name)
    except Exception as e:
        log_error(f"Error reverting migration v2.3.0 for {db_type} on table {table_name} asynchronously: {e}")
        raise
|
|
135
|
+
|
|
136
|
+
|
|
137
|
+
def _migrate_postgres(db: BaseDb, table_type: str, table_name: str) -> bool:
    """Migrate PostgreSQL database."""
    from sqlalchemy import text

    db_schema = db.db_schema or "public"  # type: ignore

    # Columns to convert from JSON to JSONB, keyed by table type.
    jsonb_targets = {
        "memories": ["memory", "topics"],
        "sessions": ["session_data", "agent_data", "team_data", "workflow_data", "metadata", "runs", "summary"],
        "evals": ["eval_data", "eval_input"],
        "metrics": ["token_metrics", "model_metrics"],
        "knowledge": ["metadata"],
        "culture": ["metadata"],
    }

    with db.Session() as sess, sess.begin():  # type: ignore
        # Nothing to do when the target table was never created.
        found = sess.execute(
            text(
                """
                SELECT EXISTS (
                    SELECT FROM information_schema.tables
                    WHERE table_schema = :schema
                    AND table_name = :table_name
                )
                """
            ),
            {"schema": db_schema, "table_name": table_name},
        ).scalar()
        if not found:
            log_info(f"Table {table_name} does not exist, skipping migration")
            return False

        if table_type == "memories":
            # Collect the columns that already exist so the migration stays idempotent.
            rows = sess.execute(
                text(
                    """
                    SELECT column_name
                    FROM information_schema.columns
                    WHERE table_schema = :schema
                    AND table_name = :table_name
                    """
                ),
                {"schema": db_schema, "table_name": table_name},
            ).fetchall()
            present = {row[0] for row in rows}

            if "created_at" not in present:
                log_info(f"-- Adding created_at column to {table_name}")
                fallback_ts = int(time.time())
                # Add nullable first, backfill, then tighten to NOT NULL.
                sess.execute(text(f"ALTER TABLE {db_schema}.{table_name} ADD COLUMN created_at BIGINT"))
                sess.execute(
                    text(f"UPDATE {db_schema}.{table_name} SET created_at = COALESCE(updated_at, :default_time)"),
                    {"default_time": fallback_ts},
                )
                sess.execute(text(f"ALTER TABLE {db_schema}.{table_name} ALTER COLUMN created_at SET NOT NULL"))
                sess.execute(
                    text(
                        f"CREATE INDEX IF NOT EXISTS idx_{table_name}_created_at "
                        f"ON {db_schema}.{table_name}(created_at)"
                    )
                )

            if "feedback" not in present:
                log_info(f"Adding feedback column to {table_name}")
                sess.execute(text(f"ALTER TABLE {db_schema}.{table_name} ADD COLUMN feedback TEXT"))

        if table_type in jsonb_targets:
            _convert_json_to_jsonb(
                sess, db_schema, [(column, table_name) for column in jsonb_targets[table_type]]
            )

        sess.commit()
        return True
|
|
272
|
+
|
|
273
|
+
|
|
274
|
+
def _convert_json_to_jsonb(sess: Any, db_schema: str, json_columns: List[Tuple[str, str]]) -> None:
    """Convert each given (column, table) pair from JSON to JSONB, when still typed JSON."""
    for column_name, table_name in json_columns:
        qualified = f"{db_schema}.{table_name}" if db_schema else table_name
        # Look up the column's current type; skip anything already converted.
        current_type = sess.execute(
            text(
                """
                SELECT data_type
                FROM information_schema.columns
                WHERE table_schema = :schema
                AND table_name = :table_name
                AND column_name = :column_name
                """
            ),
            {"schema": db_schema, "table_name": table_name, "column_name": column_name},
        ).scalar()
        if current_type != "json":
            continue

        log_info(f"-- Converting {table_name}.{column_name} from JSON to JSONB")
        sess.execute(
            text(f"ALTER TABLE {qualified} ALTER COLUMN {column_name} TYPE JSONB USING {column_name}::jsonb")
        )
|
|
301
|
+
|
|
302
|
+
|
|
303
|
+
async def _migrate_async_postgres(db: AsyncBaseDb, table_type: str, table_name: str) -> bool:
    """Migrate PostgreSQL database."""
    from sqlalchemy import text

    db_schema = db.db_schema or "public"  # type: ignore

    # Columns to convert from JSON to JSONB, keyed by table type.
    jsonb_targets = {
        "memories": ["memory", "topics"],
        "sessions": ["session_data", "agent_data", "team_data", "workflow_data", "metadata", "runs", "summary"],
        "evals": ["eval_data", "eval_input"],
        "metrics": ["token_metrics", "model_metrics"],
        "knowledge": ["metadata"],
        "culture": ["metadata"],
    }

    async with db.async_session_factory() as sess, sess.begin():  # type: ignore
        # Nothing to do when the target table was never created.
        result = await sess.execute(
            text(
                """
                SELECT EXISTS (
                    SELECT FROM information_schema.tables
                    WHERE table_schema = :schema
                    AND table_name = :table_name
                )
                """
            ),
            {"schema": db_schema, "table_name": table_name},
        )
        if not result.scalar():
            log_info(f"Table {table_name} does not exist, skipping migration")
            return False

        if table_type == "memories":
            # Collect the columns that already exist so the migration stays idempotent.
            result = await sess.execute(
                text(
                    """
                    SELECT column_name
                    FROM information_schema.columns
                    WHERE table_schema = :schema
                    AND table_name = :table_name
                    """
                ),
                {"schema": db_schema, "table_name": table_name},
            )
            present = {row[0] for row in result.fetchall()}

            if "created_at" not in present:
                log_info(f"-- Adding created_at column to {table_name}")
                fallback_ts = int(time.time())
                # Add nullable first, backfill, then tighten to NOT NULL.
                await sess.execute(text(f"ALTER TABLE {db_schema}.{table_name} ADD COLUMN created_at BIGINT"))
                await sess.execute(
                    text(f"UPDATE {db_schema}.{table_name} SET created_at = COALESCE(updated_at, :default_time)"),
                    {"default_time": fallback_ts},
                )
                await sess.execute(text(f"ALTER TABLE {db_schema}.{table_name} ALTER COLUMN created_at SET NOT NULL"))
                await sess.execute(
                    text(
                        f"CREATE INDEX IF NOT EXISTS idx_{table_name}_created_at "
                        f"ON {db_schema}.{table_name}(created_at)"
                    )
                )

            if "feedback" not in present:
                log_info(f"Adding feedback column to {table_name}")
                await sess.execute(text(f"ALTER TABLE {db_schema}.{table_name} ADD COLUMN feedback TEXT"))

        if table_type in jsonb_targets:
            await _async_convert_json_to_jsonb(
                sess, db_schema, [(column, table_name) for column in jsonb_targets[table_type]]
            )

        await sess.commit()
        return True
|
|
441
|
+
|
|
442
|
+
|
|
443
|
+
async def _async_convert_json_to_jsonb(sess: Any, db_schema: str, json_columns: List[Tuple[str, str]]) -> None:
    """Convert each given (column, table) pair from JSON to JSONB, when still typed JSON."""
    for column_name, table_name in json_columns:
        qualified = f"{db_schema}.{table_name}" if db_schema else table_name
        # Look up the column's current type; skip anything already converted.
        result = await sess.execute(
            text(
                """
                SELECT data_type
                FROM information_schema.columns
                WHERE table_schema = :schema
                AND table_name = :table_name
                AND column_name = :column_name
                """
            ),
            {"schema": db_schema, "table_name": table_name, "column_name": column_name},
        )
        if result.scalar() != "json":
            continue

        log_info(f"-- Converting {table_name}.{column_name} from JSON to JSONB")
        await sess.execute(
            text(f"ALTER TABLE {qualified} ALTER COLUMN {column_name} TYPE JSONB USING {column_name}::jsonb")
        )
|
|
471
|
+
|
|
472
|
+
|
|
473
|
+
def _migrate_mysql(db: BaseDb, table_type: str, table_name: str) -> bool:
    """Migrate MySQL database."""
    from sqlalchemy import text

    db_schema = db.db_schema or "agno"  # type: ignore

    with db.Session() as sess, sess.begin():  # type: ignore
        # Nothing to do when the target table was never created.
        found = sess.execute(
            text(
                """
                SELECT EXISTS (
                    SELECT 1 FROM INFORMATION_SCHEMA.TABLES
                    WHERE TABLE_SCHEMA = :schema
                    AND TABLE_NAME = :table_name
                )
                """
            ),
            {"schema": db_schema, "table_name": table_name},
        ).scalar()
        if not found:
            log_info(f"Table {table_name} does not exist, skipping migration")
            return False

        if table_type == "memories":
            # Collect the columns that already exist so the migration stays idempotent.
            rows = sess.execute(
                text(
                    """
                    SELECT COLUMN_NAME
                    FROM INFORMATION_SCHEMA.COLUMNS
                    WHERE TABLE_SCHEMA = :schema
                    AND TABLE_NAME = :table_name
                    """
                ),
                {"schema": db_schema, "table_name": table_name},
            ).fetchall()
            present = {row[0] for row in rows}

            if "created_at" not in present:
                log_info(f"-- Adding created_at column to {table_name}")
                fallback_ts = int(time.time())
                # Add the column and its index in a single ALTER statement.
                sess.execute(
                    text(
                        f"ALTER TABLE `{db_schema}`.`{table_name}` "
                        f"ADD COLUMN `created_at` BIGINT, "
                        f"ADD INDEX `idx_{table_name}_created_at` (`created_at`)"
                    )
                )
                # Backfill from updated_at, falling back to the current time.
                sess.execute(
                    text(
                        f"UPDATE `{db_schema}`.`{table_name}` "
                        f"SET `created_at` = COALESCE(`updated_at`, :default_time)"
                    ),
                    {"default_time": fallback_ts},
                )
                # Tighten the column to NOT NULL once every row has a value.
                sess.execute(
                    text(f"ALTER TABLE `{db_schema}`.`{table_name}` MODIFY COLUMN `created_at` BIGINT NOT NULL")
                )

            if "feedback" not in present:
                log_info(f"-- Adding feedback column to {table_name}")
                sess.execute(text(f"ALTER TABLE `{db_schema}`.`{table_name}` ADD COLUMN `feedback` TEXT"))

        sess.commit()
        return True
|
|
560
|
+
|
|
561
|
+
|
|
562
|
+
def _migrate_sqlite(db: BaseDb, table_type: str, table_name: str) -> bool:
    """Migrate SQLite database."""

    with db.Session() as sess, sess.begin():  # type: ignore
        # Nothing to do when the target table was never created.
        found = sess.execute(
            text(
                """
                SELECT COUNT(*) FROM sqlite_master
                WHERE type='table' AND name=:table_name
                """
            ),
            {"table_name": table_name},
        ).scalar()
        if not found:
            log_info(f"Table {table_name} does not exist, skipping migration")
            return False

        if table_type == "memories":
            # SQLite has no ALTER COLUMN, so new columns are added with their
            # constraints inline; existing columns are inspected via PRAGMA.
            pragma_rows = sess.execute(text(f"PRAGMA table_info({table_name})")).fetchall()
            present = {row[1] for row in pragma_rows}  # row[1] holds the column name

            if "created_at" not in present:
                log_info(f"-- Adding created_at column to {table_name}")
                fallback_ts = int(time.time())
                # Adding with NOT NULL + DEFAULT seeds existing rows in one step.
                sess.execute(
                    text(f"ALTER TABLE {table_name} ADD COLUMN created_at BIGINT NOT NULL DEFAULT {fallback_ts}"),
                )
                # Prefer updated_at for existing rows; rows without it keep the default.
                sess.execute(
                    text(
                        f"UPDATE {table_name} "
                        f"SET created_at = COALESCE(updated_at, :default_time) "
                        f"WHERE created_at = :default_time"
                    ),
                    {"default_time": fallback_ts},
                )
                sess.execute(
                    text(f"CREATE INDEX IF NOT EXISTS idx_{table_name}_created_at ON {table_name}(created_at)")
                )

            if "feedback" not in present:
                log_info(f"-- Adding feedback column to {table_name}")
                sess.execute(text(f"ALTER TABLE {table_name} ADD COLUMN feedback VARCHAR"))

        sess.commit()
        return True
|
|
620
|
+
|
|
621
|
+
|
|
622
|
+
async def _migrate_async_sqlite(db: AsyncBaseDb, table_type: str, table_name: str) -> bool:
    """Migrate SQLite database."""

    async with db.async_session_factory() as sess, sess.begin():  # type: ignore
        # Nothing to do when the target table was never created.
        result = await sess.execute(
            text(
                """
                SELECT COUNT(*) FROM sqlite_master
                WHERE type='table' AND name=:table_name
                """
            ),
            {"table_name": table_name},
        )
        if not result.scalar():
            log_info(f"Table {table_name} does not exist, skipping migration")
            return False

        if table_type == "memories":
            # SQLite has no ALTER COLUMN, so new columns are added with their
            # constraints inline; existing columns are inspected via PRAGMA.
            result = await sess.execute(text(f"PRAGMA table_info({table_name})"))
            present = {row[1] for row in result.fetchall()}  # row[1] holds the column name

            if "created_at" not in present:
                log_info(f"-- Adding created_at column to {table_name}")
                fallback_ts = int(time.time())
                # Adding with NOT NULL + DEFAULT seeds existing rows in one step.
                await sess.execute(
                    text(f"ALTER TABLE {table_name} ADD COLUMN created_at BIGINT NOT NULL DEFAULT {fallback_ts}"),
                )
                # Prefer updated_at for existing rows; rows without it keep the default.
                await sess.execute(
                    text(
                        f"UPDATE {table_name} "
                        f"SET created_at = COALESCE(updated_at, :default_time) "
                        f"WHERE created_at = :default_time"
                    ),
                    {"default_time": fallback_ts},
                )
                await sess.execute(
                    text(f"CREATE INDEX IF NOT EXISTS idx_{table_name}_created_at ON {table_name}(created_at)")
                )

            if "feedback" not in present:
                log_info(f"-- Adding feedback column to {table_name}")
                await sess.execute(text(f"ALTER TABLE {table_name} ADD COLUMN feedback VARCHAR"))

        await sess.commit()
        return True
|
|
681
|
+
|
|
682
|
+
|
|
683
|
+
def _migrate_singlestore(db: BaseDb, table_type: str, table_name: str) -> bool:
    """Migrate SingleStore database."""
    from sqlalchemy import text

    db_schema = db.db_schema or "agno"  # type: ignore

    with db.Session() as sess, sess.begin():  # type: ignore
        # Nothing to do when the target table was never created.
        found = sess.execute(
            text(
                """
                SELECT EXISTS (
                    SELECT 1 FROM INFORMATION_SCHEMA.TABLES
                    WHERE TABLE_SCHEMA = :schema
                    AND TABLE_NAME = :table_name
                )
                """
            ),
            {"schema": db_schema, "table_name": table_name},
        ).scalar()
        if not found:
            log_info(f"Table {table_name} does not exist, skipping migration")
            return False

        if table_type == "memories":
            # Collect the columns that already exist so the migration stays idempotent.
            rows = sess.execute(
                text(
                    """
                    SELECT COLUMN_NAME
                    FROM INFORMATION_SCHEMA.COLUMNS
                    WHERE TABLE_SCHEMA = :schema
                    AND TABLE_NAME = :table_name
                    """
                ),
                {"schema": db_schema, "table_name": table_name},
            ).fetchall()
            present = {row[0] for row in rows}

            if "created_at" not in present:
                log_info(f"-- Adding created_at column to {table_name}")
                fallback_ts = int(time.time())
                # Add the column and its index in a single ALTER statement.
                sess.execute(
                    text(
                        f"ALTER TABLE `{db_schema}`.`{table_name}` "
                        f"ADD COLUMN `created_at` BIGINT, "
                        f"ADD INDEX `idx_{table_name}_created_at` (`created_at`)"
                    )
                )
                # Backfill from updated_at, falling back to the current time.
                sess.execute(
                    text(
                        f"UPDATE `{db_schema}`.`{table_name}` "
                        f"SET `created_at` = COALESCE(`updated_at`, :default_time)"
                    ),
                    {"default_time": fallback_ts},
                )

            if "feedback" not in present:
                log_info(f"-- Adding feedback column to {table_name}")
                sess.execute(text(f"ALTER TABLE `{db_schema}`.`{table_name}` ADD COLUMN `feedback` TEXT"))

        sess.commit()
        return True
|
|
761
|
+
|
|
762
|
+
|
|
763
|
+
def _revert_postgres(db: BaseDb, table_type: str, table_name: str) -> bool:
    """Revert the v2.3.0 PostgreSQL migration for the given table.

    Drops the `feedback` column, the `created_at` index and the `created_at`
    column that the forward migration added to memories tables.

    Args:
        db: Database handle providing a Session factory and a db_schema.
        table_type: Logical table kind; only "memories" tables are reverted.
        table_name: Name of the table to revert.

    Returns:
        bool: True if the revert ran (a no-op for non-memories table types),
            False if the table does not exist.
    """
    from sqlalchemy import text

    db_schema = db.db_schema or "agno"  # type: ignore

    with db.Session() as sess, sess.begin():  # type: ignore
        # Check if table exists
        table_exists = sess.execute(
            text(
                """
                SELECT EXISTS (
                    SELECT FROM information_schema.tables
                    WHERE table_schema = :schema
                    AND table_name = :table_name
                )
                """
            ),
            {"schema": db_schema, "table_name": table_name},
        ).scalar()

        if not table_exists:
            log_info(f"Table {table_name} does not exist, skipping revert")
            return False
        if table_type == "memories":
            # Remove columns (in reverse order)
            sess.execute(text(f"ALTER TABLE {db_schema}.{table_name} DROP COLUMN IF EXISTS feedback"))
            # Fix: schema-qualify the index name. An unqualified DROP INDEX is
            # resolved via search_path, so an index created in db_schema could
            # silently survive the revert (the miss is masked by IF EXISTS).
            sess.execute(text(f"DROP INDEX IF EXISTS {db_schema}.idx_{table_name}_created_at"))
            sess.execute(text(f"ALTER TABLE {db_schema}.{table_name} DROP COLUMN IF EXISTS created_at"))
        sess.commit()
        return True
|
|
794
|
+
|
|
795
|
+
|
|
796
|
+
async def _revert_async_postgres(db: AsyncBaseDb, table_type: str, table_name: str) -> bool:
    """Revert the v2.3.0 PostgreSQL migration for the given table (async).

    Drops the `feedback` column, the `created_at` index and the `created_at`
    column that the forward migration added to memories tables.

    Args:
        db: Async database handle providing a session factory and a db_schema.
        table_type: Logical table kind; only "memories" tables are reverted.
        table_name: Name of the table to revert.

    Returns:
        bool: True if the revert ran (a no-op for non-memories table types),
            False if the table does not exist.
    """
    from sqlalchemy import text

    db_schema = db.db_schema or "agno"  # type: ignore

    async with db.async_session_factory() as sess, sess.begin():  # type: ignore
        # Check if table exists
        result = await sess.execute(
            text(
                """
                SELECT EXISTS (
                    SELECT FROM information_schema.tables
                    WHERE table_schema = :schema
                    AND table_name = :table_name
                )
                """
            ),
            {"schema": db_schema, "table_name": table_name},
        )
        table_exists = result.scalar()

        if not table_exists:
            log_info(f"Table {table_name} does not exist, skipping revert")
            return False
        if table_type == "memories":
            # Remove columns (in reverse order)
            await sess.execute(text(f"ALTER TABLE {db_schema}.{table_name} DROP COLUMN IF EXISTS feedback"))
            # Fix: schema-qualify the index name. An unqualified DROP INDEX is
            # resolved via search_path, so an index created in db_schema could
            # silently survive the revert (the miss is masked by IF EXISTS).
            await sess.execute(text(f"DROP INDEX IF EXISTS {db_schema}.idx_{table_name}_created_at"))
            await sess.execute(text(f"ALTER TABLE {db_schema}.{table_name} DROP COLUMN IF EXISTS created_at"))
        await sess.commit()
        return True
|
|
828
|
+
|
|
829
|
+
|
|
830
|
+
def _revert_mysql(db: BaseDb, table_type: str, table_name: str) -> bool:
    """Revert MySQL migration.

    Removes the `feedback` column, the `created_at` index and the
    `created_at` column from a memories table. Each DROP is guarded by an
    INFORMATION_SCHEMA lookup so it only runs when the target exists.

    Args:
        db: Database handle providing a Session factory and a db_schema.
        table_type: Logical table kind; only "memories" tables are reverted.
        table_name: Name of the table to revert.

    Returns:
        bool: True if the revert ran, False if the table does not exist.
    """
    from sqlalchemy import text

    db_schema = db.db_schema or "agno"  # type: ignore

    with db.Session() as sess, sess.begin():  # type: ignore
        # Bail out early when the target table is missing entirely.
        found = sess.execute(
            text(
                """
                SELECT EXISTS (
                    SELECT 1 FROM INFORMATION_SCHEMA.TABLES
                    WHERE TABLE_SCHEMA = :schema
                    AND TABLE_NAME = :table_name
                )
                """
            ),
            {"schema": db_schema, "table_name": table_name},
        ).scalar()

        if not found:
            log_info(f"Table {table_name} does not exist, skipping revert")
            return False
        if table_type == "memories":
            # Snapshot the current column names so each DROP is conditional.
            column_rows = sess.execute(
                text(
                    """
                    SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.COLUMNS
                    WHERE TABLE_SCHEMA = :schema AND TABLE_NAME = :table_name
                    """
                ),
                {"schema": db_schema, "table_name": table_name},
            )
            current_columns = {column_row[0] for column_row in column_rows}

            if "feedback" in current_columns:
                sess.execute(text(f"ALTER TABLE `{db_schema}`.`{table_name}` DROP COLUMN `feedback`"))

            # The created_at index is dropped before the column itself.
            created_at_index = sess.execute(
                text(
                    """
                    SELECT COUNT(1) FROM INFORMATION_SCHEMA.STATISTICS
                    WHERE TABLE_SCHEMA = :schema
                    AND TABLE_NAME = :table_name
                    AND INDEX_NAME = :index_name
                    """
                ),
                {"schema": db_schema, "table_name": table_name, "index_name": f"idx_{table_name}_created_at"},
            ).scalar()
            if created_at_index:
                sess.execute(text(f"ALTER TABLE `{db_schema}`.`{table_name}` DROP INDEX `idx_{table_name}_created_at`"))

            if "created_at" in current_columns:
                sess.execute(text(f"ALTER TABLE `{db_schema}`.`{table_name}` DROP COLUMN `created_at`"))

        sess.commit()
        return True
|
|
891
|
+
|
|
892
|
+
|
|
893
|
+
def _revert_sqlite(db: BaseDb, table_type: str, table_name: str) -> bool:
    """Revert SQLite migration.

    This is a deliberate no-op: it only emits a warning and reports that no
    revert was performed.

    Returns:
        bool: Always False — nothing was reverted.
    """
    log_warning(f"-- SQLite does not support DROP COLUMN easily. Manual migration may be required for {table_name}.")

    return False
|
|
898
|
+
|
|
899
|
+
|
|
900
|
+
async def _revert_async_sqlite(db: AsyncBaseDb, table_type: str, table_name: str) -> bool:
    """Revert SQLite migration (async variant).

    This is a deliberate no-op: it only emits a warning and reports that no
    revert was performed.

    Returns:
        bool: Always False — nothing was reverted.
    """
    log_warning(f"-- SQLite does not support DROP COLUMN easily. Manual migration may be required for {table_name}.")

    return False
|
|
905
|
+
|
|
906
|
+
|
|
907
|
+
def _revert_singlestore(db: BaseDb, table_type: str, table_name: str) -> bool:
    """Revert SingleStore migration.

    Drops the `feedback` column, the `created_at` index and the `created_at`
    column from a memories table, relying on `IF EXISTS` so each statement is
    safe to run even when the target was never created.

    Args:
        db: Database handle providing a Session factory and a db_schema.
        table_type: Logical table kind; only "memories" tables are reverted.
        table_name: Name of the table to revert.

    Returns:
        bool: True if the revert ran, False if the table does not exist.
    """
    from sqlalchemy import text

    db_schema = db.db_schema or "agno"  # type: ignore

    with db.Session() as sess, sess.begin():  # type: ignore
        # Skip everything when the table itself is absent.
        table_present = sess.execute(
            text(
                """
                SELECT EXISTS (
                    SELECT 1 FROM INFORMATION_SCHEMA.TABLES
                    WHERE TABLE_SCHEMA = :schema
                    AND TABLE_NAME = :table_name
                )
                """
            ),
            {"schema": db_schema, "table_name": table_name},
        ).scalar()

        if not table_present:
            log_info(f"Table {table_name} does not exist, skipping revert")
            return False
        if table_type == "memories":
            # Undo the forward migration in reverse order of creation.
            revert_statements = (
                f"ALTER TABLE `{db_schema}`.`{table_name}` DROP COLUMN IF EXISTS `feedback`",
                f"ALTER TABLE `{db_schema}`.`{table_name}` DROP INDEX IF EXISTS `idx_{table_name}_created_at`",
                f"ALTER TABLE `{db_schema}`.`{table_name}` DROP COLUMN IF EXISTS `created_at`",
            )
            for statement in revert_statements:
                sess.execute(text(statement))
        sess.commit()
        return True
|