agno 2.3.21__py3-none-any.whl → 2.3.22__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. agno/agent/agent.py +26 -1
  2. agno/agent/remote.py +233 -72
  3. agno/client/a2a/__init__.py +10 -0
  4. agno/client/a2a/client.py +554 -0
  5. agno/client/a2a/schemas.py +112 -0
  6. agno/client/a2a/utils.py +369 -0
  7. agno/db/migrations/utils.py +19 -0
  8. agno/db/migrations/v1_to_v2.py +54 -16
  9. agno/db/migrations/versions/v2_3_0.py +92 -53
  10. agno/db/postgres/async_postgres.py +162 -40
  11. agno/db/postgres/postgres.py +181 -31
  12. agno/db/postgres/utils.py +6 -2
  13. agno/knowledge/chunking/document.py +3 -2
  14. agno/knowledge/chunking/markdown.py +8 -3
  15. agno/knowledge/chunking/recursive.py +2 -2
  16. agno/models/openai/chat.py +1 -1
  17. agno/models/openai/responses.py +14 -7
  18. agno/os/middleware/jwt.py +66 -27
  19. agno/os/routers/agents/router.py +2 -2
  20. agno/os/routers/knowledge/knowledge.py +3 -3
  21. agno/os/routers/teams/router.py +2 -2
  22. agno/os/routers/workflows/router.py +2 -2
  23. agno/reasoning/deepseek.py +11 -1
  24. agno/reasoning/gemini.py +6 -2
  25. agno/reasoning/groq.py +8 -3
  26. agno/reasoning/openai.py +2 -0
  27. agno/remote/base.py +105 -8
  28. agno/skills/__init__.py +17 -0
  29. agno/skills/agent_skills.py +370 -0
  30. agno/skills/errors.py +32 -0
  31. agno/skills/loaders/__init__.py +4 -0
  32. agno/skills/loaders/base.py +27 -0
  33. agno/skills/loaders/local.py +216 -0
  34. agno/skills/skill.py +65 -0
  35. agno/skills/utils.py +107 -0
  36. agno/skills/validator.py +277 -0
  37. agno/team/remote.py +219 -59
  38. agno/team/team.py +22 -2
  39. agno/tools/mcp/mcp.py +299 -17
  40. agno/tools/mcp/multi_mcp.py +269 -14
  41. agno/utils/mcp.py +49 -8
  42. agno/utils/string.py +43 -1
  43. agno/workflow/condition.py +4 -2
  44. agno/workflow/loop.py +20 -1
  45. agno/workflow/remote.py +172 -32
  46. agno/workflow/router.py +4 -1
  47. agno/workflow/steps.py +4 -0
  48. {agno-2.3.21.dist-info → agno-2.3.22.dist-info}/METADATA +13 -14
  49. {agno-2.3.21.dist-info → agno-2.3.22.dist-info}/RECORD +52 -38
  50. {agno-2.3.21.dist-info → agno-2.3.22.dist-info}/WHEEL +0 -0
  51. {agno-2.3.21.dist-info → agno-2.3.22.dist-info}/licenses/LICENSE +0 -0
  52. {agno-2.3.21.dist-info → agno-2.3.22.dist-info}/top_level.txt +0 -0
agno/db/postgres/async_postgres.py

@@ -1,7 +1,7 @@
  import time
  import warnings
  from datetime import date, datetime, timedelta, timezone
- from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Tuple, Union
+ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Tuple, Union, cast
  from uuid import uuid4

  if TYPE_CHECKING:
@@ -28,6 +28,7 @@ from agno.db.schemas.knowledge import KnowledgeRow
  from agno.db.schemas.memory import UserMemory
  from agno.session import AgentSession, Session, TeamSession, WorkflowSession
  from agno.utils.log import log_debug, log_error, log_info, log_warning
+ from agno.utils.string import sanitize_postgres_string, sanitize_postgres_strings

  try:
  from sqlalchemy import Index, String, Table, UniqueConstraint, and_, case, func, or_, update
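Note: sanitize_postgres_string and sanitize_postgres_strings come from agno/utils/string.py (also changed in this release, +43 -1), whose implementation is not part of this hunk. As a rough, hypothetical sketch of what helpers like these usually do — drop the NUL (0x00) characters that PostgreSQL text and JSON/JSONB values reject, recursing through nested containers — they might look like:

from typing import Any, Optional

def sanitize_postgres_string(value: Optional[str]) -> Optional[str]:
    # Hypothetical sketch: strip NUL (0x00) characters, which PostgreSQL rejects in text values.
    if value is None:
        return None
    return value.replace("\x00", "")

def sanitize_postgres_strings(data: Any) -> Any:
    # Hypothetical sketch: recursively sanitize strings nested in dicts/lists
    # destined for JSON/JSONB columns.
    if isinstance(data, str):
        return data.replace("\x00", "")
    if isinstance(data, dict):
        return {key: sanitize_postgres_strings(val) for key, val in data.items()}
    if isinstance(data, list):
        return [sanitize_postgres_strings(item) for item in data]
    return data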
@@ -68,6 +69,15 @@ class AsyncPostgresDb(AsyncBaseDb):
  2. Use the db_url
  3. Raise an error if neither is provided

+ Connection Pool Configuration:
+ When creating an engine from db_url, the following settings are applied:
+ - pool_pre_ping=True: Validates connections before use to handle terminated
+   connections (e.g., "terminating connection due to administrator command")
+ - pool_recycle=3600: Recycles connections after 1 hour to prevent stale connections
+
+ These settings help handle connection terminations gracefully. If you need
+ custom pool settings, provide a pre-configured db_engine instead.
+
  Args:
  id (Optional[str]): The ID of the database.
  db_url (Optional[str]): The database URL to connect to.
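The new docstring points at the engine change in the next hunk: engines built from db_url now get pool_pre_ping and pool_recycle, and callers needing different pool behaviour should pass a pre-configured db_engine. A minimal usage sketch (the connection URL and pool numbers are placeholders):

from sqlalchemy.ext.asyncio import create_async_engine
from agno.db.postgres.async_postgres import AsyncPostgresDb

# Sketch: supply a pre-configured engine when the defaults
# (pool_pre_ping=True, pool_recycle=3600) are not what you want.
engine = create_async_engine(
    "postgresql+asyncpg://user:pass@localhost:5432/agno",  # placeholder URL
    pool_pre_ping=True,   # validate connections before each checkout
    pool_recycle=1800,    # recycle after 30 minutes instead of 1 hour
    pool_size=10,
)
db = AsyncPostgresDb(db_engine=engine)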
@@ -112,7 +122,11 @@ class AsyncPostgresDb(AsyncBaseDb):

  _engine: Optional[AsyncEngine] = db_engine
  if _engine is None and db_url is not None:
- _engine = create_async_engine(db_url)
+ _engine = create_async_engine(
+ db_url,
+ pool_pre_ping=True,
+ pool_recycle=3600,
+ )
  if _engine is None:
  raise ValueError("One of db_url or db_engine must be provided")

@@ -522,6 +536,11 @@ class AsyncPostgresDb(AsyncBaseDb):

  if user_id is not None:
  stmt = stmt.where(table.c.user_id == user_id)
+
+ # Filter by session_type to ensure we get the correct session type
+ session_type_value = session_type.value if isinstance(session_type, SessionType) else session_type
+ stmt = stmt.where(table.c.session_type == session_type_value)
+
  result = await sess.execute(stmt)
  row = result.fetchone()
  if row is None:
@@ -604,9 +623,7 @@ class AsyncPostgresDb(AsyncBaseDb):
  stmt = stmt.where(table.c.created_at <= end_timestamp)
  if session_name is not None:
  stmt = stmt.where(
- func.coalesce(func.json_extract_path_text(table.c.session_data, "session_name"), "").ilike(
- f"%{session_name}%"
- )
+ func.coalesce(table.c.session_data["session_name"].astext, "").ilike(f"%{session_name}%")
  )
  if session_type is not None:
  session_type_value = session_type.value if isinstance(session_type, SessionType) else session_type
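The session_name filter above swaps the explicit json_extract_path_text() call for SQLAlchemy's JSON path accessor. An illustration of the two forms, reusing the table/stmt/session_name names from the surrounding code, with the roughly equivalent PostgreSQL rendering in comments:

from sqlalchemy import func

# Old form: explicit SQL function call
#   json_extract_path_text(sessions.session_data, 'session_name')
old_expr = func.json_extract_path_text(table.c.session_data, "session_name")

# New form: index access plus .astext, rendered as the ->> operator
#   sessions.session_data ->> 'session_name'
new_expr = table.c.session_data["session_name"].astext

stmt = stmt.where(func.coalesce(new_expr, "").ilike(f"%{session_name}%"))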
@@ -670,6 +687,8 @@ class AsyncPostgresDb(AsyncBaseDb):
  table = await self._get_table(table_type="sessions")

  async with self.async_session_factory() as sess, sess.begin():
+ # Sanitize session_name to remove null bytes
+ sanitized_session_name = sanitize_postgres_string(session_name)
  stmt = (
  update(table)
  .where(table.c.session_id == session_id)
@@ -679,7 +698,7 @@ class AsyncPostgresDb(AsyncBaseDb):
  func.jsonb_set(
  func.cast(table.c.session_data, postgresql.JSONB),
  text("'{session_name}'"),
- func.to_jsonb(session_name),
+ func.to_jsonb(sanitized_session_name),
  ),
  postgresql.JSON,
  )
@@ -732,6 +751,21 @@ class AsyncPostgresDb(AsyncBaseDb):
  try:
  table = await self._get_table(table_type="sessions", create_table_if_not_found=True)
  session_dict = session.to_dict()
+ # Sanitize JSON/dict fields to remove null bytes from nested strings
+ if session_dict.get("agent_data"):
+ session_dict["agent_data"] = sanitize_postgres_strings(session_dict["agent_data"])
+ if session_dict.get("team_data"):
+ session_dict["team_data"] = sanitize_postgres_strings(session_dict["team_data"])
+ if session_dict.get("workflow_data"):
+ session_dict["workflow_data"] = sanitize_postgres_strings(session_dict["workflow_data"])
+ if session_dict.get("session_data"):
+ session_dict["session_data"] = sanitize_postgres_strings(session_dict["session_data"])
+ if session_dict.get("summary"):
+ session_dict["summary"] = sanitize_postgres_strings(session_dict["summary"])
+ if session_dict.get("metadata"):
+ session_dict["metadata"] = sanitize_postgres_strings(session_dict["metadata"])
+ if session_dict.get("runs"):
+ session_dict["runs"] = sanitize_postgres_strings(session_dict["runs"])

  if isinstance(session, AgentSession):
  async with self.async_session_factory() as sess, sess.begin():
@@ -929,22 +963,40 @@ class AsyncPostgresDb(AsyncBaseDb):
  table = await self._get_table(table_type="memories")

  async with self.async_session_factory() as sess, sess.begin():
+ # Filter out NULL topics and ensure topics is an array before extracting elements
+ # jsonb_typeof returns 'array' for JSONB arrays
+ conditions = [
+ table.c.topics.is_not(None),
+ func.jsonb_typeof(table.c.topics) == "array",
+ ]
+ if user_id is not None:
+ conditions.append(table.c.user_id == user_id)
+
  try:
- stmt = select(func.jsonb_array_elements_text(table.c.topics))
- if user_id is not None:
- stmt = stmt.where(table.c.user_id == user_id)
+ # jsonb_array_elements_text is a set-returning function that must be used with select_from
+ stmt = select(func.jsonb_array_elements_text(table.c.topics).label("topic"))
+ stmt = stmt.select_from(table)
+ stmt = stmt.where(and_(*conditions))
  result = await sess.execute(stmt)
  except ProgrammingError:
  # Retrying with json_array_elements_text. This works in older versions,
  # where the topics column was of type JSON instead of JSONB
- stmt = select(func.json_array_elements_text(table.c.topics))
+ # For JSON (not JSONB), we use json_typeof
+ json_conditions = [
+ table.c.topics.is_not(None),
+ func.json_typeof(table.c.topics) == "array",
+ ]
  if user_id is not None:
- stmt = stmt.where(table.c.user_id == user_id)
+ json_conditions.append(table.c.user_id == user_id)
+ stmt = select(func.json_array_elements_text(table.c.topics).label("topic"))
+ stmt = stmt.select_from(table)
+ stmt = stmt.where(and_(*json_conditions))
  result = await sess.execute(stmt)

  records = result.fetchall()
-
- return list(set([record[0] for record in records]))
+ # Extract topics from records - each record is a Row with a 'topic' attribute
+ topics = [record.topic for record in records if record.topic is not None]
+ return list(set(topics))

  except Exception as e:
  log_error(f"Exception reading from memory table: {e}")
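As the new inline comment notes, jsonb_array_elements_text() is a set-returning function, so the FROM clause has to be stated explicitly with select_from() rather than inferred from the selected column. A condensed sketch of the pattern (distinct_topics is an illustrative name; sess and table stand in for the objects used by this method), with the approximate SQL in comments:

from sqlalchemy import and_, func, select

async def distinct_topics(sess, table):
    # Approximately:
    #   SELECT jsonb_array_elements_text(memories.topics) AS topic
    #   FROM memories
    #   WHERE topics IS NOT NULL AND jsonb_typeof(topics) = 'array'
    stmt = (
        select(func.jsonb_array_elements_text(table.c.topics).label("topic"))
        .select_from(table)
        .where(
            and_(
                table.c.topics.is_not(None),
                func.jsonb_typeof(table.c.topics) == "array",
            )
        )
    )
    result = await sess.execute(stmt)
    return {row.topic for row in result.fetchall() if row.topic is not None}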
@@ -1259,16 +1311,26 @@ class AsyncPostgresDb(AsyncBaseDb):

  # Serialize content, categories, and notes into a JSON dict for DB storage
  content_dict = serialize_cultural_knowledge(cultural_knowledge)
+ # Sanitize content_dict to remove null bytes from nested strings
+ if content_dict:
+ content_dict = cast(Dict[str, Any], sanitize_postgres_strings(content_dict))
+
+ # Sanitize string fields to remove null bytes (PostgreSQL doesn't allow them)
+ sanitized_name = sanitize_postgres_string(cultural_knowledge.name)
+ sanitized_summary = sanitize_postgres_string(cultural_knowledge.summary)
+ sanitized_input = sanitize_postgres_string(cultural_knowledge.input)

  async with self.async_session_factory() as sess, sess.begin():
  # Use PostgreSQL-specific insert with on_conflict_do_update
  insert_stmt = postgresql.insert(table).values(
  id=cultural_knowledge.id,
- name=cultural_knowledge.name,
- summary=cultural_knowledge.summary,
+ name=sanitized_name,
+ summary=sanitized_summary,
  content=content_dict if content_dict else None,
- metadata=cultural_knowledge.metadata,
- input=cultural_knowledge.input,
+ metadata=sanitize_postgres_strings(cultural_knowledge.metadata)
+ if cultural_knowledge.metadata
+ else None,
+ input=sanitized_input,
  created_at=cultural_knowledge.created_at,
  updated_at=int(time.time()),
  agent_id=cultural_knowledge.agent_id,
@@ -1277,11 +1339,13 @@ class AsyncPostgresDb(AsyncBaseDb):

  # Update all fields except id on conflict
  update_dict = {
- "name": cultural_knowledge.name,
- "summary": cultural_knowledge.summary,
+ "name": sanitized_name,
+ "summary": sanitized_summary,
  "content": content_dict if content_dict else None,
- "metadata": cultural_knowledge.metadata,
- "input": cultural_knowledge.input,
+ "metadata": sanitize_postgres_strings(cultural_knowledge.metadata)
+ if cultural_knowledge.metadata
+ else None,
+ "input": sanitized_input,
  "updated_at": int(time.time()),
  "agent_id": cultural_knowledge.agent_id,
  "team_id": cultural_knowledge.team_id,
@@ -1399,6 +1463,13 @@ class AsyncPostgresDb(AsyncBaseDb):

  current_time = int(time.time())

+ # Sanitize string fields to remove null bytes (PostgreSQL doesn't allow them)
+ sanitized_input = sanitize_postgres_string(memory.input)
+ sanitized_feedback = sanitize_postgres_string(memory.feedback)
+ # Sanitize JSONB fields to remove null bytes from nested strings
+ sanitized_memory = sanitize_postgres_strings(memory.memory) if memory.memory else None
+ sanitized_topics = sanitize_postgres_strings(memory.topics) if memory.topics else None
+
  async with self.async_session_factory() as sess:
  async with sess.begin():
  if memory.memory_id is None:
@@ -1406,25 +1477,27 @@ class AsyncPostgresDb(AsyncBaseDb):

  stmt = postgresql.insert(table).values(
  memory_id=memory.memory_id,
- memory=memory.memory,
- input=memory.input,
+ memory=sanitized_memory,
+ input=sanitized_input,
  user_id=memory.user_id,
  agent_id=memory.agent_id,
  team_id=memory.team_id,
- topics=memory.topics,
- feedback=memory.feedback,
+ topics=sanitized_topics,
+ feedback=sanitized_feedback,
  created_at=memory.created_at,
- updated_at=memory.created_at,
+ updated_at=memory.updated_at
+ if memory.updated_at is not None
+ else (memory.created_at if memory.created_at is not None else current_time),
  )
  stmt = stmt.on_conflict_do_update( # type: ignore
  index_elements=["memory_id"],
  set_=dict(
- memory=memory.memory,
- topics=memory.topics,
- input=memory.input,
+ memory=sanitized_memory,
+ topics=sanitized_topics,
+ input=sanitized_input,
  agent_id=memory.agent_id,
  team_id=memory.team_id,
- feedback=memory.feedback,
+ feedback=sanitized_feedback,
  updated_at=current_time,
  # Preserve created_at on update - don't overwrite existing value
  created_at=table.c.created_at,
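Two things change in this upsert: the sanitized values are used on both the insert and the conflict-update path, and updated_at now falls back from memory.updated_at to memory.created_at to the current time instead of always reusing created_at. The surrounding ON CONFLICT pattern, reduced to its essentials (build_memory_upsert is an illustrative name, not part of the package):

from sqlalchemy.dialects import postgresql

def build_memory_upsert(table, values: dict):
    # Sketch: insert the row; on a memory_id conflict update everything except
    # created_at, which is re-asserted from the existing row so the original
    # insert time is never overwritten.
    stmt = postgresql.insert(table).values(**values)
    return stmt.on_conflict_do_update(
        index_elements=["memory_id"],
        set_={
            **{k: v for k, v in values.items() if k not in ("memory_id", "created_at")},
            "created_at": table.c.created_at,  # preserve the stored value on update
        },
    )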
@@ -1538,7 +1611,7 @@ class AsyncPostgresDb(AsyncBaseDb):
  Exception: If an error occurs during metrics calculation.
  """
  try:
- table = await self._get_table(table_type="metrics")
+ table = await self._get_table(table_type="metrics", create_table_if_not_found=True)

  starting_date = await self._get_metrics_calculation_starting_date(table)

@@ -1614,7 +1687,7 @@ class AsyncPostgresDb(AsyncBaseDb):
  Exception: If an error occurs during retrieval.
  """
  try:
- table = await self._get_table(table_type="metrics")
+ table = await self._get_table(table_type="metrics", create_table_if_not_found=True)

  async with self.async_session_factory() as sess, sess.begin():
  stmt = select(table)
@@ -1664,7 +1737,7 @@ class AsyncPostgresDb(AsyncBaseDb):
  Returns:
  Optional[KnowledgeRow]: The knowledge row, or None if it doesn't exist.
  """
- table = await self._get_table(table_type="knowledge")
+ table = await self._get_table(table_type="knowledge", create_table_if_not_found=True)

  try:
  async with self.async_session_factory() as sess, sess.begin():
@@ -1708,8 +1781,7 @@ class AsyncPostgresDb(AsyncBaseDb):
  stmt = select(table)

  # Apply sorting
- if sort_by is not None:
- stmt = stmt.order_by(getattr(table.c, sort_by) * (1 if sort_order == "asc" else -1))
+ stmt = apply_sorting(stmt, table, sort_by, sort_order)

  # Get total count before applying limit and pagination
  count_stmt = select(func.count()).select_from(stmt.alias())
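The old inline sort multiplied the column by ±1, which only behaves for numeric columns; the new code delegates to an apply_sorting helper whose implementation is not shown in this diff. A hypothetical sketch of what such a helper commonly does, assuming the same (stmt, table, sort_by, sort_order) signature:

from sqlalchemy import asc, desc

def apply_sorting(stmt, table, sort_by=None, sort_order="asc"):
    # Hypothetical sketch: order by the named column via asc()/desc(), which also
    # works for text and timestamp columns, unlike multiplying the column by -1.
    if sort_by is None or not hasattr(table.c, sort_by):
        return stmt
    column = getattr(table.c, sort_by)
    return stmt.order_by(desc(column) if sort_order == "desc" else asc(column))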
@@ -1766,10 +1838,19 @@ class AsyncPostgresDb(AsyncBaseDb):
  }

  # Build insert and update data only for fields that exist in the table
+ # String fields that need sanitization
+ string_fields = {"name", "description", "type", "status", "status_message", "external_id", "linked_to"}
+
  for model_field, table_column in field_mapping.items():
  if table_column in table_columns:
  value = getattr(knowledge_row, model_field, None)
  if value is not None:
+ # Sanitize string fields to remove null bytes
+ if table_column in string_fields and isinstance(value, str):
+ value = sanitize_postgres_string(value)
+ # Sanitize metadata dict if present
+ elif table_column == "metadata" and isinstance(value, dict):
+ value = sanitize_postgres_strings(value)
  insert_data[table_column] = value
  # Don't include ID in update_fields since it's the primary key
  if table_column != "id":
@@ -1820,12 +1901,26 @@ class AsyncPostgresDb(AsyncBaseDb):
  Exception: If an error occurs during creation.
  """
  try:
- table = await self._get_table(table_type="evals")
+ table = await self._get_table(table_type="evals", create_table_if_not_found=True)

  async with self.async_session_factory() as sess, sess.begin():
  current_time = int(time.time())
+ eval_data = eval_run.model_dump()
+ # Sanitize string fields in eval_run
+ if eval_data.get("name"):
+ eval_data["name"] = sanitize_postgres_string(eval_data["name"])
+ if eval_data.get("evaluated_component_name"):
+ eval_data["evaluated_component_name"] = sanitize_postgres_string(
+ eval_data["evaluated_component_name"]
+ )
+ # Sanitize nested dicts/JSON fields
+ if eval_data.get("eval_data"):
+ eval_data["eval_data"] = sanitize_postgres_strings(eval_data["eval_data"])
+ if eval_data.get("eval_input"):
+ eval_data["eval_input"] = sanitize_postgres_strings(eval_data["eval_input"])
+
  stmt = postgresql.insert(table).values(
- {"created_at": current_time, "updated_at": current_time, **eval_run.model_dump()}
+ {"created_at": current_time, "updated_at": current_time, **eval_data}
  )
  await sess.execute(stmt)

@@ -2027,8 +2122,12 @@ class AsyncPostgresDb(AsyncBaseDb):
  try:
  table = await self._get_table(table_type="evals")
  async with self.async_session_factory() as sess, sess.begin():
+ # Sanitize string field to remove null bytes
+ sanitized_name = sanitize_postgres_string(name)
  stmt = (
- table.update().where(table.c.run_id == eval_run_id).values(name=name, updated_at=int(time.time()))
+ table.update()
+ .where(table.c.run_id == eval_run_id)
+ .values(name=sanitized_name, updated_at=int(time.time()))
  )
  await sess.execute(stmt)

@@ -2176,6 +2275,13 @@ class AsyncPostgresDb(AsyncBaseDb):
  trace_dict = trace.to_dict()
  trace_dict.pop("total_spans", None)
  trace_dict.pop("error_count", None)
+ # Sanitize string fields and nested JSON structures
+ if trace_dict.get("name"):
+ trace_dict["name"] = sanitize_postgres_string(trace_dict["name"])
+ if trace_dict.get("status"):
+ trace_dict["status"] = sanitize_postgres_string(trace_dict["status"])
+ # Sanitize any nested dict/JSON fields
+ trace_dict = cast(Dict[str, Any], sanitize_postgres_strings(trace_dict))

  async with self.async_session_factory() as sess, sess.begin():
  # Use upsert to handle concurrent inserts atomically
@@ -2494,7 +2600,15 @@ class AsyncPostgresDb(AsyncBaseDb):
  table = await self._get_table(table_type="spans", create_table_if_not_found=True)

  async with self.async_session_factory() as sess, sess.begin():
- stmt = postgresql.insert(table).values(span.to_dict())
+ span_dict = span.to_dict()
+ # Sanitize string fields and nested JSON structures
+ if span_dict.get("name"):
+ span_dict["name"] = sanitize_postgres_string(span_dict["name"])
+ if span_dict.get("status_code"):
+ span_dict["status_code"] = sanitize_postgres_string(span_dict["status_code"])
+ # Sanitize any nested dict/JSON fields
+ span_dict = cast(Dict[str, Any], sanitize_postgres_strings(span_dict))
+ stmt = postgresql.insert(table).values(span_dict)
  await sess.execute(stmt)

  except Exception as e:
@@ -2514,7 +2628,15 @@ class AsyncPostgresDb(AsyncBaseDb):

  async with self.async_session_factory() as sess, sess.begin():
  for span in spans:
- stmt = postgresql.insert(table).values(span.to_dict())
+ span_dict = span.to_dict()
+ # Sanitize string fields and nested JSON structures
+ if span_dict.get("name"):
+ span_dict["name"] = sanitize_postgres_string(span_dict["name"])
+ if span_dict.get("status_code"):
+ span_dict["status_code"] = sanitize_postgres_string(span_dict["status_code"])
+ # Sanitize any nested dict/JSON fields
+ span_dict = sanitize_postgres_strings(span_dict)
+ stmt = postgresql.insert(table).values(span_dict)
  await sess.execute(stmt)

  except Exception as e: