agno 2.1.2__py3-none-any.whl → 2.3.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/agent/agent.py +5540 -2273
- agno/api/api.py +2 -0
- agno/api/os.py +1 -1
- agno/compression/__init__.py +3 -0
- agno/compression/manager.py +247 -0
- agno/culture/__init__.py +3 -0
- agno/culture/manager.py +956 -0
- agno/db/async_postgres/__init__.py +3 -0
- agno/db/base.py +689 -6
- agno/db/dynamo/dynamo.py +933 -37
- agno/db/dynamo/schemas.py +174 -10
- agno/db/dynamo/utils.py +63 -4
- agno/db/firestore/firestore.py +831 -9
- agno/db/firestore/schemas.py +51 -0
- agno/db/firestore/utils.py +102 -4
- agno/db/gcs_json/gcs_json_db.py +660 -12
- agno/db/gcs_json/utils.py +60 -26
- agno/db/in_memory/in_memory_db.py +287 -14
- agno/db/in_memory/utils.py +60 -2
- agno/db/json/json_db.py +590 -14
- agno/db/json/utils.py +60 -26
- agno/db/migrations/manager.py +199 -0
- agno/db/migrations/v1_to_v2.py +43 -13
- agno/db/migrations/versions/__init__.py +0 -0
- agno/db/migrations/versions/v2_3_0.py +938 -0
- agno/db/mongo/__init__.py +15 -1
- agno/db/mongo/async_mongo.py +2760 -0
- agno/db/mongo/mongo.py +879 -11
- agno/db/mongo/schemas.py +42 -0
- agno/db/mongo/utils.py +80 -8
- agno/db/mysql/__init__.py +2 -1
- agno/db/mysql/async_mysql.py +2912 -0
- agno/db/mysql/mysql.py +946 -68
- agno/db/mysql/schemas.py +72 -10
- agno/db/mysql/utils.py +198 -7
- agno/db/postgres/__init__.py +2 -1
- agno/db/postgres/async_postgres.py +2579 -0
- agno/db/postgres/postgres.py +942 -57
- agno/db/postgres/schemas.py +81 -18
- agno/db/postgres/utils.py +164 -2
- agno/db/redis/redis.py +671 -7
- agno/db/redis/schemas.py +50 -0
- agno/db/redis/utils.py +65 -7
- agno/db/schemas/__init__.py +2 -1
- agno/db/schemas/culture.py +120 -0
- agno/db/schemas/evals.py +1 -0
- agno/db/schemas/memory.py +17 -2
- agno/db/singlestore/schemas.py +63 -0
- agno/db/singlestore/singlestore.py +949 -83
- agno/db/singlestore/utils.py +60 -2
- agno/db/sqlite/__init__.py +2 -1
- agno/db/sqlite/async_sqlite.py +2911 -0
- agno/db/sqlite/schemas.py +62 -0
- agno/db/sqlite/sqlite.py +965 -46
- agno/db/sqlite/utils.py +169 -8
- agno/db/surrealdb/__init__.py +3 -0
- agno/db/surrealdb/metrics.py +292 -0
- agno/db/surrealdb/models.py +334 -0
- agno/db/surrealdb/queries.py +71 -0
- agno/db/surrealdb/surrealdb.py +1908 -0
- agno/db/surrealdb/utils.py +147 -0
- agno/db/utils.py +2 -0
- agno/eval/__init__.py +10 -0
- agno/eval/accuracy.py +75 -55
- agno/eval/agent_as_judge.py +861 -0
- agno/eval/base.py +29 -0
- agno/eval/performance.py +16 -7
- agno/eval/reliability.py +28 -16
- agno/eval/utils.py +35 -17
- agno/exceptions.py +27 -2
- agno/filters.py +354 -0
- agno/guardrails/prompt_injection.py +1 -0
- agno/hooks/__init__.py +3 -0
- agno/hooks/decorator.py +164 -0
- agno/integrations/discord/client.py +1 -1
- agno/knowledge/chunking/agentic.py +13 -10
- agno/knowledge/chunking/fixed.py +4 -1
- agno/knowledge/chunking/semantic.py +9 -4
- agno/knowledge/chunking/strategy.py +59 -15
- agno/knowledge/embedder/fastembed.py +1 -1
- agno/knowledge/embedder/nebius.py +1 -1
- agno/knowledge/embedder/ollama.py +8 -0
- agno/knowledge/embedder/openai.py +8 -8
- agno/knowledge/embedder/sentence_transformer.py +6 -2
- agno/knowledge/embedder/vllm.py +262 -0
- agno/knowledge/knowledge.py +1618 -318
- agno/knowledge/reader/base.py +6 -2
- agno/knowledge/reader/csv_reader.py +8 -10
- agno/knowledge/reader/docx_reader.py +5 -6
- agno/knowledge/reader/field_labeled_csv_reader.py +16 -20
- agno/knowledge/reader/json_reader.py +5 -4
- agno/knowledge/reader/markdown_reader.py +8 -8
- agno/knowledge/reader/pdf_reader.py +17 -19
- agno/knowledge/reader/pptx_reader.py +101 -0
- agno/knowledge/reader/reader_factory.py +32 -3
- agno/knowledge/reader/s3_reader.py +3 -3
- agno/knowledge/reader/tavily_reader.py +193 -0
- agno/knowledge/reader/text_reader.py +22 -10
- agno/knowledge/reader/web_search_reader.py +1 -48
- agno/knowledge/reader/website_reader.py +10 -10
- agno/knowledge/reader/wikipedia_reader.py +33 -1
- agno/knowledge/types.py +1 -0
- agno/knowledge/utils.py +72 -7
- agno/media.py +22 -6
- agno/memory/__init__.py +14 -1
- agno/memory/manager.py +544 -83
- agno/memory/strategies/__init__.py +15 -0
- agno/memory/strategies/base.py +66 -0
- agno/memory/strategies/summarize.py +196 -0
- agno/memory/strategies/types.py +37 -0
- agno/models/aimlapi/aimlapi.py +17 -0
- agno/models/anthropic/claude.py +515 -40
- agno/models/aws/bedrock.py +102 -21
- agno/models/aws/claude.py +131 -274
- agno/models/azure/ai_foundry.py +41 -19
- agno/models/azure/openai_chat.py +39 -8
- agno/models/base.py +1249 -525
- agno/models/cerebras/cerebras.py +91 -21
- agno/models/cerebras/cerebras_openai.py +21 -2
- agno/models/cohere/chat.py +40 -6
- agno/models/cometapi/cometapi.py +18 -1
- agno/models/dashscope/dashscope.py +2 -3
- agno/models/deepinfra/deepinfra.py +18 -1
- agno/models/deepseek/deepseek.py +69 -3
- agno/models/fireworks/fireworks.py +18 -1
- agno/models/google/gemini.py +877 -80
- agno/models/google/utils.py +22 -0
- agno/models/groq/groq.py +51 -18
- agno/models/huggingface/huggingface.py +17 -6
- agno/models/ibm/watsonx.py +16 -6
- agno/models/internlm/internlm.py +18 -1
- agno/models/langdb/langdb.py +13 -1
- agno/models/litellm/chat.py +44 -9
- agno/models/litellm/litellm_openai.py +18 -1
- agno/models/message.py +28 -5
- agno/models/meta/llama.py +47 -14
- agno/models/meta/llama_openai.py +22 -17
- agno/models/mistral/mistral.py +8 -4
- agno/models/nebius/nebius.py +6 -7
- agno/models/nvidia/nvidia.py +20 -3
- agno/models/ollama/chat.py +24 -8
- agno/models/openai/chat.py +104 -29
- agno/models/openai/responses.py +101 -81
- agno/models/openrouter/openrouter.py +60 -3
- agno/models/perplexity/perplexity.py +17 -1
- agno/models/portkey/portkey.py +7 -6
- agno/models/requesty/requesty.py +24 -4
- agno/models/response.py +73 -2
- agno/models/sambanova/sambanova.py +20 -3
- agno/models/siliconflow/siliconflow.py +19 -2
- agno/models/together/together.py +20 -3
- agno/models/utils.py +254 -8
- agno/models/vercel/v0.py +20 -3
- agno/models/vertexai/__init__.py +0 -0
- agno/models/vertexai/claude.py +190 -0
- agno/models/vllm/vllm.py +19 -14
- agno/models/xai/xai.py +19 -2
- agno/os/app.py +549 -152
- agno/os/auth.py +190 -3
- agno/os/config.py +23 -0
- agno/os/interfaces/a2a/router.py +8 -11
- agno/os/interfaces/a2a/utils.py +1 -1
- agno/os/interfaces/agui/router.py +18 -3
- agno/os/interfaces/agui/utils.py +152 -39
- agno/os/interfaces/slack/router.py +55 -37
- agno/os/interfaces/slack/slack.py +9 -1
- agno/os/interfaces/whatsapp/router.py +0 -1
- agno/os/interfaces/whatsapp/security.py +3 -1
- agno/os/mcp.py +110 -52
- agno/os/middleware/__init__.py +2 -0
- agno/os/middleware/jwt.py +676 -112
- agno/os/router.py +40 -1478
- agno/os/routers/agents/__init__.py +3 -0
- agno/os/routers/agents/router.py +599 -0
- agno/os/routers/agents/schema.py +261 -0
- agno/os/routers/evals/evals.py +96 -39
- agno/os/routers/evals/schemas.py +65 -33
- agno/os/routers/evals/utils.py +80 -10
- agno/os/routers/health.py +10 -4
- agno/os/routers/knowledge/knowledge.py +196 -38
- agno/os/routers/knowledge/schemas.py +82 -22
- agno/os/routers/memory/memory.py +279 -52
- agno/os/routers/memory/schemas.py +46 -17
- agno/os/routers/metrics/metrics.py +20 -8
- agno/os/routers/metrics/schemas.py +16 -16
- agno/os/routers/session/session.py +462 -34
- agno/os/routers/teams/__init__.py +3 -0
- agno/os/routers/teams/router.py +512 -0
- agno/os/routers/teams/schema.py +257 -0
- agno/os/routers/traces/__init__.py +3 -0
- agno/os/routers/traces/schemas.py +414 -0
- agno/os/routers/traces/traces.py +499 -0
- agno/os/routers/workflows/__init__.py +3 -0
- agno/os/routers/workflows/router.py +624 -0
- agno/os/routers/workflows/schema.py +75 -0
- agno/os/schema.py +256 -693
- agno/os/scopes.py +469 -0
- agno/os/utils.py +514 -36
- agno/reasoning/anthropic.py +80 -0
- agno/reasoning/gemini.py +73 -0
- agno/reasoning/openai.py +5 -0
- agno/reasoning/vertexai.py +76 -0
- agno/run/__init__.py +6 -0
- agno/run/agent.py +155 -32
- agno/run/base.py +55 -3
- agno/run/requirement.py +181 -0
- agno/run/team.py +125 -38
- agno/run/workflow.py +72 -18
- agno/session/agent.py +102 -89
- agno/session/summary.py +56 -15
- agno/session/team.py +164 -90
- agno/session/workflow.py +405 -40
- agno/table.py +10 -0
- agno/team/team.py +3974 -1903
- agno/tools/dalle.py +2 -4
- agno/tools/eleven_labs.py +23 -25
- agno/tools/exa.py +21 -16
- agno/tools/file.py +153 -23
- agno/tools/file_generation.py +16 -10
- agno/tools/firecrawl.py +15 -7
- agno/tools/function.py +193 -38
- agno/tools/gmail.py +238 -14
- agno/tools/google_drive.py +271 -0
- agno/tools/googlecalendar.py +36 -8
- agno/tools/googlesheets.py +20 -5
- agno/tools/jira.py +20 -0
- agno/tools/mcp/__init__.py +10 -0
- agno/tools/mcp/mcp.py +331 -0
- agno/tools/mcp/multi_mcp.py +347 -0
- agno/tools/mcp/params.py +24 -0
- agno/tools/mcp_toolbox.py +3 -3
- agno/tools/models/nebius.py +5 -5
- agno/tools/models_labs.py +20 -10
- agno/tools/nano_banana.py +151 -0
- agno/tools/notion.py +204 -0
- agno/tools/parallel.py +314 -0
- agno/tools/postgres.py +76 -36
- agno/tools/redshift.py +406 -0
- agno/tools/scrapegraph.py +1 -1
- agno/tools/shopify.py +1519 -0
- agno/tools/slack.py +18 -3
- agno/tools/spotify.py +919 -0
- agno/tools/tavily.py +146 -0
- agno/tools/toolkit.py +25 -0
- agno/tools/workflow.py +8 -1
- agno/tools/yfinance.py +12 -11
- agno/tracing/__init__.py +12 -0
- agno/tracing/exporter.py +157 -0
- agno/tracing/schemas.py +276 -0
- agno/tracing/setup.py +111 -0
- agno/utils/agent.py +938 -0
- agno/utils/cryptography.py +22 -0
- agno/utils/dttm.py +33 -0
- agno/utils/events.py +151 -3
- agno/utils/gemini.py +15 -5
- agno/utils/hooks.py +118 -4
- agno/utils/http.py +113 -2
- agno/utils/knowledge.py +12 -5
- agno/utils/log.py +1 -0
- agno/utils/mcp.py +92 -2
- agno/utils/media.py +187 -1
- agno/utils/merge_dict.py +3 -3
- agno/utils/message.py +60 -0
- agno/utils/models/ai_foundry.py +9 -2
- agno/utils/models/claude.py +49 -14
- agno/utils/models/cohere.py +9 -2
- agno/utils/models/llama.py +9 -2
- agno/utils/models/mistral.py +4 -2
- agno/utils/print_response/agent.py +109 -16
- agno/utils/print_response/team.py +223 -30
- agno/utils/print_response/workflow.py +251 -34
- agno/utils/streamlit.py +1 -1
- agno/utils/team.py +98 -9
- agno/utils/tokens.py +657 -0
- agno/vectordb/base.py +39 -7
- agno/vectordb/cassandra/cassandra.py +21 -5
- agno/vectordb/chroma/chromadb.py +43 -12
- agno/vectordb/clickhouse/clickhousedb.py +21 -5
- agno/vectordb/couchbase/couchbase.py +29 -5
- agno/vectordb/lancedb/lance_db.py +92 -181
- agno/vectordb/langchaindb/langchaindb.py +24 -4
- agno/vectordb/lightrag/lightrag.py +17 -3
- agno/vectordb/llamaindex/llamaindexdb.py +25 -5
- agno/vectordb/milvus/milvus.py +50 -37
- agno/vectordb/mongodb/__init__.py +7 -1
- agno/vectordb/mongodb/mongodb.py +36 -30
- agno/vectordb/pgvector/pgvector.py +201 -77
- agno/vectordb/pineconedb/pineconedb.py +41 -23
- agno/vectordb/qdrant/qdrant.py +67 -54
- agno/vectordb/redis/__init__.py +9 -0
- agno/vectordb/redis/redisdb.py +682 -0
- agno/vectordb/singlestore/singlestore.py +50 -29
- agno/vectordb/surrealdb/surrealdb.py +31 -41
- agno/vectordb/upstashdb/upstashdb.py +34 -6
- agno/vectordb/weaviate/weaviate.py +53 -14
- agno/workflow/__init__.py +2 -0
- agno/workflow/agent.py +299 -0
- agno/workflow/condition.py +120 -18
- agno/workflow/loop.py +77 -10
- agno/workflow/parallel.py +231 -143
- agno/workflow/router.py +118 -17
- agno/workflow/step.py +609 -170
- agno/workflow/steps.py +73 -6
- agno/workflow/types.py +96 -21
- agno/workflow/workflow.py +2039 -262
- {agno-2.1.2.dist-info → agno-2.3.13.dist-info}/METADATA +201 -66
- agno-2.3.13.dist-info/RECORD +613 -0
- agno/tools/googlesearch.py +0 -98
- agno/tools/mcp.py +0 -679
- agno/tools/memori.py +0 -339
- agno-2.1.2.dist-info/RECORD +0 -543
- {agno-2.1.2.dist-info → agno-2.3.13.dist-info}/WHEEL +0 -0
- {agno-2.1.2.dist-info → agno-2.3.13.dist-info}/licenses/LICENSE +0 -0
- {agno-2.1.2.dist-info → agno-2.3.13.dist-info}/top_level.txt +0 -0
agno/tools/notion.py
ADDED
|
@@ -0,0 +1,204 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import os
|
|
3
|
+
from typing import Any, Dict, List, Optional, cast
|
|
4
|
+
|
|
5
|
+
from agno.tools import Toolkit
|
|
6
|
+
from agno.utils.log import log_debug, logger
|
|
7
|
+
|
|
8
|
+
try:
|
|
9
|
+
from notion_client import Client
|
|
10
|
+
except ImportError:
|
|
11
|
+
raise ImportError("`notion-client` not installed. Please install using `pip install notion-client`")
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class NotionTools(Toolkit):
    """
    Notion toolkit for creating and managing Notion pages.

    All tools return JSON strings so the results can be handed straight back
    to a model.

    Args:
        api_key (Optional[str]): Notion API key (integration token). If not provided, uses NOTION_API_KEY env var.
        database_id (Optional[str]): The ID of the database to work with. If not provided, uses NOTION_DATABASE_ID env var.
        enable_create_page (bool): Enable creating pages. Default is True.
        enable_update_page (bool): Enable updating pages. Default is True.
        enable_search_pages (bool): Enable searching pages. Default is True.
        all (bool): Enable all tools. Overrides individual flags when True. Default is False.

    Raises:
        ValueError: If no API key or database ID can be resolved.
    """

    def __init__(
        self,
        api_key: Optional[str] = None,
        database_id: Optional[str] = None,
        enable_create_page: bool = True,
        enable_update_page: bool = True,
        enable_search_pages: bool = True,
        all: bool = False,
        **kwargs,
    ):
        # Explicit arguments take precedence over environment variables.
        self.api_key = api_key or os.getenv("NOTION_API_KEY")
        self.database_id = database_id or os.getenv("NOTION_DATABASE_ID")

        if not self.api_key:
            raise ValueError(
                "Notion API key is required. Either pass api_key parameter or set NOTION_API_KEY environment variable."
            )
        if not self.database_id:
            raise ValueError(
                "Notion database ID is required. Either pass database_id parameter or set NOTION_DATABASE_ID environment variable."
            )

        self.client = Client(auth=self.api_key)

        # Register only the enabled tools; `all=True` overrides the flags.
        tools: List[Any] = []
        if all or enable_create_page:
            tools.append(self.create_page)
        if all or enable_update_page:
            tools.append(self.update_page)
        if all or enable_search_pages:
            tools.append(self.search_pages)

        super().__init__(name="notion_tools", tools=tools, **kwargs)

    def create_page(self, title: str, tag: str, content: str) -> str:
        """Create a new page in the Notion database with a title, tag, and content.

        Args:
            title (str): The title of the page
            tag (str): The tag/category for the page (e.g., travel, tech, general-blogs, fashion, documents)
            content (str): The content to add to the page

        Returns:
            str: JSON string with page creation details, or
                 {"success": False, "error": ...} on failure.
        """
        try:
            log_debug(f"Creating Notion page with title: {title}, tag: {tag}")

            # Create the page in the database with one paragraph block of content.
            new_page = cast(
                Dict[str, Any],
                self.client.pages.create(
                    parent={"database_id": self.database_id},
                    properties={"Name": {"title": [{"text": {"content": title}}]}, "Tag": {"select": {"name": tag}}},
                    children=[
                        {
                            "object": "block",
                            "type": "paragraph",
                            "paragraph": {"rich_text": [{"type": "text", "text": {"content": content}}]},
                        }
                    ],
                ),
            )

            result = {"success": True, "page_id": new_page["id"], "url": new_page["url"], "title": title, "tag": tag}
            return json.dumps(result, indent=2)

        except Exception as e:
            logger.exception(e)
            return json.dumps({"success": False, "error": str(e)})

    def update_page(self, page_id: str, content: str) -> str:
        """Add content to an existing Notion page.

        Args:
            page_id (str): The ID of the page to update
            content (str): The content to append to the page

        Returns:
            str: JSON string with update status
        """
        try:
            log_debug(f"Updating Notion page: {page_id}")

            # Append the content as a new paragraph block at the end of the page.
            self.client.blocks.children.append(
                block_id=page_id,
                children=[
                    {
                        "object": "block",
                        "type": "paragraph",
                        "paragraph": {"rich_text": [{"type": "text", "text": {"content": content}}]},
                    }
                ],
            )

            result = {"success": True, "page_id": page_id, "message": "Content added successfully"}
            return json.dumps(result, indent=2)

        except Exception as e:
            logger.exception(e)
            return json.dumps({"success": False, "error": str(e)})

    def search_pages(self, tag: str) -> str:
        """Search for pages in the database by tag.

        Args:
            tag (str): The tag to search for

        Returns:
            str: JSON string with list of matching pages
        """
        try:
            log_debug(f"Searching for pages with tag: {tag}")

            import httpx

            headers = {
                "Authorization": f"Bearer {self.api_key}",
                "Notion-Version": "2022-06-28",
                "Content-Type": "application/json",
            }

            payload = {"filter": {"property": "Tag", "select": {"equals": tag}}}

            # The SDK client does not support the query method, so we call the
            # REST endpoint directly.
            response = httpx.post(
                f"https://api.notion.com/v1/databases/{self.database_id}/query",
                headers=headers,
                json=payload,
                timeout=30.0,
            )

            if response.status_code != 200:
                return json.dumps(
                    {
                        "success": False,
                        "error": f"API request failed with status {response.status_code}",
                        "message": response.text,
                    }
                )

            data = response.json()
            pages = []

            for page in data.get("results", []):
                try:
                    # Join every rich-text chunk and prefer "plain_text", which
                    # the Notion API includes on all rich-text variants (text,
                    # mention, equation). The previous title[0]["text"]["content"]
                    # read raised for non-"text" chunks (skipping the page) and
                    # truncated multi-chunk titles to the first chunk.
                    page_title = "Untitled"
                    title_parts = page.get("properties", {}).get("Name", {}).get("title") or []
                    if title_parts:
                        joined = "".join(
                            part.get("plain_text") or part.get("text", {}).get("content", "")
                            for part in title_parts
                        )
                        page_title = joined or "Untitled"

                    page_tag = None
                    if page.get("properties", {}).get("Tag", {}).get("select"):
                        page_tag = page["properties"]["Tag"]["select"]["name"]

                    page_info = {
                        "page_id": page["id"],
                        "title": page_title,
                        "tag": page_tag,
                        "url": page.get("url", ""),
                    }
                    pages.append(page_info)
                except Exception as page_error:
                    # Best-effort: skip pages that cannot be parsed rather than
                    # failing the whole search.
                    log_debug(f"Error parsing page: {page_error}")
                    continue

            result = {"success": True, "count": len(pages), "pages": pages}
            return json.dumps(result, indent=2)

        except Exception as e:
            logger.exception(e)
            return json.dumps(
                {
                    "success": False,
                    "error": str(e),
                    "message": "Failed to search pages. Make sure the database is shared with the integration and has a 'Tag' property.",
                }
            )
|
agno/tools/parallel.py
ADDED
|
@@ -0,0 +1,314 @@
|
|
|
1
|
+
import json
|
|
2
|
+
from os import getenv
|
|
3
|
+
from typing import Any, Dict, List, Optional
|
|
4
|
+
|
|
5
|
+
from agno.tools import Toolkit
|
|
6
|
+
from agno.utils.log import log_error
|
|
7
|
+
|
|
8
|
+
try:
|
|
9
|
+
from parallel import Parallel as ParallelClient
|
|
10
|
+
except ImportError:
|
|
11
|
+
raise ImportError("`parallel-web` not installed. Please install using `pip install parallel-web`")
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class CustomJSONEncoder(json.JSONEncoder):
    """JSON encoder that never fails on unknown types: anything the base
    encoder cannot serialize is emitted as its ``str()`` representation."""

    def default(self, obj):
        # Delegate to the stock implementation first; it raises TypeError for
        # unsupported objects, and those we stringify instead of propagating.
        try:
            return json.JSONEncoder.default(self, obj)
        except TypeError:
            return str(obj)
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class ParallelTools(Toolkit):
|
|
25
|
+
"""
|
|
26
|
+
ParallelTools provides access to Parallel's web search and extraction APIs.
|
|
27
|
+
|
|
28
|
+
Parallel offers powerful APIs optimized for AI agents:
|
|
29
|
+
- Search API: AI-optimized web search that returns relevant excerpts tailored for LLMs
|
|
30
|
+
- Extract API: Extract content from specific URLs in clean markdown format, handling JavaScript-heavy pages and PDFs
|
|
31
|
+
|
|
32
|
+
Args:
|
|
33
|
+
api_key (Optional[str]): Parallel API key. If not provided, will use PARALLEL_API_KEY environment variable.
|
|
34
|
+
enable_search (bool): Enable Search API functionality. Default is True.
|
|
35
|
+
enable_extract (bool): Enable Extract API functionality. Default is True.
|
|
36
|
+
all (bool): Enable all tools. Overrides individual flags when True. Default is False.
|
|
37
|
+
max_results (int): Default maximum number of results for search operations. Default is 10.
|
|
38
|
+
max_chars_per_result (int): Default maximum characters per result for search operations. Default is 10000.
|
|
39
|
+
beta_version (str): Beta API version header. Default is "search-extract-2025-10-10".
|
|
40
|
+
mode (Optional[str]): Default search mode. Options: "one-shot" or "agentic". Default is None.
|
|
41
|
+
include_domains (Optional[List[str]]): Default domains to restrict results to. Default is None.
|
|
42
|
+
exclude_domains (Optional[List[str]]): Default domains to exclude from results. Default is None.
|
|
43
|
+
max_age_seconds (Optional[int]): Default cache age threshold (minimum 600). Default is None.
|
|
44
|
+
timeout_seconds (Optional[float]): Default timeout for content retrieval. Default is None.
|
|
45
|
+
disable_cache_fallback (Optional[bool]): Default cache fallback behavior. Default is None.
|
|
46
|
+
"""
|
|
47
|
+
|
|
48
|
+
def __init__(
    self,
    api_key: Optional[str] = None,
    enable_search: bool = True,
    enable_extract: bool = True,
    all: bool = False,
    max_results: int = 10,
    max_chars_per_result: int = 10000,
    beta_version: str = "search-extract-2025-10-10",
    mode: Optional[str] = None,
    include_domains: Optional[List[str]] = None,
    exclude_domains: Optional[List[str]] = None,
    max_age_seconds: Optional[int] = None,
    timeout_seconds: Optional[float] = None,
    disable_cache_fallback: Optional[bool] = None,
    **kwargs,
):
    """Initialize the toolkit: resolve the API key, store per-instance search
    defaults, build the Parallel client, and register the enabled tools."""
    # Resolve the key from the argument or the environment. A missing key is
    # logged rather than raised, so construction still succeeds.
    resolved_key = api_key if api_key else getenv("PARALLEL_API_KEY")
    self.api_key: Optional[str] = resolved_key
    if not self.api_key:
        log_error("PARALLEL_API_KEY not set. Please set the PARALLEL_API_KEY environment variable.")

    # Defaults consumed later by parallel_search / parallel_extract.
    self.max_results = max_results
    self.max_chars_per_result = max_chars_per_result
    self.beta_version = beta_version
    self.mode = mode
    self.include_domains = include_domains
    self.exclude_domains = exclude_domains
    self.max_age_seconds = max_age_seconds
    self.timeout_seconds = timeout_seconds
    self.disable_cache_fallback = disable_cache_fallback

    # Pin every request to the beta API version via a default header.
    self.parallel_client = ParallelClient(
        api_key=self.api_key, default_headers={"parallel-beta": self.beta_version}
    )

    # Collect the tools the caller enabled; `all=True` overrides the flags.
    candidates = [
        (all or enable_search, self.parallel_search),
        (all or enable_extract, self.parallel_extract),
    ]
    tools: List[Any] = [tool for enabled, tool in candidates if enabled]

    super().__init__(name="parallel_tools", tools=tools, **kwargs)
|
|
90
|
+
|
|
91
|
+
def parallel_search(
    self,
    objective: Optional[str] = None,
    search_queries: Optional[List[str]] = None,
    max_results: Optional[int] = None,
    max_chars_per_result: Optional[int] = None,
) -> str:
    """Use this function to search the web using Parallel's Search API with a natural language objective.
    You must provide at least one of objective or search_queries.

    Per-call arguments override the defaults set on the toolkit constructor;
    mode, source_policy (include/exclude domains) and fetch_policy are taken
    from the constructor defaults only.

    Args:
        objective (Optional[str]): Natural-language description of what the web search is trying to find.
        search_queries (Optional[List[str]]): Traditional keyword queries with optional search operators.
        max_results (Optional[int]): Upper bound on results returned. Overrides constructor default.
        max_chars_per_result (Optional[int]): Upper bound on total characters per url for excerpts.

    Returns:
        str: A JSON formatted string containing the search results with URLs, titles, publish dates, and relevant excerpts.
            On failure, a JSON object with an "error" key.
    """
    try:
        # At least one form of query is required by the API.
        if not objective and not search_queries:
            return json.dumps({"error": "Please provide at least one of: objective or search_queries"}, indent=2)

        # Use instance defaults if not provided
        final_max_results = max_results if max_results is not None else self.max_results

        search_params: Dict[str, Any] = {
            "max_results": final_max_results,
        }

        # Add objective if provided
        if objective:
            search_params["objective"] = objective

        # Add search_queries if provided
        if search_queries:
            search_params["search_queries"] = search_queries

        # Add mode from constructor default
        if self.mode:
            search_params["mode"] = self.mode

        # Add excerpts configuration; the call-site value wins over the
        # constructor default (which is an int, so this is normally set).
        excerpts_config: Dict[str, Any] = {}
        final_max_chars = max_chars_per_result if max_chars_per_result is not None else self.max_chars_per_result
        if final_max_chars is not None:
            excerpts_config["max_chars_per_result"] = final_max_chars

        if excerpts_config:
            search_params["excerpts"] = excerpts_config

        # Add source_policy from constructor defaults (domain allow/deny lists);
        # omitted entirely when neither list is set.
        source_policy: Dict[str, Any] = {}
        if self.include_domains:
            source_policy["include_domains"] = self.include_domains
        if self.exclude_domains:
            source_policy["exclude_domains"] = self.exclude_domains

        if source_policy:
            search_params["source_policy"] = source_policy

        # Add fetch_policy from constructor defaults (cache age, timeout,
        # cache-fallback); omitted entirely when nothing is set.
        fetch_policy: Dict[str, Any] = {}
        if self.max_age_seconds is not None:
            fetch_policy["max_age_seconds"] = self.max_age_seconds
        if self.timeout_seconds is not None:
            fetch_policy["timeout_seconds"] = self.timeout_seconds
        if self.disable_cache_fallback is not None:
            fetch_policy["disable_cache_fallback"] = self.disable_cache_fallback

        if fetch_policy:
            search_params["fetch_policy"] = fetch_policy

        search_result = self.parallel_client.beta.search(**search_params)

        # Use model_dump() if available, otherwise convert to dict.
        # NOTE: serialization errors here are deliberately swallowed so we can
        # fall through to the manual attribute-by-attribute formatting below.
        try:
            if hasattr(search_result, "model_dump"):
                return json.dumps(search_result.model_dump(), cls=CustomJSONEncoder)
        except Exception:
            pass

        # Manually format the results; getattr defaults keep this tolerant of
        # missing attributes on the SDK response object.
        formatted_results: Dict[str, Any] = {
            "search_id": getattr(search_result, "search_id", ""),
            "results": [],
        }

        if hasattr(search_result, "results") and search_result.results:
            results_list: List[Dict[str, Any]] = []
            for result in search_result.results:
                formatted_result: Dict[str, Any] = {
                    "title": getattr(result, "title", ""),
                    "url": getattr(result, "url", ""),
                    "publish_date": getattr(result, "publish_date", ""),
                    "excerpt": getattr(result, "excerpt", ""),
                }
                results_list.append(formatted_result)
            formatted_results["results"] = results_list

        # Pass through optional metadata when the response exposes it.
        if hasattr(search_result, "warnings"):
            formatted_results["warnings"] = search_result.warnings

        if hasattr(search_result, "usage"):
            formatted_results["usage"] = search_result.usage

        return json.dumps(formatted_results, cls=CustomJSONEncoder, indent=2)

    except Exception as e:
        log_error(f"Error searching Parallel for objective '{objective}': {e}")
        return json.dumps({"error": f"Search failed: {str(e)}"}, indent=2)
|
|
202
|
+
|
|
203
|
+
def parallel_extract(
|
|
204
|
+
self,
|
|
205
|
+
urls: List[str],
|
|
206
|
+
objective: Optional[str] = None,
|
|
207
|
+
search_queries: Optional[List[str]] = None,
|
|
208
|
+
excerpts: bool = True,
|
|
209
|
+
max_chars_per_excerpt: Optional[int] = None,
|
|
210
|
+
full_content: bool = False,
|
|
211
|
+
max_chars_for_full_content: Optional[int] = None,
|
|
212
|
+
) -> str:
|
|
213
|
+
"""Use this function to extract content from specific URLs using Parallel's Extract API.
|
|
214
|
+
|
|
215
|
+
Args:
|
|
216
|
+
urls (List[str]): List of public URLs to extract content from.
|
|
217
|
+
objective (Optional[str]): Search focus to guide content extraction.
|
|
218
|
+
search_queries (Optional[List[str]]): Keywords for targeting relevant content.
|
|
219
|
+
excerpts (bool): Include relevant text snippets.
|
|
220
|
+
max_chars_per_excerpt (Optional[int]): Upper bound on total characters per url. Only used when excerpts is True.
|
|
221
|
+
full_content (bool): Include complete page text.
|
|
222
|
+
max_chars_for_full_content (Optional[int]): Limit on characters per url. Only used when full_content is True.
|
|
223
|
+
|
|
224
|
+
Returns:
|
|
225
|
+
str: A JSON formatted string containing extracted content with titles, publish dates, excerpts and/or full content.
|
|
226
|
+
"""
|
|
227
|
+
try:
|
|
228
|
+
if not urls:
|
|
229
|
+
return json.dumps({"error": "Please provide at least one URL to extract"}, indent=2)
|
|
230
|
+
|
|
231
|
+
extract_params: Dict[str, Any] = {
|
|
232
|
+
"urls": urls,
|
|
233
|
+
}
|
|
234
|
+
|
|
235
|
+
# Add objective if provided
|
|
236
|
+
if objective:
|
|
237
|
+
extract_params["objective"] = objective
|
|
238
|
+
|
|
239
|
+
# Add search_queries if provided
|
|
240
|
+
if search_queries:
|
|
241
|
+
extract_params["search_queries"] = search_queries
|
|
242
|
+
|
|
243
|
+
# Add excerpts configuration
|
|
244
|
+
if excerpts and max_chars_per_excerpt is not None:
|
|
245
|
+
extract_params["excerpts"] = {"max_chars_per_result": max_chars_per_excerpt}
|
|
246
|
+
else:
|
|
247
|
+
extract_params["excerpts"] = excerpts
|
|
248
|
+
|
|
249
|
+
# Add full_content configuration
|
|
250
|
+
if full_content and max_chars_for_full_content is not None:
|
|
251
|
+
extract_params["full_content"] = {"max_chars_per_result": max_chars_for_full_content}
|
|
252
|
+
else:
|
|
253
|
+
extract_params["full_content"] = full_content
|
|
254
|
+
|
|
255
|
+
# Add fetch_policy from constructor defaults
|
|
256
|
+
fetch_policy: Dict[str, Any] = {}
|
|
257
|
+
if self.max_age_seconds is not None:
|
|
258
|
+
fetch_policy["max_age_seconds"] = self.max_age_seconds
|
|
259
|
+
if self.timeout_seconds is not None:
|
|
260
|
+
fetch_policy["timeout_seconds"] = self.timeout_seconds
|
|
261
|
+
if self.disable_cache_fallback is not None:
|
|
262
|
+
fetch_policy["disable_cache_fallback"] = self.disable_cache_fallback
|
|
263
|
+
|
|
264
|
+
if fetch_policy:
|
|
265
|
+
extract_params["fetch_policy"] = fetch_policy
|
|
266
|
+
|
|
267
|
+
extract_result = self.parallel_client.beta.extract(**extract_params)
|
|
268
|
+
|
|
269
|
+
# Use model_dump() if available, otherwise convert to dict
|
|
270
|
+
try:
|
|
271
|
+
if hasattr(extract_result, "model_dump"):
|
|
272
|
+
return json.dumps(extract_result.model_dump(), cls=CustomJSONEncoder)
|
|
273
|
+
except Exception:
|
|
274
|
+
pass
|
|
275
|
+
|
|
276
|
+
# Manually format the results
|
|
277
|
+
formatted_results: Dict[str, Any] = {
|
|
278
|
+
"extract_id": getattr(extract_result, "extract_id", ""),
|
|
279
|
+
"results": [],
|
|
280
|
+
"errors": [],
|
|
281
|
+
}
|
|
282
|
+
|
|
283
|
+
if hasattr(extract_result, "results") and extract_result.results:
|
|
284
|
+
results_list: List[Dict[str, Any]] = []
|
|
285
|
+
for result in extract_result.results:
|
|
286
|
+
formatted_result: Dict[str, Any] = {
|
|
287
|
+
"url": getattr(result, "url", ""),
|
|
288
|
+
"title": getattr(result, "title", ""),
|
|
289
|
+
"publish_date": getattr(result, "publish_date", ""),
|
|
290
|
+
}
|
|
291
|
+
|
|
292
|
+
if excerpts and hasattr(result, "excerpts"):
|
|
293
|
+
formatted_result["excerpts"] = result.excerpts
|
|
294
|
+
|
|
295
|
+
if full_content and hasattr(result, "full_content"):
|
|
296
|
+
formatted_result["full_content"] = result.full_content
|
|
297
|
+
|
|
298
|
+
results_list.append(formatted_result)
|
|
299
|
+
formatted_results["results"] = results_list
|
|
300
|
+
|
|
301
|
+
if hasattr(extract_result, "errors") and extract_result.errors:
|
|
302
|
+
formatted_results["errors"] = extract_result.errors
|
|
303
|
+
|
|
304
|
+
if hasattr(extract_result, "warnings"):
|
|
305
|
+
formatted_results["warnings"] = extract_result.warnings
|
|
306
|
+
|
|
307
|
+
if hasattr(extract_result, "usage"):
|
|
308
|
+
formatted_results["usage"] = extract_result.usage
|
|
309
|
+
|
|
310
|
+
return json.dumps(formatted_results, cls=CustomJSONEncoder, indent=2)
|
|
311
|
+
|
|
312
|
+
except Exception as e:
|
|
313
|
+
log_error(f"Error extracting from Parallel: {e}")
|
|
314
|
+
return json.dumps({"error": f"Extract failed: {str(e)}"}, indent=2)
|
agno/tools/postgres.py
CHANGED
|
@@ -14,6 +14,21 @@ from agno.utils.log import log_debug, log_error
|
|
|
14
14
|
|
|
15
15
|
|
|
16
16
|
class PostgresTools(Toolkit):
|
|
17
|
+
"""
|
|
18
|
+
A toolkit for interacting with PostgreSQL databases.
|
|
19
|
+
|
|
20
|
+
Args:
|
|
21
|
+
connection (Optional[PgConnection[DictRow]]): Existing database connection to reuse.
|
|
22
|
+
db_name (Optional[str]): Database name to connect to.
|
|
23
|
+
user (Optional[str]): Username for authentication.
|
|
24
|
+
password (Optional[str]): Password for authentication.
|
|
25
|
+
host (Optional[str]): PostgreSQL server hostname.
|
|
26
|
+
port (Optional[int]): PostgreSQL server port number.
|
|
27
|
+
table_schema (str): Default schema for table operations. Default is "public".
|
|
28
|
+
"""
|
|
29
|
+
|
|
30
|
+
_requires_connect: bool = True
|
|
31
|
+
|
|
17
32
|
def __init__(
|
|
18
33
|
self,
|
|
19
34
|
connection: Optional[PgConnection[DictRow]] = None,
|
|
@@ -44,50 +59,71 @@ class PostgresTools(Toolkit):
|
|
|
44
59
|
|
|
45
60
|
super().__init__(name="postgres_tools", tools=tools, **kwargs)
|
|
46
61
|
|
|
47
|
-
|
|
48
|
-
def connection(self) -> PgConnection[DictRow]:
|
|
49
|
-
"""
|
|
50
|
-
Returns the Postgres psycopg connection.
|
|
51
|
-
:return psycopg.connection.Connection: psycopg connection
|
|
62
|
+
def connect(self) -> PgConnection[DictRow]:
|
|
52
63
|
"""
|
|
53
|
-
|
|
54
|
-
log_debug("Establishing new PostgreSQL connection.")
|
|
55
|
-
connection_kwargs: Dict[str, Any] = {"row_factory": dict_row}
|
|
56
|
-
if self.db_name:
|
|
57
|
-
connection_kwargs["dbname"] = self.db_name
|
|
58
|
-
if self.user:
|
|
59
|
-
connection_kwargs["user"] = self.user
|
|
60
|
-
if self.password:
|
|
61
|
-
connection_kwargs["password"] = self.password
|
|
62
|
-
if self.host:
|
|
63
|
-
connection_kwargs["host"] = self.host
|
|
64
|
-
if self.port:
|
|
65
|
-
connection_kwargs["port"] = self.port
|
|
66
|
-
|
|
67
|
-
connection_kwargs["options"] = f"-c search_path={self.table_schema}"
|
|
68
|
-
|
|
69
|
-
self._connection = psycopg.connect(**connection_kwargs)
|
|
70
|
-
self._connection.read_only = True
|
|
64
|
+
Establish a connection to the PostgreSQL database.
|
|
71
65
|
|
|
66
|
+
Returns:
|
|
67
|
+
The database connection object.
|
|
68
|
+
"""
|
|
69
|
+
if self._connection is not None and not self._connection.closed:
|
|
70
|
+
log_debug("Connection already established, reusing existing connection")
|
|
71
|
+
return self._connection
|
|
72
|
+
|
|
73
|
+
log_debug("Establishing new PostgreSQL connection.")
|
|
74
|
+
connection_kwargs: Dict[str, Any] = {"row_factory": dict_row}
|
|
75
|
+
if self.db_name:
|
|
76
|
+
connection_kwargs["dbname"] = self.db_name
|
|
77
|
+
if self.user:
|
|
78
|
+
connection_kwargs["user"] = self.user
|
|
79
|
+
if self.password:
|
|
80
|
+
connection_kwargs["password"] = self.password
|
|
81
|
+
if self.host:
|
|
82
|
+
connection_kwargs["host"] = self.host
|
|
83
|
+
if self.port:
|
|
84
|
+
connection_kwargs["port"] = self.port
|
|
85
|
+
|
|
86
|
+
connection_kwargs["options"] = f"-c search_path={self.table_schema}"
|
|
87
|
+
|
|
88
|
+
self._connection = psycopg.connect(**connection_kwargs)
|
|
89
|
+
self._connection.read_only = True
|
|
72
90
|
return self._connection
|
|
73
91
|
|
|
74
|
-
def
|
|
75
|
-
return self
|
|
76
|
-
|
|
77
|
-
def __exit__(self, exc_type, exc_val, exc_tb):
|
|
78
|
-
self.close()
|
|
79
|
-
|
|
80
|
-
def close(self):
|
|
92
|
+
def close(self) -> None:
|
|
81
93
|
"""Closes the database connection if it's open."""
|
|
82
94
|
if self._connection and not self._connection.closed:
|
|
83
95
|
log_debug("Closing PostgreSQL connection.")
|
|
84
96
|
self._connection.close()
|
|
85
97
|
self._connection = None
|
|
86
98
|
|
|
99
|
+
@property
|
|
100
|
+
def is_connected(self) -> bool:
|
|
101
|
+
"""Check if a connection is currently established."""
|
|
102
|
+
return self._connection is not None and not self._connection.closed
|
|
103
|
+
|
|
104
|
+
def _ensure_connection(self) -> PgConnection[DictRow]:
|
|
105
|
+
"""
|
|
106
|
+
Ensure a connection exists, creating one if necessary.
|
|
107
|
+
|
|
108
|
+
Returns:
|
|
109
|
+
The database connection object.
|
|
110
|
+
"""
|
|
111
|
+
if not self.is_connected:
|
|
112
|
+
return self.connect()
|
|
113
|
+
return self._connection # type: ignore
|
|
114
|
+
|
|
115
|
+
def __enter__(self):
|
|
116
|
+
return self.connect()
|
|
117
|
+
|
|
118
|
+
def __exit__(self, exc_type, exc_val, exc_tb):
|
|
119
|
+
if self.is_connected:
|
|
120
|
+
self.close()
|
|
121
|
+
|
|
87
122
|
def _execute_query(self, query: str, params: Optional[tuple] = None) -> str:
|
|
88
123
|
try:
|
|
89
|
-
|
|
90
|
-
|
|
124
|
+
connection = self._ensure_connection()
|
|
125
|
+
with connection.cursor() as cursor:
|
|
126
|
+
log_debug("Running PostgreSQL query")
|
|
91
127
|
cursor.execute(query, params)
|
|
92
128
|
|
|
93
129
|
if cursor.description is None:
|
|
@@ -105,8 +141,8 @@ class PostgresTools(Toolkit):
|
|
|
105
141
|
|
|
106
142
|
except psycopg.Error as e:
|
|
107
143
|
log_error(f"Database error: {e}")
|
|
108
|
-
if self.
|
|
109
|
-
self.
|
|
144
|
+
if self._connection and not self._connection.closed:
|
|
145
|
+
self._connection.rollback()
|
|
110
146
|
return f"Error executing query: {e}"
|
|
111
147
|
except Exception as e:
|
|
112
148
|
log_error(f"An unexpected error occurred: {e}")
|
|
@@ -146,7 +182,8 @@ class PostgresTools(Toolkit):
|
|
|
146
182
|
A string containing a summary of the table.
|
|
147
183
|
"""
|
|
148
184
|
try:
|
|
149
|
-
|
|
185
|
+
connection = self._ensure_connection()
|
|
186
|
+
with connection.cursor() as cursor:
|
|
150
187
|
# First, get column information using a parameterized query
|
|
151
188
|
schema_query = """
|
|
152
189
|
SELECT column_name, data_type
|
|
@@ -230,7 +267,8 @@ class PostgresTools(Toolkit):
|
|
|
230
267
|
stmt = sql.SQL("SELECT * FROM {tbl};").format(tbl=table_identifier)
|
|
231
268
|
|
|
232
269
|
try:
|
|
233
|
-
|
|
270
|
+
connection = self._ensure_connection()
|
|
271
|
+
with connection.cursor() as cursor:
|
|
234
272
|
cursor.execute(stmt)
|
|
235
273
|
|
|
236
274
|
if cursor.description is None:
|
|
@@ -245,6 +283,8 @@ class PostgresTools(Toolkit):
|
|
|
245
283
|
|
|
246
284
|
return f"Successfully exported table '{table}' to '{path}'."
|
|
247
285
|
except (psycopg.Error, IOError) as e:
|
|
286
|
+
if self._connection and not self._connection.closed:
|
|
287
|
+
self._connection.rollback()
|
|
248
288
|
return f"Error exporting table: {e}"
|
|
249
289
|
|
|
250
290
|
def run_query(self, query: str) -> str:
|