intentkit 0.6.0.dev12__py3-none-any.whl → 0.6.0.dev14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (68)
  1. intentkit/__init__.py +1 -1
  2. intentkit/abstracts/skill.py +12 -0
  3. intentkit/core/engine.py +2 -1
  4. intentkit/core/node.py +3 -1
  5. intentkit/core/skill.py +11 -0
  6. intentkit/models/skill.py +19 -0
  7. intentkit/skills/base.py +37 -17
  8. intentkit/skills/cryptocompare/fetch_news.py +2 -2
  9. intentkit/skills/cryptocompare/fetch_price.py +2 -2
  10. intentkit/skills/cryptocompare/fetch_top_exchanges.py +2 -2
  11. intentkit/skills/cryptocompare/fetch_top_market_cap.py +2 -2
  12. intentkit/skills/cryptocompare/fetch_top_volume.py +2 -2
  13. intentkit/skills/cryptocompare/fetch_trading_signals.py +2 -2
  14. intentkit/skills/defillama/base.py +3 -3
  15. intentkit/skills/enso/base.py +2 -2
  16. intentkit/skills/enso/networks.py +1 -1
  17. intentkit/skills/enso/route.py +1 -1
  18. intentkit/skills/enso/tokens.py +1 -1
  19. intentkit/skills/firecrawl/README.md +35 -2
  20. intentkit/skills/firecrawl/__init__.py +8 -0
  21. intentkit/skills/firecrawl/clear.py +87 -0
  22. intentkit/skills/firecrawl/crawl.py +2 -10
  23. intentkit/skills/firecrawl/query.py +4 -4
  24. intentkit/skills/firecrawl/schema.json +16 -0
  25. intentkit/skills/firecrawl/scrape.py +2 -8
  26. intentkit/skills/firecrawl/utils.py +63 -44
  27. intentkit/skills/heurist/image_generation_animagine_xl.py +1 -1
  28. intentkit/skills/heurist/image_generation_arthemy_comics.py +1 -1
  29. intentkit/skills/heurist/image_generation_arthemy_real.py +1 -1
  30. intentkit/skills/heurist/image_generation_braindance.py +1 -1
  31. intentkit/skills/heurist/image_generation_cyber_realistic_xl.py +1 -1
  32. intentkit/skills/heurist/image_generation_flux_1_dev.py +1 -1
  33. intentkit/skills/heurist/image_generation_sdxl.py +1 -1
  34. intentkit/skills/lifi/token_execute.py +1 -1
  35. intentkit/skills/openai/dalle_image_generation.py +1 -1
  36. intentkit/skills/openai/gpt_image_generation.py +1 -1
  37. intentkit/skills/openai/gpt_image_to_image.py +1 -1
  38. intentkit/skills/supabase/__init__.py +116 -0
  39. intentkit/skills/supabase/base.py +72 -0
  40. intentkit/skills/supabase/delete_data.py +102 -0
  41. intentkit/skills/supabase/fetch_data.py +120 -0
  42. intentkit/skills/supabase/insert_data.py +70 -0
  43. intentkit/skills/supabase/invoke_function.py +74 -0
  44. intentkit/skills/supabase/schema.json +168 -0
  45. intentkit/skills/supabase/supabase.svg +15 -0
  46. intentkit/skills/supabase/update_data.py +105 -0
  47. intentkit/skills/supabase/upsert_data.py +77 -0
  48. intentkit/skills/system/read_agent_api_key.py +1 -1
  49. intentkit/skills/system/regenerate_agent_api_key.py +1 -1
  50. intentkit/skills/token/base.py +1 -39
  51. intentkit/skills/twitter/follow_user.py +3 -3
  52. intentkit/skills/twitter/get_mentions.py +6 -6
  53. intentkit/skills/twitter/get_timeline.py +5 -5
  54. intentkit/skills/twitter/get_user_by_username.py +3 -3
  55. intentkit/skills/twitter/get_user_tweets.py +5 -5
  56. intentkit/skills/twitter/like_tweet.py +3 -3
  57. intentkit/skills/twitter/post_tweet.py +4 -4
  58. intentkit/skills/twitter/reply_tweet.py +4 -4
  59. intentkit/skills/twitter/retweet.py +3 -3
  60. intentkit/skills/twitter/search_tweets.py +5 -5
  61. intentkit/skills/unrealspeech/text_to_speech.py +1 -1
  62. intentkit/skills/web_scraper/document_indexer.py +2 -2
  63. intentkit/skills/web_scraper/scrape_and_index.py +8 -8
  64. intentkit/skills/web_scraper/website_indexer.py +4 -4
  65. {intentkit-0.6.0.dev12.dist-info → intentkit-0.6.0.dev14.dist-info}/METADATA +1 -1
  66. {intentkit-0.6.0.dev12.dist-info → intentkit-0.6.0.dev14.dist-info}/RECORD +68 -57
  67. {intentkit-0.6.0.dev12.dist-info → intentkit-0.6.0.dev14.dist-info}/WHEEL +0 -0
  68. {intentkit-0.6.0.dev12.dist-info → intentkit-0.6.0.dev14.dist-info}/licenses/LICENSE +0 -0
intentkit/skills/firecrawl/utils.py
@@ -1,11 +1,9 @@
 """Utilities for Firecrawl skill content indexing and querying."""
 
 import logging
-import pickle
 import re
 from typing import Any, Dict, List, Optional, Tuple
 
-import faiss
 from langchain.text_splitter import RecursiveCharacterTextSplitter
 from langchain_community.vectorstores import FAISS
 from langchain_core.documents import Document
@@ -77,46 +75,52 @@ class FirecrawlVectorStoreManager:
             openai_api_key=openai_api_key, model="text-embedding-3-small"
         )
 
-    def encode_vector_store(self, vector_store: FAISS) -> Dict[str, bytes]:
-        """Encode FAISS vector store to bytes for storage."""
-        try:
-            # Serialize the index
-            index_bytes = faiss.serialize_index(vector_store.index)
-
-            # Serialize the docstore and index_to_docstore_id
-            docstore_bytes = pickle.dumps(vector_store.docstore)
-            index_to_docstore_bytes = pickle.dumps(vector_store.index_to_docstore_id)
+    def encode_vector_store(self, vector_store: FAISS) -> Dict[str, str]:
+        """Encode FAISS vector store to base64 for storage (compatible with web_scraper)."""
+        import base64
+        import os
+        import tempfile
 
-            return {
-                "index": index_bytes,
-                "docstore": docstore_bytes,
-                "index_to_docstore": index_to_docstore_bytes,
-            }
+        try:
+            with tempfile.TemporaryDirectory() as temp_dir:
+                vector_store.save_local(temp_dir)
+
+                encoded_files = {}
+                for filename in os.listdir(temp_dir):
+                    file_path = os.path.join(temp_dir, filename)
+                    if os.path.isfile(file_path):
+                        with open(file_path, "rb") as f:
+                            encoded_files[filename] = base64.b64encode(f.read()).decode(
+                                "utf-8"
+                            )
+
+                return encoded_files
         except Exception as e:
             logger.error(f"Error encoding vector store: {e}")
             raise
 
     def decode_vector_store(
-        self, faiss_data: Dict[str, bytes], embeddings: OpenAIEmbeddings
+        self, encoded_files: Dict[str, str], embeddings: OpenAIEmbeddings
     ) -> FAISS:
-        """Decode FAISS vector store from stored bytes."""
-        try:
-            # Deserialize the index
-            index = faiss.deserialize_index(faiss_data["index"])
-
-            # Deserialize the docstore and index_to_docstore_id
-            docstore = pickle.loads(faiss_data["docstore"])
-            index_to_docstore_id = pickle.loads(faiss_data["index_to_docstore"])
-
-            # Create FAISS vector store
-            vector_store = FAISS(
-                embedding_function=embeddings,
-                index=index,
-                docstore=docstore,
-                index_to_docstore_id=index_to_docstore_id,
-            )
+        """Decode base64 files back to FAISS vector store (compatible with web_scraper)."""
+        import base64
+        import os
+        import tempfile
 
-            return vector_store
+        try:
+            with tempfile.TemporaryDirectory() as temp_dir:
+                # Decode and write files
+                for filename, encoded_content in encoded_files.items():
+                    file_path = os.path.join(temp_dir, filename)
+                    with open(file_path, "wb") as f:
+                        f.write(base64.b64decode(encoded_content))
+
+                # Load vector store
+                return FAISS.load_local(
+                    temp_dir,
+                    embeddings,
+                    allow_dangerous_deserialization=True,
+                )
         except Exception as e:
             logger.error(f"Error decoding vector store: {e}")
             raise
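The net effect of this hunk: instead of pickling the docstore and serializing the FAISS index directly, the store is written to disk with save_local and each resulting file is base64-encoded, matching the format the web_scraper skill reads. A minimal round-trip sketch of that scheme, using FakeEmbeddings in place of OpenAIEmbeddings so it runs without an API key (store contents and names here are illustrative):

```python
import base64
import os
import tempfile

from langchain_community.embeddings import FakeEmbeddings
from langchain_community.vectorstores import FAISS

embeddings = FakeEmbeddings(size=32)  # stand-in for text-embedding-3-small
store = FAISS.from_texts(["hello firecrawl"], embeddings)

# Encode: save_local writes index.faiss and index.pkl; base64 each file.
encoded = {}
with tempfile.TemporaryDirectory() as tmp:
    store.save_local(tmp)
    for name in os.listdir(tmp):
        with open(os.path.join(tmp, name), "rb") as f:
            encoded[name] = base64.b64encode(f.read()).decode("utf-8")

# Decode: write the files back and reload with load_local.
with tempfile.TemporaryDirectory() as tmp:
    for name, b64 in encoded.items():
        with open(os.path.join(tmp, name), "wb") as f:
            f.write(base64.b64decode(b64))
    restored = FAISS.load_local(
        tmp, embeddings, allow_dangerous_deserialization=True
    )

print(restored.similarity_search("hello", k=1)[0].page_content)
```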
@@ -124,9 +128,9 @@ class FirecrawlVectorStoreManager:
     async def load_vector_store(self, agent_id: str) -> Optional[FAISS]:
         """Load existing vector store for an agent."""
         try:
-            vector_store_key = f"firecrawl_vector_store_{agent_id}"
+            vector_store_key = f"vector_store_{agent_id}"
             stored_data = await self.skill_store.get_agent_skill_data(
-                agent_id, "firecrawl", vector_store_key
+                agent_id, "web_scraper", vector_store_key
             )
 
             if not stored_data or "faiss_files" not in stored_data:
@@ -139,14 +143,27 @@ class FirecrawlVectorStoreManager:
             logger.error(f"Error loading vector store for agent {agent_id}: {e}")
             return None
 
-    async def save_vector_store(self, agent_id: str, vector_store: FAISS) -> None:
-        """Save vector store for an agent."""
+    async def save_vector_store(
+        self,
+        agent_id: str,
+        vector_store: FAISS,
+        chunk_size: int = 1000,
+        chunk_overlap: int = 200,
+    ) -> None:
+        """Save vector store for an agent (compatible with web_scraper format)."""
         try:
-            vector_store_key = f"firecrawl_vector_store_{agent_id}"
-            encoded_data = self.encode_vector_store(vector_store)
+            vector_store_key = f"vector_store_{agent_id}"
+            encoded_files = self.encode_vector_store(vector_store)
+
+            # Use the same data structure as web_scraper
+            storage_data = {
+                "faiss_files": encoded_files,
+                "chunk_size": chunk_size,
+                "chunk_overlap": chunk_overlap,
+            }
 
             await self.skill_store.save_agent_skill_data(
-                agent_id, "firecrawl", vector_store_key, {"faiss_files": encoded_data}
+                agent_id, "web_scraper", vector_store_key, storage_data
             )
 
         except Exception as e:
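For reference, the payload written by the new save_vector_store, read straight from the hunk above; the agent id is a placeholder, and the two file names are what FAISS.save_local produces:

```python
# Sketch of the record saved under the web_scraper namespace; "agent-123"
# and the elided base64 strings are placeholders.
storage_data = {
    "faiss_files": {
        "index.faiss": "<base64>",  # the serialized FAISS index
        "index.pkl": "<base64>",    # docstore + index_to_docstore_id
    },
    "chunk_size": 1000,
    "chunk_overlap": 200,
}
# Saved under ("agent-123", "web_scraper", "vector_store_agent-123"), which
# is why load_vector_store above reads from the same namespace and key.
```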
@@ -176,9 +193,9 @@ class FirecrawlMetadataManager:
     ) -> None:
         """Update metadata for an agent."""
         try:
-            metadata_key = f"firecrawl_metadata_{agent_id}"
+            metadata_key = f"indexed_urls_{agent_id}"
             await self.skill_store.save_agent_skill_data(
-                agent_id, "firecrawl", metadata_key, new_metadata
+                agent_id, "web_scraper", metadata_key, new_metadata
             )
         except Exception as e:
             logger.error(f"Error updating metadata for agent {agent_id}: {e}")
@@ -235,7 +252,9 @@ async def index_documents(
     was_merged = False
 
     # Save the vector store
-    await vs_manager.save_vector_store(agent_id, vector_store)
+    await vs_manager.save_vector_store(
+        agent_id, vector_store, chunk_size, chunk_overlap
+    )
 
     logger.info(
         f"Successfully indexed {len(split_docs)} chunks for agent {agent_id}"
intentkit/skills/heurist/image_generation_animagine_xl.py
@@ -137,7 +137,7 @@ class ImageGenerationAnimagineXL(HeuristBaseTool):
         # Store the image URL
         image_url = response.text.strip('"')
         # Generate a key with agent ID as prefix
-        image_key = f"{context.agent.id}/heurist/{job_id}"
+        image_key = f"{context.agent_id}/heurist/{job_id}"
         # Store the image and get the CDN URL
         stored_url = await store_image(image_url, image_key)
 
intentkit/skills/heurist/image_generation_arthemy_comics.py
@@ -137,7 +137,7 @@ class ImageGenerationArthemyComics(HeuristBaseTool):
         # Store the image URL
         image_url = response.text.strip('"')
         # Generate a key with agent ID as prefix
-        image_key = f"{context.agent.id}/heurist/{job_id}"
+        image_key = f"{context.agent_id}/heurist/{job_id}"
         # Store the image and get the CDN URL
         stored_url = await store_image(image_url, image_key)
 
intentkit/skills/heurist/image_generation_arthemy_real.py
@@ -137,7 +137,7 @@ class ImageGenerationArthemyReal(HeuristBaseTool):
         # Store the image URL
         image_url = response.text.strip('"')
         # Generate a key with agent ID as prefix
-        image_key = f"{context.agent.id}/heurist/{job_id}"
+        image_key = f"{context.agent_id}/heurist/{job_id}"
         # Store the image and get the CDN URL
         stored_url = await store_image(image_url, image_key)
 
intentkit/skills/heurist/image_generation_braindance.py
@@ -137,7 +137,7 @@ class ImageGenerationBrainDance(HeuristBaseTool):
         # Store the image URL
         image_url = response.text.strip('"')
         # Generate a key with agent ID as prefix
-        image_key = f"{context.agent.id}/heurist/{job_id}"
+        image_key = f"{context.agent_id}/heurist/{job_id}"
         # Store the image and get the CDN URL
         stored_url = await store_image(image_url, image_key)
 
intentkit/skills/heurist/image_generation_cyber_realistic_xl.py
@@ -137,7 +137,7 @@ class ImageGenerationCyberRealisticXL(HeuristBaseTool):
         # Store the image URL
         image_url = response.text.strip('"')
         # Generate a key with agent ID as prefix
-        image_key = f"{context.agent.id}/heurist/{job_id}"
+        image_key = f"{context.agent_id}/heurist/{job_id}"
         # Store the image and get the CDN URL
         stored_url = await store_image(image_url, image_key)
 
intentkit/skills/heurist/image_generation_flux_1_dev.py
@@ -137,7 +137,7 @@ class ImageGenerationFlux1Dev(HeuristBaseTool):
         # Store the image URL
         image_url = response.text.strip('"')
         # Generate a key with agent ID as prefix
-        image_key = f"{context.agent.id}/heurist/{job_id}"
+        image_key = f"{context.agent_id}/heurist/{job_id}"
         # Store the image and get the CDN URL
         stored_url = await store_image(image_url, image_key)
 
intentkit/skills/heurist/image_generation_sdxl.py
@@ -136,7 +136,7 @@ class ImageGenerationSDXL(HeuristBaseTool):
         # Store the image URL
         image_url = response.text.strip('"')
         # Generate a key with agent ID as prefix
-        image_key = f"{context.agent.id}/heurist/{job_id}"
+        image_key = f"{context.agent_id}/heurist/{job_id}"
         # Store the image and get the CDN URL
         stored_url = await store_image(image_url, image_key)
 
intentkit/skills/lifi/token_execute.py
@@ -129,7 +129,7 @@ class TokenExecute(LiFiBaseTool):
 
         # Get agent context for CDP wallet
         context = self.context_from_config(config)
-        agent_id = context.agent.id
+        agent_id = context.agent_id
 
         self.logger.info(
             f"Executing LiFi transfer: {from_amount} {from_token} on {from_chain} -> {to_token} on {to_chain}"
intentkit/skills/openai/dalle_image_generation.py
@@ -109,7 +109,7 @@ class DALLEImageGeneration(OpenAIBaseTool):
         image_url = image_url.strip('"')
 
         # Generate a key with agent ID as prefix
-        image_key = f"{context.agent.id}/dalle/{job_id}"
+        image_key = f"{context.agent_id}/dalle/{job_id}"
 
         # Store the image and get the CDN URL
         stored_url = await store_image(image_url, image_key)
intentkit/skills/openai/gpt_image_generation.py
@@ -133,7 +133,7 @@ class GPTImageGeneration(OpenAIBaseTool):
         image_bytes = base64.b64decode(base64_image)
 
         # Generate a key with agent ID as prefix
-        image_key = f"{context.agent.id}/gpt-image/{job_id}"
+        image_key = f"{context.agent_id}/gpt-image/{job_id}"
 
         # Store the image bytes and get the CDN URL
         stored_url = await store_image_bytes(image_bytes, image_key, content_type)
intentkit/skills/openai/gpt_image_to_image.py
@@ -157,7 +157,7 @@ class GPTImageToImage(OpenAIBaseTool):
         image_bytes = base64.b64decode(base64_image)
 
         # Generate a key with agent ID as prefix
-        image_key = f"{context.agent.id}/gpt-image-edit/{job_id}"
+        image_key = f"{context.agent_id}/gpt-image-edit/{job_id}"
 
         # Store the image bytes and get the CDN URL
         stored_url = await store_image_bytes(image_bytes, image_key)
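The eleven hunks above are one mechanical rename: the skill context now exposes the agent id directly as context.agent_id instead of a nested context.agent.id. A minimal stand-in showing the key construction these hunks share (the dataclass below is hypothetical, not intentkit's actual SkillContext):

```python
from dataclasses import dataclass


@dataclass
class SkillContext:  # hypothetical stand-in for intentkit's context object
    agent_id: str    # previously reached as context.agent.id


context = SkillContext(agent_id="agent-123")
job_id = "job-456"

# Key construction shared by the heurist/openai image hunks above:
image_key = f"{context.agent_id}/heurist/{job_id}"
assert image_key == "agent-123/heurist/job-456"
```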
intentkit/skills/supabase/__init__.py (new file)
@@ -0,0 +1,116 @@
+"""Supabase skills."""
+
+import logging
+from typing import TypedDict
+
+from intentkit.abstracts.skill import SkillStoreABC
+from intentkit.skills.base import SkillConfig, SkillState
+from intentkit.skills.supabase.base import SupabaseBaseTool
+from intentkit.skills.supabase.delete_data import SupabaseDeleteData
+from intentkit.skills.supabase.fetch_data import SupabaseFetchData
+from intentkit.skills.supabase.insert_data import SupabaseInsertData
+from intentkit.skills.supabase.invoke_function import SupabaseInvokeFunction
+from intentkit.skills.supabase.update_data import SupabaseUpdateData
+from intentkit.skills.supabase.upsert_data import SupabaseUpsertData
+
+# Cache skills at the system level, because they are stateless
+_cache: dict[str, SupabaseBaseTool] = {}
+
+logger = logging.getLogger(__name__)
+
+
+class SkillStates(TypedDict):
+    fetch_data: SkillState
+    insert_data: SkillState
+    update_data: SkillState
+    upsert_data: SkillState
+    delete_data: SkillState
+    invoke_function: SkillState
+
+
+class Config(SkillConfig):
+    """Configuration for Supabase skills."""
+
+    states: SkillStates
+    supabase_url: str
+    supabase_key: str
+    public_write_tables: str = ""
+
+
+async def get_skills(
+    config: "Config",
+    is_private: bool,
+    store: SkillStoreABC,
+    **_,
+) -> list[SupabaseBaseTool]:
+    """Get all Supabase skills."""
+    available_skills = []
+
+    # Include skills based on their state
+    for skill_name, state in config["states"].items():
+        if state == "disabled":
+            continue
+        elif state == "public" or (state == "private" and is_private):
+            available_skills.append(skill_name)
+
+    # Get each skill using the cached getter
+    result = []
+    for name in available_skills:
+        skill = get_supabase_skill(name, store)
+        if skill:
+            result.append(skill)
+    return result
+
+
+def get_supabase_skill(
+    name: str,
+    store: SkillStoreABC,
+) -> SupabaseBaseTool:
+    """Get a Supabase skill by name.
+
+    Args:
+        name: The name of the skill to get
+        store: The skill store for persisting data
+
+    Returns:
+        The requested Supabase skill
+    """
+    if name == "fetch_data":
+        if name not in _cache:
+            _cache[name] = SupabaseFetchData(
+                skill_store=store,
+            )
+        return _cache[name]
+    elif name == "insert_data":
+        if name not in _cache:
+            _cache[name] = SupabaseInsertData(
+                skill_store=store,
+            )
+        return _cache[name]
+    elif name == "update_data":
+        if name not in _cache:
+            _cache[name] = SupabaseUpdateData(
+                skill_store=store,
+            )
+        return _cache[name]
+    elif name == "upsert_data":
+        if name not in _cache:
+            _cache[name] = SupabaseUpsertData(
+                skill_store=store,
+            )
+        return _cache[name]
+    elif name == "delete_data":
+        if name not in _cache:
+            _cache[name] = SupabaseDeleteData(
+                skill_store=store,
+            )
+        return _cache[name]
+    elif name == "invoke_function":
+        if name not in _cache:
+            _cache[name] = SupabaseInvokeFunction(
+                skill_store=store,
+            )
+        return _cache[name]
+    else:
+        logger.warning(f"Unknown Supabase skill: {name}")
+        return None
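The loop in get_skills reduces to a simple state filter; a sketch of its behavior with an illustrative states mapping (these values are examples, not shipped defaults):

```python
# Mirrors the state filtering in get_skills: "disabled" is skipped, and
# "private" skills are only exposed when is_private is True.
states = {
    "fetch_data": "public",
    "insert_data": "private",
    "delete_data": "disabled",
}


def enabled_skills(states: dict[str, str], is_private: bool) -> list[str]:
    return [
        name
        for name, state in states.items()
        if state == "public" or (state == "private" and is_private)
    ]


assert enabled_skills(states, is_private=False) == ["fetch_data"]
assert enabled_skills(states, is_private=True) == ["fetch_data", "insert_data"]
```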
intentkit/skills/supabase/base.py (new file)
@@ -0,0 +1,72 @@
+from typing import Type
+
+from langchain_core.tools import ToolException
+from pydantic import BaseModel, Field
+
+from intentkit.abstracts.skill import SkillStoreABC
+from intentkit.skills.base import IntentKitSkill, SkillContext
+
+
+class SupabaseBaseTool(IntentKitSkill):
+    """Base class for Supabase tools."""
+
+    name: str = Field(description="The name of the tool")
+    description: str = Field(description="A description of what the tool does")
+    args_schema: Type[BaseModel]
+    skill_store: SkillStoreABC = Field(
+        description="The skill store for persisting data"
+    )
+
+    @property
+    def category(self) -> str:
+        return "supabase"
+
+    def get_supabase_config(self, config: dict) -> tuple[str, str]:
+        """Get Supabase URL and key from config.
+
+        Args:
+            config: The agent configuration
+
+        Returns:
+            Tuple of (supabase_url, supabase_key)
+
+        Raises:
+            ValueError: If required config is missing
+        """
+        supabase_url = config.get("supabase_url")
+        supabase_key = config.get("supabase_key")
+
+        if not supabase_url:
+            raise ValueError("supabase_url is required in config")
+        if not supabase_key:
+            raise ValueError("supabase_key is required in config")
+
+        return supabase_url, supabase_key
+
+    def validate_table_access(self, table: str, context: SkillContext) -> None:
+        """Validate if the table can be accessed for write operations in public mode.
+
+        Args:
+            table: The table name to validate
+            context: The skill context containing configuration and mode info
+
+        Raises:
+            ToolException: If table access is not allowed in public mode
+        """
+        # If in private mode (owner mode), no restrictions apply
+        if context.is_private:
+            return
+
+        # In public mode, check if table is in allowed list
+        public_write_tables = context.config.get("public_write_tables", "")
+        if not public_write_tables:
+            return
+
+        allowed_tables = [
+            t.strip() for t in public_write_tables.split(",") if t.strip()
+        ]
+        if table not in allowed_tables:
+            raise ToolException(
+                f"Table '{table}' is not allowed for public write operations. "
+                f"Allowed tables: {', '.join(allowed_tables)}"
+            )
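Note the allowlist semantics: an empty public_write_tables string means no restriction (the early return above), not a deny-all. The parsing itself is a comma split with whitespace trimming; a standalone reproduction, with made-up table names:

```python
# Standalone reproduction of the allowlist parsing in validate_table_access.
public_write_tables = "posts, comments"

allowed_tables = [t.strip() for t in public_write_tables.split(",") if t.strip()]
assert allowed_tables == ["posts", "comments"]

# In public mode a write to "users" would raise ToolException; in private
# (owner) mode the method returns before this check runs.
assert "users" not in allowed_tables
```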
intentkit/skills/supabase/delete_data.py (new file)
@@ -0,0 +1,102 @@
+import logging
+from typing import Any, Dict, Type
+
+from langchain_core.runnables import RunnableConfig
+from langchain_core.tools import ToolException
+from pydantic import BaseModel, Field
+from supabase import Client, create_client
+
+from intentkit.skills.supabase.base import SupabaseBaseTool
+
+NAME = "supabase_delete_data"
+PROMPT = "Delete data from a Supabase table based on filtering conditions."
+
+logger = logging.getLogger(__name__)
+
+
+class SupabaseDeleteDataInput(BaseModel):
+    """Input for SupabaseDeleteData tool."""
+
+    table: str = Field(description="The name of the table to delete data from")
+    filters: Dict[str, Any] = Field(
+        description="Dictionary of filters to identify which records to delete (e.g., {'id': 123})"
+    )
+    returning: str = Field(
+        default="*",
+        description="Columns to return from deleted records (default: '*' for all)",
+    )
+
+
+class SupabaseDeleteData(SupabaseBaseTool):
+    """Tool for deleting data from Supabase tables.
+
+    This tool allows deleting records from Supabase tables based on filter conditions.
+    """
+
+    name: str = NAME
+    description: str = PROMPT
+    args_schema: Type[BaseModel] = SupabaseDeleteDataInput
+
+    async def _arun(
+        self,
+        table: str,
+        filters: Dict[str, Any],
+        returning: str = "*",
+        config: RunnableConfig = None,
+        **kwargs,
+    ):
+        try:
+            context = self.context_from_config(config)
+
+            # Validate table access for public mode
+            self.validate_table_access(table, context)
+
+            supabase_url, supabase_key = self.get_supabase_config(context.config)
+
+            # Create Supabase client
+            supabase: Client = create_client(supabase_url, supabase_key)
+
+            # Start building the delete query
+            query = supabase.table(table).delete()
+
+            # Apply filters to identify which records to delete
+            for column, value in filters.items():
+                if isinstance(value, dict):
+                    # Handle complex filters like {'gte': 18}
+                    for operator, filter_value in value.items():
+                        if operator == "eq":
+                            query = query.eq(column, filter_value)
+                        elif operator == "neq":
+                            query = query.neq(column, filter_value)
+                        elif operator == "gt":
+                            query = query.gt(column, filter_value)
+                        elif operator == "gte":
+                            query = query.gte(column, filter_value)
+                        elif operator == "lt":
+                            query = query.lt(column, filter_value)
+                        elif operator == "lte":
+                            query = query.lte(column, filter_value)
+                        elif operator == "like":
+                            query = query.like(column, filter_value)
+                        elif operator == "ilike":
+                            query = query.ilike(column, filter_value)
+                        elif operator == "in":
+                            query = query.in_(column, filter_value)
+                        else:
+                            logger.warning(f"Unknown filter operator: {operator}")
+                else:
+                    # Simple equality filter
+                    query = query.eq(column, value)
+
+            # Execute the delete
+            response = query.execute()
+
+            return {
+                "success": True,
+                "data": response.data,
+                "count": len(response.data) if response.data else 0,
+            }
+
+        except Exception as e:
+            logger.error(f"Error deleting data from Supabase: {str(e)}")
+            raise ToolException(f"Failed to delete data from table '{table}': {str(e)}")
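As the branching above shows, filters accepts either plain equality values or per-column operator dictionaries; a sketch of both forms, with illustrative column names:

```python
# Simple equality: delete the row whose id is 123.
filters_simple = {"id": 123}

# Operator form: each inner dict maps one of the supported operators
# (eq, neq, gt, gte, lt, lte, like, ilike, in) to its value.
filters_complex = {
    "last_seen": {"lt": "2024-01-01"},
    "user_id": {"in": [1, 2, 3]},
}
```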