intentkit 0.6.19.dev2__py3-none-any.whl → 0.6.21.dev1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.


intentkit/__init__.py CHANGED
@@ -3,7 +3,7 @@
 A powerful platform for building AI agents with blockchain and cryptocurrency capabilities.
 """
 
-__version__ = "0.6.19-dev2"
+__version__ = "0.6.21-dev1"
 __author__ = "hyacinthus"
 __email__ = "hyacinthus@gmail.com"
 
intentkit/skills/base.py CHANGED
@@ -1,4 +1,3 @@
-import asyncio
 import logging
 from typing import Any, Callable, Dict, Literal, NotRequired, Optional, TypedDict, Union
 
@@ -6,18 +5,21 @@ from langchain_core.tools import BaseTool
 from langchain_core.tools.base import ToolException
 from langgraph.runtime import get_runtime
 from pydantic import (
-    BaseModel,
     ValidationError,
 )
 from pydantic.v1 import ValidationError as ValidationErrorV1
 from redis.exceptions import RedisError
+from web3 import Web3
 
 from intentkit.abstracts.graph import AgentContext
 from intentkit.abstracts.skill import SkillStoreABC
-from intentkit.models.agent import Agent
 from intentkit.models.redis import get_redis
+from intentkit.utils.chain import ChainProvider
 from intentkit.utils.error import RateLimitExceeded
 
+# Global cache for Web3 clients by network_id
+_web3_client_cache: Dict[str, Web3] = {}
+
 SkillState = Literal["disabled", "public", "private"]
 SkillOwnerState = Literal["disabled", "private"]
 APIKeyProviderValue = Literal["platform", "agent_owner"]
@@ -32,36 +34,6 @@ class SkillConfig(TypedDict):
     __extra__: NotRequired[Dict[str, Any]]
 
 
-class SkillContext(BaseModel):
-    skill_category: str
-    agent_id: str
-    user_id: Optional[str] = None
-    chat_id: Optional[str] = None
-    app_id: Optional[str] = None
-    entrypoint: Literal["web", "twitter", "telegram", "trigger", "api"]
-    is_private: bool
-    payer: Optional[str] = None
-    _agent: Optional[Agent] = None
-
-    @property
-    def agent(self) -> Agent:
-        if self._agent is None:
-            self._agent = asyncio.run(Agent.get(self.agent_id))
-        return self._agent
-
-    @property
-    def config(self) -> Dict[str, Any]:
-        agent = self.agent
-        config = None
-        if agent.skills:
-            config = agent.skills.get(self.skill_category)
-        if not config:
-            raise ValueError(
-                f"Skill {self.skill_category} not found in agent {self.agent_id}"
-            )
-        return config
-
-
 class IntentKitSkill(BaseTool):
     """Abstract base class for IntentKit skills.
     Will have predefined abilities.
@@ -185,3 +157,23 @@ class IntentKitSkill(BaseTool):
         if runtime.context is None or not isinstance(runtime.context, AgentContext):
             raise ValueError("No AgentContext found")
         return runtime.context
+
+    def web3_client(self) -> Web3:
+        """Get a Web3 client for the skill."""
+        context = self.get_context()
+        agent = context.agent
+        network_id = agent.network_id
+
+        # Check global cache first
+        if network_id in _web3_client_cache:
+            return _web3_client_cache[network_id]
+
+        # Create new Web3 client and cache it
+        chain_provider: ChainProvider = self.skill_store.get_system_config(
+            "chain_provider"
+        )
+        chain = chain_provider.get_chain_config(network_id)
+        web3_client = Web3(Web3.HTTPProvider(chain.rpc_url))
+        _web3_client_cache[network_id] = web3_client
+
+        return web3_client
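
The new `web3_client()` helper memoizes one `Web3` instance per `network_id` in a module-level dict, so every skill on the same network shares a client (and its HTTP connection pool). A minimal standalone sketch of the same pattern, where the `rpc_url_for` mapping is a hypothetical stand-in for `ChainProvider.get_chain_config(network_id).rpc_url`:

```python
from web3 import Web3

# Module-level cache: one Web3 client per network, shared by all skills.
_clients: dict[str, Web3] = {}

def rpc_url_for(network_id: str) -> str:
    # Hypothetical stand-in for ChainProvider.get_chain_config(network_id).rpc_url
    return {"base-mainnet": "https://mainnet.base.org"}[network_id]

def get_client(network_id: str) -> Web3:
    # Create the client once, then reuse it on every later call
    if network_id not in _clients:
        _clients[network_id] = Web3(Web3.HTTPProvider(rpc_url_for(network_id)))
    return _clients[network_id]

# Repeated lookups for the same network return the identical cached instance
assert get_client("base-mainnet") is get_client("base-mainnet")
```
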
intentkit/skills/defillama/base.py CHANGED
@@ -5,8 +5,9 @@ from typing import Type
 
 from pydantic import BaseModel, Field
 
+from intentkit.abstracts.graph import AgentContext
 from intentkit.abstracts.skill import SkillStoreABC
-from intentkit.skills.base import IntentKitSkill, SkillContext
+from intentkit.skills.base import IntentKitSkill
 from intentkit.skills.defillama.config.chains import (
     get_chain_from_alias,
 )
@@ -39,7 +40,7 @@ class DefiLlamaBaseTool(IntentKitSkill):
         return "defillama"
 
     async def check_rate_limit(
-        self, context: SkillContext, max_requests: int = 30, interval: int = 5
+        self, context: AgentContext, max_requests: int = 30, interval: int = 5
     ) -> tuple[bool, str | None]:
         """Check if the rate limit has been exceeded.
 
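`check_rate_limit` keeps its `(exceeded, message)` return shape but now takes the shared `AgentContext` from the langgraph runtime instead of the removed `SkillContext`. The actual implementation is Redis-backed (note the `RedisError` and `get_redis` imports in `base.py` above); purely as an illustration of the semantics, a fixed-window limiter with the same signature might look like the sketch below. The window bookkeeping and the minute-based interval are assumptions, not IntentKit's code:

```python
import time

# Illustrative fixed-window rate limiter with check_rate_limit's return shape.
# NOT IntentKit's implementation, which stores its counters in Redis.
_windows: dict[str, tuple[float, int]] = {}

def check_rate_limit(key: str, max_requests: int = 30, interval: int = 5) -> tuple[bool, str | None]:
    now = time.monotonic()
    start, count = _windows.get(key, (now, 0))
    if now - start > interval * 60:  # assume interval is in minutes
        start, count = now, 0        # window expired: start a fresh one
    count += 1
    _windows[key] = (start, count)
    if count > max_requests:
        return True, f"Rate limit exceeded: {max_requests} requests per {interval} min"
    return False, None
```
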
intentkit/skills/firecrawl/README.md CHANGED
@@ -5,18 +5,22 @@ The Firecrawl skills provide advanced web scraping and content indexing capabili
 ## Skills Overview
 
 ### 1. firecrawl_scrape
-Scrapes a single webpage and optionally indexes the content for future querying.
+Scrapes a single webpage and REPLACES any existing indexed content for that URL, preventing duplicates.
 
 **Parameters:**
 - `url` (required): The URL to scrape
-- `formats` (optional): Output formats - markdown, html, rawHtml, screenshot, links, extract (default: ["markdown"])
+- `formats` (optional): Output formats - markdown, html, rawHtml, screenshot, links, json (default: ["markdown"])
+- `only_main_content` (optional): Extract only main content (default: true)
 - `include_tags` (optional): HTML tags to include (e.g., ["h1", "h2", "p"])
 - `exclude_tags` (optional): HTML tags to exclude
-- `only_main_content` (optional): Extract only main content (default: true)
+- `wait_for` (optional): Wait time in milliseconds before scraping
+- `timeout` (optional): Maximum timeout in milliseconds (default: 30000)
 - `index_content` (optional): Whether to index content for querying (default: true)
 - `chunk_size` (optional): Size of text chunks for indexing (default: 1000)
 - `chunk_overlap` (optional): Overlap between chunks (default: 200)
 
+**Use Case:** Use this when you want to refresh/update content from a URL that was previously scraped, ensuring no duplicate or stale content remains.
+
 ### 2. firecrawl_crawl
 Crawls multiple pages from a website and indexes all content.
 
@@ -158,8 +162,9 @@ Prompt: "Use firecrawl_scrape to scrape https://example.com and index the conten
 ### Documentation Indexing
 ```
 1. Scrape main documentation page
-2. Crawl related documentation sections
-3. Query for specific technical information
+2. Crawl related documentation sections
+3. Use scrape again to update changed pages (replaces old content)
+4. Query for specific technical information
 ```
 
 ### Competitive Analysis
@@ -205,6 +210,7 @@ Prompt: "Use firecrawl_scrape to scrape https://example.com and index the conten
 - **PDF Support**: Can scrape and index PDF documents
 - **Intelligent Chunking**: Optimized text splitting for better search
 - **Independent Storage**: Uses its own dedicated vector store for Firecrawl content
+- **Content Replacement**: Replace mode prevents duplicate/stale content
 - **Metadata Rich**: Includes source URLs, timestamps, and content types
 - **Semantic Search**: Uses OpenAI embeddings for intelligent querying
 - **Batch Processing**: Efficient handling of multiple pages
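
Putting the scrape parameters above together, a hypothetical argument payload for refreshing an already-indexed page might look like this (the URL and tag choices are illustrative, not defaults):

```python
# Illustrative firecrawl_scrape arguments built from the parameter list above.
scrape_args = {
    "url": "https://docs.example.com/changelog",
    "formats": ["markdown"],            # default output format
    "only_main_content": True,          # default: skip nav/footer boilerplate
    "exclude_tags": ["nav", "footer"],
    "wait_for": 2000,                   # ms to wait for JS-rendered content
    "timeout": 30000,                   # default maximum timeout (ms)
    "index_content": True,              # re-index; replaces prior chunks for this URL
    "chunk_size": 1000,                 # default
    "chunk_overlap": 200,               # default
}
```
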
intentkit/skills/firecrawl/schema.json CHANGED
@@ -34,7 +34,7 @@
         "Agent Owner + All Users",
         "Agent Owner Only"
       ],
-      "description": "Scrape single web pages and extract content in various formats (markdown, HTML, JSON, etc.). Handles JavaScript-rendered content, PDFs, and dynamic websites.",
+      "description": "Scrape single web pages and REPLACE any existing indexed content for that URL. Unlike regular scrape, this prevents duplicate content when re-scraping the same page. Use this to refresh/update content from a previously scraped URL.",
       "default": "private"
     },
     "firecrawl_crawl": {
intentkit/skills/firecrawl/scrape.py CHANGED
@@ -62,10 +62,11 @@ class FirecrawlScrapeInput(BaseModel):
 
 
 class FirecrawlScrape(FirecrawlBaseTool):
-    """Tool for scraping web pages using Firecrawl.
+    """Tool for scraping web pages using Firecrawl with REPLACE behavior.
 
-    This tool uses Firecrawl's API to scrape web pages and convert them into clean,
-    LLM-ready formats like markdown, HTML, or structured JSON data.
+    This tool uses Firecrawl's API to scrape web pages and REPLACES any existing
+    indexed content for the same URL instead of appending to it. This prevents
+    duplicate content when re-scraping the same page.
 
     Attributes:
         name: The name of the tool.
@@ -75,10 +76,10 @@ class FirecrawlScrape(FirecrawlBaseTool):
 
     name: str = "firecrawl_scrape"
     description: str = (
-        "Scrape a single web page and extract its content in various formats (markdown, HTML, JSON, etc.). "
+        "Scrape a single web page and REPLACE any existing indexed content for that URL. "
+        "Unlike regular scrape, this tool removes old content before adding new content, preventing duplicates. "
         "This tool can handle JavaScript-rendered content, PDFs, and dynamic websites. "
-        "Optionally indexes the content for later querying using the firecrawl_query_indexed_content tool. "
-        "Use this when you need to extract clean, structured content from a specific URL."
+        "Use this when you want to refresh/update content from a URL that was previously scraped."
    )
     args_schema: Type[BaseModel] = FirecrawlScrapeInput
 
@@ -187,7 +188,7 @@ class FirecrawlScrape(FirecrawlBaseTool):
         result_data = data.get("data", {})
 
         # Format the results based on requested formats
-        formatted_result = f"Successfully scraped: {url}\n\n"
+        formatted_result = f"Successfully scraped (REPLACE mode): {url}\n\n"
 
         if "markdown" in formats and result_data.get("markdown"):
             formatted_result += "## Markdown Content\n"
@@ -236,13 +237,16 @@ class FirecrawlScrape(FirecrawlBaseTool):
                 formatted_result += f"Language: {metadata['language']}\n"
             formatted_result += "\n"
 
-        # Index content if requested
+        # Index content if requested - REPLACE MODE
         if index_content and result_data.get("markdown"):
             try:
-                # Import indexing utilities from firecrawl utils
+                # Import indexing utilities
+                from langchain_community.vectorstores import FAISS
+
                 from intentkit.skills.firecrawl.utils import (
+                    FirecrawlDocumentProcessor,
                     FirecrawlMetadataManager,
-                    index_documents,
+                    FirecrawlVectorStoreManager,
                 )
 
                 # Create document from scraped content
@@ -261,38 +265,149 @@ class FirecrawlScrape(FirecrawlBaseTool):
                 # Get agent ID for indexing
                 agent_id = context.agent_id
                 if agent_id:
-                    # Index the document
-                    total_chunks, was_merged = await index_documents(
-                        [document],
-                        agent_id,
-                        self.skill_store,
-                        chunk_size,
-                        chunk_overlap,
-                    )
-
-                    # Update metadata
+                    # Initialize managers
+                    vs_manager = FirecrawlVectorStoreManager(self.skill_store)
                     metadata_manager = FirecrawlMetadataManager(
                         self.skill_store
                     )
-                    new_metadata = metadata_manager.create_url_metadata(
-                        [url], [document], "firecrawl_scrape"
+
+                    # Load existing vector store
+                    existing_vector_store = await vs_manager.load_vector_store(
+                        agent_id
                     )
-                    await metadata_manager.update_metadata(
-                        agent_id, new_metadata
+
+                    # Split the new document into chunks
+                    split_docs = FirecrawlDocumentProcessor.split_documents(
+                        [document], chunk_size, chunk_overlap
                     )
 
-                    formatted_result += "\n## Content Indexing\n"
-                    formatted_result += (
-                        "Successfully indexed content into vector store:\n"
+                    # Create embeddings
+                    embeddings = vs_manager.create_embeddings()
+
+                    if existing_vector_store:
+                        # Get all existing documents and filter out those from the same URL
+                        try:
+                            # Try to access documents directly if available
+                            if hasattr(
+                                existing_vector_store, "docstore"
+                            ) and hasattr(
+                                existing_vector_store.docstore, "_dict"
+                            ):
+                                # Access FAISS documents directly
+                                all_docs = list(
+                                    existing_vector_store.docstore._dict.values()
+                                )
+                            else:
+                                # Fallback: use a reasonable k value for similarity search
+                                # Use a dummy query to retrieve documents
+                                all_docs = existing_vector_store.similarity_search(
+                                    "dummy",  # Use a dummy query instead of empty string
+                                    k=1000,  # Use reasonable upper bound
+                                )
+
+                            # Filter out documents from the same URL
+                            preserved_docs = [
+                                doc
+                                for doc in all_docs
+                                if doc.metadata.get("source") != url
+                            ]
+
+                            logger.info(
+                                f"firecrawl_scrape: Preserving {len(preserved_docs)} docs from other URLs, "
+                                f"replacing content from {url}"
+                            )
+
+                            # Create new vector store with preserved docs + new docs
+                            if preserved_docs:
+                                # Combine preserved and new documents
+                                all_documents = preserved_docs + split_docs
+                                new_vector_store = FAISS.from_documents(
+                                    all_documents, embeddings
+                                )
+                                formatted_result += "\n## Content Replacement\n"
+                                formatted_result += f"Replaced existing content for URL: {url}\n"
+                                num_preserved_urls = len(
+                                    set(
+                                        doc.metadata.get("source", "")
+                                        for doc in preserved_docs
+                                    )
+                                )
+                                formatted_result += f"Preserved content from {num_preserved_urls} other URLs\n"
+                            else:
+                                # No other documents to preserve, just create from new docs
+                                new_vector_store = FAISS.from_documents(
+                                    split_docs, embeddings
+                                )
+                                formatted_result += "\n## Content Replacement\n"
+                                formatted_result += f"Created new index with content from: {url}\n"
+                        except Exception as e:
+                            logger.warning(
+                                f"Could not preserve other URLs, creating fresh index: {e}"
+                            )
+                            # Fallback: create new store with just the new documents
+                            new_vector_store = FAISS.from_documents(
+                                split_docs, embeddings
+                            )
+                            formatted_result += "\n## Content Replacement\n"
+                            formatted_result += f"Created fresh index with content from: {url}\n"
+                    else:
+                        # No existing store, create new one
+                        new_vector_store = FAISS.from_documents(
+                            split_docs, embeddings
+                        )
+                        formatted_result += "\n## Content Indexing\n"
+                        formatted_result += (
+                            f"Created new index with content from: {url}\n"
+                        )
+
+                    # Save the new vector store
+                    await vs_manager.save_vector_store(
+                        agent_id, new_vector_store, chunk_size, chunk_overlap
                     )
-                    formatted_result += f"- Chunks created: {total_chunks}\n"
+
+                    # Update metadata to track all URLs
+                    # Get existing metadata to preserve other URLs
+                    metadata_key = f"indexed_urls_{agent_id}"
+                    existing_metadata = (
+                        await self.skill_store.get_agent_skill_data(
+                            agent_id, "firecrawl", metadata_key
+                        )
+                    )
+
+                    if existing_metadata and existing_metadata.get("urls"):
+                        # Remove the current URL and add it back (to update timestamp)
+                        existing_urls = [
+                            u for u in existing_metadata["urls"] if u != url
+                        ]
+                        existing_urls.append(url)
+                        updated_metadata = {
+                            "urls": existing_urls,
+                            "document_count": len(existing_urls),
+                            "source_type": "firecrawl_mixed",
+                            "indexed_at": str(len(existing_urls)),
+                        }
+                    else:
+                        # Create new metadata
+                        updated_metadata = metadata_manager.create_url_metadata(
+                            [url], [document], "firecrawl_scrape"
+                        )
+
+                    await metadata_manager.update_metadata(
+                        agent_id, updated_metadata
+                    )
+
+                    formatted_result += "\n## Content Indexing (REPLACE MODE)\n"
+                    formatted_result += "Successfully REPLACED indexed content in vector store:\n"
+                    formatted_result += f"- Chunks created: {len(split_docs)}\n"
                     formatted_result += f"- Chunk size: {chunk_size}\n"
                     formatted_result += f"- Chunk overlap: {chunk_overlap}\n"
-                    formatted_result += f"- Content merged with existing: {'Yes' if was_merged else 'No'}\n"
+                    formatted_result += (
+                        "- Previous content for this URL: REPLACED\n"
+                    )
                     formatted_result += "Use the 'firecrawl_query_indexed_content' skill to search this content.\n"
 
                     logger.info(
-                        f"firecrawl_scrape: Successfully indexed {url} with {total_chunks} chunks"
+                        f"firecrawl_scrape: Successfully replaced content for {url} with {len(split_docs)} chunks"
                     )
                 else:
                     formatted_result += "\n## Content Indexing\n"
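
The heart of the REPLACE behavior above is three steps: dump the FAISS docstore, drop chunks whose `source` metadata matches the re-scraped URL, and rebuild the index from the survivors plus the fresh chunks. A runnable miniature of just that step, with `FakeEmbeddings` standing in for the OpenAI embeddings the skill actually uses:

```python
from langchain_community.embeddings import FakeEmbeddings  # stand-in for OpenAI embeddings
from langchain_community.vectorstores import FAISS
from langchain_core.documents import Document

embeddings = FakeEmbeddings(size=32)

# Existing index holds chunks from two URLs
store = FAISS.from_documents(
    [
        Document(page_content="stale page A", metadata={"source": "https://a.example"}),
        Document(page_content="page B", metadata={"source": "https://b.example"}),
    ],
    embeddings,
)

url = "https://a.example"
fresh_chunks = [Document(page_content="fresh page A", metadata={"source": url})]

# Same strategy as the diff: read the raw docstore, keep other URLs, rebuild
all_docs = list(store.docstore._dict.values())
preserved = [d for d in all_docs if d.metadata.get("source") != url]
store = FAISS.from_documents(preserved + fresh_chunks, embeddings)

print(sorted(d.page_content for d in store.docstore._dict.values()))
# ['fresh page A', 'page B'] -- old chunks for the URL are gone, other URLs kept
```
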
intentkit/skills/supabase/base.py CHANGED
@@ -3,8 +3,9 @@ from typing import Type
 from langchain_core.tools import ToolException
 from pydantic import BaseModel, Field
 
+from intentkit.abstracts.graph import AgentContext
 from intentkit.abstracts.skill import SkillStoreABC
-from intentkit.skills.base import IntentKitSkill, SkillContext
+from intentkit.skills.base import IntentKitSkill
 
 
 class SupabaseBaseTool(IntentKitSkill):
@@ -21,9 +22,7 @@ class SupabaseBaseTool(IntentKitSkill):
     def category(self) -> str:
         return "supabase"
 
-    def get_supabase_config(
-        self, config: dict, context: SkillContext
-    ) -> tuple[str, str]:
+    def get_supabase_config(self, context: AgentContext) -> tuple[str, str]:
         """Get Supabase URL and key from config.
 
         Args:
@@ -36,6 +35,7 @@ class SupabaseBaseTool(IntentKitSkill):
         Raises:
             ValueError: If required config is missing
         """
+        config = context.agent.skill_config(self.category)
         supabase_url = config.get("supabase_url")
 
         # Use public_key for public operations if available, otherwise fall back to supabase_key
@@ -52,7 +52,7 @@ class SupabaseBaseTool(IntentKitSkill):
 
         return supabase_url, supabase_key
 
-    def validate_table_access(self, table: str, context: SkillContext) -> None:
+    def validate_table_access(self, table: str, context: AgentContext) -> None:
         """Validate if the table can be accessed for write operations in public mode.
 
         Args:
@@ -66,8 +66,10 @@ class SupabaseBaseTool(IntentKitSkill):
         if context.is_private:
             return
 
+        config = context.agent.skill_config(self.category)
+
         # In public mode, check if table is in allowed list
-        public_write_tables = context.config.get("public_write_tables", "")
+        public_write_tables = config.get("public_write_tables", "")
         if not public_write_tables:
             return
 
intentkit/skills/supabase/delete_data.py CHANGED
@@ -45,12 +45,11 @@ class SupabaseDeleteData(SupabaseBaseTool):
     ):
         try:
             context = self.get_context()
-            skill_config = context.agent.skill_config(self.category)
 
             # Validate table access for public mode
             self.validate_table_access(table, context)
 
-            supabase_url, supabase_key = self.get_supabase_config(skill_config, context)
+            supabase_url, supabase_key = self.get_supabase_config(context)
 
             # Create Supabase client
             supabase: Client = create_client(supabase_url, supabase_key)
intentkit/skills/supabase/fetch_data.py CHANGED
@@ -60,8 +60,7 @@ class SupabaseFetchData(SupabaseBaseTool):
     ):
         try:
             context = self.get_context()
-            skill_config = context.agent.skill_config(self.category)
-            supabase_url, supabase_key = self.get_supabase_config(skill_config, context)
+            supabase_url, supabase_key = self.get_supabase_config(context)
 
             # Create Supabase client
             supabase: Client = create_client(supabase_url, supabase_key)
intentkit/skills/supabase/insert_data.py CHANGED
@@ -45,12 +45,11 @@ class SupabaseInsertData(SupabaseBaseTool):
     ):
         try:
             context = self.get_context()
-            skill_config = context.agent.skill_config(self.category)
 
             # Validate table access for public mode
             self.validate_table_access(table, context)
 
-            supabase_url, supabase_key = self.get_supabase_config(skill_config, context)
+            supabase_url, supabase_key = self.get_supabase_config(context)
 
             # Create Supabase client
             supabase: Client = create_client(supabase_url, supabase_key)
intentkit/skills/supabase/invoke_function.py CHANGED
@@ -44,8 +44,7 @@ class SupabaseInvokeFunction(SupabaseBaseTool):
     ):
         try:
             context = self.get_context()
-            skill_config = context.agent.skill_config(self.category)
-            supabase_url, supabase_key = self.get_supabase_config(skill_config, context)
+            supabase_url, supabase_key = self.get_supabase_config(context)
 
             # Create Supabase client
             supabase: Client = create_client(supabase_url, supabase_key)
intentkit/skills/supabase/update_data.py CHANGED
@@ -48,12 +48,11 @@ class SupabaseUpdateData(SupabaseBaseTool):
     ):
         try:
             context = self.get_context()
-            skill_config = context.agent.skill_config(self.category)
 
             # Validate table access for public mode
             self.validate_table_access(table, context)
 
-            supabase_url, supabase_key = self.get_supabase_config(skill_config, context)
+            supabase_url, supabase_key = self.get_supabase_config(context)
 
             # Create Supabase client
             supabase: Client = create_client(supabase_url, supabase_key)
intentkit/skills/supabase/upsert_data.py CHANGED
@@ -50,12 +50,11 @@ class SupabaseUpsertData(SupabaseBaseTool):
     ):
         try:
             context = self.get_context()
-            skill_config = context.agent.skill_config(self.category)
 
             # Validate table access for public mode
             self.validate_table_access(table, context)
 
-            supabase_url, supabase_key = self.get_supabase_config(skill_config, context)
+            supabase_url, supabase_key = self.get_supabase_config(context)
 
             # Create Supabase client
             supabase: Client = create_client(supabase_url, supabase_key)
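
All six Supabase tool files shrink the same way: the credential lookup moves into the base class, which now derives the per-category config from the context itself. Side by side:

```python
# Before (0.6.19): every call site fetched the config dict and passed it along.
skill_config = context.agent.skill_config(self.category)
supabase_url, supabase_key = self.get_supabase_config(skill_config, context)

# After (0.6.21): get_supabase_config reads context.agent.skill_config(self.category)
# internally, so call sites only hand over the AgentContext.
supabase_url, supabase_key = self.get_supabase_config(context)
```
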
intentkit/skills/xmtp/transfer.py CHANGED
@@ -1,6 +1,7 @@
 from typing import List, Optional, Tuple, Type
 
 from pydantic import BaseModel, Field
+from web3.exceptions import ContractLogicError
 
 from intentkit.models.chat import ChatMessageAttachment, ChatMessageAttachmentType
 from intentkit.skills.xmtp.base import XmtpBaseTool
@@ -14,9 +15,6 @@ class TransferInput(BaseModel):
     amount: str = Field(
         description="The amount to transfer (as string to handle large numbers)"
     )
-    decimals: int = Field(
-        description="Number of decimal places for the token (18 for ETH, varies for ERC20 tokens)"
-    )
     currency: str = Field(description="Currency symbol (e.g., 'ETH', 'USDC', 'DAI')")
     token_contract_address: Optional[str] = Field(
         default=None,
@@ -44,7 +42,6 @@ class XmtpTransfer(XmtpBaseTool):
         from_address: str,
         to_address: str,
         amount: str,
-        decimals: int,
         currency: str,
         token_contract_address: Optional[str],
     ) -> Tuple[str, List[ChatMessageAttachment]]:
@@ -54,10 +51,8 @@ class XmtpTransfer(XmtpBaseTool):
             from_address: The sender address
             to_address: The recipient address
             amount: Amount to transfer
-            decimals: Token decimals
             currency: Currency symbol
             token_contract_address: Token contract address (None for ETH)
-            config: LangChain runnable config
 
         Returns:
             Tuple of (content_message, list_of_attachments)
@@ -80,6 +75,65 @@ class XmtpTransfer(XmtpBaseTool):
 
         chain_id_hex = chain_id_hex_by_network[agent.network_id]
 
+        # Validate token contract and get decimals
+        if token_contract_address:
+            # Validate ERC20 contract and get token info
+            web3 = self.web3_client()
+
+            # ERC20 ABI for symbol() and decimals() functions
+            erc20_abi = [
+                {
+                    "constant": True,
+                    "inputs": [],
+                    "name": "symbol",
+                    "outputs": [{"name": "", "type": "string"}],
+                    "type": "function",
+                },
+                {
+                    "constant": True,
+                    "inputs": [],
+                    "name": "decimals",
+                    "outputs": [{"name": "", "type": "uint8"}],
+                    "type": "function",
+                },
+            ]
+
+            try:
+                # Create contract instance
+                contract = web3.eth.contract(
+                    address=web3.to_checksum_address(token_contract_address),
+                    abi=erc20_abi,
+                )
+
+                # Get token symbol and decimals
+                token_symbol = contract.functions.symbol().call()
+                decimals = contract.functions.decimals().call()
+
+                # Validate symbol matches currency (case insensitive)
+                if token_symbol.upper() != currency.upper():
+                    raise ValueError(
+                        f"Token symbol mismatch: contract symbol is '{token_symbol}', "
+                        f"but currency parameter is '{currency}'"
+                    )
+
+            except ContractLogicError:
+                raise ValueError(
+                    f"Invalid ERC20 contract address: {token_contract_address}. "
+                    "The address does not point to a valid ERC20 token contract."
+                )
+            except Exception as e:
+                raise ValueError(
+                    f"Failed to validate ERC20 contract {token_contract_address}: {str(e)}"
+                )
+        else:
+            # For ETH transfers, use 18 decimals
+            decimals = 18
+            # Validate currency is ETH for native transfers
+            if currency.upper() != "ETH":
+                raise ValueError(
+                    f"For native transfers, currency must be 'ETH', got '{currency}'"
+                )
+
         # Calculate amount in smallest unit (wei for ETH, token units for ERC20)
         amount_int = int(float(amount) * (10**decimals))
 
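One caveat on the final conversion, which is unchanged in this release: `amount` arrives as a string precisely so large values survive transport, but routing it through `float()` reintroduces IEEE-754 rounding for values with many significant digits. A small demonstration; the `Decimal` variant is shown only as a precision-preserving alternative, not what the released code does:

```python
from decimal import Decimal

amount, decimals = "1234567.123456789012345678", 18

# float has ~15-17 significant digits, so the tail of the value gets rounded
print(int(float(amount) * 10**decimals))    # tail digits are wrong: not ...012345678
print(int(Decimal(amount) * 10**decimals))  # exact: 1234567123456789012345678
```
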
intentkit-0.6.19.dev2.dist-info/METADATA → intentkit-0.6.21.dev1.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: intentkit
-Version: 0.6.19.dev2
+Version: 0.6.21.dev1
 Summary: Intent-based AI Agent Platform - Core Package
 Project-URL: Homepage, https://github.com/crestal-network/intentkit
 Project-URL: Repository, https://github.com/crestal-network/intentkit
intentkit-0.6.19.dev2.dist-info/RECORD → intentkit-0.6.21.dev1.dist-info/RECORD CHANGED
@@ -1,4 +1,4 @@
-intentkit/__init__.py,sha256=URY46LF0PzcfF5ekBoFEG5w1RPHfC3Ht-Gw_t45X9Sk,384
+intentkit/__init__.py,sha256=QEhgtBUptnUt512WzzDehT2foX-2-54HSasqRgAVfJw,384
 intentkit/abstracts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 intentkit/abstracts/agent.py,sha256=108gb5W8Q1Sy4G55F2_ZFv2-_CnY76qrBtpIr0Oxxqk,1489
 intentkit/abstracts/api.py,sha256=ZUc24vaQvQVbbjznx7bV0lbbQxdQPfEV8ZxM2R6wZWo,166
@@ -36,7 +36,7 @@ intentkit/models/redis.py,sha256=UoN8jqLREO1VO9_w6m-JhldpP19iEHj4TiGVCMutQW4,370
 intentkit/models/skill.py,sha256=h_2wtKEbYE29TLsMdaSnjfOv6vXY6GwMU_abw-ONX28,16374
 intentkit/models/user.py,sha256=P7l6LOsZmXZ5tDPTczTbqDtDB_MKc_9_ddZkAB2npPk,9288
 intentkit/skills/__init__.py,sha256=WkjmKB4xvy36zyXMroPMf_DTPgQloNS3L73nVnBmuQI,303
-intentkit/skills/base.py,sha256=T02h8M9xZAekLPQdqEefmGQhhwdq6OlMf4IuAcdHngs,6316
+intentkit/skills/base.py,sha256=s3gY5y7u7vEQsplDm-iYfs_hrWaHJRs4Bd-XZvqE-G0,6223
 intentkit/skills/skills.toml,sha256=BCqO6nQVaU3wSpY0Js1xjakLzfttsq6hcHcJbw7q958,2734
 intentkit/skills/acolyt/__init__.py,sha256=qHQXFlqyyx4deRxC0rts_ZEEpDVV-vWXPncqI_ZMOi4,2074
 intentkit/skills/acolyt/acolyt.jpg,sha256=CwrrouzXzYvnHi1rprYruvZqPopG06ppMczEZmZ7D2s,11559
@@ -124,7 +124,7 @@ intentkit/skills/dapplooker/dapplooker_token_data.py,sha256=TtxdK2nRoEi5rxFJhMDD
 intentkit/skills/dapplooker/schema.json,sha256=OdlkQqTEK42Zop4RfnU7pMM8frSzH29ti2MHmNAgq5I,2227
 intentkit/skills/defillama/__init__.py,sha256=Q8s7r6MITlM_cXVb43dYzYP-xtx3HKBGECPDFDR6WUM,10121
 intentkit/skills/defillama/api.py,sha256=_dgNEjMFD_y5Z0y4lh1Vd1JrkRn_wFyaUShKaTf5DtE,11617
-intentkit/skills/defillama/base.py,sha256=KdRlo9U4UlsEZDwyIPQewWIKEq61x4EmnLWubg592xo,4102
+intentkit/skills/defillama/base.py,sha256=IHYwrvN12JE3qdqpWP__4RGzeeNobj5VS-tnCT5yLGs,4139
 intentkit/skills/defillama/defillama.jpeg,sha256=n5u5PgvsUVwX9Q2-Gh_fe6YKxWjBsNEnbKaf2X3yJC0,4629
 intentkit/skills/defillama/schema.json,sha256=cRxah1E_rFndjTT5ukp8aiVWOwyMwLUHfz1T2ux_K_E,10561
 intentkit/skills/defillama/coins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -198,15 +198,15 @@ intentkit/skills/enso/abi/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJW
 intentkit/skills/enso/abi/approval.py,sha256=IsyQLFxzAttocrtCB2PhbgprA7Vqujzpxvg0hJbeJ00,9867
 intentkit/skills/enso/abi/erc20.py,sha256=IScqZhHpMt_eFfYtMXw0-w5jptkAK0xsqqUDjbWdb2s,439
 intentkit/skills/enso/abi/route.py,sha256=ng9U2RSyS5R3d-b0m5ELa4rFpaUDO9HcgSoX9P_wWZo,4746
-intentkit/skills/firecrawl/README.md,sha256=LCi6ju-QO0nXti4y9-ltcF-bwrgXGT7NJpz67vFUcCo,6912
+intentkit/skills/firecrawl/README.md,sha256=OP5rCC5aNx9A4YjgotZB-JFdBR_0qHiWmYLuA52a8Tw,7366
 intentkit/skills/firecrawl/__init__.py,sha256=QQ0I5vlUgsLRFqHO17vbq-3ERKL3nzoo2B4MFGH0Igg,3160
 intentkit/skills/firecrawl/base.py,sha256=8BqD3X6RK0RedWU-qsa5qPMpuXWTZ6NbYLSpppFK_EU,1334
 intentkit/skills/firecrawl/clear.py,sha256=mfzQg8e6sbCwSzJGN_Lqfgxt-0pvtH_dBtNSJpMQA5A,2830
 intentkit/skills/firecrawl/crawl.py,sha256=lhySK1TbxGcLAXQi1zvrp4Zdo5ghhBFvxc4mFMl5LoI,18278
 intentkit/skills/firecrawl/firecrawl.png,sha256=6GoGlIMYuIDo-TqMlZbD4QYkmxvQ7krqAa5MANumJqk,5065
 intentkit/skills/firecrawl/query.py,sha256=LZzIy-LmqyEa8cZoBm-Eoen6GRy3NJxfuQcGi54Hwp0,4364
-intentkit/skills/firecrawl/schema.json,sha256=3LfZPS-mdKNh8r7IQ-oAMFAq_xS5dVs9sV8PXeEUh6o,4439
-intentkit/skills/firecrawl/scrape.py,sha256=P2Pwbi5l6bbN1S8akwwr9dhtUHw20UBHdN0c2B5J9Rs,13642
+intentkit/skills/firecrawl/schema.json,sha256=q3ynbCO1NDidHZd3Nh7TNZ6lCv6y26XW7WBrYlj-JM0,4513
+intentkit/skills/firecrawl/scrape.py,sha256=2axmz5hZVnNGvTPTi0r0WAN4MoYNQZzOFtMZd5pRgcg,20704
 intentkit/skills/firecrawl/utils.py,sha256=Ot_vEg4Z30_BY3Xbh59gb_Tu17tSCmytRw49RGAzZ88,10093
 intentkit/skills/github/README.md,sha256=SzYGJ9qSPaZl68iD8AQJGKTMLv0keQZesnSK-VhrAfs,1802
 intentkit/skills/github/__init__.py,sha256=Vva9jMtACSM_cZXy5JY0h6Q1ejR1jm-Xu3Q6PwyB72o,1471
@@ -291,15 +291,15 @@ intentkit/skills/slack/schema.json,sha256=zaWSka1GM6_X-xNQBeIAn8lovskZo78stklBoH
 intentkit/skills/slack/send_message.py,sha256=esspr3NygCVa9rEYULWdVnP2jFnLH3CxepI8R3bsC7g,2507
 intentkit/skills/slack/slack.jpg,sha256=b_tlvObAfE2KL-D9jhZQ8qAlc1UnXStvF3TENqFtTNE,10818
 intentkit/skills/supabase/__init__.py,sha256=CCdOpA4g27MxaUrYSMh8KgjHez7De0PNtlscoPX7Hh8,3417
-intentkit/skills/supabase/base.py,sha256=o0SQwbBC_enC6-u2euDpqd4teZKAHNd55V-tAYuasok,2813
-intentkit/skills/supabase/delete_data.py,sha256=0svExLN0ncs4fJNK8zaqTdgHAhFq_rP2VqEkFwcCR4E,3820
-intentkit/skills/supabase/fetch_data.py,sha256=qNzdTU1EbZVsuvQbHEKyc36cK356BB8R4Y2q60SmqRo,4706
-intentkit/skills/supabase/insert_data.py,sha256=F_UoY44LKGWNIjlUqU-7phY0RAmJJAWSm0KwEekLHk8,2200
-intentkit/skills/supabase/invoke_function.py,sha256=GPdvLuTQFIqK3FaZYW0AcO90j0sQeDQJMeJcn1yGRoQ,2493
+intentkit/skills/supabase/base.py,sha256=fY_Dk6yOOWgOgFqUSlN6MqnDDC6Lee37sT5gO83wPN4,2933
+intentkit/skills/supabase/delete_data.py,sha256=nraDtSx40Fu9yB8iBD-0-Dc9LLp4U1W1lb75e1MSSVw,3737
+intentkit/skills/supabase/fetch_data.py,sha256=v67g_-k77SYLSpeWBFZnOoia3uSZApjGR-i0g-jjxmY,4623
+intentkit/skills/supabase/insert_data.py,sha256=v6Hau9dPOoejKMLg4DWQqRQbOBPqHwFq-OlwzwnkzM4,2117
+intentkit/skills/supabase/invoke_function.py,sha256=1km5yswfLRapNcVbXQsSVlt6ZikPGFUOmwnwLj9gLIQ,2410
 intentkit/skills/supabase/schema.json,sha256=cqjo20flg6Xlv6b-2nrsJAbdCMBCJfmlfz8tGFJIlGY,5194
 intentkit/skills/supabase/supabase.svg,sha256=65_80QCtJiKKV4EAuny_xbOD5JlTONEiq9xqO00hDtM,1107
-intentkit/skills/supabase/update_data.py,sha256=tz4vbzl5JtgZNMcgRMjWfhrx1mNMnzG5zJffggQyhGM,3955
-intentkit/skills/supabase/upsert_data.py,sha256=99NbljX3lQ34cFBQuCIVoswIiFMJb2DhO2KrYjzuNa8,2470
+intentkit/skills/supabase/update_data.py,sha256=IOB78dSbP3yrsNuE2DH5z-UztADXpu890myq42c4hQU,3872
+intentkit/skills/supabase/upsert_data.py,sha256=tZOch4jOfXwrk6V-1D6UIWXRRgpgfO9dIXWZLnEJvyw,2387
 intentkit/skills/system/__init__.py,sha256=bqNYJCjLx9p23E21ELLP-T0B_NP0ltzT0TMqsBI-9Bg,3668
 intentkit/skills/system/add_autonomous_task.py,sha256=YnkxBaNLPUEPt7rWWWKFVXugdb_wCcxCpSr1cuCSPio,3164
 intentkit/skills/system/base.py,sha256=Sm4lSNgbxwGK5YimnBfwi3Hc8E1EwSMZIXsCJbIPiLM,700
@@ -401,7 +401,7 @@ intentkit/skills/xmtp/base.py,sha256=85ZEuNLJmI_NmBPkbvDXQrNvJNG8dp9MbcbQYQQ3QZ8
 intentkit/skills/xmtp/price.py,sha256=LqM3tWiW42bYIRqfvsZUvYpG5H5ife3WUhR-pxiS9I8,2648
 intentkit/skills/xmtp/schema.json,sha256=GFJKYPQVAcfiybL1uhAHANYeQUR0JWWxPgPhXW92N0s,3089
 intentkit/skills/xmtp/swap.py,sha256=8YEjfOTS-BtKKuXT1QLedBTM9h4QUF0rVYtLkC7WPG0,8412
-intentkit/skills/xmtp/transfer.py,sha256=qmSIsSrWR-S5JFlBP4YjxudsWlKsCpp-JjDQjYUhdHg,6182
+intentkit/skills/xmtp/transfer.py,sha256=GSo6xJ2RjSbeLsz4jXaod5g6NXnqm2KgEzvMrAs8SgM,8308
 intentkit/skills/xmtp/xmtp.png,sha256=vQzT-71zIb8aPodg-GkGSQbBnjGAPczWGm3es2ZkJe8,6681
 intentkit/utils/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
 intentkit/utils/chain.py,sha256=3GBHuAbXxQr_HlOvkbB2kruYSkweucfxI5u-swXzY40,15135
@@ -411,7 +411,7 @@ intentkit/utils/random.py,sha256=DymMxu9g0kuQLgJUqalvgksnIeLdS-v0aRk5nQU0mLI,452
 intentkit/utils/s3.py,sha256=9trQNkKQ5VgxWsewVsV8Y0q_pXzGRvsCYP8xauyUYkg,8549
 intentkit/utils/slack_alert.py,sha256=s7UpRgyzLW7Pbmt8cKzTJgMA9bm4EP-1rQ5KXayHu6E,2264
 intentkit/utils/tx.py,sha256=2yLLGuhvfBEY5n_GJ8wmIWLCzn0FsYKv5kRNzw_sLUI,1454
-intentkit-0.6.19.dev2.dist-info/METADATA,sha256=yH0g5MnOWthCWld7D-Xu--mKeaavKQdWxj7gLSinejo,6414
-intentkit-0.6.19.dev2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-intentkit-0.6.19.dev2.dist-info/licenses/LICENSE,sha256=Bln6DhK-LtcO4aXy-PBcdZv2f24MlJFm_qn222biJtE,1071
-intentkit-0.6.19.dev2.dist-info/RECORD,,
+intentkit-0.6.21.dev1.dist-info/METADATA,sha256=jVCR2IIIJHNVxdwBySRRt_shQxkTXgOp_Nykh3Xr37U,6414
+intentkit-0.6.21.dev1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+intentkit-0.6.21.dev1.dist-info/licenses/LICENSE,sha256=Bln6DhK-LtcO4aXy-PBcdZv2f24MlJFm_qn222biJtE,1071
+intentkit-0.6.21.dev1.dist-info/RECORD,,