intentkit-0.6.19.dev1-py3-none-any.whl → intentkit-0.6.20.dev1-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of intentkit was flagged as possibly problematic.

intentkit/__init__.py CHANGED
@@ -3,7 +3,7 @@
  A powerful platform for building AI agents with blockchain and cryptocurrency capabilities.
  """
 
- __version__ = "0.6.19-dev1"
+ __version__ = "0.6.20-dev.1"
  __author__ = "hyacinthus"
  __email__ = "hyacinthus@gmail.com"
 
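Although the `__version__` string changes shape ("0.6.19-dev1" → "0.6.20-dev.1"), both spellings normalize to the canonical PEP 440 dev-release forms used elsewhere in the wheel. A quick check with the `packaging` library (an assumption for illustration; it is not a dependency shown in this diff):

```python
from packaging.version import Version

# Both non-canonical spellings normalize to the PEP 440 forms
# that appear in METADATA and the dist-info paths.
assert str(Version("0.6.19-dev1")) == "0.6.19.dev1"
assert str(Version("0.6.20-dev.1")) == "0.6.20.dev1"
```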
intentkit/clients/cdp.py CHANGED
@@ -144,6 +144,17 @@ class CdpClient:
          wallet_secret=wallet_secret,
      )
      self._wallet_provider = CdpEvmServerWalletProvider(self._wallet_provider_config)
+     # hack for cdp bug
+     if network_id == "base-mainnet":
+         self._wallet_provider._network.network_id = "base"
+     elif network_id == "arbitrum-mainnet":
+         self._wallet_provider._network.network_id = "arbitrum"
+     elif network_id == "optimism-mainnet":
+         self._wallet_provider._network.network_id = "optimism"
+     elif network_id == "polygon-mainnet":
+         self._wallet_provider._network.network_id = "polygon"
+     elif network_id == "ethereum-mainnet":
+         self._wallet_provider._network.network_id = "ethereum"
      return self._wallet_provider
 
  async def get_account(self) -> EvmServerAccount:
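The branch ladder above rewrites CDP's `*-mainnet` network ids to the bare names the wallet provider expects. A table-driven equivalent, shown only as a sketch (the hypothetical helper below relies on the same private `_network.network_id` field the diff touches):

```python
# Hypothetical helper equivalent to the if/elif chain above.
_MAINNET_ALIASES = {
    "base-mainnet": "base",
    "arbitrum-mainnet": "arbitrum",
    "optimism-mainnet": "optimism",
    "polygon-mainnet": "polygon",
    "ethereum-mainnet": "ethereum",
}

def normalized_network_id(network_id: str) -> str:
    # Testnets like "base-sepolia" fall through unchanged.
    return _MAINNET_ALIASES.get(network_id, network_id)
```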
intentkit/core/credit.py CHANGED
@@ -150,6 +150,9 @@ async def recharge(
      + user_account.reward_credits,
      base_amount=amount,
      base_original_amount=amount,
+     base_free_amount=Decimal("0"),  # No free credits involved in base amount
+     base_reward_amount=Decimal("0"),  # No reward credits involved in base amount
+     base_permanent_amount=amount,  # All base amount is permanent for recharge
      permanent_amount=amount,  # Set permanent_amount since this is a permanent credit
      free_amount=Decimal("0"),  # No free credits involved
      reward_amount=Decimal("0"),  # No reward credits involved
@@ -277,6 +280,11 @@ async def reward(
      + user_account.reward_credits,
      base_amount=amount,
      base_original_amount=amount,
+     base_free_amount=Decimal("0"),  # No free credits involved in base amount
+     base_reward_amount=amount,  # All base amount is reward for reward events
+     base_permanent_amount=Decimal(
+         "0"
+     ),  # No permanent credits involved in base amount
      reward_amount=amount,  # Set reward_amount since this is a reward credit
      free_amount=Decimal("0"),  # No free credits involved
      permanent_amount=Decimal("0"),  # No permanent credits involved
@@ -451,6 +459,9 @@ async def adjustment(
      + user_account.reward_credits,
      base_amount=abs_amount,
      base_original_amount=abs_amount,
+     base_free_amount=free_amount,
+     base_reward_amount=reward_amount,
+     base_permanent_amount=permanent_amount,
      free_amount=free_amount,
      reward_amount=reward_amount,
      permanent_amount=permanent_amount,
@@ -984,6 +995,9 @@ async def expense_message(
      + user_account.reward_credits,
      base_amount=base_amount,
      base_original_amount=base_original_amount,
+     base_free_amount=base_free_amount,
+     base_reward_amount=base_reward_amount,
+     base_permanent_amount=base_permanent_amount,
      base_llm_amount=base_llm_amount,
      fee_platform_amount=fee_platform_amount,
      fee_platform_free_amount=fee_platform_free_amount,
@@ -1415,6 +1429,9 @@ async def expense_skill(
      base_amount=skill_cost_info.base_amount,
      base_original_amount=skill_cost_info.base_original_amount,
      base_skill_amount=skill_cost_info.base_skill_amount,
+     base_free_amount=base_free_amount,
+     base_reward_amount=base_reward_amount,
+     base_permanent_amount=base_permanent_amount,
      fee_platform_amount=skill_cost_info.fee_platform_amount,
      fee_platform_free_amount=fee_platform_free_amount,
      fee_platform_reward_amount=fee_platform_reward_amount,
@@ -1589,6 +1606,9 @@ async def refill_free_credits_for_account(
      + updated_account.reward_credits,
      base_amount=amount_to_add,
      base_original_amount=amount_to_add,
+     base_free_amount=amount_to_add,
+     base_reward_amount=Decimal("0"),
+     base_permanent_amount=Decimal("0"),
      free_amount=amount_to_add,  # Set free_amount since this is a free credit refill
      reward_amount=Decimal("0"),  # No reward credits involved
      permanent_amount=Decimal("0"),  # No permanent credits involved
@@ -1864,6 +1884,9 @@ async def expense_summarize(
      base_amount=base_amount,
      base_original_amount=base_original_amount,
      base_llm_amount=base_llm_amount,
+     base_free_amount=base_free_amount,
+     base_reward_amount=base_reward_amount,
+     base_permanent_amount=base_permanent_amount,
      fee_platform_amount=fee_platform_amount,
      fee_platform_free_amount=fee_platform_free_amount,
      fee_platform_reward_amount=fee_platform_reward_amount,
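Across all of these call sites the pattern is the same: each event's base amount is now broken down into free, reward, and permanent components that should sum back to `base_amount`. A minimal sketch of that presumed invariant (the check itself is an assumption, not code from the package):

```python
from decimal import Decimal

def check_base_breakdown(
    base_amount: Decimal,
    base_free_amount: Decimal,
    base_reward_amount: Decimal,
    base_permanent_amount: Decimal,
) -> None:
    # Assumed invariant: the three pool-level amounts partition base_amount.
    total = base_free_amount + base_reward_amount + base_permanent_amount
    if total != base_amount:
        raise ValueError("base_* breakdown does not sum to base_amount")

# A recharge books everything as permanent credits:
check_base_breakdown(Decimal("10"), Decimal("0"), Decimal("0"), Decimal("10"))
# A free-credit refill books everything as free credits:
check_base_breakdown(Decimal("5"), Decimal("5"), Decimal("0"), Decimal("0"))
```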
intentkit/models/agent_schema.json CHANGED
@@ -685,7 +685,7 @@
  "network_id": {
      "title": "Default Network",
      "type": "string",
-     "description": "Default Network",
+     "description": "Default Network, please note that some CDP Wallet native skills like swap only support the base network.",
      "default": "base-mainnet",
      "enum": [
          "ethereum-mainnet",
@@ -697,8 +697,7 @@
          "arbitrum-mainnet",
          "arbitrum-sepolia",
          "optimism-mainnet",
-         "optimism-sepolia",
-         "solana"
+         "optimism-sepolia"
      ],
      "x-group": "onchain"
  }
@@ -715,7 +714,7 @@
  "readonly_wallet_address": {
      "title": "Readonly Wallet Address",
      "type": "string",
-     "description": "Wallet address for readonly wallet provider",
+     "description": "Set the wallet address as agent wallet, then it can only be analyzed by the agent.",
      "maxLength": 100,
      "x-group": "onchain"
  }
intentkit/models/chat.py CHANGED
@@ -503,18 +503,17 @@ class ChatMessage(ChatMessageCreate):
      def sanitize_privacy(self) -> "ChatMessage":
          """Remove sensitive information from the chat message.
 
-         This method clears the message content and removes parameters and response
+         This method clears the skill parameters and response
          from skill calls while preserving the structure and metadata.
 
          Returns:
              ChatMessage: A new ChatMessage instance with sensitive data removed
          """
+         if self.author_type != AuthorType.SKILL:
+             return self
          # Create a copy of the current message
          sanitized_data = self.model_dump()
 
-         # Clear the message content
-         sanitized_data["message"] = ""
-
          # Clear sensitive data from skill calls
          if sanitized_data.get("skill_calls"):
              for skill_call in sanitized_data["skill_calls"]:
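The effect of this change: only skill-authored messages are scrubbed, and the message text itself is no longer blanked. A condensed sketch of the new flow (the stand-in enum and the `parameters`/`response` keys inside skill calls are assumptions for illustration):

```python
from enum import Enum

class AuthorType(str, Enum):
    # Stand-in for the real enum in intentkit.models.chat.
    AGENT = "agent"
    SKILL = "skill"

def sanitize_privacy(message):
    if message.author_type != AuthorType.SKILL:
        return message  # non-skill messages now pass through untouched
    data = message.model_dump()  # pydantic copy, as in the diff
    for skill_call in data.get("skill_calls") or []:
        skill_call.pop("parameters", None)  # assumed sensitive keys
        skill_call.pop("response", None)
    return type(message)(**data)
```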
intentkit/models/credit.py CHANGED
@@ -520,6 +520,7 @@ class CreditAccount(BaseModel):
      balance_after=free_quota,
      base_amount=free_quota,
      base_original_amount=free_quota,
+     base_free_amount=free_quota,
      free_amount=free_quota,  # Set free_amount since this is a free credit refill
      reward_amount=Decimal("0"),  # No reward credits involved
      permanent_amount=Decimal("0"),  # No permanent credits involved
@@ -805,6 +806,21 @@ class CreditEventTable(Base):
          default=0,
          nullable=True,
      )
+     base_free_amount = Column(
+         Numeric(22, 4),
+         default=0,
+         nullable=True,
+     )
+     base_reward_amount = Column(
+         Numeric(22, 4),
+         default=0,
+         nullable=True,
+     )
+     base_permanent_amount = Column(
+         Numeric(22, 4),
+         default=0,
+         nullable=True,
+     )
      fee_platform_amount = Column(
          Numeric(22, 4),
          default=0,
@@ -979,6 +995,18 @@ class CreditEvent(BaseModel):
          Optional[Decimal],
          Field(default=Decimal("0"), description="Base skill cost amount"),
      ]
+     base_free_amount: Annotated[
+         Optional[Decimal],
+         Field(default=Decimal("0"), description="Base free credit amount"),
+     ]
+     base_reward_amount: Annotated[
+         Optional[Decimal],
+         Field(default=Decimal("0"), description="Base reward credit amount"),
+     ]
+     base_permanent_amount: Annotated[
+         Optional[Decimal],
+         Field(default=Decimal("0"), description="Base permanent credit amount"),
+     ]
      fee_platform_amount: Annotated[
          Optional[Decimal],
          Field(default=Decimal("0"), description="Platform fee amount"),
@@ -1073,6 +1101,9 @@ class CreditEvent(BaseModel):
          "base_original_amount",
          "base_llm_amount",
          "base_skill_amount",
+         "base_free_amount",
+         "base_reward_amount",
+         "base_permanent_amount",
          "fee_platform_amount",
          "fee_platform_free_amount",
          "fee_platform_reward_amount",
intentkit/skills/firecrawl/README.md CHANGED
@@ -5,18 +5,22 @@ The Firecrawl skills provide advanced web scraping and content indexing capabili
  ## Skills Overview
 
  ### 1. firecrawl_scrape
- Scrapes a single webpage and optionally indexes the content for future querying.
+ Scrapes a single webpage and REPLACES any existing indexed content for that URL, preventing duplicates.
 
  **Parameters:**
  - `url` (required): The URL to scrape
- - `formats` (optional): Output formats - markdown, html, rawHtml, screenshot, links, extract (default: ["markdown"])
+ - `formats` (optional): Output formats - markdown, html, rawHtml, screenshot, links, json (default: ["markdown"])
+ - `only_main_content` (optional): Extract only main content (default: true)
  - `include_tags` (optional): HTML tags to include (e.g., ["h1", "h2", "p"])
  - `exclude_tags` (optional): HTML tags to exclude
- - `only_main_content` (optional): Extract only main content (default: true)
+ - `wait_for` (optional): Wait time in milliseconds before scraping
+ - `timeout` (optional): Maximum timeout in milliseconds (default: 30000)
  - `index_content` (optional): Whether to index content for querying (default: true)
  - `chunk_size` (optional): Size of text chunks for indexing (default: 1000)
  - `chunk_overlap` (optional): Overlap between chunks (default: 200)
 
+ **Use Case:** Use this when you want to refresh/update content from a URL that was previously scraped, ensuring no duplicate or stale content remains.
+
  ### 2. firecrawl_crawl
  Crawls multiple pages from a website and indexes all content.
 
@@ -158,8 +162,9 @@ Prompt: "Use firecrawl_scrape to scrape https://example.com and index the conten
  ### Documentation Indexing
  ```
  1. Scrape main documentation page
- 2. Crawl related documentation sections
- 3. Query for specific technical information
+ 2. Crawl related documentation sections
+ 3. Use scrape again to update changed pages (replaces old content)
+ 4. Query for specific technical information
  ```
 
  ### Competitive Analysis
@@ -205,6 +210,7 @@ Prompt: "Use firecrawl_scrape to scrape https://example.com and index the conten
  - **PDF Support**: Can scrape and index PDF documents
  - **Intelligent Chunking**: Optimized text splitting for better search
  - **Independent Storage**: Uses its own dedicated vector store for Firecrawl content
+ - **Content Replacement**: Replace mode prevents duplicate/stale content
  - **Metadata Rich**: Includes source URLs, timestamps, and content types
  - **Semantic Search**: Uses OpenAI embeddings for intelligent querying
  - **Batch Processing**: Efficient handling of multiple pages
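For reference, a hypothetical argument set exercising the firecrawl_scrape parameters documented in the README above (the dict shape is illustrative only, not the skill's exact invocation format):

```python
scrape_args = {
    "url": "https://example.com/docs",
    "formats": ["markdown"],
    "only_main_content": True,
    "wait_for": 1000,        # ms to wait before scraping
    "timeout": 30000,        # ms, the documented default
    "index_content": True,   # re-scraping the same URL replaces old chunks
    "chunk_size": 1000,
    "chunk_overlap": 200,
}
```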
intentkit/skills/firecrawl/schema.json CHANGED
@@ -34,7 +34,7 @@
      "Agent Owner + All Users",
      "Agent Owner Only"
  ],
- "description": "Scrape single web pages and extract content in various formats (markdown, HTML, JSON, etc.). Handles JavaScript-rendered content, PDFs, and dynamic websites.",
+ "description": "Scrape single web pages and REPLACE any existing indexed content for that URL. Unlike regular scrape, this prevents duplicate content when re-scraping the same page. Use this to refresh/update content from a previously scraped URL.",
  "default": "private"
  },
  "firecrawl_crawl": {
intentkit/skills/firecrawl/scrape.py CHANGED
@@ -62,10 +62,11 @@ class FirecrawlScrapeInput(BaseModel):
 
 
  class FirecrawlScrape(FirecrawlBaseTool):
-     """Tool for scraping web pages using Firecrawl.
+     """Tool for scraping web pages using Firecrawl with REPLACE behavior.
 
-     This tool uses Firecrawl's API to scrape web pages and convert them into clean,
-     LLM-ready formats like markdown, HTML, or structured JSON data.
+     This tool uses Firecrawl's API to scrape web pages and REPLACES any existing
+     indexed content for the same URL instead of appending to it. This prevents
+     duplicate content when re-scraping the same page.
 
      Attributes:
          name: The name of the tool.
@@ -75,10 +76,10 @@ class FirecrawlScrape(FirecrawlBaseTool):
 
      name: str = "firecrawl_scrape"
      description: str = (
-         "Scrape a single web page and extract its content in various formats (markdown, HTML, JSON, etc.). "
+         "Scrape a single web page and REPLACE any existing indexed content for that URL. "
+         "Unlike regular scrape, this tool removes old content before adding new content, preventing duplicates. "
          "This tool can handle JavaScript-rendered content, PDFs, and dynamic websites. "
-         "Optionally indexes the content for later querying using the firecrawl_query_indexed_content tool. "
-         "Use this when you need to extract clean, structured content from a specific URL."
+         "Use this when you want to refresh/update content from a URL that was previously scraped."
      )
      args_schema: Type[BaseModel] = FirecrawlScrapeInput
 
@@ -187,7 +188,7 @@ class FirecrawlScrape(FirecrawlBaseTool):
      result_data = data.get("data", {})
 
      # Format the results based on requested formats
-     formatted_result = f"Successfully scraped: {url}\n\n"
+     formatted_result = f"Successfully scraped (REPLACE mode): {url}\n\n"
 
      if "markdown" in formats and result_data.get("markdown"):
          formatted_result += "## Markdown Content\n"
@@ -236,13 +237,16 @@ class FirecrawlScrape(FirecrawlBaseTool):
          formatted_result += f"Language: {metadata['language']}\n"
      formatted_result += "\n"
 
-     # Index content if requested
+     # Index content if requested - REPLACE MODE
      if index_content and result_data.get("markdown"):
          try:
-             # Import indexing utilities from firecrawl utils
+             # Import indexing utilities
+             from langchain_community.vectorstores import FAISS
+
              from intentkit.skills.firecrawl.utils import (
+                 FirecrawlDocumentProcessor,
                  FirecrawlMetadataManager,
-                 index_documents,
+                 FirecrawlVectorStoreManager,
              )
 
              # Create document from scraped content
@@ -261,38 +265,149 @@ class FirecrawlScrape(FirecrawlBaseTool):
              # Get agent ID for indexing
              agent_id = context.agent_id
              if agent_id:
-                 # Index the document
-                 total_chunks, was_merged = await index_documents(
-                     [document],
-                     agent_id,
-                     self.skill_store,
-                     chunk_size,
-                     chunk_overlap,
-                 )
-
-                 # Update metadata
+                 # Initialize managers
+                 vs_manager = FirecrawlVectorStoreManager(self.skill_store)
                  metadata_manager = FirecrawlMetadataManager(
                      self.skill_store
                  )
-                 new_metadata = metadata_manager.create_url_metadata(
-                     [url], [document], "firecrawl_scrape"
+
+                 # Load existing vector store
+                 existing_vector_store = await vs_manager.load_vector_store(
+                     agent_id
                  )
-                 await metadata_manager.update_metadata(
-                     agent_id, new_metadata
+
+                 # Split the new document into chunks
+                 split_docs = FirecrawlDocumentProcessor.split_documents(
+                     [document], chunk_size, chunk_overlap
                  )
 
-                 formatted_result += "\n## Content Indexing\n"
-                 formatted_result += (
-                     "Successfully indexed content into vector store:\n"
+                 # Create embeddings
+                 embeddings = vs_manager.create_embeddings()
+
+                 if existing_vector_store:
+                     # Get all existing documents and filter out those from the same URL
+                     try:
+                         # Try to access documents directly if available
+                         if hasattr(
+                             existing_vector_store, "docstore"
+                         ) and hasattr(
+                             existing_vector_store.docstore, "_dict"
+                         ):
+                             # Access FAISS documents directly
+                             all_docs = list(
+                                 existing_vector_store.docstore._dict.values()
+                             )
+                         else:
+                             # Fallback: use a reasonable k value for similarity search
+                             # Use a dummy query to retrieve documents
+                             all_docs = existing_vector_store.similarity_search(
+                                 "dummy",  # Use a dummy query instead of empty string
+                                 k=1000,  # Use reasonable upper bound
+                             )
+
+                         # Filter out documents from the same URL
+                         preserved_docs = [
+                             doc
+                             for doc in all_docs
+                             if doc.metadata.get("source") != url
+                         ]
+
+                         logger.info(
+                             f"firecrawl_scrape: Preserving {len(preserved_docs)} docs from other URLs, "
+                             f"replacing content from {url}"
+                         )
+
+                         # Create new vector store with preserved docs + new docs
+                         if preserved_docs:
+                             # Combine preserved and new documents
+                             all_documents = preserved_docs + split_docs
+                             new_vector_store = FAISS.from_documents(
+                                 all_documents, embeddings
+                             )
+                             formatted_result += "\n## Content Replacement\n"
+                             formatted_result += f"Replaced existing content for URL: {url}\n"
+                             num_preserved_urls = len(
+                                 set(
+                                     doc.metadata.get("source", "")
+                                     for doc in preserved_docs
+                                 )
+                             )
+                             formatted_result += f"Preserved content from {num_preserved_urls} other URLs\n"
+                         else:
+                             # No other documents to preserve, just create from new docs
+                             new_vector_store = FAISS.from_documents(
+                                 split_docs, embeddings
+                             )
+                             formatted_result += "\n## Content Replacement\n"
+                             formatted_result += f"Created new index with content from: {url}\n"
+                     except Exception as e:
+                         logger.warning(
+                             f"Could not preserve other URLs, creating fresh index: {e}"
+                         )
+                         # Fallback: create new store with just the new documents
+                         new_vector_store = FAISS.from_documents(
+                             split_docs, embeddings
+                         )
+                         formatted_result += "\n## Content Replacement\n"
+                         formatted_result += f"Created fresh index with content from: {url}\n"
+                 else:
+                     # No existing store, create new one
+                     new_vector_store = FAISS.from_documents(
+                         split_docs, embeddings
+                     )
+                     formatted_result += "\n## Content Indexing\n"
+                     formatted_result += (
+                         f"Created new index with content from: {url}\n"
+                     )
+
+                 # Save the new vector store
+                 await vs_manager.save_vector_store(
+                     agent_id, new_vector_store, chunk_size, chunk_overlap
                  )
-                 formatted_result += f"- Chunks created: {total_chunks}\n"
+
+                 # Update metadata to track all URLs
+                 # Get existing metadata to preserve other URLs
+                 metadata_key = f"indexed_urls_{agent_id}"
+                 existing_metadata = (
+                     await self.skill_store.get_agent_skill_data(
+                         agent_id, "firecrawl", metadata_key
+                     )
+                 )
+
+                 if existing_metadata and existing_metadata.get("urls"):
+                     # Remove the current URL and add it back (to update timestamp)
+                     existing_urls = [
+                         u for u in existing_metadata["urls"] if u != url
+                     ]
+                     existing_urls.append(url)
+                     updated_metadata = {
+                         "urls": existing_urls,
+                         "document_count": len(existing_urls),
+                         "source_type": "firecrawl_mixed",
+                         "indexed_at": str(len(existing_urls)),
+                     }
+                 else:
+                     # Create new metadata
+                     updated_metadata = metadata_manager.create_url_metadata(
+                         [url], [document], "firecrawl_scrape"
+                     )
+
+                 await metadata_manager.update_metadata(
+                     agent_id, updated_metadata
+                 )
+
+                 formatted_result += "\n## Content Indexing (REPLACE MODE)\n"
+                 formatted_result += "Successfully REPLACED indexed content in vector store:\n"
+                 formatted_result += f"- Chunks created: {len(split_docs)}\n"
                  formatted_result += f"- Chunk size: {chunk_size}\n"
                  formatted_result += f"- Chunk overlap: {chunk_overlap}\n"
-                 formatted_result += f"- Content merged with existing: {'Yes' if was_merged else 'No'}\n"
+                 formatted_result += (
+                     "- Previous content for this URL: REPLACED\n"
+                 )
                  formatted_result += "Use the 'firecrawl_query_indexed_content' skill to search this content.\n"
 
                  logger.info(
-                     f"firecrawl_scrape: Successfully indexed {url} with {total_chunks} chunks"
+                     f"firecrawl_scrape: Successfully replaced content for {url} with {len(split_docs)} chunks"
                  )
              else:
                  formatted_result += "\n## Content Indexing\n"
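Stripped of result formatting and error handling, the replacement step reduces to: pull every document out of the existing FAISS store, drop those whose `source` metadata matches the URL being re-scraped, and rebuild the index from the survivors plus the new chunks. A condensed sketch (the helper name is hypothetical; it leans on the same FAISS-internal `docstore._dict` access the diff uses):

```python
from langchain_community.vectorstores import FAISS

def replace_url_documents(existing_store, new_docs, url, embeddings):
    """Drop old chunks for `url`, keep everything else, rebuild the index."""
    if existing_store is None:
        return FAISS.from_documents(new_docs, embeddings)
    # FAISS keeps its documents in an in-memory docstore dict.
    all_docs = list(existing_store.docstore._dict.values())
    preserved = [d for d in all_docs if d.metadata.get("source") != url]
    return FAISS.from_documents(preserved + new_docs, embeddings)
```

Note the trade-off this accepts: rebuilding the whole index on every re-scrape costs time proportional to the total number of stored documents, in exchange for never accumulating duplicate or stale chunks for a URL.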
intentkit-{0.6.19.dev1 → 0.6.20.dev1}.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: intentkit
- Version: 0.6.19.dev1
+ Version: 0.6.20.dev1
  Summary: Intent-based AI Agent Platform - Core Package
  Project-URL: Homepage, https://github.com/crestal-network/intentkit
  Project-URL: Repository, https://github.com/crestal-network/intentkit
intentkit-{0.6.19.dev1 → 0.6.20.dev1}.dist-info/RECORD RENAMED
@@ -1,4 +1,4 @@
- intentkit/__init__.py,sha256=quyhcKChclMLhjuy8LzOR07Mg7jKRnXvuDEATLIHjyU,384
+ intentkit/__init__.py,sha256=L5N8UBhOoj8vD0NB2G81lATWypQbespoMFUeoZRNITg,385
  intentkit/abstracts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  intentkit/abstracts/agent.py,sha256=108gb5W8Q1Sy4G55F2_ZFv2-_CnY76qrBtpIr0Oxxqk,1489
  intentkit/abstracts/api.py,sha256=ZUc24vaQvQVbbjznx7bV0lbbQxdQPfEV8ZxM2R6wZWo,166
@@ -7,7 +7,7 @@ intentkit/abstracts/graph.py,sha256=sX5hVemXsODvwIYLHufaf-zSXmW97bXRoZuyxYqaEV4,
  intentkit/abstracts/skill.py,sha256=cIJ6BkASD31U1IEkE8rdAawq99w_xsg0lt3oalqa1ZA,5071
  intentkit/abstracts/twitter.py,sha256=cEtP7ygR_b-pHdc9i8kBuyooz1cPoGUGwsBHDpowJyY,1262
  intentkit/clients/__init__.py,sha256=sQ_6_bRC2MPWLPH-skQ3qsEe8ce-dUGL7i8VJOautHg,298
- intentkit/clients/cdp.py,sha256=VaIFzgfqpq5H4bHbFBe8UPabhkwe8s5pge_P5FssGqU,6453
+ intentkit/clients/cdp.py,sha256=_A0QRRi6uPYr_AL26arW-Yofez0JcrEQdfxGCVgC7kM,7038
  intentkit/clients/twitter.py,sha256=Lfa7srHOFnY96SXcElW0jfg7XKS_WliWnXjPZEe6SQc,18976
  intentkit/config/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  intentkit/config/config.py,sha256=jhPr7FzZZHUsLIdyiOcaKeAsZQ8FdEo6yUaiIcvmryo,8879
@@ -15,19 +15,19 @@ intentkit/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  intentkit/core/agent.py,sha256=GIKDn1dTenIHWMRxe-ud7hd1cQaHzbTDdypy5IAgPfU,16658
  intentkit/core/api.py,sha256=WfoaHNquujYJIpNPuTR1dSaaxog0S3X2W4lG9Ehmkm4,3284
  intentkit/core/client.py,sha256=J5K7f08-ucszBKAbn9K3QNOFKIC__7amTbKYii1jFkI,3056
- intentkit/core/credit.py,sha256=kA9h8BcsEOZ57AEvK9idlQqD0vej_EWgeOc-c_DRevw,68341
+ intentkit/core/credit.py,sha256=1aKT3hNLCvcQYjNckMxBsUpdtc4RH_KZQPAfzpOs-DU,69552
  intentkit/core/engine.py,sha256=H4ew1jhn2coMYJ3zR9isM1Y-XbnXNMg91SeDoeXdQ4U,36562
  intentkit/core/node.py,sha256=7h9zgDSd928bzUi3m3EZnKkhbwqlbRAQUr_uz7gKB5Y,8880
  intentkit/core/prompt.py,sha256=a6nogIGZuDt2u2EuDd29DAv73OjCBOn-bZnuqYRvY7A,15804
  intentkit/core/skill.py,sha256=vPK37sDRT9kzkMBymPwqZ5uEdxTTRtb_DfREIeyz-Xw,5788
  intentkit/models/agent.py,sha256=uC5AErdVucaEajKCXAcF6C3VwYRVIhXTIfOBp-n-Xhg,66310
  intentkit/models/agent_data.py,sha256=mVsiK8TziYa1W1ujU1KwI9osIVIeSM7XJEogGRL1WVU,28263
- intentkit/models/agent_schema.json,sha256=vhW6k9sPAU7Wx0VQjhNc-v9-pu0q6JsRK0z3Mgo5W6w,20999
+ intentkit/models/agent_schema.json,sha256=psrYONIzAbiuZB4zzYQFmANP1pw31TV_900TagSYT7o,21109
  intentkit/models/app_setting.py,sha256=iYbW63QD91bt4oEYV3wOXHuRFav2b4VXLwb_StgUQtQ,8230
  intentkit/models/base.py,sha256=o-zRjVrak-f5Jokdvj8BjLm8gcC3yYiYMCTLegwT2lA,185
- intentkit/models/chat.py,sha256=4z5y0Q77XsVABeGMRXnxlY2Ol6gnivTxeMrlO04IB-Q,20494
+ intentkit/models/chat.py,sha256=cDccEHU8nd7Y5uhrHDCuZGwqrRwhqCaeztMiZcemiug,20469
  intentkit/models/conversation.py,sha256=nrbDIw-3GK5BYi_xkI15FLdx4a6SNrFK8wfAGLCsrqk,9032
- intentkit/models/credit.py,sha256=o5jJGX1Hn8vWhkMRzo3eV8aIFEHBFkKtlLl0x5_8rLo,44198
+ intentkit/models/credit.py,sha256=BHX_Ty1u-TlPCDMjnMbogoUtlbcff0tHHVVZLzkrnGY,45090
  intentkit/models/db.py,sha256=nuDX6NEtnfD5YLr2iVpAAXsgHbSpG5diqfLC-PkHsA4,4406
  intentkit/models/db_mig.py,sha256=vT6Tanm-BHC2T7dTztuB1UG494EFBAlHADKsNzR6xaQ,3577
  intentkit/models/generator.py,sha256=lyZu9U9rZUGkqd_QT5SAhay9DY358JJY8EhDSpN8I1M,10298
@@ -198,15 +198,15 @@ intentkit/skills/enso/abi/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJW
  intentkit/skills/enso/abi/approval.py,sha256=IsyQLFxzAttocrtCB2PhbgprA7Vqujzpxvg0hJbeJ00,9867
  intentkit/skills/enso/abi/erc20.py,sha256=IScqZhHpMt_eFfYtMXw0-w5jptkAK0xsqqUDjbWdb2s,439
  intentkit/skills/enso/abi/route.py,sha256=ng9U2RSyS5R3d-b0m5ELa4rFpaUDO9HcgSoX9P_wWZo,4746
- intentkit/skills/firecrawl/README.md,sha256=LCi6ju-QO0nXti4y9-ltcF-bwrgXGT7NJpz67vFUcCo,6912
+ intentkit/skills/firecrawl/README.md,sha256=OP5rCC5aNx9A4YjgotZB-JFdBR_0qHiWmYLuA52a8Tw,7366
  intentkit/skills/firecrawl/__init__.py,sha256=QQ0I5vlUgsLRFqHO17vbq-3ERKL3nzoo2B4MFGH0Igg,3160
  intentkit/skills/firecrawl/base.py,sha256=8BqD3X6RK0RedWU-qsa5qPMpuXWTZ6NbYLSpppFK_EU,1334
  intentkit/skills/firecrawl/clear.py,sha256=mfzQg8e6sbCwSzJGN_Lqfgxt-0pvtH_dBtNSJpMQA5A,2830
  intentkit/skills/firecrawl/crawl.py,sha256=lhySK1TbxGcLAXQi1zvrp4Zdo5ghhBFvxc4mFMl5LoI,18278
  intentkit/skills/firecrawl/firecrawl.png,sha256=6GoGlIMYuIDo-TqMlZbD4QYkmxvQ7krqAa5MANumJqk,5065
  intentkit/skills/firecrawl/query.py,sha256=LZzIy-LmqyEa8cZoBm-Eoen6GRy3NJxfuQcGi54Hwp0,4364
- intentkit/skills/firecrawl/schema.json,sha256=3LfZPS-mdKNh8r7IQ-oAMFAq_xS5dVs9sV8PXeEUh6o,4439
- intentkit/skills/firecrawl/scrape.py,sha256=P2Pwbi5l6bbN1S8akwwr9dhtUHw20UBHdN0c2B5J9Rs,13642
+ intentkit/skills/firecrawl/schema.json,sha256=q3ynbCO1NDidHZd3Nh7TNZ6lCv6y26XW7WBrYlj-JM0,4513
+ intentkit/skills/firecrawl/scrape.py,sha256=2axmz5hZVnNGvTPTi0r0WAN4MoYNQZzOFtMZd5pRgcg,20704
  intentkit/skills/firecrawl/utils.py,sha256=Ot_vEg4Z30_BY3Xbh59gb_Tu17tSCmytRw49RGAzZ88,10093
  intentkit/skills/github/README.md,sha256=SzYGJ9qSPaZl68iD8AQJGKTMLv0keQZesnSK-VhrAfs,1802
  intentkit/skills/github/__init__.py,sha256=Vva9jMtACSM_cZXy5JY0h6Q1ejR1jm-Xu3Q6PwyB72o,1471
@@ -411,7 +411,7 @@ intentkit/utils/random.py,sha256=DymMxu9g0kuQLgJUqalvgksnIeLdS-v0aRk5nQU0mLI,452
  intentkit/utils/s3.py,sha256=9trQNkKQ5VgxWsewVsV8Y0q_pXzGRvsCYP8xauyUYkg,8549
  intentkit/utils/slack_alert.py,sha256=s7UpRgyzLW7Pbmt8cKzTJgMA9bm4EP-1rQ5KXayHu6E,2264
  intentkit/utils/tx.py,sha256=2yLLGuhvfBEY5n_GJ8wmIWLCzn0FsYKv5kRNzw_sLUI,1454
- intentkit-0.6.19.dev1.dist-info/METADATA,sha256=BgyilNciFNBjtrR1TbV2aLHYv66HE9owOSnAYWk_dlo,6414
- intentkit-0.6.19.dev1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- intentkit-0.6.19.dev1.dist-info/licenses/LICENSE,sha256=Bln6DhK-LtcO4aXy-PBcdZv2f24MlJFm_qn222biJtE,1071
- intentkit-0.6.19.dev1.dist-info/RECORD,,
+ intentkit-0.6.20.dev1.dist-info/METADATA,sha256=oGwdu4cAD3dMnV6di-S4CTtXCr8vJH37NZNXn3yRqEA,6414
+ intentkit-0.6.20.dev1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ intentkit-0.6.20.dev1.dist-info/licenses/LICENSE,sha256=Bln6DhK-LtcO4aXy-PBcdZv2f24MlJFm_qn222biJtE,1071
+ intentkit-0.6.20.dev1.dist-info/RECORD,,