intentkit 0.8.11.dev1__py3-none-any.whl → 0.8.12.dev2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of intentkit might be problematic.
- intentkit/__init__.py +1 -1
- intentkit/abstracts/graph.py +4 -0
- intentkit/abstracts/skill.py +0 -81
- intentkit/core/agent.py +0 -95
- intentkit/core/engine.py +11 -4
- intentkit/models/skill.py +38 -40
- intentkit/skills/base.py +67 -0
- intentkit/skills/casino/deck_draw.py +3 -4
- intentkit/skills/casino/deck_shuffle.py +4 -2
- intentkit/skills/cryptocompare/base.py +4 -13
- intentkit/skills/cryptocompare/fetch_news.py +1 -1
- intentkit/skills/cryptocompare/fetch_price.py +1 -1
- intentkit/skills/cryptocompare/fetch_top_exchanges.py +1 -1
- intentkit/skills/cryptocompare/fetch_top_market_cap.py +1 -1
- intentkit/skills/cryptocompare/fetch_top_volume.py +1 -1
- intentkit/skills/cryptocompare/fetch_trading_signals.py +1 -1
- intentkit/skills/defillama/base.py +3 -9
- intentkit/skills/enso/networks.py +1 -6
- intentkit/skills/enso/route.py +3 -5
- intentkit/skills/enso/tokens.py +2 -12
- intentkit/skills/firecrawl/clear.py +3 -6
- intentkit/skills/firecrawl/scrape.py +2 -4
- intentkit/skills/firecrawl/utils.py +14 -5
- intentkit/skills/twitter/base.py +4 -13
- intentkit/skills/twitter/follow_user.py +1 -3
- intentkit/skills/twitter/get_mentions.py +2 -7
- intentkit/skills/twitter/get_timeline.py +3 -9
- intentkit/skills/twitter/get_user_by_username.py +1 -3
- intentkit/skills/twitter/get_user_tweets.py +3 -9
- intentkit/skills/twitter/like_tweet.py +1 -3
- intentkit/skills/twitter/post_tweet.py +1 -3
- intentkit/skills/twitter/reply_tweet.py +1 -3
- intentkit/skills/twitter/retweet.py +1 -3
- intentkit/skills/twitter/search_tweets.py +3 -9
- intentkit/skills/web_scraper/scrape_and_index.py +2 -4
- intentkit/skills/web_scraper/utils.py +7 -11
- {intentkit-0.8.11.dev1.dist-info → intentkit-0.8.12.dev2.dist-info}/METADATA +1 -1
- {intentkit-0.8.11.dev1.dist-info → intentkit-0.8.12.dev2.dist-info}/RECORD +40 -40
- {intentkit-0.8.11.dev1.dist-info → intentkit-0.8.12.dev2.dist-info}/WHEEL +0 -0
- {intentkit-0.8.11.dev1.dist-info → intentkit-0.8.12.dev2.dist-info}/licenses/LICENSE +0 -0
intentkit/skills/defillama/base.py
CHANGED
@@ -52,9 +52,7 @@ class DefiLlamaBaseTool(IntentKitSkill):
         Returns:
             Rate limit status and error message if limited
         """
-        rate_limit = await self.
-            context.agent_id, self.name, "rate_limit"
-        )
+        rate_limit = await self.get_agent_skill_data("rate_limit")
         current_time = datetime.now(tz=timezone.utc)

         if (
@@ -67,18 +65,14 @@ class DefiLlamaBaseTool(IntentKitSkill):
                 return True, "Rate limit exceeded"

             rate_limit["count"] += 1
-            await self.
-                context.agent_id, self.name, "rate_limit", rate_limit
-            )
+            await self.save_agent_skill_data("rate_limit", rate_limit)
             return False, None

         new_rate_limit = {
             "count": 1,
             "reset_time": (current_time + timedelta(minutes=interval)).isoformat(),
         }
-        await self.
-            context.agent_id, self.name, "rate_limit", new_rate_limit
-        )
+        await self.save_agent_skill_data("rate_limit", new_rate_limit)
         return False, None

     async def validate_chain(self, chain: str | None) -> tuple[bool, str | None]:
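The change above replaces the old three-argument skill-store calls with instance-level helpers that are already scoped to the current agent and skill. A minimal sketch of the resulting rate-limit flow, assuming only that get_agent_skill_data(key) and save_agent_skill_data(key, value) behave as the added lines suggest; the window condition is condensed here and not copied from the hidden context lines:

    from datetime import datetime, timedelta, timezone

    async def is_rate_limited(self, limit: int, minutes: int) -> tuple[bool, str | None]:
        # Read this skill's stored rate-limit state for the current agent.
        rate_limit = await self.get_agent_skill_data("rate_limit")
        now = datetime.now(tz=timezone.utc)

        # Assumed window check: still inside the stored window?
        if rate_limit and now < datetime.fromisoformat(rate_limit["reset_time"]):
            if rate_limit["count"] >= limit:
                return True, "Rate limit exceeded"
            rate_limit["count"] += 1
            await self.save_agent_skill_data("rate_limit", rate_limit)
            return False, None

        # Start a new window.
        new_rate_limit = {
            "count": 1,
            "reset_time": (now + timedelta(minutes=minutes)).isoformat(),
        }
        await self.save_agent_skill_data("rate_limit", new_rate_limit)
        return False, None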
intentkit/skills/enso/networks.py
CHANGED
@@ -83,12 +83,7 @@ class EnsoGetNetworks(EnsoBaseTool):
                     exclude_none=True
                 )

-                await self.
-                    context.agent_id,
-                    "enso_get_networks",
-                    "networks",
-                    networks_memory,
-                )
+                await self.save_agent_skill_data("networks", networks_memory)

                 return EnsoGetNetworksOutput(res=networks)
             except httpx.RequestError as req_err:
intentkit/skills/enso/route.py
CHANGED
@@ -181,7 +181,6 @@ class EnsoRouteShortcut(EnsoBaseTool):
         """

         context = self.get_context()
-        agent_id = context.agent_id
         resolved_chain_id = self.resolve_chain_id(context, chainId)
         api_token = self.get_api_token(context)
         # Use the wallet provider to send the transaction
@@ -191,8 +190,8 @@ class EnsoRouteShortcut(EnsoBaseTool):
         async with httpx.AsyncClient() as client:
             try:
                 network_name = None
-                networks = await self.
-
+                networks = await self.get_agent_skill_data_raw(
+                    "enso_get_networks", "networks"
                 )

                 if networks:
@@ -221,8 +220,7 @@ class EnsoRouteShortcut(EnsoBaseTool):
                     "Authorization": f"Bearer {api_token}",
                 }

-                token_decimals = await self.
-                    agent_id,
+                token_decimals = await self.get_agent_skill_data_raw(
                     "enso_get_tokens",
                     "decimals",
                 )
intentkit/skills/enso/tokens.py
CHANGED
@@ -154,7 +154,6 @@ class EnsoGetTokens(EnsoBaseTool):
         url = f"{base_url}/api/v1/tokens"

         context = self.get_context()
-        agent_id = context.agent_id
         resolved_chain_id = self.resolve_chain_id(context, chainId)
         api_token = self.get_api_token(context)
         main_tokens = self.get_main_tokens(context)
@@ -178,11 +177,7 @@ class EnsoGetTokens(EnsoBaseTool):
                 response.raise_for_status()
                 json_dict = response.json()

-                token_decimals = await self.
-                    agent_id,
-                    "enso_get_tokens",
-                    "decimals",
-                )
+                token_decimals = await self.get_agent_skill_data("decimals")
                 if not token_decimals:
                     token_decimals = {}

@@ -203,12 +198,7 @@ class EnsoGetTokens(EnsoBaseTool):
                     if u_token.address:
                         token_decimals[u_token.address] = u_token.decimals

-                await self.
-                    agent_id,
-                    "enso_get_tokens",
-                    "decimals",
-                    token_decimals,
-                )
+                await self.save_agent_skill_data("decimals", token_decimals)

                 return res
             except httpx.RequestError as req_err:
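The enso skills follow the same pattern for their caches: data a skill writes for itself goes through get_agent_skill_data/save_agent_skill_data, while route.py reads another skill's cache through get_agent_skill_data_raw(skill, key). A sketch of both access paths, using only the helper names and signatures visible in the added lines (the wrapper functions below are illustrative, not part of the package):

    async def load_token_decimals(self) -> dict:
        # Per-skill cache: only the key is passed; the skill is implied by the caller.
        decimals = await self.get_agent_skill_data("decimals")
        return decimals or {}

    async def store_token_decimals(self, decimals: dict) -> None:
        await self.save_agent_skill_data("decimals", decimals)

    async def load_networks_cached_by_other_skill(self):
        # Cross-skill read: the owning skill's name is passed explicitly.
        return await self.get_agent_skill_data_raw("enso_get_networks", "networks")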
intentkit/skills/firecrawl/clear.py
CHANGED
@@ -3,6 +3,7 @@ from typing import Type

 from pydantic import BaseModel, Field

+from intentkit.models.skill import AgentSkillData
 from intentkit.skills.firecrawl.base import FirecrawlBaseTool

 logger = logging.getLogger(__name__)
@@ -62,15 +63,11 @@ class FirecrawlClearIndexedContent(FirecrawlBaseTool):
         try:
             # Delete vector store data (using web_scraper storage format for compatibility)
             vector_store_key = f"vector_store_{agent_id}"
-            await
-                agent_id, "web_scraper", vector_store_key
-            )
+            await AgentSkillData.delete(agent_id, "web_scraper", vector_store_key)

             # Delete metadata
             metadata_key = f"indexed_urls_{agent_id}"
-            await
-                agent_id, "web_scraper", metadata_key
-            )
+            await AgentSkillData.delete(agent_id, "web_scraper", metadata_key)

             logger.info(
                 f"firecrawl_clear: Successfully cleared all indexed content for agent {agent_id}"
intentkit/skills/firecrawl/scrape.py
CHANGED
@@ -368,10 +368,8 @@ class FirecrawlScrape(FirecrawlBaseTool):
             # Update metadata to track all URLs
             # Get existing metadata to preserve other URLs
             metadata_key = f"indexed_urls_{agent_id}"
-            existing_metadata = (
-
-                agent_id, "firecrawl", metadata_key
-            )
+            existing_metadata = await self.get_agent_skill_data_raw(
+                "firecrawl", metadata_key
             )

             if existing_metadata and existing_metadata.get("urls"):
intentkit/skills/firecrawl/utils.py
CHANGED
@@ -10,6 +10,7 @@ from langchain_core.documents import Document
 from langchain_openai import OpenAIEmbeddings

 from intentkit.abstracts.skill import SkillStoreABC
+from intentkit.models.skill import AgentSkillData, AgentSkillDataCreate

 logger = logging.getLogger(__name__)

@@ -129,7 +130,7 @@ class FirecrawlVectorStoreManager:
         """Load existing vector store for an agent."""
         try:
             vector_store_key = f"vector_store_{agent_id}"
-            stored_data = await
+            stored_data = await AgentSkillData.get(
                 agent_id, "web_scraper", vector_store_key
             )

@@ -162,9 +163,13 @@ class FirecrawlVectorStoreManager:
                 "chunk_overlap": chunk_overlap,
             }

-
-                agent_id,
+            skill_data = AgentSkillDataCreate(
+                agent_id=agent_id,
+                skill="web_scraper",
+                key=vector_store_key,
+                data=storage_data,
             )
+            await skill_data.save()

         except Exception as e:
             logger.error(f"Error saving vector store for agent {agent_id}: {e}")
@@ -194,9 +199,13 @@ class FirecrawlMetadataManager:
         """Update metadata for an agent."""
         try:
             metadata_key = f"indexed_urls_{agent_id}"
-
-                agent_id,
+            skill_data = AgentSkillDataCreate(
+                agent_id=agent_id,
+                skill="web_scraper",
+                key=metadata_key,
+                data=new_metadata,
             )
+            await skill_data.save()
         except Exception as e:
             logger.error(f"Error updating metadata for agent {agent_id}: {e}")
             raise
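The firecrawl skills bypass the skill-store wrapper and use the AgentSkillData model directly: reads and deletes take (agent_id, skill, key), and writes go through an AgentSkillDataCreate record followed by save(). A sketch under those assumptions; the helper functions and payload below are illustrative, not part of the package:

    from intentkit.models.skill import AgentSkillData, AgentSkillDataCreate

    async def save_indexed_urls(agent_id: str, metadata: dict) -> None:
        # Firecrawl stores under the web_scraper namespace for compatibility.
        record = AgentSkillDataCreate(
            agent_id=agent_id,
            skill="web_scraper",
            key=f"indexed_urls_{agent_id}",
            data=metadata,
        )
        await record.save()

    async def clear_indexed_urls(agent_id: str) -> None:
        key = f"indexed_urls_{agent_id}"
        existing = await AgentSkillData.get(agent_id, "web_scraper", key)
        if existing:
            await AgentSkillData.delete(agent_id, "web_scraper", key)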
intentkit/skills/twitter/base.py
CHANGED
@@ -63,22 +63,17 @@ class TwitterBaseTool(IntentKitSkill):
     def category(self) -> str:
         return "twitter"

-    async def check_rate_limit(
-        self, agent_id: str, max_requests: int = 1, interval: int = 15
-    ) -> None:
+    async def check_rate_limit(self, max_requests: int = 1, interval: int = 15) -> None:
         """Check if the rate limit has been exceeded.

         Args:
-            agent_id: The ID of the agent.
             max_requests: Maximum number of requests allowed within the rate limit window.
             interval: Time interval in minutes for the rate limit window.

         Raises:
             RateLimitExceeded: If the rate limit has been exceeded.
         """
-        rate_limit = await self.
-            agent_id, self.name, "rate_limit"
-        )
+        rate_limit = await self.get_agent_skill_data("rate_limit")

         current_time = datetime.now(tz=timezone.utc)

@@ -92,9 +87,7 @@ class TwitterBaseTool(IntentKitSkill):
                 raise RateLimitExceeded("Rate limit exceeded")

             rate_limit["count"] += 1
-            await self.
-                agent_id, self.name, "rate_limit", rate_limit
-            )
+            await self.save_agent_skill_data("rate_limit", rate_limit)

             return

@@ -103,7 +96,5 @@ class TwitterBaseTool(IntentKitSkill):
             "count": 1,
             "reset_time": (current_time + timedelta(minutes=interval)).isoformat(),
         }
-        await self.
-            agent_id, self.name, "rate_limit", new_rate_limit
-        )
+        await self.save_agent_skill_data("rate_limit", new_rate_limit)
         return
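With the agent_id parameter removed, TwitterBaseTool.check_rate_limit presumably resolves the agent from the skill's own context, so every call site in the twitter skills below collapses to a single line, and the since_id bookkeeping changes the same way. A sketch of the updated call pattern, with the limits and keys taken from the hunks that follow:

    # Rate limit only applies when the agent is not using its own API key.
    if not twitter.use_key:
        await self.check_rate_limit(max_requests=1, interval=15)  # interval is in minutes

    # since_id bookkeeping keyed directly on the skill data.
    last = await self.get_agent_skill_data(query)
    last = last or {}
    since_id = last.get("since_id")
    # ... call the Twitter API with since_id ...
    if tweets.get("meta") and tweets["meta"].get("newest_id"):
        last["since_id"] = tweets["meta"]["newest_id"]
        await self.save_agent_skill_data(query, last)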
intentkit/skills/twitter/follow_user.py
CHANGED
@@ -49,9 +49,7 @@ class TwitterFollowUser(TwitterBaseTool):

             # Check rate limit only when not using OAuth
             if not twitter.use_key:
-                await self.check_rate_limit(
-                    context.agent_id, max_requests=5, interval=15
-                )
+                await self.check_rate_limit(max_requests=5, interval=15)

             # Follow the user using tweepy client
             response = await client.follow_user(
intentkit/skills/twitter/get_mentions.py
CHANGED
@@ -56,15 +56,12 @@ class TwitterGetMentions(TwitterBaseTool):
             # Check rate limit only when not using OAuth
             if not twitter.use_key:
                 await self.check_rate_limit(
-                    context.agent_id,
                     max_requests=1,
                     interval=15,
                 )

             # get since id from store
-            last = await self.
-                context.agent_id, self.name, "last"
-            )
+            last = await self.get_agent_skill_data("last")
             last = last or {}
             max_results = 10
             since_id = last.get("since_id")
@@ -113,9 +110,7 @@ class TwitterGetMentions(TwitterBaseTool):
             # Update since_id in store
             if mentions.get("meta") and mentions["meta"].get("newest_id"):
                 last["since_id"] = mentions["meta"].get("newest_id")
-                await self.
-                    context.agent_id, self.name, "last", last
-                )
+                await self.save_agent_skill_data("last", last)

             return mentions

intentkit/skills/twitter/get_timeline.py
CHANGED
@@ -52,14 +52,10 @@ class TwitterGetTimeline(TwitterBaseTool):

             # Check rate limit only when not using OAuth
             if not twitter.use_key:
-                await self.check_rate_limit(
-                    context.agent_id, max_requests=1, interval=15
-                )
+                await self.check_rate_limit(max_requests=1, interval=15)

             # get since id from store
-            last = await self.
-                context.agent_id, self.name, "last"
-            )
+            last = await self.get_agent_skill_data("last")
             last = last or {}
             since_id = last.get("since_id")

@@ -101,9 +97,7 @@ class TwitterGetTimeline(TwitterBaseTool):
             # Update the since_id in store for the next request
             if timeline.get("meta") and timeline["meta"].get("newest_id"):
                 last["since_id"] = timeline["meta"]["newest_id"]
-                await self.
-                    context.agent_id, self.name, "last", last
-                )
+                await self.save_agent_skill_data("last", last)

             return timeline

intentkit/skills/twitter/get_user_by_username.py
CHANGED
@@ -50,9 +50,7 @@ class TwitterGetUserByUsername(TwitterBaseTool):

             # Check rate limit only when not using OAuth
             if not twitter.use_key:
-                await self.check_rate_limit(
-                    context.agent_id, max_requests=5, interval=60 * 24
-                )
+                await self.check_rate_limit(max_requests=5, interval=60 * 24)

             user_data = await client.get_user(
                 username=username,
intentkit/skills/twitter/get_user_tweets.py
CHANGED
@@ -66,14 +66,10 @@ class TwitterGetUserTweets(TwitterBaseTool):

             # Check rate limit only when not using OAuth
             if not twitter.use_key:
-                await self.check_rate_limit(
-                    context.agent_id, max_requests=1, interval=15
-                )
+                await self.check_rate_limit(max_requests=1, interval=15)

             # get since id from store
-            last = await self.
-                context.agent_id, self.name, user_id
-            )
+            last = await self.get_agent_skill_data(user_id)
             last = last or {}
             since_id = last.get("since_id")

@@ -112,9 +108,7 @@ class TwitterGetUserTweets(TwitterBaseTool):
             # Update the since_id in store for the next request
             if tweets.get("meta") and tweets["meta"].get("newest_id"):
                 last["since_id"] = tweets["meta"]["newest_id"]
-                await self.
-                    context.agent_id, self.name, user_id, last
-                )
+                await self.save_agent_skill_data(user_id, last)

             return tweets

intentkit/skills/twitter/like_tweet.py
CHANGED
@@ -47,9 +47,7 @@ class TwitterLikeTweet(TwitterBaseTool):

             # Check rate limit only when not using OAuth
             if not twitter.use_key:
-                await self.check_rate_limit(
-                    context.agent_id, max_requests=100, interval=1440
-                )
+                await self.check_rate_limit(max_requests=100, interval=1440)

             # Like the tweet using tweepy client
             response = await client.like(tweet_id=tweet_id, user_auth=twitter.use_key)
intentkit/skills/twitter/post_tweet.py
CHANGED
@@ -61,9 +61,7 @@ class TwitterPostTweet(TwitterBaseTool):

             # Check rate limit only when not using OAuth
             if not twitter.use_key:
-                await self.check_rate_limit(
-                    context.agent_id, max_requests=24, interval=1440
-                )
+                await self.check_rate_limit(max_requests=24, interval=1440)

             media_ids = []
             image_warning = ""
intentkit/skills/twitter/reply_tweet.py
CHANGED
@@ -63,9 +63,7 @@ class TwitterReplyTweet(TwitterBaseTool):

             # Check rate limit only when not using OAuth
             if not twitter.use_key:
-                await self.check_rate_limit(
-                    context.agent_id, max_requests=48, interval=1440
-                )
+                await self.check_rate_limit(max_requests=48, interval=1440)

             media_ids = []
             image_warning = ""
intentkit/skills/twitter/retweet.py
CHANGED
@@ -47,9 +47,7 @@ class TwitterRetweet(TwitterBaseTool):

             # Check rate limit only when not using OAuth
             if not twitter.use_key:
-                await self.check_rate_limit(
-                    context.agent_id, max_requests=5, interval=15
-                )
+                await self.check_rate_limit(max_requests=5, interval=15)

             # Get authenticated user's ID
             user_id = twitter.self_id
intentkit/skills/twitter/search_tweets.py
CHANGED
@@ -49,14 +49,10 @@ class TwitterSearchTweets(TwitterBaseTool):

             # Check rate limit only when not using OAuth
             if not twitter.use_key:
-                await self.check_rate_limit(
-                    context.agent_id, max_requests=1, interval=15
-                )
+                await self.check_rate_limit(max_requests=1, interval=15)

             # Get since_id from store to avoid duplicate results
-            last = await self.
-                context.agent_id, self.name, query
-            )
+            last = await self.get_agent_skill_data(query)
             last = last or {}
             since_id = last.get("since_id")

@@ -104,9 +100,7 @@ class TwitterSearchTweets(TwitterBaseTool):
             if tweets.get("meta") and tweets.get("meta").get("newest_id"):
                 last["since_id"] = tweets["meta"]["newest_id"]
                 last["timestamp"] = datetime.datetime.now().isoformat()
-                await self.
-                    context.agent_id, self.name, query, last
-                )
+                await self.save_agent_skill_data(query, last)

             return tweets

intentkit/skills/web_scraper/scrape_and_index.py
CHANGED
@@ -196,9 +196,8 @@ class QueryIndexedContent(WebScraperBaseTool):

         logger.info(f"[{agent_id}] Looking for vector store: {vector_store_key}")

-
-
-        )
+        vs_manager = VectorStoreManager(self.skill_store)
+        stored_data = await vs_manager.get_existing_vector_store(agent_id)

         if not stored_data:
             logger.warning(f"[{agent_id}] No vector store found")
@@ -210,7 +209,6 @@ class QueryIndexedContent(WebScraperBaseTool):

         # Create embeddings and decode vector store
         logger.info(f"[{agent_id}] Decoding vector store")
-        vs_manager = VectorStoreManager(self.skill_store)
         embeddings = vs_manager.create_embeddings()
         vector_store = vs_manager.decode_vector_store(
             stored_data["faiss_files"], embeddings
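In scrape_and_index.py the query path now builds the VectorStoreManager up front and reuses it for both the lookup and the decode step. A condensed sketch of the flow as the two hunks above show it, with error handling and the actual similarity search omitted:

    vs_manager = VectorStoreManager(self.skill_store)
    stored_data = await vs_manager.get_existing_vector_store(agent_id)
    if not stored_data:
        logger.warning(f"[{agent_id}] No vector store found")
    else:
        embeddings = vs_manager.create_embeddings()
        vector_store = vs_manager.decode_vector_store(
            stored_data["faiss_files"], embeddings
        )
        # ... run the query against vector_store ...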
intentkit/skills/web_scraper/utils.py
CHANGED
@@ -18,6 +18,7 @@ from langchain_openai import OpenAIEmbeddings
 from langchain_text_splitters import RecursiveCharacterTextSplitter

 from intentkit.abstracts.skill import SkillStoreABC
+from intentkit.models.skill import AgentSkillData, AgentSkillDataCreate

 logger = logging.getLogger(__name__)

@@ -113,9 +114,7 @@ class VectorStoreManager:
     async def get_existing_vector_store(self, agent_id: str) -> Optional[Dict]:
         """Get existing vector store data if it exists."""
         vector_store_key, _ = self.get_storage_keys(agent_id)
-        return await
-            agent_id, "web_scraper", vector_store_key
-        )
+        return await AgentSkillData.get(agent_id, "web_scraper", vector_store_key)

     async def merge_with_existing(
         self,
@@ -184,12 +183,13 @@ class VectorStoreManager:

         try:
             # Save to storage
-
+            skill_data = AgentSkillDataCreate(
                 agent_id=agent_id,
                 skill="web_scraper",
                 key=vector_store_key,
                 data=storage_data,
             )
+            await skill_data.save()

             logger.info(f"[{agent_id}] Successfully saved vector store")

@@ -319,12 +319,7 @@ class MetadataManager:
         """Get existing metadata for an agent."""
         vs_manager = VectorStoreManager(self.skill_store)
         _, metadata_key = vs_manager.get_storage_keys(agent_id)
-        return (
-            await self.skill_store.get_agent_skill_data(
-                agent_id, "web_scraper", metadata_key
-            )
-            or {}
-        )
+        return await AgentSkillData.get(agent_id, "web_scraper", metadata_key) or {}

     def create_url_metadata(
         self,
@@ -391,12 +386,13 @@ class MetadataManager:
         existing_metadata.update(new_metadata)

         # Save updated metadata
-
+        skill_data = AgentSkillDataCreate(
             agent_id=agent_id,
             skill="web_scraper",
             key=metadata_key,
             data=existing_metadata,
         )
+        await skill_data.save()


 class ResponseFormatter:
{intentkit-0.8.11.dev1.dist-info → intentkit-0.8.12.dev2.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: intentkit
-Version: 0.8.11.dev1
+Version: 0.8.12.dev2
 Summary: Intent-based AI Agent Platform - Core Package
 Project-URL: Homepage, https://github.com/crestalnetwork/intentkit
 Project-URL: Repository, https://github.com/crestalnetwork/intentkit