python-slack-agents 0.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- python_slack_agents-0.5.0.dist-info/METADATA +318 -0
- python_slack_agents-0.5.0.dist-info/RECORD +54 -0
- python_slack_agents-0.5.0.dist-info/WHEEL +4 -0
- python_slack_agents-0.5.0.dist-info/entry_points.txt +2 -0
- python_slack_agents-0.5.0.dist-info/licenses/LICENSE +202 -0
- slack_agents/Dockerfile +25 -0
- slack_agents/__init__.py +25 -0
- slack_agents/access/__init__.py +0 -0
- slack_agents/access/allow_all.py +9 -0
- slack_agents/access/allow_list.py +19 -0
- slack_agents/access/base.py +20 -0
- slack_agents/agent_loop.py +208 -0
- slack_agents/cli/__init__.py +48 -0
- slack_agents/cli/build_docker.py +94 -0
- slack_agents/cli/export_conversations.py +84 -0
- slack_agents/cli/export_conversations_html.py +605 -0
- slack_agents/cli/export_usage.py +81 -0
- slack_agents/cli/export_usage_csv.py +151 -0
- slack_agents/cli/healthcheck.py +67 -0
- slack_agents/cli/run.py +16 -0
- slack_agents/config.py +113 -0
- slack_agents/conversations.py +273 -0
- slack_agents/files.py +59 -0
- slack_agents/llm/__init__.py +1 -0
- slack_agents/llm/anthropic.py +207 -0
- slack_agents/llm/base.py +82 -0
- slack_agents/llm/openai.py +283 -0
- slack_agents/main.py +55 -0
- slack_agents/observability.py +175 -0
- slack_agents/py.typed +0 -0
- slack_agents/scripts/__init__.py +0 -0
- slack_agents/scripts/download_fonts.py +39 -0
- slack_agents/slack/__init__.py +0 -0
- slack_agents/slack/actions.py +119 -0
- slack_agents/slack/agent.py +688 -0
- slack_agents/slack/canvases.py +225 -0
- slack_agents/slack/files.py +102 -0
- slack_agents/slack/format.py +55 -0
- slack_agents/slack/streaming.py +70 -0
- slack_agents/slack/streaming_formatter.py +182 -0
- slack_agents/slack/tool_blocks.py +97 -0
- slack_agents/storage/__init__.py +0 -0
- slack_agents/storage/base.py +304 -0
- slack_agents/storage/postgres.py +612 -0
- slack_agents/storage/postgres.sql +120 -0
- slack_agents/storage/sqlite.py +473 -0
- slack_agents/storage/sqlite.sql +73 -0
- slack_agents/tools/__init__.py +0 -0
- slack_agents/tools/base.py +140 -0
- slack_agents/tools/canvas.py +401 -0
- slack_agents/tools/file_exporter.py +582 -0
- slack_agents/tools/file_importer.py +363 -0
- slack_agents/tools/mcp_http.py +203 -0
- slack_agents/tools/user_context.py +239 -0
|
@@ -0,0 +1,97 @@
|
|
|
1
|
+
"""Block Kit builders for collapsible tool call messages."""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
|
|
5
|
+
_MAX_SECTION_TEXT = 3000
|
|
6
|
+
|
|
7
|
+
ICON_CALLING = "\u25b8" # ▸
|
|
8
|
+
ICON_SUCCESS = "\u2713" # ✓
|
|
9
|
+
ICON_ERROR = "\u2717" # ✗
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
_TRUNCATION_SUFFIX = "\n... (truncated)"
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def _truncate(text: str, *, max_len: int = _MAX_SECTION_TEXT) -> str:
|
|
16
|
+
if len(text) <= max_len:
|
|
17
|
+
return text
|
|
18
|
+
return text[: max_len - len(_TRUNCATION_SUFFIX)] + _TRUNCATION_SUFFIX
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def build_calling_blocks(tool_name: str) -> list[dict]:
    """Calling state: a single section block with a processing indicator."""
    header_text = f"{ICON_CALLING} tool: _{tool_name}_ (processing...)"
    return [{"type": "section", "text": {"type": "mrkdwn", "text": header_text}}]
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def _tool_value(tool_id: str, tool_name: str) -> str:
|
|
35
|
+
return json.dumps({"tool_id": tool_id, "tool_name": tool_name})
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def _tool_header(icon: str, tool_name: str, action_id: str, label: str, value: str):
|
|
39
|
+
return {
|
|
40
|
+
"type": "section",
|
|
41
|
+
"text": {"type": "mrkdwn", "text": f"{icon} tool: _{tool_name}_"},
|
|
42
|
+
"accessory": {
|
|
43
|
+
"type": "overflow",
|
|
44
|
+
"action_id": action_id,
|
|
45
|
+
"options": [
|
|
46
|
+
{
|
|
47
|
+
"text": {"type": "plain_text", "text": label},
|
|
48
|
+
"value": value,
|
|
49
|
+
}
|
|
50
|
+
],
|
|
51
|
+
},
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def build_collapsed_blocks(tool_name: str, is_error: bool, tool_id: str) -> list[dict]:
    """Collapsed state: header row only, with a 'Show Details' overflow action."""
    status_icon = ICON_SUCCESS if not is_error else ICON_ERROR
    header = _tool_header(
        status_icon,
        tool_name,
        f"tool_expand_{tool_id}",
        "Show Details",
        _tool_value(tool_id, tool_name),
    )
    return [header]
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
def _wrap_code_block(label: str, content: str) -> dict:
    """Build a section block with a labelled code fence, respecting Slack's 3000-char limit."""
    # The wrapper contributes "*Label:*\n```\n" + content + "\n```"; reserve
    # that overhead so label + fence + content fit the limit together.
    overhead = len(f"*{label}:*\n```\n\n```")
    body = _truncate(content, max_len=_MAX_SECTION_TEXT - overhead)
    return {
        "type": "section",
        "text": {
            "type": "mrkdwn",
            "text": f"*{label}:*\n```\n{body}\n```",
        },
    }
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def build_expanded_blocks(
    tool_name: str, is_error: bool, tool_id: str, input_json: str, output_json: str
) -> list[dict]:
    """Expanded state: header with 'Hide Details' plus input/output code fences."""
    status_icon = ICON_ERROR if is_error else ICON_SUCCESS
    blocks: list[dict] = []
    blocks.append(
        _tool_header(
            status_icon,
            tool_name,
            f"tool_collapse_{tool_id}",
            "Hide Details",
            _tool_value(tool_id, tool_name),
        )
    )
    blocks.append(_wrap_code_block("Input", input_json))
    blocks.append(_wrap_code_block("Output", output_json))
    return blocks
|
|
File without changes
|
|
@@ -0,0 +1,304 @@
|
|
|
1
|
+
"""Abstract base class for storage providers."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
from abc import ABC, abstractmethod
|
|
7
|
+
from datetime import datetime, timezone
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class BaseStorageProvider(ABC):
    """Generic persistence layer.

    Implementations must provide the 6 abstract primitives (get, set, delete,
    append, get_list, query). All higher-level domain methods have default
    implementations built on those primitives so that non-relational backends
    (Redis, DynamoDB, ...) work out of the box.

    Relational backends (PostgreSQL, SQLite) should override the domain methods
    with proper SQL for better performance.
    """

    async def initialize(self) -> None:
        """Initialize the storage backend (create tables, connect, etc.)."""

    # ------------------------------------------------------------------
    # Abstract primitives — must be implemented by every backend
    # ------------------------------------------------------------------

    @abstractmethod
    async def get(self, namespace: str, key: str) -> dict | None:
        """Get a value by namespace and key. Returns None if not found."""

    @abstractmethod
    async def set(self, namespace: str, key: str, value: dict) -> None:
        """Set a value by namespace and key (upsert)."""

    @abstractmethod
    async def delete(self, namespace: str, key: str) -> None:
        """Delete a value by namespace and key."""

    @abstractmethod
    async def append(self, namespace: str, key: str, item: dict) -> str:
        """Append an item to a list. Returns the item's ID."""

    @abstractmethod
    async def get_list(self, namespace: str, key: str) -> list[dict]:
        """Get all items in a list, ordered by insertion time."""

    @abstractmethod
    async def query(self, namespace: str, filters: dict) -> list[dict]:
        """Query items in a namespace by filters. Simple equality matching."""

    async def close(self) -> None:
        """Close connections and clean up resources."""

    # ------------------------------------------------------------------
    # Domain methods — default implementations using the primitives above.
    # Relational backends should override for efficiency.
    # ------------------------------------------------------------------

    @staticmethod
    def _utcnow_iso() -> str:
        """Shared ``created_at`` timestamp format: ISO-8601 in UTC."""
        return datetime.now(timezone.utc).isoformat()

    @property
    def supports_export(self) -> bool:
        """Whether this backend supports conversation export."""
        return False

    async def get_or_create_conversation(
        self,
        agent_name: str,
        channel_id: str,
        thread_id: str,
        channel_name: str | None = None,
    ) -> int | str:
        """Get or create a conversation, return its ID.

        The conversation record is stored via ``append()``; the generated ID
        is then cached under the composite key so later calls resolve with a
        single ``get()``.
        """
        key = f"{agent_name}:{channel_id}:{thread_id}"
        existing = await self.get("conversations", key)
        if existing:
            return existing["id"]
        conv_id = await self.append(
            "conversations",
            key,
            {
                "agent_name": agent_name,
                "channel_id": channel_id,
                "channel_name": channel_name,
                "thread_id": thread_id,
            },
        )
        # Cache the generated ID for fast existence checks / repeat lookups.
        await self.set("conversations", key, {"id": conv_id})
        return conv_id

    async def has_conversation(self, agent_name: str, channel_id: str, thread_id: str) -> bool:
        """Check if a conversation exists for the given thread."""
        key = f"{agent_name}:{channel_id}:{thread_id}"
        return await self.get("conversations", key) is not None

    async def create_message(
        self,
        conversation_id: int | str,
        user_id: str,
        user_name: str,
        user_handle: str,
    ) -> int | str:
        """Create a new message in a conversation, return its ID."""
        return await self.append(
            "messages",
            str(conversation_id),
            {
                "conversation_id": str(conversation_id),
                "user_id": user_id,
                "user_name": user_name,
                "user_handle": user_handle,
                "created_at": self._utcnow_iso(),
            },
        )

    async def get_message_blocks(
        self, conversation_id: int | str
    ) -> list[tuple[int | str, list[dict]]]:
        """Return ``(message_id, blocks)`` pairs for a conversation."""
        messages_data = await self.get_list("messages", str(conversation_id))
        result: list[tuple[int | str, list[dict]]] = []
        for msg in messages_data:
            blocks = await self.get_list("blocks", str(msg["id"]))
            result.append((msg["id"], blocks))
        return result

    async def append_text_block(
        self,
        message_id: int | str,
        text: str,
        *,
        is_user: bool = False,
        source_file_id: int | str | None = None,
    ) -> None:
        """Append a text block (user- or agent-authored) to a message."""
        await self.append(
            "blocks",
            str(message_id),
            {
                "block_type": "user_text" if is_user else "text",
                "content": {"text": text},
                # Explicit None check: a falsy-but-valid ID (e.g. integer 0)
                # must round-trip as "0", not be dropped to None.
                "source_file_id": str(source_file_id) if source_file_id is not None else None,
                "created_at": self._utcnow_iso(),
            },
        )

    async def append_file_block(
        self,
        message_id: int | str,
        content: dict,
        *,
        is_user: bool,
        filename: str,
        mimetype: str,
        size_bytes: int,
        tool_block_id: int | str | None = None,
    ) -> int | str:
        """Append a file block to a message, return the block's ID.

        ``tool_block_id`` links a generated file back to the tool-use block
        that produced it.
        """
        return await self.append(
            "blocks",
            str(message_id),
            {
                "block_type": "user_file" if is_user else "file",
                "content": content,
                "filename": filename,
                "mimetype": mimetype,
                "size_bytes": size_bytes,
                # Explicit None check — see append_text_block.
                "tool_block_id": str(tool_block_id) if tool_block_id is not None else None,
                "created_at": self._utcnow_iso(),
            },
        )

    async def append_tool_block(
        self,
        message_id: int | str,
        *,
        tool_call_id: str,
        tool_name: str,
        tool_input: dict,
        tool_output: str,
        is_error: bool,
    ) -> int | str:
        """Append a tool-use block (call plus result) to a message, return its ID."""
        return await self.append(
            "blocks",
            str(message_id),
            {
                "block_type": "tool_use",
                "content": {
                    "tool_call_id": tool_call_id,
                    "tool_name": tool_name,
                    "tool_input": tool_input,
                    "tool_output": tool_output,
                    "is_error": is_error,
                },
                "created_at": self._utcnow_iso(),
            },
        )

    async def append_usage_block(
        self,
        message_id: int | str,
        *,
        model: str,
        version: str,
        input_tokens: int,
        output_tokens: int,
        cache_creation_input_tokens: int,
        cache_read_input_tokens: int,
        peak_single_call_input_tokens: int,
        estimated_cost_usd: float | None,
    ) -> None:
        """Append a token-usage accounting block to a message."""
        await self.append(
            "blocks",
            str(message_id),
            {
                "block_type": "usage",
                "content": {
                    "model": model,
                    "version": version,
                    "input_tokens": input_tokens,
                    "output_tokens": output_tokens,
                    "cache_creation_input_tokens": cache_creation_input_tokens,
                    "cache_read_input_tokens": cache_read_input_tokens,
                    "peak_single_call_input_tokens": peak_single_call_input_tokens,
                    "estimated_cost_usd": estimated_cost_usd,
                },
                "created_at": self._utcnow_iso(),
            },
        )

    async def get_tool_call(self, tool_call_id: str) -> dict | None:
        """Look up a tool call by its LLM-generated ID.

        Default scans blocks via query(); relational backends should override
        with an indexed lookup.
        """
        rows = await self.query("blocks", {"block_type": "tool_use"})
        for row in rows:
            content = row.get("content", {})
            if isinstance(content, str):
                # Some backends store structured content as serialized JSON.
                content = json.loads(content)
            if content.get("tool_call_id") == tool_call_id:
                return {
                    "tool_name": content["tool_name"],
                    "input_json": json.dumps(content.get("tool_input", {}), indent=2),
                    "output_json": content.get("tool_output", ""),
                    "is_error": content.get("is_error", False),
                }
        return None

    async def upsert_heartbeat(self, agent_name: str, last_ping_pong_time: float) -> None:
        """Write a heartbeat row for this agent."""
        await self.set(
            "heartbeats",
            agent_name,
            {"last_ping_pong_time": last_ping_pong_time},
        )

    async def get_heartbeat(self, agent_name: str) -> dict | None:
        """Read the heartbeat for *agent_name*.

        Returns ``{"last_ping_pong_time": <float>}`` or ``None``.
        """
        return await self.get("heartbeats", agent_name)

    async def get_conversations_for_export(
        self,
        agent_name: str,
        *,
        handle: str | None = None,
        date_from: str | datetime | None = None,
        date_to: str | datetime | None = None,
    ) -> list[dict]:
        """Find conversations for an agent, optionally filtered.

        Date bounds are compared lexicographically against the stored
        ``created_at`` strings; this is correct as long as both sides use the
        same ISO-8601 layout — TODO confirm callers pass ISO strings/datetimes.
        """
        conversations = await self.query("conversations", {"agent_name": agent_name})
        if not handle and not date_from and not date_to:
            return conversations
        results = []
        for conv in conversations:
            messages = await self.get_list("messages", str(conv["id"]))
            if handle and not any(m.get("user_handle") == handle for m in messages):
                continue
            if date_from and not any(m.get("created_at", "") >= str(date_from) for m in messages):
                continue
            if date_to and not any(m.get("created_at", "") <= str(date_to) for m in messages):
                continue
            results.append(conv)
        return results

    async def get_messages_with_blocks(self, conversation_id: int | str) -> list[dict]:
        """Get all messages with their blocks for export purposes."""
        messages = await self.get_list("messages", str(conversation_id))
        result = []
        for msg in messages:
            blocks = await self.get_list("blocks", str(msg["id"]))
            result.append(
                {
                    "id": msg["id"],
                    "user_id": msg.get("user_id", ""),
                    "user_name": msg.get("user_name", ""),
                    "user_handle": msg.get("user_handle", ""),
                    "created_at": msg.get("created_at"),
                    "blocks": blocks,
                }
            )
        return result