rossum-agent 1.0.0rc0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. rossum_agent/__init__.py +9 -0
  2. rossum_agent/agent/__init__.py +32 -0
  3. rossum_agent/agent/core.py +932 -0
  4. rossum_agent/agent/memory.py +176 -0
  5. rossum_agent/agent/models.py +160 -0
  6. rossum_agent/agent/request_classifier.py +152 -0
  7. rossum_agent/agent/skills.py +132 -0
  8. rossum_agent/agent/types.py +5 -0
  9. rossum_agent/agent_logging.py +56 -0
  10. rossum_agent/api/__init__.py +1 -0
  11. rossum_agent/api/cli.py +51 -0
  12. rossum_agent/api/dependencies.py +190 -0
  13. rossum_agent/api/main.py +180 -0
  14. rossum_agent/api/models/__init__.py +1 -0
  15. rossum_agent/api/models/schemas.py +301 -0
  16. rossum_agent/api/routes/__init__.py +1 -0
  17. rossum_agent/api/routes/chats.py +95 -0
  18. rossum_agent/api/routes/files.py +113 -0
  19. rossum_agent/api/routes/health.py +44 -0
  20. rossum_agent/api/routes/messages.py +218 -0
  21. rossum_agent/api/services/__init__.py +1 -0
  22. rossum_agent/api/services/agent_service.py +451 -0
  23. rossum_agent/api/services/chat_service.py +197 -0
  24. rossum_agent/api/services/file_service.py +65 -0
  25. rossum_agent/assets/Primary_light_logo.png +0 -0
  26. rossum_agent/bedrock_client.py +64 -0
  27. rossum_agent/prompts/__init__.py +27 -0
  28. rossum_agent/prompts/base_prompt.py +80 -0
  29. rossum_agent/prompts/system_prompt.py +24 -0
  30. rossum_agent/py.typed +0 -0
  31. rossum_agent/redis_storage.py +482 -0
  32. rossum_agent/rossum_mcp_integration.py +123 -0
  33. rossum_agent/skills/hook-debugging.md +31 -0
  34. rossum_agent/skills/organization-setup.md +60 -0
  35. rossum_agent/skills/rossum-deployment.md +102 -0
  36. rossum_agent/skills/schema-patching.md +61 -0
  37. rossum_agent/skills/schema-pruning.md +23 -0
  38. rossum_agent/skills/ui-settings.md +45 -0
  39. rossum_agent/streamlit_app/__init__.py +1 -0
  40. rossum_agent/streamlit_app/app.py +646 -0
  41. rossum_agent/streamlit_app/beep_sound.py +36 -0
  42. rossum_agent/streamlit_app/cli.py +17 -0
  43. rossum_agent/streamlit_app/render_modules.py +123 -0
  44. rossum_agent/streamlit_app/response_formatting.py +305 -0
  45. rossum_agent/tools/__init__.py +214 -0
  46. rossum_agent/tools/core.py +173 -0
  47. rossum_agent/tools/deploy.py +404 -0
  48. rossum_agent/tools/dynamic_tools.py +365 -0
  49. rossum_agent/tools/file_tools.py +62 -0
  50. rossum_agent/tools/formula.py +187 -0
  51. rossum_agent/tools/skills.py +31 -0
  52. rossum_agent/tools/spawn_mcp.py +227 -0
  53. rossum_agent/tools/subagents/__init__.py +31 -0
  54. rossum_agent/tools/subagents/base.py +303 -0
  55. rossum_agent/tools/subagents/hook_debug.py +591 -0
  56. rossum_agent/tools/subagents/knowledge_base.py +305 -0
  57. rossum_agent/tools/subagents/mcp_helpers.py +47 -0
  58. rossum_agent/tools/subagents/schema_patching.py +471 -0
  59. rossum_agent/url_context.py +167 -0
  60. rossum_agent/user_detection.py +100 -0
  61. rossum_agent/utils.py +128 -0
  62. rossum_agent-1.0.0rc0.dist-info/METADATA +311 -0
  63. rossum_agent-1.0.0rc0.dist-info/RECORD +67 -0
  64. rossum_agent-1.0.0rc0.dist-info/WHEEL +5 -0
  65. rossum_agent-1.0.0rc0.dist-info/entry_points.txt +3 -0
  66. rossum_agent-1.0.0rc0.dist-info/licenses/LICENSE +21 -0
  67. rossum_agent-1.0.0rc0.dist-info/top_level.txt +1 -0
rossum_agent/redis_storage.py
@@ -0,0 +1,482 @@
+ """Redis-based chat persistence."""
+
+ from __future__ import annotations
+
+ import base64
+ import datetime as dt
+ import json
+ import logging
+ import os
+ import shutil
+ import subprocess
+ from dataclasses import dataclass, field
+ from pathlib import Path
+ from typing import Any, cast
+
+ import redis
+
+ logger = logging.getLogger(__name__)
+
+
+ def extract_text_from_content(content: str | list[dict[str, Any]] | None) -> str:
+     """Extract text from message content which can be a string or multimodal list."""
+     if content is None:
+         return ""
+     if isinstance(content, str):
+         return content
+     if isinstance(content, list):
+         return " ".join(
+             block.get("text", "") for block in content if isinstance(block, dict) and block.get("type") == "text"
+         )
+     return ""
+
+
+ def get_commit_sha() -> str | None:
+     """Get short commit SHA or None if not in a git repository."""
+     try:
+         if not (git_executable := shutil.which("git")):
+             return None
+         result = subprocess.run(
+             [git_executable, "rev-parse", "--short", "HEAD"],
+             capture_output=True,
+             text=True,
+             timeout=5,
+             check=False,
+         )
+         return result.stdout.strip() if result.returncode == 0 else None
+     except (subprocess.SubprocessError, FileNotFoundError, OSError):
+         logger.debug("Failed to get git commit SHA")
+         return None
+
+
+ @dataclass
+ class ChatMetadata:
+     """Metadata for a chat session."""
+
+     commit_sha: str | None = None
+     total_input_tokens: int = 0
+     total_output_tokens: int = 0
+     total_tool_calls: int = 0
+     total_steps: int = 0
+     mcp_mode: str = "read-only"
+
+     def to_dict(self) -> dict[str, Any]:
+         """Convert to dictionary for JSON serialization."""
+         return {
+             "commit_sha": self.commit_sha,
+             "total_input_tokens": self.total_input_tokens,
+             "total_output_tokens": self.total_output_tokens,
+             "total_tool_calls": self.total_tool_calls,
+             "total_steps": self.total_steps,
+             "mcp_mode": self.mcp_mode,
+         }
+
+     @classmethod
+     def from_dict(cls, data: dict[str, Any]) -> ChatMetadata:
+         """Create from dictionary."""
+         return cls(
+             commit_sha=data.get("commit_sha"),
+             total_input_tokens=data.get("total_input_tokens", 0),
+             total_output_tokens=data.get("total_output_tokens", 0),
+             total_tool_calls=data.get("total_tool_calls", 0),
+             total_steps=data.get("total_steps", 0),
+             mcp_mode=data.get("mcp_mode", "read-only"),
+         )
+
+
+ @dataclass
+ class ChatData:
+     """Data structure for chat storage results."""
+
+     messages: list[dict[str, Any]] = field(default_factory=list)
+     output_dir: str | None = None
+     metadata: ChatMetadata = field(default_factory=ChatMetadata)
+
+
+ class RedisStorage:
+     """Redis storage for chat conversations."""
+
+     def __init__(self, host: str | None = None, port: int | None = None, ttl_days: int = 30) -> None:
+         """Initialize Redis storage.
+
+         Args:
+             host: Redis host (defaults to REDIS_HOST env var or 'localhost')
+             port: Redis port (defaults to REDIS_PORT env var or 6379)
+             ttl_days: Time-to-live for chat data in days (default: 30)
+         """
+         self.host = host or os.getenv("REDIS_HOST", "localhost")
+         self.port = int(port if port is not None else int(os.getenv("REDIS_PORT", "6379")))
+         self.ttl = dt.timedelta(days=ttl_days)
+         self._client: redis.Redis | None = None
+
+     @property
+     def client(self) -> redis.Redis:
+         """Get or create Redis client."""
+         if self._client is None:
+             self._client = redis.Redis(
+                 host=self.host, port=self.port, decode_responses=False, socket_connect_timeout=5
+             )
+         return self._client
+
+     def save_chat(
+         self,
+         user_id: str | None,
+         chat_id: str,
+         messages: list[dict[str, Any]],
+         output_dir: str | Path | None = None,
+         metadata: ChatMetadata | None = None,
+     ) -> bool:
+         try:
+             key = self._get_chat_key(user_id, chat_id)
+             payload = {
+                 "messages": messages,
+                 "output_dir": str(output_dir) if output_dir else None,
+                 "metadata": metadata.to_dict() if metadata else ChatMetadata().to_dict(),
+             }
+             value = json.dumps(payload).encode("utf-8")
+             self.client.setex(key, self.ttl, value)
+
+             files_saved = 0
+             if output_dir:
+                 output_path = Path(output_dir) if isinstance(output_dir, str) else output_dir
+                 files_saved = self.save_all_files(chat_id, output_path)
+
+             logger.info(
+                 f"Saved chat {chat_id} to Redis "
+                 f"(messages={len(messages)}, user: {user_id or 'shared'}, files={files_saved})"
+             )
+             return True
+         except Exception as e:
+             logger.error(f"Failed to save chat {chat_id}: {e}", exc_info=True)
+             return False
+
+     def load_chat(self, user_id: str | None, chat_id: str, output_dir: Path | None = None) -> ChatData | None:
+         """Load chat from Redis and restore files to output directory.
+
+         Args:
+             user_id: Optional user identifier
+             chat_id: Chat identifier
+             output_dir: Directory to restore files to. If None, a new temp directory is created.
+
+         Returns:
+             ChatData containing messages, output_dir, and metadata, or None if chat not found
+         """
+         try:
+             key = self._get_chat_key(user_id, chat_id)
+             value = self.client.get(key)
+             if value is None:
+                 logger.info(f"Chat {chat_id} not found in Redis (user: {user_id or 'shared'})")
+                 return None
+
+             data = json.loads(cast("bytes", value).decode("utf-8"))
+             messages = data if isinstance(data, list) else data.get("messages", [])
+             stored_output_dir = data.get("output_dir") if isinstance(data, dict) else None
+             metadata_dict = data.get("metadata", {}) if isinstance(data, dict) else {}
+             metadata = ChatMetadata.from_dict(metadata_dict)
+
+             files_loaded = 0
+             if output_dir:
+                 files_loaded = self.load_all_files(chat_id, output_dir)
+
+             logger.info(
+                 f"Loaded chat {chat_id} from Redis "
+                 f"({len(messages)} messages, {files_loaded} files, user: {user_id or 'shared'})"
+             )
+             return ChatData(messages=messages, output_dir=stored_output_dir, metadata=metadata)
+         except Exception as e:
+             logger.error(f"Failed to load chat {chat_id}: {e}", exc_info=True)
+             return None
+
+     def delete_chat(self, user_id: str | None, chat_id: str) -> bool:
+         try:
+             key = self._get_chat_key(user_id, chat_id)
+             deleted = self.client.delete(key)
+             logger.info(f"Deleted chat {chat_id} from Redis (deleted={deleted}, user: {user_id or 'shared'})")
+             return bool(deleted)
+         except Exception as e:
+             logger.error(f"Failed to delete chat {chat_id}: {e}", exc_info=True)
+             return False
+
+     def chat_exists(self, user_id: str | None, chat_id: str) -> bool:
+         try:
+             key = self._get_chat_key(user_id, chat_id)
+             return bool(self.client.exists(key))
+         except Exception as e:
+             logger.error(f"Failed to check if chat {chat_id} exists: {e}", exc_info=True)
+             return False
+
+     def is_connected(self) -> bool:
+         try:
+             self.client.ping()
+             return True
+         except Exception:
+             return False
+
+     def _get_chat_key(self, user_id: str | None, chat_id: str) -> str:
+         """Generate Redis key for a chat."""
+         return f"user:{user_id}:chat:{chat_id}" if user_id else f"chat:{chat_id}"
+
+     def _get_chat_pattern(self, user_id: str | None = None) -> str:
+         """Generate Redis key pattern for listing chats."""
+         return f"user:{user_id}:chat:*" if user_id else "chat:*"
+
+     def list_all_chats(self, user_id: str | None = None) -> list[dict[str, Any]]:
+         """List all chat conversations with metadata.
+
+         Args:
+             user_id: Optional user ID to filter chats (None = all chats or shared chats)
+
+         Returns:
+             List of dicts with chat_id, timestamp, message_count, first_message,
+             and metadata (commit_sha, total_input_tokens, total_output_tokens,
+             total_tool_calls, total_steps)
+         """
+         try:
+             pattern = self._get_chat_pattern(user_id)
+             keys = cast("list[bytes]", self.client.keys(pattern.encode("utf-8")))
+             chats = []
+
+             for key in keys:
+                 key_str = key.decode("utf-8")
+                 chat_id = key_str.replace(f"user:{user_id}:chat:", "") if user_id else key_str.replace("chat:", "")
+
+                 chat_data = self.load_chat(user_id, chat_id)
+
+                 if chat_data:
+                     messages = chat_data.messages
+                     timestamp_str = chat_id.split("_")[1]
+                     timestamp = int(dt.datetime.strptime(timestamp_str, "%Y%m%d%H%M%S").timestamp())
+                     first_message_content = messages[0].get("content") if messages else None
+                     first_message = extract_text_from_content(first_message_content)
+                     first_user_content = next(
+                         (m.get("content") for m in messages if m.get("role") == "user"),
+                         None,
+                     )
+                     first_user = extract_text_from_content(first_user_content)
+                     preview = first_user[:100] if first_user else None
+
+                     chats.append(
+                         {
+                             "chat_id": chat_id,
+                             "timestamp": timestamp,
+                             "message_count": len(messages),
+                             "first_message": first_message[:100],
+                             "preview": preview,
+                             "commit_sha": chat_data.metadata.commit_sha,
+                             "total_input_tokens": chat_data.metadata.total_input_tokens,
+                             "total_output_tokens": chat_data.metadata.total_output_tokens,
+                             "total_tool_calls": chat_data.metadata.total_tool_calls,
+                             "total_steps": chat_data.metadata.total_steps,
+                         }
+                     )
+
+             chats.sort(key=lambda x: x["timestamp"], reverse=True)
+             logger.info(f"Found {len(chats)} chats in Redis (user: {user_id or 'shared'})")
+             return chats
+         except Exception as e:
+             logger.error(f"Failed to list chats: {e}", exc_info=True)
+             return []
+
+     def save_file(self, chat_id: str, file_path: Path | str, content: bytes | None = None) -> bool:
+         """Save a file to Redis associated with a chat session.
+
+         Args:
+             chat_id: Chat session ID
+             file_path: Path to the file (or filename)
+             content: Optional file content as bytes. If not provided, reads from file_path
+
+         Returns:
+             True if successful, False otherwise
+         """
+         try:
+             if isinstance(file_path, str):
+                 file_path = Path(file_path)
+
+             filename = file_path.name
+             key = f"file:{chat_id}:{filename}"
+
+             if content is None:
+                 if not file_path.exists():
+                     logger.error(f"File not found: {file_path}")
+                     return False
+                 content = file_path.read_bytes()
+
+             # Store file with metadata
+             metadata = {
+                 "filename": filename,
+                 "size": len(content),
+                 "timestamp": dt.datetime.now(dt.UTC).isoformat(),
+                 "content": base64.b64encode(content).decode("utf-8"),
+             }
+
+             value = json.dumps(metadata).encode("utf-8")
+             self.client.setex(key, self.ttl, value)
+             logger.info(f"Saved file {filename} for chat {chat_id} to Redis ({len(content)} bytes)")
+             return True
+         except Exception as e:
+             logger.error(f"Failed to save file {filename} for chat {chat_id}: {e}", exc_info=True)
+             return False
+
+     def load_file(self, chat_id: str, filename: str) -> bytes | None:
+         """Load a file from Redis for a chat session.
+
+         Args:
+             chat_id: Chat session ID
+             filename: Name of the file to load
+
+         Returns:
+             File content as bytes, or None if not found
+         """
+         try:
+             key = f"file:{chat_id}:{filename}"
+             value = self.client.get(key)
+             if value is None:
+                 logger.info(f"File {filename} not found for chat {chat_id}")
+                 return None
+
+             metadata: dict[str, Any] = json.loads(cast("bytes", value).decode("utf-8"))
+             content = base64.b64decode(metadata["content"])
+             logger.info(f"Loaded file {filename} for chat {chat_id} ({len(content)} bytes)")
+             return content
+         except Exception as e:
+             logger.error(f"Failed to load file {filename} for chat {chat_id}: {e}", exc_info=True)
+             return None
+
+     def list_files(self, chat_id: str) -> list[dict[str, Any]]:
+         """List all files for a chat session.
+
+         Args:
+             chat_id: Chat session ID
+
+         Returns:
+             List of dicts with filename, size, and timestamp
+         """
+         try:
+             pattern = f"file:{chat_id}:*"
+             keys = cast("list[bytes]", self.client.keys(pattern.encode("utf-8")))
+             files = []
+
+             for key in keys:
+                 key_str = key.decode("utf-8")
+                 filename = key_str.split(":")[-1]
+                 value = self.client.get(key)
+                 if value:
+                     metadata: dict[str, Any] = json.loads(cast("bytes", value).decode("utf-8"))
+                     files.append(
+                         {
+                             "filename": filename,
+                             "size": metadata.get("size", 0),
+                             "timestamp": metadata.get("timestamp", ""),
+                         }
+                     )
+
+             logger.info(f"Found {len(files)} files for chat {chat_id}")
+             return files
+         except Exception as e:
+             logger.error(f"Failed to list files for chat {chat_id}: {e}", exc_info=True)
+             return []
+
+     def delete_file(self, chat_id: str, filename: str) -> bool:
+         """Delete a file from Redis for a chat session.
+
+         Args:
+             chat_id: Chat session ID
+             filename: Name of the file to delete
+
+         Returns:
+             True if deleted, False otherwise
+         """
+         try:
+             key = f"file:{chat_id}:{filename}"
+             deleted = self.client.delete(key)
+             logger.info(f"Deleted file {filename} for chat {chat_id} (deleted={deleted})")
+             return bool(deleted)
+         except Exception as e:
+             logger.error(f"Failed to delete file {filename} for chat {chat_id}: {e}", exc_info=True)
+             return False
+
+     def delete_all_files(self, chat_id: str) -> int:
+         """Delete all files for a chat session.
+
+         Args:
+             chat_id: Chat session ID
+
+         Returns:
+             Number of files deleted
+         """
+         try:
+             pattern = f"file:{chat_id}:*"
+             keys = cast("list[bytes]", self.client.keys(pattern.encode("utf-8")))
+             if not keys:
+                 logger.info(f"No files to delete for chat {chat_id}")
+                 return 0
+
+             deleted = cast("int", self.client.delete(*keys))
+             logger.info(f"Deleted {deleted} files for chat {chat_id}")
+             return deleted
+         except Exception as e:
+             logger.error(f"Failed to delete files for chat {chat_id}: {e}", exc_info=True)
+             return 0
+
+     def save_all_files(self, chat_id: str, output_dir: Path) -> int:
+         """Save all files from output directory to Redis.
+
+         Args:
+             chat_id: Chat session ID
+             output_dir: Directory containing files to save
+
+         Returns:
+             Number of files saved successfully
+         """
+         saved_count = 0
+         try:
+             if not output_dir.exists() or not output_dir.is_dir():
+                 logger.warning(f"Output directory does not exist: {output_dir}")
+                 return 0
+
+             files = [f for f in output_dir.iterdir() if f.is_file()]
+             for file_path in files:
+                 if self.save_file(chat_id, file_path):
+                     saved_count += 1
+
+             logger.info(f"Saved {saved_count}/{len(files)} files for chat {chat_id} to Redis")
+             return saved_count
+         except Exception as e:
+             logger.error(f"Failed to save files for chat {chat_id}: {e}", exc_info=True)
+             return saved_count
+
+     def load_all_files(self, chat_id: str, output_dir: Path) -> int:
+         """Load all files from Redis to output directory.
+
+         Args:
+             chat_id: Chat session ID
+             output_dir: Directory where files will be restored
+
+         Returns:
+             Number of files loaded successfully
+         """
+         loaded_count = 0
+         try:
+             output_dir.mkdir(parents=True, exist_ok=True)
+             files = self.list_files(chat_id)
+
+             for file_info in files:
+                 filename = file_info["filename"]
+                 content = self.load_file(chat_id, filename)
+                 if content:
+                     file_path = output_dir / filename
+                     file_path.write_bytes(content)
+                     loaded_count += 1
+
+             logger.info(f"Loaded {loaded_count}/{len(files)} files for chat {chat_id} from Redis")
+             return loaded_count
+         except Exception as e:
+             logger.error(f"Failed to load files for chat {chat_id}: {e}", exc_info=True)
+             return loaded_count
+
+     def close(self) -> None:
+         """Close Redis connection."""
+         if self._client is not None:
+             self._client.close()
+             self._client = None
+             logger.info("Closed Redis connection")
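For orientation, here is a minimal usage sketch of `RedisStorage` that is not part of the package. It assumes a Redis server reachable on `localhost:6379` and a chat ID of the `<prefix>_<YYYYMMDDHHMMSS>` shape that `list_all_chats` expects when it derives timestamps.

```python
# Minimal sketch (assumes a local Redis instance; chat ID format inferred from list_all_chats).
import datetime as dt

from rossum_agent.redis_storage import ChatMetadata, RedisStorage

storage = RedisStorage(ttl_days=7)
if not storage.is_connected():
    raise SystemExit("Redis is not reachable")

chat_id = f"chat_{dt.datetime.now(dt.UTC):%Y%m%d%H%M%S}"
messages = [
    {"role": "user", "content": "Hello"},
    {"role": "assistant", "content": [{"type": "text", "text": "Hi there!"}]},
]
metadata = ChatMetadata(commit_sha="abc1234", total_input_tokens=42, total_steps=1)

# Stored under key "user:alice:chat:<chat_id>" with the configured TTL.
storage.save_chat(user_id="alice", chat_id=chat_id, messages=messages, metadata=metadata)

# Round-trip: messages and metadata come back as a ChatData instance.
restored = storage.load_chat(user_id="alice", chat_id=chat_id)
if restored is not None:
    print(len(restored.messages), restored.metadata.total_input_tokens)

storage.close()
```

Note that chats and their attachments live under separate keys (`user:<id>:chat:<chat_id>` vs. `file:<chat_id>:<filename>`), so file payloads expire independently of the conversation JSON.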
rossum_agent/rossum_mcp_integration.py
@@ -0,0 +1,123 @@
+ """MCP Tools Integration Module.
+
+ Provides functionality to connect to the rossum-mcp server and convert MCP tools
+ to Anthropic tool format for use with the Claude API.
+ """
+
+ from __future__ import annotations
+
+ import logging
+ import os
+ from contextlib import asynccontextmanager
+ from dataclasses import dataclass
+ from typing import TYPE_CHECKING, Any, Literal
+
+ from anthropic.types import ToolParam
+ from fastmcp import Client
+ from fastmcp.client.transports import StdioTransport
+
+ if TYPE_CHECKING:
+     from collections.abc import AsyncIterator
+
+     from mcp.types import Tool as MCPTool
+
+ logger = logging.getLogger(__name__)
+
+
+ @dataclass
+ class MCPConnection:
+     """Holds the MCP client and provides tool operations."""
+
+     client: Client
+     _tools: list[MCPTool] | None = None
+
+     async def get_tools(self) -> list[MCPTool]:
+         """Get the list of available MCP tools (cached)."""
+         if self._tools is None:
+             self._tools = await self.client.list_tools()
+         return self._tools
+
+     async def call_tool(self, name: str, arguments: dict[str, Any] | None = None) -> Any:
+         """Call an MCP tool by name with the given arguments.
+
+         Args:
+             name: The name of the tool to call.
+             arguments: Optional dictionary of arguments to pass to the tool.
+
+         Returns:
+             The result of the tool call.
+         """
+         logger.info(f"Calling MCP tool {name}")
+
+         result = await self.client.call_tool(name, arguments or {})
+         # Prefer structured_content (raw dict) over data (parsed pydantic model)
+         # because FastMCP's json_schema_to_type has a bug where nested dict fields
+         # like config: dict[str, Any] become empty dataclasses, losing all data.
+         # See: https://github.com/jlowin/fastmcp/issues/XXX
+         if result.structured_content is not None:
+             return result.structured_content
+         if result.data is not None:
+             return result.data
+         if result.content:
+             text_parts = [str(block.text) for block in result.content if hasattr(block, "text") and block.text]
+             if len(text_parts) == 1:
+                 return text_parts[0]
+             return "\n".join(text_parts) if text_parts else None
+         return None
+
+
+ def create_mcp_transport(
+     rossum_api_token: str, rossum_api_base_url: str, mcp_mode: Literal["read-only", "read-write"] = "read-only"
+ ) -> StdioTransport:
+     """Create a StdioTransport for the rossum-mcp server.
+
+     Args:
+         rossum_api_token: Rossum API token for authentication.
+         rossum_api_base_url: Rossum API base URL.
+
+     Returns:
+         Configured StdioTransport for the rossum-mcp server.
+     """
+     return StdioTransport(
+         command="rossum-mcp",
+         args=[],
+         env={
+             **os.environ,
+             "ROSSUM_API_BASE_URL": rossum_api_base_url.rstrip("/"),
+             "ROSSUM_API_TOKEN": rossum_api_token,
+             "ROSSUM_MCP_MODE": mcp_mode,
+         },
+     )
+
+
+ @asynccontextmanager
+ async def connect_mcp_server(
+     rossum_api_token: str, rossum_api_base_url: str, mcp_mode: Literal["read-only", "read-write"] = "read-only"
+ ) -> AsyncIterator[MCPConnection]:
+     """Connect to the rossum-mcp server and yield an MCPConnection.
+
+     This context manager handles the lifecycle of the MCP client connection.
+     Tools are cached after the first retrieval for efficiency.
+
+     Args:
+         rossum_api_token: Rossum API token for authentication.
+         rossum_api_base_url: Rossum API base URL.
+
+     Yields:
+         MCPConnection with the connected client.
+     """
+     transport = create_mcp_transport(
+         rossum_api_token=rossum_api_token, rossum_api_base_url=rossum_api_base_url, mcp_mode=mcp_mode
+     )
+
+     client = Client(transport)
+     async with client:
+         yield MCPConnection(client=client)
+
+
+ def mcp_tools_to_anthropic_format(mcp_tools: list[MCPTool]) -> list[ToolParam]:
+     """Convert MCP tools to Anthropic tool format."""
+     return [
+         ToolParam(name=mcp_tool.name, description=mcp_tool.description or "", input_schema=mcp_tool.inputSchema)
+         for mcp_tool in mcp_tools
+     ]
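A hedged sketch of how this module's pieces might fit together, outside the package itself. It assumes the `rossum-mcp` executable is on `PATH` and the listed environment variables are set; the tool name used below is a placeholder, not a confirmed rossum-mcp tool.

```python
# Sketch: connect to rossum-mcp, convert its tools for the Anthropic API, call one tool.
import asyncio
import os

from rossum_agent.rossum_mcp_integration import connect_mcp_server, mcp_tools_to_anthropic_format


async def main() -> None:
    async with connect_mcp_server(
        rossum_api_token=os.environ["ROSSUM_API_TOKEN"],
        rossum_api_base_url=os.environ["ROSSUM_API_BASE_URL"],
        mcp_mode="read-only",
    ) as connection:
        mcp_tools = await connection.get_tools()  # cached after the first call
        anthropic_tools = mcp_tools_to_anthropic_format(mcp_tools)
        print([tool["name"] for tool in anthropic_tools])

        # "list_queues" is a hypothetical name; real names come from the rossum-mcp server.
        result = await connection.call_tool("list_queues", {})
        print(result)


asyncio.run(main())
```

Since `ToolParam` is a typed dict, the converted list can be passed directly as the `tools` parameter of an Anthropic messages request.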
rossum_agent/skills/hook-debugging.md
@@ -0,0 +1,31 @@
+ # Hook Debugging Skill
+
+ **Goal**: Identify and fix hook issues.
+
+ ## Tools
+
+ | Tool | Purpose |
+ |------|---------|
+ | `search_knowledge_base` | **USE FIRST** - Rossum docs contain extension configs, known issues, required schema fields |
+ | `debug_hook(hook_id, annotation_id)` | Spawns Opus sub-agent for code analysis - returns verified fix |
+
+ ## Constraints
+
+ - **ALWAYS search knowledge base first** - it contains solutions, required configurations, and known issues that save debugging time
+ - **Use `debug_hook` for Python code** - do not analyze hook code yourself
+ - **Trust `debug_hook` results** - do not re-analyze or modify the returned fix
+
+ ## `debug_hook` Usage
+
+ ```python
+ debug_hook(hook_id="12345", annotation_id="67890")
+ ```
+
+ The sub-agent fetches hook code and annotation data automatically. No need to call `get_hook` or `get_annotation` first.
+
+ ## Relations Reference
+
+ | Type | Tools | Use Case |
+ |------|-------|----------|
+ | Relations | `get_relation`, `list_relations` | Track edits, duplicates, attachments |
+ | Document Relations | `get_document_relation`, `list_document_relations` | Track exports, e-invoice docs |
rossum_agent/skills/organization-setup.md
@@ -0,0 +1,60 @@
+ # Organization Setup Skill
+
+ **Goal**: Set up Rossum for new customers with correct document types and regional configurations.
+
+ ## Queue Creation
+
+ | Scenario | Tool |
+ |----------|------|
+ | New customer onboarding | `create_queue_from_template` |
+ | Copy existing config | `create_queue` with custom schema |
+ | Empty queue | `create_queue` |
+
+ ## Templates
+
+ | Template | Region/Type |
+ |----------|-------------|
+ | EU Demo Template | European invoices |
+ | US Demo Template | US invoices |
+ | UK Demo Template | UK invoices |
+ | CZ Demo Template | Czech invoices |
+ | Chinese Invoices (Fapiao) Demo Template | Chinese Fapiao |
+ | Credit Note Demo Template | Credit notes |
+ | Debit Note Demo Template | Debit notes |
+ | Purchase Order Demo Template | Purchase orders |
+ | Delivery Note Demo Template | Delivery notes |
+ | Proforma Invoice Demo Template | Proforma invoices |
+ | Certificates of Analysis Demo Template | Certificates of analysis |
+
+ Regional variants: `AP&R {Region} Demo Template`, `Tax Invoice {Region} Demo Template`
+
+ ## Schema Customization
+
+ **Load `schema-pruning` skill** for bulk field removal, **`schema-patching` skill** for adding fields.
+
+ ### Schema Pruning
+
+ Use `prune_schema_fields(schema_id, fields_to_keep=[...])` to remove unwanted fields in one call. Specify leaf field IDs only; parent containers (sections, multivalues, tuples) are preserved automatically. Alternatively, use the `fields_to_remove` parameter to remove specific fields instead.
+
+ | Field status | Action |
+ |--------------|--------|
+ | Requested + exists in template | Keep |
+ | Requested + not in template | Add to correct section |
+ | In template + not requested | **Remove** |
+
+ **Section placement** (verify against actual schema):
+
+ | Field semantics | Typical section |
+ |-----------------|-----------------|
+ | Document ID, dates, order numbers | `basic_info_section` |
+ | Vendor/supplier info | Section with `sender_` prefixed fields |
+ | Customer/recipient info | Section with `recipient_` prefixed fields |
+ | Amounts, totals, tax | `amounts_section` |
+ | Line item columns | `line_items_section` (multivalue) |
+
+ ## Constraints
+
+ - Match region to template (EU/US/UK/CZ/CN defaults differ)
+ - Templates include pre-configured engine
+ - No mermaid diagrams unless explicitly requested
+ - Customize via `update_schema` after creation
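To make the Schema Pruning step in this skill concrete, here is a hedged illustration of the call. The schema ID and field IDs are placeholders; only the `fields_to_keep`/`fields_to_remove` parameters named in the skill text are assumed.

```python
# Keep only the requested leaf fields; sections, multivalues, and tuples are preserved automatically.
prune_schema_fields(
    schema_id="123456",  # placeholder schema ID
    fields_to_keep=["document_id", "date_issue", "sender_name", "amount_total"],  # placeholder field IDs
)

# Or remove specific fields instead (placeholder field IDs):
prune_schema_fields(schema_id="123456", fields_to_remove=["iban", "terms"])
```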