spaik-sdk 0.6.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- spaik_sdk/__init__.py +21 -0
- spaik_sdk/agent/__init__.py +0 -0
- spaik_sdk/agent/base_agent.py +249 -0
- spaik_sdk/attachments/__init__.py +22 -0
- spaik_sdk/attachments/builder.py +61 -0
- spaik_sdk/attachments/file_storage_provider.py +27 -0
- spaik_sdk/attachments/mime_types.py +118 -0
- spaik_sdk/attachments/models.py +63 -0
- spaik_sdk/attachments/provider_support.py +53 -0
- spaik_sdk/attachments/storage/__init__.py +0 -0
- spaik_sdk/attachments/storage/base_file_storage.py +32 -0
- spaik_sdk/attachments/storage/impl/__init__.py +0 -0
- spaik_sdk/attachments/storage/impl/local_file_storage.py +101 -0
- spaik_sdk/audio/__init__.py +12 -0
- spaik_sdk/audio/options.py +53 -0
- spaik_sdk/audio/providers/__init__.py +1 -0
- spaik_sdk/audio/providers/google_tts.py +77 -0
- spaik_sdk/audio/providers/openai_stt.py +71 -0
- spaik_sdk/audio/providers/openai_tts.py +111 -0
- spaik_sdk/audio/stt.py +61 -0
- spaik_sdk/audio/tts.py +124 -0
- spaik_sdk/config/credentials_provider.py +10 -0
- spaik_sdk/config/env.py +59 -0
- spaik_sdk/config/env_credentials_provider.py +7 -0
- spaik_sdk/config/get_credentials_provider.py +14 -0
- spaik_sdk/image_gen/__init__.py +9 -0
- spaik_sdk/image_gen/image_generator.py +83 -0
- spaik_sdk/image_gen/options.py +24 -0
- spaik_sdk/image_gen/providers/__init__.py +0 -0
- spaik_sdk/image_gen/providers/google.py +75 -0
- spaik_sdk/image_gen/providers/openai.py +60 -0
- spaik_sdk/llm/__init__.py +0 -0
- spaik_sdk/llm/cancellation_handle.py +10 -0
- spaik_sdk/llm/consumption/__init__.py +0 -0
- spaik_sdk/llm/consumption/consumption_estimate.py +26 -0
- spaik_sdk/llm/consumption/consumption_estimate_builder.py +113 -0
- spaik_sdk/llm/consumption/consumption_extractor.py +59 -0
- spaik_sdk/llm/consumption/token_usage.py +31 -0
- spaik_sdk/llm/converters.py +146 -0
- spaik_sdk/llm/cost/__init__.py +1 -0
- spaik_sdk/llm/cost/builtin_cost_provider.py +83 -0
- spaik_sdk/llm/cost/cost_estimate.py +8 -0
- spaik_sdk/llm/cost/cost_provider.py +28 -0
- spaik_sdk/llm/extract_error_message.py +37 -0
- spaik_sdk/llm/langchain_loop_manager.py +270 -0
- spaik_sdk/llm/langchain_service.py +196 -0
- spaik_sdk/llm/message_handler.py +188 -0
- spaik_sdk/llm/streaming/__init__.py +1 -0
- spaik_sdk/llm/streaming/block_manager.py +152 -0
- spaik_sdk/llm/streaming/models.py +42 -0
- spaik_sdk/llm/streaming/streaming_content_handler.py +157 -0
- spaik_sdk/llm/streaming/streaming_event_handler.py +215 -0
- spaik_sdk/llm/streaming/streaming_state_manager.py +58 -0
- spaik_sdk/models/__init__.py +0 -0
- spaik_sdk/models/factories/__init__.py +0 -0
- spaik_sdk/models/factories/anthropic_factory.py +33 -0
- spaik_sdk/models/factories/base_model_factory.py +71 -0
- spaik_sdk/models/factories/google_factory.py +30 -0
- spaik_sdk/models/factories/ollama_factory.py +41 -0
- spaik_sdk/models/factories/openai_factory.py +50 -0
- spaik_sdk/models/llm_config.py +46 -0
- spaik_sdk/models/llm_families.py +7 -0
- spaik_sdk/models/llm_model.py +17 -0
- spaik_sdk/models/llm_wrapper.py +25 -0
- spaik_sdk/models/model_registry.py +156 -0
- spaik_sdk/models/providers/__init__.py +0 -0
- spaik_sdk/models/providers/anthropic_provider.py +29 -0
- spaik_sdk/models/providers/azure_provider.py +31 -0
- spaik_sdk/models/providers/base_provider.py +62 -0
- spaik_sdk/models/providers/google_provider.py +26 -0
- spaik_sdk/models/providers/ollama_provider.py +26 -0
- spaik_sdk/models/providers/openai_provider.py +26 -0
- spaik_sdk/models/providers/provider_type.py +90 -0
- spaik_sdk/orchestration/__init__.py +24 -0
- spaik_sdk/orchestration/base_orchestrator.py +238 -0
- spaik_sdk/orchestration/checkpoint.py +80 -0
- spaik_sdk/orchestration/models.py +103 -0
- spaik_sdk/prompt/__init__.py +0 -0
- spaik_sdk/prompt/get_prompt_loader.py +13 -0
- spaik_sdk/prompt/local_prompt_loader.py +21 -0
- spaik_sdk/prompt/prompt_loader.py +48 -0
- spaik_sdk/prompt/prompt_loader_mode.py +14 -0
- spaik_sdk/py.typed +1 -0
- spaik_sdk/recording/__init__.py +1 -0
- spaik_sdk/recording/base_playback.py +90 -0
- spaik_sdk/recording/base_recorder.py +50 -0
- spaik_sdk/recording/conditional_recorder.py +38 -0
- spaik_sdk/recording/impl/__init__.py +1 -0
- spaik_sdk/recording/impl/local_playback.py +76 -0
- spaik_sdk/recording/impl/local_recorder.py +85 -0
- spaik_sdk/recording/langchain_serializer.py +88 -0
- spaik_sdk/server/__init__.py +1 -0
- spaik_sdk/server/api/routers/__init__.py +0 -0
- spaik_sdk/server/api/routers/api_builder.py +149 -0
- spaik_sdk/server/api/routers/audio_router_factory.py +201 -0
- spaik_sdk/server/api/routers/file_router_factory.py +111 -0
- spaik_sdk/server/api/routers/thread_router_factory.py +284 -0
- spaik_sdk/server/api/streaming/__init__.py +0 -0
- spaik_sdk/server/api/streaming/format_sse_event.py +41 -0
- spaik_sdk/server/api/streaming/negotiate_streaming_response.py +8 -0
- spaik_sdk/server/api/streaming/streaming_negotiator.py +10 -0
- spaik_sdk/server/authorization/__init__.py +0 -0
- spaik_sdk/server/authorization/base_authorizer.py +64 -0
- spaik_sdk/server/authorization/base_user.py +13 -0
- spaik_sdk/server/authorization/dummy_authorizer.py +17 -0
- spaik_sdk/server/job_processor/__init__.py +0 -0
- spaik_sdk/server/job_processor/base_job_processor.py +8 -0
- spaik_sdk/server/job_processor/thread_job_processor.py +32 -0
- spaik_sdk/server/pubsub/__init__.py +1 -0
- spaik_sdk/server/pubsub/cancellation_publisher.py +7 -0
- spaik_sdk/server/pubsub/cancellation_subscriber.py +38 -0
- spaik_sdk/server/pubsub/event_publisher.py +13 -0
- spaik_sdk/server/pubsub/impl/__init__.py +1 -0
- spaik_sdk/server/pubsub/impl/local_cancellation_pubsub.py +48 -0
- spaik_sdk/server/pubsub/impl/signalr_publisher.py +36 -0
- spaik_sdk/server/queue/__init__.py +1 -0
- spaik_sdk/server/queue/agent_job_queue.py +27 -0
- spaik_sdk/server/queue/impl/__init__.py +1 -0
- spaik_sdk/server/queue/impl/azure_queue.py +24 -0
- spaik_sdk/server/response/__init__.py +0 -0
- spaik_sdk/server/response/agent_response_generator.py +39 -0
- spaik_sdk/server/response/response_generator.py +13 -0
- spaik_sdk/server/response/simple_agent_response_generator.py +14 -0
- spaik_sdk/server/services/__init__.py +0 -0
- spaik_sdk/server/services/thread_converters.py +113 -0
- spaik_sdk/server/services/thread_models.py +90 -0
- spaik_sdk/server/services/thread_service.py +91 -0
- spaik_sdk/server/storage/__init__.py +1 -0
- spaik_sdk/server/storage/base_thread_repository.py +51 -0
- spaik_sdk/server/storage/impl/__init__.py +0 -0
- spaik_sdk/server/storage/impl/in_memory_thread_repository.py +100 -0
- spaik_sdk/server/storage/impl/local_file_thread_repository.py +217 -0
- spaik_sdk/server/storage/thread_filter.py +166 -0
- spaik_sdk/server/storage/thread_metadata.py +53 -0
- spaik_sdk/thread/__init__.py +0 -0
- spaik_sdk/thread/adapters/__init__.py +0 -0
- spaik_sdk/thread/adapters/cli/__init__.py +0 -0
- spaik_sdk/thread/adapters/cli/block_display.py +92 -0
- spaik_sdk/thread/adapters/cli/display_manager.py +84 -0
- spaik_sdk/thread/adapters/cli/live_cli.py +235 -0
- spaik_sdk/thread/adapters/event_adapter.py +28 -0
- spaik_sdk/thread/adapters/streaming_block_adapter.py +57 -0
- spaik_sdk/thread/adapters/sync_adapter.py +76 -0
- spaik_sdk/thread/models.py +224 -0
- spaik_sdk/thread/thread_container.py +468 -0
- spaik_sdk/tools/__init__.py +0 -0
- spaik_sdk/tools/impl/__init__.py +0 -0
- spaik_sdk/tools/impl/mcp_tool_provider.py +93 -0
- spaik_sdk/tools/impl/search_tool_provider.py +18 -0
- spaik_sdk/tools/tool_provider.py +131 -0
- spaik_sdk/tracing/__init__.py +13 -0
- spaik_sdk/tracing/agent_trace.py +72 -0
- spaik_sdk/tracing/get_trace_sink.py +15 -0
- spaik_sdk/tracing/local_trace_sink.py +23 -0
- spaik_sdk/tracing/trace_sink.py +19 -0
- spaik_sdk/tracing/trace_sink_mode.py +14 -0
- spaik_sdk/utils/__init__.py +0 -0
- spaik_sdk/utils/init_logger.py +24 -0
- spaik_sdk-0.6.2.dist-info/METADATA +379 -0
- spaik_sdk-0.6.2.dist-info/RECORD +161 -0
- spaik_sdk-0.6.2.dist-info/WHEEL +4 -0
|
@@ -0,0 +1,217 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import pickle
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from typing import Dict, List, Optional
|
|
5
|
+
|
|
6
|
+
from spaik_sdk.server.storage.base_thread_repository import BaseThreadRepository
|
|
7
|
+
from spaik_sdk.server.storage.thread_filter import ThreadFilter
|
|
8
|
+
from spaik_sdk.server.storage.thread_metadata import ThreadMetadata
|
|
9
|
+
from spaik_sdk.thread.models import ThreadMessage
|
|
10
|
+
from spaik_sdk.thread.thread_container import ThreadContainer
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class LocalFileThreadRepository(BaseThreadRepository):
    """Local file-based implementation of thread repository for development.

    Each thread is pickled to ``<data_dir>/threads/<thread_id>.pkl`` and a JSON
    index of per-thread metadata is kept at ``<data_dir>/metadata.json`` so that
    listing/filtering does not require unpickling every thread.

    NOTE(security): persistence uses pickle. Unpickling untrusted files can
    execute arbitrary code, so ``data_dir`` must only ever point at data this
    process wrote itself. This backend is intended for local development only.
    """

    def __init__(self, data_dir: str = "data"):
        """Initialize the repository rooted at *data_dir*.

        Creates the threads directory if missing and loads (or rebuilds) the
        metadata index into memory.
        """
        self.data_dir = Path(data_dir)
        self.threads_dir = self.data_dir / "threads"
        self.metadata_file = self.data_dir / "metadata.json"

        # Create directories if they don't exist
        self.threads_dir.mkdir(parents=True, exist_ok=True)

        # In-memory metadata index, mirrored to metadata.json on every mutation.
        self._metadata_cache: Dict[str, ThreadMetadata] = {}
        self._load_metadata_cache()

    def _load_metadata_cache(self) -> None:
        """Load metadata from disk into the in-memory cache.

        Falls back to rebuilding the index from the thread files when the JSON
        file is corrupted or has an unexpected shape.
        """
        if not self.metadata_file.exists():
            return
        try:
            with open(self.metadata_file, "r") as f:
                data = json.load(f)
            # Convert plain dicts back into ThreadMetadata objects, supplying
            # defaults for missing fields so older index files keep loading.
            metadata_cache = {}
            for thread_id, metadata_dict in data.items():
                if isinstance(metadata_dict, dict):
                    metadata_cache[thread_id] = ThreadMetadata(
                        thread_id=metadata_dict.get("thread_id", thread_id),
                        title=metadata_dict.get("title", "New Thread"),
                        message_count=metadata_dict.get("message_count", 0),
                        last_activity_time=metadata_dict.get("last_activity_time", 0),
                        created_at=metadata_dict.get("created_at", 0),
                        author_id=metadata_dict.get("author_id", "unknown"),
                        type=metadata_dict.get("type", "chat"),
                    )
            self._metadata_cache = metadata_cache
        except (json.JSONDecodeError, TypeError, KeyError):
            # If metadata is corrupted, rebuild it from the thread files.
            self._rebuild_metadata_cache()

    def _save_metadata_cache(self) -> None:
        """Persist the in-memory metadata index to metadata.json."""
        # Serialized field-by-field (rather than pickled) so the index stays
        # human-readable and robust to code changes in ThreadMetadata.
        data = {
            thread_id: {
                "thread_id": metadata.thread_id,
                "title": metadata.title,
                "message_count": metadata.message_count,
                "last_activity_time": metadata.last_activity_time,
                "created_at": metadata.created_at,
                "author_id": metadata.author_id,
                "type": metadata.type,
            }
            for thread_id, metadata in self._metadata_cache.items()
        }

        with open(self.metadata_file, "w") as f:
            json.dump(data, f, indent=2)

    def _rebuild_metadata_cache(self) -> None:
        """Rebuild the metadata index by reading every thread file on disk."""
        self._metadata_cache.clear()

        for thread_file in self.threads_dir.glob("*.pkl"):
            thread_id = thread_file.stem
            try:
                thread = self._load_thread_from_file(thread_id)
                if thread:
                    metadata = ThreadMetadata.from_thread_container(thread)
                    self._metadata_cache[thread_id] = metadata
            except Exception:
                # Skip corrupted files; a partial index is better than none.
                continue

        self._save_metadata_cache()

    def _thread_file_path(self, thread_id: str) -> Path:
        """Return the pickle file path for *thread_id*."""
        return self.threads_dir / f"{thread_id}.pkl"

    def _load_thread_from_file(self, thread_id: str) -> Optional[ThreadContainer]:
        """Load a thread from its pickle file, or None if missing/unreadable."""
        file_path = self._thread_file_path(thread_id)
        if not file_path.exists():
            return None

        try:
            with open(file_path, "rb") as f:
                return pickle.load(f)
        except (pickle.PickleError, EOFError, OSError, AttributeError, ImportError):
            # AttributeError/ImportError cover pickles written against stale or
            # renamed classes; treat those the same as corrupted data instead of
            # letting them crash load_thread().
            return None

    def _save_thread_to_file(self, thread: ThreadContainer) -> None:
        """Save thread to its pickle file."""
        file_path = self._thread_file_path(thread.thread_id)

        # Create a serializable copy to avoid issues with non-picklable subscribers
        serializable_thread = thread.create_serializable_copy()

        with open(file_path, "wb") as f:
            pickle.dump(serializable_thread, f)

    async def save_thread(self, thread_container: ThreadContainer) -> None:
        """Save complete thread container to disk and refresh its metadata."""
        self._save_thread_to_file(thread_container)

        # Update metadata cache and flush the index so listings stay accurate.
        metadata = ThreadMetadata.from_thread_container(thread_container)
        self._metadata_cache[thread_container.thread_id] = metadata
        self._save_metadata_cache()

    async def load_thread(self, thread_id: str) -> Optional[ThreadContainer]:
        """Load thread container from disk, or None if absent/unreadable."""
        return self._load_thread_from_file(thread_id)

    async def get_message(self, thread_id: str, message_id: str) -> Optional[ThreadMessage]:
        """Return the message with *message_id* from the thread, or None."""
        thread = await self.load_thread(thread_id)
        if not thread:
            return None

        for message in thread.messages:
            if message.id == message_id:
                return message
        return None

    async def upsert_message(self, thread_id: str, message: ThreadMessage) -> None:
        """Replace the message with the same id, or append it; no-op if the thread is missing."""
        thread = await self.load_thread(thread_id)
        if not thread:
            return

        # Find existing message and replace, or add new one (for/else: the
        # else branch runs only when no matching id was found).
        for i, existing_msg in enumerate(thread.messages):
            if existing_msg.id == message.id:
                thread.messages[i] = message
                break
        else:
            thread.messages.append(message)

        # Save updated thread (also refreshes metadata).
        await self.save_thread(thread)

    async def delete_message(self, thread_id: str, message_id: str) -> None:
        """Remove the message with *message_id* from the thread; no-op if the thread is missing."""
        thread = await self.load_thread(thread_id)
        if not thread:
            return

        thread.messages = [msg for msg in thread.messages if msg.id != message_id]

        # Save updated thread (also refreshes metadata).
        await self.save_thread(thread)

    async def thread_exists(self, thread_id: str) -> bool:
        """Check if a thread file exists on disk."""
        return self._thread_file_path(thread_id).exists()

    async def delete_thread(self, thread_id: str) -> bool:
        """Delete a thread and its metadata; return True on success, False otherwise."""
        file_path = self._thread_file_path(thread_id)

        if file_path.exists():
            try:
                file_path.unlink()  # Delete file

                # Remove from metadata cache and flush the index.
                if thread_id in self._metadata_cache:
                    del self._metadata_cache[thread_id]
                    self._save_metadata_cache()

                return True
            except OSError:
                return False
        return False

    async def list_threads(self, filter: ThreadFilter) -> List[ThreadMetadata]:
        """List threads matching *filter* using the in-memory metadata index."""
        result = [m for m in self._metadata_cache.values() if filter.matches(m)]

        # Sort by last activity time (most recent first)
        result.sort(key=lambda m: m.last_activity_time, reverse=True)
        return result

    def clear_all(self) -> None:
        """Clear all data from disk (useful for testing)."""
        # Remove all thread files
        for thread_file in self.threads_dir.glob("*.pkl"):
            thread_file.unlink()

        # Clear metadata, both in memory and on disk.
        self._metadata_cache.clear()
        if self.metadata_file.exists():
            self.metadata_file.unlink()

    def get_thread_count(self) -> int:
        """Get total number of threads stored."""
        return len(self._metadata_cache)

    def get_data_dir(self) -> Path:
        """Get the data directory path."""
        return self.data_dir
|
@@ -0,0 +1,166 @@
|
|
|
1
|
+
from dataclasses import dataclass
|
|
2
|
+
from typing import List, Optional, Set
|
|
3
|
+
|
|
4
|
+
from spaik_sdk.server.storage.thread_metadata import ThreadMetadata
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
@dataclass
class ThreadFilter:
    """Filter for thread metadata; all configured criteria must match (AND)."""

    thread_ids: Optional[Set[str]] = None
    author_ids: Optional[Set[str]] = None
    types: Optional[Set[str]] = None
    title_contains: Optional[str] = None
    min_message_count: Optional[int] = None
    max_message_count: Optional[int] = None
    min_last_activity: Optional[int] = None
    max_last_activity: Optional[int] = None
    min_created_at: Optional[int] = None
    max_created_at: Optional[int] = None

    @classmethod
    def builder(cls) -> "ThreadFilterBuilder":
        """Return a fresh fluent builder for constructing a filter."""
        return ThreadFilterBuilder()

    def matches(self, metadata: ThreadMetadata) -> bool:
        """Return True when *metadata* satisfies every configured criterion."""
        # Set-membership criteria: each applies only when its set is configured.
        memberships = (
            (self.thread_ids, metadata.thread_id),
            (self.author_ids, metadata.author_id),
            (self.types, metadata.type),
        )
        for allowed, value in memberships:
            if allowed is not None and value not in allowed:
                return False

        # Case-insensitive substring match on the title.
        if self.title_contains is not None:
            if self.title_contains.lower() not in metadata.title.lower():
                return False

        # Inclusive numeric ranges: (lower bound, upper bound, actual value).
        ranges = (
            (self.min_message_count, self.max_message_count, metadata.message_count),
            (self.min_last_activity, self.max_last_activity, metadata.last_activity_time),
            (self.min_created_at, self.max_created_at, metadata.created_at),
        )
        for low, high, value in ranges:
            if low is not None and value < low:
                return False
            if high is not None and value > high:
                return False

        return True
|
|
62
|
+
|
|
63
|
+
class ThreadFilterBuilder:
    """Fluent builder for ThreadFilter."""

    def __init__(self):
        self._filter = ThreadFilter()

    def _extend_set(self, attr: str, values) -> "ThreadFilterBuilder":
        # Lazily create the named set field on the filter, then add *values*.
        current = getattr(self._filter, attr)
        if current is None:
            current = set()
            setattr(self._filter, attr, current)
        current.update(values)
        return self

    def with_thread_id(self, thread_id: str) -> "ThreadFilterBuilder":
        """Filter by a specific thread ID."""
        return self._extend_set("thread_ids", (thread_id,))

    def with_thread_ids(self, thread_ids: List[str]) -> "ThreadFilterBuilder":
        """Filter by multiple thread IDs."""
        return self._extend_set("thread_ids", thread_ids)

    def with_author_id(self, author_id: str) -> "ThreadFilterBuilder":
        """Filter by a specific author ID."""
        return self._extend_set("author_ids", (author_id,))

    def with_author_ids(self, author_ids: List[str]) -> "ThreadFilterBuilder":
        """Filter by multiple author IDs."""
        return self._extend_set("author_ids", author_ids)

    def with_type(self, thread_type: str) -> "ThreadFilterBuilder":
        """Filter by a specific thread type."""
        return self._extend_set("types", (thread_type,))

    def with_types(self, thread_types: List[str]) -> "ThreadFilterBuilder":
        """Filter by multiple thread types."""
        return self._extend_set("types", thread_types)

    def with_title_containing(self, text: str) -> "ThreadFilterBuilder":
        """Filter by title containing *text* (case insensitive)."""
        self._filter.title_contains = text
        return self

    def with_min_messages(self, min_count: int) -> "ThreadFilterBuilder":
        """Filter threads with at least this many messages."""
        self._filter.min_message_count = min_count
        return self

    def with_max_messages(self, max_count: int) -> "ThreadFilterBuilder":
        """Filter threads with at most this many messages."""
        self._filter.max_message_count = max_count
        return self

    def with_message_count_range(self, min_count: int, max_count: int) -> "ThreadFilterBuilder":
        """Filter threads with message count in [min_count, max_count]."""
        self._filter.min_message_count = min_count
        self._filter.max_message_count = max_count
        return self

    def with_activity_after(self, timestamp: int) -> "ThreadFilterBuilder":
        """Filter threads with last activity after *timestamp* (UTC millis)."""
        self._filter.min_last_activity = timestamp
        return self

    def with_activity_before(self, timestamp: int) -> "ThreadFilterBuilder":
        """Filter threads with last activity before *timestamp* (UTC millis)."""
        self._filter.max_last_activity = timestamp
        return self

    def with_activity_range(self, start: int, end: int) -> "ThreadFilterBuilder":
        """Filter threads with last activity in [start, end] (UTC millis)."""
        self._filter.min_last_activity = start
        self._filter.max_last_activity = end
        return self

    def with_created_after(self, timestamp: int) -> "ThreadFilterBuilder":
        """Filter threads created after *timestamp* (UTC millis)."""
        self._filter.min_created_at = timestamp
        return self

    def with_created_before(self, timestamp: int) -> "ThreadFilterBuilder":
        """Filter threads created before *timestamp* (UTC millis)."""
        self._filter.max_created_at = timestamp
        return self

    def with_created_range(self, start: int, end: int) -> "ThreadFilterBuilder":
        """Filter threads created in [start, end] (UTC millis)."""
        self._filter.min_created_at = start
        self._filter.max_created_at = end
        return self

    def build(self) -> ThreadFilter:
        """Return the assembled filter."""
        return self._filter
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
from dataclasses import dataclass
|
|
2
|
+
|
|
3
|
+
from spaik_sdk.thread.thread_container import ThreadContainer
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
@dataclass
class ThreadMetadata:
    """Lightweight per-thread summary used for listings and the metadata index."""

    thread_id: str
    title: str
    message_count: int
    last_activity_time: int
    created_at: int
    author_id: str
    type: str

    @classmethod
    def from_thread_container(cls, thread: ThreadContainer, thread_type: str = "chat") -> "ThreadMetadata":
        """Derive metadata (title, author, timestamps) from a ThreadContainer."""
        title = "New Thread"
        first_user_message = None

        # Scan for the first human (non-AI) message that carries text; it
        # supplies the title. Note the author reference keeps advancing past
        # human messages with empty text until one with content is found.
        for msg in thread.messages:
            if msg.ai:
                continue
            first_user_message = msg
            text = msg.get_text_content().strip()
            if text:
                # Truncate to a 50-char preview with an ellipsis when clipped.
                title = text[:50] + ("..." if len(text) > 50 else "")
                break

        # No usable user text: fall back to a system-prompt preview.
        if title == "New Thread" and thread.system_prompt:
            prompt = thread.system_prompt
            title = f"System: {prompt[:50]}" + ("..." if len(prompt) > 50 else "")

        # Author comes from the remembered human message, if any.
        author_id = first_user_message.author_id if first_user_message else "unknown"

        # created_at: first message's timestamp, else the last-activity time.
        if thread.messages:
            created_at = thread.messages[0].timestamp
        else:
            created_at = thread.get_last_activity_time()

        return cls(
            thread_id=thread.thread_id,
            title=title,
            message_count=len(thread.messages),
            last_activity_time=thread.get_last_activity_time(),
            created_at=created_at,
            author_id=author_id,
            type=thread_type,
        )
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
import time
|
|
2
|
+
from enum import Enum
|
|
3
|
+
from typing import Optional
|
|
4
|
+
|
|
5
|
+
from rich.panel import Panel
|
|
6
|
+
from rich.text import Text
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class BlockDisplayType(Enum):
    """Kinds of content blocks the CLI can render."""

    REASONING = "reasoning"
    RESPONSE = "response"
    TOOL_CALL = "tool_call"
    ERROR = "error"
14
|
+
|
|
15
|
+
|
|
16
|
+
class BlockDisplay:
    """Represents a displayable block with content and styling info.

    NOTE(review): the emoji glyphs below were reconstructed from mojibake in
    the original source (🧠/🤖/🔧/✅/❌/🔄/⏳) — confirm the exact glyphs
    against the upstream repository.
    """

    def __init__(
        self,
        block_id: str,
        display_type: BlockDisplayType,
        content: str = "",
        streaming: bool = False,
        tool_name: Optional[str] = None,
        tool_error: Optional[str] = None,
    ):
        self.block_id = block_id
        self.display_type = display_type
        self.content = content
        self.streaming = streaming
        self.tool_name = tool_name
        self.tool_error = tool_error
        self.created_at = time.time()

    def to_panel(self) -> Panel:
        """Convert this block to a Rich Panel styled per its display type."""
        if self.display_type == BlockDisplayType.REASONING:
            text = Text()
            text.append("🧠 ", style="blue bold")
            # Reasoning is previewed, capped at 400 chars.
            display_content = self.content[:400] + ("..." if len(self.content) > 400 else "")
            if self.streaming:
                text.append(display_content, style="blue")
            else:
                text.append(display_content)

            title = "AI Thinking" + (" 🔄" if self.streaming else " ✅")
            return Panel(text, title=title, border_style="blue")

        elif self.display_type == BlockDisplayType.RESPONSE:
            text = Text()
            text.append("🤖 ", style="green bold")
            text.append(self.content)

            title = f"Response ({self.block_id[:8]})" + (" 🔄" if self.streaming else " ✅")
            return Panel(text, title=title, border_style="green")

        elif self.display_type == BlockDisplayType.TOOL_CALL:
            text = Text()
            text.append("🔧 ", style="yellow bold")
            text.append(f"{self.tool_name or 'unknown'}", style="yellow")

            # Fix: the original decided title/border across two if-chains and a
            # dead `hasattr(locals(), "title")` guard (hasattr on the dict from
            # locals() is always False). Each state now assigns both exactly once:
            # streaming > error > has result > pending.
            if self.streaming:
                text.append(" (running...)", style="bright_black")
                title = f"Tool: {self.tool_name} 🔄"
                border_style = "yellow"
            elif self.tool_error:
                text.append(f"\n\n❌ Error:\n{self.tool_error}", style="red")
                title = f"Tool: {self.tool_name} ❌"
                border_style = "red"
            elif self.content:
                # Tool results are previewed, capped at 200 chars.
                result = self.content[:200] + ("..." if len(self.content) > 200 else "")
                text.append(f"\n\n✅ Result:\n{result}", style="green")
                title = f"Tool: {self.tool_name} ✅"
                border_style = "green"
            else:
                title = f"Tool: {self.tool_name} ⏳"
                border_style = "bright_black"

            return Panel(text, title=title, border_style=border_style)

        elif self.display_type == BlockDisplayType.ERROR:
            # Error blocks intentionally render a fixed marker; self.content
            # is not shown for this type.
            text = Text()
            text.append("❌ Error", style="red bold")
            return Panel(text, title="Error", border_style="red")

        return Panel("Unknown block type")
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
from typing import Dict, Optional
|
|
2
|
+
|
|
3
|
+
from rich.console import Console, Group
|
|
4
|
+
from rich.live import Live
|
|
5
|
+
from rich.text import Text
|
|
6
|
+
|
|
7
|
+
from spaik_sdk.thread.adapters.cli.block_display import BlockDisplay, BlockDisplayType
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class DisplayManager:
    """Manages Rich component mappings and targeted updates to a Live display."""

    def __init__(self):
        self.console = Console()
        self.live: Optional[Live] = None
        self.blocks: Dict[str, BlockDisplay] = {}  # block_id -> BlockDisplay
        self._running = False

    def start(self):
        """Start the live display (idempotent)."""
        if self._running:
            return

        self._running = True

        self.live = Live(self._create_initial_display(), console=self.console, refresh_per_second=10)
        self.live.start()

    def stop(self):
        """Stop the live display and drop all tracked blocks (idempotent)."""
        if not self._running:
            return

        self._running = False
        self.blocks = {}
        if self.live:
            try:
                self.live.stop()
            except (BlockingIOError, OSError):
                # Rich display cleanup failed, but that's ok.
                # This can happen when the stdout buffer is full/blocked.
                pass

    def _create_initial_display(self):
        """Create the placeholder shown before any block arrives."""
        return Text("Waiting for activity...")

    def update_block_content(self, block_id: str, content: Optional[str] = None, streaming: bool = False):
        """Update a block's content and streaming flag.

        ``content=None`` leaves the existing content untouched (only the
        streaming flag changes); an empty string explicitly clears it.
        Unknown block ids are ignored.
        """
        block = self.blocks.get(block_id)
        if block is None:
            return
        # Fix: compare against None so an empty string can clear the content
        # (the previous truthiness check silently ignored "").
        if content is not None:
            block.content = content
        block.streaming = streaming
        self._refresh_display()

    def update_tool_result(self, block_id: str, result: str, error: Optional[str] = None):
        """Record a tool block's result (or error) and mark it finished.

        Unknown block ids are ignored.
        """
        block = self.blocks.get(block_id)
        if block is None:
            return
        if error:
            block.tool_error = error
        else:
            block.content = result
        block.streaming = False
        self._refresh_display()

    def add_block(
        self, block_id: str, display_type: BlockDisplayType, content: str = "", streaming: bool = False, tool_name: Optional[str] = None
    ):
        """Register a new block (replacing any existing one with the same id)."""
        self.blocks[block_id] = BlockDisplay(
            block_id=block_id, display_type=display_type, content=content, streaming=streaming, tool_name=tool_name
        )
        self._refresh_display()

    def _refresh_display(self):
        """Re-render the live display from the current set of block panels."""
        if not self.live:
            return

        panels = [block.to_panel() for block in self.blocks.values()]
        if panels:
            self.live.update(Group(*panels))
        else:
            self.live.update(Text("Waiting for activity..."))