spaik_sdk-0.6.2-py3-none-any.whl

This diff shows the publicly released contents of the listed package versions, as published to their public registries, and is provided for informational purposes only.
Files changed (161)
  1. spaik_sdk/__init__.py +21 -0
  2. spaik_sdk/agent/__init__.py +0 -0
  3. spaik_sdk/agent/base_agent.py +249 -0
  4. spaik_sdk/attachments/__init__.py +22 -0
  5. spaik_sdk/attachments/builder.py +61 -0
  6. spaik_sdk/attachments/file_storage_provider.py +27 -0
  7. spaik_sdk/attachments/mime_types.py +118 -0
  8. spaik_sdk/attachments/models.py +63 -0
  9. spaik_sdk/attachments/provider_support.py +53 -0
  10. spaik_sdk/attachments/storage/__init__.py +0 -0
  11. spaik_sdk/attachments/storage/base_file_storage.py +32 -0
  12. spaik_sdk/attachments/storage/impl/__init__.py +0 -0
  13. spaik_sdk/attachments/storage/impl/local_file_storage.py +101 -0
  14. spaik_sdk/audio/__init__.py +12 -0
  15. spaik_sdk/audio/options.py +53 -0
  16. spaik_sdk/audio/providers/__init__.py +1 -0
  17. spaik_sdk/audio/providers/google_tts.py +77 -0
  18. spaik_sdk/audio/providers/openai_stt.py +71 -0
  19. spaik_sdk/audio/providers/openai_tts.py +111 -0
  20. spaik_sdk/audio/stt.py +61 -0
  21. spaik_sdk/audio/tts.py +124 -0
  22. spaik_sdk/config/credentials_provider.py +10 -0
  23. spaik_sdk/config/env.py +59 -0
  24. spaik_sdk/config/env_credentials_provider.py +7 -0
  25. spaik_sdk/config/get_credentials_provider.py +14 -0
  26. spaik_sdk/image_gen/__init__.py +9 -0
  27. spaik_sdk/image_gen/image_generator.py +83 -0
  28. spaik_sdk/image_gen/options.py +24 -0
  29. spaik_sdk/image_gen/providers/__init__.py +0 -0
  30. spaik_sdk/image_gen/providers/google.py +75 -0
  31. spaik_sdk/image_gen/providers/openai.py +60 -0
  32. spaik_sdk/llm/__init__.py +0 -0
  33. spaik_sdk/llm/cancellation_handle.py +10 -0
  34. spaik_sdk/llm/consumption/__init__.py +0 -0
  35. spaik_sdk/llm/consumption/consumption_estimate.py +26 -0
  36. spaik_sdk/llm/consumption/consumption_estimate_builder.py +113 -0
  37. spaik_sdk/llm/consumption/consumption_extractor.py +59 -0
  38. spaik_sdk/llm/consumption/token_usage.py +31 -0
  39. spaik_sdk/llm/converters.py +146 -0
  40. spaik_sdk/llm/cost/__init__.py +1 -0
  41. spaik_sdk/llm/cost/builtin_cost_provider.py +83 -0
  42. spaik_sdk/llm/cost/cost_estimate.py +8 -0
  43. spaik_sdk/llm/cost/cost_provider.py +28 -0
  44. spaik_sdk/llm/extract_error_message.py +37 -0
  45. spaik_sdk/llm/langchain_loop_manager.py +270 -0
  46. spaik_sdk/llm/langchain_service.py +196 -0
  47. spaik_sdk/llm/message_handler.py +188 -0
  48. spaik_sdk/llm/streaming/__init__.py +1 -0
  49. spaik_sdk/llm/streaming/block_manager.py +152 -0
  50. spaik_sdk/llm/streaming/models.py +42 -0
  51. spaik_sdk/llm/streaming/streaming_content_handler.py +157 -0
  52. spaik_sdk/llm/streaming/streaming_event_handler.py +215 -0
  53. spaik_sdk/llm/streaming/streaming_state_manager.py +58 -0
  54. spaik_sdk/models/__init__.py +0 -0
  55. spaik_sdk/models/factories/__init__.py +0 -0
  56. spaik_sdk/models/factories/anthropic_factory.py +33 -0
  57. spaik_sdk/models/factories/base_model_factory.py +71 -0
  58. spaik_sdk/models/factories/google_factory.py +30 -0
  59. spaik_sdk/models/factories/ollama_factory.py +41 -0
  60. spaik_sdk/models/factories/openai_factory.py +50 -0
  61. spaik_sdk/models/llm_config.py +46 -0
  62. spaik_sdk/models/llm_families.py +7 -0
  63. spaik_sdk/models/llm_model.py +17 -0
  64. spaik_sdk/models/llm_wrapper.py +25 -0
  65. spaik_sdk/models/model_registry.py +156 -0
  66. spaik_sdk/models/providers/__init__.py +0 -0
  67. spaik_sdk/models/providers/anthropic_provider.py +29 -0
  68. spaik_sdk/models/providers/azure_provider.py +31 -0
  69. spaik_sdk/models/providers/base_provider.py +62 -0
  70. spaik_sdk/models/providers/google_provider.py +26 -0
  71. spaik_sdk/models/providers/ollama_provider.py +26 -0
  72. spaik_sdk/models/providers/openai_provider.py +26 -0
  73. spaik_sdk/models/providers/provider_type.py +90 -0
  74. spaik_sdk/orchestration/__init__.py +24 -0
  75. spaik_sdk/orchestration/base_orchestrator.py +238 -0
  76. spaik_sdk/orchestration/checkpoint.py +80 -0
  77. spaik_sdk/orchestration/models.py +103 -0
  78. spaik_sdk/prompt/__init__.py +0 -0
  79. spaik_sdk/prompt/get_prompt_loader.py +13 -0
  80. spaik_sdk/prompt/local_prompt_loader.py +21 -0
  81. spaik_sdk/prompt/prompt_loader.py +48 -0
  82. spaik_sdk/prompt/prompt_loader_mode.py +14 -0
  83. spaik_sdk/py.typed +1 -0
  84. spaik_sdk/recording/__init__.py +1 -0
  85. spaik_sdk/recording/base_playback.py +90 -0
  86. spaik_sdk/recording/base_recorder.py +50 -0
  87. spaik_sdk/recording/conditional_recorder.py +38 -0
  88. spaik_sdk/recording/impl/__init__.py +1 -0
  89. spaik_sdk/recording/impl/local_playback.py +76 -0
  90. spaik_sdk/recording/impl/local_recorder.py +85 -0
  91. spaik_sdk/recording/langchain_serializer.py +88 -0
  92. spaik_sdk/server/__init__.py +1 -0
  93. spaik_sdk/server/api/routers/__init__.py +0 -0
  94. spaik_sdk/server/api/routers/api_builder.py +149 -0
  95. spaik_sdk/server/api/routers/audio_router_factory.py +201 -0
  96. spaik_sdk/server/api/routers/file_router_factory.py +111 -0
  97. spaik_sdk/server/api/routers/thread_router_factory.py +284 -0
  98. spaik_sdk/server/api/streaming/__init__.py +0 -0
  99. spaik_sdk/server/api/streaming/format_sse_event.py +41 -0
  100. spaik_sdk/server/api/streaming/negotiate_streaming_response.py +8 -0
  101. spaik_sdk/server/api/streaming/streaming_negotiator.py +10 -0
  102. spaik_sdk/server/authorization/__init__.py +0 -0
  103. spaik_sdk/server/authorization/base_authorizer.py +64 -0
  104. spaik_sdk/server/authorization/base_user.py +13 -0
  105. spaik_sdk/server/authorization/dummy_authorizer.py +17 -0
  106. spaik_sdk/server/job_processor/__init__.py +0 -0
  107. spaik_sdk/server/job_processor/base_job_processor.py +8 -0
  108. spaik_sdk/server/job_processor/thread_job_processor.py +32 -0
  109. spaik_sdk/server/pubsub/__init__.py +1 -0
  110. spaik_sdk/server/pubsub/cancellation_publisher.py +7 -0
  111. spaik_sdk/server/pubsub/cancellation_subscriber.py +38 -0
  112. spaik_sdk/server/pubsub/event_publisher.py +13 -0
  113. spaik_sdk/server/pubsub/impl/__init__.py +1 -0
  114. spaik_sdk/server/pubsub/impl/local_cancellation_pubsub.py +48 -0
  115. spaik_sdk/server/pubsub/impl/signalr_publisher.py +36 -0
  116. spaik_sdk/server/queue/__init__.py +1 -0
  117. spaik_sdk/server/queue/agent_job_queue.py +27 -0
  118. spaik_sdk/server/queue/impl/__init__.py +1 -0
  119. spaik_sdk/server/queue/impl/azure_queue.py +24 -0
  120. spaik_sdk/server/response/__init__.py +0 -0
  121. spaik_sdk/server/response/agent_response_generator.py +39 -0
  122. spaik_sdk/server/response/response_generator.py +13 -0
  123. spaik_sdk/server/response/simple_agent_response_generator.py +14 -0
  124. spaik_sdk/server/services/__init__.py +0 -0
  125. spaik_sdk/server/services/thread_converters.py +113 -0
  126. spaik_sdk/server/services/thread_models.py +90 -0
  127. spaik_sdk/server/services/thread_service.py +91 -0
  128. spaik_sdk/server/storage/__init__.py +1 -0
  129. spaik_sdk/server/storage/base_thread_repository.py +51 -0
  130. spaik_sdk/server/storage/impl/__init__.py +0 -0
  131. spaik_sdk/server/storage/impl/in_memory_thread_repository.py +100 -0
  132. spaik_sdk/server/storage/impl/local_file_thread_repository.py +217 -0
  133. spaik_sdk/server/storage/thread_filter.py +166 -0
  134. spaik_sdk/server/storage/thread_metadata.py +53 -0
  135. spaik_sdk/thread/__init__.py +0 -0
  136. spaik_sdk/thread/adapters/__init__.py +0 -0
  137. spaik_sdk/thread/adapters/cli/__init__.py +0 -0
  138. spaik_sdk/thread/adapters/cli/block_display.py +92 -0
  139. spaik_sdk/thread/adapters/cli/display_manager.py +84 -0
  140. spaik_sdk/thread/adapters/cli/live_cli.py +235 -0
  141. spaik_sdk/thread/adapters/event_adapter.py +28 -0
  142. spaik_sdk/thread/adapters/streaming_block_adapter.py +57 -0
  143. spaik_sdk/thread/adapters/sync_adapter.py +76 -0
  144. spaik_sdk/thread/models.py +224 -0
  145. spaik_sdk/thread/thread_container.py +468 -0
  146. spaik_sdk/tools/__init__.py +0 -0
  147. spaik_sdk/tools/impl/__init__.py +0 -0
  148. spaik_sdk/tools/impl/mcp_tool_provider.py +93 -0
  149. spaik_sdk/tools/impl/search_tool_provider.py +18 -0
  150. spaik_sdk/tools/tool_provider.py +131 -0
  151. spaik_sdk/tracing/__init__.py +13 -0
  152. spaik_sdk/tracing/agent_trace.py +72 -0
  153. spaik_sdk/tracing/get_trace_sink.py +15 -0
  154. spaik_sdk/tracing/local_trace_sink.py +23 -0
  155. spaik_sdk/tracing/trace_sink.py +19 -0
  156. spaik_sdk/tracing/trace_sink_mode.py +14 -0
  157. spaik_sdk/utils/__init__.py +0 -0
  158. spaik_sdk/utils/init_logger.py +24 -0
  159. spaik_sdk-0.6.2.dist-info/METADATA +379 -0
  160. spaik_sdk-0.6.2.dist-info/RECORD +161 -0
  161. spaik_sdk-0.6.2.dist-info/WHEEL +4 -0
spaik_sdk/llm/message_handler.py
@@ -0,0 +1,188 @@
+ import time
+ import uuid
+ from typing import Any, AsyncGenerator, Dict, List, Optional
+
+ from spaik_sdk.attachments.models import Attachment
+ from spaik_sdk.llm.streaming.streaming_event_handler import EventType, StreamingEventHandler
+ from spaik_sdk.recording.base_recorder import BaseRecorder
+ from spaik_sdk.thread.models import MessageBlock, MessageBlockType, ThreadMessage
+ from spaik_sdk.thread.thread_container import ThreadContainer
+ from spaik_sdk.utils.init_logger import init_logger
+
+ logger = init_logger(__name__)
+
+
+ class MessageHandler:
+     """Manages conversation message history using ThreadContainer."""
+
+     def __init__(
+         self,
+         thread_container: ThreadContainer,
+         assistant_name: str,
+         assistant_id: str,
+         recorder: Optional[BaseRecorder] = None,
+     ):
+         self.thread_container = thread_container
+         self.streaming_handler = StreamingEventHandler(recorder)
+         self.assistant_name = assistant_name
+         self.assistant_id = assistant_id
+         self._update_previous_message_count()
+
+     def _update_previous_message_count(self) -> None:
+         self._previous_message_count = self.thread_container.get_nof_messages_including_system() + 1
+
+     def add_user_message(
+         self,
+         user_input: str,
+         author_id: str,
+         author_name: str,
+         attachments: Optional[List[Attachment]] = None,
+     ) -> None:
+         """Add a user message to both thread container and LangChain messages."""
+
+         # Add to thread container
+         block_id = str(uuid.uuid4())
+         user_message = ThreadMessage(
+             id=str(uuid.uuid4()),
+             ai=False,
+             author_id=author_id,
+             author_name=author_name,
+             timestamp=int(time.time() * 1000),
+             blocks=[
+                 MessageBlock(
+                     id=block_id,
+                     streaming=False,
+                     type=MessageBlockType.PLAIN,
+                     content=user_input,
+                 )
+             ],
+             attachments=attachments,
+         )
+         self.thread_container.add_message(user_message)
+
+         # Add to LangChain messages for compatibility
+         self._update_previous_message_count()
+
+     def add_error(self, error_text: str, author_id: str = "system") -> str:
+         """Add an error message to the thread container and return message ID"""
+         return self.thread_container.add_error_message(error_text, author_id)
+
+     def handle_cancellation(self) -> None:
+         """Handle cancellation of the agent."""
+         self.thread_container.cancel_generation()
+
+     async def process_agent_token_stream(
+         self,
+         agent_stream,
+     ) -> AsyncGenerator[Dict[str, Any], None]:
+         """Process agent event stream for individual tokens and yield them in real-time."""
+         async for streaming_event in self.streaming_handler.process_stream(agent_stream):
+             if streaming_event.event_type == EventType.MESSAGE_START:
+                 logger.debug(f"🔍 Processing MESSAGE_START for message: {streaming_event.message_id}")
+                 # Create AI message in thread container
+                 assert streaming_event.message_id is not None
+                 ai_message = ThreadMessage(
+                     id=streaming_event.message_id,
+                     ai=True,
+                     author_id=self.assistant_id,
+                     author_name=self.assistant_name,
+                     timestamp=int(time.time() * 1000),
+                     blocks=[],
+                 )
+                 self.thread_container.add_message(ai_message)
+
+             elif streaming_event.event_type == EventType.BLOCK_START:
+                 logger.debug(f"🔍 Processing BLOCK_START for block: {streaming_event.block_id}")
+                 # Add new block to the message
+                 assert streaming_event.block_id is not None
+                 assert streaming_event.block_type is not None
+                 assert streaming_event.message_id is not None
+                 new_block = MessageBlock(
+                     id=streaming_event.block_id,
+                     streaming=True,
+                     type=streaming_event.block_type,
+                     tool_call_id=streaming_event.tool_call_id,
+                     tool_call_args=streaming_event.tool_args,
+                     tool_name=streaming_event.tool_name,
+                 )
+                 self.thread_container.add_message_block(streaming_event.message_id, new_block)
+
+             elif streaming_event.event_type == EventType.BLOCK_END:
+                 logger.debug(f"🔚 Processing BLOCK_END for block: {streaming_event.block_id}")
+                 # Mark individual block as non-streaming (completed)
+                 assert streaming_event.block_id is not None
+                 assert streaming_event.message_id is not None
+                 self.thread_container.finalize_streaming_blocks(streaming_event.message_id, [streaming_event.block_id])
+                 logger.debug(f"✅ Block {streaming_event.block_id} marked as completed")
+
+             elif streaming_event.event_type in [EventType.REASONING, EventType.REASONING_SUMMARY, EventType.TOKEN]:
+                 logger.debug(f"🔍 Processing {streaming_event.event_type.value} for block: {streaming_event.block_id}")
+                 # Add streaming content
+                 assert streaming_event.block_id is not None
+                 assert streaming_event.content is not None
+                 self.thread_container.add_streaming_message_chunk(streaming_event.block_id, streaming_event.content)
+
+                 # Yield for external consumption
+                 yield {
+                     "type": streaming_event.event_type.value,
+                     "content": streaming_event.content,
+                     "block_id": streaming_event.block_id,
+                     "message_id": streaming_event.message_id,
+                 }
+
+             elif streaming_event.event_type == EventType.USAGE_METADATA:
+                 logger.info(f"📊 Processing USAGE_METADATA for message: {streaming_event.message_id}")
+                 # Store consumption metadata in ThreadContainer
+                 if streaming_event.message_id and streaming_event.usage_metadata:
+                     self.thread_container.add_consumption_metadata(streaming_event.message_id, streaming_event.usage_metadata)
+                     # Yield usage metadata for external consumption
+                     yield {
+                         "type": "usage_metadata",
+                         "message_id": streaming_event.message_id,
+                         "usage_metadata": streaming_event.usage_metadata,
+                     }
+
+             elif streaming_event.event_type == EventType.COMPLETE:
+                 logger.debug(f"🔍 Processing COMPLETE for message: {streaming_event.message_id}")
+                 # Mark all blocks as non-streaming
+                 if streaming_event.message_id and streaming_event.blocks:
+                     self.thread_container.finalize_streaming_blocks(streaming_event.message_id, streaming_event.blocks)
+
+                 yield {
+                     "type": "complete",
+                     "message": streaming_event.message,
+                     "blocks": streaming_event.blocks,
+                     "message_id": streaming_event.message_id,
+                 }
+
+             elif streaming_event.event_type == EventType.TOOL_USE:
+                 logger.debug(f"🔍 Processing TOOL_USE for block: {streaming_event.block_id}")
+                 # Handle tool use event - the block is already created, just yield for external consumption
+                 yield {
+                     "type": "tool_use",
+                     "tool_call_id": streaming_event.tool_call_id,
+                     "tool_name": streaming_event.tool_name,
+                     "tool_args": streaming_event.tool_args,
+                     "block_id": streaming_event.block_id,
+                     "message_id": streaming_event.message_id,
+                 }
+
+             elif streaming_event.event_type == EventType.TOOL_RESPONSE:
+                 logger.debug(f"🔍 Processing TOOL_RESPONSE for block: {streaming_event.block_id}")
+                 # Handle tool response - update the thread container with the response
+                 assert streaming_event.tool_call_id is not None
+                 assert streaming_event.content is not None
+                 self.thread_container.update_tool_use_block_with_response(
+                     streaming_event.tool_call_id, streaming_event.content, streaming_event.error
+                 )
+
+                 # Yield for external consumption
+                 yield {
+                     "type": "tool_response",
+                     "tool_call_id": streaming_event.tool_call_id,
+                     "response": streaming_event.content,
+                     "error": streaming_event.error,
+                     "block_id": streaming_event.block_id,
+                     "message_id": streaming_event.message_id,
+                 }
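For orientation, a minimal consumption sketch for the MessageHandler added above. Only the constructor signature, add_user_message, and process_agent_token_stream come from this hunk; the argument-less ThreadContainer() call and the agent_stream argument (an async event stream produced by the LangChain service) are illustrative assumptions.

from spaik_sdk.llm.message_handler import MessageHandler
from spaik_sdk.thread.thread_container import ThreadContainer


async def consume(agent_stream) -> None:
    # ThreadContainer() with no arguments is an assumption for illustration;
    # see spaik_sdk/thread/thread_container.py for the actual constructor.
    handler = MessageHandler(
        thread_container=ThreadContainer(),
        assistant_name="Assistant",
        assistant_id="assistant-1",
    )
    handler.add_user_message("Hello", author_id="user-1", author_name="User")

    # Each yielded dict carries a "type" key plus event-specific fields.
    async for event in handler.process_agent_token_stream(agent_stream):
        if event["type"] == "token":
            print(event["content"], end="", flush=True)
        elif event["type"] == "complete":
            print()  # generation finished; blocks are finalized in the thread container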
spaik_sdk/llm/streaming/__init__.py
@@ -0,0 +1 @@
+
spaik_sdk/llm/streaming/block_manager.py
@@ -0,0 +1,152 @@
+ import time
+ import uuid
+ from typing import Any, AsyncGenerator, Dict, List, Optional
+
+ from spaik_sdk.llm.streaming.models import EventType, StreamingEvent
+ from spaik_sdk.thread.models import MessageBlockType
+
+
+ class BlockManager:
+     """Manages different types of content blocks during streaming."""
+
+     def __init__(self):
+         self.current_blocks: Dict[str, str] = {}  # block_id -> block_type
+         self.block_timestamps: Dict[str, float] = {}  # block_id -> creation_timestamp
+         self.reasoning_block_id: Optional[str] = None
+         self.regular_block_id: Optional[str] = None
+         self.summary_block_id: Optional[str] = None
+         self.tool_use_blocks: Dict[str, str] = {}  # tool_call_id -> block_id
+         self.reasoning_detected = False
+         self.last_block_type: Optional[str] = None  # Track the last type of block created
+
+     def reset(self):
+         """Reset block manager state."""
+         self.current_blocks = {}
+         self.block_timestamps = {}
+         self.reasoning_block_id = None
+         self.regular_block_id = None
+         self.summary_block_id = None
+         self.tool_use_blocks = {}
+         self.reasoning_detected = False
+         self.last_block_type = None
+
+     def mark_reasoning_detected(self):
+         """Mark that reasoning activity has been detected."""
+         self.reasoning_detected = True
+
+     def reset_reasoning_block(self):
+         """Reset reasoning block for mid-response thinking (creates new reasoning block)."""
+         if self.reasoning_block_id:
+             # Remove the current reasoning block from tracking
+             self.current_blocks.pop(self.reasoning_block_id, None)
+             self.block_timestamps.pop(self.reasoning_block_id, None)  # Remove timestamp too
+             self.reasoning_block_id = None
+
+     def get_block_ids(self) -> List[str]:
+         """Get list of all current block IDs."""
+         return list(self.current_blocks.keys())
+
+     def should_create_new_reasoning_block(self) -> bool:
+         """Check if we need a new reasoning block based on timestamps.
+
+         Rule: If there's any tool call newer than the current reasoning block, create a new reasoning block.
+         This ensures reasoning gets properly segmented around tool calls.
+         """
+         if self.reasoning_block_id is None:
+             return False
+
+         current_reasoning_timestamp = self.block_timestamps.get(self.reasoning_block_id, 0)
+
+         # Check if any tool block is newer than our current reasoning block
+         for tool_block_id in self.tool_use_blocks.values():
+             tool_timestamp = self.block_timestamps.get(tool_block_id, 0)
+             if tool_timestamp > current_reasoning_timestamp:
+                 return True
+
+         return False
+
+     async def ensure_tool_use_block(
+         self, message_id: str, tool_call_id: str, tool_name: str, tool_args: Dict[str, Any]
+     ) -> AsyncGenerator[StreamingEvent, None]:
+         """Ensure tool use block exists for the given tool call, create if needed."""
+         if tool_call_id not in self.tool_use_blocks:
+             block_id = f"tool_{uuid.uuid4()}"
+             self.tool_use_blocks[tool_call_id] = block_id
+             self.current_blocks[block_id] = "tool_use"
+             self.block_timestamps[block_id] = time.time()
+             self.last_block_type = "tool_use"  # Track that we created a tool block
+
+             yield StreamingEvent(
+                 event_type=EventType.BLOCK_START,
+                 block_id=block_id,
+                 block_type=MessageBlockType.TOOL_USE,
+                 message_id=message_id,
+                 tool_call_id=tool_call_id,
+                 tool_name=tool_name,
+                 tool_args=tool_args,
+             )
+
+     def get_tool_use_block_id(self, tool_call_id: str) -> Optional[str]:
+         """Get the block ID for a specific tool call."""
+         return self.tool_use_blocks.get(tool_call_id)
+
+     async def ensure_reasoning_block(self, message_id: str) -> AsyncGenerator[StreamingEvent, None]:
+         """Ensure reasoning block exists, create if needed."""
+         if self.reasoning_block_id is None:
+             self.reasoning_block_id = f"reasoning_{uuid.uuid4()}"
+             self.current_blocks[self.reasoning_block_id] = "reasoning"
+             self.block_timestamps[self.reasoning_block_id] = time.time()
+             self.last_block_type = "reasoning"  # Track that we created a reasoning block
+
+             yield StreamingEvent(
+                 event_type=EventType.BLOCK_START,
+                 block_id=self.reasoning_block_id,
+                 block_type=MessageBlockType.REASONING,
+                 message_id=message_id,
+             )
+
+     async def ensure_regular_block(self, message_id: str) -> AsyncGenerator[StreamingEvent, None]:
+         """Ensure regular content block exists, create if needed."""
+         # Create a new regular block if:
+         # 1. No regular block exists yet, OR
+         # 2. The last block created was not a regular block (meaning there was an interruption)
+         should_create_new_block = self.regular_block_id is None or self.last_block_type not in [None, "plain"]
+
+         if should_create_new_block:
+             self.regular_block_id = f"plain_{uuid.uuid4()}"
+             self.current_blocks[self.regular_block_id] = "plain"
+             self.block_timestamps[self.regular_block_id] = time.time()
+             self.last_block_type = "plain"  # Track that we created a regular block
+
+             yield StreamingEvent(
+                 event_type=EventType.BLOCK_START, block_id=self.regular_block_id, block_type=MessageBlockType.PLAIN, message_id=message_id
+             )
+
+     async def ensure_summary_block(self, message_id: str) -> AsyncGenerator[StreamingEvent, None]:
+         """Ensure summary block exists, create if needed."""
+         if self.summary_block_id is None:
+             self.summary_block_id = f"summary_{uuid.uuid4()}"
+             self.current_blocks[self.summary_block_id] = "summary"
+             self.block_timestamps[self.summary_block_id] = time.time()
+             self.last_block_type = "summary"  # Track that we created a summary block
+
+         # Note: We don't yield BLOCK_START for summary blocks as they're handled differently
+         # This is an async generator so we need at least one yield or return to make it work
+         return
+         yield  # This line will never be reached but satisfies the type checker
+
+     def get_reasoning_block_id(self) -> Optional[str]:
+         """Get the reasoning block ID."""
+         return self.reasoning_block_id
+
+     def get_regular_block_id(self) -> Optional[str]:
+         """Get the regular content block ID."""
+         return self.regular_block_id
+
+     def get_summary_block_id(self) -> Optional[str]:
+         """Get the summary block ID."""
+         return self.summary_block_id
+
+     def has_reasoning_activity(self) -> bool:
+         """Check if reasoning activity has been detected."""
+         return self.reasoning_detected
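A short sketch of how the BlockManager above segments reasoning around tool calls, driving only methods defined in this hunk; the message and tool-call identifiers are made up for illustration.

import asyncio

from spaik_sdk.llm.streaming.block_manager import BlockManager


async def demo() -> None:
    manager = BlockManager()

    # Open the first reasoning block; BLOCK_START is yielded exactly once.
    async for event in manager.ensure_reasoning_block("msg-1"):
        print(event.event_type.value, event.block_id)

    await asyncio.sleep(0.01)  # make sure the tool block gets a strictly newer timestamp

    # A tool call arrives and is tracked with its own block.
    async for event in manager.ensure_tool_use_block("msg-1", "call-1", "search", {"query": "spaik"}):
        print(event.event_type.value, event.tool_name)

    # A tool block is now newer than the reasoning block, so further reasoning
    # should be segmented into a fresh block.
    print(manager.should_create_new_reasoning_block())  # True
    manager.reset_reasoning_block()
    async for event in manager.ensure_reasoning_block("msg-1"):
        print(event.event_type.value, event.block_id)  # a second, separate reasoning block


asyncio.run(demo())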
spaik_sdk/llm/streaming/models.py
@@ -0,0 +1,42 @@
+ from dataclasses import dataclass
+ from enum import Enum
+ from typing import TYPE_CHECKING, Any, Dict, List, Optional
+
+ from spaik_sdk.thread.models import MessageBlockType
+
+ if TYPE_CHECKING:
+     from spaik_sdk.llm.consumption.token_usage import TokenUsage
+
+
+ class EventType(str, Enum):
+     """Types of streaming events."""
+
+     MESSAGE_START = "message_start"
+     BLOCK_START = "block_start"
+     BLOCK_END = "block_end"
+     REASONING = "reasoning"
+     REASONING_SUMMARY = "reasoning_summary"
+     TOKEN = "token"
+     TOOL_USE = "tool_use"
+     TOOL_RESPONSE = "tool_response"
+     USAGE_METADATA = "usage_metadata"
+     COMPLETE = "complete"
+     ERROR = "error"
+
+
+ @dataclass
+ class StreamingEvent:
+     """Represents a processed streaming event."""
+
+     event_type: EventType
+     content: Optional[str] = None
+     block_id: Optional[str] = None
+     message_id: Optional[str] = None
+     block_type: Optional[MessageBlockType] = None
+     blocks: Optional[List[str]] = None
+     message: Optional[Any] = None
+     error: Optional[str] = None
+     tool_call_id: Optional[str] = None
+     tool_name: Optional[str] = None
+     tool_args: Optional[Dict[str, Any]] = None
+     usage_metadata: Optional["TokenUsage"] = None
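As a small illustration of the models above (field values are made up): EventType subclasses str, so its values round-trip cleanly between StreamingEvent instances and the plain dicts yielded by MessageHandler.

from spaik_sdk.llm.streaming.models import EventType, StreamingEvent

event = StreamingEvent(
    event_type=EventType.TOKEN,
    content="Hel",
    block_id="plain_123",
    message_id="msg-1",
)
print(event.event_type.value)                       # "token"
print(EventType("tool_use") is EventType.TOOL_USE)  # True: string values map back to members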
spaik_sdk/llm/streaming/streaming_content_handler.py
@@ -0,0 +1,157 @@
+ import uuid
+ from typing import Any, AsyncGenerator, Dict, Optional
+
+ from spaik_sdk.llm.streaming.block_manager import BlockManager
+ from spaik_sdk.llm.streaming.models import EventType, StreamingEvent
+ from spaik_sdk.llm.streaming.streaming_state_manager import StreamingStateManager
+ from spaik_sdk.utils.init_logger import init_logger
+
+ logger = init_logger(__name__)
+
+
+ class StreamingContentHandler:
+     """Handles processing of reasoning and regular content during streaming."""
+
+     def __init__(self, block_manager: BlockManager, state_manager: StreamingStateManager):
+         self.block_manager = block_manager
+         self.state_manager = state_manager
+
+     async def handle_reasoning_content(self, reasoning_content: str) -> AsyncGenerator[StreamingEvent, None]:
+         """Handle reasoning content and yield events."""
+         async for event in self._ensure_streaming_started():
+             yield event
+
+         # Type guard to ensure message_id is not None
+         if self.state_manager.current_message_id is None:
+             return
+
+         # Check if we need a new reasoning block based on timestamps
+         if self.block_manager.should_create_new_reasoning_block():
+             # Reset current reasoning block to force creation of a new one
+             self.block_manager.reset_reasoning_block()
+
+         # Check if this is creating a new reasoning block
+         creating_new_block = self.block_manager.get_reasoning_block_id() is None
+
+         # Ensure reasoning block exists
+         async for streaming_event in self.block_manager.ensure_reasoning_block(self.state_manager.current_message_id):
+             yield streaming_event
+
+         # Track that we've created a reasoning block
+         if creating_new_block:
+             self.state_manager.increment_reasoning_blocks()
+
+         # For Google models with thinking_budget, we might not have actual reasoning content
+         # but we still want to show that reasoning is happening
+         if not reasoning_content and self.block_manager.has_reasoning_activity():
+             reasoning_content = "[Thinking process active - reasoning tokens being used internally]"
+
+         # Always yield reasoning content (even if empty) to ensure thread container tracks it
+         yield StreamingEvent(
+             event_type=EventType.REASONING,
+             content=reasoning_content,  # This could be empty string or placeholder text
+             block_id=self.block_manager.get_reasoning_block_id(),
+             message_id=self.state_manager.current_message_id,
+         )
+
+     async def handle_regular_content(self, regular_content: str) -> AsyncGenerator[StreamingEvent, None]:
+         """Handle regular content and yield events."""
+         async for event in self._ensure_streaming_started():
+             yield event
+
+         # Type guard to ensure message_id is not None
+         if self.state_manager.current_message_id is None:
+             return
+
+         # Ensure regular block exists
+         async for streaming_event in self.block_manager.ensure_regular_block(self.state_manager.current_message_id):
+             yield streaming_event
+
+         yield StreamingEvent(
+             event_type=EventType.TOKEN,
+             content=regular_content,
+             block_id=self.block_manager.get_regular_block_id(),
+             message_id=self.state_manager.current_message_id,
+         )
+
+     async def end_thinking_session_if_needed(self) -> AsyncGenerator[StreamingEvent, None]:
+         """End thinking session and emit BLOCK_END event if needed."""
+         if self.state_manager.in_thinking_session:
+             self.state_manager.end_thinking_session()
+             # Emit BLOCK_END event for the current reasoning block
+             current_reasoning_block_id = self.block_manager.get_reasoning_block_id()
+             if current_reasoning_block_id and self.state_manager.current_message_id:
+                 logger.debug(f"🔚 Emitting BLOCK_END for reasoning block: {current_reasoning_block_id}")
+                 yield StreamingEvent(
+                     event_type=EventType.BLOCK_END, block_id=current_reasoning_block_id, message_id=self.state_manager.current_message_id
+                 )
+             else:
+                 logger.debug(
+                     "❌ Cannot emit BLOCK_END - block_id: %s, message_id: %s",
+                     current_reasoning_block_id,
+                     self.state_manager.current_message_id,
+                 )
+
+     async def end_final_thinking_session_if_needed(self) -> AsyncGenerator[StreamingEvent, None]:
+         """End thinking session at stream end if still active."""
+         if self.state_manager.in_thinking_session:
+             current_reasoning_block_id = self.block_manager.get_reasoning_block_id()
+             if current_reasoning_block_id and self.state_manager.current_message_id:
+                 logger.debug(f"🔚 Stream ending - emitting BLOCK_END for final reasoning block: {current_reasoning_block_id}")
+                 yield StreamingEvent(
+                     event_type=EventType.BLOCK_END, block_id=current_reasoning_block_id, message_id=self.state_manager.current_message_id
+                 )
+             self.state_manager.end_thinking_session()
+
+     async def _ensure_streaming_started(self) -> AsyncGenerator[StreamingEvent, None]:
+         """Ensure streaming has been initialized and yield MESSAGE_START if needed."""
+         if not self.state_manager.streaming_started:
+             self.state_manager.current_message_id = str(uuid.uuid4())
+             self.state_manager.streaming_started = True
+
+             yield StreamingEvent(event_type=EventType.MESSAGE_START, message_id=self.state_manager.current_message_id)
+
+     async def handle_tool_use(self, tool_call_id: str, tool_name: str, tool_args: Dict[str, Any]) -> AsyncGenerator[StreamingEvent, None]:
+         """Handle tool use and yield events."""
+         async for event in self._ensure_streaming_started():
+             yield event
+
+         # Type guard to ensure message_id is not None
+         if self.state_manager.current_message_id is None:
+             return
+
+         # Ensure tool use block exists
+         async for streaming_event in self.block_manager.ensure_tool_use_block(
+             self.state_manager.current_message_id, tool_call_id, tool_name, tool_args
+         ):
+             yield streaming_event
+
+         # Emit tool use event
+         yield StreamingEvent(
+             event_type=EventType.TOOL_USE,
+             block_id=self.block_manager.get_tool_use_block_id(tool_call_id),
+             message_id=self.state_manager.current_message_id,
+             tool_call_id=tool_call_id,
+             tool_name=tool_name,
+             tool_args=tool_args,
+         )
+
+     async def handle_tool_response(
+         self, tool_call_id: str, response: str, error: Optional[str] = None
+     ) -> AsyncGenerator[StreamingEvent, None]:
+         """Handle tool response and yield events."""
+         if self.state_manager.current_message_id is None:
+             return
+
+         # Get the tool use block for this tool call
+         block_id = self.block_manager.get_tool_use_block_id(tool_call_id)
+         if block_id:
+             # Emit tool response event
+             yield StreamingEvent(
+                 event_type=EventType.TOOL_RESPONSE,
+                 content=response,
+                 block_id=block_id,
+                 message_id=self.state_manager.current_message_id,
+                 tool_call_id=tool_call_id,
+                 error=error,
+             )
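A minimal wiring sketch for the StreamingContentHandler above. BlockManager and the handler itself come from this diff; StreamingStateManager is defined in streaming_state_manager.py (not shown in these hunks), so constructing it with no arguments and relying on its default flags is an assumption.

import asyncio

from spaik_sdk.llm.streaming.block_manager import BlockManager
from spaik_sdk.llm.streaming.streaming_content_handler import StreamingContentHandler
from spaik_sdk.llm.streaming.streaming_state_manager import StreamingStateManager


async def demo() -> None:
    # StreamingStateManager() with no arguments is an assumption; see streaming_state_manager.py.
    handler = StreamingContentHandler(BlockManager(), StreamingStateManager())

    # The first content call lazily emits MESSAGE_START and a reasoning BLOCK_START
    # before the REASONING event itself.
    async for event in handler.handle_reasoning_content("Considering the question..."):
        print(event.event_type.value, event.block_id)

    # Regular tokens open a separate plain block and are emitted as TOKEN events.
    async for event in handler.handle_regular_content("Hello"):
        print(event.event_type.value, event.content)


asyncio.run(demo())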