py-aidol 0.3.0__py3-none-any.whl → 0.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aidol/api/__init__.py +3 -0
- aidol/api/aidol.py +7 -6
- aidol/api/chatroom.py +325 -0
- aidol/api/common.py +4 -3
- aidol/api/companion.py +23 -16
- aidol/context/__init__.py +26 -0
- aidol/context/builder.py +376 -0
- aidol/factories.py +8 -0
- aidol/models/__init__.py +2 -1
- aidol/models/chatroom.py +48 -0
- aidol/protocols.py +64 -0
- aidol/providers/__init__.py +9 -0
- aidol/providers/llm/__init__.py +15 -0
- aidol/providers/llm/base.py +147 -0
- aidol/providers/llm/openai.py +101 -0
- aidol/repositories/__init__.py +2 -0
- aidol/repositories/chatroom.py +142 -0
- aidol/schemas/__init__.py +35 -0
- aidol/schemas/chatroom.py +147 -0
- aidol/schemas/model_settings.py +35 -0
- aidol/schemas/persona.py +20 -0
- aidol/services/__init__.py +2 -0
- aidol/services/image_generation_service.py +24 -12
- aidol/services/response_generation_service.py +63 -0
- aidol/settings.py +46 -0
- {py_aidol-0.3.0.dist-info → py_aidol-0.5.0.dist-info}/METADATA +26 -6
- py_aidol-0.5.0.dist-info/RECORD +41 -0
- {py_aidol-0.3.0.dist-info → py_aidol-0.5.0.dist-info}/WHEEL +1 -1
- py_aidol-0.3.0.dist-info/RECORD +0 -27
aidol/context/builder.py
ADDED
@@ -0,0 +1,376 @@
"""Message context builder for LLM calls.

Provider-based context builder for AIdol standalone.
Integrators can extend this with platform-specific features.
"""

from __future__ import annotations

import logging
from datetime import datetime, timedelta
from typing import Self
from zoneinfo import ZoneInfo

from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage

from aidol.providers.llm import ProviderConstraints
from aidol.schemas import Persona

logger = logging.getLogger(__name__)


def format_utc_offset(utc_offset: timedelta | None) -> str:
    """Format UTC offset timedelta as a string.

    Args:
        utc_offset: The UTC offset timedelta, or None.

    Returns:
        Formatted UTC offset string (e.g., "UTC+9", "UTC-5:30", "UTC").

    Example:
        >>> from datetime import timedelta
        >>> format_utc_offset(timedelta(hours=9))
        "UTC+9"
        >>> format_utc_offset(timedelta(hours=5, minutes=30))
        "UTC+5:30"
        >>> format_utc_offset(timedelta(hours=-5))
        "UTC-5"
        >>> format_utc_offset(None)
        "UTC"
    """
    if utc_offset is None:
        return "UTC"

    total_seconds = int(utc_offset.total_seconds())

    # Handle negative timezones correctly by separating sign and absolute value
    sign = 1 if total_seconds >= 0 else -1
    abs_seconds = abs(total_seconds)
    hours = sign * (abs_seconds // 3600)
    minutes = (abs_seconds % 3600) // 60

    if minutes == 0:
        return f"UTC{hours:+d}"
    # Support 30-minute offsets (e.g., India UTC+5:30, Newfoundland UTC-3:30)
    return f"UTC{hours:+d}:{minutes:02d}"

def format_datetime_korean(dt: datetime) -> str:
    """
    Format a datetime in Korean style.

    Format: YYYY-MM-DD (weekday) HH:MM (UTC±X)
    Example: 2025-10-27 (월요일) 14:30 (UTC+9)

    Args:
        dt (datetime): The datetime to format (must be timezone-aware).

    Returns:
        str: The formatted datetime string in Korean.
    """
    # Korean weekday names (locale-independent mapping)
    weekdays_korean = {
        0: "월요일",
        1: "화요일",
        2: "수요일",
        3: "목요일",
        4: "금요일",
        5: "토요일",
        6: "일요일",
    }

    # ISO 8601 date format
    date_str = dt.strftime("%Y-%m-%d")

    # Korean weekday name
    weekday = weekdays_korean[dt.weekday()]

    # Time (24-hour format)
    time_str = dt.strftime("%H:%M")

    # UTC offset
    offset_str = format_utc_offset(dt.utcoffset())

    return f"{date_str} ({weekday}) {time_str} ({offset_str})"

class MessageContextBuilder:
    """Provider-based message context builder.

    Assembles LLM context step-by-step with builder pattern.
    Applies provider constraints (system message combining, alternating turns, etc.).

    For AIdol standalone use. Integrators can extend for platform-specific needs.

    Components:
        1. Persona system prompt
        2. Real-time context (current time)
        3. Purpose-specific prompts (decision, generation, etc.)
        4. Current conversation messages

    Usage:
        builder = MessageContextBuilder(provider, persona)
        context = (
            builder
            .with_persona()
            .with_real_time_context()
            .with_purpose_prompts([selection_prompt])
            .with_current_conversation(messages)
            .build()
        )
    """

    def __init__(
        self,
        provider: ProviderConstraints,
        persona: Persona | None = None,
    ) -> None:
        """Initialize MessageContextBuilder.

        Args:
            provider: Provider with constraint properties for context building.
            persona: Optional persona with system prompt.
        """
        self.provider = provider
        self.persona = persona

        # Component buffers
        self._persona_prompts: list[BaseMessage] = []
        self._context_prompts: list[BaseMessage] = []
        self._purpose_prompts: list[SystemMessage] = []
        self._current: list[BaseMessage] = []

    def with_persona(self) -> Self:
        """Add persona system prompt.

        Adds system prompt from persona if available.

        Returns:
            self for method chaining.
        """
        if self.persona and self.persona.system_prompt:
            self._persona_prompts = [SystemMessage(content=self.persona.system_prompt)]
        return self

    def with_real_time_context(self) -> Self:
        """Add real-time context (current time).

        Adds current time based on persona's timezone.
        Subclasses can override _format_current_time() for custom formatting.

        Returns:
            self for method chaining.
        """
        if self.persona:
            time_str = self._format_current_time()
            self._context_prompts = [SystemMessage(content=f"현재 시각: {time_str}.")]
        return self

    def _format_current_time(self) -> str:
        """Format current time in Korean format.

        Returns:
            Formatted current time string in Korean.
        """
        timezone_name = self.persona.timezone_name if self.persona else "UTC"
        tz = ZoneInfo(timezone_name)
        now = datetime.now(tz)
        return format_datetime_korean(now)

    def with_purpose_prompts(self, prompts: list[SystemMessage] | None = None) -> Self:
        """Add purpose-specific system prompts.

        Service layer provides these prompts (OCP compliance).
        Examples: response format prompt, conversation constraints.

        Args:
            prompts: Purpose-specific system prompts.

        Returns:
            self for method chaining.
        """
        if prompts:
            self._purpose_prompts = list(prompts)
        return self

    def with_current_conversation(self, messages: list[BaseMessage]) -> Self:
        """Add current conversation messages.

        Args:
            messages: Current conversation messages (Human/AI messages).

        Returns:
            self for method chaining.
        """
        self._current = list(messages)
        return self

    def build(self) -> list[BaseMessage]:
        """Assemble all components with provider constraints applied.

        Assembly order:
        1. System messages (persona + context + purpose prompts)
        2. Current conversation

        Provider constraints:
        - combine_system_messages: Merge all system messages into one
        - require_first_user_message: Ensure first non-system message is from user
        - enforce_alternating_turns: Deduplicate consecutive same-role messages

        Returns:
            List of messages ready for LLM call.
        """
        # 1. Assemble system messages
        system_messages = (
            self._persona_prompts + self._context_prompts + self._purpose_prompts
        )

        # 2. Apply combine_system_messages constraint
        messages: list[BaseMessage]
        if self.provider.combine_system_messages and len(system_messages) > 1:
            # Some providers (e.g., Anthropic) require all system messages to be combined into one
            combined_content = "\n\n".join(str(m.content) for m in system_messages)
            messages = [SystemMessage(content=combined_content)]
        else:
            messages = list(system_messages)

        # 3. Add current conversation
        # From this point, all added messages are non-system messages
        messages.extend(self._current)

        # 4. Apply provider constraints
        return self._apply_provider_constraints(messages)

    def _apply_provider_constraints(
        self, messages: list[BaseMessage]
    ) -> list[BaseMessage]:
        """Apply provider constraints to assembled messages.

        Reusable by subclasses that override build() but need the same constraint logic.

        Constraints applied:
        1. Verify all SystemMessages are at front
        2. Ensure first non-system message is from user (if required)
        3. Deduplicate consecutive same-role messages (if required)

        Args:
            messages: Assembled message list.

        Returns:
            Message list with provider constraints applied.
        """
        verify_system_messages_at_front(messages)

        if self.provider.require_first_user_message:
            ensure_first_user_message(messages)

        if self.provider.enforce_alternating_turns:
            messages = deduplicate_consecutive_same_role_messages(messages)

        return messages


def verify_system_messages_at_front(messages: list[BaseMessage]) -> None:
    """Verify all SystemMessages are at the front.

    This function enforces the runtime precondition of build():
    - _memory, _examples, _current buffers must not contain SystemMessage
    - All SystemMessages must be in _persona_prompts, _context_prompts, _purpose_prompts only

    Exported for reuse by integrators.

    Args:
        messages: Message list to verify.

    Raises:
        AssertionError: If SystemMessage found after non-system messages.
    """
    found_non_system = False
    for i, message in enumerate(messages):
        if isinstance(message, SystemMessage):
            if found_non_system:
                raise AssertionError(
                    f"SystemMessage found at index {i} after non-system messages. "
                    f"All SystemMessages must be at the front. "
                    f"Check that _memory, _examples, and _current buffers contain no SystemMessage."
                )
        else:
            found_non_system = True


def ensure_first_user_message(messages: list[BaseMessage]) -> None:
    """Ensure first non-system message is from user.

    Some providers (e.g., Anthropic) require the first non-system message
    to be a user message. This function adds a placeholder if needed.

    Exported for reuse by integrators.

    Args:
        messages: Message list to modify in place.
    """
    first_non_system_idx = next(
        (i for i, msg in enumerate(messages) if not isinstance(msg, SystemMessage)),
        None,
    )
    if first_non_system_idx is not None and not isinstance(
        messages[first_non_system_idx], HumanMessage
    ):
        messages.insert(first_non_system_idx, HumanMessage(content="."))


def deduplicate_consecutive_same_role_messages(
    messages: list[BaseMessage],
) -> list[BaseMessage]:
    """Keep only the latest message when consecutive same-role messages occur.

    Some LLM providers (e.g., Anthropic) require strict user-assistant alternation.
    When consecutive same-role messages are found, this function keeps only
    the most recent message and discards earlier ones.

    Precondition: build()'s verify_system_messages_at_front() has already validated
    the structure as [SystemMessage(s), ...non-system...]. Only non-system messages
    are subject to the alternation rule.

    Example:
        Input: [SystemMessage, SystemMessage, HumanMessage, HumanMessage, AIMessage]
        Output: [SystemMessage, SystemMessage, HumanMessage (latest), AIMessage]

    Exported for reuse by integrators.

    Args:
        messages: Message list assembled and validated by build().

    Returns:
        New list with consecutive same-role messages deduplicated.
    """
    if not messages:
        return []

    result: list[BaseMessage] = []
    prev_role: type | None = None

    for msg in messages:
        current_role = type(msg)

        if current_role is SystemMessage:
            # Always append SystemMessages and do not update prev_role
            # (build() has already placed all system messages at the front)
            result.append(msg)
        else:
            # Apply the alternation rule to non-system messages only
            if current_role != prev_role:
                result.append(msg)
            else:
                # If the same role repeats, replace the previous message with the latest one
                logger.debug(
                    "Replacing consecutive %s message to enforce alternating turns",
                    current_role.__name__,
                )
                result[-1] = msg

            # Update prev_role only for non-system messages
            prev_role = current_role

    return result
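An illustrative sketch (not part of the package) of how the builder and the exported constraint helpers compose. _Constraints and _Persona are hypothetical stand-ins for ProviderConstraints and aidol.schemas.Persona; they carry only the attributes the builder actually reads.

from dataclasses import dataclass

from langchain_core.messages import AIMessage, HumanMessage

from aidol.context.builder import MessageContextBuilder


@dataclass
class _Constraints:  # hypothetical stand-in for ProviderConstraints
    combine_system_messages: bool = True
    require_first_user_message: bool = True
    enforce_alternating_turns: bool = True


@dataclass
class _Persona:  # hypothetical stand-in for aidol.schemas.Persona
    system_prompt: str = "You are a cheerful virtual idol."
    timezone_name: str = "Asia/Seoul"


context = (
    MessageContextBuilder(_Constraints(), _Persona())
    .with_persona()
    .with_real_time_context()
    .with_current_conversation([AIMessage("hello!"), HumanMessage("hi")])
    .build()
)
# Expected shape: one combined SystemMessage (persona prompt + current time),
# then a placeholder HumanMessage(".") inserted by ensure_first_user_message
# because the first conversation message is an AIMessage.

In real use, the provider argument would be the ProviderConstraints object from aidol.providers.llm rather than a stand-in.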
aidol/factories.py
CHANGED
@@ -8,6 +8,7 @@ from aioia_core.factories import BaseRepositoryFactory
 
 from aidol.repositories.aidol import AIdolRepository
 from aidol.repositories.aidol_lead import AIdolLeadRepository
+from aidol.repositories.chatroom import ChatroomRepository
 from aidol.repositories.companion import CompanionRepository
 
 
@@ -18,6 +19,13 @@ class AIdolRepositoryFactory(BaseRepositoryFactory[AIdolRepository]):
         super().__init__(repository_class=AIdolRepository)
 
 
+class ChatroomRepositoryFactory(BaseRepositoryFactory[ChatroomRepository]):
+    """Factory for creating Chatroom repositories."""
+
+    def __init__(self):
+        super().__init__(repository_class=ChatroomRepository)
+
+
 class CompanionRepositoryFactory(BaseRepositoryFactory[CompanionRepository]):
     """Factory for creating Companion repositories."""
 
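A minimal wiring sketch, assuming BaseRepositoryFactory from aioia_core exposes the create_repository(db_session=...) method implied by ChatroomRepositoryFactoryProtocol in aidol/protocols.py below; the helper function is illustrative, not part of the package.

from sqlalchemy.orm import Session

from aidol.factories import ChatroomRepositoryFactory


def make_chatroom_repository(session: Session):
    # Assumption: create_repository comes from aioia_core's BaseRepositoryFactory,
    # matching the factory protocol defined in aidol/protocols.py.
    return ChatroomRepositoryFactory().create_repository(db_session=session)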
aidol/models/__init__.py
CHANGED
@@ -4,6 +4,7 @@ AIdol database models
 
 from aidol.models.aidol import DBAIdol
 from aidol.models.aidol_lead import DBAIdolLead
+from aidol.models.chatroom import DBChatroom, DBMessage
 from aidol.models.companion import DBCompanion
 
-__all__ = ["DBAIdol", "DBAIdolLead", "DBCompanion"]
+__all__ = ["DBAIdol", "DBAIdolLead", "DBChatroom", "DBCompanion", "DBMessage"]
aidol/models/chatroom.py
ADDED
@@ -0,0 +1,48 @@
"""
Chatroom and message database models

Uses aioia_core.models.BaseModel which provides:
- id: Mapped[str] (primary key, UUID default)
- created_at: Mapped[datetime]
- updated_at: Mapped[datetime]
"""

from aioia_core.models import BaseModel
from sqlalchemy import ForeignKey, Index, String, Text
from sqlalchemy.orm import Mapped, mapped_column


class DBChatroom(BaseModel):
    """Chatroom database model"""

    __tablename__ = "chatrooms"

    # id, created_at, updated_at inherited from BaseModel
    name: Mapped[str] = mapped_column(String, nullable=False)
    language: Mapped[str] = mapped_column(String, nullable=False, default="en")

    __table_args__ = (Index("ix_chatrooms_language", "language"),)


class DBMessage(BaseModel):
    """Message database model"""

    __tablename__ = "messages"

    # id, created_at, updated_at inherited from BaseModel
    chatroom_id: Mapped[str] = mapped_column(ForeignKey("chatrooms.id"), nullable=False)
    sender_type: Mapped[str] = mapped_column(
        String, nullable=False
    )  # "user" | "companion"
    content: Mapped[str] = mapped_column(Text, nullable=False)
    claim_token: Mapped[str | None] = mapped_column(
        String(36), nullable=True, index=True
    )  # Anonymous user identifier for DAU/MAU analytics
    companion_id: Mapped[str | None] = mapped_column(
        String, nullable=True, index=True
    )  # Companion identifier for analytics

    __table_args__ = (
        Index("ix_messages_chatroom_created", "chatroom_id", "created_at"),
        Index("ix_messages_sender_type", "sender_type"),
    )
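The composite (chatroom_id, created_at) index is built for the paginated history read. A hedged sketch of that access path with standard SQLAlchemy 2.0-style querying; created_at comes from aioia_core's BaseModel, as noted in the module docstring.

from sqlalchemy import select
from sqlalchemy.orm import Session

from aidol.models import DBMessage


def recent_messages(
    session: Session, chatroom_id: str, limit: int = 50, offset: int = 0
) -> list[DBMessage]:
    # Newest-first page of one chatroom's messages; served by
    # the ix_messages_chatroom_created composite index.
    stmt = (
        select(DBMessage)
        .where(DBMessage.chatroom_id == chatroom_id)
        .order_by(DBMessage.created_at.desc())
        .limit(limit)
        .offset(offset)
    )
    return list(session.scalars(stmt))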
aidol/protocols.py
CHANGED
@@ -20,9 +20,14 @@ from aidol.schemas import (
     AIdolLead,
     AIdolLeadCreate,
     AIdolUpdate,
+    Chatroom,
+    ChatroomCreate,
+    ChatroomUpdate,
     Companion,
     CompanionCreate,
     CompanionUpdate,
+    Message,
+    MessageCreate,
 )
 
 
@@ -30,6 +35,65 @@ class NoUpdate(BaseModel):
     """Placeholder for repositories without update support."""
 
 
+class ChatroomRepositoryProtocol(
+    CrudRepositoryProtocol[Chatroom, ChatroomCreate, ChatroomUpdate], Protocol
+):
+    """Protocol defining chatroom repository expectations.
+
+    This protocol enables type-safe dependency injection by defining
+    the exact interface that ChatroomRouter uses. Platform-specific
+    adapters implement this protocol to convert their repository
+    responses to aidol schemas.
+
+    Inherits CRUD operations from CrudRepositoryProtocol.
+    Additional domain-specific methods:
+        get_messages_by_chatroom_id: Get messages with pagination.
+        add_message_to_chatroom: Add a message to a chatroom.
+    """
+
+    def get_messages_by_chatroom_id(
+        self, chatroom_id: str, limit: int, offset: int
+    ) -> list[Message]:
+        """Get messages from a chatroom with pagination.
+
+        Args:
+            chatroom_id: Chatroom ID.
+            limit: Maximum number of messages.
+            offset: Number of messages to skip.
+        """
+        ...
+
+    def add_message_to_chatroom(
+        self, chatroom_id: str, message: MessageCreate
+    ) -> Message:
+        """Add a message to a chatroom.
+
+        Args:
+            chatroom_id: Chatroom ID.
+            message: MessageCreate or CompanionMessageCreate schema.
+        """
+        ...
+
+
+class ChatroomRepositoryFactoryProtocol(Protocol):
+    """Protocol for factory that creates ChatroomRepositoryProtocol instances.
+
+    Implementations:
+    - aidol.factories.ChatroomRepositoryFactory (standalone)
+    - ChatroomRepositoryFactoryAdapter (platform integration)
+    """
+
+    def create_repository(
+        self, db_session: Session | None = None
+    ) -> ChatroomRepositoryProtocol:
+        """Create a repository instance.
+
+        Args:
+            db_session: Optional database session.
+        """
+        ...
+
+
 class AIdolRepositoryProtocol(
     CrudRepositoryProtocol[AIdol, AIdolCreate, AIdolUpdate], Protocol
 ):
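Because the protocol is structural, any repository or adapter whose methods match satisfies it. A small illustrative consumer typed against the protocol; the helper below is not part of the package.

from aidol.protocols import ChatroomRepositoryProtocol
from aidol.schemas import Message


def latest_messages(
    repo: ChatroomRepositoryProtocol, chatroom_id: str, n: int = 10
) -> list[Message]:
    # Works with the standalone ChatroomRepository or any platform adapter,
    # since either only needs to match the protocol's method signatures.
    return repo.get_messages_by_chatroom_id(chatroom_id, limit=n, offset=0)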
aidol/providers/__init__.py
ADDED
@@ -0,0 +1,9 @@
"""LLM Provider abstractions for AIdol.

This module defines the LLMProvider Protocol for platform-agnostic LLM integration.
Integrators can implement this protocol using any LLM SDK (LangChain, LiteLLM, etc.).
"""

from aidol.providers.llm.base import LLMProvider

__all__ = ["LLMProvider"]
aidol/providers/llm/__init__.py
ADDED
@@ -0,0 +1,15 @@
"""LLM provider Protocol and implementations for AIdol."""

from aidol.providers.llm.base import (
    LLMProvider,
    ProviderConstraints,
    lookup_context_window,
)
from aidol.providers.llm.openai import OpenAILLMProvider

__all__ = [
    "LLMProvider",
    "OpenAILLMProvider",
    "ProviderConstraints",
    "lookup_context_window",
]
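Together the two __init__ modules give integrators a stable import surface; a quick sketch of what resolves after this release, based only on the exports shown above:

from aidol.providers import LLMProvider  # re-exported Protocol
from aidol.providers.llm import (
    OpenAILLMProvider,
    ProviderConstraints,
    lookup_context_window,
)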