nornweave 0.1.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nornweave/__init__.py +3 -0
- nornweave/adapters/__init__.py +1 -0
- nornweave/adapters/base.py +5 -0
- nornweave/adapters/mailgun.py +196 -0
- nornweave/adapters/resend.py +510 -0
- nornweave/adapters/sendgrid.py +492 -0
- nornweave/adapters/ses.py +824 -0
- nornweave/cli.py +186 -0
- nornweave/core/__init__.py +26 -0
- nornweave/core/config.py +172 -0
- nornweave/core/exceptions.py +25 -0
- nornweave/core/interfaces.py +390 -0
- nornweave/core/storage.py +192 -0
- nornweave/core/utils.py +23 -0
- nornweave/huginn/__init__.py +10 -0
- nornweave/huginn/client.py +296 -0
- nornweave/huginn/config.py +52 -0
- nornweave/huginn/resources.py +165 -0
- nornweave/huginn/server.py +202 -0
- nornweave/models/__init__.py +113 -0
- nornweave/models/attachment.py +136 -0
- nornweave/models/event.py +275 -0
- nornweave/models/inbox.py +33 -0
- nornweave/models/message.py +284 -0
- nornweave/models/thread.py +172 -0
- nornweave/muninn/__init__.py +14 -0
- nornweave/muninn/tools.py +207 -0
- nornweave/search/__init__.py +1 -0
- nornweave/search/embeddings.py +1 -0
- nornweave/search/vector_store.py +1 -0
- nornweave/skuld/__init__.py +1 -0
- nornweave/skuld/rate_limiter.py +1 -0
- nornweave/skuld/scheduler.py +1 -0
- nornweave/skuld/sender.py +25 -0
- nornweave/skuld/webhooks.py +1 -0
- nornweave/storage/__init__.py +20 -0
- nornweave/storage/database.py +165 -0
- nornweave/storage/gcs.py +144 -0
- nornweave/storage/local.py +152 -0
- nornweave/storage/s3.py +164 -0
- nornweave/urdr/__init__.py +14 -0
- nornweave/urdr/adapters/__init__.py +16 -0
- nornweave/urdr/adapters/base.py +385 -0
- nornweave/urdr/adapters/postgres.py +50 -0
- nornweave/urdr/adapters/sqlite.py +51 -0
- nornweave/urdr/migrations/env.py +94 -0
- nornweave/urdr/migrations/script.py.mako +26 -0
- nornweave/urdr/migrations/versions/.gitkeep +0 -0
- nornweave/urdr/migrations/versions/20260131_0001_initial_schema.py +182 -0
- nornweave/urdr/migrations/versions/20260131_0002_extended_schema.py +241 -0
- nornweave/urdr/orm.py +641 -0
- nornweave/verdandi/__init__.py +45 -0
- nornweave/verdandi/attachments.py +471 -0
- nornweave/verdandi/content.py +420 -0
- nornweave/verdandi/headers.py +404 -0
- nornweave/verdandi/parser.py +25 -0
- nornweave/verdandi/sanitizer.py +9 -0
- nornweave/verdandi/threading.py +359 -0
- nornweave/yggdrasil/__init__.py +1 -0
- nornweave/yggdrasil/app.py +86 -0
- nornweave/yggdrasil/dependencies.py +190 -0
- nornweave/yggdrasil/middleware/__init__.py +1 -0
- nornweave/yggdrasil/middleware/auth.py +1 -0
- nornweave/yggdrasil/middleware/logging.py +1 -0
- nornweave/yggdrasil/routes/__init__.py +1 -0
- nornweave/yggdrasil/routes/v1/__init__.py +1 -0
- nornweave/yggdrasil/routes/v1/inboxes.py +124 -0
- nornweave/yggdrasil/routes/v1/messages.py +200 -0
- nornweave/yggdrasil/routes/v1/search.py +84 -0
- nornweave/yggdrasil/routes/v1/threads.py +142 -0
- nornweave/yggdrasil/routes/webhooks/__init__.py +1 -0
- nornweave/yggdrasil/routes/webhooks/mailgun.py +136 -0
- nornweave/yggdrasil/routes/webhooks/resend.py +344 -0
- nornweave/yggdrasil/routes/webhooks/sendgrid.py +15 -0
- nornweave/yggdrasil/routes/webhooks/ses.py +15 -0
- nornweave-0.1.2.dist-info/METADATA +324 -0
- nornweave-0.1.2.dist-info/RECORD +80 -0
- nornweave-0.1.2.dist-info/WHEEL +4 -0
- nornweave-0.1.2.dist-info/entry_points.txt +5 -0
- nornweave-0.1.2.dist-info/licenses/LICENSE +201 -0
|
@@ -0,0 +1,385 @@
|
|
|
1
|
+
"""Base storage adapter with shared SQLAlchemy functionality."""
|
|
2
|
+
|
|
3
|
+
import uuid
|
|
4
|
+
from datetime import UTC, datetime, timedelta
|
|
5
|
+
from typing import TYPE_CHECKING, Any
|
|
6
|
+
|
|
7
|
+
from sqlalchemy import or_, select
|
|
8
|
+
|
|
9
|
+
from nornweave.core.interfaces import StorageInterface
|
|
10
|
+
from nornweave.urdr.orm import AttachmentORM, EventORM, InboxORM, MessageORM, ThreadORM
|
|
11
|
+
|
|
12
|
+
if TYPE_CHECKING:
|
|
13
|
+
from sqlalchemy.ext.asyncio import AsyncSession
|
|
14
|
+
|
|
15
|
+
from nornweave.models.event import Event, EventType
|
|
16
|
+
from nornweave.models.inbox import Inbox
|
|
17
|
+
from nornweave.models.message import Message
|
|
18
|
+
from nornweave.models.thread import Thread
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def generate_uuid() -> str:
    """Return a fresh random UUID (version 4) rendered as a string."""
    new_id = uuid.uuid4()
    return str(new_id)
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class BaseSQLAlchemyAdapter(StorageInterface):
    """Base adapter with shared SQLAlchemy logic for Postgres and SQLite.

    Subclasses override :meth:`search_messages` for dialect-specific
    case-insensitive matching; everything else here is dialect-neutral.
    All write methods ``flush`` (but never ``commit``) so the caller
    retains control of the transaction boundary.
    """

    def __init__(self, session: AsyncSession) -> None:
        """Initialize with an async session."""
        self._session = session

    # -------------------------------------------------------------------------
    # Inbox methods
    # -------------------------------------------------------------------------
    async def create_inbox(self, inbox: Inbox) -> Inbox:
        """Create an inbox, generating an id if the model has none."""
        orm_inbox = InboxORM.from_pydantic(inbox)
        if not orm_inbox.id:
            orm_inbox.id = generate_uuid()
        self._session.add(orm_inbox)
        await self._session.flush()
        # Refresh so server-side defaults are reflected in the returned model.
        await self._session.refresh(orm_inbox)
        return orm_inbox.to_pydantic()

    async def get_inbox(self, inbox_id: str) -> Inbox | None:
        """Get an inbox by id, or None if it does not exist."""
        result = await self._session.get(InboxORM, inbox_id)
        return result.to_pydantic() if result else None

    async def get_inbox_by_email(self, email_address: str) -> Inbox | None:
        """Get an inbox by email address, or None if it does not exist."""
        stmt = select(InboxORM).where(InboxORM.email_address == email_address)
        result = await self._session.execute(stmt)
        orm_inbox = result.scalar_one_or_none()
        return orm_inbox.to_pydantic() if orm_inbox else None

    async def delete_inbox(self, inbox_id: str) -> bool:
        """Delete an inbox. Returns True if deleted, False if it did not exist."""
        orm_inbox = await self._session.get(InboxORM, inbox_id)
        if orm_inbox is None:
            return False
        await self._session.delete(orm_inbox)
        await self._session.flush()
        return True

    async def list_inboxes(
        self,
        *,
        limit: int = 50,
        offset: int = 0,
    ) -> list[Inbox]:
        """List all inboxes, ordered by email address."""
        stmt = select(InboxORM).order_by(InboxORM.email_address).limit(limit).offset(offset)
        result = await self._session.execute(stmt)
        return [row.to_pydantic() for row in result.scalars().all()]

    # -------------------------------------------------------------------------
    # Thread methods
    # -------------------------------------------------------------------------
    async def create_thread(self, thread: Thread) -> Thread:
        """Create a thread, generating an id if the model has none."""
        orm_thread = ThreadORM.from_pydantic(thread)
        if not orm_thread.id:
            orm_thread.id = generate_uuid()
        self._session.add(orm_thread)
        await self._session.flush()
        await self._session.refresh(orm_thread)
        return orm_thread.to_pydantic()

    async def get_thread(self, thread_id: str) -> Thread | None:
        """Get a thread by id, or None if it does not exist."""
        result = await self._session.get(ThreadORM, thread_id)
        return result.to_pydantic() if result else None

    async def get_thread_by_participant_hash(
        self,
        inbox_id: str,
        participant_hash: str,
    ) -> Thread | None:
        """Get a thread by inbox and participant hash, or None if absent."""
        stmt = select(ThreadORM).where(
            ThreadORM.inbox_id == inbox_id,
            ThreadORM.participant_hash == participant_hash,
        )
        result = await self._session.execute(stmt)
        orm_thread = result.scalar_one_or_none()
        return orm_thread.to_pydantic() if orm_thread else None

    async def update_thread(self, thread: Thread) -> Thread:
        """Update a thread's mutable fields.

        Raises:
            ValueError: If no thread with ``thread.id`` exists.
        """
        orm_thread = await self._session.get(ThreadORM, thread.id)
        if orm_thread is None:
            raise ValueError(f"Thread {thread.id} not found")
        # Only these fields are updatable; inbox_id and id are immutable.
        orm_thread.subject = thread.subject
        orm_thread.last_message_at = thread.last_message_at
        orm_thread.participant_hash = thread.participant_hash
        await self._session.flush()
        await self._session.refresh(orm_thread)
        return orm_thread.to_pydantic()

    async def list_threads_for_inbox(
        self,
        inbox_id: str,
        *,
        limit: int = 20,
        offset: int = 0,
    ) -> list[Thread]:
        """List threads for an inbox, ordered by last_message_at DESC."""
        stmt = (
            select(ThreadORM)
            .where(ThreadORM.inbox_id == inbox_id)
            # Threads with no messages yet (NULL last_message_at) sort last.
            .order_by(ThreadORM.last_message_at.desc().nulls_last())
            .limit(limit)
            .offset(offset)
        )
        result = await self._session.execute(stmt)
        return [row.to_pydantic() for row in result.scalars().all()]

    # -------------------------------------------------------------------------
    # Message methods
    # -------------------------------------------------------------------------
    async def create_message(self, message: Message) -> Message:
        """Create a message, defaulting id and created_at when absent."""
        orm_message = MessageORM.from_pydantic(message)
        if not orm_message.id:
            orm_message.id = generate_uuid()
        if orm_message.created_at is None:
            orm_message.created_at = datetime.now(UTC)
        self._session.add(orm_message)
        await self._session.flush()
        await self._session.refresh(orm_message)
        return orm_message.to_pydantic()

    async def get_message(self, message_id: str) -> Message | None:
        """Get a message by id, or None if it does not exist."""
        result = await self._session.get(MessageORM, message_id)
        return result.to_pydantic() if result else None

    async def list_messages_for_inbox(
        self,
        inbox_id: str,
        *,
        limit: int = 50,
        offset: int = 0,
    ) -> list[Message]:
        """List messages for an inbox, ordered by created_at ascending."""
        stmt = (
            select(MessageORM)
            .where(MessageORM.inbox_id == inbox_id)
            .order_by(MessageORM.created_at)
            .limit(limit)
            .offset(offset)
        )
        result = await self._session.execute(stmt)
        return [row.to_pydantic() for row in result.scalars().all()]

    async def list_messages_for_thread(
        self,
        thread_id: str,
        *,
        limit: int = 100,
        offset: int = 0,
    ) -> list[Message]:
        """List messages for a thread, ordered by created_at ascending."""
        stmt = (
            select(MessageORM)
            .where(MessageORM.thread_id == thread_id)
            .order_by(MessageORM.created_at)
            .limit(limit)
            .offset(offset)
        )
        result = await self._session.execute(stmt)
        return [row.to_pydantic() for row in result.scalars().all()]

    async def search_messages(
        self,
        inbox_id: str,
        query: str,
        *,
        limit: int = 50,
        offset: int = 0,
    ) -> list[Message]:
        """Search messages by content. Override in subclass for dialect-specific search.

        Default implementation uses LIKE (case-sensitive). Subclasses
        override for ILIKE (Postgres) or lower()+LIKE (SQLite).
        """
        pattern = f"%{query}%"
        stmt = (
            select(MessageORM)
            .where(
                MessageORM.inbox_id == inbox_id,
                or_(
                    MessageORM.content_clean.like(pattern),
                    MessageORM.content_raw.like(pattern),
                ),
            )
            .order_by(MessageORM.created_at.desc())
            .limit(limit)
            .offset(offset)
        )
        result = await self._session.execute(stmt)
        return [row.to_pydantic() for row in result.scalars().all()]

    # -------------------------------------------------------------------------
    # Event methods
    # -------------------------------------------------------------------------
    async def create_event(self, event: Event) -> Event:
        """Create an event, defaulting id and created_at when absent."""
        orm_event = EventORM.from_pydantic(event)
        if not orm_event.id:
            orm_event.id = generate_uuid()
        if orm_event.created_at is None:
            orm_event.created_at = datetime.now(UTC)
        self._session.add(orm_event)
        await self._session.flush()
        await self._session.refresh(orm_event)
        return orm_event.to_pydantic()

    async def get_event(self, event_id: str) -> Event | None:
        """Get an event by id, or None if it does not exist."""
        result = await self._session.get(EventORM, event_id)
        return result.to_pydantic() if result else None

    async def list_events(
        self,
        *,
        event_type: EventType | None = None,
        limit: int = 50,
        offset: int = 0,
    ) -> list[Event]:
        """List events, optionally filtered by type, ordered by created_at DESC."""
        stmt = select(EventORM)
        if event_type is not None:
            # The column stores the enum's string value, not the enum member.
            stmt = stmt.where(EventORM.type == event_type.value)
        stmt = stmt.order_by(EventORM.created_at.desc()).limit(limit).offset(offset)
        result = await self._session.execute(stmt)
        return [row.to_pydantic() for row in result.scalars().all()]

    # -------------------------------------------------------------------------
    # Attachment methods
    # -------------------------------------------------------------------------
    @staticmethod
    def _attachment_row_to_dict(
        row: AttachmentORM,
        *,
        include_content: bool,
    ) -> dict[str, Any]:
        """Serialize an attachment ORM row to the plain-dict API shape.

        ``include_content`` controls whether the raw bytes are included;
        list endpoints historically omit them while single-row lookups
        include them, and that shape is preserved here.
        """
        data: dict[str, Any] = {
            "id": row.id,
            "message_id": row.message_id,
            "filename": row.filename,
            "content_type": row.content_type,
            "size_bytes": row.size_bytes,
            "disposition": row.disposition,
            "content_id": row.content_id,
            "storage_path": row.storage_path,
            "storage_backend": row.storage_backend,
            "content_hash": row.content_hash,
            "created_at": row.created_at,
        }
        if include_content:
            data["content"] = row.content
        return data

    async def create_attachment(
        self,
        message_id: str,
        filename: str,
        content_type: str,
        size_bytes: int,
        *,
        disposition: str = "attachment",
        content_id: str | None = None,
        storage_path: str | None = None,
        storage_backend: str | None = None,
        content_hash: str | None = None,
        content: bytes | None = None,
    ) -> str:
        """Create an attachment record and return its generated id."""
        attachment_id = generate_uuid()
        orm_attachment = AttachmentORM(
            id=attachment_id,
            message_id=message_id,
            filename=filename,
            content_type=content_type,
            size_bytes=size_bytes,
            disposition=disposition,
            content_id=content_id,
            storage_path=storage_path,
            storage_backend=storage_backend,
            content_hash=content_hash,
            content=content,
            created_at=datetime.now(UTC),
        )
        self._session.add(orm_attachment)
        await self._session.flush()
        return attachment_id

    async def get_attachment(self, attachment_id: str) -> dict[str, Any] | None:
        """Get an attachment by id (including raw content), or None."""
        result = await self._session.get(AttachmentORM, attachment_id)
        if result is None:
            return None
        return self._attachment_row_to_dict(result, include_content=True)

    async def list_attachments_for_message(self, message_id: str) -> list[dict[str, Any]]:
        """List attachment metadata for a message (raw content omitted)."""
        stmt = select(AttachmentORM).where(AttachmentORM.message_id == message_id)
        result = await self._session.execute(stmt)
        return [
            self._attachment_row_to_dict(row, include_content=False)
            for row in result.scalars().all()
        ]

    async def delete_attachment(self, attachment_id: str) -> bool:
        """Delete an attachment. Returns True if deleted, False if not found."""
        orm_attachment = await self._session.get(AttachmentORM, attachment_id)
        if orm_attachment is None:
            return False
        await self._session.delete(orm_attachment)
        await self._session.flush()
        return True

    # -------------------------------------------------------------------------
    # Additional threading/message lookup methods
    # -------------------------------------------------------------------------
    async def get_message_by_provider_id(
        self,
        inbox_id: str,
        provider_message_id: str,
    ) -> Message | None:
        """Get a message by provider message ID (e.g., Mailgun ID, SES ID)."""
        stmt = select(MessageORM).where(
            MessageORM.inbox_id == inbox_id,
            MessageORM.provider_message_id == provider_message_id,
        )
        result = await self._session.execute(stmt)
        orm_message = result.scalar_one_or_none()
        return orm_message.to_pydantic() if orm_message else None

    async def get_thread_by_subject(
        self,
        inbox_id: str,
        normalized_subject: str,
        *,
        since: datetime | None = None,
    ) -> Thread | None:
        """Get the most recent thread matching a normalized subject.

        Only threads whose last message is newer than ``since`` are
        considered (default: the past 7 days).
        """
        if since is None:
            since = datetime.now(UTC) - timedelta(days=7)

        stmt = (
            select(ThreadORM)
            .where(
                ThreadORM.inbox_id == inbox_id,
                ThreadORM.normalized_subject == normalized_subject,
                ThreadORM.last_message_at >= since,
            )
            .order_by(ThreadORM.last_message_at.desc())
            # Several threads can share a normalized subject within the
            # window; take the newest one. scalar_one_or_none() would raise
            # MultipleResultsFound on >1 row, which the DESC ordering shows
            # was never the intent.
            .limit(1)
        )
        result = await self._session.execute(stmt)
        orm_thread = result.scalars().first()
        return orm_thread.to_pydantic() if orm_thread else None
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
"""PostgreSQL storage adapter (Urdr)."""
|
|
2
|
+
|
|
3
|
+
from typing import TYPE_CHECKING
|
|
4
|
+
|
|
5
|
+
from sqlalchemy import or_, select
|
|
6
|
+
|
|
7
|
+
from nornweave.urdr.adapters.base import BaseSQLAlchemyAdapter
|
|
8
|
+
from nornweave.urdr.orm import MessageORM
|
|
9
|
+
|
|
10
|
+
if TYPE_CHECKING:
|
|
11
|
+
from sqlalchemy.ext.asyncio import AsyncSession
|
|
12
|
+
|
|
13
|
+
from nornweave.models.message import Message
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class PostgresAdapter(BaseSQLAlchemyAdapter):
    """PostgreSQL implementation of StorageInterface.

    Uses asyncpg for async database access and ILIKE for case-insensitive search.
    """

    def __init__(self, session: AsyncSession) -> None:
        """Initialize with an async session (asyncpg-backed)."""
        super().__init__(session)

    async def search_messages(
        self,
        inbox_id: str,
        query: str,
        *,
        limit: int = 50,
        offset: int = 0,
    ) -> list[Message]:
        """Search messages using PostgreSQL ILIKE for case-insensitive matching."""
        needle = f"%{query}%"
        # Match either the cleaned or the raw body of any message in the inbox.
        content_filter = or_(
            MessageORM.content_clean.ilike(needle),
            MessageORM.content_raw.ilike(needle),
        )
        stmt = select(MessageORM).where(MessageORM.inbox_id == inbox_id, content_filter)
        # Newest first, then apply pagination.
        stmt = stmt.order_by(MessageORM.created_at.desc()).limit(limit).offset(offset)
        rows = await self._session.execute(stmt)
        return [orm_row.to_pydantic() for orm_row in rows.scalars().all()]
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
"""SQLite storage adapter (Urdr) for local development."""
|
|
2
|
+
|
|
3
|
+
from typing import TYPE_CHECKING
|
|
4
|
+
|
|
5
|
+
from sqlalchemy import func, or_, select
|
|
6
|
+
|
|
7
|
+
from nornweave.urdr.adapters.base import BaseSQLAlchemyAdapter
|
|
8
|
+
from nornweave.urdr.orm import MessageORM
|
|
9
|
+
|
|
10
|
+
if TYPE_CHECKING:
|
|
11
|
+
from sqlalchemy.ext.asyncio import AsyncSession
|
|
12
|
+
|
|
13
|
+
from nornweave.models.message import Message
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class SQLiteAdapter(BaseSQLAlchemyAdapter):
    """SQLite implementation of StorageInterface.

    Uses aiosqlite for async database access. SQLite LIKE is case-insensitive
    by default for ASCII, but we use lower() for consistent behavior.
    """

    def __init__(self, session: AsyncSession) -> None:
        """Initialize with an async session (aiosqlite-backed)."""
        super().__init__(session)

    async def search_messages(
        self,
        inbox_id: str,
        query: str,
        *,
        limit: int = 50,
        offset: int = 0,
    ) -> list[Message]:
        """Search messages using SQLite LIKE with lower() for case-insensitive matching."""
        # Lowercase both sides so matching is case-insensitive beyond ASCII LIKE.
        needle = f"%{query.lower()}%"
        content_filter = or_(
            func.lower(MessageORM.content_clean).like(needle),
            func.lower(MessageORM.content_raw).like(needle),
        )
        stmt = select(MessageORM).where(MessageORM.inbox_id == inbox_id, content_filter)
        # Newest first, then apply pagination.
        stmt = stmt.order_by(MessageORM.created_at.desc()).limit(limit).offset(offset)
        rows = await self._session.execute(stmt)
        return [orm_row.to_pydantic() for orm_row in rows.scalars().all()]
|
|
@@ -0,0 +1,94 @@
|
|
|
1
|
+
"""Alembic migration environment."""
|
|
2
|
+
|
|
3
|
+
import os
|
|
4
|
+
import re
|
|
5
|
+
from logging.config import fileConfig
|
|
6
|
+
|
|
7
|
+
from alembic import context
|
|
8
|
+
from sqlalchemy import engine_from_config, pool
|
|
9
|
+
|
|
10
|
+
# Import ORM models to ensure metadata is populated
|
|
11
|
+
from nornweave.urdr.orm import Base
|
|
12
|
+
|
|
13
|
+
config = context.config
|
|
14
|
+
if config.config_file_name is not None:
|
|
15
|
+
fileConfig(config.config_file_name)
|
|
16
|
+
|
|
17
|
+
# Set target_metadata from ORM Base
|
|
18
|
+
target_metadata = Base.metadata
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def get_url() -> str:
    """Get database URL from environment or config.

    Converts async URLs to sync for Alembic:
    - postgresql+asyncpg:// -> postgresql://
    - sqlite+aiosqlite:// -> sqlite://
    """
    # Environment variable wins; alembic.ini is the fallback.
    url = os.environ.get("DATABASE_URL", "") or config.get_main_option("sqlalchemy.url", "")

    if not url:
        raise ValueError(
            "DATABASE_URL environment variable or sqlalchemy.url in alembic.ini required"
        )

    # Alembic runs synchronously, so strip the async driver from the scheme.
    for async_scheme, sync_scheme in (
        ("postgresql+asyncpg://", "postgresql://"),
        ("sqlite+aiosqlite://", "sqlite://"),
    ):
        if url.startswith(async_scheme):
            url = sync_scheme + url[len(async_scheme):]

    return url
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL and not an Engine,
    though an Engine is acceptable here as well.
    """
    context.configure(
        url=get_url(),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        render_as_batch=True,  # Required for SQLite ALTER TABLE support
    )

    with context.begin_transaction():
        context.run_migrations()
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine and associate a
    connection with the context.
    """
    # Pull the ini section and force the resolved (sync) URL into it.
    section = config.get_section(config.config_ini_section, {})
    section["sqlalchemy.url"] = get_url()

    engine = engine_from_config(
        section,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with engine.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            render_as_batch=True,  # Required for SQLite ALTER TABLE support
        )
        with context.begin_transaction():
            context.run_migrations()
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
# Alembic imports this module directly; dispatch on the requested mode.
_run = run_migrations_offline if context.is_offline_mode() else run_migrations_online
_run()
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
"""${message}
|
|
2
|
+
|
|
3
|
+
Revision ID: ${up_revision}
|
|
4
|
+
Revises: ${down_revision | comma,n}
|
|
5
|
+
Create Date: ${create_date}
|
|
6
|
+
|
|
7
|
+
"""
|
|
8
|
+
from typing import Sequence, Union
|
|
9
|
+
|
|
10
|
+
from alembic import op
|
|
11
|
+
import sqlalchemy as sa
|
|
12
|
+
${imports if imports else ""}
|
|
13
|
+
|
|
14
|
+
# revision identifiers, used by Alembic.
|
|
15
|
+
revision: str = ${repr(up_revision)}
|
|
16
|
+
down_revision: Union[str, None] = ${repr(down_revision)}
|
|
17
|
+
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
|
|
18
|
+
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def upgrade() -> None:
|
|
22
|
+
${upgrades if upgrades else "pass"}
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def downgrade() -> None:
|
|
26
|
+
${downgrades if downgrades else "pass"}
|
|
File without changes
|