nornweave 0.1.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nornweave/__init__.py +3 -0
- nornweave/adapters/__init__.py +1 -0
- nornweave/adapters/base.py +5 -0
- nornweave/adapters/mailgun.py +196 -0
- nornweave/adapters/resend.py +510 -0
- nornweave/adapters/sendgrid.py +492 -0
- nornweave/adapters/ses.py +824 -0
- nornweave/cli.py +186 -0
- nornweave/core/__init__.py +26 -0
- nornweave/core/config.py +172 -0
- nornweave/core/exceptions.py +25 -0
- nornweave/core/interfaces.py +390 -0
- nornweave/core/storage.py +192 -0
- nornweave/core/utils.py +23 -0
- nornweave/huginn/__init__.py +10 -0
- nornweave/huginn/client.py +296 -0
- nornweave/huginn/config.py +52 -0
- nornweave/huginn/resources.py +165 -0
- nornweave/huginn/server.py +202 -0
- nornweave/models/__init__.py +113 -0
- nornweave/models/attachment.py +136 -0
- nornweave/models/event.py +275 -0
- nornweave/models/inbox.py +33 -0
- nornweave/models/message.py +284 -0
- nornweave/models/thread.py +172 -0
- nornweave/muninn/__init__.py +14 -0
- nornweave/muninn/tools.py +207 -0
- nornweave/search/__init__.py +1 -0
- nornweave/search/embeddings.py +1 -0
- nornweave/search/vector_store.py +1 -0
- nornweave/skuld/__init__.py +1 -0
- nornweave/skuld/rate_limiter.py +1 -0
- nornweave/skuld/scheduler.py +1 -0
- nornweave/skuld/sender.py +25 -0
- nornweave/skuld/webhooks.py +1 -0
- nornweave/storage/__init__.py +20 -0
- nornweave/storage/database.py +165 -0
- nornweave/storage/gcs.py +144 -0
- nornweave/storage/local.py +152 -0
- nornweave/storage/s3.py +164 -0
- nornweave/urdr/__init__.py +14 -0
- nornweave/urdr/adapters/__init__.py +16 -0
- nornweave/urdr/adapters/base.py +385 -0
- nornweave/urdr/adapters/postgres.py +50 -0
- nornweave/urdr/adapters/sqlite.py +51 -0
- nornweave/urdr/migrations/env.py +94 -0
- nornweave/urdr/migrations/script.py.mako +26 -0
- nornweave/urdr/migrations/versions/.gitkeep +0 -0
- nornweave/urdr/migrations/versions/20260131_0001_initial_schema.py +182 -0
- nornweave/urdr/migrations/versions/20260131_0002_extended_schema.py +241 -0
- nornweave/urdr/orm.py +641 -0
- nornweave/verdandi/__init__.py +45 -0
- nornweave/verdandi/attachments.py +471 -0
- nornweave/verdandi/content.py +420 -0
- nornweave/verdandi/headers.py +404 -0
- nornweave/verdandi/parser.py +25 -0
- nornweave/verdandi/sanitizer.py +9 -0
- nornweave/verdandi/threading.py +359 -0
- nornweave/yggdrasil/__init__.py +1 -0
- nornweave/yggdrasil/app.py +86 -0
- nornweave/yggdrasil/dependencies.py +190 -0
- nornweave/yggdrasil/middleware/__init__.py +1 -0
- nornweave/yggdrasil/middleware/auth.py +1 -0
- nornweave/yggdrasil/middleware/logging.py +1 -0
- nornweave/yggdrasil/routes/__init__.py +1 -0
- nornweave/yggdrasil/routes/v1/__init__.py +1 -0
- nornweave/yggdrasil/routes/v1/inboxes.py +124 -0
- nornweave/yggdrasil/routes/v1/messages.py +200 -0
- nornweave/yggdrasil/routes/v1/search.py +84 -0
- nornweave/yggdrasil/routes/v1/threads.py +142 -0
- nornweave/yggdrasil/routes/webhooks/__init__.py +1 -0
- nornweave/yggdrasil/routes/webhooks/mailgun.py +136 -0
- nornweave/yggdrasil/routes/webhooks/resend.py +344 -0
- nornweave/yggdrasil/routes/webhooks/sendgrid.py +15 -0
- nornweave/yggdrasil/routes/webhooks/ses.py +15 -0
- nornweave-0.1.2.dist-info/METADATA +324 -0
- nornweave-0.1.2.dist-info/RECORD +80 -0
- nornweave-0.1.2.dist-info/WHEEL +4 -0
- nornweave-0.1.2.dist-info/entry_points.txt +5 -0
- nornweave-0.1.2.dist-info/licenses/LICENSE +201 -0
nornweave/storage/gcs.py
ADDED
@@ -0,0 +1,144 @@
+"""Google Cloud Storage backend for attachments."""
+
+from datetime import timedelta
+from typing import Any, cast
+
+from nornweave.core.storage import AttachmentMetadata, AttachmentStorageBackend, StorageResult
+
+
+class GCSStorage(AttachmentStorageBackend):
+    """Store attachments in Google Cloud Storage.
+
+    Recommended for production deployments on GCP.
+    Uses signed URLs for secure downloads.
+    """
+
+    def __init__(
+        self,
+        bucket: str,
+        prefix: str = "attachments",
+        credentials_path: str | None = None,
+        project: str | None = None,
+    ) -> None:
+        """
+        Initialize GCS storage.
+
+        Args:
+            bucket: GCS bucket name
+            prefix: Blob prefix for attachments
+            credentials_path: Path to service account JSON (uses ADC if not set)
+            project: GCP project ID
+        """
+        self.bucket_name = bucket
+        self.prefix = prefix.strip("/")
+        self._credentials_path = credentials_path
+        self._project = project
+        self._client: Any = None
+        self._bucket: Any = None
+
+    @property
+    def backend_name(self) -> str:
+        return "gcs"
+
+    def _get_client(self) -> tuple[Any, Any]:
+        """Get or create GCS client and bucket (lazy initialization)."""
+        if self._client is None:
+            try:
+                from google.cloud import storage
+            except ImportError:
+                raise ImportError(
+                    "google-cloud-storage is required for GCS storage. "
+                    "Install with: pip install google-cloud-storage"
+                )
+
+            if self._credentials_path:
+                self._client = storage.Client.from_service_account_json(
+                    self._credentials_path,
+                    project=self._project,
+                )
+            else:
+                self._client = storage.Client(project=self._project)
+
+            self._bucket = self._client.bucket(self.bucket_name)
+
+        return self._client, self._bucket
+
+    def _build_blob_name(self, attachment_id: str, filename: str) -> str:
+        """Build GCS blob name."""
+        return f"{self.prefix}/{attachment_id}/{filename}"
+
+    async def store(
+        self,
+        attachment_id: str,
+        content: bytes,
+        metadata: AttachmentMetadata,
+    ) -> StorageResult:
+        """Store attachment in GCS."""
+        _, bucket = self._get_client()
+        storage_key = self._build_blob_name(attachment_id, metadata.filename)
+
+        blob = bucket.blob(storage_key)
+        blob.metadata = {
+            "message_id": metadata.message_id,
+            "content_disposition": metadata.content_disposition,
+            "content_id": metadata.content_id or "",
+        }
+
+        blob.upload_from_string(
+            content,
+            content_type=metadata.content_type,
+        )
+
+        return StorageResult(
+            storage_key=storage_key,
+            size_bytes=len(content),
+            content_hash=self.compute_hash(content),
+            backend=self.backend_name,
+        )
+
+    async def retrieve(self, storage_key: str) -> bytes:
+        """Retrieve attachment from GCS."""
+        _, bucket = self._get_client()
+        blob = bucket.blob(storage_key)
+
+        if not blob.exists():
+            raise FileNotFoundError(f"Attachment not found: {storage_key}")
+
+        return cast("bytes", blob.download_as_bytes())
+
+    async def delete(self, storage_key: str) -> bool:
+        """Delete attachment from GCS."""
+        _, bucket = self._get_client()
+        blob = bucket.blob(storage_key)
+
+        if not blob.exists():
+            return False
+
+        blob.delete()
+        return True
+
+    async def get_download_url(
+        self,
+        storage_key: str,
+        expires_in: timedelta = timedelta(hours=1),
+        filename: str | None = None,
+    ) -> str:
+        """Generate signed download URL."""
+        _, bucket = self._get_client()
+        blob = bucket.blob(storage_key)
+
+        kwargs: dict[str, Any] = {
+            "expiration": expires_in,
+        }
+
+        if filename:
+            kwargs["response_disposition"] = f'attachment; filename="{filename}"'
+
+        url = blob.generate_signed_url(**kwargs)
+        return cast("str", url)
+
+    async def exists(self, storage_key: str) -> bool:
+        """Check if attachment exists in GCS."""
+        _, bucket = self._get_client()
+        blob = bucket.blob(storage_key)
+        return cast("bool", blob.exists())
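For orientation, a minimal usage sketch of the GCS backend added above. Only the GCSStorage methods and their signatures come from the diff; the bucket, project, attachment ID, sample bytes, and the assumption that AttachmentMetadata accepts these fields as keyword arguments (the backend only reads them as attributes) are illustrative.

import asyncio
from datetime import timedelta

from nornweave.core.storage import AttachmentMetadata
from nornweave.storage.gcs import GCSStorage


async def main() -> None:
    # Hypothetical bucket/project names; credentials come from the service
    # account file or Application Default Credentials.
    storage = GCSStorage(bucket="example-attachments", project="example-project")

    # Assumed keyword construction of AttachmentMetadata; the fields below are
    # the ones the backend reads when uploading.
    meta = AttachmentMetadata(
        filename="report.pdf",
        content_type="application/pdf",
        content_disposition="attachment",
        content_id=None,
        message_id="msg_123",
    )

    result = await storage.store("att_123", b"%PDF-1.4 example bytes", meta)
    url = await storage.get_download_url(
        result.storage_key,
        expires_in=timedelta(minutes=15),
        filename="report.pdf",
    )
    print(result.storage_key, url)


asyncio.run(main())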
nornweave/storage/local.py
ADDED
@@ -0,0 +1,152 @@
+"""Local filesystem storage backend for attachments."""
+
+import hashlib
+import hmac
+import time
+from datetime import datetime, timedelta
+from pathlib import Path
+from urllib.parse import urlencode
+
+from nornweave.core.storage import AttachmentMetadata, AttachmentStorageBackend, StorageResult
+
+
+class LocalFilesystemStorage(AttachmentStorageBackend):
+    """Store attachments on local filesystem.
+
+    Good for development and simple deployments.
+    Files are organized by date: base_path/YYYY/MM/DD/attachment_id/filename
+    """
+
+    def __init__(
+        self,
+        base_path: str = "./data/attachments",
+        serve_url_prefix: str = "/v1/attachments",
+        signing_secret: str | None = None,
+    ) -> None:
+        """
+        Initialize local storage.
+
+        Args:
+            base_path: Base directory for attachment storage
+            serve_url_prefix: URL prefix for download URLs
+            signing_secret: Secret for signing download URLs (uses app secret if not set)
+        """
+        self.base_path = Path(base_path)
+        self.serve_url_prefix = serve_url_prefix.rstrip("/")
+        self._signing_secret = signing_secret or "default-signing-secret"
+
+    @property
+    def backend_name(self) -> str:
+        return "local"
+
+    async def store(
+        self,
+        attachment_id: str,
+        content: bytes,
+        metadata: AttachmentMetadata,
+    ) -> StorageResult:
+        """Store attachment on local filesystem."""
+        # Create date-based path
+        date_path = datetime.utcnow().strftime("%Y/%m/%d")
+        storage_key = f"{date_path}/{attachment_id}/{metadata.filename}"
+
+        full_path = self.base_path / storage_key
+        full_path.parent.mkdir(parents=True, exist_ok=True)
+
+        # Write content
+        full_path.write_bytes(content)
+
+        return StorageResult(
+            storage_key=storage_key,
+            size_bytes=len(content),
+            content_hash=self.compute_hash(content),
+            backend=self.backend_name,
+        )
+
+    async def retrieve(self, storage_key: str) -> bytes:
+        """Retrieve attachment from filesystem."""
+        full_path = self.base_path / storage_key
+
+        if not full_path.exists():
+            raise FileNotFoundError(f"Attachment not found: {storage_key}")
+
+        return full_path.read_bytes()
+
+    async def delete(self, storage_key: str) -> bool:
+        """Delete attachment from filesystem."""
+        full_path = self.base_path / storage_key
+
+        if not full_path.exists():
+            return False
+
+        full_path.unlink()
+
+        # Try to clean up empty parent directories
+        import contextlib
+
+        with contextlib.suppress(OSError):
+            full_path.parent.rmdir()
+
+        return True
+
+    async def get_download_url(
+        self,
+        storage_key: str,
+        expires_in: timedelta = timedelta(hours=1),
+        filename: str | None = None,
+    ) -> str:
+        """Generate a signed download URL."""
+        # Extract attachment_id from storage key
+        parts = storage_key.split("/")
+        attachment_id = parts[-2] if len(parts) >= 2 else storage_key
+
+        # Create signed token
+        expiry = int(time.time() + expires_in.total_seconds())
+        signature = self._sign_url(attachment_id, expiry)
+
+        # Build URL with query params
+        params = {"token": signature, "expires": str(expiry)}
+        if filename:
+            params["filename"] = filename
+
+        return f"{self.serve_url_prefix}/{attachment_id}/download?{urlencode(params)}"
+
+    async def exists(self, storage_key: str) -> bool:
+        """Check if attachment exists."""
+        full_path = self.base_path / storage_key
+        return full_path.exists()
+
+    def _sign_url(self, attachment_id: str, expiry: int) -> str:
+        """Create HMAC signature for URL."""
+        message = f"{attachment_id}:{expiry}"
+        signature = hmac.new(
+            self._signing_secret.encode(),
+            message.encode(),
+            hashlib.sha256,
+        ).hexdigest()[:32]
+        return signature
+
+    def verify_signed_url(
+        self,
+        attachment_id: str,
+        token: str,
+        expires: int,
+    ) -> bool:
+        """
+        Verify a signed download URL.
+
+        Args:
+            attachment_id: The attachment ID from the URL
+            token: The signature token from the URL
+            expires: The expiry timestamp from the URL
+
+        Returns:
+            True if signature is valid and not expired
+        """
+        # Check expiry
+        if expires < time.time():
+            return False
+
+        # Verify signature
+        expected = self._sign_url(attachment_id, expires)
+        return hmac.compare_digest(token, expected)
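A small sketch of how the HMAC-signed URLs above round-trip. The base path, secret, and storage key are illustrative, and it is an assumption that the download route behind serve_url_prefix verifies requests by calling verify_signed_url() with the token and expiry it extracts from the query string.

import asyncio
from urllib.parse import parse_qs, urlparse

from nornweave.storage.local import LocalFilesystemStorage


async def main() -> None:
    # Illustrative values; a real deployment would take the secret from config.
    storage = LocalFilesystemStorage(
        base_path="/tmp/nornweave-attachments",
        signing_secret="change-me",
    )

    # Storage keys follow the date-based layout described in the class docstring.
    url = await storage.get_download_url("2026/01/31/att_123/report.pdf")

    # A serving endpoint would recover these query parameters and check the
    # signature before returning the file.
    query = parse_qs(urlparse(url).query)
    ok = storage.verify_signed_url(
        attachment_id="att_123",
        token=query["token"][0],
        expires=int(query["expires"][0]),
    )
    print(url, ok)  # ok stays True until the URL expires


asyncio.run(main())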
nornweave/storage/s3.py
ADDED
@@ -0,0 +1,164 @@
+"""AWS S3 storage backend for attachments."""
+
+from datetime import timedelta
+from typing import Any, cast
+
+from nornweave.core.storage import AttachmentMetadata, AttachmentStorageBackend, StorageResult
+
+
+class S3Storage(AttachmentStorageBackend):
+    """Store attachments in AWS S3.
+
+    Recommended for production deployments.
+    Uses presigned URLs for secure downloads.
+    """
+
+    def __init__(
+        self,
+        bucket: str,
+        prefix: str = "attachments",
+        region: str = "us-east-1",
+        access_key: str | None = None,
+        secret_key: str | None = None,
+        endpoint_url: str | None = None,
+    ) -> None:
+        """
+        Initialize S3 storage.
+
+        Args:
+            bucket: S3 bucket name
+            prefix: Key prefix for attachments
+            region: AWS region
+            access_key: AWS access key (uses IAM role if not set)
+            secret_key: AWS secret key (uses IAM role if not set)
+            endpoint_url: Custom endpoint URL (for S3-compatible services)
+        """
+        self.bucket = bucket
+        self.prefix = prefix.strip("/")
+        self.region = region
+        self._access_key = access_key
+        self._secret_key = secret_key
+        self._endpoint_url = endpoint_url
+        self._client: Any = None
+
+    @property
+    def backend_name(self) -> str:
+        return "s3"
+
+    def _get_client(self) -> Any:
+        """Get or create S3 client (lazy initialization)."""
+        if self._client is None:
+            try:
+                import boto3
+            except ImportError:
+                raise ImportError(
+                    "boto3 is required for S3 storage. Install with: pip install boto3"
+                )
+
+            client_kwargs: dict[str, Any] = {
+                "service_name": "s3",
+                "region_name": self.region,
+            }
+
+            if self._access_key and self._secret_key:
+                client_kwargs["aws_access_key_id"] = self._access_key
+                client_kwargs["aws_secret_access_key"] = self._secret_key
+
+            if self._endpoint_url:
+                client_kwargs["endpoint_url"] = self._endpoint_url
+
+            self._client = boto3.client(**client_kwargs)
+
+        return self._client
+
+    def _build_key(self, attachment_id: str, filename: str) -> str:
+        """Build S3 object key."""
+        return f"{self.prefix}/{attachment_id}/{filename}"
+
+    async def store(
+        self,
+        attachment_id: str,
+        content: bytes,
+        metadata: AttachmentMetadata,
+    ) -> StorageResult:
+        """Store attachment in S3."""
+        client = self._get_client()
+        storage_key = self._build_key(attachment_id, metadata.filename)
+
+        # Upload with metadata
+        client.put_object(
+            Bucket=self.bucket,
+            Key=storage_key,
+            Body=content,
+            ContentType=metadata.content_type,
+            Metadata={
+                "message_id": metadata.message_id,
+                "content_disposition": metadata.content_disposition,
+                "content_id": metadata.content_id or "",
+            },
+        )
+
+        return StorageResult(
+            storage_key=storage_key,
+            size_bytes=len(content),
+            content_hash=self.compute_hash(content),
+            backend=self.backend_name,
+        )
+
+    async def retrieve(self, storage_key: str) -> bytes:
+        """Retrieve attachment from S3."""
+        client = self._get_client()
+
+        try:
+            response = client.get_object(Bucket=self.bucket, Key=storage_key)
+            return cast("bytes", response["Body"].read())
+        except client.exceptions.NoSuchKey:
+            raise FileNotFoundError(f"Attachment not found: {storage_key}")
+
+    async def delete(self, storage_key: str) -> bool:
+        """Delete attachment from S3."""
+        client = self._get_client()
+
+        try:
+            # Check if exists first
+            client.head_object(Bucket=self.bucket, Key=storage_key)
+        except client.exceptions.ClientError:
+            return False
+
+        client.delete_object(Bucket=self.bucket, Key=storage_key)
+        return True
+
+    async def get_download_url(
+        self,
+        storage_key: str,
+        expires_in: timedelta = timedelta(hours=1),
+        filename: str | None = None,
+    ) -> str:
+        """Generate presigned download URL."""
+        client = self._get_client()
+
+        params: dict[str, Any] = {
+            "Bucket": self.bucket,
+            "Key": storage_key,
+        }
+
+        if filename:
+            params["ResponseContentDisposition"] = f'attachment; filename="{filename}"'
+
+        url = client.generate_presigned_url(
+            "get_object",
+            Params=params,
+            ExpiresIn=int(expires_in.total_seconds()),
+        )
+
+        return cast("str", url)
+
+    async def exists(self, storage_key: str) -> bool:
+        """Check if attachment exists in S3."""
+        client = self._get_client()
+
+        try:
+            client.head_object(Bucket=self.bucket, Key=storage_key)
+            return True
+        except client.exceptions.ClientError:
+            return False
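A minimal sketch of pointing the S3 backend above at an S3-compatible service through the endpoint_url parameter it exposes. The MinIO endpoint, credentials, bucket, and object key below are placeholder assumptions; the constructor arguments and methods are the ones in the diff.

import asyncio

from nornweave.storage.s3 import S3Storage


async def main() -> None:
    # Example values for a local MinIO instance; omit access_key/secret_key to
    # fall back to the IAM-role credential chain described in the docstring.
    storage = S3Storage(
        bucket="nornweave-attachments",
        region="us-east-1",
        access_key="minioadmin",
        secret_key="minioadmin",
        endpoint_url="http://localhost:9000",
    )

    key = "attachments/att_123/report.pdf"
    if await storage.exists(key):
        data = await storage.retrieve(key)
        print(len(data), "bytes")


asyncio.run(main())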
nornweave/urdr/__init__.py
ADDED
@@ -0,0 +1,14 @@
+"""Urdr (The Well): Storage layer."""
+
+from nornweave.urdr.adapters import PostgresAdapter, SQLiteAdapter
+from nornweave.urdr.orm import Base, EventORM, InboxORM, MessageORM, ThreadORM
+
+__all__ = [
+    "Base",
+    "EventORM",
+    "InboxORM",
+    "MessageORM",
+    "PostgresAdapter",  # May be None if asyncpg not installed
+    "SQLiteAdapter",
+    "ThreadORM",
+]
nornweave/urdr/adapters/__init__.py
ADDED
@@ -0,0 +1,16 @@
+"""Storage adapters: Postgres, SQLite."""
+
+from nornweave.urdr.adapters.base import BaseSQLAlchemyAdapter
+from nornweave.urdr.adapters.sqlite import SQLiteAdapter
+
+# PostgresAdapter requires asyncpg - import conditionally
+try:
+    from nornweave.urdr.adapters.postgres import PostgresAdapter
+except ImportError:
+    PostgresAdapter = None  # type: ignore[misc, assignment]
+
+__all__ = [
+    "BaseSQLAlchemyAdapter",
+    "PostgresAdapter",
+    "SQLiteAdapter",
+]
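Because PostgresAdapter is exported as None when asyncpg is not installed, callers presumably have to guard before using it. A hypothetical guard (the fallback-to-SQLite policy is an assumption, not something the package prescribes):

from nornweave.urdr.adapters import PostgresAdapter, SQLiteAdapter

# Pick the Postgres adapter when its optional dependency is available,
# otherwise fall back to SQLite.
adapter_cls = PostgresAdapter if PostgresAdapter is not None else SQLiteAdapter
print(adapter_cls.__name__)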