kailash 0.4.2__py3-none-any.whl → 0.6.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +1 -1
- kailash/client/__init__.py +12 -0
- kailash/client/enhanced_client.py +306 -0
- kailash/core/actors/__init__.py +16 -0
- kailash/core/actors/connection_actor.py +566 -0
- kailash/core/actors/supervisor.py +364 -0
- kailash/edge/__init__.py +16 -0
- kailash/edge/compliance.py +834 -0
- kailash/edge/discovery.py +659 -0
- kailash/edge/location.py +582 -0
- kailash/gateway/__init__.py +33 -0
- kailash/gateway/api.py +289 -0
- kailash/gateway/enhanced_gateway.py +357 -0
- kailash/gateway/resource_resolver.py +217 -0
- kailash/gateway/security.py +227 -0
- kailash/middleware/auth/models.py +2 -2
- kailash/middleware/database/base_models.py +1 -7
- kailash/middleware/database/repositories.py +3 -1
- kailash/middleware/gateway/__init__.py +22 -0
- kailash/middleware/gateway/checkpoint_manager.py +398 -0
- kailash/middleware/gateway/deduplicator.py +382 -0
- kailash/middleware/gateway/durable_gateway.py +417 -0
- kailash/middleware/gateway/durable_request.py +498 -0
- kailash/middleware/gateway/event_store.py +459 -0
- kailash/nodes/admin/audit_log.py +364 -6
- kailash/nodes/admin/permission_check.py +817 -33
- kailash/nodes/admin/role_management.py +1242 -108
- kailash/nodes/admin/schema_manager.py +438 -0
- kailash/nodes/admin/user_management.py +1209 -681
- kailash/nodes/api/http.py +95 -71
- kailash/nodes/base.py +281 -164
- kailash/nodes/base_async.py +30 -31
- kailash/nodes/code/__init__.py +8 -1
- kailash/nodes/code/async_python.py +1035 -0
- kailash/nodes/code/python.py +1 -0
- kailash/nodes/data/async_sql.py +12 -25
- kailash/nodes/data/sql.py +20 -11
- kailash/nodes/data/workflow_connection_pool.py +643 -0
- kailash/nodes/rag/__init__.py +1 -4
- kailash/resources/__init__.py +40 -0
- kailash/resources/factory.py +533 -0
- kailash/resources/health.py +319 -0
- kailash/resources/reference.py +288 -0
- kailash/resources/registry.py +392 -0
- kailash/runtime/async_local.py +711 -302
- kailash/testing/__init__.py +34 -0
- kailash/testing/async_test_case.py +353 -0
- kailash/testing/async_utils.py +345 -0
- kailash/testing/fixtures.py +458 -0
- kailash/testing/mock_registry.py +495 -0
- kailash/utils/resource_manager.py +420 -0
- kailash/workflow/__init__.py +8 -0
- kailash/workflow/async_builder.py +621 -0
- kailash/workflow/async_patterns.py +766 -0
- kailash/workflow/builder.py +93 -10
- kailash/workflow/cyclic_runner.py +111 -41
- kailash/workflow/graph.py +7 -2
- kailash/workflow/resilience.py +11 -1
- {kailash-0.4.2.dist-info → kailash-0.6.0.dist-info}/METADATA +12 -7
- {kailash-0.4.2.dist-info → kailash-0.6.0.dist-info}/RECORD +64 -28
- {kailash-0.4.2.dist-info → kailash-0.6.0.dist-info}/WHEEL +0 -0
- {kailash-0.4.2.dist-info → kailash-0.6.0.dist-info}/entry_points.txt +0 -0
- {kailash-0.4.2.dist-info → kailash-0.6.0.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.4.2.dist-info → kailash-0.6.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,217 @@
|
|
1
|
+
"""Resource resolution system for gateway.
|
2
|
+
|
3
|
+
This module provides resource reference resolution to handle non-serializable
|
4
|
+
objects like database connections, HTTP clients, and caches through the API.
|
5
|
+
"""
|
6
|
+
|
7
|
+
import hashlib
|
8
|
+
import json
|
9
|
+
import logging
|
10
|
+
from dataclasses import dataclass
|
11
|
+
from typing import Any, Callable, Dict, Optional
|
12
|
+
|
13
|
+
from ..resources.factory import CacheFactory, DatabasePoolFactory, HttpClientFactory
|
14
|
+
from ..resources.registry import ResourceFactory, ResourceRegistry
|
15
|
+
from .security import SecretManager
|
16
|
+
|
17
|
+
logger = logging.getLogger(__name__)
|
18
|
+
|
19
|
+
|
20
|
+
@dataclass
class ResourceReference:
    """Serializable pointer to a gateway-managed resource.

    Carries everything the gateway needs to resolve a concrete,
    non-serializable object (database pool, HTTP client, cache, ...):
    the resource kind, its configuration, and an optional reference to
    credentials held by a secret manager.
    """

    # Resource kind, e.g. database, http_client, cache, message_queue.
    type: str
    config: Dict[str, Any]
    credentials_ref: Optional[str] = None

    def to_dict(self) -> Dict[str, Any]:
        """Return a JSON-serializable representation of this reference."""
        return dict(
            type=self.type,
            config=self.config,
            credentials_ref=self.credentials_ref,
        )

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "ResourceReference":
        """Build a reference from a dict produced by :meth:`to_dict`.

        ``credentials_ref`` is optional in *data*; ``type`` and ``config``
        are required.
        """
        return cls(
            data["type"],
            data["config"],
            data.get("credentials_ref"),
        )
|
44
|
+
|
45
|
+
|
46
|
+
class ResourceResolver:
    """Resolves resource references to actual resources.

    Each supported resource type maps to a private ``_resolve_*`` coroutine
    via a dispatch table. Resolved resources are registered in (and reused
    from) the shared :class:`ResourceRegistry`, keyed by a digest of their
    configuration, so identical references share one underlying resource.
    """

    def __init__(
        self, resource_registry: ResourceRegistry, secret_manager: SecretManager
    ):
        self.resource_registry = resource_registry
        self.secret_manager = secret_manager
        # Dispatch table: resource type -> resolver coroutine.
        self._resolvers = {
            "database": self._resolve_database,
            "http_client": self._resolve_http_client,
            "cache": self._resolve_cache,
            "message_queue": self._resolve_message_queue,
            "s3": self._resolve_s3_client,
        }

    async def resolve(self, reference: ResourceReference) -> Any:
        """Resolve a resource reference to a live resource.

        Args:
            reference: Reference describing the resource to resolve.

        Returns:
            The resolved resource (pool, client session, cache, ...).

        Raises:
            ValueError: If ``reference.type`` is not a supported type.
        """
        resolver = self._resolvers.get(reference.type)
        if not resolver:
            raise ValueError(f"Unknown resource type: {reference.type}")

        # Fetch credentials from the secret manager if the reference names any.
        credentials = None
        if reference.credentials_ref:
            credentials = await self.secret_manager.get_secret(
                reference.credentials_ref
            )

        return await resolver(reference.config, credentials)

    async def _resolve_database(
        self, config: Dict[str, Any], credentials: Optional[Dict[str, Any]]
    ) -> Any:
        """Resolve (or lazily create and register) a database connection pool."""
        # Merge credentials into a copy so the caller's config stays untouched.
        connection_config = {**config}
        if credentials:
            # Only adopt known database credential fields.
            for key in ["user", "password", "username", "host", "port", "database"]:
                if key in credentials:
                    connection_config[key] = credentials[key]

        # Key the pool by a digest of its full configuration so identical
        # configs share one pool. (md5 here is a cache key, not security.)
        config_str = json.dumps(connection_config, sort_keys=True)
        pool_key = f"db_{hashlib.md5(config_str.encode()).hexdigest()[:8]}"

        try:
            # Reuse an existing pool when one is already registered.
            return await self.resource_registry.get_resource(pool_key)
        # NOTE: was a bare `except:`, which would also swallow
        # asyncio.CancelledError / KeyboardInterrupt.
        except Exception:
            # Not registered yet: register a factory and create the pool.
            factory = DatabasePoolFactory(**connection_config)

            async def health_check(pool):
                # A pool is healthy if a trivial query succeeds.
                try:
                    async with pool.acquire() as conn:
                        await conn.fetchval("SELECT 1")
                        return True
                except Exception:
                    return False

            async def cleanup(pool):
                await pool.close()

            self.resource_registry.register_factory(
                pool_key, factory, health_check=health_check, cleanup_handler=cleanup
            )
            return await self.resource_registry.get_resource(pool_key)

    async def _resolve_http_client(
        self, config: Dict[str, Any], credentials: Optional[Dict[str, Any]]
    ) -> Any:
        """Resolve (or lazily create and register) a shared HTTP client."""
        if credentials:
            # Work on copies (including the nested headers dict) so credential
            # headers never leak back into the caller's config.
            config = {**config, "headers": dict(config.get("headers") or {})}
            if "api_key" in credentials:
                config["headers"]["Authorization"] = f"Bearer {credentials['api_key']}"
            elif "token" in credentials:
                config["headers"]["Authorization"] = f"Bearer {credentials['token']}"
            elif "headers" in credentials:
                config["headers"].update(credentials["headers"])

        # Key the client by a digest of its effective configuration.
        config_str = json.dumps(config, sort_keys=True)
        client_key = f"http_{hashlib.md5(config_str.encode()).hexdigest()[:8]}"

        try:
            return await self.resource_registry.get_resource(client_key)
        except Exception:  # not registered yet (was a bare `except:`)
            factory = HttpClientFactory(**config)

            async def cleanup(session):
                await session.close()

            self.resource_registry.register_factory(
                client_key, factory, cleanup_handler=cleanup
            )
            return await self.resource_registry.get_resource(client_key)

    async def _resolve_cache(
        self, config: Dict[str, Any], credentials: Optional[Dict[str, Any]]
    ) -> Any:
        """Resolve (or lazily create and register) a cache client."""
        if credentials and "password" in credentials:
            # Copy so the password is not written into the caller's config.
            config = {**config, "password": credentials["password"]}

        # Caches are keyed by endpoint rather than full config.
        cache_key = (
            f"cache_{config.get('host', 'localhost')}_{config.get('port', 6379)}"
        )

        try:
            return await self.resource_registry.get_resource(cache_key)
        except Exception:  # not registered yet (was a bare `except:`)
            factory = CacheFactory(**config)

            async def health_check(cache):
                try:
                    await cache.ping()
                    return True
                except Exception:
                    return False

            async def cleanup(cache):
                await cache.aclose()

            self.resource_registry.register_factory(
                cache_key, factory, health_check=health_check, cleanup_handler=cleanup
            )
            return await self.resource_registry.get_resource(cache_key)

    async def _resolve_message_queue(
        self, config: Dict[str, Any], credentials: Optional[Dict[str, Any]]
    ) -> Any:
        """Resolve a message queue resource (placeholder, not implemented).

        Raises:
            NotImplementedError: Always, until a concrete backend is added.
        """
        queue_type = config.get("type", "rabbitmq")

        if queue_type == "rabbitmq":
            # Would implement RabbitMQ connection
            pass
        elif queue_type == "kafka":
            # Would implement Kafka connection
            pass

        raise NotImplementedError(f"Message queue type {queue_type} not implemented")

    async def _resolve_s3_client(
        self, config: Dict[str, Any], credentials: Optional[Dict[str, Any]]
    ) -> Any:
        """Resolve an S3 client resource (placeholder, not implemented).

        Raises:
            NotImplementedError: Always, until boto3/aioboto3 support is added.
        """
        if credentials:
            # Copy before injecting AWS credentials so the caller's config
            # never receives secret material.
            config = {**config}
            if "access_key" in credentials:
                config["aws_access_key_id"] = credentials["access_key"]
            if "secret_key" in credentials:
                config["aws_secret_access_key"] = credentials["secret_key"]

        raise NotImplementedError("S3 client resolution not yet implemented")
|
@@ -0,0 +1,227 @@
|
|
1
|
+
"""Security and secret management for gateway.
|
2
|
+
|
3
|
+
This module provides secure credential management with encryption
|
4
|
+
and multiple backend options for storing secrets.
|
5
|
+
"""
|
6
|
+
|
7
|
+
import asyncio
|
8
|
+
import base64
|
9
|
+
import json
|
10
|
+
import logging
|
11
|
+
import os
|
12
|
+
from abc import ABC, abstractmethod
|
13
|
+
from datetime import UTC, datetime, timedelta
|
14
|
+
from typing import Any, Dict, Optional, Tuple
|
15
|
+
|
16
|
+
from cryptography.fernet import Fernet
|
17
|
+
|
18
|
+
logger = logging.getLogger(__name__)
|
19
|
+
|
20
|
+
|
21
|
+
class SecretNotFoundError(Exception):
    """Raised when a requested secret cannot be located in the backend."""
|
25
|
+
|
26
|
+
|
27
|
+
class SecretBackend(ABC):
    """Abstract interface for secret storage backends.

    Concrete backends (environment variables, files, external vaults)
    implement asynchronous get/store/delete keyed by a reference name.
    """

    @abstractmethod
    async def get_secret(self, reference: str) -> Dict[str, Any]:
        """Return the secret stored under *reference*."""

    @abstractmethod
    async def store_secret(self, reference: str, secret: Dict[str, Any]) -> None:
        """Persist *secret* under *reference*."""

    @abstractmethod
    async def delete_secret(self, reference: str) -> None:
        """Remove the secret stored under *reference*."""
|
44
|
+
|
45
|
+
|
46
|
+
class SecretManager:
    """Manages secrets for resource credentials.

    Wraps a :class:`SecretBackend` with Fernet encryption for payloads
    carrying the ``encrypted:`` marker and a TTL-bounded in-memory cache
    guarded by an :class:`asyncio.Lock`.
    """

    def __init__(
        self,
        backend: Optional[SecretBackend] = None,
        encryption_key: Optional[str] = None,
        cache_ttl: int = 300,  # seconds; default 5 minutes
    ):
        self.backend = backend or EnvironmentSecretBackend()
        # reference -> (secret, time it was cached)
        self._cache: Dict[str, Tuple[Any, datetime]] = {}
        self._ttl = timedelta(seconds=cache_ttl)
        self._lock = asyncio.Lock()
        self._cipher = self._build_cipher(encryption_key)

    def _build_cipher(self, encryption_key: Optional[str]) -> "Fernet":
        """Create the Fernet cipher from an explicit key, the environment, or a fresh key."""
        if encryption_key:
            return Fernet(encryption_key.encode())
        key = os.environ.get("KAILASH_ENCRYPTION_KEY")
        if not key:
            # Warning: This is not secure for production!
            logger.warning(
                "Using default encryption key - not secure for production!"
            )
            # Generate a proper Fernet key (per-instance; secrets encrypted
            # with it cannot be read by other instances).
            key = Fernet.generate_key()
        elif isinstance(key, str):
            key = key.encode()
        return Fernet(key)

    def _maybe_decrypt(self, raw: Any) -> Any:
        """Decrypt payloads carrying the ``encrypted:`` marker; pass others through.

        Handles both a bare ``"encrypted:..."`` string and the shape
        ``{"value": "encrypted:..."}`` that some backends return.
        """
        marker = "encrypted:"
        if isinstance(raw, str) and raw.startswith(marker):
            plaintext = self._cipher.decrypt(raw[len(marker):].encode()).decode()
            return json.loads(plaintext)
        if isinstance(raw, dict) and "value" in raw:
            inner = raw["value"]
            if isinstance(inner, str) and inner.startswith(marker):
                plaintext = self._cipher.decrypt(inner[len(marker):].encode()).decode()
                return json.loads(plaintext)
        return raw

    async def get_secret(self, reference: str) -> Dict[str, Any]:
        """Return the secret for *reference*, serving from cache while fresh."""
        async with self._lock:
            entry = self._cache.get(reference)
            if entry is not None:
                value, stored_at = entry
                if datetime.now(UTC) - stored_at < self._ttl:
                    return value
                # Stale entry: drop it and fall through to a backend fetch.
                del self._cache[reference]

        # Backend I/O happens outside the lock so a slow fetch does not
        # block unrelated lookups.
        payload = await self.backend.get_secret(reference)
        secret = self._maybe_decrypt(payload)

        async with self._lock:
            self._cache[reference] = (secret, datetime.now(UTC))

        return secret

    async def store_secret(
        self, reference: str, secret: Dict[str, Any], encrypt: bool = True
    ) -> None:
        """Store a secret (encrypted by default) and invalidate any cached copy."""
        if encrypt:
            token = self._cipher.encrypt(json.dumps(secret).encode())
            await self.backend.store_secret(reference, f"encrypted:{token.decode()}")
        else:
            await self.backend.store_secret(reference, secret)

        # Invalidate so the next read reflects the new value.
        async with self._lock:
            self._cache.pop(reference, None)

    async def delete_secret(self, reference: str) -> None:
        """Delete a secret from the backend and drop it from the cache."""
        await self.backend.delete_secret(reference)

        async with self._lock:
            self._cache.pop(reference, None)

    async def clear_cache(self):
        """Drop every cached secret."""
        async with self._lock:
            self._cache.clear()
|
146
|
+
|
147
|
+
|
148
|
+
class EnvironmentSecretBackend(SecretBackend):
    """Secret backend backed by process environment variables.

    A reference maps to the variable ``<prefix><REFERENCE_UPPERCASED>``.
    Intended for development; mutating ``os.environ`` is not a
    production-grade secret store.
    """

    def __init__(self, prefix: str = "KAILASH_SECRET_"):
        self.prefix = prefix

    def _env_name(self, reference: str) -> str:
        """Map a secret reference to its environment-variable name."""
        return f"{self.prefix}{reference.upper()}"

    async def get_secret(self, reference: str) -> Dict[str, Any]:
        """Read a secret from the environment; JSON payloads are parsed.

        Raises:
            SecretNotFoundError: If the variable is unset (or empty).
        """
        raw = os.environ.get(self._env_name(reference))
        if not raw:
            raise SecretNotFoundError(f"Secret {reference} not found")

        try:
            return json.loads(raw)
        except json.JSONDecodeError:
            # Plain (non-JSON) values are wrapped as a simple key/value pair.
            return {"value": raw}

    async def store_secret(self, reference: str, secret: Any) -> None:
        """Write a secret into the environment (not recommended for production)."""
        payload = json.dumps(secret) if isinstance(secret, dict) else str(secret)
        os.environ[self._env_name(reference)] = payload

    async def delete_secret(self, reference: str) -> None:
        """Remove the secret's environment variable if it is present."""
        os.environ.pop(self._env_name(reference), None)
|
184
|
+
|
185
|
+
|
186
|
+
class FileSecretBackend(SecretBackend):
    """Secret backend using per-secret files on local disk.

    Each secret lives in ``<secrets_dir>/<reference>.json`` with 0600
    permissions. Non-JSON file contents (e.g. the raw ``encrypted:...``
    strings written by ``SecretManager.store_secret(encrypt=True)``) are
    returned as ``{"value": <text>}``, matching EnvironmentSecretBackend,
    instead of raising ``json.JSONDecodeError``.
    """

    def __init__(self, secrets_dir: str = "/etc/kailash/secrets"):
        self.secrets_dir = secrets_dir
        os.makedirs(secrets_dir, exist_ok=True)

    def _secret_path(self, reference: str) -> str:
        """Return the file path for *reference*, refusing path traversal.

        Raises:
            ValueError: If *reference* is empty or contains path components
                (e.g. ``"../other"`` or an absolute path) that could escape
                ``secrets_dir``.
        """
        if not reference or os.path.basename(reference) != reference:
            raise ValueError(f"Invalid secret reference: {reference!r}")
        return os.path.join(self.secrets_dir, f"{reference}.json")

    async def get_secret(self, reference: str) -> Dict[str, Any]:
        """Load a secret from its file.

        Raises:
            SecretNotFoundError: If no file exists for *reference*.
        """
        file_path = self._secret_path(reference)

        try:
            with open(file_path, "r") as f:
                content = f.read()
        except FileNotFoundError:
            raise SecretNotFoundError(f"Secret {reference} not found") from None

        try:
            return json.loads(content)
        except json.JSONDecodeError:
            # Raw string payloads (e.g. "encrypted:...") are wrapped the same
            # way EnvironmentSecretBackend wraps non-JSON values, so
            # SecretManager can decrypt them on read.
            return {"value": content}

    async def store_secret(self, reference: str, secret: Any) -> None:
        """Write a secret to its file with restrictive (0600) permissions."""
        file_path = self._secret_path(reference)

        # Create the file with 0600 from the start so there is no window in
        # which it is readable under the default umask.
        fd = os.open(file_path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o600)
        with os.fdopen(fd, "w") as f:
            if isinstance(secret, str):
                f.write(secret)
            else:
                json.dump(secret, f)

        # Tighten permissions on pre-existing files too (O_CREAT only applies
        # the mode when the file is newly created).
        os.chmod(file_path, 0o600)

    async def delete_secret(self, reference: str) -> None:
        """Delete the secret's file if it exists."""
        file_path = self._secret_path(reference)
        if os.path.exists(file_path):
            os.remove(file_path)
|
221
|
+
|
222
|
+
|
223
|
+
# For production, you would implement:
|
224
|
+
# - VaultSecretBackend for HashiCorp Vault
|
225
|
+
# - AWSSecretsManagerBackend for AWS Secrets Manager
|
226
|
+
# - AzureKeyVaultBackend for Azure Key Vault
|
227
|
+
# - GCPSecretManagerBackend for Google Cloud Secret Manager
|
@@ -6,7 +6,7 @@ These models can be imported anywhere in the codebase safely.
|
|
6
6
|
"""
|
7
7
|
|
8
8
|
from dataclasses import dataclass
|
9
|
-
from datetime import datetime
|
9
|
+
from datetime import UTC, datetime
|
10
10
|
from typing import List, Optional
|
11
11
|
|
12
12
|
|
@@ -102,7 +102,7 @@ class RefreshTokenData:
|
|
102
102
|
|
103
103
|
def __post_init__(self):
|
104
104
|
if self.created_at is None:
|
105
|
-
self.created_at = datetime.
|
105
|
+
self.created_at = datetime.now(UTC)
|
106
106
|
|
107
107
|
|
108
108
|
@dataclass
|
@@ -8,13 +8,7 @@ import uuid
|
|
8
8
|
from datetime import datetime, timezone
|
9
9
|
from typing import Any, Dict, List, Optional
|
10
10
|
|
11
|
-
from sqlalchemy import
|
12
|
-
JSON,
|
13
|
-
Boolean,
|
14
|
-
CheckConstraint,
|
15
|
-
Column,
|
16
|
-
DateTime,
|
17
|
-
)
|
11
|
+
from sqlalchemy import JSON, Boolean, CheckConstraint, Column, DateTime
|
18
12
|
from sqlalchemy import Enum as SQLEnum
|
19
13
|
from sqlalchemy import (
|
20
14
|
Float,
|
@@ -56,7 +56,9 @@ class BaseRepository:
|
|
56
56
|
"""Execute database query using SDK node."""
|
57
57
|
try:
|
58
58
|
if self.use_async:
|
59
|
-
result = await self.db_node.
|
59
|
+
result = await self.db_node.execute_async(
|
60
|
+
query=query, params=params or {}
|
61
|
+
)
|
60
62
|
else:
|
61
63
|
result = self.db_node.execute(query=query, params=params or {})
|
62
64
|
|
@@ -0,0 +1,22 @@
|
|
1
|
+
"""Durable gateway implementation for production-grade request handling.
|
2
|
+
|
3
|
+
This module provides:
|
4
|
+
- Request durability with checkpointing
|
5
|
+
- Automatic deduplication
|
6
|
+
- Event sourcing for full auditability
|
7
|
+
- Long-running request support
|
8
|
+
"""
|
9
|
+
|
10
|
+
from .checkpoint_manager import CheckpointManager
|
11
|
+
from .deduplicator import RequestDeduplicator
|
12
|
+
from .durable_request import DurableRequest, RequestState
|
13
|
+
from .event_store import EventStore, RequestEvent
|
14
|
+
|
15
|
+
__all__ = [
|
16
|
+
"DurableRequest",
|
17
|
+
"RequestState",
|
18
|
+
"CheckpointManager",
|
19
|
+
"RequestDeduplicator",
|
20
|
+
"EventStore",
|
21
|
+
"RequestEvent",
|
22
|
+
]
|