wappa 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of wappa might be problematic; see the release advisory for more details.
- wappa/__init__.py +85 -0
- wappa/api/__init__.py +1 -0
- wappa/api/controllers/__init__.py +10 -0
- wappa/api/controllers/webhook_controller.py +441 -0
- wappa/api/dependencies/__init__.py +15 -0
- wappa/api/dependencies/whatsapp_dependencies.py +220 -0
- wappa/api/dependencies/whatsapp_media_dependencies.py +26 -0
- wappa/api/middleware/__init__.py +7 -0
- wappa/api/middleware/error_handler.py +158 -0
- wappa/api/middleware/owner.py +99 -0
- wappa/api/middleware/request_logging.py +184 -0
- wappa/api/routes/__init__.py +6 -0
- wappa/api/routes/health.py +102 -0
- wappa/api/routes/webhooks.py +211 -0
- wappa/api/routes/whatsapp/__init__.py +15 -0
- wappa/api/routes/whatsapp/whatsapp_interactive.py +429 -0
- wappa/api/routes/whatsapp/whatsapp_media.py +440 -0
- wappa/api/routes/whatsapp/whatsapp_messages.py +195 -0
- wappa/api/routes/whatsapp/whatsapp_specialized.py +516 -0
- wappa/api/routes/whatsapp/whatsapp_templates.py +431 -0
- wappa/api/routes/whatsapp_combined.py +35 -0
- wappa/cli/__init__.py +9 -0
- wappa/cli/main.py +199 -0
- wappa/core/__init__.py +6 -0
- wappa/core/config/__init__.py +5 -0
- wappa/core/config/settings.py +161 -0
- wappa/core/events/__init__.py +41 -0
- wappa/core/events/default_handlers.py +642 -0
- wappa/core/events/event_dispatcher.py +244 -0
- wappa/core/events/event_handler.py +247 -0
- wappa/core/events/webhook_factory.py +219 -0
- wappa/core/factory/__init__.py +15 -0
- wappa/core/factory/plugin.py +68 -0
- wappa/core/factory/wappa_builder.py +326 -0
- wappa/core/logging/__init__.py +5 -0
- wappa/core/logging/context.py +100 -0
- wappa/core/logging/logger.py +343 -0
- wappa/core/plugins/__init__.py +34 -0
- wappa/core/plugins/auth_plugin.py +169 -0
- wappa/core/plugins/cors_plugin.py +128 -0
- wappa/core/plugins/custom_middleware_plugin.py +182 -0
- wappa/core/plugins/database_plugin.py +235 -0
- wappa/core/plugins/rate_limit_plugin.py +183 -0
- wappa/core/plugins/redis_plugin.py +224 -0
- wappa/core/plugins/wappa_core_plugin.py +261 -0
- wappa/core/plugins/webhook_plugin.py +253 -0
- wappa/core/types.py +108 -0
- wappa/core/wappa_app.py +546 -0
- wappa/database/__init__.py +18 -0
- wappa/database/adapter.py +107 -0
- wappa/database/adapters/__init__.py +17 -0
- wappa/database/adapters/mysql_adapter.py +187 -0
- wappa/database/adapters/postgresql_adapter.py +169 -0
- wappa/database/adapters/sqlite_adapter.py +174 -0
- wappa/domain/__init__.py +28 -0
- wappa/domain/builders/__init__.py +5 -0
- wappa/domain/builders/message_builder.py +189 -0
- wappa/domain/entities/__init__.py +5 -0
- wappa/domain/enums/messenger_platform.py +123 -0
- wappa/domain/factories/__init__.py +6 -0
- wappa/domain/factories/media_factory.py +450 -0
- wappa/domain/factories/message_factory.py +497 -0
- wappa/domain/factories/messenger_factory.py +244 -0
- wappa/domain/interfaces/__init__.py +32 -0
- wappa/domain/interfaces/base_repository.py +94 -0
- wappa/domain/interfaces/cache_factory.py +85 -0
- wappa/domain/interfaces/cache_interface.py +199 -0
- wappa/domain/interfaces/expiry_repository.py +68 -0
- wappa/domain/interfaces/media_interface.py +311 -0
- wappa/domain/interfaces/messaging_interface.py +523 -0
- wappa/domain/interfaces/pubsub_repository.py +151 -0
- wappa/domain/interfaces/repository_factory.py +108 -0
- wappa/domain/interfaces/shared_state_repository.py +122 -0
- wappa/domain/interfaces/state_repository.py +123 -0
- wappa/domain/interfaces/tables_repository.py +215 -0
- wappa/domain/interfaces/user_repository.py +114 -0
- wappa/domain/interfaces/webhooks/__init__.py +1 -0
- wappa/domain/models/media_result.py +110 -0
- wappa/domain/models/platforms/__init__.py +15 -0
- wappa/domain/models/platforms/platform_config.py +104 -0
- wappa/domain/services/__init__.py +11 -0
- wappa/domain/services/tenant_credentials_service.py +56 -0
- wappa/messaging/__init__.py +7 -0
- wappa/messaging/whatsapp/__init__.py +1 -0
- wappa/messaging/whatsapp/client/__init__.py +5 -0
- wappa/messaging/whatsapp/client/whatsapp_client.py +417 -0
- wappa/messaging/whatsapp/handlers/__init__.py +13 -0
- wappa/messaging/whatsapp/handlers/whatsapp_interactive_handler.py +653 -0
- wappa/messaging/whatsapp/handlers/whatsapp_media_handler.py +579 -0
- wappa/messaging/whatsapp/handlers/whatsapp_specialized_handler.py +434 -0
- wappa/messaging/whatsapp/handlers/whatsapp_template_handler.py +416 -0
- wappa/messaging/whatsapp/messenger/__init__.py +5 -0
- wappa/messaging/whatsapp/messenger/whatsapp_messenger.py +904 -0
- wappa/messaging/whatsapp/models/__init__.py +61 -0
- wappa/messaging/whatsapp/models/basic_models.py +65 -0
- wappa/messaging/whatsapp/models/interactive_models.py +287 -0
- wappa/messaging/whatsapp/models/media_models.py +215 -0
- wappa/messaging/whatsapp/models/specialized_models.py +304 -0
- wappa/messaging/whatsapp/models/template_models.py +261 -0
- wappa/persistence/cache_factory.py +93 -0
- wappa/persistence/json/__init__.py +14 -0
- wappa/persistence/json/cache_adapters.py +271 -0
- wappa/persistence/json/handlers/__init__.py +1 -0
- wappa/persistence/json/handlers/state_handler.py +250 -0
- wappa/persistence/json/handlers/table_handler.py +263 -0
- wappa/persistence/json/handlers/user_handler.py +213 -0
- wappa/persistence/json/handlers/utils/__init__.py +1 -0
- wappa/persistence/json/handlers/utils/file_manager.py +153 -0
- wappa/persistence/json/handlers/utils/key_factory.py +11 -0
- wappa/persistence/json/handlers/utils/serialization.py +121 -0
- wappa/persistence/json/json_cache_factory.py +76 -0
- wappa/persistence/json/storage_manager.py +285 -0
- wappa/persistence/memory/__init__.py +14 -0
- wappa/persistence/memory/cache_adapters.py +271 -0
- wappa/persistence/memory/handlers/__init__.py +1 -0
- wappa/persistence/memory/handlers/state_handler.py +250 -0
- wappa/persistence/memory/handlers/table_handler.py +280 -0
- wappa/persistence/memory/handlers/user_handler.py +213 -0
- wappa/persistence/memory/handlers/utils/__init__.py +1 -0
- wappa/persistence/memory/handlers/utils/key_factory.py +11 -0
- wappa/persistence/memory/handlers/utils/memory_store.py +317 -0
- wappa/persistence/memory/handlers/utils/ttl_manager.py +235 -0
- wappa/persistence/memory/memory_cache_factory.py +76 -0
- wappa/persistence/memory/storage_manager.py +235 -0
- wappa/persistence/redis/README.md +699 -0
- wappa/persistence/redis/__init__.py +11 -0
- wappa/persistence/redis/cache_adapters.py +285 -0
- wappa/persistence/redis/ops.py +880 -0
- wappa/persistence/redis/redis_cache_factory.py +71 -0
- wappa/persistence/redis/redis_client.py +231 -0
- wappa/persistence/redis/redis_handler/__init__.py +26 -0
- wappa/persistence/redis/redis_handler/state_handler.py +176 -0
- wappa/persistence/redis/redis_handler/table.py +158 -0
- wappa/persistence/redis/redis_handler/user.py +138 -0
- wappa/persistence/redis/redis_handler/utils/__init__.py +12 -0
- wappa/persistence/redis/redis_handler/utils/key_factory.py +32 -0
- wappa/persistence/redis/redis_handler/utils/serde.py +146 -0
- wappa/persistence/redis/redis_handler/utils/tenant_cache.py +268 -0
- wappa/persistence/redis/redis_manager.py +189 -0
- wappa/processors/__init__.py +6 -0
- wappa/processors/base_processor.py +262 -0
- wappa/processors/factory.py +550 -0
- wappa/processors/whatsapp_processor.py +810 -0
- wappa/schemas/__init__.py +6 -0
- wappa/schemas/core/__init__.py +71 -0
- wappa/schemas/core/base_message.py +499 -0
- wappa/schemas/core/base_status.py +322 -0
- wappa/schemas/core/base_webhook.py +312 -0
- wappa/schemas/core/types.py +253 -0
- wappa/schemas/core/webhook_interfaces/__init__.py +48 -0
- wappa/schemas/core/webhook_interfaces/base_components.py +293 -0
- wappa/schemas/core/webhook_interfaces/universal_webhooks.py +348 -0
- wappa/schemas/factory.py +754 -0
- wappa/schemas/webhooks/__init__.py +3 -0
- wappa/schemas/whatsapp/__init__.py +6 -0
- wappa/schemas/whatsapp/base_models.py +285 -0
- wappa/schemas/whatsapp/message_types/__init__.py +93 -0
- wappa/schemas/whatsapp/message_types/audio.py +350 -0
- wappa/schemas/whatsapp/message_types/button.py +267 -0
- wappa/schemas/whatsapp/message_types/contact.py +464 -0
- wappa/schemas/whatsapp/message_types/document.py +421 -0
- wappa/schemas/whatsapp/message_types/errors.py +195 -0
- wappa/schemas/whatsapp/message_types/image.py +424 -0
- wappa/schemas/whatsapp/message_types/interactive.py +430 -0
- wappa/schemas/whatsapp/message_types/location.py +416 -0
- wappa/schemas/whatsapp/message_types/order.py +372 -0
- wappa/schemas/whatsapp/message_types/reaction.py +271 -0
- wappa/schemas/whatsapp/message_types/sticker.py +328 -0
- wappa/schemas/whatsapp/message_types/system.py +317 -0
- wappa/schemas/whatsapp/message_types/text.py +411 -0
- wappa/schemas/whatsapp/message_types/unsupported.py +273 -0
- wappa/schemas/whatsapp/message_types/video.py +344 -0
- wappa/schemas/whatsapp/status_models.py +479 -0
- wappa/schemas/whatsapp/validators.py +454 -0
- wappa/schemas/whatsapp/webhook_container.py +438 -0
- wappa/webhooks/__init__.py +17 -0
- wappa/webhooks/core/__init__.py +71 -0
- wappa/webhooks/core/base_message.py +499 -0
- wappa/webhooks/core/base_status.py +322 -0
- wappa/webhooks/core/base_webhook.py +312 -0
- wappa/webhooks/core/types.py +253 -0
- wappa/webhooks/core/webhook_interfaces/__init__.py +48 -0
- wappa/webhooks/core/webhook_interfaces/base_components.py +293 -0
- wappa/webhooks/core/webhook_interfaces/universal_webhooks.py +441 -0
- wappa/webhooks/factory.py +754 -0
- wappa/webhooks/whatsapp/__init__.py +6 -0
- wappa/webhooks/whatsapp/base_models.py +285 -0
- wappa/webhooks/whatsapp/message_types/__init__.py +93 -0
- wappa/webhooks/whatsapp/message_types/audio.py +350 -0
- wappa/webhooks/whatsapp/message_types/button.py +267 -0
- wappa/webhooks/whatsapp/message_types/contact.py +464 -0
- wappa/webhooks/whatsapp/message_types/document.py +421 -0
- wappa/webhooks/whatsapp/message_types/errors.py +195 -0
- wappa/webhooks/whatsapp/message_types/image.py +424 -0
- wappa/webhooks/whatsapp/message_types/interactive.py +430 -0
- wappa/webhooks/whatsapp/message_types/location.py +416 -0
- wappa/webhooks/whatsapp/message_types/order.py +372 -0
- wappa/webhooks/whatsapp/message_types/reaction.py +271 -0
- wappa/webhooks/whatsapp/message_types/sticker.py +328 -0
- wappa/webhooks/whatsapp/message_types/system.py +317 -0
- wappa/webhooks/whatsapp/message_types/text.py +411 -0
- wappa/webhooks/whatsapp/message_types/unsupported.py +273 -0
- wappa/webhooks/whatsapp/message_types/video.py +344 -0
- wappa/webhooks/whatsapp/status_models.py +479 -0
- wappa/webhooks/whatsapp/validators.py +454 -0
- wappa/webhooks/whatsapp/webhook_container.py +438 -0
- wappa-0.1.0.dist-info/METADATA +269 -0
- wappa-0.1.0.dist-info/RECORD +211 -0
- wappa-0.1.0.dist-info/WHEEL +4 -0
- wappa-0.1.0.dist-info/entry_points.txt +2 -0
- wappa-0.1.0.dist-info/licenses/LICENSE +201 -0
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Redis cache factory implementation for Wappa framework.
|
|
3
|
+
|
|
4
|
+
Creates Redis-backed cache instances using the existing Redis handler infrastructure
|
|
5
|
+
with ICache adapters for uniform interface.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from ...domain.interfaces.cache_factory import ICacheFactory
|
|
9
|
+
from ...domain.interfaces.cache_interface import ICache
|
|
10
|
+
from .cache_adapters import (
|
|
11
|
+
RedisStateCacheAdapter,
|
|
12
|
+
RedisTableCacheAdapter,
|
|
13
|
+
RedisUserCacheAdapter,
|
|
14
|
+
)
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class RedisCacheFactory(ICacheFactory):
    """Produce Redis-backed ``ICache`` instances for one (tenant, user) context.

    Each ``create_*`` method hands back an adapter bound to a dedicated Redis
    pool:

    - state cache  -> "state_handler" pool (db 1)
    - user cache   -> "users" pool (db 0)
    - table cache  -> "table" pool (db 2)

    Because tenant_id/user_id are captured at construction time, callers never
    pass identity parameters to the cache methods themselves.
    """

    def __init__(self, tenant_id: str, user_id: str):
        """Capture the (tenant_id, user_id) context for every cache created here."""
        super().__init__(tenant_id, user_id)

    def create_state_cache(self) -> ICache:
        """Return a state cache backed by the "state_handler" Redis pool (db 1)."""
        adapter = RedisStateCacheAdapter(
            tenant_id=self.tenant_id,
            user_id=self.user_id,
            redis_alias="state_handler",
        )
        return adapter

    def create_user_cache(self) -> ICache:
        """Return a user cache backed by the "users" Redis pool (db 0)."""
        adapter = RedisUserCacheAdapter(
            tenant_id=self.tenant_id,
            user_id=self.user_id,
            redis_alias="users",
        )
        return adapter

    def create_table_cache(self) -> ICache:
        """Return a table cache backed by the "table" Redis pool (db 2).

        Table data is tenant-scoped: only tenant_id is injected, no user_id.
        """
        adapter = RedisTableCacheAdapter(
            tenant_id=self.tenant_id,
            redis_alias="table",
        )
        return adapter
|
|
@@ -0,0 +1,231 @@
|
|
|
1
|
+
# wappa/persistence/redis/redis_client.py
|
|
2
|
+
|
|
3
|
+
"""
|
|
4
|
+
Redis helper that is **fork-safe** and asyncio-native for Wappa framework caching.
|
|
5
|
+
|
|
6
|
+
Why so elaborate?
|
|
7
|
+
-----------------
|
|
8
|
+
• Gunicorn / Uvicorn workers often `fork()` after import time.
|
|
9
|
+
Re-using a parent-process connection in the child silently breaks
|
|
10
|
+
pub/sub and can leak file descriptors.
|
|
11
|
+
|
|
12
|
+
• Each worker therefore needs its *own* connection-pool.
|
|
13
|
+
"""
|
|
14
|
+
|
|
15
|
+
from __future__ import annotations
|
|
16
|
+
|
|
17
|
+
import logging
|
|
18
|
+
import os
|
|
19
|
+
from collections.abc import AsyncIterator
|
|
20
|
+
from contextlib import asynccontextmanager
|
|
21
|
+
from typing import ClassVar, Literal, cast
|
|
22
|
+
|
|
23
|
+
from redis.asyncio import ConnectionPool, Redis
|
|
24
|
+
|
|
25
|
+
log = logging.getLogger("RedisClient")
|
|
26
|
+
|
|
27
|
+
# Predefined Redis pool aliases with their database numbers for Wappa cache
|
|
28
|
+
PoolAlias = Literal["users", "state_handler", "table"]
|
|
29
|
+
|
|
30
|
+
POOL_DB_MAPPING = {
|
|
31
|
+
"users": 0, # User-specific cache operations
|
|
32
|
+
"state_handler": 1, # Handler state cache operations
|
|
33
|
+
"table": 2, # Table/data cache operations
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
class RedisClient:
    """
    Fork-safe, asyncio-native **multi-pool** Redis manager for Wappa cache.

    Supports exactly 3 predefined pools:
    - "users" (db 0): User-specific cache operations
    - "state_handler" (db 1): Handler state cache operations
    - "table" (db 2): Table/data cache operations

    Every worker process keeps its own pools to avoid post-fork descriptor
    reuse: the owning PID is recorded and inherited pools are discarded
    whenever a different PID touches the registry.
    """

    # Per-process registries; _pid records which process owns the entries.
    _pools: ClassVar[dict[PoolAlias, ConnectionPool]] = {}
    _clients: ClassVar[dict[PoolAlias, Redis]] = {}
    _pid: ClassVar[int | None] = None

    # ---------- life-cycle --------------------------------------------------

    @classmethod
    def setup_single_url(cls, base_url: str, *, max_connections: int = 64) -> None:
        """
        Set up all 3 Redis pools from a single base URL by appending database numbers.

        Args:
            base_url: Base Redis URL (e.g., "redis://localhost:6379")
            max_connections: Max connections per pool

        Example:
            RedisClient.setup_single_url("redis://localhost:6379")
            # Creates:
            # - users: redis://localhost:6379/0
            # - state_handler: redis://localhost:6379/1
            # - table: redis://localhost:6379/2
        """
        # If the caller already appended a database number, strip it; otherwise
        # the per-pool db suffix below would produce malformed URLs like ".../0/1".
        if base_url.rstrip("/").split("/")[-1].isdigit():
            log.warning(
                f"Base URL '{base_url}' appears to contain a database number. "
                f"Stripping it and using the remainder as the base."
            )
            base_url = "/".join(base_url.rstrip("/").split("/")[:-1])

        for alias, db_num in POOL_DB_MAPPING.items():
            url = f"{base_url.rstrip('/')}/{db_num}"
            cls._setup_pool(cast(PoolAlias, alias), url, max_connections)

    @classmethod
    def setup_multiple_urls(
        cls, urls: dict[PoolAlias, str], *, max_connections: int = 64
    ) -> None:
        """
        Set up Redis pools from explicit URLs for each pool.

        Args:
            urls: Mapping of pool alias to Redis URL (all 3 aliases required)
            max_connections: Max connections per pool

        Raises:
            ValueError: If a required alias is missing or an unknown alias is given.

        Example:
            RedisClient.setup_multiple_urls({
                "users": "redis://localhost:6379/0",
                "state_handler": "redis://cache:6379/1",
                "table": "redis://localhost:6379/2"
            })
        """
        # Validate all required aliases are provided
        missing = set(POOL_DB_MAPPING.keys()) - set(urls.keys())
        if missing:
            raise ValueError(f"Missing required pool aliases: {missing}")

        # Reject aliases outside the predefined set (keys are plain strings,
        # so direct set difference suffices).
        extra = set(urls) - set(POOL_DB_MAPPING)
        if extra:
            raise ValueError(
                f"Unknown pool aliases: {extra}. Only {list(POOL_DB_MAPPING.keys())} are allowed."
            )

        for alias, url in urls.items():
            cls._setup_pool(cast(PoolAlias, alias), url, max_connections)

    @classmethod
    def _setup_pool(cls, alias: PoolAlias, url: str, max_connections: int) -> None:
        """Create pool + client for *alias*, discarding inherited state after fork."""
        pid = os.getpid()
        if cls._pid is None:
            cls._pid = pid
        elif cls._pid != pid:
            # process forked – discard inherited pools
            cls._pools.clear()
            cls._clients.clear()
            cls._pid = pid

        if alias in cls._pools:
            log.debug(f"Redis pool '{alias}' already exists in PID {pid}")
            return

        log.info(f"Initialising Redis pool '{alias}' in PID {pid} ({url})")
        pool = ConnectionPool.from_url(
            url,
            decode_responses=True,
            encoding="utf-8",
            max_connections=max_connections,
        )
        client = Redis(connection_pool=pool)
        cls._pools[alias] = pool
        cls._clients[alias] = client

    @classmethod
    async def close(cls, alias: PoolAlias | None = None) -> None:
        """Close one or all Redis pools for this process."""
        pid = os.getpid()
        if cls._pid != pid:
            log.debug("No Redis pool to close for PID %s", pid)
            return

        aliases = [alias] if alias else list(cls._pools.keys())
        for a in aliases:
            pool = cls._pools.pop(cast(PoolAlias, a), None)
            if pool:
                log.info("Closing Redis pool '%s' in PID %s", a, pid)
                await pool.disconnect()
            cls._clients.pop(cast(PoolAlias, a), None)
        if not cls._pools:
            # All pools gone – allow a fresh setup in this process.
            cls._pid = None

    # ---------- access helpers ---------------------------------------------

    @classmethod
    async def get(cls, alias: PoolAlias = "users") -> Redis:
        """Return the Redis client for the given alias (PINGs it first).

        Raises:
            RuntimeError: If no pool was set up for *alias* in this process.
        """
        client = cls._clients.get(alias)
        if client is None or cls._pid != os.getpid():
            log.error("RedisClient.get() called before setup() in this process.")
            raise RuntimeError(f"RedisClient must be set up for alias '{alias}' first.")
        # quick health check – keep it cheap
        try:
            await client.ping()
            log.debug("Redis PING successful for '%s'.", alias)
        except Exception as exc:
            log.error("Redis ping failed for '%s': %s", alias, exc, exc_info=True)
            raise
        return client

    @classmethod
    @asynccontextmanager
    async def connection(cls, alias: PoolAlias = "users") -> AsyncIterator[Redis]:
        """
        Async context manager for Redis connection.

        Usage::

            async with RedisClient.connection("state_handler") as r:
                await r.set("key", "value")
        """
        client = await cls.get(alias)
        try:
            yield client
        finally:
            # Nothing to close – pool handles connections
            pass
|
|
196
|
+
|
|
197
|
+
|
|
198
|
+
"""
|
|
199
|
+
# RedisClient 🔌
|
|
200
|
+
|
|
201
|
+
A fork-safe, asyncio-native helper with **3 predefined Redis pools** for Wappa cache:
|
|
202
|
+
|
|
203
|
+
| Pool | Database | Purpose |
|
|
204
|
+
|--------------|----------|----------------------------|
|
|
205
|
+
| users | 0 | User-specific cache data |
|
|
206
|
+
| state_handler| 1 | Handler state cache data |
|
|
207
|
+
| table | 2 | Table/data cache |
|
|
208
|
+
|
|
209
|
+
```python
|
|
210
|
+
from wappa.persistence.redis.redis_client import RedisClient
|
|
211
|
+
|
|
212
|
+
# Option 1: Single URL (creates all 3 pools automatically)
|
|
213
|
+
RedisClient.setup_single_url("redis://localhost:6379")
|
|
214
|
+
|
|
215
|
+
# Option 2: Explicit URLs per pool
|
|
216
|
+
RedisClient.setup_multiple_urls({
|
|
217
|
+
"users": "redis://localhost:6379/0",
|
|
218
|
+
"state_handler": "redis://cache:6379/1",
|
|
219
|
+
"table": "redis://localhost:6379/2"
|
|
220
|
+
})
|
|
221
|
+
|
|
222
|
+
# Usage
|
|
223
|
+
async with RedisClient.connection("state_handler") as r:
|
|
224
|
+
await r.set("key", "value")
|
|
225
|
+
|
|
226
|
+
redis = await RedisClient.get("users")
|
|
227
|
+
await redis.hset("user:123", "name", "Alice")
|
|
228
|
+
```
|
|
229
|
+
|
|
230
|
+
All pools are fork-safe and isolated per worker process.
|
|
231
|
+
"""
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Redis Handler Module for Wappa Cache
|
|
3
|
+
|
|
4
|
+
Repository classes for Redis cache operations with clean separation of concerns.
|
|
5
|
+
Each class handles a specific cache domain: users, state handlers, and tables.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
# Core cache handlers
|
|
9
|
+
from .state_handler import RedisStateHandler
|
|
10
|
+
from .table import RedisTable
|
|
11
|
+
from .user import RedisUser
|
|
12
|
+
|
|
13
|
+
# Utils
|
|
14
|
+
from .utils import KeyFactory, TenantCache, dumps, loads
|
|
15
|
+
|
|
16
|
+
# Public names re-exported by the redis_handler package.
__all__ = [
    # Infrastructure
    "KeyFactory",
    "dumps",
    "loads",
    "TenantCache",
    # Cache Repositories
    "RedisUser",
    "RedisStateHandler",
    "RedisTable",
]
|
|
@@ -0,0 +1,176 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
from datetime import datetime
|
|
5
|
+
from typing import Any
|
|
6
|
+
|
|
7
|
+
from pydantic import BaseModel, Field
|
|
8
|
+
|
|
9
|
+
from ..ops import hget, hincrby_with_expire, hset
|
|
10
|
+
from .utils.serde import dumps
|
|
11
|
+
from .utils.tenant_cache import TenantCache
|
|
12
|
+
|
|
13
|
+
logger = logging.getLogger("RedisStateHandler")
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class RedisStateHandler(TenantCache):
    """
    Repository for handler state management.

    Extracted from RedisHandler SECTION: Handler State Management:
    - set_handler_state() -> upsert()
    - get_handler_state() -> get()
    - get_handler_state_field() -> get_field()
    - update_handler_state_field() -> update_field()
    - increment_handler_state_field() -> increment_field()
    - append_to_handler_state_list_field() -> append_to_list()
    - handler_exists() -> exists()
    - delete_handler_state() -> delete()
    - create_or_update_handler() -> merge()

    Single Responsibility: Handler state management only

    Example usage:
        handler = RedisStateHandler(tenant="mimeia", user_id="user123")
        await handler.upsert("chat_handler", {"step": 1, "context": "greeting"})
        state = await handler.get("chat_handler")
    """

    # Identity of the user whose handler state is managed (pydantic-validated).
    user_id: str = Field(..., min_length=1)
    # NOTE(review): default "handlers" is not one of RedisClient's predefined
    # pool aliases ("users"/"state_handler"/"table"); the Redis cache factory
    # overrides this with "state_handler" — confirm the default is ever valid.
    redis_alias: str = "handlers"

    def _key(self, handler_name: str) -> str:
        """Build handler key using KeyFactory"""
        return self.keys.handler(self.tenant, handler_name, self.user_id)

    # ---- Public API extracted from RedisHandler Handler methods -------------
    async def get(
        self, handler_name: str, models: type[BaseModel] | None = None
    ) -> dict[str, Any] | None:
        """
        Get full handler state hash (was get_handler_state)

        Args:
            handler_name: Name of the handler
            models: Optional BaseModel class for full object reconstruction
                e.g., HandlerState (will automatically handle nested HandlerContext, HandlerMetadata)
        """
        key = self._key(handler_name)
        result = await self._get_hash(key, models=models)
        if not result:
            # Missing state is a normal condition, so only debug-log it.
            logger.debug(
                f"Handler state not found for '{handler_name}' (user: '{self.user_id}')"
            )
        return result

    async def upsert(
        self, handler_name: str, state_data: dict[str, Any], ttl: int | None = None
    ) -> bool:
        """Set handler state, overwriting existing (Redis HSET upsert behavior)"""
        key = self._key(handler_name)
        return await self._hset_with_ttl(key, state_data, ttl)

    async def get_field(self, handler_name: str, field: str) -> Any | None:
        """Get specific field from handler state (was get_handler_state_field)"""
        key = self._key(handler_name)
        return await hget(key, field, alias=self.redis_alias)

    async def update_field(
        self, handler_name: str, field: str, value: Any, ttl: int | None = None
    ) -> bool:
        """Update single field in handler state"""
        key = self._key(handler_name)

        # NOTE: ttl=0 is falsy and takes the no-TTL branch below; pass None
        # (or a positive ttl) intentionally.
        if ttl:
            # Use inherited method with TTL renewal
            return await self._hset_with_ttl(key, {field: value}, ttl)
        else:
            # Use simple hset without TTL renewal
            serialized_value = dumps(value)
            result = await hset(
                key, field=field, value=serialized_value, alias=self.redis_alias
            )
            # HSET returns the count of newly created fields (0 on overwrite),
            # so any non-negative result is treated as success.
            return result >= 0

    async def increment_field(
        self, handler_name: str, field: str, increment: int = 1, ttl: int | None = None
    ) -> int | None:
        """Atomically increment integer field (was increment_handler_state_field)"""
        key = self._key(handler_name)

        # ttl=0 falls back to ttl_default here (truthiness of `ttl or ...`).
        new_value, expire_res = await hincrby_with_expire(
            key=key,
            field=field,
            increment=increment,
            ttl=ttl or self.ttl_default,
            alias=self.redis_alias,
        )

        # Both the increment and the expire must succeed for a usable result.
        if new_value is not None and expire_res:
            return new_value
        else:
            logger.warning(
                f"Failed to increment handler field '{field}' for '{handler_name}' (user: '{self.user_id}')"
            )
            return None

    async def append_to_list(
        self, handler_name: str, field: str, value: Any, ttl: int | None = None
    ) -> bool:
        """Append value to list field (was append_to_handler_state_list_field)"""
        key = self._key(handler_name)
        return await self._append_to_list_field(key, field, value, ttl)

    async def exists(self, handler_name: str) -> bool:
        """Check if handler state exists (was handler_exists)"""
        key = self._key(handler_name)
        return await self.key_exists(key)

    async def delete(self, handler_name: str) -> int:
        """Delete handler state (was delete_handler_state)"""
        key = self._key(handler_name)
        return await self.delete_key(key)

    async def merge(
        self,
        handler_name: str,
        state_data: dict[str, Any],
        ttl: int | None = None,
        models: type[BaseModel] | None = None,
    ) -> dict[str, Any] | None:
        """
        Merge new data with existing state and save (was create_or_update_handler)
        Returns the final merged state or None on failure

        Note: read-merge-write is not atomic; concurrent merges on the same
        handler can lose updates.

        Args:
            handler_name: Name of the handler
            state_data: New state data to merge
            ttl: Optional TTL override
            models: Optional mapping for BaseModel deserialization when reading existing state
        """
        logger.debug(f"Upsert handler '{handler_name}' for user '{self.user_id}'")

        # Get existing state with optional BaseModel deserialization
        existing_state = await self.get(handler_name, models=models) or {}

        # Merge new data with existing; new values win on key collisions.
        new_state = {
            **existing_state,
            **state_data,
            "handler_type": handler_name,
            # NOTE(review): datetime.utcnow() is naive and deprecated since
            # Python 3.12; datetime.now(timezone.utc) would add a "+00:00"
            # offset to the stored string — confirm consumers before changing.
            "timestamp": datetime.utcnow().isoformat(),
        }

        # Save merged state
        success = await self.upsert(handler_name, new_state, ttl)

        if success:
            logger.debug(
                f"Successfully upserted handler '{handler_name}' for user '{self.user_id}'"
            )
            return new_state
        else:
            logger.error(
                f"Failed to upsert handler '{handler_name}' for user '{self.user_id}'"
            )
            return None
|
|
@@ -0,0 +1,158 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
from typing import Any
|
|
5
|
+
|
|
6
|
+
from pydantic import BaseModel
|
|
7
|
+
|
|
8
|
+
from ..ops import hget, hincrby_with_expire
|
|
9
|
+
from .utils.serde import loads
|
|
10
|
+
from .utils.tenant_cache import TenantCache
|
|
11
|
+
|
|
12
|
+
logger = logging.getLogger("RedisTable")
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class RedisTable(TenantCache):
    """
    Repository for table data management (generic DataFrames/rows).

    Extracted from RedisHandler SECTION: Table Data Management:
    - set_table_data() -> upsert()
    - get_table_data() -> get()
    - get_field() -> get_field()
    - increment_table_data_field() -> increment_field()
    - append_to_table_data_list_field() -> append_to_list()
    - table_data_exists() -> exists()
    - delete_table_data() -> delete()
    - create_or_update_table_field() -> update_field()
    - find_table_by_field() -> find_by_field()
    - delete_all_tables_by_pkid() -> delete_all_by_pkid()

    Single Responsibility: Table/DataFrame data management only
    """

    # Redis connection alias used for every operation in this repository.
    redis_alias: str = "handlers"

    def _key(self, table_name: str, pkid: str) -> str:
        """Build the tenant-scoped Redis key for one table row via KeyFactory."""
        return self.keys.table(self.tenant, table_name, pkid)

    # ---- Public API extracted from RedisHandler Table methods ---------------
    async def get(
        self,
        table_name: str,
        pkid: str,
        models: type[BaseModel] | None = None,
    ) -> dict[str, Any] | None:
        """
        Get full table row data (was get_table_data)

        Args:
            table_name: Name of the table
            pkid: Primary key identifier
            models: Optional BaseModel class for full object reconstruction
                e.g., TableRow (will automatically handle nested RowMetadata, RowConfig)
        """
        row = await self._get_hash(self._key(table_name, pkid), models=models)
        if not row:
            # Missing rows are an expected condition — debug level only.
            logger.debug(f"Table data not found for '{table_name}:{pkid}'")
        return row

    async def upsert(
        self, table_name: str, pkid: str, data: dict[str, Any], ttl: int | None = None
    ) -> bool:
        """Set table row data (Redis HSET upsert behavior)"""
        return await self._hset_with_ttl(self._key(table_name, pkid), data, ttl)

    async def get_field(self, table_name: str, pkid: str, field: str) -> Any | None:
        """Get a specific field from table row data.

        Returns the deserialized value, or None when the field is absent.
        """
        raw = await hget(self._key(table_name, pkid), field, alias=self.redis_alias)
        if raw is None:
            return None
        return loads(raw)

    async def update_field(
        self, table_name: str, pkid: str, field: str, value: Any, ttl: int | None = None
    ) -> bool:
        """Update a single field in a table row (was create_or_update_table_field)."""
        return await self._hset_with_ttl(self._key(table_name, pkid), {field: value}, ttl)

    async def increment_field(
        self,
        table_name: str,
        pkid: str,
        field: str,
        increment: int = 1,
        ttl: int | None = None,
    ) -> int | None:
        """Atomically increment integer field (was increment_table_data_field)

        Returns the new counter value, or None when the increment or the
        TTL refresh did not succeed.
        """
        counter, ttl_applied = await hincrby_with_expire(
            key=self._key(table_name, pkid),
            field=field,
            increment=increment,
            ttl=ttl or self.ttl_default,
            alias=self.redis_alias,
        )

        # Guard clause: treat a failed increment OR a failed expire as failure.
        if counter is None or not ttl_applied:
            logger.warning(
                f"Failed to increment table field '{field}' for '{table_name}:{pkid}'"
            )
            return None
        return counter

    async def append_to_list(
        self, table_name: str, pkid: str, field: str, value: Any, ttl: int | None = None
    ) -> bool:
        """Append value to list field (was append_to_table_data_list_field)"""
        return await self._append_to_list_field(
            self._key(table_name, pkid), field, value, ttl
        )

    async def exists(self, table_name: str, pkid: str) -> bool:
        """Check if table row exists (was table_data_exists)"""
        return await self.key_exists(self._key(table_name, pkid))

    async def delete(self, table_name: str, pkid: str) -> int:
        """Delete table row (was delete_table_data); returns the deleted-key count."""
        return await self.delete_key(self._key(table_name, pkid))

    async def find_by_field(
        self,
        table_name: str,
        field: str,
        value: Any,
        models: type[BaseModel] | None = None,
    ) -> dict[str, Any] | None:
        """
        Find first row in table where field matches value (was find_table_by_field)

        Args:
            table_name: Name of the table
            field: Field name to search
            value: Value to match
            models: Optional BaseModel class for full object reconstruction
        """
        # Wildcard pkid turns the key factory into a scan pattern for this table.
        scan_pattern = self.keys.table(self.tenant, table_name, "*")
        return await self._find_by_field(scan_pattern, field, value, models=models)

    async def delete_all_by_pkid(self, pkid: str) -> int:
        """
        Delete all table rows across all tables with same pkid (was delete_all_tables_by_pkid)

        This creates a pattern that matches any table with the given pkid:
        tenant:df:*:pkid:safe_pkid
        """
        # Colons are key-segment separators, so sanitize them out of the pkid.
        safe_pkid = pkid.replace(":", "_")
        pattern = f"{self.tenant}:{self.keys.table_prefix}:*:{self.keys.pk_marker}:{safe_pkid}"

        logger.info(
            f"Deleting all table data with pkid '{pkid}' (pattern: '{pattern}')"
        )
        return await self._delete_by_pattern(pattern)
|