altcodepro-polydb-python 2.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51) hide show
  1. altcodepro_polydb_python-2.1.0.dist-info/METADATA +378 -0
  2. altcodepro_polydb_python-2.1.0.dist-info/RECORD +51 -0
  3. altcodepro_polydb_python-2.1.0.dist-info/WHEEL +5 -0
  4. altcodepro_polydb_python-2.1.0.dist-info/licenses/LICENSE +21 -0
  5. altcodepro_polydb_python-2.1.0.dist-info/top_level.txt +1 -0
  6. polydb/__init__.py +64 -0
  7. polydb/adapters/AzureBlobStorageAdapter.py +77 -0
  8. polydb/adapters/AzureFileStorageAdapter.py +79 -0
  9. polydb/adapters/AzureQueueAdapter.py +61 -0
  10. polydb/adapters/AzureTableStorageAdapter.py +182 -0
  11. polydb/adapters/DynamoDBAdapter.py +216 -0
  12. polydb/adapters/EFSAdapter.py +50 -0
  13. polydb/adapters/FirestoreAdapter.py +193 -0
  14. polydb/adapters/GCPStorageAdapter.py +81 -0
  15. polydb/adapters/MongoDBAdapter.py +136 -0
  16. polydb/adapters/PostgreSQLAdapter.py +453 -0
  17. polydb/adapters/PubSubAdapter.py +83 -0
  18. polydb/adapters/S3Adapter.py +86 -0
  19. polydb/adapters/S3CompatibleAdapter.py +90 -0
  20. polydb/adapters/SQSAdapter.py +84 -0
  21. polydb/adapters/VercelKVAdapter.py +327 -0
  22. polydb/adapters/__init__.py +0 -0
  23. polydb/advanced_query.py +147 -0
  24. polydb/audit/AuditStorage.py +136 -0
  25. polydb/audit/__init__.py +7 -0
  26. polydb/audit/context.py +53 -0
  27. polydb/audit/manager.py +47 -0
  28. polydb/audit/models.py +86 -0
  29. polydb/base/NoSQLKVAdapter.py +301 -0
  30. polydb/base/ObjectStorageAdapter.py +42 -0
  31. polydb/base/QueueAdapter.py +27 -0
  32. polydb/base/SharedFilesAdapter.py +32 -0
  33. polydb/base/__init__.py +0 -0
  34. polydb/batch.py +163 -0
  35. polydb/cache.py +204 -0
  36. polydb/databaseFactory.py +748 -0
  37. polydb/decorators.py +21 -0
  38. polydb/errors.py +82 -0
  39. polydb/factory.py +107 -0
  40. polydb/models.py +39 -0
  41. polydb/monitoring.py +313 -0
  42. polydb/multitenancy.py +197 -0
  43. polydb/py.typed +0 -0
  44. polydb/query.py +150 -0
  45. polydb/registry.py +71 -0
  46. polydb/retry.py +76 -0
  47. polydb/schema.py +205 -0
  48. polydb/security.py +458 -0
  49. polydb/types.py +127 -0
  50. polydb/utils.py +61 -0
  51. polydb/validation.py +131 -0
@@ -0,0 +1,27 @@
1
+ from polydb.utils import setup_logger
2
+
3
+
4
+ from abc import ABC, abstractmethod
5
+ from typing import Any, Dict, List
6
+
7
+
8
class QueueAdapter(ABC):
    """Abstract base for queue/message-broker backends.

    Concrete adapters implement send/receive/delete against a specific
    service; this base only wires up a per-class logger.
    """

    def __init__(self) -> None:
        # Logger is named after the concrete subclass for easier filtering.
        self.logger = setup_logger(type(self).__name__)

    @abstractmethod
    def send(self, message: Dict[str, Any], queue_name: str = "default") -> str:
        """Send *message* to *queue_name*; return the message id."""

    @abstractmethod
    def receive(self, queue_name: str = "default", max_messages: int = 1) -> List[Dict[str, Any]]:
        """Receive up to *max_messages* messages from *queue_name*."""

    @abstractmethod
    def delete(self, message_id: str, queue_name: str = "default") -> bool:
        """Delete the message identified by *message_id* from *queue_name*."""
@@ -0,0 +1,32 @@
1
+ from polydb.utils import setup_logger
2
+
3
+
4
+ from abc import ABC, abstractmethod
5
+ from typing import List
6
+
7
+
8
class SharedFilesAdapter(ABC):
    """Abstract base for shared/network file-storage backends."""

    def __init__(self) -> None:
        # One logger per concrete adapter class.
        self.logger = setup_logger(type(self).__name__)

    @abstractmethod
    def write(self, path: str, data: bytes) -> bool:
        """Write *data* to *path*; return True on success."""

    @abstractmethod
    def read(self, path: str) -> bytes:
        """Return the contents of the file at *path*."""

    @abstractmethod
    def delete(self, path: str) -> bool:
        """Remove the file at *path*; return True on success."""

    @abstractmethod
    def list(self, directory: str = "/") -> List[str]:
        """Return the file names under *directory*."""
File without changes
polydb/batch.py ADDED
@@ -0,0 +1,163 @@
1
+ # src/polydb/batch.py
2
+ """
3
+ Batch operations for high-throughput scenarios
4
+ """
5
+ from typing import List, Dict, Any, Optional, Union
6
+ from dataclasses import dataclass
7
+ from .types import JsonDict
8
+
9
+
10
@dataclass
class BatchResult:
    """Outcome of a batch operation.

    Each ``failed`` entry is a dict of the form ``{'data': ..., 'error': str}``.
    """

    succeeded: List[JsonDict]     # results returned by the underlying factory
    failed: List[Dict[str, Any]]  # one {'data': ..., 'error': ...} per failure
    total: int                    # number of records attempted
    success_count: int            # len(succeeded)
    error_count: int              # len(failed)
17
+
18
+
19
class BatchOperations:
    """Chunked bulk CRUD operations on top of a database factory.

    Per-record failures are collected into the result rather than raised.
    With ``fail_fast=True`` processing stops at the first failure across
    ALL chunks (previously only the current chunk was abandoned and the
    next chunk was still processed).
    """

    def __init__(self, factory):
        # The factory must expose create/update/delete (see methods below).
        self.factory = factory

    def _run(self, items, op, chunk_size, fail_fast, describe):
        """Apply *op* to every item in *items*, ``chunk_size`` at a time.

        Returns ``(succeeded, failed)``.  On failure, ``describe(item)``
        supplies the ``data`` payload recorded alongside the error.  When
        ``fail_fast`` is true, return immediately on the first failure —
        this fixes the original ``break``, which only exited the inner
        per-chunk loop and then carried on with the next chunk.
        """
        succeeded: List[JsonDict] = []
        failed: List[Dict[str, Any]] = []
        for start in range(0, len(items), chunk_size):
            for item in items[start:start + chunk_size]:
                try:
                    succeeded.append(op(item))
                except Exception as e:
                    failed.append({'data': describe(item), 'error': str(e)})
                    if fail_fast:
                        return succeeded, failed
        return succeeded, failed

    @staticmethod
    def _result(succeeded, failed, total) -> BatchResult:
        """Package the per-item outcomes into a BatchResult."""
        return BatchResult(
            succeeded=succeeded,
            failed=failed,
            total=total,
            success_count=len(succeeded),
            error_count=len(failed),
        )

    def bulk_insert(
        self,
        model: Union[type, str],
        records: List[JsonDict],
        *,
        chunk_size: int = 100,
        fail_fast: bool = False
    ) -> BatchResult:
        """Insert *records* one by one via ``factory.create``, with chunking."""
        succeeded, failed = self._run(
            records,
            lambda record: self.factory.create(model, record),
            chunk_size,
            fail_fast,
            lambda record: record,
        )
        return self._result(succeeded, failed, len(records))

    def bulk_update(
        self,
        model: Union[type, str],
        updates: List[Dict[str, Any]],  # each item: {entity_id, data}
        *,
        chunk_size: int = 100,
        fail_fast: bool = False
    ) -> BatchResult:
        """Apply each ``{entity_id, data}`` update via ``factory.update``."""
        succeeded, failed = self._run(
            updates,
            lambda update: self.factory.update(model, update['entity_id'], update['data']),
            chunk_size,
            fail_fast,
            lambda update: update,
        )
        return self._result(succeeded, failed, len(updates))

    def bulk_delete(
        self,
        model: Union[type, str],
        entity_ids: List[Any],
        *,
        chunk_size: int = 100,
        fail_fast: bool = False,
        hard: bool = False
    ) -> BatchResult:
        """Delete every id in *entity_ids* via ``factory.delete``.

        ``hard`` is forwarded to the factory (hard vs. soft delete).
        """
        succeeded, failed = self._run(
            entity_ids,
            lambda entity_id: self.factory.delete(model, entity_id, hard=hard),
            chunk_size,
            fail_fast,
            lambda entity_id: {'entity_id': entity_id},
        )
        return self._result(succeeded, failed, len(entity_ids))
137
+
138
+
139
class TransactionManager:
    """Context manager wrapping one SQL connection in a transaction.

    On clean exit the transaction is committed; on exception it is rolled
    back and the exception propagates.  The connection is ALWAYS returned
    to the adapter's pool — even if commit/rollback itself raises — and
    its previous ``autocommit`` setting is restored (the original left the
    pooled connection with ``autocommit=False`` and leaked it on a failed
    commit).
    """

    def __init__(self, sql_adapter):
        # Adapter must expose _get_connection()/_return_connection(conn).
        self.sql = sql_adapter

    def __enter__(self):
        self.conn = self.sql._get_connection()
        # Remember the pool's autocommit setting so we can restore it.
        self._prev_autocommit = getattr(self.conn, "autocommit", None)
        self.conn.autocommit = False
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        try:
            if exc_type is None:
                self.conn.commit()
            else:
                self.conn.rollback()
        finally:
            # Guarantee the connection goes back to the pool in its
            # original autocommit state, even if commit/rollback raised.
            if self._prev_autocommit is not None:
                self.conn.autocommit = self._prev_autocommit
            self.sql._return_connection(self.conn)
        return False  # never suppress the caller's exception

    def execute(self, sql: str, params: Optional[List[Any]] = None):
        """Execute *sql* with *params* inside the open transaction."""
        cursor = self.conn.cursor()
        try:
            cursor.execute(sql, params or [])
        finally:
            # Close the cursor even when execute() raises.
            cursor.close()
polydb/cache.py ADDED
@@ -0,0 +1,204 @@
1
+ # src/polydb/cache_advanced.py
2
+ """
3
+ Advanced caching with Redis support and strategies
4
+ """
5
+ from typing import Any, Dict, Optional, List
6
+ import json
7
+ import hashlib
8
+ import threading
9
+ from enum import Enum
10
+ import redis
11
+
12
class CacheStrategy(Enum):
    """Supported cache-invalidation strategies."""

    LRU = "lru"                      # evict least-recently-used entries
    LFU = "lfu"                      # evict least-frequently-used entries
    TTL = "ttl"                      # expire entries after a fixed lifetime
    WRITE_THROUGH = "write_through"  # invalidate/update cache on every write
    WRITE_BACK = "write_back"        # update cache now, flush to store later
19
+
20
+
21
class RedisCacheEngine:
    """Redis-backed distributed cache keyed by (model, query).

    Keys are ``{prefix}{model}:{md5(json(query))}``; each entry gets a
    companion ``:access_count`` key so an LFU policy can be layered on
    top.  All Redis errors in get/set are swallowed — the cache is
    strictly best-effort and must never break the data path.
    """

    def __init__(
        self,
        redis_url: Optional[str] = None,
        prefix: str = "polydb:",
        default_ttl: int = 3600
    ):
        self.prefix = prefix
        self.default_ttl = default_ttl
        self._client = None
        self._lock = threading.Lock()
        self.redis_url = redis_url
        self._initialize()

    def _initialize(self):
        """Create the Redis client (redis-py connects lazily).

        The ``redis`` import is local so that a missing package surfaces
        as the explanatory ImportError below; the original ``except
        ImportError`` could never fire because ``redis`` was already
        imported at module scope.
        """
        try:
            import redis
        except ImportError:
            raise ImportError("Redis not installed. Install with: pip install redis")

        with self._lock:
            if self._client:
                return  # already initialized by another thread
            if self.redis_url:
                self._client = redis.from_url(self.redis_url)
            else:
                import os
                self._client = redis.Redis(
                    host=os.getenv('REDIS_HOST', 'localhost'),
                    port=int(os.getenv('REDIS_PORT', '6379')),
                    db=int(os.getenv('REDIS_DB', '0')),
                    decode_responses=True
                )

    def _make_key(self, model: str, query: Dict[str, Any]) -> str:
        """Build a deterministic cache key for (model, query).

        ``sort_keys=True`` makes logically-equal queries hash identically;
        md5 is fine here — the hash is a cache key, not a security token.
        """
        query_str = json.dumps(query, sort_keys=True)
        query_hash = hashlib.md5(query_str.encode()).hexdigest()
        return f"{self.prefix}{model}:{query_hash}"

    def get(self, model: str, query: Dict[str, Any]) -> Optional[Any]:
        """Return the cached value for (model, query), or None on miss/error."""
        if not self._client:
            return None
        key = self._make_key(model, query)
        try:
            data = self._client.get(key)
            if data:
                # Track accesses so an LFU policy can rank entries.
                self._client.incr(f"{key}:access_count")
                return json.loads(data)
            return None
        except Exception:
            # Best-effort: treat any Redis failure as a cache miss.
            return None

    def set(
        self,
        model: str,
        query: Dict[str, Any],
        value: Any,
        ttl: Optional[int] = None
    ):
        """Cache *value* under (model, query) for *ttl* seconds (default TTL if None)."""
        if not self._client:
            return
        key = self._make_key(model, query)
        ttl = ttl or self.default_ttl
        try:
            self._client.setex(key, ttl, json.dumps(value))
            # The access counter expires together with the entry.
            self._client.set(f"{key}:access_count", 0, ex=ttl)
        except Exception:
            pass  # best-effort: never let cache errors break the write path

    def invalidate(
        self,
        model: str,
        query: Optional[Dict[str, Any]] = None
    ):
        """Drop one cached query, or every entry for *model* when query is None."""
        if not self._client:
            return
        if query:
            key = self._make_key(model, query)
            self._client.delete(key, f"{key}:access_count")
        else:
            # SCAN instead of KEYS: KEYS blocks the Redis server on large
            # keyspaces.  The pattern also matches the :access_count keys.
            keys = list(self._client.scan_iter(match=f"{self.prefix}{model}:*"))
            if keys:
                self._client.delete(*keys)

    def clear(self):
        """Remove every key under this engine's prefix."""
        if not self._client:
            return
        keys = list(self._client.scan_iter(match=f"{self.prefix}*"))
        if keys:
            self._client.delete(*keys)

    def get_stats(self) -> Dict[str, Any]:
        """Return server-wide hit/miss counters from Redis INFO, or {} when unavailable.

        NOTE(review): these counters cover the whole Redis instance, not
        just this prefix — confirm that is acceptable for monitoring.
        """
        if not self._client:
            return {}
        try:
            info = self._client.info('stats')
            hits = info.get('keyspace_hits', 0)
            misses = info.get('keyspace_misses', 0)
            return {
                'hits': hits,
                'misses': misses,
                # max(..., 1) guards the cold-start division by zero.
                'hit_rate': hits / max(hits + misses, 1),
            }
        except Exception:
            return {}
149
+
150
+
151
class CacheWarmer:
    """Pre-populates the cache for known-hot queries."""

    def __init__(self, factory, cache_engine):
        self.factory = factory
        self.cache = cache_engine

    def warm_model(
        self,
        model,
        queries: List[Dict[str, Any]],
        ttl: Optional[int] = None
    ):
        """Run each query through the factory and cache its results.

        Warming is strictly best-effort: any failure (read or cache
        write) is swallowed and the next query is attempted.
        """
        for query in queries:
            try:
                rows = self.factory.read(model, query=query)
                cache_name = model.__name__ if isinstance(model, type) else model
                self.cache.set(cache_name, query, rows, ttl)
            except Exception:
                continue

    def warm_popular_queries(
        self,
        model,
        limit: int = 10,
        ttl: Optional[int] = None
    ):
        """Warm the cache with the *limit* most popular queries.

        Placeholder: depends on query-log analysis that is not
        implemented yet, so this is currently a no-op.
        """
187
+
188
+
189
class CacheInvalidationStrategy:
    """Applies the configured invalidation policy to a cache engine."""

    def __init__(self, cache_engine, strategy: CacheStrategy = CacheStrategy.TTL):
        self.cache = cache_engine
        self.strategy = strategy

    def invalidate_on_write(self, model: str, data: Dict[str, Any]):
        """Drop the model's cached entries when a write-* strategy is active."""
        if self.strategy in (CacheStrategy.WRITE_THROUGH, CacheStrategy.WRITE_BACK):
            self.cache.invalidate(model)

    def invalidate_related(self, model: str, relationships: List[str]):
        """Invalidate the cache for every related model."""
        for related_model in relationships:
            self.cache.invalidate(related_model)