altcodepro-polydb-python 2.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. altcodepro_polydb_python-2.1.0.dist-info/METADATA +378 -0
  2. altcodepro_polydb_python-2.1.0.dist-info/RECORD +51 -0
  3. altcodepro_polydb_python-2.1.0.dist-info/WHEEL +5 -0
  4. altcodepro_polydb_python-2.1.0.dist-info/licenses/LICENSE +21 -0
  5. altcodepro_polydb_python-2.1.0.dist-info/top_level.txt +1 -0
  6. polydb/__init__.py +64 -0
  7. polydb/adapters/AzureBlobStorageAdapter.py +77 -0
  8. polydb/adapters/AzureFileStorageAdapter.py +79 -0
  9. polydb/adapters/AzureQueueAdapter.py +61 -0
  10. polydb/adapters/AzureTableStorageAdapter.py +182 -0
  11. polydb/adapters/DynamoDBAdapter.py +216 -0
  12. polydb/adapters/EFSAdapter.py +50 -0
  13. polydb/adapters/FirestoreAdapter.py +193 -0
  14. polydb/adapters/GCPStorageAdapter.py +81 -0
  15. polydb/adapters/MongoDBAdapter.py +136 -0
  16. polydb/adapters/PostgreSQLAdapter.py +453 -0
  17. polydb/adapters/PubSubAdapter.py +83 -0
  18. polydb/adapters/S3Adapter.py +86 -0
  19. polydb/adapters/S3CompatibleAdapter.py +90 -0
  20. polydb/adapters/SQSAdapter.py +84 -0
  21. polydb/adapters/VercelKVAdapter.py +327 -0
  22. polydb/adapters/__init__.py +0 -0
  23. polydb/advanced_query.py +147 -0
  24. polydb/audit/AuditStorage.py +136 -0
  25. polydb/audit/__init__.py +7 -0
  26. polydb/audit/context.py +53 -0
  27. polydb/audit/manager.py +47 -0
  28. polydb/audit/models.py +86 -0
  29. polydb/base/NoSQLKVAdapter.py +301 -0
  30. polydb/base/ObjectStorageAdapter.py +42 -0
  31. polydb/base/QueueAdapter.py +27 -0
  32. polydb/base/SharedFilesAdapter.py +32 -0
  33. polydb/base/__init__.py +0 -0
  34. polydb/batch.py +163 -0
  35. polydb/cache.py +204 -0
  36. polydb/databaseFactory.py +748 -0
  37. polydb/decorators.py +21 -0
  38. polydb/errors.py +82 -0
  39. polydb/factory.py +107 -0
  40. polydb/models.py +39 -0
  41. polydb/monitoring.py +313 -0
  42. polydb/multitenancy.py +197 -0
  43. polydb/py.typed +0 -0
  44. polydb/query.py +150 -0
  45. polydb/registry.py +71 -0
  46. polydb/retry.py +76 -0
  47. polydb/schema.py +205 -0
  48. polydb/security.py +458 -0
  49. polydb/types.py +127 -0
  50. polydb/utils.py +61 -0
  51. polydb/validation.py +131 -0
@@ -0,0 +1,193 @@
1
+ # src/polydb/adapters/FirestoreAdapter.py
2
+ import os
3
+ import threading
4
+ from typing import Any, Dict, List, Optional
5
+ from google.cloud import firestore
6
+ from google.cloud import storage
7
+ from google.cloud.firestore import Client
8
+ from polydb.base.NoSQLKVAdapter import NoSQLKVAdapter
9
+
10
+ from ..errors import NoSQLError, ConnectionError
11
+ from ..retry import retry
12
+ from ..types import JsonDict
13
+ from ..models import PartitionConfig
14
+
15
+
16
class FirestoreAdapter(NoSQLKVAdapter):
    """Firestore key-value adapter with GCS overflow for oversized documents.

    Firestore caps a single document at ~1MB. Payloads larger than that are
    uploaded to a GCS bucket and a small reference document (blob key, size,
    MD5 checksum) is stored in Firestore in their place; reads transparently
    follow the reference back to GCS and verify the checksum.
    """

    # Firestore's per-document size limit.
    FIRESTORE_MAX_SIZE = 1024 * 1024  # 1MB

    def __init__(self, partition_config: Optional[PartitionConfig] = None):
        super().__init__(partition_config)
        self.max_size = self.FIRESTORE_MAX_SIZE
        # Bucket used for documents that exceed the Firestore size limit.
        self.bucket_name = os.getenv("GCS_OVERFLOW_BUCKET", "firestore-overflow")
        self._client: Optional[Client] = None
        self._storage_client = None
        self._bucket = None
        self._client_lock = threading.Lock()
        self._initialize_client()

    def _initialize_client(self):
        """Create the Firestore and GCS clients once (thread-safe).

        Raises:
            ConnectionError: if either client cannot be constructed.
        """
        try:
            with self._client_lock:
                if not self._client:
                    self._client = firestore.Client()
                    self._storage_client = storage.Client()
                    self._bucket = self._storage_client.bucket(self.bucket_name)

                    # Best-effort bucket creation: ignore "already exists"
                    # (and permission) errors so startup does not fail when
                    # the bucket was pre-provisioned. Narrowed from a bare
                    # `except:` which also swallowed KeyboardInterrupt.
                    try:
                        self._bucket.create()
                    except Exception:
                        pass

                    self.logger.info("Firestore initialized with GCS overflow")
        except Exception as e:
            raise ConnectionError(f"Firestore init failed: {str(e)}")

    def _get_collection(self, model: type) -> Any:
        """Resolve the Firestore collection for *model*.

        Collection name comes from the model's ``__polydb__`` metadata
        (``collection`` then ``table``), falling back to the lowercased
        class name. Re-initializes the client lazily if it was never set.
        """
        if not self._client:
            self._initialize_client()

        meta = getattr(model, '__polydb__', {})
        collection_name = meta.get('collection') or meta.get('table') or model.__name__.lower()
        return self._client.collection(collection_name)  # type: ignore

    @retry(max_attempts=3, delay=1.0, exceptions=(NoSQLError,))
    def _put_raw(self, model: type, pk: str, rk: str, data: JsonDict) -> JsonDict:
        """Store a record, spilling payloads over 1MB to GCS.

        Returns a small dict with the keys and composed document id.

        Raises:
            NoSQLError: on any Firestore/GCS failure, or when an oversized
                payload must overflow but no GCS bucket is available.
        """
        try:
            import json
            import hashlib

            doc_id = f"{pk}_{rk}"
            data_copy = dict(data)
            data_copy['_pk'] = pk
            data_copy['_rk'] = rk

            # Serialized size decides whether the payload fits in Firestore.
            data_bytes = json.dumps(data_copy).encode()
            data_size = len(data_bytes)

            if data_size > self.FIRESTORE_MAX_SIZE:
                # MD5 here is a content checksum only, not a security measure.
                blob_id = hashlib.md5(data_bytes).hexdigest()
                blob_key = f"overflow/{pk}/{rk}/{blob_id}.json"

                # Bug fix: previously the reference document was written even
                # when the GCS upload was skipped (bucket unset), leaving a
                # dangling pointer that could never be resolved. Fail fast.
                if not self._bucket:
                    raise NoSQLError(
                        f"Payload of {data_size} bytes exceeds Firestore limit "
                        "and no GCS overflow bucket is available"
                    )

                blob = self._bucket.blob(blob_key)
                blob.upload_from_string(data_bytes)
                self.logger.info(f"Stored overflow to GCS: {blob_key} ({data_size} bytes)")

                # Store only a reference in Firestore; the payload lives in GCS.
                reference_data = {
                    '_pk': pk,
                    '_rk': rk,
                    '_overflow': True,
                    '_blob_key': blob_key,
                    '_size': data_size,
                    '_checksum': blob_id,
                }

                collection = self._get_collection(model)
                collection.document(doc_id).set(reference_data)
            else:
                # Small enough: store the document directly in Firestore.
                collection = self._get_collection(model)
                collection.document(doc_id).set(data_copy)

            return {'_pk': pk, '_rk': rk, 'id': doc_id}
        except NoSQLError:
            # Don't re-wrap our own errors (keeps the original message).
            raise
        except Exception as e:
            raise NoSQLError(f"Firestore put failed: {str(e)}")

    @retry(max_attempts=3, delay=1.0, exceptions=(NoSQLError,))
    def _get_raw(self, model: type, pk: str, rk: str) -> Optional[JsonDict]:
        """Fetch a record, following a GCS overflow reference if present.

        Returns:
            The stored document as a dict, or None when it does not exist.

        Raises:
            NoSQLError: on Firestore/GCS failure, on checksum mismatch, or
                when an overflow reference cannot be resolved.
        """
        try:
            import json
            import hashlib

            doc_id = f"{pk}_{rk}"
            collection = self._get_collection(model)
            doc = collection.document(doc_id).get()

            if not doc.exists:
                return None

            doc_data = doc.to_dict()

            if doc_data.get('_overflow'):
                blob_key = doc_data.get('_blob_key')
                checksum = doc_data.get('_checksum')

                # Bug fix: an unresolvable overflow reference previously fell
                # through and returned the bare reference metadata as if it
                # were the payload. Surface the problem instead.
                if not blob_key or not self._bucket:
                    raise NoSQLError(
                        f"Overflow reference for {doc_id} cannot be resolved "
                        "(missing blob key or GCS bucket)"
                    )

                blob = self._bucket.blob(blob_key)
                blob_data = blob.download_as_bytes()

                # Verify integrity against the checksum recorded at put time.
                actual_checksum = hashlib.md5(blob_data).hexdigest()
                if actual_checksum != checksum:
                    raise NoSQLError(f"Checksum mismatch: expected {checksum}, got {actual_checksum}")

                retrieved = json.loads(blob_data.decode())
                self.logger.debug(f"Retrieved overflow from GCS: {blob_key}")
                return retrieved

            return doc_data
        except NoSQLError:
            # Preserve specific errors (e.g. checksum mismatch) instead of
            # re-wrapping them into a generic "get failed".
            raise
        except Exception as e:
            raise NoSQLError(f"Firestore get failed: {str(e)}")

    @retry(max_attempts=3, delay=1.0, exceptions=(NoSQLError,))
    def _query_raw(self, model: type, filters: Dict[str, Any], limit: Optional[int]) -> List[JsonDict]:
        """Run a filtered query against the model's collection.

        Filter keys support Django-style suffixes (``__gt``, ``__gte``,
        ``__lt``, ``__lte``, ``__in``); anything else is an equality match.

        Raises:
            NoSQLError: on any Firestore failure.
        """
        try:
            collection = self._get_collection(model)
            query = collection

            for field, value in filters.items():
                if field.endswith('__gte'):
                    query = query.where(field[:-5], '>=', value)
                elif field.endswith('__gt'):
                    query = query.where(field[:-4], '>', value)
                elif field.endswith('__lte'):
                    query = query.where(field[:-5], '<=', value)
                elif field.endswith('__lt'):
                    query = query.where(field[:-4], '<', value)
                elif field.endswith('__in'):
                    query = query.where(field[:-4], 'in', value)
                else:
                    query = query.where(field, '==', value)

            if limit:
                query = query.limit(limit)

            docs = query.stream()
            return [doc.to_dict() for doc in docs]
        except Exception as e:
            raise NoSQLError(f"Firestore query failed: {str(e)}")

    @retry(max_attempts=3, delay=1.0, exceptions=(NoSQLError,))
    def _delete_raw(self, model: type, pk: str, rk: str, etag: Optional[str]) -> JsonDict:
        """Delete a record and, best-effort, its GCS overflow blob.

        Raises:
            NoSQLError: if the Firestore delete itself fails. Overflow-blob
                cleanup failures are logged and swallowed deliberately.
        """
        try:
            doc_id = f"{pk}_{rk}"
            collection = self._get_collection(model)

            # Best-effort: if the record overflowed to GCS, remove the blob
            # first so we don't leak storage. Failures here are non-fatal.
            try:
                doc = collection.document(doc_id).get()
                if doc.exists:
                    doc_data = doc.to_dict()
                    if doc_data.get('_overflow'):
                        blob_key = doc_data.get('_blob_key')
                        if blob_key and self._bucket:
                            blob = self._bucket.blob(blob_key)
                            blob.delete()
                            self.logger.debug(f"Deleted overflow GCS object: {blob_key}")
            except Exception:
                # Doc might not exist, or the blob is already gone; the
                # Firestore delete below is the operation that matters.
                pass

            collection.document(doc_id).delete()
            return {'deleted': True, 'id': doc_id}
        except Exception as e:
            raise NoSQLError(f"Firestore delete failed: {str(e)}")
@@ -0,0 +1,81 @@
1
+ # src/polydb/adapters/GCPStorageAdapter.py
2
+ import os
3
+ import threading
4
+ from typing import List, Optional, cast
5
+ from google.cloud.firestore import DocumentSnapshot
6
+ from ..base.ObjectStorageAdapter import ObjectStorageAdapter
7
+ from ..errors import StorageError, ConnectionError
8
+ from ..retry import retry
9
+
10
+
11
class GCPStorageAdapter(ObjectStorageAdapter):
    """GCP Cloud Storage object adapter.

    The storage client and bucket handle are created once at construction
    (thread-safe) and reused for all subsequent calls.
    """

    def __init__(self):
        super().__init__()
        self.bucket_name = os.getenv("GCS_BUCKET_NAME", "default")
        self._client = None
        self._bucket = None
        self._lock = threading.Lock()
        self._initialize_client()

    def _initialize_client(self):
        """Initialize the GCS client and bucket handle exactly once.

        Raises:
            ConnectionError: if the client cannot be constructed.
        """
        try:
            from google.cloud import storage

            with self._lock:
                if not self._client:
                    self._client = storage.Client()
                    self._bucket = self._client.bucket(self.bucket_name)
                    self.logger.info("Initialized GCS client")
        except Exception as e:
            raise ConnectionError(f"Failed to initialize GCS: {str(e)}")

    @retry(max_attempts=3, delay=1.0, exceptions=(StorageError,))
    def _put_raw(self, key: str, data: bytes) -> str:
        """Store *data* under *key* and return the key.

        Raises:
            StorageError: on upload failure, or when the bucket was never
                initialized. (Bug fix: the uninitialized-bucket path
                previously returned None, violating the declared ``-> str``
                return type.)
        """
        try:
            if not self._bucket:
                raise StorageError("GCS bucket not initialized")
            blob = self._bucket.blob(key)
            blob.upload_from_string(data)
            self.logger.debug(f"Uploaded blob: {key}")
            return key
        except StorageError:
            # Don't re-wrap our own error into a second StorageError.
            raise
        except Exception as e:
            raise StorageError(f"GCS put failed: {str(e)}")

    @retry(max_attempts=3, delay=1.0, exceptions=(StorageError,))
    def get(self, key: str) -> bytes | None:
        """Return the object's bytes, or None when no bucket is configured.

        Raises:
            StorageError: on download failure (including a missing key,
                which the client surfaces as an exception).
        """
        try:
            if self._bucket:
                blob = self._bucket.blob(key)
                return blob.download_as_bytes()
            return None
        except Exception as e:
            raise StorageError(f"GCS get failed: {str(e)}")

    @retry(max_attempts=3, delay=1.0, exceptions=(StorageError,))
    def delete(self, key: str) -> bool:
        """Delete the object; True on success, False if no bucket configured.

        Raises:
            StorageError: on deletion failure.
        """
        try:
            if self._bucket:
                blob = self._bucket.blob(key)
                blob.delete()
                return True
            return False
        except Exception as e:
            raise StorageError(f"GCS delete failed: {str(e)}")

    @retry(max_attempts=3, delay=1.0, exceptions=(StorageError,))
    def list(self, prefix: str = "") -> List[str]:
        """List object names under *prefix* ([] when no bucket configured).

        Raises:
            StorageError: on listing failure.
        """
        try:
            if self._bucket:
                blobs = self._bucket.list_blobs(prefix=prefix)
                return [blob.name for blob in blobs]
            return []
        except Exception as e:
            raise StorageError(f"GCS list failed: {str(e)}")
80
+
81
+
@@ -0,0 +1,136 @@
1
+ # src/polydb/adapters/mongodb.py
2
+ import os
3
+ import re
4
+ import threading
5
+ from typing import Any, Dict, List, Optional
6
+ from polydb.base.NoSQLKVAdapter import NoSQLKVAdapter
7
+
8
+ from ..errors import NoSQLError, ConnectionError
9
+ from ..retry import retry
10
+ from ..types import JsonDict
11
+ from ..models import PartitionConfig
12
+
13
+
14
class MongoDBAdapter(NoSQLKVAdapter):
    """MongoDB key-value adapter.

    Records are addressed by a (``_pk``, ``_rk``) pair stored alongside the
    payload; queries support Django-style filter suffixes. Connection
    pooling is configured from environment variables.
    """

    def __init__(self, partition_config: Optional[PartitionConfig] = None):
        super().__init__(partition_config)
        self.mongo_uri = os.getenv("MONGODB_URI", "mongodb://localhost:27017")
        self.db_name = os.getenv("MONGODB_DATABASE", "default")
        self._client = None
        self._client_lock = threading.Lock()
        self._initialize_client()

    def _initialize_client(self):
        """Create the pooled MongoClient once (thread-safe) and ping it.

        Raises:
            ConnectionError: if the client cannot connect within the
                5-second server-selection timeout.
        """
        try:
            from pymongo import MongoClient

            with self._client_lock:
                if not self._client:
                    self._client = MongoClient(
                        self.mongo_uri,
                        maxPoolSize=int(os.getenv("MONGODB_MAX_POOL_SIZE", "10")),
                        minPoolSize=int(os.getenv("MONGODB_MIN_POOL_SIZE", "1")),
                        serverSelectionTimeoutMS=5000,
                    )
                    # Force a round-trip now so misconfiguration fails at
                    # construction time rather than on first use.
                    self._client.server_info()
                    self.logger.info("MongoDB initialized")
        except Exception as e:
            raise ConnectionError(f"MongoDB init failed: {str(e)}")

    def _get_collection(self, model: type):
        """Resolve the collection for *model* from its ``__polydb__`` metadata
        (``collection`` then ``table``), falling back to the lowercased class
        name. Re-initializes the client lazily if needed."""
        if not self._client:
            self._initialize_client()

        meta = getattr(model, "__polydb__", {})
        collection_name = meta.get("collection") or meta.get("table") or model.__name__.lower()
        return self._client[self.db_name][collection_name]  # type: ignore

    @retry(max_attempts=3, delay=1.0, exceptions=(NoSQLError,))
    def _put_raw(self, model: type, pk: str, rk: str, data: JsonDict) -> JsonDict:
        """Upsert a record keyed by (pk, rk).

        Raises:
            NoSQLError: on any MongoDB failure.
        """
        try:
            data_copy = dict(data)
            data_copy["_pk"] = pk
            data_copy["_rk"] = rk

            collection = self._get_collection(model)
            # Upsert so put is create-or-replace semantics.
            collection.update_one(
                {"_pk": pk, "_rk": rk},
                {"$set": data_copy},
                upsert=True,
            )

            return {"_pk": pk, "_rk": rk}
        except Exception as e:
            raise NoSQLError(f"MongoDB put failed: {str(e)}")

    @retry(max_attempts=3, delay=1.0, exceptions=(NoSQLError,))
    def _get_raw(self, model: type, pk: str, rk: str) -> Optional[JsonDict]:
        """Fetch a record by (pk, rk); None if absent.

        The internal Mongo ``_id`` field is stripped from the result.

        Raises:
            NoSQLError: on any MongoDB failure.
        """
        try:
            collection = self._get_collection(model)
            doc = collection.find_one({"_pk": pk, "_rk": rk})

            if doc:
                doc.pop("_id", None)
                return doc

            return None
        except Exception as e:
            raise NoSQLError(f"MongoDB get failed: {str(e)}")

    @retry(max_attempts=3, delay=1.0, exceptions=(NoSQLError,))
    def _query_raw(
        self, model: type, filters: Dict[str, Any], limit: Optional[int]
    ) -> List[JsonDict]:
        """Run a filtered find against the model's collection.

        Filter keys support ``__gt``/``__gte``/``__lt``/``__lte``/``__in``
        and ``__contains`` (case-insensitive regex with the value escaped);
        anything else is an equality match.

        Raises:
            NoSQLError: on any MongoDB failure.
        """
        try:
            collection = self._get_collection(model)

            query = {}
            for k, v in filters.items():
                # Check the longer suffixes first so e.g. "__gte" is not
                # misparsed as "__gt" followed by "e".
                if k.endswith("__gte"):
                    query[k[:-5]] = {"$gte": v}
                elif k.endswith("__gt"):
                    query[k[:-4]] = {"$gt": v}
                elif k.endswith("__lte"):
                    query[k[:-5]] = {"$lte": v}
                elif k.endswith("__lt"):
                    query[k[:-4]] = {"$lt": v}
                elif k.endswith("__in"):
                    query[k[:-4]] = {"$in": v}
                elif k.endswith("__contains"):
                    # Escape the user value so it is matched literally,
                    # never interpreted as a regex (injection guard).
                    safe_pattern = re.escape(str(v))
                    query[k[:-10]] = {"$regex": safe_pattern, "$options": "i"}  # case-insensitive
                else:
                    query[k] = v

            cursor = collection.find(query)

            if limit:
                cursor = cursor.limit(limit)

            results = []
            for doc in cursor:
                doc.pop("_id", None)
                results.append(doc)

            return results
        except Exception as e:
            raise NoSQLError(f"MongoDB query failed: {str(e)}")

    @retry(max_attempts=3, delay=1.0, exceptions=(NoSQLError,))
    def _delete_raw(self, model: type, pk: str, rk: str, etag: Optional[str]) -> JsonDict:
        """Delete a record by (pk, rk); ``deleted`` reflects whether a
        matching document existed.

        Raises:
            NoSQLError: on any MongoDB failure.
        """
        try:
            collection = self._get_collection(model)
            result = collection.delete_one({"_pk": pk, "_rk": rk})

            return {"deleted": result.deleted_count > 0, "_pk": pk, "_rk": rk}
        except Exception as e:
            raise NoSQLError(f"MongoDB delete failed: {str(e)}")

    def __del__(self):
        # Best-effort connection cleanup. getattr guards against partially
        # constructed instances (__init__ raising before _client exists),
        # and the narrowed except avoids a bare `except:` swallowing
        # KeyboardInterrupt during interpreter shutdown.
        client = getattr(self, "_client", None)
        if client:
            try:
                client.close()
            except Exception:
                pass