altcodepro-polydb-python 2.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. altcodepro_polydb_python-2.1.0.dist-info/METADATA +378 -0
  2. altcodepro_polydb_python-2.1.0.dist-info/RECORD +51 -0
  3. altcodepro_polydb_python-2.1.0.dist-info/WHEEL +5 -0
  4. altcodepro_polydb_python-2.1.0.dist-info/licenses/LICENSE +21 -0
  5. altcodepro_polydb_python-2.1.0.dist-info/top_level.txt +1 -0
  6. polydb/__init__.py +64 -0
  7. polydb/adapters/AzureBlobStorageAdapter.py +77 -0
  8. polydb/adapters/AzureFileStorageAdapter.py +79 -0
  9. polydb/adapters/AzureQueueAdapter.py +61 -0
  10. polydb/adapters/AzureTableStorageAdapter.py +182 -0
  11. polydb/adapters/DynamoDBAdapter.py +216 -0
  12. polydb/adapters/EFSAdapter.py +50 -0
  13. polydb/adapters/FirestoreAdapter.py +193 -0
  14. polydb/adapters/GCPStorageAdapter.py +81 -0
  15. polydb/adapters/MongoDBAdapter.py +136 -0
  16. polydb/adapters/PostgreSQLAdapter.py +453 -0
  17. polydb/adapters/PubSubAdapter.py +83 -0
  18. polydb/adapters/S3Adapter.py +86 -0
  19. polydb/adapters/S3CompatibleAdapter.py +90 -0
  20. polydb/adapters/SQSAdapter.py +84 -0
  21. polydb/adapters/VercelKVAdapter.py +327 -0
  22. polydb/adapters/__init__.py +0 -0
  23. polydb/advanced_query.py +147 -0
  24. polydb/audit/AuditStorage.py +136 -0
  25. polydb/audit/__init__.py +7 -0
  26. polydb/audit/context.py +53 -0
  27. polydb/audit/manager.py +47 -0
  28. polydb/audit/models.py +86 -0
  29. polydb/base/NoSQLKVAdapter.py +301 -0
  30. polydb/base/ObjectStorageAdapter.py +42 -0
  31. polydb/base/QueueAdapter.py +27 -0
  32. polydb/base/SharedFilesAdapter.py +32 -0
  33. polydb/base/__init__.py +0 -0
  34. polydb/batch.py +163 -0
  35. polydb/cache.py +204 -0
  36. polydb/databaseFactory.py +748 -0
  37. polydb/decorators.py +21 -0
  38. polydb/errors.py +82 -0
  39. polydb/factory.py +107 -0
  40. polydb/models.py +39 -0
  41. polydb/monitoring.py +313 -0
  42. polydb/multitenancy.py +197 -0
  43. polydb/py.typed +0 -0
  44. polydb/query.py +150 -0
  45. polydb/registry.py +71 -0
  46. polydb/retry.py +76 -0
  47. polydb/schema.py +205 -0
  48. polydb/security.py +458 -0
  49. polydb/types.py +127 -0
  50. polydb/utils.py +61 -0
  51. polydb/validation.py +131 -0
@@ -0,0 +1,90 @@
1
+ # src/polydb/adapters/S3CompatibleAdapter.py
2
+ import os
3
+ import threading
4
+ from typing import List
5
+ from ..base.ObjectStorageAdapter import ObjectStorageAdapter
6
+ from ..errors import StorageError, ConnectionError
7
+ from ..retry import retry
8
+
9
class S3CompatibleAdapter(ObjectStorageAdapter):
    """S3-compatible object storage (MinIO, DigitalOcean Spaces) with client reuse.

    Configuration is read from environment variables:
    ``S3_ENDPOINT_URL``, ``S3_ACCESS_KEY``, ``S3_SECRET_KEY``,
    ``S3_BUCKET_NAME`` (default ``"default"``).
    """

    def __init__(self):
        super().__init__()
        self.endpoint = os.getenv("S3_ENDPOINT_URL")
        self.access_key = os.getenv("S3_ACCESS_KEY")
        self.secret_key = os.getenv("S3_SECRET_KEY")
        self.bucket_name = os.getenv("S3_BUCKET_NAME", "default")
        self._client = None
        self._lock = threading.Lock()
        self._initialize_client()

    def _initialize_client(self):
        """Create the boto3 S3 client once, double-checked under a lock.

        Raises:
            ConnectionError: if boto3 is unavailable or client creation fails.
        """
        try:
            import boto3  # local import keeps boto3 an optional dependency

            with self._lock:
                if not self._client:
                    self._client = boto3.client(
                        "s3",
                        endpoint_url=self.endpoint,
                        aws_access_key_id=self.access_key,
                        aws_secret_access_key=self.secret_key,
                    )
                    self.logger.info("Initialized S3-compatible client")
        except Exception as e:
            raise ConnectionError(f"Failed to initialize S3-compatible client: {str(e)}") from e

    def _ensure_client(self):
        """Return a ready client, lazily (re)initializing it when missing."""
        if not self._client:
            self._initialize_client()
        return self._client

    @retry(max_attempts=3, delay=1.0, exceptions=(StorageError,))
    def _put_raw(self, key: str, data: bytes) -> str:
        """Store *data* under *key* and return the key.

        Raises:
            StorageError: on any upload failure (retried up to 3 times).
        """
        try:
            client = self._ensure_client()
            client.put_object(Bucket=self.bucket_name, Key=key, Body=data)
            self.logger.debug(f"Uploaded to S3-compatible: {key}")
            return key
        except Exception as e:
            raise StorageError(f"S3-compatible put failed: {str(e)}") from e

    @retry(max_attempts=3, delay=1.0, exceptions=(StorageError,))
    def get(self, key: str) -> bytes | None:
        """Fetch the object's bytes, or ``None`` when the key does not exist.

        Raises:
            StorageError: on any failure other than a missing key.
        """
        try:
            client = self._ensure_client()
            try:
                response = client.get_object(Bucket=self.bucket_name, Key=key)
            except client.exceptions.NoSuchKey:
                # A missing key is an expected outcome (the annotation promises
                # bytes | None), not a storage fault — and returning None here
                # avoids pointless retries on a non-transient condition.
                return None
            return response["Body"].read()
        except Exception as e:
            raise StorageError(f"S3-compatible get failed: {str(e)}") from e

    @retry(max_attempts=3, delay=1.0, exceptions=(StorageError,))
    def delete(self, key: str) -> bool:
        """Delete the object; returns True on success.

        Raises:
            StorageError: on any delete failure (retried up to 3 times).
        """
        try:
            client = self._ensure_client()
            client.delete_object(Bucket=self.bucket_name, Key=key)
            return True
        except Exception as e:
            raise StorageError(f"S3-compatible delete failed: {str(e)}") from e

    @retry(max_attempts=3, delay=1.0, exceptions=(StorageError,))
    def list(self, prefix: str = "") -> List[str]:
        """List object keys starting with *prefix*.

        NOTE(review): list_objects_v2 returns at most 1000 keys per call;
        larger buckets need ContinuationToken pagination — TODO confirm
        whether truncation matters for callers.

        Raises:
            StorageError: on any listing failure (retried up to 3 times).
        """
        try:
            client = self._ensure_client()
            response = client.list_objects_v2(Bucket=self.bucket_name, Prefix=prefix)
            return [obj["Key"] for obj in response.get("Contents", [])]
        except Exception as e:
            raise StorageError(f"S3-compatible list failed: {str(e)}") from e
@@ -0,0 +1,84 @@
1
+ from polydb.base.QueueAdapter import QueueAdapter
2
+ from polydb.errors import ConnectionError, QueueError
3
+ from polydb.retry import retry
4
+
5
+
6
+ import boto3
7
+ from botocore.client import BaseClient
8
+
9
+
10
+ import json
11
+ import os
12
+ import threading
13
+ from typing import Any, Dict, List
14
+
15
+
16
class SQSAdapter(QueueAdapter):
    """AWS SQS with client reuse"""

    def __init__(self):
        super().__init__()
        self.queue_url = os.getenv("SQS_QUEUE_URL")
        self._client: BaseClient | None = None
        self._lock = threading.Lock()
        self._initialize_client()

    def _initialize_client(self):
        """Initialize SQS client once"""
        try:
            import boto3

            with self._lock:
                if self._client is None:
                    self._client = boto3.client("sqs")
                    self.logger.info("Initialized SQS client")
        except Exception as e:
            raise ConnectionError(f"Failed to initialize SQS client: {str(e)}")

    @retry(max_attempts=3, delay=1.0, exceptions=(QueueError,))
    def send(self, message: Dict[str, Any], queue_name: str = "default") -> str:
        """Send message to queue"""
        try:
            if self._client is None:
                self._initialize_client()
            client = self._client
            if client is None:
                return ""
            body = json.dumps(message)
            reply = client.send_message(QueueUrl=self.queue_url, MessageBody=body)
            return reply["MessageId"]
        except Exception as e:
            raise QueueError(f"SQS send failed: {str(e)}")

    @retry(max_attempts=3, delay=1.0, exceptions=(QueueError,))
    def receive(self, queue_name: str = "default", max_messages: int = 1) -> List[Dict[str, Any]]:
        """Receive messages from queue"""
        try:
            if self._client is None:
                self._initialize_client()
            client = self._client
            if client is None:
                return []
            reply = client.receive_message(
                QueueUrl=self.queue_url,
                MaxNumberOfMessages=max_messages,
                WaitTimeSeconds=5,
            )
            # Each SQS message body is the JSON payload produced by send().
            return [json.loads(item["Body"]) for item in reply.get("Messages", [])]
        except Exception as e:
            raise QueueError(f"SQS receive failed: {str(e)}")

    @retry(max_attempts=3, delay=1.0, exceptions=(QueueError,))
    def delete(self, message_id: str, queue_name: str = "default") -> bool:
        """Delete message from queue"""
        try:
            if self._client is None:
                self._initialize_client()
            client = self._client
            if client is None:
                return False
            # NOTE(review): message_id is passed as the ReceiptHandle — SQS
            # deletion requires the receipt handle, not the MessageId; confirm
            # callers supply the handle here.
            client.delete_message(QueueUrl=self.queue_url, ReceiptHandle=message_id)
            return True
        except Exception as e:
            raise QueueError(f"SQS delete failed: {str(e)}")
@@ -0,0 +1,327 @@
1
+ # src/polydb/adapters/VercelKVAdapter.py
2
+ import os
3
+ import threading
4
+ from typing import Any, Dict, List, Optional
5
+ from polydb.base.NoSQLKVAdapter import NoSQLKVAdapter
6
+ from ..errors import NoSQLError, StorageError
7
+ from ..retry import retry
8
+ from ..types import JsonDict
9
+ from ..models import PartitionConfig
10
+
11
+
12
class VercelKVAdapter(NoSQLKVAdapter):
    """Vercel KV (Redis) with Vercel Blob overflow (limit: 100KB per key).

    Values whose JSON encoding exceeds ``VERCEL_KV_MAX_SIZE`` are written to
    Vercel Blob; the KV entry then holds only a reference record
    (``_blob_key``, ``_size``, ``_checksum``). Reads transparently follow the
    reference and verify the checksum before returning the payload.
    """

    VERCEL_KV_MAX_SIZE = 100 * 1024  # 100KB

    def __init__(self, partition_config: Optional[PartitionConfig] = None):
        super().__init__(partition_config)
        self.max_size = self.VERCEL_KV_MAX_SIZE
        self.kv_url = os.getenv('KV_URL')
        self.kv_token = os.getenv('KV_REST_API_TOKEN')
        self.blob_token = os.getenv('BLOB_READ_WRITE_TOKEN')
        # HTTP timeout (seconds) for both KV and Blob requests.
        self.timeout = int(os.getenv('VERCEL_KV_TIMEOUT', '10'))
        self._client_lock = threading.Lock()

    @retry(max_attempts=3, delay=1.0, exceptions=(NoSQLError,))
    def _put_raw(self, model: type, pk: str, rk: str, data: JsonDict) -> JsonDict:
        """Store *data* under ``pk:rk``, overflowing to Blob when oversized.

        Returns a small reference dict ``{'key', '_pk', '_rk'}``.

        Raises:
            NoSQLError: on any KV/Blob failure (retried up to 3 times).
        """
        try:
            import requests
            import json
            import hashlib

            key = f"{pk}:{rk}"
            data_copy = dict(data)
            data_copy['_pk'] = pk
            data_copy['_rk'] = rk

            # Size check decides KV-direct vs Blob-overflow storage.
            data_bytes = json.dumps(data_copy).encode()
            data_size = len(data_bytes)

            if data_size > self.VERCEL_KV_MAX_SIZE:
                # Store in Vercel Blob. MD5 here is a content checksum for
                # integrity verification on read, not a security measure.
                blob_id = hashlib.md5(data_bytes).hexdigest()
                blob_key = f"overflow/{pk}/{rk}/{blob_id}.json"

                blob_response = requests.put(
                    f"https://blob.vercel-storage.com/{blob_key}",
                    headers={
                        "Authorization": f"Bearer {self.blob_token}",
                        "x-content-type": "application/json",
                    },
                    data=data_bytes,
                    timeout=self.timeout,
                )
                blob_response.raise_for_status()
                self.logger.info(f"Stored overflow to Blob: {blob_key} ({data_size} bytes)")

                # Store only the reference record in KV (small and fixed-size).
                reference_data = {
                    '_pk': pk,
                    '_rk': rk,
                    '_overflow': True,
                    '_blob_key': blob_key,
                    '_size': data_size,
                    '_checksum': blob_id,
                }

                response = requests.post(
                    f"{self.kv_url}/set/{key}",
                    headers={'Authorization': f'Bearer {self.kv_token}'},
                    json={'value': json.dumps(reference_data)},
                    timeout=self.timeout
                )
                response.raise_for_status()
            else:
                # Small enough: store the payload directly in KV.
                response = requests.post(
                    f"{self.kv_url}/set/{key}",
                    headers={'Authorization': f'Bearer {self.kv_token}'},
                    json={'value': json.dumps(data_copy)},
                    timeout=self.timeout
                )
                response.raise_for_status()

            return {'key': key, '_pk': pk, '_rk': rk}
        except Exception as e:
            raise NoSQLError(f"Vercel KV put failed: {str(e)}") from e

    @retry(max_attempts=3, delay=1.0, exceptions=(NoSQLError,))
    def _get_raw(self, model: type, pk: str, rk: str) -> Optional[JsonDict]:
        """Fetch the record stored under ``pk:rk``.

        Follows a Blob-overflow reference when present and verifies its MD5
        checksum. Returns ``None`` when the key is absent or empty.

        Raises:
            NoSQLError: on request failure or checksum mismatch.
        """
        try:
            import requests
            import json
            import hashlib

            key = f"{pk}:{rk}"

            response = requests.get(
                f"{self.kv_url}/get/{key}",
                headers={'Authorization': f'Bearer {self.kv_token}'},
                timeout=self.timeout
            )

            if response.status_code != 200:
                return None

            result = response.json().get('result')
            if not result:
                return None

            kv_data = json.loads(result)

            # Overflow reference: the real payload lives in Vercel Blob.
            if kv_data.get('_overflow'):
                blob_key = kv_data.get('_blob_key')
                checksum = kv_data.get('_checksum')

                if blob_key:
                    blob_response = requests.get(
                        f"https://blob.vercel-storage.com/{blob_key}",
                        headers={"Authorization": f"Bearer {self.blob_token}"},
                        timeout=self.timeout,
                    )
                    blob_response.raise_for_status()
                    blob_data = blob_response.content

                    # Verify integrity before trusting the blob content.
                    actual_checksum = hashlib.md5(blob_data).hexdigest()
                    if actual_checksum != checksum:
                        raise NoSQLError(f"Checksum mismatch: expected {checksum}, got {actual_checksum}")

                    retrieved = json.loads(blob_data.decode())
                    self.logger.debug(f"Retrieved overflow from Blob: {blob_key}")
                    return retrieved

            return kv_data
        except Exception as e:
            raise NoSQLError(f"Vercel KV get failed: {str(e)}") from e

    @retry(max_attempts=3, delay=1.0, exceptions=(NoSQLError,))
    def _query_raw(self, model: type, filters: Dict[str, Any], limit: Optional[int]) -> List[JsonDict]:
        """Client-side query: scan keys by ``pk`` prefix and filter fetched rows.

        Vercel KV has no native query support, so this fetches every matching
        key individually — O(keys) round trips; fine for small partitions.

        Raises:
            NoSQLError: on request failure.
        """
        try:
            import requests
            import json

            # Vercel KV doesn't support native queries, use SCAN pattern
            pattern = "*"
            if filters.get('_pk'):
                pattern = f"{filters['_pk']}:*"

            response = requests.get(
                f"{self.kv_url}/keys/{pattern}",
                headers={'Authorization': f'Bearer {self.kv_token}'},
                timeout=self.timeout
            )

            if response.status_code != 200:
                return []

            keys = response.json().get('result', [])

            # Fetch each matching key and apply the non-system filters locally.
            results = []
            for key in keys:
                if limit and len(results) >= limit:
                    break

                get_response = requests.get(
                    f"{self.kv_url}/get/{key}",
                    headers={'Authorization': f'Bearer {self.kv_token}'},
                    timeout=self.timeout
                )

                if get_response.status_code == 200:
                    result = get_response.json().get('result')
                    if result:
                        data = json.loads(result)

                        # System fields (leading underscore) are matched via the
                        # key pattern above, not field-by-field here.
                        match = True
                        for k, v in filters.items():
                            if k.startswith('_'):
                                continue
                            if data.get(k) != v:
                                match = False
                                break

                        if match:
                            results.append(data)

            return results
        except Exception as e:
            raise NoSQLError(f"Vercel KV query failed: {str(e)}") from e

    @retry(max_attempts=3, delay=1.0, exceptions=(NoSQLError,))
    def _delete_raw(self, model: type, pk: str, rk: str, etag: Optional[str]) -> JsonDict:
        """Delete ``pk:rk``, removing any overflow blob first (best effort).

        The *etag* parameter is accepted for interface compatibility but not
        used — Vercel KV offers no conditional delete here.

        Raises:
            NoSQLError: when the KV delete itself fails.
        """
        try:
            import requests
            import json

            key = f"{pk}:{rk}"

            # Best-effort pre-pass: if this key is an overflow reference,
            # delete the backing blob so it is not orphaned.
            try:
                get_response = requests.get(
                    f"{self.kv_url}/get/{key}",
                    headers={'Authorization': f'Bearer {self.kv_token}'},
                    timeout=self.timeout
                )

                if get_response.status_code == 200:
                    result = get_response.json().get('result')
                    if result:
                        kv_data = json.loads(result)

                        if kv_data.get('_overflow'):
                            blob_key = kv_data.get('_blob_key')
                            if blob_key:
                                blob_response = requests.delete(
                                    f"https://blob.vercel-storage.com/{blob_key}",
                                    headers={"Authorization": f"Bearer {self.blob_token}"},
                                    timeout=self.timeout,
                                )
                                blob_response.raise_for_status()
                                self.logger.debug(f"Deleted overflow Blob: {blob_key}")
            except Exception:
                # Key might not exist or have no overflow blob; a bare except
                # would also swallow KeyboardInterrupt/SystemExit, so catch
                # Exception only and keep the delete best-effort.
                pass

            # Delete the KV key itself (this part is not best-effort).
            response = requests.delete(
                f"{self.kv_url}/del/{key}",
                headers={'Authorization': f'Bearer {self.kv_token}'},
                timeout=self.timeout
            )
            response.raise_for_status()

            return {'deleted': True, 'key': key}
        except Exception as e:
            raise NoSQLError(f"Vercel KV delete failed: {str(e)}") from e
241
+
242
+
243
class VercelBlobAdapter:
    """Thin client for Vercel Blob Storage (put/get/delete/list)."""

    def __init__(self):
        from ..utils import setup_logger
        self.logger = setup_logger(__name__)
        self.blob_token = os.getenv("BLOB_READ_WRITE_TOKEN")
        self.timeout = int(os.getenv("VERCEL_BLOB_TIMEOUT", "10"))

    @retry(max_attempts=3, delay=1.0, exceptions=(StorageError,))
    def put(self, key: str, data: bytes) -> str:
        """Upload *data* under *key*; returns the public blob URL."""
        try:
            import requests

            headers = {
                "Authorization": f"Bearer {self.blob_token}",
                "x-content-type": "application/octet-stream",
            }
            reply = requests.put(
                f"https://blob.vercel-storage.com/{key}",
                headers=headers,
                data=data,
                timeout=self.timeout,
            )
            reply.raise_for_status()
            return reply.json()["url"]
        except Exception as e:
            raise StorageError(f"Vercel Blob put failed: {str(e)}")

    @retry(max_attempts=3, delay=1.0, exceptions=(StorageError,))
    def get(self, key: str) -> bytes:
        """Download and return the raw bytes stored under *key*."""
        try:
            import requests

            reply = requests.get(
                f"https://blob.vercel-storage.com/{key}",
                headers={"Authorization": f"Bearer {self.blob_token}"},
                timeout=self.timeout,
            )
            reply.raise_for_status()
            return reply.content
        except Exception as e:
            raise StorageError(f"Vercel Blob get failed: {str(e)}")

    @retry(max_attempts=3, delay=1.0, exceptions=(StorageError,))
    def delete(self, key: str) -> bool:
        """Delete the blob stored under *key*; returns True on success."""
        try:
            import requests

            reply = requests.delete(
                f"https://blob.vercel-storage.com/{key}",
                headers={"Authorization": f"Bearer {self.blob_token}"},
                timeout=self.timeout,
            )
            reply.raise_for_status()
            return True
        except Exception as e:
            raise StorageError(f"Vercel Blob delete failed: {str(e)}")

    @retry(max_attempts=3, delay=1.0, exceptions=(StorageError,))
    def list(self, prefix: str = "") -> List[str]:
        """Return the pathnames of blobs whose key starts with *prefix*."""
        try:
            import requests

            reply = requests.get(
                f"https://blob.vercel-storage.com/?prefix={prefix}",
                headers={"Authorization": f"Bearer {self.blob_token}"},
                timeout=self.timeout,
            )
            reply.raise_for_status()
            return [blob["pathname"] for blob in reply.json()["blobs"]]
        except Exception as e:
            raise StorageError(f"Vercel Blob list failed: {str(e)}")
315
+
316
+
317
class VercelQueueAdapter:
    """Placeholder queue adapter: Vercel has no queue product yet, so every
    operation raises NotImplementedError."""

    def send(self, message: Dict[str, Any], queue_name: str = "default") -> str:
        """Enqueue *message* — unsupported until Vercel ships a queue service."""
        raise NotImplementedError("Vercel Queue not yet available")

    def receive(self, queue_name: str = "default", max_messages: int = 1) -> List[Dict[str, Any]]:
        """Dequeue up to *max_messages* messages — unsupported."""
        raise NotImplementedError("Vercel Queue not yet available")

    def delete(self, message_id: str, queue_name: str = "default") -> bool:
        """Remove a message from the queue — unsupported."""
        raise NotImplementedError("Vercel Queue not yet available")
File without changes
@@ -0,0 +1,147 @@
1
+ # src/polydb/advanced_query.py
2
+ """
3
+ Advanced query capabilities: JOIN, subqueries, aggregates
4
+ """
5
+ from typing import List, Optional, Any, Dict
6
+ from dataclasses import dataclass, field
7
+ from enum import Enum
8
+
9
+
10
class JoinType(Enum):
    """SQL join variants; each member's value is the literal SQL keyword
    emitted by AdvancedQueryBuilder.to_sql()."""
    INNER = "INNER JOIN"
    LEFT = "LEFT JOIN"
    RIGHT = "RIGHT JOIN"
    FULL = "FULL OUTER JOIN"
15
+
16
+
17
class AggregateFunction(Enum):
    """SQL aggregate functions; each member's value is the function name
    emitted verbatim into the SELECT clause."""
    COUNT = "COUNT"
    SUM = "SUM"
    AVG = "AVG"
    MIN = "MIN"
    MAX = "MAX"
23
+
24
+
25
@dataclass
class Join:
    """One JOIN clause: join *table* on the equality ``on_left = on_right``."""
    table: str  # name of the table being joined
    join_type: JoinType  # INNER/LEFT/RIGHT/FULL — rendered as SQL keyword
    on_left: str  # left-hand column of the ON condition (usually qualified)
    on_right: str  # right-hand column of the ON condition
    alias: Optional[str] = None  # optional "AS <alias>" for the joined table
32
+
33
+
34
@dataclass
class Aggregate:
    """One aggregate term rendered as ``FUNCTION(field) AS alias``."""
    function: AggregateFunction  # which SQL aggregate to apply
    field: str  # column (or expression) the aggregate is applied to
    alias: str  # output column name in the result set
39
+
40
+
41
@dataclass
class AdvancedQueryBuilder:
    """Extended QueryBuilder with JOINs, aggregates, GROUP BY and HAVING.

    All builder methods mutate the instance and return ``self`` for chaining.

    SECURITY NOTE: table names, column names, join conditions and HAVING
    expressions are interpolated directly into the SQL string — never pass
    untrusted input to these methods. Only the (currently unused) parameter
    list returned by :meth:`to_sql` is meant for driver-level binding.
    """

    table: str
    joins: List[Join] = field(default_factory=list)
    aggregates: List[Aggregate] = field(default_factory=list)
    group_by_fields: List[str] = field(default_factory=list)
    having_conditions: List[str] = field(default_factory=list)
    subqueries: Dict[str, "AdvancedQueryBuilder"] = field(default_factory=dict)

    def join(
        self,
        table: str,
        on_left: str,
        on_right: str,
        join_type: JoinType = JoinType.INNER,
        alias: Optional[str] = None,
    ) -> "AdvancedQueryBuilder":
        """Add a JOIN clause (``join_type table [AS alias] ON on_left = on_right``)."""
        self.joins.append(
            Join(table=table, join_type=join_type, on_left=on_left, on_right=on_right, alias=alias)
        )
        return self

    def aggregate(
        self, function: AggregateFunction, field: str, alias: str
    ) -> "AdvancedQueryBuilder":
        """Add an aggregate term ``function(field) AS alias`` to the SELECT list."""
        self.aggregates.append(Aggregate(function=function, field=field, alias=alias))
        return self

    def group_by(self, *fields: str) -> "AdvancedQueryBuilder":
        """Append one or more columns to the GROUP BY list."""
        self.group_by_fields.extend(fields)
        return self

    def having(self, condition: str) -> "AdvancedQueryBuilder":
        """Append a raw HAVING condition (AND-combined with previous ones)."""
        self.having_conditions.append(condition)
        return self

    def to_sql(self) -> tuple[str, List[Any]]:
        """Generate the SQL string plus its parameter list.

        Returns:
            (sql, params) — *params* is reserved for future WHERE-value
            binding and is currently always empty.
        """
        params: List[Any] = []

        # SELECT clause: aggregates (prefixed by any GROUP BY columns so the
        # grouped columns appear in the output), or SELECT * when none.
        if self.aggregates:
            select_parts = [
                f"{agg.function.value}({agg.field}) AS {agg.alias}" for agg in self.aggregates
            ]
            if self.group_by_fields:
                select_parts = list(self.group_by_fields) + select_parts
            sql = f"SELECT {', '.join(select_parts)}"
        else:
            sql = "SELECT *"

        # FROM clause
        sql += f" FROM {self.table}"

        # JOIN clauses, in insertion order
        for join in self.joins:
            table = f"{join.table} AS {join.alias}" if join.alias else join.table
            sql += f" {join.join_type.value} {table}"
            sql += f" ON {join.on_left} = {join.on_right}"

        # GROUP BY clause
        if self.group_by_fields:
            sql += f" GROUP BY {', '.join(self.group_by_fields)}"

        # HAVING clause (conditions AND-combined)
        if self.having_conditions:
            sql += f" HAVING {' AND '.join(self.having_conditions)}"

        return sql, params
116
+
117
+
118
+ # Example usage helper
119
class QueryHelper:
    """Factory shortcuts for common query shapes built on AdvancedQueryBuilder."""

    @staticmethod
    def count_by_field(table: str, field: str, group_field: str) -> AdvancedQueryBuilder:
        """Count occurrences of *field*, grouped by *group_field*."""
        builder = AdvancedQueryBuilder(table=table)
        builder.aggregate(AggregateFunction.COUNT, field, "count")
        builder.group_by(group_field)
        return builder

    @staticmethod
    def sum_by_category(table: str, sum_field: str, category_field: str) -> AdvancedQueryBuilder:
        """Sum *sum_field* values, grouped by *category_field*."""
        builder = AdvancedQueryBuilder(table=table)
        builder.aggregate(AggregateFunction.SUM, sum_field, "total")
        builder.group_by(category_field)
        return builder

    @staticmethod
    def join_with_filter(
        left_table: str, right_table: str, join_field: str
    ) -> AdvancedQueryBuilder:
        """INNER JOIN of two tables on the same column name on both sides."""
        builder = AdvancedQueryBuilder(table=left_table)
        builder.join(
            right_table, f"{left_table}.{join_field}", f"{right_table}.{join_field}"
        )
        return builder