srx-lib-azure 0.1.5__py3-none-any.whl → 0.1.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of srx-lib-azure might be problematic; see the registry's advisory page for more details.

srx_lib_azure/blob.py CHANGED
@@ -10,18 +10,32 @@ from loguru import logger
10
10
 
11
11
 
12
12
  class AzureBlobService:
13
- """Minimal Azure Blob helper with SAS URL generation."""
14
-
15
- def __init__(self) -> None:
16
- self.container_name = os.getenv("AZURE_BLOB_CONTAINER", "uploads")
17
- self.connection_string = os.getenv("AZURE_STORAGE_CONNECTION_STRING")
18
- self.account_key = os.getenv("AZURE_STORAGE_ACCOUNT_KEY")
19
- self.sas_token = os.getenv("AZURE_SAS_TOKEN")
20
- self.base_blob_url = os.getenv("AZURE_BLOB_URL")
21
-
22
- if not self.connection_string:
13
+ """Minimal Azure Blob helper with SAS URL generation.
14
+
15
+ All configuration can be passed explicitly via constructor. If omitted, falls back
16
+ to environment variables. By default, it does not warn at startup when not
17
+ configured; operations will error if required values are missing.
18
+ """
19
+
20
+ def __init__(
21
+ self,
22
+ *,
23
+ connection_string: Optional[str] = None,
24
+ account_key: Optional[str] = None,
25
+ container_name: Optional[str] = None,
26
+ base_blob_url: Optional[str] = None,
27
+ sas_token: Optional[str] = None,
28
+ warn_if_unconfigured: bool = False,
29
+ ) -> None:
30
+ self.container_name = container_name or os.getenv("AZURE_BLOB_CONTAINER", "uploads")
31
+ self.connection_string = connection_string or os.getenv("AZURE_STORAGE_CONNECTION_STRING")
32
+ self.account_key = account_key or os.getenv("AZURE_STORAGE_ACCOUNT_KEY")
33
+ self.sas_token = sas_token or os.getenv("AZURE_SAS_TOKEN")
34
+ self.base_blob_url = base_blob_url or os.getenv("AZURE_BLOB_URL")
35
+
36
+ if warn_if_unconfigured and not self.connection_string:
23
37
  logger.warning(
24
- "Azure Storage connection string not configured; blob operations will fail."
38
+ "Azure Storage connection string not configured; blob operations may fail."
25
39
  )
26
40
 
27
41
  def _get_blob_service(self) -> BlobServiceClient:
srx_lib_azure/email.py CHANGED
@@ -17,14 +17,21 @@ class EmailService:
17
17
  If not configured, send calls are skipped with a warning and a 'skipped' status.
18
18
  """
19
19
 
20
- def __init__(self):
21
- self.connection_string = os.getenv("ACS_CONNECTION_STRING")
22
- self.sender_address = os.getenv("EMAIL_SENDER")
20
+ def __init__(
21
+ self,
22
+ *,
23
+ connection_string: str | None = None,
24
+ sender_address: str | None = None,
25
+ warn_if_unconfigured: bool = False,
26
+ ):
27
+ self.connection_string = connection_string or os.getenv("ACS_CONNECTION_STRING")
28
+ self.sender_address = sender_address or os.getenv("EMAIL_SENDER")
23
29
  if not self.connection_string or not self.sender_address or EmailClient is None:
24
30
  self.email_client = None
25
- logger.warning(
26
- "EmailService not configured (missing ACS_CONNECTION_STRING/EMAIL_SENDER or azure SDK). Calls will be skipped."
27
- )
31
+ if warn_if_unconfigured:
32
+ logger.warning(
33
+ "EmailService not configured (missing ACS_CONNECTION_STRING/EMAIL_SENDER or azure SDK). Calls will be skipped."
34
+ )
28
35
  else:
29
36
  try:
30
37
  self.email_client = EmailClient.from_connection_string(self.connection_string)
srx_lib_azure/table.py CHANGED
@@ -3,7 +3,7 @@ from __future__ import annotations
3
3
  import os
4
4
  from dataclasses import dataclass
5
5
  from datetime import datetime, timezone
6
- from typing import Any, Dict, Iterable, Optional
6
+ from typing import Any, Dict, Iterable, List, Optional
7
7
 
8
8
  from loguru import logger
9
9
 
@@ -19,7 +19,11 @@ def _now_iso() -> str:
19
19
 
20
20
  @dataclass
21
21
  class AzureTableService:
22
- connection_string: Optional[str] = os.getenv("AZURE_STORAGE_CONNECTION_STRING")
22
+ connection_string: Optional[str] = None
23
+
24
+ def __init__(self, connection_string: Optional[str] = None) -> None:
25
+ # Constructor injection preferred; fallback to env only if not provided
26
+ self.connection_string = connection_string or os.getenv("AZURE_STORAGE_CONNECTION_STRING")
23
27
 
24
28
  def _get_client(self) -> "TableServiceClient":
25
29
  if not self.connection_string:
@@ -36,6 +40,50 @@ class AzureTableService:
36
40
  except Exception as e:
37
41
  logger.warning("ensure_table(%s) warning: %s", table_name, e)
38
42
 
43
+ def list_tables(self) -> List[str]:
44
+ """List all tables in the storage account.
45
+
46
+ Returns:
47
+ List of table names
48
+ """
49
+ client = self._get_client()
50
+ try:
51
+ tables = [table.name for table in client.list_tables()]
52
+ logger.info("Listed %d tables", len(tables))
53
+ return tables
54
+ except Exception as exc:
55
+ logger.error("Failed to list tables: %s", exc)
56
+ return []
57
+
58
+ def delete_table(self, table_name: str) -> bool:
59
+ """Delete a table.
60
+
61
+ Args:
62
+ table_name: Name of the table to delete
63
+
64
+ Returns:
65
+ True if deleted successfully, False otherwise
66
+ """
67
+ client = self._get_client()
68
+ try:
69
+ client.delete_table(table_name=table_name)
70
+ logger.info("Deleted table: %s", table_name)
71
+ return True
72
+ except Exception as exc:
73
+ logger.error("Failed to delete table %s: %s", table_name, exc)
74
+ return False
75
+
76
+ def table_exists(self, table_name: str) -> bool:
77
+ """Check if a table exists.
78
+
79
+ Args:
80
+ table_name: Name of the table
81
+
82
+ Returns:
83
+ True if table exists, False otherwise
84
+ """
85
+ return table_name in self.list_tables()
86
+
39
87
  def put_entity(self, table_name: str, entity: Dict[str, Any]) -> Dict[str, Any]:
40
88
  client = self._get_client()
41
89
  table = client.get_table_client(table_name)
@@ -71,9 +119,284 @@ class AzureTableService:
71
119
  logger.error("Failed to delete entity in %s: %s", table_name, exc)
72
120
  return False
73
121
 
122
+ def get_entity(
123
+ self, table_name: str, partition_key: str, row_key: str
124
+ ) -> Optional[Dict[str, Any]]:
125
+ """Retrieve a single entity by partition and row key.
126
+
127
+ Args:
128
+ table_name: Name of the table
129
+ partition_key: Partition key of the entity
130
+ row_key: Row key of the entity
131
+
132
+ Returns:
133
+ Entity dict if found, None otherwise
134
+ """
135
+ client = self._get_client()
136
+ table = client.get_table_client(table_name)
137
+ try:
138
+ entity = table.get_entity(partition_key=partition_key, row_key=row_key)
139
+ logger.info("Retrieved entity from %s: PK=%s RK=%s", table_name, partition_key, row_key)
140
+ return dict(entity)
141
+ except Exception as exc:
142
+ logger.warning(
143
+ "Entity not found in %s (%s/%s): %s", table_name, partition_key, row_key, exc
144
+ )
145
+ return None
146
+
147
+ def entity_exists(self, table_name: str, partition_key: str, row_key: str) -> bool:
148
+ """Check if an entity exists without retrieving it.
149
+
150
+ Args:
151
+ table_name: Name of the table
152
+ partition_key: Partition key of the entity
153
+ row_key: Row key of the entity
154
+
155
+ Returns:
156
+ True if entity exists, False otherwise
157
+ """
158
+ return self.get_entity(table_name, partition_key, row_key) is not None
159
+
160
+ def batch_insert_entities(
161
+ self, table_name: str, entities: List[Dict[str, Any]]
162
+ ) -> Dict[str, Any]:
163
+ """Insert multiple entities in a batch operation.
164
+
165
+ Note: All entities must have the same PartitionKey for batch operations.
166
+
167
+ Args:
168
+ table_name: Name of the table
169
+ entities: List of entity dictionaries to insert
170
+
171
+ Returns:
172
+ Dict with count of successful operations and any errors
173
+ """
174
+ if not entities:
175
+ return {"success": 0, "errors": []}
176
+
177
+ client = self._get_client()
178
+ table = client.get_table_client(table_name)
179
+
180
+ # Group by partition key (batch requirement)
181
+ from collections import defaultdict
182
+
183
+ by_partition: defaultdict[str, List[Dict[str, Any]]] = defaultdict(list)
184
+ for entity in entities:
185
+ pk = entity.get("PartitionKey")
186
+ if not pk:
187
+ logger.error("Entity missing PartitionKey, skipping")
188
+ continue
189
+ by_partition[pk].append(entity)
190
+
191
+ success_count = 0
192
+ errors = []
193
+
194
+ for partition_key, partition_entities in by_partition.items():
195
+ # Process in chunks of 100 (Azure limit)
196
+ for i in range(0, len(partition_entities), 100):
197
+ chunk = partition_entities[i : i + 100]
198
+ operations = [("create", entity) for entity in chunk]
199
+
200
+ try:
201
+ table.submit_transaction(operations)
202
+ success_count += len(chunk)
203
+ logger.info(
204
+ "Batch inserted %d entities into %s (PK=%s)",
205
+ len(chunk),
206
+ table_name,
207
+ partition_key,
208
+ )
209
+ except Exception as exc:
210
+ error_msg = f"Batch insert failed for PK={partition_key}: {exc}"
211
+ logger.error(error_msg)
212
+ errors.append(error_msg)
213
+
214
+ return {"success": success_count, "errors": errors, "ts": _now_iso()}
215
+
216
+ def batch_upsert_entities(
217
+ self, table_name: str, entities: List[Dict[str, Any]]
218
+ ) -> Dict[str, Any]:
219
+ """Upsert multiple entities in a batch operation.
220
+
221
+ Note: All entities must have the same PartitionKey for batch operations.
222
+
223
+ Args:
224
+ table_name: Name of the table
225
+ entities: List of entity dictionaries to upsert
226
+
227
+ Returns:
228
+ Dict with count of successful operations and any errors
229
+ """
230
+ if not entities:
231
+ return {"success": 0, "errors": []}
232
+
233
+ client = self._get_client()
234
+ table = client.get_table_client(table_name)
235
+
236
+ # Group by partition key
237
+ from collections import defaultdict
238
+
239
+ by_partition: defaultdict[str, List[Dict[str, Any]]] = defaultdict(list)
240
+ for entity in entities:
241
+ pk = entity.get("PartitionKey")
242
+ if not pk:
243
+ logger.error("Entity missing PartitionKey, skipping")
244
+ continue
245
+ by_partition[pk].append(entity)
246
+
247
+ success_count = 0
248
+ errors = []
249
+
250
+ for partition_key, partition_entities in by_partition.items():
251
+ # Process in chunks of 100
252
+ for i in range(0, len(partition_entities), 100):
253
+ chunk = partition_entities[i : i + 100]
254
+ operations = [("upsert", entity, {"mode": "merge"}) for entity in chunk]
255
+
256
+ try:
257
+ table.submit_transaction(operations)
258
+ success_count += len(chunk)
259
+ logger.info(
260
+ "Batch upserted %d entities into %s (PK=%s)",
261
+ len(chunk),
262
+ table_name,
263
+ partition_key,
264
+ )
265
+ except Exception as exc:
266
+ error_msg = f"Batch upsert failed for PK={partition_key}: {exc}"
267
+ logger.error(error_msg)
268
+ errors.append(error_msg)
269
+
270
+ return {"success": success_count, "errors": errors, "ts": _now_iso()}
271
+
272
+ def batch_delete_entities(self, table_name: str, keys: List[tuple[str, str]]) -> Dict[str, Any]:
273
+ """Delete multiple entities in a batch operation.
274
+
275
+ Note: All entities must have the same PartitionKey for batch operations.
276
+
277
+ Args:
278
+ table_name: Name of the table
279
+ keys: List of (partition_key, row_key) tuples
280
+
281
+ Returns:
282
+ Dict with count of successful operations and any errors
283
+ """
284
+ if not keys:
285
+ return {"success": 0, "errors": []}
286
+
287
+ client = self._get_client()
288
+ table = client.get_table_client(table_name)
289
+
290
+ # Group by partition key
291
+ from collections import defaultdict
292
+
293
+ by_partition: defaultdict[str, List[tuple[str, str]]] = defaultdict(list)
294
+ for pk, rk in keys:
295
+ by_partition[pk].append((pk, rk))
296
+
297
+ success_count = 0
298
+ errors = []
299
+
300
+ for partition_key, partition_keys in by_partition.items():
301
+ # Process in chunks of 100
302
+ for i in range(0, len(partition_keys), 100):
303
+ chunk = partition_keys[i : i + 100]
304
+ operations = [("delete", {"PartitionKey": pk, "RowKey": rk}) for pk, rk in chunk]
305
+
306
+ try:
307
+ table.submit_transaction(operations)
308
+ success_count += len(chunk)
309
+ logger.info(
310
+ "Batch deleted %d entities from %s (PK=%s)",
311
+ len(chunk),
312
+ table_name,
313
+ partition_key,
314
+ )
315
+ except Exception as exc:
316
+ error_msg = f"Batch delete failed for PK={partition_key}: {exc}"
317
+ logger.error(error_msg)
318
+ errors.append(error_msg)
319
+
320
+ return {"success": success_count, "errors": errors, "ts": _now_iso()}
321
+
74
322
  def query(self, table_name: str, filter_query: str) -> Iterable[Dict[str, Any]]:
323
+ """Query entities with a filter.
324
+
325
+ Args:
326
+ table_name: Name of the table
327
+ filter_query: OData filter query string
328
+
329
+ Yields:
330
+ Entity dictionaries matching the filter
331
+ """
75
332
  client = self._get_client()
76
333
  table = client.get_table_client(table_name)
77
334
  for entity in table.query_entities(filter=filter_query):
78
335
  yield dict(entity)
79
336
 
337
+ def query_with_options(
338
+ self,
339
+ table_name: str,
340
+ filter_query: Optional[str] = None,
341
+ select: Optional[List[str]] = None,
342
+ top: Optional[int] = None,
343
+ ) -> Iterable[Dict[str, Any]]:
344
+ """Query entities with advanced options.
345
+
346
+ Args:
347
+ table_name: Name of the table
348
+ filter_query: Optional OData filter query string
349
+ select: Optional list of property names to return (projection)
350
+ top: Optional maximum number of entities to return
351
+
352
+ Yields:
353
+ Entity dictionaries matching the criteria
354
+ """
355
+ client = self._get_client()
356
+ table = client.get_table_client(table_name)
357
+
358
+ kwargs: Dict[str, Any] = {}
359
+ if filter_query:
360
+ kwargs["filter"] = filter_query
361
+ if select:
362
+ kwargs["select"] = select
363
+ if top:
364
+ kwargs["results_per_page"] = top
365
+
366
+ for entity in table.query_entities(**kwargs):
367
+ yield dict(entity)
368
+
369
+ def query_all(
370
+ self,
371
+ table_name: str,
372
+ filter_query: Optional[str] = None,
373
+ select: Optional[List[str]] = None,
374
+ ) -> List[Dict[str, Any]]:
375
+ """Query all entities and return as a list.
376
+
377
+ Warning: This loads all results into memory. Use query() for large result sets.
378
+
379
+ Args:
380
+ table_name: Name of the table
381
+ filter_query: Optional OData filter query string
382
+ select: Optional list of property names to return
383
+
384
+ Returns:
385
+ List of entity dictionaries
386
+ """
387
+ return list(self.query_with_options(table_name, filter_query, select))
388
+
389
+ def count_entities(self, table_name: str, filter_query: Optional[str] = None) -> int:
390
+ """Count entities matching a filter.
391
+
392
+ Args:
393
+ table_name: Name of the table
394
+ filter_query: Optional OData filter query string
395
+
396
+ Returns:
397
+ Count of matching entities
398
+ """
399
+ count = 0
400
+ for _ in self.query_with_options(table_name, filter_query):
401
+ count += 1
402
+ return count
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: srx-lib-azure
3
- Version: 0.1.5
3
+ Version: 0.1.7
4
4
  Summary: Azure helpers for SRX services: Blob, Email, Table
5
5
  Author-email: SRX <dev@srx.id>
6
6
  Requires-Python: >=3.12
@@ -0,0 +1,7 @@
1
+ srx_lib_azure/__init__.py,sha256=K0UCmkKw7HWJMshp6Xv3SxD4y26r7bdcPtb_2aRc2rs,174
2
+ srx_lib_azure/blob.py,sha256=3g5r3cOOdTAN283PBEU__p5gLYQ97LE_KEeNc2mVnLg,8889
3
+ srx_lib_azure/email.py,sha256=2J5zlgJMhx7pMINwN4kW23PmdwL1JyU9xFsSl5gAAM4,2831
4
+ srx_lib_azure/table.py,sha256=0qb1t84wEkpif3t1KybasZYffgTGQQ5ULfjfk2mjy54,14262
5
+ srx_lib_azure-0.1.7.dist-info/METADATA,sha256=bjyMw1i1lMXVw7Q6cbv_yYz9snHpH6pckSPTFR_JZyQ,1600
6
+ srx_lib_azure-0.1.7.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
7
+ srx_lib_azure-0.1.7.dist-info/RECORD,,
@@ -1,7 +0,0 @@
1
- srx_lib_azure/__init__.py,sha256=K0UCmkKw7HWJMshp6Xv3SxD4y26r7bdcPtb_2aRc2rs,174
2
- srx_lib_azure/blob.py,sha256=uCsRUCQN4GHtlyLBtDUvy0_mZaFTxWKdCA407cRIU8I,8245
3
- srx_lib_azure/email.py,sha256=t5W9DRFZLMJKrCaQuSoRU3jgiqNpDYqOxtvhObYn96w,2584
4
- srx_lib_azure/table.py,sha256=_5DCsk1SLqCc27F7469hxnRASS3XeffqK_MsJE1cD7Y,3022
5
- srx_lib_azure-0.1.5.dist-info/METADATA,sha256=qSOMza1A90Pg4AKtWyrcYznmX3NiXZfKLdtF2V0TyNM,1600
6
- srx_lib_azure-0.1.5.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
7
- srx_lib_azure-0.1.5.dist-info/RECORD,,