srx-lib-azure 0.1.6__tar.gz → 0.1.7__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {srx_lib_azure-0.1.6 → srx_lib_azure-0.1.7}/PKG-INFO +1 -1
- {srx_lib_azure-0.1.6 → srx_lib_azure-0.1.7}/pyproject.toml +1 -1
- srx_lib_azure-0.1.7/src/srx_lib_azure/table.py +402 -0
- srx_lib_azure-0.1.6/src/srx_lib_azure/table.py +0 -82
- {srx_lib_azure-0.1.6 → srx_lib_azure-0.1.7}/.github/workflows/publish.yml +0 -0
- {srx_lib_azure-0.1.6 → srx_lib_azure-0.1.7}/.gitignore +0 -0
- {srx_lib_azure-0.1.6 → srx_lib_azure-0.1.7}/README.md +0 -0
- {srx_lib_azure-0.1.6 → srx_lib_azure-0.1.7}/src/srx_lib_azure/__init__.py +0 -0
- {srx_lib_azure-0.1.6 → srx_lib_azure-0.1.7}/src/srx_lib_azure/blob.py +0 -0
- {srx_lib_azure-0.1.6 → srx_lib_azure-0.1.7}/src/srx_lib_azure/email.py +0 -0
|
@@ -0,0 +1,402 @@
|
|
|
1
|
+
from __future__ import annotations

import os
from collections import defaultdict
from dataclasses import dataclass
from datetime import datetime, timezone
from itertools import islice
from typing import Any, Dict, Iterable, List, Optional

from loguru import logger

try:
    from azure.data.tables import TableServiceClient
except Exception:  # pragma: no cover
    TableServiceClient = None  # type: ignore
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def _now_iso() -> str:
|
|
17
|
+
return datetime.now(timezone.utc).isoformat()
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
@dataclass
class AzureTableService:
    """Convenience wrapper around Azure Table Storage (azure-data-tables).

    The connection string comes from the constructor argument or, as a
    fallback, the AZURE_STORAGE_CONNECTION_STRING environment variable.

    NOTE: loguru formats messages with ``str.format``-style ``{}``
    placeholders, not printf-style ``%s`` — all log calls here use ``{}``.
    """

    # May still be None when neither the constructor argument nor the
    # environment variable is set; _get_client() raises in that case.
    connection_string: Optional[str] = None

    # Azure limits a single table transaction to 100 operations.
    _BATCH_LIMIT = 100

    def __init__(self, connection_string: Optional[str] = None) -> None:
        # Constructor injection preferred; fall back to env only if not provided.
        self.connection_string = connection_string or os.getenv("AZURE_STORAGE_CONNECTION_STRING")

    def _get_client(self) -> "TableServiceClient":
        """Create a TableServiceClient from the configured connection string.

        Raises:
            RuntimeError: If no connection string is configured or the
                azure-data-tables package is not installed.
        """
        if not self.connection_string:
            raise RuntimeError("AZURE_STORAGE_CONNECTION_STRING not configured")
        if TableServiceClient is None:
            raise RuntimeError("azure-data-tables not installed; install to use table operations")
        # Tolerate values pasted with surrounding whitespace or quotes.
        clean = self.connection_string.strip().strip('"').strip("'")
        return TableServiceClient.from_connection_string(conn_str=clean)

    def ensure_table(self, table_name: str) -> None:
        """Create the table if it does not already exist (best effort).

        Creation races and transient errors are logged, not raised.
        """
        client = self._get_client()
        try:
            client.create_table_if_not_exists(table_name=table_name)
        except Exception as e:
            logger.warning("ensure_table({}) warning: {}", table_name, e)

    def list_tables(self) -> List[str]:
        """List all tables in the storage account.

        Returns:
            List of table names; empty list on failure.
        """
        client = self._get_client()
        try:
            tables = [table.name for table in client.list_tables()]
            logger.info("Listed {} tables", len(tables))
            return tables
        except Exception as exc:
            logger.error("Failed to list tables: {}", exc)
            return []

    def delete_table(self, table_name: str) -> bool:
        """Delete a table.

        Args:
            table_name: Name of the table to delete

        Returns:
            True if deleted successfully, False otherwise
        """
        client = self._get_client()
        try:
            client.delete_table(table_name=table_name)
            logger.info("Deleted table: {}", table_name)
            return True
        except Exception as exc:
            logger.error("Failed to delete table {}: {}", table_name, exc)
            return False

    def table_exists(self, table_name: str) -> bool:
        """Check if a table exists.

        Args:
            table_name: Name of the table

        Returns:
            True if table exists, False otherwise
        """
        return table_name in self.list_tables()

    def put_entity(self, table_name: str, entity: Dict[str, Any]) -> Dict[str, Any]:
        """Insert a new entity; fails if the (PK, RK) pair already exists.

        Returns:
            Dict with the entity's etag (if available) and a UTC timestamp.
        """
        client = self._get_client()
        table = client.get_table_client(table_name)
        res = table.create_entity(entity=entity)
        logger.info(
            "Inserted entity into {}: PK={} RK={}",
            table_name,
            entity.get("PartitionKey"),
            entity.get("RowKey"),
        )
        return {"etag": getattr(res, "etag", None), "ts": _now_iso()}

    def upsert_entity(self, table_name: str, entity: Dict[str, Any]) -> Dict[str, Any]:
        """Insert-or-merge an entity (existing properties are preserved).

        Returns:
            Dict with the entity's etag (if available) and a UTC timestamp.
        """
        client = self._get_client()
        table = client.get_table_client(table_name)
        res = table.upsert_entity(entity=entity, mode="merge")
        logger.info(
            "Upserted entity into {}: PK={} RK={}",
            table_name,
            entity.get("PartitionKey"),
            entity.get("RowKey"),
        )
        return {"etag": getattr(res, "etag", None), "ts": _now_iso()}

    def delete_entity(self, table_name: str, partition_key: str, row_key: str) -> bool:
        """Delete a single entity by its keys.

        Returns:
            True if deleted successfully, False otherwise.
        """
        client = self._get_client()
        table = client.get_table_client(table_name)
        try:
            table.delete_entity(partition_key=partition_key, row_key=row_key)
            logger.info("Deleted entity in {} ({}/{})", table_name, partition_key, row_key)
            return True
        except Exception as exc:
            logger.error("Failed to delete entity in {}: {}", table_name, exc)
            return False

    def get_entity(
        self, table_name: str, partition_key: str, row_key: str
    ) -> Optional[Dict[str, Any]]:
        """Retrieve a single entity by partition and row key.

        Args:
            table_name: Name of the table
            partition_key: Partition key of the entity
            row_key: Row key of the entity

        Returns:
            Entity dict if found, None otherwise
        """
        client = self._get_client()
        table = client.get_table_client(table_name)
        try:
            entity = table.get_entity(partition_key=partition_key, row_key=row_key)
            logger.info("Retrieved entity from {}: PK={} RK={}", table_name, partition_key, row_key)
            return dict(entity)
        except Exception as exc:
            logger.warning(
                "Entity not found in {} ({}/{}): {}", table_name, partition_key, row_key, exc
            )
            return None

    def entity_exists(self, table_name: str, partition_key: str, row_key: str) -> bool:
        """Check if an entity exists.

        NOTE: this retrieves the full entity under the hood; the payload is
        discarded.

        Returns:
            True if entity exists, False otherwise
        """
        return self.get_entity(table_name, partition_key, row_key) is not None

    @staticmethod
    def _group_by_partition(entities: List[Dict[str, Any]]) -> Dict[str, List[Dict[str, Any]]]:
        """Group entities by PartitionKey; entities without one are logged and skipped."""
        grouped: Dict[str, List[Dict[str, Any]]] = defaultdict(list)
        for entity in entities:
            pk = entity.get("PartitionKey")
            if not pk:
                logger.error("Entity missing PartitionKey, skipping")
                continue
            grouped[pk].append(entity)
        return grouped

    def _run_batches(
        self,
        table: Any,
        table_name: str,
        by_partition: Dict[str, List[Any]],
        build_ops: Any,
        verb: str,
        verb_past: str,
    ) -> Dict[str, Any]:
        """Submit per-partition transactions in chunks of _BATCH_LIMIT.

        Args:
            table: TableClient to submit against.
            table_name: Table name (for logging only).
            by_partition: Items grouped by PartitionKey.
            build_ops: Callable turning a chunk of items into transaction ops.
            verb: Operation name used in error messages ("insert", ...).
            verb_past: Past tense for success log lines ("inserted", ...).

        Returns:
            Dict with success count, error messages, and a UTC timestamp.
        """
        success_count = 0
        errors: List[str] = []
        for partition_key, items in by_partition.items():
            for i in range(0, len(items), self._BATCH_LIMIT):
                chunk = items[i : i + self._BATCH_LIMIT]
                try:
                    table.submit_transaction(build_ops(chunk))
                    success_count += len(chunk)
                    logger.info(
                        "Batch {} {} entities in {} (PK={})",
                        verb_past,
                        len(chunk),
                        table_name,
                        partition_key,
                    )
                except Exception as exc:
                    error_msg = f"Batch {verb} failed for PK={partition_key}: {exc}"
                    logger.error(error_msg)
                    errors.append(error_msg)
        return {"success": success_count, "errors": errors, "ts": _now_iso()}

    def batch_insert_entities(
        self, table_name: str, entities: List[Dict[str, Any]]
    ) -> Dict[str, Any]:
        """Insert multiple entities via table transactions.

        Entities are grouped by PartitionKey (a transaction spans exactly one
        partition) and submitted in chunks of 100 (the Azure limit).

        Args:
            table_name: Name of the table
            entities: List of entity dictionaries to insert

        Returns:
            Dict with count of successful operations and any errors
        """
        if not entities:
            return {"success": 0, "errors": []}
        client = self._get_client()
        table = client.get_table_client(table_name)
        return self._run_batches(
            table,
            table_name,
            self._group_by_partition(entities),
            lambda chunk: [("create", entity) for entity in chunk],
            "insert",
            "inserted",
        )

    def batch_upsert_entities(
        self, table_name: str, entities: List[Dict[str, Any]]
    ) -> Dict[str, Any]:
        """Upsert (insert-or-merge) multiple entities via table transactions.

        Entities are grouped by PartitionKey and submitted in chunks of 100.

        Args:
            table_name: Name of the table
            entities: List of entity dictionaries to upsert

        Returns:
            Dict with count of successful operations and any errors
        """
        if not entities:
            return {"success": 0, "errors": []}
        client = self._get_client()
        table = client.get_table_client(table_name)
        return self._run_batches(
            table,
            table_name,
            self._group_by_partition(entities),
            lambda chunk: [("upsert", entity, {"mode": "merge"}) for entity in chunk],
            "upsert",
            "upserted",
        )

    def batch_delete_entities(self, table_name: str, keys: List[tuple[str, str]]) -> Dict[str, Any]:
        """Delete multiple entities via table transactions.

        Keys are grouped by PartitionKey and submitted in chunks of 100.

        Args:
            table_name: Name of the table
            keys: List of (partition_key, row_key) tuples

        Returns:
            Dict with count of successful operations and any errors
        """
        if not keys:
            return {"success": 0, "errors": []}
        client = self._get_client()
        table = client.get_table_client(table_name)
        by_partition: Dict[str, List[tuple[str, str]]] = defaultdict(list)
        for pk, rk in keys:
            by_partition[pk].append((pk, rk))
        return self._run_batches(
            table,
            table_name,
            by_partition,
            lambda chunk: [("delete", {"PartitionKey": pk, "RowKey": rk}) for pk, rk in chunk],
            "delete",
            "deleted",
        )

    def query(self, table_name: str, filter_query: str) -> Iterable[Dict[str, Any]]:
        """Query entities with a filter.

        Args:
            table_name: Name of the table
            filter_query: OData filter query string

        Yields:
            Entity dictionaries matching the filter
        """
        client = self._get_client()
        table = client.get_table_client(table_name)
        # Pass the filter positionally: the SDK parameter was renamed from
        # `filter` to `query_filter`, so a positional arg works on all versions.
        for entity in table.query_entities(filter_query):
            yield dict(entity)

    def query_with_options(
        self,
        table_name: str,
        filter_query: Optional[str] = None,
        select: Optional[List[str]] = None,
        top: Optional[int] = None,
    ) -> Iterable[Dict[str, Any]]:
        """Query entities with advanced options.

        Args:
            table_name: Name of the table
            filter_query: Optional OData filter query string
            select: Optional list of property names to return (projection)
            top: Optional maximum number of entities to return

        Yields:
            Entity dictionaries matching the criteria (at most ``top`` of them)
        """
        client = self._get_client()
        table = client.get_table_client(table_name)

        kwargs: Dict[str, Any] = {}
        if select:
            kwargs["select"] = select
        if top:
            # results_per_page only sizes service pages; the islice below is
            # what actually caps the number of yielded entities.
            kwargs["results_per_page"] = top

        if filter_query:
            pages = table.query_entities(filter_query, **kwargs)
        else:
            # query_entities requires a filter; list_entities enumerates all.
            pages = table.list_entities(**kwargs)

        results: Iterable[Dict[str, Any]] = (dict(entity) for entity in pages)
        if top:
            results = islice(results, top)
        for item in results:
            yield item

    def query_all(
        self,
        table_name: str,
        filter_query: Optional[str] = None,
        select: Optional[List[str]] = None,
    ) -> List[Dict[str, Any]]:
        """Query all entities and return as a list.

        Warning: This loads all results into memory. Use query() for large result sets.

        Args:
            table_name: Name of the table
            filter_query: Optional OData filter query string
            select: Optional list of property names to return

        Returns:
            List of entity dictionaries
        """
        return list(self.query_with_options(table_name, filter_query, select))

    def count_entities(self, table_name: str, filter_query: Optional[str] = None) -> int:
        """Count entities matching a filter.

        Args:
            table_name: Name of the table
            filter_query: Optional OData filter query string

        Returns:
            Count of matching entities
        """
        # Project a single property to minimize transferred payload; only the
        # number of entities matters here.
        return sum(
            1 for _ in self.query_with_options(table_name, filter_query, select=["PartitionKey"])
        )
|
|
@@ -1,82 +0,0 @@
|
|
|
1
|
-
from __future__ import annotations
|
|
2
|
-
|
|
3
|
-
import os
|
|
4
|
-
from dataclasses import dataclass
|
|
5
|
-
from datetime import datetime, timezone
|
|
6
|
-
from typing import Any, Dict, Iterable, Optional
|
|
7
|
-
|
|
8
|
-
from loguru import logger
|
|
9
|
-
|
|
10
|
-
try:
|
|
11
|
-
from azure.data.tables import TableServiceClient
|
|
12
|
-
except Exception: # pragma: no cover
|
|
13
|
-
TableServiceClient = None # type: ignore
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
def _now_iso() -> str:
|
|
17
|
-
return datetime.now(timezone.utc).isoformat()
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
@dataclass
|
|
21
|
-
class AzureTableService:
|
|
22
|
-
connection_string: Optional[str] = None
|
|
23
|
-
|
|
24
|
-
def __init__(self, connection_string: Optional[str] = None) -> None:
|
|
25
|
-
# Constructor injection preferred; fallback to env only if not provided
|
|
26
|
-
self.connection_string = connection_string or os.getenv("AZURE_STORAGE_CONNECTION_STRING")
|
|
27
|
-
|
|
28
|
-
def _get_client(self) -> "TableServiceClient":
|
|
29
|
-
if not self.connection_string:
|
|
30
|
-
raise RuntimeError("AZURE_STORAGE_CONNECTION_STRING not configured")
|
|
31
|
-
if TableServiceClient is None:
|
|
32
|
-
raise RuntimeError("azure-data-tables not installed; install to use table operations")
|
|
33
|
-
clean = self.connection_string.strip().strip('"').strip("'")
|
|
34
|
-
return TableServiceClient.from_connection_string(conn_str=clean)
|
|
35
|
-
|
|
36
|
-
def ensure_table(self, table_name: str) -> None:
|
|
37
|
-
client = self._get_client()
|
|
38
|
-
try:
|
|
39
|
-
client.create_table_if_not_exists(table_name=table_name)
|
|
40
|
-
except Exception as e:
|
|
41
|
-
logger.warning("ensure_table(%s) warning: %s", table_name, e)
|
|
42
|
-
|
|
43
|
-
def put_entity(self, table_name: str, entity: Dict[str, Any]) -> Dict[str, Any]:
|
|
44
|
-
client = self._get_client()
|
|
45
|
-
table = client.get_table_client(table_name)
|
|
46
|
-
res = table.create_entity(entity=entity)
|
|
47
|
-
logger.info(
|
|
48
|
-
"Inserted entity into %s: PK=%s RK=%s",
|
|
49
|
-
table_name,
|
|
50
|
-
entity.get("PartitionKey"),
|
|
51
|
-
entity.get("RowKey"),
|
|
52
|
-
)
|
|
53
|
-
return {"etag": getattr(res, "etag", None), "ts": _now_iso()}
|
|
54
|
-
|
|
55
|
-
def upsert_entity(self, table_name: str, entity: Dict[str, Any]) -> Dict[str, Any]:
|
|
56
|
-
client = self._get_client()
|
|
57
|
-
table = client.get_table_client(table_name)
|
|
58
|
-
res = table.upsert_entity(entity=entity, mode="merge")
|
|
59
|
-
logger.info(
|
|
60
|
-
"Upserted entity into %s: PK=%s RK=%s",
|
|
61
|
-
table_name,
|
|
62
|
-
entity.get("PartitionKey"),
|
|
63
|
-
entity.get("RowKey"),
|
|
64
|
-
)
|
|
65
|
-
return {"etag": getattr(res, "etag", None), "ts": _now_iso()}
|
|
66
|
-
|
|
67
|
-
def delete_entity(self, table_name: str, partition_key: str, row_key: str) -> bool:
|
|
68
|
-
client = self._get_client()
|
|
69
|
-
table = client.get_table_client(table_name)
|
|
70
|
-
try:
|
|
71
|
-
table.delete_entity(partition_key=partition_key, row_key=row_key)
|
|
72
|
-
logger.info("Deleted entity in %s (%s/%s)", table_name, partition_key, row_key)
|
|
73
|
-
return True
|
|
74
|
-
except Exception as exc:
|
|
75
|
-
logger.error("Failed to delete entity in %s: %s", table_name, exc)
|
|
76
|
-
return False
|
|
77
|
-
|
|
78
|
-
def query(self, table_name: str, filter_query: str) -> Iterable[Dict[str, Any]]:
|
|
79
|
-
client = self._get_client()
|
|
80
|
-
table = client.get_table_client(table_name)
|
|
81
|
-
for entity in table.query_entities(filter=filter_query):
|
|
82
|
-
yield dict(entity)
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|