peak-sdk 1.17.0__py3-none-any.whl → 1.18.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- peak/_version.py +1 -1
- peak/cli/cli.py +2 -1
- peak/cli/helpers.py +1 -0
- peak/cli/resources/cache.py +452 -0
- peak/resources/__init__.py +2 -1
- peak/resources/cache.py +650 -0
- {peak_sdk-1.17.0.dist-info → peak_sdk-1.18.0.dist-info}/METADATA +4 -3
- {peak_sdk-1.17.0.dist-info → peak_sdk-1.18.0.dist-info}/RECORD +11 -9
- {peak_sdk-1.17.0.dist-info → peak_sdk-1.18.0.dist-info}/LICENSE +0 -0
- {peak_sdk-1.17.0.dist-info → peak_sdk-1.18.0.dist-info}/WHEEL +0 -0
- {peak_sdk-1.17.0.dist-info → peak_sdk-1.18.0.dist-info}/entry_points.txt +0 -0
peak/_version.py
CHANGED
peak/cli/cli.py
CHANGED
@@ -26,7 +26,7 @@ import typer
|
|
26
26
|
from peak.cli import args, helpers
|
27
27
|
from peak.cli.metrics import metrics
|
28
28
|
from peak.cli.press import apps, blocks, deployments, specs
|
29
|
-
from peak.cli.resources import alerts, artifacts, images, services, tenants, users, webapps, workflows
|
29
|
+
from peak.cli.resources import alerts, artifacts, cache, images, services, tenants, users, webapps, workflows
|
30
30
|
from peak.constants import Sources
|
31
31
|
from peak.output import Writer
|
32
32
|
|
@@ -38,6 +38,7 @@ typer_app.add_typer(images.app, name="images")
|
|
38
38
|
typer_app.add_typer(metrics.app, name="metrics")
|
39
39
|
typer_app.add_typer(alerts.app, name="alerts")
|
40
40
|
typer_app.add_typer(artifacts.app, name="artifacts")
|
41
|
+
typer_app.add_typer(cache.app, name="cache")
|
41
42
|
typer_app.add_typer(workflows.app, name="workflows")
|
42
43
|
typer_app.add_typer(webapps.app, name="webapps")
|
43
44
|
typer_app.add_typer(services.app, name="services")
|
peak/cli/helpers.py
CHANGED
@@ -251,6 +251,7 @@ def get_client(command: str) -> base_client.BaseClient:
|
|
251
251
|
"specs": press.specs,
|
252
252
|
"deployments": press.deployments,
|
253
253
|
"artifacts": resources.artifacts,
|
254
|
+
"cache": resources.cache,
|
254
255
|
"images": resources.images,
|
255
256
|
"workflows": resources.workflows,
|
256
257
|
"services": resources.services,
|
@@ -0,0 +1,452 @@
|
|
1
|
+
#
|
2
|
+
# # Copyright © 2025 Peak AI Limited. or its affiliates. All Rights Reserved.
|
3
|
+
# #
|
4
|
+
# # Licensed under the Apache License, Version 2.0 (the "License"). You
|
5
|
+
# # may not use this file except in compliance with the License. A copy of
|
6
|
+
# # the License is located at:
|
7
|
+
# #
|
8
|
+
# # https://github.com/PeakBI/peak-sdk/blob/main/LICENSE
|
9
|
+
# #
|
10
|
+
# # or in the "license" file accompanying this file. This file is
|
11
|
+
# # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
12
|
+
# # ANY KIND, either express or implied. See the License for the specific
|
13
|
+
# # language governing permissions and limitations under the License.
|
14
|
+
# #
|
15
|
+
# # This file is part of the peak-sdk.
|
16
|
+
# # see (https://github.com/PeakBI/peak-sdk)
|
17
|
+
# #
|
18
|
+
# # You should have received a copy of the APACHE LICENSE, VERSION 2.0
|
19
|
+
# # along with this program. If not, see <https://apache.org/licenses/LICENSE-2.0>
|
20
|
+
#
|
21
|
+
"""Peak Cache commands."""
|
22
|
+
import json
|
23
|
+
from typing import Any, Dict, Optional
|
24
|
+
|
25
|
+
import typer
|
26
|
+
from peak.cli.args import OUTPUT_TYPES, PAGING
|
27
|
+
from peak.constants import OutputTypes, OutputTypesNoTable
|
28
|
+
from peak.output import Writer
|
29
|
+
from peak.resources.cache import CacheClient
|
30
|
+
|
31
|
+
# Typer sub-application mounted as the `peak cache` command group.
app = typer.Typer(
    help="Cache operations for storing and retrieving data.",
    short_help="Manage Cache Operations.",
)

# Shared option declarations reused across the cache commands below.
_KEY = typer.Option(..., help="The cache key to operate on.")
_VALUE = typer.Option(..., help="The value to store in the cache.")
_TTL = typer.Option(None, help="Time to live in seconds for the cache entry.")
_DEFAULT = typer.Option(None, help="Default value to return if key doesn't exist.")
_KEYS = typer.Option(..., help="Comma-separated list of keys to operate on.")
_MAPPING = typer.Option(..., help="JSON mapping of key-value pairs to store.")
_PATTERN = typer.Option(..., help="Pattern to match keys for deletion.")
_DEBUG = typer.Option(False, help="Enable debug logging.")
_PREFIX = typer.Option(None, help="Additional prefix for cache keys.")
|
45
|
+
|
46
|
+
|
47
|
+
def _parse_json_mapping(mapping: str) -> Dict[str, Any]:
|
48
|
+
"""Parse and validate JSON mapping for mset command.
|
49
|
+
|
50
|
+
Args:
|
51
|
+
mapping: JSON string to parse
|
52
|
+
|
53
|
+
Returns:
|
54
|
+
Parsed dictionary
|
55
|
+
|
56
|
+
Raises:
|
57
|
+
typer.BadParameter: If mapping is invalid
|
58
|
+
TypeError: If mapping is not a JSON object
|
59
|
+
"""
|
60
|
+
parsed_mapping = json.loads(mapping)
|
61
|
+
if not isinstance(parsed_mapping, dict):
|
62
|
+
msg = "Mapping must be a JSON object"
|
63
|
+
raise TypeError(msg)
|
64
|
+
return parsed_mapping
|
65
|
+
|
66
|
+
|
67
|
+
@app.command("set", short_help="Store a value in the cache.")
def set_value(
    ctx: typer.Context,
    key: str = _KEY,
    value: str = _VALUE,
    ttl: Optional[int] = _TTL,
    _debug: bool = _DEBUG,
    prefix: Optional[str] = _PREFIX,
    _paging: Optional[bool] = PAGING,
    _output_type: Optional[OutputTypesNoTable] = OUTPUT_TYPES,
) -> None:
    """Store a value in the cache with an optional TTL.

    \b
    📝 ***Example usage:***
    ```bash
    peak cache set --key "user:123" --value "John Doe"
    peak cache set --key "config" --value '{"timeout": 30}' --ttl 3600
    ```

    \b
    🆗 ***Response:***
    True if the value was stored successfully, False otherwise.
    """
    cache_client: CacheClient = ctx.obj["client"]
    out: Writer = ctx.obj["writer"]

    if prefix:
        cache_client.set_additional_prefix(prefix)

    # Accept JSON values; anything that fails to parse is stored verbatim.
    try:
        payload = json.loads(value)
    except json.JSONDecodeError:
        payload = value

    with out.pager():
        out.write(cache_client.set(key, payload, ttl=ttl), output_type=OutputTypes.json)
|
105
|
+
|
106
|
+
|
107
|
+
@app.command(short_help="Retrieve a value from the cache.")
def get(
    ctx: typer.Context,
    key: str = _KEY,
    default: Optional[str] = _DEFAULT,
    _debug: bool = _DEBUG,
    prefix: Optional[str] = _PREFIX,
    _paging: Optional[bool] = PAGING,
    _output_type: Optional[OutputTypesNoTable] = OUTPUT_TYPES,
) -> None:
    """Retrieve a value from the cache.

    \b
    📝 ***Example usage:***
    ```bash
    peak cache get --key "user:123"
    peak cache get --key "missing" --default "not found"
    ```

    \b
    🆗 ***Response:***
    The cached value or the default value if the key doesn't exist.
    """
    cache_client: CacheClient = ctx.obj["client"]
    out: Writer = ctx.obj["writer"]

    if prefix:
        cache_client.set_additional_prefix(prefix)

    # Interpret the fallback as JSON when possible, else keep the raw string.
    fallback = None
    if default is not None:
        try:
            fallback = json.loads(default)
        except json.JSONDecodeError:
            fallback = default

    with out.pager():
        out.write(cache_client.get(key, default=fallback), output_type=OutputTypes.json)
|
146
|
+
|
147
|
+
|
148
|
+
@app.command(short_help="Retrieve multiple values from the cache.")
def mget(
    ctx: typer.Context,
    keys: str = _KEYS,
    _debug: bool = _DEBUG,
    prefix: Optional[str] = _PREFIX,
    _paging: Optional[bool] = PAGING,
    _output_type: Optional[OutputTypesNoTable] = OUTPUT_TYPES,
) -> None:
    """Retrieve multiple values from the cache.

    \b
    📝 ***Example usage:***
    ```bash
    peak cache mget --keys "user:123,user:456,config"
    peak cache mget --keys "session:abc,session:def"
    ```

    \b
    🆗 ***Response:***
    List of values corresponding to the keys (null for non-existent keys).
    """
    cache_client: CacheClient = ctx.obj["client"]
    out: Writer = ctx.obj["writer"]

    if prefix:
        cache_client.set_additional_prefix(prefix)

    # Split the comma-separated option into individual, trimmed keys.
    wanted = [item.strip() for item in keys.split(",")]

    with out.pager():
        out.write(cache_client.mget(*wanted), output_type=OutputTypes.json)
|
181
|
+
|
182
|
+
|
183
|
+
@app.command(short_help="Store multiple key-value pairs in the cache.")
def mset(
    ctx: typer.Context,
    mapping: str = _MAPPING,
    ttl: Optional[int] = _TTL,
    _debug: bool = _DEBUG,
    prefix: Optional[str] = _PREFIX,
    _paging: Optional[bool] = PAGING,
    _output_type: Optional[OutputTypesNoTable] = OUTPUT_TYPES,
) -> None:
    """Store multiple key-value pairs in the cache.

    \b
    📝 ***Example usage:***
    ```bash
    peak cache mset --mapping '{"user:123": "John", "user:456": "Jane"}'
    peak cache mset --mapping '{"config:timeout": 30, "config:retries": 3}' --ttl 3600
    ```

    \b
    🆗 ***Response:***
    True if all values were stored successfully, False otherwise.
    """
    cache_client: CacheClient = ctx.obj["client"]
    out: Writer = ctx.obj["writer"]

    if prefix:
        cache_client.set_additional_prefix(prefix)

    # Surface malformed input as a CLI usage error rather than a traceback.
    try:
        pairs = _parse_json_mapping(mapping)
    except (json.JSONDecodeError, ValueError, TypeError) as e:
        msg = f"Invalid JSON mapping: {e}"
        raise typer.BadParameter(msg) from e

    with out.pager():
        out.write(cache_client.mset(pairs, ttl=ttl), output_type=OutputTypes.json)
|
221
|
+
|
222
|
+
|
223
|
+
@app.command(short_help="Delete one or more keys from the cache.")
def delete(
    ctx: typer.Context,
    keys: str = _KEYS,
    _debug: bool = _DEBUG,
    prefix: Optional[str] = _PREFIX,
    _paging: Optional[bool] = PAGING,
    _output_type: Optional[OutputTypesNoTable] = OUTPUT_TYPES,
) -> None:
    """Delete one or more keys from the cache.

    \b
    📝 ***Example usage:***
    ```bash
    peak cache delete --keys "user:123"
    peak cache delete --keys "user:123,user:456,config"
    ```

    \b
    🆗 ***Response:***
    Number of keys that were deleted.
    """
    cache_client: CacheClient = ctx.obj["client"]
    out: Writer = ctx.obj["writer"]

    if prefix:
        cache_client.set_additional_prefix(prefix)

    # Split the comma-separated option into individual, trimmed keys.
    targets = [item.strip() for item in keys.split(",")]

    with out.pager():
        out.write(cache_client.delete(*targets), output_type=OutputTypes.json)
|
256
|
+
|
257
|
+
|
258
|
+
@app.command(short_help="Check if one or more keys exist in the cache.")
def exists(
    ctx: typer.Context,
    keys: str = _KEYS,
    _debug: bool = _DEBUG,
    prefix: Optional[str] = _PREFIX,
    _paging: Optional[bool] = PAGING,
    _output_type: Optional[OutputTypesNoTable] = OUTPUT_TYPES,
) -> None:
    """Check if one or more keys exist in the cache.

    \b
    📝 ***Example usage:***
    ```bash
    peak cache exists --keys "user:123"
    peak cache exists --keys "user:123,user:456"
    ```

    \b
    🆗 ***Response:***
    Number of keys that exist in the cache.
    """
    cache_client: CacheClient = ctx.obj["client"]
    out: Writer = ctx.obj["writer"]

    if prefix:
        cache_client.set_additional_prefix(prefix)

    # Split the comma-separated option into individual, trimmed keys.
    candidates = [item.strip() for item in keys.split(",")]

    with out.pager():
        out.write(cache_client.exists(*candidates), output_type=OutputTypes.json)
|
291
|
+
|
292
|
+
|
293
|
+
@app.command(short_help="Set expiration time for a key.")
def expire(
    ctx: typer.Context,
    key: str = _KEY,
    ttl: int = typer.Option(..., help="Time to live in seconds."),
    _debug: bool = _DEBUG,
    prefix: Optional[str] = _PREFIX,
    _paging: Optional[bool] = PAGING,
    _output_type: Optional[OutputTypesNoTable] = OUTPUT_TYPES,
) -> None:
    """Set expiration time for a key.

    \b
    📝 ***Example usage:***
    ```bash
    peak cache expire --key "user:123" --ttl 3600
    peak cache expire --key "session:abc" --ttl 1800
    ```

    \b
    🆗 ***Response:***
    True if the expiration was set, False if the key doesn't exist.
    """
    cache_client: CacheClient = ctx.obj["client"]
    out: Writer = ctx.obj["writer"]

    if prefix:
        cache_client.set_additional_prefix(prefix)

    with out.pager():
        out.write(cache_client.expire(key, ttl), output_type=OutputTypes.json)
|
325
|
+
|
326
|
+
|
327
|
+
@app.command(short_help="Get the remaining time to live for a key.")
def ttl(
    ctx: typer.Context,
    key: str = _KEY,
    _debug: bool = _DEBUG,
    prefix: Optional[str] = _PREFIX,
    _paging: Optional[bool] = PAGING,
    _output_type: Optional[OutputTypesNoTable] = OUTPUT_TYPES,
) -> None:
    """Get the remaining time to live for a key.

    \b
    📝 ***Example usage:***
    ```bash
    peak cache ttl --key "user:123"
    peak cache ttl --key "session:abc"
    ```

    \b
    🆗 ***Response:***
    Remaining TTL in seconds (-1 if no expiration, -2 if key doesn't exist).
    """
    cache_client: CacheClient = ctx.obj["client"]
    out: Writer = ctx.obj["writer"]

    if prefix:
        cache_client.set_additional_prefix(prefix)

    with out.pager():
        out.write(cache_client.ttl(key), output_type=OutputTypes.json)
|
358
|
+
|
359
|
+
|
360
|
+
@app.command(short_help="Test cache connection.")
def ping(
    ctx: typer.Context,
    _debug: bool = _DEBUG,
    prefix: Optional[str] = _PREFIX,
    _paging: Optional[bool] = PAGING,
    _output_type: Optional[OutputTypesNoTable] = OUTPUT_TYPES,
) -> None:
    """Test cache connection.

    \b
    📝 ***Example usage:***
    ```bash
    peak cache ping
    ```

    \b
    🆗 ***Response:***
    True if the connection is successful, False otherwise.
    """
    cache_client: CacheClient = ctx.obj["client"]
    out: Writer = ctx.obj["writer"]

    if prefix:
        cache_client.set_additional_prefix(prefix)

    with out.pager():
        out.write(cache_client.ping(), output_type=OutputTypes.json)
|
389
|
+
|
390
|
+
|
391
|
+
@app.command(short_help="Delete all keys matching a pattern.")
def flush_pattern(
    ctx: typer.Context,
    pattern: str = _PATTERN,
    _debug: bool = _DEBUG,
    prefix: Optional[str] = _PREFIX,
    _paging: Optional[bool] = PAGING,
    _output_type: Optional[OutputTypesNoTable] = OUTPUT_TYPES,
) -> None:
    """Delete all keys matching a pattern within the tenant namespace.

    \b
    📝 ***Example usage:***
    ```bash
    peak cache flush-pattern --pattern "user:*"
    peak cache flush-pattern --pattern "session:*"
    ```

    \b
    🆗 ***Response:***
    Number of keys that were deleted.
    """
    cache_client: CacheClient = ctx.obj["client"]
    out: Writer = ctx.obj["writer"]

    if prefix:
        cache_client.set_additional_prefix(prefix)

    with out.pager():
        out.write(cache_client.flush_by_pattern(pattern), output_type=OutputTypes.json)
|
422
|
+
|
423
|
+
|
424
|
+
@app.command(short_help="Delete all keys for the current tenant.")
def flush_tenant(
    ctx: typer.Context,
    _debug: bool = _DEBUG,
    prefix: Optional[str] = _PREFIX,
    _paging: Optional[bool] = PAGING,
    _output_type: Optional[OutputTypesNoTable] = OUTPUT_TYPES,
) -> None:
    """Delete all keys for the current tenant.

    \b
    📝 ***Example usage:***
    ```bash
    peak cache flush-tenant
    ```

    \b
    🆗 ***Response:***
    Number of keys that were deleted.
    """
    cache_client: CacheClient = ctx.obj["client"]
    out: Writer = ctx.obj["writer"]

    if prefix:
        cache_client.set_additional_prefix(prefix)

    with out.pager():
        out.write(cache_client.flush_tenant(), output_type=OutputTypes.json)
|
peak/resources/__init__.py
CHANGED
@@ -24,11 +24,12 @@ from __future__ import annotations
|
|
24
24
|
|
25
25
|
from typing import List
|
26
26
|
|
27
|
-
from peak.resources import alerts, artifacts, images, services, tenants, users, webapps, workflows
|
27
|
+
from peak.resources import alerts, artifacts, cache, images, services, tenants, users, webapps, workflows
|
28
28
|
|
29
29
|
__all__: List[str] = [
|
30
30
|
"alerts",
|
31
31
|
"artifacts",
|
32
|
+
"cache",
|
32
33
|
"images",
|
33
34
|
"services",
|
34
35
|
"tenants",
|
peak/resources/cache.py
ADDED
@@ -0,0 +1,650 @@
|
|
1
|
+
#
|
2
|
+
# # Copyright © 2025 Peak AI Limited. or its affiliates. All Rights Reserved.
|
3
|
+
# #
|
4
|
+
# # Licensed under the Apache License, Version 2.0 (the "License"). You
|
5
|
+
# # may not use this file except in compliance with the License. A copy of
|
6
|
+
# # the License is located at:
|
7
|
+
# #
|
8
|
+
# # https://github.com/PeakBI/peak-sdk/blob/main/LICENSE
|
9
|
+
# #
|
10
|
+
# # or in the "license" file accompanying this file. This file is
|
11
|
+
# # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
12
|
+
# # ANY KIND, either express or implied. See the License for the specific
|
13
|
+
# # language governing permissions and limitations under the License.
|
14
|
+
# #
|
15
|
+
# # This file is part of the peak-sdk.
|
16
|
+
# # see (https://github.com/PeakBI/peak-sdk)
|
17
|
+
# #
|
18
|
+
# # You should have received a copy of the APACHE LICENSE, VERSION 2.0
|
19
|
+
# # along with this program. If not, see <https://apache.org/licenses/LICENSE-2.0>
|
20
|
+
#
|
21
|
+
"""Peak Cache Client.
|
22
|
+
|
23
|
+
Provides caching functionality with tenant-based key prefixing.
|
24
|
+
Supports cache operations with JSON serialization and connection management.
|
25
|
+
"""
|
26
|
+
|
27
|
+
from __future__ import annotations
|
28
|
+
|
29
|
+
import json
|
30
|
+
import logging
|
31
|
+
from typing import Any, Dict, List, Optional, cast
|
32
|
+
from urllib.parse import quote_plus, unquote_plus, urlparse
|
33
|
+
|
34
|
+
import valkey
|
35
|
+
from valkey.exceptions import ConnectionError, TimeoutError, ValkeyError
|
36
|
+
|
37
|
+
from peak.base_client import BaseClient
|
38
|
+
from peak.constants import ContentType, HttpMethods
|
39
|
+
from peak.session import Session
|
40
|
+
|
41
|
+
logger = logging.getLogger(__name__)

# Plaintext and TLS ports used when the credentials response omits a port /
# when deciding whether to use a "rediss://" (TLS) URL.
DEFAULT_VALKEY_PORT = 6379
SECURE_VALKEY_PORT = 6380
|
45
|
+
|
46
|
+
|
47
|
+
class CacheError(Exception):
    """Base exception for cache operations."""


class CacheConnectionError(CacheError):
    """Exception for cache connection issues."""


# Helper functions so call sites can raise a pre-built error in a single
# statement (keeps bare `raise` statements out of long try bodies).
def _raise_connection_error(message: str) -> None:
    """Raise a CacheConnectionError with the given message."""
    raise CacheConnectionError(message)


def _raise_cache_error(message: str) -> None:
    """Raise a CacheError with the given message."""
    raise CacheError(message)
|
63
|
+
|
64
|
+
|
65
|
+
class CacheClient(BaseClient):
|
66
|
+
"""Peak Cache Client for caching operations.
|
67
|
+
|
68
|
+
Provides auto key prefixing based on tenant names to ensure
|
69
|
+
proper isolation and access control patterns.
|
70
|
+
|
71
|
+
Inherits from BaseClient to use the default session pattern.
|
72
|
+
"""
|
73
|
+
|
74
|
+
    def __init__(
        self,
        session: Optional[Session] = None,
        *,
        debug_logs: bool = True,
        additional_prefix: Optional[str] = None,
        connection_config: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Initialize cache client.

        Args:
            session: Peak session for authentication (optional)
            debug_logs: Enable or disable debug logging (default: True)
            additional_prefix: Additional prefix to add after tenant name (optional)
            connection_config: Custom connection configuration overrides (optional)
                Available options:
                - decode_responses: bool (default: True)
                - socket_timeout: float (default: 5.0)
                - socket_connect_timeout: float (default: 5.0)
                - retry_on_timeout: bool (default: True)
                - health_check_interval: int (default: 60)
                - max_connections: int (default: None)
                - retry_on_error: list (default: None)
                - socket_keepalive: bool (default: None)
                - socket_keepalive_options: dict (default: None)
        """
        super().__init__(session)
        # Connection state is resolved lazily: the valkey client, its
        # configuration, and the tenant name are all populated on first use.
        self._client: Optional[valkey.Valkey] = None
        self._connection_config: Optional[Dict[str, Any]] = None
        self._tenant_name: Optional[str] = None
        self._debug_logs = debug_logs
        self._additional_prefix = additional_prefix
        # Caller-supplied overrides; only a safe subset is honored when the
        # connection configuration is built.
        self._custom_connection_config = connection_config or {}
|
107
|
+
|
108
|
+
def _debug_log(self, message: str) -> None:
|
109
|
+
"""Log debug message if debug logging is enabled."""
|
110
|
+
if self._debug_logs:
|
111
|
+
logger.debug(message)
|
112
|
+
|
113
|
+
def _get_connection_config(self) -> Dict[str, Any]:
|
114
|
+
"""Get cache connection configuration from credentials endpoint."""
|
115
|
+
if self._connection_config is None:
|
116
|
+
try:
|
117
|
+
self._debug_log("Getting cache credentials...")
|
118
|
+
response = self.session.create_request(
|
119
|
+
endpoint="connections/api/v1/connections/valkey-credentials",
|
120
|
+
method=HttpMethods.GET,
|
121
|
+
content_type=ContentType.APPLICATION_JSON,
|
122
|
+
subdomain="service",
|
123
|
+
)
|
124
|
+
|
125
|
+
self._tenant_name = response.get("tenant")
|
126
|
+
if not self._tenant_name:
|
127
|
+
_raise_connection_error("Tenant information not found in cache credentials response")
|
128
|
+
|
129
|
+
engine = response.get("engine", "valkey")
|
130
|
+
|
131
|
+
url = response.get("url")
|
132
|
+
if not url:
|
133
|
+
host = response.get("host")
|
134
|
+
port = response.get("port", DEFAULT_VALKEY_PORT)
|
135
|
+
username = response.get("userId")
|
136
|
+
password = response.get("password")
|
137
|
+
|
138
|
+
if not all([host, username, password]):
|
139
|
+
_raise_connection_error("Missing required cache connection credentials")
|
140
|
+
|
141
|
+
encoded_username = quote_plus(username)
|
142
|
+
encoded_password = quote_plus(password)
|
143
|
+
use_ssl = port == SECURE_VALKEY_PORT or engine == "valkey"
|
144
|
+
scheme = "rediss" if use_ssl else "redis"
|
145
|
+
url = f"{scheme}://{encoded_username}:{encoded_password}@{host}:{port}"
|
146
|
+
|
147
|
+
parsed = urlparse(url)
|
148
|
+
|
149
|
+
decoded_username = unquote_plus(parsed.username) if parsed.username else None
|
150
|
+
decoded_password = unquote_plus(parsed.password) if parsed.password else None
|
151
|
+
|
152
|
+
if engine == "valkey":
|
153
|
+
decoded_username = self._tenant_name
|
154
|
+
|
155
|
+
self._validate_connection_config(
|
156
|
+
{
|
157
|
+
"host": parsed.hostname,
|
158
|
+
"port": parsed.port,
|
159
|
+
"username": decoded_username,
|
160
|
+
"password": decoded_password,
|
161
|
+
},
|
162
|
+
)
|
163
|
+
|
164
|
+
use_ssl = parsed.scheme == "rediss"
|
165
|
+
|
166
|
+
config = {
|
167
|
+
"host": parsed.hostname,
|
168
|
+
"port": parsed.port or DEFAULT_VALKEY_PORT,
|
169
|
+
"password": decoded_password,
|
170
|
+
"username": decoded_username,
|
171
|
+
"ssl": use_ssl,
|
172
|
+
"decode_responses": True,
|
173
|
+
"socket_timeout": 5.0,
|
174
|
+
"socket_connect_timeout": 5.0,
|
175
|
+
"retry_on_timeout": True,
|
176
|
+
"health_check_interval": 60,
|
177
|
+
}
|
178
|
+
|
179
|
+
# Merge custom configuration (only allow safe overrides)
|
180
|
+
safe_overrides = {
|
181
|
+
"decode_responses",
|
182
|
+
"socket_timeout",
|
183
|
+
"socket_connect_timeout",
|
184
|
+
"retry_on_timeout",
|
185
|
+
"health_check_interval",
|
186
|
+
"max_connections",
|
187
|
+
"retry_on_error",
|
188
|
+
"socket_keepalive",
|
189
|
+
"socket_keepalive_options",
|
190
|
+
}
|
191
|
+
|
192
|
+
for key, value in self._custom_connection_config.items():
|
193
|
+
if key in safe_overrides:
|
194
|
+
config[key] = value
|
195
|
+
self._debug_log(f"Cache config override: {key} = {value}")
|
196
|
+
else:
|
197
|
+
logger.warning("Ignoring unsafe connection config override: %s", key)
|
198
|
+
|
199
|
+
self._connection_config = config
|
200
|
+
|
201
|
+
logger.info("Cache configured for tenant: %s", self._tenant_name)
|
202
|
+
|
203
|
+
except Exception as e:
|
204
|
+
logger.exception("Failed to get cache credentials")
|
205
|
+
msg = f"Failed to get cache credentials: {e}"
|
206
|
+
raise CacheConnectionError(msg) from e
|
207
|
+
|
208
|
+
return self._connection_config
|
209
|
+
|
210
|
+
def _validate_connection_config(self, config: Dict[str, Any]) -> None:
|
211
|
+
"""Validate connection configuration."""
|
212
|
+
required_fields = ["host", "port", "password", "username"]
|
213
|
+
missing = [field for field in required_fields if not config.get(field)]
|
214
|
+
if missing:
|
215
|
+
_raise_connection_error(f"Missing required connection fields: {missing}")
|
216
|
+
|
217
|
+
def _get_client(self) -> valkey.Valkey:
|
218
|
+
"""Get or create cache client."""
|
219
|
+
if self._client is None:
|
220
|
+
try:
|
221
|
+
config = self._get_connection_config()
|
222
|
+
self._client = valkey.Valkey(**config)
|
223
|
+
self._debug_log("Cache client created successfully")
|
224
|
+
except Exception as e:
|
225
|
+
logger.exception("Failed to create cache client")
|
226
|
+
msg = f"Failed to create cache client: {e}"
|
227
|
+
raise CacheConnectionError(msg) from e
|
228
|
+
return self._client
|
229
|
+
|
230
|
+
def _prefix_key(self, key: str) -> str:
|
231
|
+
"""Add tenant prefix to key."""
|
232
|
+
if not self._tenant_name:
|
233
|
+
self._get_connection_config()
|
234
|
+
prefix = f"{self._tenant_name}:"
|
235
|
+
if self._additional_prefix:
|
236
|
+
prefix += f"{self._additional_prefix}:"
|
237
|
+
return f"{prefix}{key}"
|
238
|
+
|
239
|
+
def _prefix_keys(self, keys: List[str]) -> List[str]:
|
240
|
+
"""Add tenant prefix to multiple keys."""
|
241
|
+
return [self._prefix_key(key) for key in keys]
|
242
|
+
|
243
|
+
def set(self, key: str, value: Any, ttl: Optional[int] = None) -> bool:
|
244
|
+
"""Set a key-value pair in the cache.
|
245
|
+
|
246
|
+
Args:
|
247
|
+
key: The key to set
|
248
|
+
value: The value to set (will be JSON serialized if not string)
|
249
|
+
ttl: Time to live in seconds (optional)
|
250
|
+
|
251
|
+
Returns:
|
252
|
+
bool: True if successful, False otherwise
|
253
|
+
|
254
|
+
Raises:
|
255
|
+
CacheError: If the operation fails
|
256
|
+
"""
|
257
|
+
try:
|
258
|
+
client = self._get_client()
|
259
|
+
prefixed_key = self._prefix_key(key)
|
260
|
+
|
261
|
+
serialized_value = value if isinstance(value, str) else json.dumps(value)
|
262
|
+
|
263
|
+
result = client.set(prefixed_key, serialized_value, ex=ttl)
|
264
|
+
self._debug_log(f"Set key: {key} (prefixed: {prefixed_key})")
|
265
|
+
return bool(result)
|
266
|
+
|
267
|
+
except (ConnectionError, TimeoutError, ValkeyError) as e:
|
268
|
+
logger.exception("Cache set operation failed for key: %s", key)
|
269
|
+
msg = f"Failed to set cache key: {e}"
|
270
|
+
raise CacheError(msg) from e
|
271
|
+
|
272
|
+
def get(self, key: str, default: Any = None) -> Any:
|
273
|
+
"""Get a value from the cache.
|
274
|
+
|
275
|
+
Args:
|
276
|
+
key: The key to get
|
277
|
+
default: Default value if key doesn't exist
|
278
|
+
|
279
|
+
Returns:
|
280
|
+
Any: The value (JSON deserialized if applicable) or default
|
281
|
+
|
282
|
+
Raises:
|
283
|
+
CacheError: If the operation fails
|
284
|
+
"""
|
285
|
+
try:
|
286
|
+
client = self._get_client()
|
287
|
+
prefixed_key = self._prefix_key(key)
|
288
|
+
|
289
|
+
value = client.get(prefixed_key)
|
290
|
+
if value is None:
|
291
|
+
self._debug_log(f"Key not found: {key}")
|
292
|
+
return default
|
293
|
+
|
294
|
+
if isinstance(value, str):
|
295
|
+
value_str = value
|
296
|
+
else:
|
297
|
+
value_str = value.decode("utf-8") if isinstance(value, bytes) else str(value)
|
298
|
+
|
299
|
+
if value_str.startswith(("{", "[")):
|
300
|
+
try:
|
301
|
+
result = json.loads(value_str)
|
302
|
+
except json.JSONDecodeError:
|
303
|
+
pass
|
304
|
+
else:
|
305
|
+
self._debug_log(f"Got key: {key} (JSON deserialized)")
|
306
|
+
return result
|
307
|
+
|
308
|
+
except (ConnectionError, TimeoutError, ValkeyError) as e:
|
309
|
+
logger.exception("Cache get operation failed for key: %s", key)
|
310
|
+
msg = f"Failed to get cache key: {e}"
|
311
|
+
raise CacheError(msg) from e
|
312
|
+
else:
|
313
|
+
self._debug_log(f"Got key: {key} (as string)")
|
314
|
+
return value_str
|
315
|
+
|
316
|
+
def delete(self, *keys: str) -> int:
|
317
|
+
"""Delete one or more keys from the cache.
|
318
|
+
|
319
|
+
Args:
|
320
|
+
keys: Keys to delete
|
321
|
+
|
322
|
+
Returns:
|
323
|
+
int: Number of keys deleted
|
324
|
+
|
325
|
+
Raises:
|
326
|
+
CacheError: If the operation fails
|
327
|
+
"""
|
328
|
+
if not keys:
|
329
|
+
return 0
|
330
|
+
|
331
|
+
try:
|
332
|
+
client = self._get_client()
|
333
|
+
prefixed_keys = self._prefix_keys(list(keys))
|
334
|
+
|
335
|
+
result = client.delete(*prefixed_keys)
|
336
|
+
self._debug_log(f"Deleted {result} keys: {list(keys)}")
|
337
|
+
return int(cast(int, result))
|
338
|
+
|
339
|
+
except (ConnectionError, TimeoutError, ValkeyError) as e:
|
340
|
+
logger.exception("Cache delete operation failed for keys: %s", keys)
|
341
|
+
msg = f"Failed to delete cache keys: {e}"
|
342
|
+
raise CacheError(msg) from e
|
343
|
+
|
344
|
+
def exists(self, *keys: str) -> int:
    """Count how many of the given keys are present in the cache.

    Args:
        keys: Keys to check

    Returns:
        int: Number of keys that exist

    Raises:
        CacheError: If the operation fails
    """
    # No keys means nothing can exist; avoid hitting the server.
    if not keys:
        return 0

    try:
        connection = self._get_client()
        namespaced = self._prefix_keys(list(keys))

        found = connection.exists(*namespaced)
        self._debug_log(f"Checked existence of {len(keys)} keys, {found} exist")
        return int(cast(int, found))

    except (ConnectionError, TimeoutError, ValkeyError) as e:
        logger.exception("Cache exists operation failed for keys: %s", keys)
        msg = f"Failed to check cache key existence: {e}"
        raise CacheError(msg) from e
|
371
|
+
|
372
|
+
def expire(self, key: str, ttl: int) -> bool:
    """Set a time-to-live on an existing key.

    Args:
        key: The key to set expiration for
        ttl: Time to live in seconds

    Returns:
        bool: True if successful, False if key doesn't exist

    Raises:
        CacheError: If the operation fails
    """
    try:
        connection = self._get_client()
        namespaced = self._prefix_key(key)

        outcome = connection.expire(namespaced, ttl)
        # NOTE(review): this log line fires even when the key is missing
        # (outcome falsy) — confirm whether that is intended.
        self._debug_log(f"Set expiration for key: {key} (TTL: {ttl}s)")
        return bool(outcome)

    except (ConnectionError, TimeoutError, ValkeyError) as e:
        logger.exception("Cache expire operation failed for key: %s", key)
        msg = f"Failed to set cache key expiration: {e}"
        raise CacheError(msg) from e
|
397
|
+
|
398
|
+
def ttl(self, key: str) -> int:
    """Get the time to live for a key.

    Args:
        key: The key to check

    Returns:
        int: TTL in seconds (-1 if no expiration, -2 if key doesn't exist)

    Raises:
        CacheError: If the operation fails
    """
    try:
        connection = self._get_client()
        namespaced = self._prefix_key(key)

        remaining = connection.ttl(namespaced)
        self._debug_log(f"Got TTL for key: {key} (TTL: {remaining}s)")
        return int(cast(int, remaining))

    except (ConnectionError, TimeoutError, ValkeyError) as e:
        logger.exception("Cache TTL operation failed for key: %s", key)
        msg = f"Failed to get cache key TTL: {e}"
        raise CacheError(msg) from e
|
422
|
+
|
423
|
+
def mget(self, *keys: str) -> List[Any]:
    """Fetch several keys in a single round trip.

    Values that look like JSON objects or arrays are deserialized; anything
    else (including malformed JSON) is returned as a plain string. Missing
    keys yield ``None`` in the corresponding position.

    Args:
        keys: Keys to get

    Returns:
        List[Any]: List of values (None for missing keys)

    Raises:
        CacheError: If the operation fails
    """
    if not keys:
        return []

    try:
        connection = self._get_client()
        namespaced = self._prefix_keys(list(keys))

        fetched = connection.mget(namespaced)
        deserialized: List[Any] = []

        for raw in cast(List[Any], fetched):
            if raw is None:
                deserialized.append(None)
                continue

            # Normalize the stored value to a str before inspecting it.
            if isinstance(raw, str):
                text = raw
            elif isinstance(raw, bytes):
                text = raw.decode("utf-8")
            else:
                text = str(raw)

            decoded = False
            if text.startswith(("{", "[")):
                try:
                    deserialized.append(json.loads(text))
                    decoded = True
                except json.JSONDecodeError:
                    # Looked like JSON but wasn't; fall back to the raw string.
                    decoded = False

            if not decoded:
                deserialized.append(text)

    except (ConnectionError, TimeoutError, ValkeyError) as e:
        logger.exception("Cache mget operation failed for keys: %s", keys)
        msg = f"Failed to get multiple cache keys: {e}"
        raise CacheError(msg) from e
    else:
        self._debug_log(f"Got {len(keys)} keys via mget")
        return deserialized
|
472
|
+
|
473
|
+
def mset(self, mapping: Dict[str, Any], ttl: Optional[int] = None) -> bool:
    """Store several key-value pairs in one call.

    String values are stored as-is; any other value is JSON-serialized
    first. When ``ttl`` is given it is applied to every key afterwards —
    note that the set + expire sequence is not atomic.

    Args:
        mapping: Dictionary of key-value pairs to set
        ttl: Time to live in seconds (optional, applies to all keys)

    Returns:
        bool: True if successful

    Raises:
        CacheError: If the operation fails
    """
    # An empty mapping is trivially "set"; skip the server call.
    if not mapping:
        return True

    try:
        connection = self._get_client()
        payload = {
            self._prefix_key(name): (value if isinstance(value, str) else json.dumps(value))
            for name, value in mapping.items()
        }

        outcome = connection.mset(payload)

        if ttl is not None:
            for namespaced in payload:
                connection.expire(namespaced, ttl)

    except (ConnectionError, TimeoutError, ValkeyError) as e:
        logger.exception("Cache mset operation failed")
        msg = f"Failed to set multiple cache keys: {e}"
        raise CacheError(msg) from e
    else:
        self._debug_log(f"Set {len(mapping)} keys via mset")
        return bool(outcome)
|
511
|
+
|
512
|
+
def flush_tenant(self) -> int:
    """Delete every key in the current tenant's namespace.

    Returns:
        int: Number of keys deleted

    Raises:
        CacheError: If the operation fails
    """
    try:
        connection = self._get_client()
        # Lazily resolve the tenant name when it hasn't been populated yet
        # (presumably _get_connection_config sets it — TODO confirm).
        if not self._tenant_name:
            self._get_connection_config()

        # SCAN-based iteration avoids blocking the server the way KEYS would.
        matched = list(connection.scan_iter(match=f"{self._tenant_name}:*"))

        if not matched:
            self._debug_log("No keys found for tenant flush")
            return 0

        removed = connection.delete(*matched)
        self._debug_log(f"Flushed {removed} keys for tenant: {self._tenant_name}")
        return int(cast(int, removed))

    except (ConnectionError, TimeoutError, ValkeyError) as e:
        logger.exception("Cache flush_tenant operation failed")
        msg = f"Failed to flush tenant cache: {e}"
        raise CacheError(msg) from e
|
541
|
+
|
542
|
+
def flush_by_pattern(self, pattern: str) -> int:
    """Delete keys matching a pattern within the tenant namespace.

    Args:
        pattern: Pattern to match (will be prefixed with tenant name)

    Returns:
        int: Number of keys deleted

    Raises:
        CacheError: If the operation fails
    """
    try:
        connection = self._get_client()
        # Lazily resolve the tenant name when it hasn't been populated yet
        # (presumably _get_connection_config sets it — TODO confirm).
        if not self._tenant_name:
            self._get_connection_config()

        namespaced_pattern = self._prefix_key(pattern)
        # SCAN-based iteration avoids blocking the server the way KEYS would.
        matched = list(connection.scan_iter(match=namespaced_pattern))

        if not matched:
            self._debug_log(f"No keys found for pattern: {pattern}")
            return 0

        removed = connection.delete(*matched)
        self._debug_log(f"Flushed {removed} keys for pattern: {pattern}")
        return int(cast(int, removed))

    except (ConnectionError, TimeoutError, ValkeyError) as e:
        logger.exception("Cache flush_by_pattern operation failed for pattern: %s", pattern)
        msg = f"Failed to flush cache by pattern: {e}"
        raise CacheError(msg) from e
|
574
|
+
|
575
|
+
def set_additional_prefix(self, additional_prefix: Optional[str]) -> None:
    """Set additional prefix for cache keys.

    The prefix is stored on the instance for later use in key namespacing
    (presumably consumed by ``_prefix_key`` — TODO confirm against that helper).

    Args:
        additional_prefix: Additional prefix to add after tenant name; pass
            ``None`` to clear any previously configured prefix.
    """
    self._additional_prefix = additional_prefix
    self._debug_log(f"Set additional prefix: {additional_prefix}")
|
583
|
+
|
584
|
+
def get_additional_prefix(self) -> Optional[str]:
    """Get current additional prefix.

    Returns:
        Optional[str]: Current additional prefix, or ``None`` if no extra
            prefix has been configured.
    """
    return self._additional_prefix
|
591
|
+
|
592
|
+
def ping(self) -> bool:
    """Test the connection to the cache.

    Returns:
        bool: True if connection is successful

    Raises:
        CacheError: If the connection fails
    """
    try:
        alive = self._get_client().ping()
        self._debug_log("Cache ping successful")
        return bool(alive)

    except (ConnectionError, TimeoutError, ValkeyError) as e:
        logger.exception("Cache ping failed")
        msg = f"Cache connection test failed: {e}"
        raise CacheError(msg) from e
|
611
|
+
|
612
|
+
def close(self) -> None:
    """Close the cache connection."""
    # Idempotent: calling close() with no live connection is a no-op.
    if self._client is None:
        return
    try:
        self._client.close()  # type: ignore[no-untyped-call]
        self._debug_log("Cache connection closed")
    except (ConnectionError, TimeoutError, ValkeyError) as e:
        # Best-effort shutdown: a failed close is logged, never raised.
        logger.debug("Error closing cache connection: %s", e)
    finally:
        # The client reference is always dropped, even when close() failed.
        self._client = None
|
622
|
+
|
623
|
+
|
624
|
+
def get_client(
    session: Optional[Session] = None,
    *,
    debug_logs: bool = True,
    additional_prefix: Optional[str] = None,
    connection_config: Optional[Dict[str, Any]] = None,
) -> CacheClient:
    """Build and return a cache client instance.

    Args:
        session: Peak session for authentication (optional)
        debug_logs: Enable or disable debug logging (default: True)
        additional_prefix: Additional prefix to add after tenant name (optional)
        connection_config: Custom connection configuration overrides (optional)

    Returns:
        CacheClient: Cache client instance
    """
    # Collect the options explicitly so the factory call stays in one place.
    options: Dict[str, Any] = {
        "session": session,
        "debug_logs": debug_logs,
        "additional_prefix": additional_prefix,
        "connection_config": connection_config,
    }
    return CacheClient(**options)
|
648
|
+
|
649
|
+
|
650
|
+
# Public API of this module (what `from peak.resources.cache import *` exposes).
__all__: List[str] = ["CacheClient", "CacheError", "CacheConnectionError", "get_client"]
|
@@ -1,6 +1,6 @@
|
|
1
1
|
Metadata-Version: 2.1
|
2
2
|
Name: peak-sdk
|
3
|
-
Version: 1.
|
3
|
+
Version: 1.18.0
|
4
4
|
Summary: Python SDK for interacting with the Peak platform
|
5
5
|
Home-page: https://docs.peak.ai/sdk/latest/
|
6
6
|
License: Apache-2.0
|
@@ -31,6 +31,7 @@ Requires-Dist: shellingham (<1.5.4)
|
|
31
31
|
Requires-Dist: structlog (>=24.2.0,<25.0.0)
|
32
32
|
Requires-Dist: typer (>=0.12.4,<0.13.0)
|
33
33
|
Requires-Dist: urllib3 (<2)
|
34
|
+
Requires-Dist: valkey (>=6.0.0,<7.0.0)
|
34
35
|
Project-URL: Documentation, https://docs.peak.ai/sdk/latest/
|
35
36
|
Description-Content-Type: text/markdown
|
36
37
|
|
@@ -106,7 +107,7 @@ Follow these steps to create a virtual environment using Python's built-in `venv
|
|
106
107
|
This should return a response of the following format
|
107
108
|
|
108
109
|
```bash
|
109
|
-
peak-cli==1.
|
110
|
+
peak-cli==1.18.0
|
110
111
|
Python==3.12.3
|
111
112
|
System==Darwin(23.6.0)
|
112
113
|
```
|
@@ -122,7 +123,7 @@ Follow these steps to create a virtual environment using Python's built-in `venv
|
|
122
123
|
This should print the version of the SDK
|
123
124
|
|
124
125
|
```
|
125
|
-
1.
|
126
|
+
1.18.0
|
126
127
|
```
|
127
128
|
|
128
129
|
### Using the SDK and CLI
|
@@ -1,13 +1,13 @@
|
|
1
1
|
peak/__init__.py,sha256=UaVwsRIPq0Wuti8j2x4ijGRVYywglfjvZGz6ALBA7Oo,1284
|
2
2
|
peak/_metadata.py,sha256=8w0pXN03pDvh1toM-divY6HNVF8znTqGGG2T9Q4hEl4,30672
|
3
|
-
peak/_version.py,sha256=
|
3
|
+
peak/_version.py,sha256=kl78pleYMJmemmlFtE2DoQAdou8KltNLWLbj6m12g5M,887
|
4
4
|
peak/auth.py,sha256=A6nM9VGUdPJpFTFmb1zeeHjhKfBIsAyIMmnF9ajZkgs,904
|
5
5
|
peak/base_client.py,sha256=UO25ZViCQfKbBEDEfCdKS-eLXaVzb1aGnYDntBZ77ko,1808
|
6
6
|
peak/callbacks.py,sha256=WRVxSpg0Ur3uzG1aqxX4dQ5YV_Dr4GBrwYddppztcdM,3775
|
7
7
|
peak/cli/__init_.py,sha256=cpyVZFRLvtiZw6X0TEI6xF7bYdWSQL6YDjDmBHsLzO4,859
|
8
8
|
peak/cli/args.py,sha256=6vFvxburNhVZ3MOEhGsbXCb2IwURCf_nqcmnEUvsqzg,6166
|
9
|
-
peak/cli/cli.py,sha256=
|
10
|
-
peak/cli/helpers.py,sha256=
|
9
|
+
peak/cli/cli.py,sha256=YOt7IS4azxmNsZdEKs1U6P9RUOU-02nyjxUTJPnzOJI,2583
|
10
|
+
peak/cli/helpers.py,sha256=VGHLvASMCjlqJV7HoJHE4ZGUHSHdOTbIroc125O3Mu0,8663
|
11
11
|
peak/cli/metrics/__init__.py,sha256=O2Nxt8hTbW7E6IdFR5_cuL8WTiEk2Hf9QFNo29bpwfY,894
|
12
12
|
peak/cli/metrics/metrics.py,sha256=E_uVPkE0RTKypeMCw9UdNdl2YWiOW-BgXjvPkiXhbHs,29952
|
13
13
|
peak/cli/press/__init__.py,sha256=Uiir3EoPCWk_zoEk-RitATS20RxDLqiW8Iz5jybrDts,898
|
@@ -23,6 +23,7 @@ peak/cli/resources/__init__.py,sha256=bAXS042xP_5_0f8jpRELYFrSg0xXNN6hjsoFhZ74Al
|
|
23
23
|
peak/cli/resources/alerts/__init__.py,sha256=5Ol0XeSwnbQLycnCFr6XLog7B3nAYY4ohS5hC4Sy71g,1197
|
24
24
|
peak/cli/resources/alerts/emails.py,sha256=WrhEwy2uSdu7_ysjliOX3_Hb6b4ptIMYO5CW0djauPI,12435
|
25
25
|
peak/cli/resources/artifacts.py,sha256=Ik0NlvpRpQj9J_bU-yd5_ujWmU03ugC5q7Fy8Sx85ac,12619
|
26
|
+
peak/cli/resources/cache.py,sha256=joWMEQ0GXnR-im_ofsq3uvB6Jkqt9mc4f4nbFiq_vfE,12844
|
26
27
|
peak/cli/resources/images.py,sha256=f4SFC34yBBkbzqw4i72RjuLapvczTUIeZHFPIbLW9Hw,43133
|
27
28
|
peak/cli/resources/services.py,sha256=ySgpv-7bUsJ4bhRiMU2zvlC5TX_3M-73zsvV84Ki0bw,25666
|
28
29
|
peak/cli/resources/tenants.py,sha256=5SkUoW8O_LGqTYVzWYO9m3HjoXWifcmS66LMf5TFjsQ,4066
|
@@ -47,9 +48,10 @@ peak/press/blocks.py,sha256=9XLt2iiQBFa3ON9BddVZ1vp-kRNjij2JOfn069SAO-g,97610
|
|
47
48
|
peak/press/deployments.py,sha256=ZUcuUMygIEswGXleZAF8wkjzSXSl21jGERvIXJ__dcw,10969
|
48
49
|
peak/press/specs.py,sha256=SiSQcXXlPXQQ38AuUqfUmAW8tkuOXlSjwsWDrhNwSmQ,10915
|
49
50
|
peak/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
50
|
-
peak/resources/__init__.py,sha256=
|
51
|
+
peak/resources/__init__.py,sha256=gB20p9gb_ImrRpOMzj9H5HjoBjRRfdpLLi1e3Y28x0c,1262
|
51
52
|
peak/resources/alerts.py,sha256=1H6n_7lsR5rNI6FUfni8ye7KCPPArcm0fTpQOYLB6lg,12792
|
52
53
|
peak/resources/artifacts.py,sha256=gKlt9bzb1xkN7meM2r5p7AcWepgFFgxFyP1tvpq57qE,15648
|
54
|
+
peak/resources/cache.py,sha256=TmAwKB5qzsj2y4zopV8XJw0BIGqXA7NlKiRXFUscOZc,22929
|
53
55
|
peak/resources/images.py,sha256=dgYOlPhwOjbiBIz0xum3PfCT9RAP4GVGxBvr8ItiJEY,46715
|
54
56
|
peak/resources/services.py,sha256=MHV4Mxm0GHf5UpQUrnwFBmC9T5qHMI4j7PuD271gV0I,17176
|
55
57
|
peak/resources/tenants.py,sha256=_rNLWQxCJeaT0mA8_fipYRjSeZ9PIlTD1u_5STbcN-c,4380
|
@@ -123,8 +125,8 @@ peak/tools/logging/log_level.py,sha256=FVe94CEtow3nfHhNr5oQk0gEt2_5mpfaiV-xTfagX
|
|
123
125
|
peak/tools/logging/logger.py,sha256=DHe--A2J7RRb5N-u0Gb3bbEApPyoUD060A1IeNvJ87Y,15986
|
124
126
|
peak/tools/logging/utils.py,sha256=XRQ0nw_lmV_iiRel-o83EE84UTjvrzLTt4H7BHlPbLg,3330
|
125
127
|
peak/validators.py,sha256=mY17UDGKJ879wY3EApqrGVs3hJvRkPhgwftvmvnKAdI,2715
|
126
|
-
peak_sdk-1.
|
127
|
-
peak_sdk-1.
|
128
|
-
peak_sdk-1.
|
129
|
-
peak_sdk-1.
|
130
|
-
peak_sdk-1.
|
128
|
+
peak_sdk-1.18.0.dist-info/LICENSE,sha256=W0jszenKx7YdFA7BDnyg8xDKXzCP8AperJb_PHh9paQ,11340
|
129
|
+
peak_sdk-1.18.0.dist-info/METADATA,sha256=FzdrCBlmaWgg7AYCd524m_wJzZMQvJzvz8yoeqhfT8A,7974
|
130
|
+
peak_sdk-1.18.0.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
|
131
|
+
peak_sdk-1.18.0.dist-info/entry_points.txt,sha256=zHCEjuOTjkfmqivgEZQsPGm4zFA4W3Q_vKCjPr7W6lE,47
|
132
|
+
peak_sdk-1.18.0.dist-info/RECORD,,
|
File without changes
|
File without changes
|
File without changes
|