meerschaum 3.0.0rc3__py3-none-any.whl → 3.0.0rc7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (126)
  1. meerschaum/_internal/arguments/_parser.py +14 -2
  2. meerschaum/_internal/cli/__init__.py +6 -0
  3. meerschaum/_internal/cli/daemons.py +103 -0
  4. meerschaum/_internal/cli/entry.py +220 -0
  5. meerschaum/_internal/cli/workers.py +434 -0
  6. meerschaum/_internal/docs/index.py +1 -2
  7. meerschaum/_internal/entry.py +44 -8
  8. meerschaum/_internal/shell/Shell.py +113 -19
  9. meerschaum/_internal/shell/__init__.py +4 -1
  10. meerschaum/_internal/static.py +3 -1
  11. meerschaum/_internal/term/TermPageHandler.py +1 -2
  12. meerschaum/_internal/term/__init__.py +40 -6
  13. meerschaum/_internal/term/tools.py +33 -8
  14. meerschaum/actions/__init__.py +6 -4
  15. meerschaum/actions/api.py +39 -11
  16. meerschaum/actions/attach.py +1 -0
  17. meerschaum/actions/delete.py +4 -2
  18. meerschaum/actions/edit.py +27 -8
  19. meerschaum/actions/login.py +8 -8
  20. meerschaum/actions/register.py +13 -7
  21. meerschaum/actions/reload.py +22 -5
  22. meerschaum/actions/restart.py +14 -0
  23. meerschaum/actions/show.py +69 -4
  24. meerschaum/actions/start.py +135 -14
  25. meerschaum/actions/stop.py +36 -3
  26. meerschaum/actions/sync.py +6 -1
  27. meerschaum/api/__init__.py +35 -13
  28. meerschaum/api/_events.py +7 -2
  29. meerschaum/api/_oauth2.py +47 -4
  30. meerschaum/api/dash/callbacks/dashboard.py +103 -97
  31. meerschaum/api/dash/callbacks/jobs.py +3 -2
  32. meerschaum/api/dash/callbacks/login.py +10 -1
  33. meerschaum/api/dash/callbacks/pipes.py +136 -57
  34. meerschaum/api/dash/callbacks/register.py +9 -2
  35. meerschaum/api/dash/callbacks/tokens.py +2 -1
  36. meerschaum/api/dash/components.py +6 -7
  37. meerschaum/api/dash/keys.py +17 -1
  38. meerschaum/api/dash/pages/login.py +2 -2
  39. meerschaum/api/dash/pages/pipes.py +14 -4
  40. meerschaum/api/dash/pipes.py +186 -65
  41. meerschaum/api/dash/tokens.py +1 -1
  42. meerschaum/api/dash/webterm.py +14 -6
  43. meerschaum/api/models/_pipes.py +7 -1
  44. meerschaum/api/resources/static/js/terminado.js +3 -0
  45. meerschaum/api/resources/static/js/xterm-addon-unicode11.js +2 -0
  46. meerschaum/api/resources/templates/termpage.html +1 -0
  47. meerschaum/api/routes/_jobs.py +23 -11
  48. meerschaum/api/routes/_login.py +73 -5
  49. meerschaum/api/routes/_pipes.py +6 -4
  50. meerschaum/api/routes/_webterm.py +3 -3
  51. meerschaum/config/__init__.py +60 -13
  52. meerschaum/config/_default.py +89 -61
  53. meerschaum/config/_edit.py +10 -8
  54. meerschaum/config/_formatting.py +2 -0
  55. meerschaum/config/_patch.py +4 -2
  56. meerschaum/config/_paths.py +127 -12
  57. meerschaum/config/_read_config.py +20 -10
  58. meerschaum/config/_version.py +1 -1
  59. meerschaum/config/environment.py +262 -0
  60. meerschaum/config/stack/__init__.py +7 -5
  61. meerschaum/connectors/_Connector.py +1 -2
  62. meerschaum/connectors/__init__.py +37 -2
  63. meerschaum/connectors/api/_APIConnector.py +1 -1
  64. meerschaum/connectors/api/_jobs.py +11 -0
  65. meerschaum/connectors/api/_pipes.py +7 -1
  66. meerschaum/connectors/instance/_plugins.py +9 -1
  67. meerschaum/connectors/instance/_tokens.py +20 -3
  68. meerschaum/connectors/instance/_users.py +8 -1
  69. meerschaum/connectors/parse.py +1 -1
  70. meerschaum/connectors/sql/_create_engine.py +3 -0
  71. meerschaum/connectors/sql/_pipes.py +98 -79
  72. meerschaum/connectors/sql/_users.py +8 -1
  73. meerschaum/connectors/sql/tables/__init__.py +20 -3
  74. meerschaum/connectors/valkey/_ValkeyConnector.py +3 -3
  75. meerschaum/connectors/valkey/_pipes.py +7 -5
  76. meerschaum/core/Pipe/__init__.py +62 -72
  77. meerschaum/core/Pipe/_attributes.py +66 -90
  78. meerschaum/core/Pipe/_cache.py +555 -0
  79. meerschaum/core/Pipe/_clear.py +0 -11
  80. meerschaum/core/Pipe/_data.py +0 -50
  81. meerschaum/core/Pipe/_deduplicate.py +0 -13
  82. meerschaum/core/Pipe/_delete.py +12 -21
  83. meerschaum/core/Pipe/_drop.py +11 -23
  84. meerschaum/core/Pipe/_dtypes.py +1 -1
  85. meerschaum/core/Pipe/_index.py +8 -14
  86. meerschaum/core/Pipe/_sync.py +12 -18
  87. meerschaum/core/Plugin/_Plugin.py +7 -1
  88. meerschaum/core/Token/_Token.py +1 -1
  89. meerschaum/core/User/_User.py +1 -2
  90. meerschaum/jobs/_Executor.py +88 -4
  91. meerschaum/jobs/_Job.py +135 -35
  92. meerschaum/jobs/systemd.py +7 -2
  93. meerschaum/plugins/__init__.py +277 -81
  94. meerschaum/utils/_get_pipes.py +30 -4
  95. meerschaum/utils/daemon/Daemon.py +195 -41
  96. meerschaum/utils/daemon/FileDescriptorInterceptor.py +0 -1
  97. meerschaum/utils/daemon/RotatingFile.py +63 -36
  98. meerschaum/utils/daemon/StdinFile.py +53 -13
  99. meerschaum/utils/daemon/__init__.py +18 -5
  100. meerschaum/utils/daemon/_names.py +6 -3
  101. meerschaum/utils/debug.py +34 -4
  102. meerschaum/utils/dtypes/__init__.py +5 -1
  103. meerschaum/utils/formatting/__init__.py +4 -1
  104. meerschaum/utils/formatting/_jobs.py +1 -1
  105. meerschaum/utils/formatting/_pipes.py +47 -46
  106. meerschaum/utils/formatting/_pprint.py +1 -0
  107. meerschaum/utils/formatting/_shell.py +16 -6
  108. meerschaum/utils/misc.py +18 -38
  109. meerschaum/utils/packages/__init__.py +15 -13
  110. meerschaum/utils/packages/_packages.py +1 -0
  111. meerschaum/utils/pipes.py +39 -7
  112. meerschaum/utils/process.py +1 -1
  113. meerschaum/utils/prompt.py +171 -144
  114. meerschaum/utils/sql.py +12 -2
  115. meerschaum/utils/threading.py +42 -0
  116. meerschaum/utils/venv/__init__.py +2 -0
  117. meerschaum/utils/warnings.py +19 -13
  118. {meerschaum-3.0.0rc3.dist-info → meerschaum-3.0.0rc7.dist-info}/METADATA +3 -1
  119. {meerschaum-3.0.0rc3.dist-info → meerschaum-3.0.0rc7.dist-info}/RECORD +125 -119
  120. meerschaum/config/_environment.py +0 -145
  121. {meerschaum-3.0.0rc3.dist-info → meerschaum-3.0.0rc7.dist-info}/WHEEL +0 -0
  122. {meerschaum-3.0.0rc3.dist-info → meerschaum-3.0.0rc7.dist-info}/entry_points.txt +0 -0
  123. {meerschaum-3.0.0rc3.dist-info → meerschaum-3.0.0rc7.dist-info}/licenses/LICENSE +0 -0
  124. {meerschaum-3.0.0rc3.dist-info → meerschaum-3.0.0rc7.dist-info}/licenses/NOTICE +0 -0
  125. {meerschaum-3.0.0rc3.dist-info → meerschaum-3.0.0rc7.dist-info}/top_level.txt +0 -0
  126. {meerschaum-3.0.0rc3.dist-info → meerschaum-3.0.0rc7.dist-info}/zip-safe +0 -0
@@ -0,0 +1,555 @@
1
+ #! /usr/bin/env python3
2
+ # vim:fenc=utf-8
3
+
4
+ """
5
+ Define logic for caching pipes' attributes.
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ import os
11
+ import pickle
12
+ import json
13
+ import pathlib
14
+ from datetime import datetime, timedelta
15
+ from typing import Any, Union, List
16
+
17
+ import meerschaum as mrsm
18
+ from meerschaum.utils.warnings import warn, dprint
19
+
20
+
21
+ def _get_in_memory_key(cache_key: str) -> str:
22
+ """
23
+ Return the in-memory version of a cache key.
24
+ """
25
+ return (
26
+ ('_' + cache_key)
27
+ if not cache_key.startswith('_')
28
+ else cache_key
29
+ )
30
+
31
+
32
+ def _get_cache_conn_cache_key(pipe: mrsm.Pipe, cache_key: str) -> str:
33
+ """
34
+ Return the cache key to use in the cache connector.
35
+ """
36
+ return f'.cache:pipes:{pipe.connector_keys}:{pipe.metric_key}:{pipe.location_key}:{cache_key}'
37
+
38
+
39
def _get_cache_connector(self) -> 'Union[None, ValkeyConnector]':
    """
    Return the configured cache connector, or `None` when connector-backed
    caching is disabled, unset, or misconfigured.
    """
    # Feature-flagged: only active when the experimental setting is enabled.
    if not mrsm.get_config('system', 'experimental', 'valkey_session_cache'):
        return None

    keys = self.cache_connector_keys
    if keys is None:
        return None

    # Only Valkey connectors are supported as cache backends.
    if not keys.startswith('valkey:'):
        warn(f"Invalid cache connector keys: '{keys}'")
        return None

    return mrsm.get_connector(keys)
55
+
56
+
57
def _cache_value(
    self,
    cache_key: str,
    value: Any,
    memory_only: bool = False,
    debug: bool = False,
) -> None:
    """
    Cache a value in-memory and (if `Pipe.cache` is `True`) on-disk or to the cache connector.

    Parameters
    ----------
    cache_key: str
        The key under which to cache `value`.

    value: Any
        The value to cache. `None` values are skipped entirely.

    memory_only: bool, default False
        If `True`, only set the in-memory attribute and skip persistence.
    """
    # `None` is the cache-miss sentinel, so it is never stored.
    if value is None:
        if debug:
            dprint(f"Skip caching '{cache_key}': received value of `None`")
        return

    self.__dict__[_get_in_memory_key(cache_key)] = value
    if memory_only:
        return

    if self.cache:
        write_success, write_msg = self._write_cache_key(cache_key, value)
    else:
        write_success, write_msg = True, "Success"

    if not write_success and debug:
        dprint(f"Failed to cache '{cache_key}':\n{write_msg}")
84
+
85
+
86
def _get_cached_value(
    self,
    cache_key: str,
    debug: bool = False,
) -> Any:
    """
    Attempt to retrieve a cached value, first from in-memory,
    then from the persistent cache (disk or connector).
    """
    in_memory_key = _get_in_memory_key(cache_key)
    try:
        return self.__dict__[in_memory_key]
    except KeyError:
        pass
    return self._read_cache_key(cache_key, debug=debug)
99
+
100
+
101
+ def _invalidate_cache(
102
+ self,
103
+ hard: bool = False,
104
+ debug: bool = False,
105
+ ) -> mrsm.SuccessTuple:
106
+ """
107
+ Invalidate temporary in-memory cache.
108
+ Note this does not affect in on-disk cache created when `cache=True`.
109
+
110
+ Parameters
111
+ ----------
112
+ hard: bool, default False
113
+ If `True`, clear all temporary cache.
114
+ Otherwise only clear soft cache.
115
+
116
+ Returns
117
+ -------
118
+ A `SuccessTuple` to indicate success.
119
+ """
120
+ if debug:
121
+ dprint(f"Invalidating {'some' if not hard else 'all'} cache for {self}.")
122
+
123
+ self._clear_cache_key('_exists', debug=debug)
124
+ self._clear_cache_key('sync_ts', debug=debug)
125
+
126
+ if not hard:
127
+ return True, "Success"
128
+
129
+ if self.__dict__.get('_static', None):
130
+ return True, "Success"
131
+
132
+ cache_keys = self._get_cache_keys(debug=debug)
133
+ for cache_key in cache_keys:
134
+ if cache_keys == 'attributes':
135
+ continue
136
+ self._clear_cache_key(cache_key, debug=debug)
137
+
138
+ return True, "Success"
139
+
140
+
141
def _get_cache_dir_path(self, create_if_not_exists: bool = False) -> pathlib.Path:
    """
    Return the path to this pipe's on-disk cache directory.

    Parameters
    ----------
    create_if_not_exists: bool, default False
        If `True`, attempt to create the directory (and parents) when missing.
        Creation failures are warned about, not raised.

    Returns
    -------
    The `pathlib.Path` to the cache directory
    (`<pipes-cache>/<instance>/<connector>/<metric>/<location>`).
    """
    ### NOTE: Removed the unused `ROOT_DIR_PATH` import.
    from meerschaum.config.paths import PIPES_CACHE_RESOURCES_PATH
    cache_dir_path = (
        PIPES_CACHE_RESOURCES_PATH
        / self.instance_keys
        / self.connector_keys
        / self.metric_key
        / str(self.location_key)
    )
    if create_if_not_exists and not cache_dir_path.exists():
        try:
            cache_dir_path.mkdir(parents=True, exist_ok=True)
        except Exception as e:
            warn(f"Encountered an issue when creating local pipe metadata cache:\n{e}")

    return cache_dir_path
160
+
161
+
162
+ def _write_cache_key(
163
+ self,
164
+ cache_key: str,
165
+ obj_to_write: Any,
166
+ debug: bool = False,
167
+ ) -> mrsm.SuccessTuple:
168
+ """
169
+ Pickle and write the object to cache.
170
+ """
171
+ cache_connector = self._get_cache_connector()
172
+ if cache_connector is None:
173
+ return self._write_cache_file(cache_key, obj_to_write, debug=debug)
174
+
175
+ return self._write_cache_conn_key(cache_key, obj_to_write, debug=debug)
176
+
177
+
178
def _write_cache_file(
    self,
    cache_key: str,
    obj_to_write: Any,
    debug: bool = False,
) -> mrsm.SuccessTuple:
    """
    Write a pickle-able object to a cache file, alongside a JSON
    metadata file recording the creation timestamp.

    Returns
    -------
    A `SuccessTuple` indicating whether both files were written.
    """
    from meerschaum.utils.dtypes import get_current_timestamp, json_serialize_value
    now = get_current_timestamp()

    ### Only attempt to create the cache directory once per pipe.
    already_checked = self.__dict__.get('_checked_if_cache_dir_exists', None)
    cache_dir_path = self._get_cache_dir_path(create_if_not_exists=(not already_checked))
    if not already_checked:
        self._checked_if_cache_dir_exists = True

    file_path = cache_dir_path / (cache_key + '.pkl')
    meta_file_path = cache_dir_path / (cache_key + '.meta.json')
    metadata = {'created': now}

    if debug:
        dprint(f"Writing cache file '{file_path}'.")

    try:
        with open(file_path, 'wb+') as f:
            pickle.dump(obj_to_write, f)
        with open(meta_file_path, 'w+', encoding='utf-8') as f:
            json.dump(metadata, f, default=json_serialize_value)
    except Exception as e:
        if debug:
            dprint(f"Failed to write cache file:\n{e}")
        return False, f"Failed to write cache file:\n{e}"

    return True, "Success"
214
+
215
+
216
def _write_cache_conn_key(
    self,
    cache_key: str,
    obj_to_write: Any,
    debug: bool = False,
) -> mrsm.SuccessTuple:
    """
    Pickle an object and set it on the cache connector with an expiration.
    """
    cache_connector = self._get_cache_connector()
    if cache_connector is None:
        return False, f"No cache connector is set for {self}."

    cache_conn_cache_key = _get_cache_conn_cache_key(self, cache_key)
    timeout_seconds = int(mrsm.get_config(
        'pipes', 'attributes', 'local_cache_timeout_seconds'
    ))
    obj_bytes = pickle.dumps(obj_to_write)

    if debug:
        dprint(f"Setting '{cache_conn_cache_key}' on '{cache_connector}'.")

    did_set = cache_connector.set(
        cache_conn_cache_key,
        obj_bytes,
        ex=timeout_seconds,
    )
    if not did_set:
        return False, f"Failed to set '{cache_conn_cache_key}' on '{cache_connector}'."

    return True, "Success"
246
+
247
+
248
+ def _read_cache_key(
249
+ self,
250
+ cache_key: str,
251
+ debug: bool = False,
252
+ ) -> Any:
253
+ """
254
+ Read the cache file if the cache connector is None, otherwise read from Valkey.
255
+ """
256
+ cache_connector = self._get_cache_connector()
257
+ if cache_connector is None:
258
+ return self._read_cache_file(cache_key, debug=debug)
259
+
260
+ return self._read_cache_conn_key(cache_key, debug=debug)
261
+
262
+
263
def _read_cache_file(
    self,
    cache_key: str,
    debug: bool = False,
) -> Any:
    """
    Read a cache file and return the unpickled object.
    Return `None` when the cache file is missing, unreadable, or expired.
    Expired entries are deleted on read.
    """
    from meerschaum.utils.dtypes import get_current_timestamp
    now = get_current_timestamp()
    cache_dir_path = self._get_cache_dir_path()
    file_path = cache_dir_path / (cache_key + '.pkl')
    meta_file_path = cache_dir_path / (cache_key + '.meta.json')
    local_cache_timeout_seconds = mrsm.get_config(
        'pipes', 'attributes', 'local_cache_timeout_seconds'
    )

    ### Both the payload and its metadata must be present.
    if not meta_file_path.exists() or not file_path.exists():
        return None

    try:
        if debug:
            dprint(f"Reading cache file '{file_path}'.")

        with open(meta_file_path, 'r', encoding='utf-8') as f:
            metadata = json.load(f)
    except Exception as e:
        if debug:
            dprint(f"Failed to read cache metadata file '{meta_file_path}':\n{e}")
        return None

    created_str = metadata.get('created', None)
    created = datetime.fromisoformat(created_str) if created_str else None
    if created is None:
        if debug:
            dprint(f"Could not read cache `created` timestamp for '{meta_file_path}'.")
        return None

    ### Expired entries are purged so the next read hits a clean miss.
    if (now - created) >= timedelta(seconds=local_cache_timeout_seconds):
        self._clear_cache_file(cache_key, debug=debug)
        return None

    try:
        with open(file_path, 'rb') as f:
            return pickle.load(f)
    except Exception as e:
        if debug:
            dprint(f"Failed to read cache file:\n{e}")
        return None
317
+
318
+
319
def _read_cache_conn_key(
    self,
    cache_key: str,
    debug: bool = False,
) -> Any:
    """
    Read and unpickle a cache key from the cache connector.
    Return `None` on a miss or any failure.
    """
    cache_connector = self._get_cache_connector()
    if cache_connector is None:
        return None

    cache_conn_cache_key = _get_cache_conn_cache_key(self, cache_key)
    try:
        obj_bytes = cache_connector.get(cache_conn_cache_key, decode=False)
    except Exception as e:
        warn(f"Failed to load '{cache_conn_cache_key}' from '{cache_connector}':\n{e}")
        return None

    if obj_bytes is None:
        return None

    try:
        return pickle.loads(obj_bytes)
    except Exception as e:
        warn(f"Failed to load '{cache_conn_cache_key}' from '{cache_connector}':\n{e}")
        return None
342
+
343
+
344
+ def _load_cache_keys(self, debug: bool = False) -> mrsm.SuccessTuple:
345
+ """
346
+ Discover and load existing cache keys.
347
+ """
348
+ if not self.cache:
349
+ return True, f"Skip checking for cache for {self}."
350
+
351
+ cache_connector = self._get_cache_connector()
352
+ if cache_connector is None:
353
+ return self._load_cache_files(debug=debug)
354
+
355
+ return self._load_cache_conn_keys(debug=debug)
356
+
357
+
358
def _load_cache_files(self, debug: bool = False) -> mrsm.SuccessTuple:
    """
    Load all existing pickle cache files into the in-memory cache.
    Attributes already set in-memory are never overwritten.
    """
    if not self.cache:
        return True, f"Skip checking for cache for {self}."

    cache_dir_path = self._get_cache_dir_path(create_if_not_exists=True)
    if not cache_dir_path.exists():
        return True, f"No cache directory for {self}."

    cache_keys = self._get_cache_file_keys(debug=debug)
    if not cache_keys:
        if debug:
            dprint(f"No local cache found for {self}.")
        return True, "No cache to load."

    if debug:
        num_keys = len(cache_keys)
        dprint(
            f"Will load {num_keys} cache file"
            + ('s' if num_keys != 1 else '')
            + f' into {self}.'
        )

    ### Read every file (expired entries are purged on read), then keep
    ### only hits that don't shadow an existing in-memory attribute.
    cache_patch = {}
    for cache_key in cache_keys:
        obj = self._read_cache_file(cache_key, debug=debug)
        if obj is None:
            continue
        in_memory_key = _get_in_memory_key(cache_key)
        if in_memory_key not in self.__dict__:
            cache_patch[in_memory_key] = obj

    if debug:
        dprint(f"Loading cache keys into {self}:")
        mrsm.pprint(cache_patch)

    self.__dict__.update(cache_patch)
    return True, "Success"
400
+
401
+
402
def _load_cache_conn_keys(self, debug: bool = False) -> mrsm.SuccessTuple:
    """
    Discover and load cache keys from the cache connector into memory.
    Attributes already set in-memory are never overwritten.

    Returns
    -------
    A `SuccessTuple` indicating whether the cache keys were loaded.
    """
    if not self.cache:
        return True, f"Skip checking for cache for {self}."

    cache_connector = self._get_cache_connector()
    if cache_connector is None:
        return False, f"No cache connector is set for {self}."

    keys = self._get_cache_conn_keys(debug=debug)
    try:
        ### The short cache key is the final `:`-delimited segment.
        cache_keys_bytes = {
            key.split(':')[-1]: cache_connector.get(key, decode=False)
            for key in keys
        }
    except Exception as e:
        return False, f"Failed to retrieve cache keys for {self} from '{cache_connector}':\n{e}"

    try:
        cache_keys_objs = {
            cache_key: pickle.loads(obj_bytes)
            for cache_key, obj_bytes in cache_keys_bytes.items()
        }
    except Exception as e:
        return False, f"Failed to de-pickle cache bytes from '{self}':\n{e}"

    cache_patch = {
        in_memory_key: obj
        for cache_key, obj in cache_keys_objs.items()
        if (
            obj is not None
            and (in_memory_key := _get_in_memory_key(cache_key)) not in self.__dict__
        )
    }
    if debug:
        ### NOTE: Fixed a missing f-prefix which printed a literal '{self}'.
        dprint(f"Loading cache keys into {self}:")
        mrsm.pprint(cache_patch)

    self.__dict__.update(cache_patch)
    return True, "Success"
444
+
445
+
446
+ def _get_cache_keys(self, debug: bool = False) -> List[str]:
447
+ """
448
+ Return a list of existing cache keys.
449
+ """
450
+ cache_connector = self._get_cache_connector()
451
+ if cache_connector is None:
452
+ return self._get_cache_file_keys(debug=debug)
453
+
454
+ return self._get_cache_conn_keys(debug=debug)
455
+
456
+
457
+ def _get_cache_file_keys(self, debug: bool = False) -> List[str]:
458
+ """
459
+ Return the cache keys from disk.
460
+ """
461
+ cache_dir_path = self._get_cache_dir_path()
462
+ if not cache_dir_path.exists():
463
+ if debug:
464
+ dprint(f"Cache path '{cache_dir_path}' does not exist; no keys to return.")
465
+ return []
466
+
467
+ if debug:
468
+ dprint(f"Listing cache files from '{cache_dir_path}'.")
469
+
470
+ return [
471
+ filename[:(-1 * len('.pkl'))]
472
+ for filename in os.listdir(cache_dir_path)
473
+ if filename.endswith('.pkl')
474
+ ]
475
+
476
+
477
+ def _get_cache_conn_keys(self, debug: bool = False) -> List[str]:
478
+ """
479
+ Return the cache keys from the cache connector.
480
+ """
481
+ cache_connector = self._get_cache_connector()
482
+ if cache_connector is None:
483
+ return []
484
+
485
+ keys_prefix = _get_cache_conn_cache_key(self, '')
486
+
487
+ try:
488
+ return cache_connector.client.keys(keys_prefix + '*')
489
+ except Exception as e:
490
+ warn(f"Failed to get cache keys for {self} from '{cache_connector}':\n{e}")
491
+ return []
492
+
493
+
494
def _clear_cache_key(
    self,
    cache_key: str,
    debug: bool = False,
) -> None:
    """
    Remove a cached value from memory and from the persistent cache
    (disk or the cache connector, whichever is active).
    """
    self.__dict__.pop(_get_in_memory_key(cache_key), None)

    if self._get_cache_connector() is None:
        self._clear_cache_file(cache_key, debug=debug)
        return
    self._clear_cache_conn_key(cache_key, debug=debug)
510
+
511
+
512
+ def _clear_cache_file(
513
+ self,
514
+ cache_key: str,
515
+ debug: bool = False,
516
+ ) -> None:
517
+ """
518
+ Clear a cached value from on-disk.
519
+ """
520
+ cache_dir_path = self._get_cache_dir_path()
521
+ file_path = cache_dir_path / (cache_key + '.pkl')
522
+ meta_file_path = cache_dir_path / (cache_key + '.meta.json')
523
+
524
+ try:
525
+ if file_path.exists():
526
+ file_path.unlink()
527
+ except Exception as e:
528
+ if debug:
529
+ dprint(f"Failed to delete cache file '{file_path}':\n{e}")
530
+
531
+ try:
532
+ if meta_file_path.exists():
533
+ meta_file_path.unlink()
534
+ except Exception as e:
535
+ if debug:
536
+ dprint(f"Failed to delete meta cache file '{meta_file_path}':{e}")
537
+
538
+
539
+ def _clear_cache_conn_key(
540
+ self,
541
+ cache_key: str,
542
+ debug: bool = False,
543
+ ) -> None:
544
+ """
545
+ Clear a cached value from Valkey.
546
+ """
547
+ cache_connector = self._get_cache_connector()
548
+ if cache_connector is None:
549
+ return
550
+
551
+ cache_conn_cache_key = _get_cache_conn_cache_key(self, cache_key)
552
+ try:
553
+ cache_connector.client.unlink(cache_conn_cache_key)
554
+ except Exception as e:
555
+ warn(f"Failed to clear cache key '{cache_key}' from '{cache_connector}':\n{e}")
@@ -60,17 +60,6 @@ def clear(
60
60
 
61
61
  begin, end = self.parse_date_bounds(begin, end)
62
62
 
63
- if self.cache_pipe is not None:
64
- success, msg = self.cache_pipe.clear(
65
- begin=begin,
66
- end=end,
67
- params=params,
68
- debug=debug,
69
- **kwargs
70
- )
71
- if not success:
72
- warn(msg)
73
-
74
63
  with Venv(get_connector_plugin(self.instance_connector)):
75
64
  return self.instance_connector.clear_pipe(
76
65
  self,
@@ -208,35 +208,6 @@ def get_data(
208
208
  if not self.exists(debug=debug):
209
209
  return None
210
210
 
211
- if self.cache_pipe is not None:
212
- if not fresh:
213
- _sync_cache_tuple = self.cache_pipe.sync(
214
- begin=begin,
215
- end=end,
216
- params=params,
217
- debug=debug,
218
- **kw
219
- )
220
- if not _sync_cache_tuple[0]:
221
- warn(f"Failed to sync cache for {self}:\n" + _sync_cache_tuple[1])
222
- fresh = True
223
- else: ### Successfully synced cache.
224
- return self.enforce_dtypes(
225
- self.cache_pipe.get_data(
226
- select_columns=select_columns,
227
- omit_columns=omit_columns,
228
- begin=begin,
229
- end=end,
230
- params=params,
231
- order=order,
232
- limit=limit,
233
- debug=debug,
234
- fresh=True,
235
- **kw
236
- ),
237
- debug=debug,
238
- )
239
-
240
211
  with Venv(get_connector_plugin(self.instance_connector)):
241
212
  df = self.instance_connector.get_pipe_data(
242
213
  pipe=self,
@@ -474,27 +445,6 @@ def get_backtrack_data(
474
445
  else backtrack_interval
475
446
  )
476
447
 
477
- if self.cache_pipe is not None:
478
- if not fresh:
479
- _sync_cache_tuple = self.cache_pipe.sync(begin=begin, params=params, debug=debug, **kw)
480
- if not _sync_cache_tuple[0]:
481
- warn(f"Failed to sync cache for {self}:\n" + _sync_cache_tuple[1])
482
- fresh = True
483
- else: ### Successfully synced cache.
484
- return self.enforce_dtypes(
485
- self.cache_pipe.get_backtrack_data(
486
- fresh=True,
487
- begin=begin,
488
- backtrack_minutes=backtrack_minutes,
489
- params=params,
490
- limit=limit,
491
- order=kw.get('order', 'desc'),
492
- debug=debug,
493
- **kw
494
- ),
495
- debug=debug,
496
- )
497
-
498
448
  if hasattr(self.instance_connector, 'get_backtrack_data'):
499
449
  with Venv(get_connector_plugin(self.instance_connector)):
500
450
  return self.enforce_dtypes(
@@ -67,19 +67,6 @@ def deduplicate(
67
67
 
68
68
  begin, end = self.parse_date_bounds(begin, end)
69
69
 
70
- if self.cache_pipe is not None:
71
- success, msg = self.cache_pipe.deduplicate(
72
- begin=begin,
73
- end=end,
74
- params=params,
75
- bounded=bounded,
76
- debug=debug,
77
- _use_instance_method=_use_instance_method,
78
- **kwargs
79
- )
80
- if not success:
81
- warn(msg)
82
-
83
70
  workers = self.get_num_workers(workers=workers)
84
71
  pool = get_pool(workers=workers)
85
72