meerschaum 3.0.0rc4__py3-none-any.whl → 3.0.0rc7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- meerschaum/_internal/arguments/_parser.py +14 -2
- meerschaum/_internal/cli/__init__.py +6 -0
- meerschaum/_internal/cli/daemons.py +103 -0
- meerschaum/_internal/cli/entry.py +220 -0
- meerschaum/_internal/cli/workers.py +434 -0
- meerschaum/_internal/docs/index.py +1 -2
- meerschaum/_internal/entry.py +44 -8
- meerschaum/_internal/shell/Shell.py +113 -19
- meerschaum/_internal/shell/__init__.py +4 -1
- meerschaum/_internal/static.py +3 -1
- meerschaum/_internal/term/TermPageHandler.py +1 -2
- meerschaum/_internal/term/__init__.py +40 -6
- meerschaum/_internal/term/tools.py +33 -8
- meerschaum/actions/__init__.py +6 -4
- meerschaum/actions/api.py +39 -11
- meerschaum/actions/attach.py +1 -0
- meerschaum/actions/delete.py +4 -2
- meerschaum/actions/edit.py +27 -8
- meerschaum/actions/login.py +8 -8
- meerschaum/actions/register.py +13 -7
- meerschaum/actions/reload.py +22 -5
- meerschaum/actions/restart.py +14 -0
- meerschaum/actions/show.py +69 -4
- meerschaum/actions/start.py +135 -14
- meerschaum/actions/stop.py +36 -3
- meerschaum/actions/sync.py +6 -1
- meerschaum/api/__init__.py +35 -13
- meerschaum/api/_events.py +2 -2
- meerschaum/api/_oauth2.py +47 -4
- meerschaum/api/dash/callbacks/dashboard.py +29 -0
- meerschaum/api/dash/callbacks/jobs.py +3 -2
- meerschaum/api/dash/callbacks/login.py +10 -1
- meerschaum/api/dash/callbacks/register.py +9 -2
- meerschaum/api/dash/pages/login.py +2 -2
- meerschaum/api/dash/pipes.py +72 -36
- meerschaum/api/dash/webterm.py +14 -6
- meerschaum/api/models/_pipes.py +7 -1
- meerschaum/api/resources/static/js/terminado.js +3 -0
- meerschaum/api/resources/static/js/xterm-addon-unicode11.js +2 -0
- meerschaum/api/resources/templates/termpage.html +1 -0
- meerschaum/api/routes/_jobs.py +23 -11
- meerschaum/api/routes/_login.py +73 -5
- meerschaum/api/routes/_pipes.py +6 -4
- meerschaum/api/routes/_webterm.py +3 -3
- meerschaum/config/__init__.py +60 -13
- meerschaum/config/_default.py +89 -61
- meerschaum/config/_edit.py +10 -8
- meerschaum/config/_formatting.py +2 -0
- meerschaum/config/_patch.py +4 -2
- meerschaum/config/_paths.py +127 -12
- meerschaum/config/_read_config.py +20 -10
- meerschaum/config/_version.py +1 -1
- meerschaum/config/environment.py +262 -0
- meerschaum/config/stack/__init__.py +7 -5
- meerschaum/connectors/_Connector.py +1 -2
- meerschaum/connectors/__init__.py +37 -2
- meerschaum/connectors/api/_APIConnector.py +1 -1
- meerschaum/connectors/api/_jobs.py +11 -0
- meerschaum/connectors/api/_pipes.py +7 -1
- meerschaum/connectors/instance/_plugins.py +9 -1
- meerschaum/connectors/instance/_tokens.py +20 -3
- meerschaum/connectors/instance/_users.py +8 -1
- meerschaum/connectors/parse.py +1 -1
- meerschaum/connectors/sql/_create_engine.py +3 -0
- meerschaum/connectors/sql/_pipes.py +93 -79
- meerschaum/connectors/sql/_users.py +8 -1
- meerschaum/connectors/valkey/_ValkeyConnector.py +3 -3
- meerschaum/connectors/valkey/_pipes.py +7 -5
- meerschaum/core/Pipe/__init__.py +45 -71
- meerschaum/core/Pipe/_attributes.py +66 -90
- meerschaum/core/Pipe/_cache.py +555 -0
- meerschaum/core/Pipe/_clear.py +0 -11
- meerschaum/core/Pipe/_data.py +0 -50
- meerschaum/core/Pipe/_deduplicate.py +0 -13
- meerschaum/core/Pipe/_delete.py +12 -21
- meerschaum/core/Pipe/_drop.py +11 -23
- meerschaum/core/Pipe/_dtypes.py +1 -1
- meerschaum/core/Pipe/_index.py +8 -14
- meerschaum/core/Pipe/_sync.py +12 -18
- meerschaum/core/Plugin/_Plugin.py +7 -1
- meerschaum/core/Token/_Token.py +1 -1
- meerschaum/core/User/_User.py +1 -2
- meerschaum/jobs/_Executor.py +88 -4
- meerschaum/jobs/_Job.py +135 -35
- meerschaum/jobs/systemd.py +7 -2
- meerschaum/plugins/__init__.py +277 -81
- meerschaum/utils/daemon/Daemon.py +195 -41
- meerschaum/utils/daemon/FileDescriptorInterceptor.py +0 -1
- meerschaum/utils/daemon/RotatingFile.py +63 -36
- meerschaum/utils/daemon/StdinFile.py +53 -13
- meerschaum/utils/daemon/__init__.py +18 -5
- meerschaum/utils/daemon/_names.py +6 -3
- meerschaum/utils/debug.py +34 -4
- meerschaum/utils/dtypes/__init__.py +5 -1
- meerschaum/utils/formatting/__init__.py +4 -1
- meerschaum/utils/formatting/_jobs.py +1 -1
- meerschaum/utils/formatting/_pipes.py +47 -46
- meerschaum/utils/formatting/_shell.py +16 -6
- meerschaum/utils/misc.py +18 -38
- meerschaum/utils/packages/__init__.py +15 -13
- meerschaum/utils/packages/_packages.py +1 -0
- meerschaum/utils/pipes.py +33 -5
- meerschaum/utils/process.py +1 -1
- meerschaum/utils/prompt.py +171 -144
- meerschaum/utils/sql.py +12 -2
- meerschaum/utils/threading.py +42 -0
- meerschaum/utils/venv/__init__.py +2 -0
- meerschaum/utils/warnings.py +19 -13
- {meerschaum-3.0.0rc4.dist-info → meerschaum-3.0.0rc7.dist-info}/METADATA +3 -1
- {meerschaum-3.0.0rc4.dist-info → meerschaum-3.0.0rc7.dist-info}/RECORD +116 -110
- meerschaum/config/_environment.py +0 -145
- {meerschaum-3.0.0rc4.dist-info → meerschaum-3.0.0rc7.dist-info}/WHEEL +0 -0
- {meerschaum-3.0.0rc4.dist-info → meerschaum-3.0.0rc7.dist-info}/entry_points.txt +0 -0
- {meerschaum-3.0.0rc4.dist-info → meerschaum-3.0.0rc7.dist-info}/licenses/LICENSE +0 -0
- {meerschaum-3.0.0rc4.dist-info → meerschaum-3.0.0rc7.dist-info}/licenses/NOTICE +0 -0
- {meerschaum-3.0.0rc4.dist-info → meerschaum-3.0.0rc7.dist-info}/top_level.txt +0 -0
- {meerschaum-3.0.0rc4.dist-info → meerschaum-3.0.0rc7.dist-info}/zip-safe +0 -0
meerschaum/utils/daemon/Daemon.py

@@ -25,9 +25,6 @@ from meerschaum.utils.typing import (
 )
 from meerschaum.config import get_config
 from meerschaum._internal.static import STATIC_CONFIG
-from meerschaum.config._paths import (
-    DAEMON_RESOURCES_PATH, LOGS_RESOURCES_PATH, DAEMON_ERROR_LOG_PATH,
-)
 from meerschaum.config._patch import apply_patch_to_config
 from meerschaum.utils.warnings import warn, error
 from meerschaum.utils.packages import attempt_import

@@ -144,6 +141,7 @@ class Daemon:
         daemon_id: Optional[str] = None,
         label: Optional[str] = None,
         properties: Optional[Dict[str, Any]] = None,
+        pickle: bool = True,
     ):
         """
         Parameters

@@ -211,6 +209,8 @@ class Daemon:
             error("Cannot create a Daemon without a target.")
         self.target = target

+        self.pickle = pickle
+
         ### NOTE: We have to check self.__dict__ in case we un-pickling.
         if '_target_args' not in self.__dict__:
             self._target_args = target_args

@@ -224,10 +224,17 @@ class Daemon:
                 else str(self.target)
             )
             self.label = label
+        elif label is not None:
+            self.label = label
+
         if 'daemon_id' not in self.__dict__:
             self.daemon_id = get_new_daemon_name()
         if '_properties' not in self.__dict__:
             self._properties = properties
+        elif properties:
+            if self._properties is None:
+                self._properties = {}
+            self._properties.update(properties)
         if self._properties is None:
             self._properties = {}

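These constructor hunks add a `pickle` flag and make repeated construction merge `label` and `properties` into an existing daemon's state instead of discarding them. A minimal sketch of the new signature, using a hypothetical target callable and daemon ID (not taken from the diff):

```python
from meerschaum.utils.daemon import Daemon

def my_task() -> None:
    """Hypothetical target callable for illustration."""
    print("running inside the daemon")

daemon = Daemon(
    target=my_task,
    daemon_id='example-daemon',                        # hypothetical ID
    label='example daemon',
    properties={'logs': {'write_timestamps': False}},  # merged into any stored properties
    pickle=False,                                      # skip writing the daemon's pickle file
)
```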
@@ -276,6 +283,23 @@ class Daemon:

         self._setup(allow_dirty_run)

+        _daemons.append(self)
+
+        logs_cf = self.properties.get('logs', {})
+        log_refresh_seconds = logs_cf.get('refresh_files_seconds', None)
+        if log_refresh_seconds is None:
+            log_refresh_seconds = get_config('jobs', 'logs', 'refresh_files_seconds')
+        write_timestamps = logs_cf.get('write_timestamps', None)
+        if write_timestamps is None:
+            write_timestamps = get_config('jobs', 'logs', 'timestamps', 'enabled')
+
+        self._log_refresh_timer = RepeatTimer(
+            log_refresh_seconds,
+            partial(self.rotating_log.refresh_files, start_interception=write_timestamps),
+        )
+
+        capture_stdin = logs_cf.get('stdin', True)
+
         ### NOTE: The SIGINT handler has been removed so that child processes may handle
         ### KeyboardInterrupts themselves.
         ### The previous aggressive approach was redundant because of the SIGTERM handler.
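This hunk reads per-daemon logging options from `properties['logs']` before falling back to the global `jobs:logs` configuration via `get_config()`. A sketch of the override keys consulted here (values are illustrative):

```python
# Keys read from properties['logs'] in this hunk; anything unset falls back to
# get_config('jobs', 'logs', ...). The values below are illustrative only.
daemon_properties = {
    'logs': {
        'refresh_files_seconds': 5,  # interval for the RepeatTimer that refreshes log files
        'write_timestamps': True,    # passed as start_interception to refresh_files()
        'stdin': False,              # capture_stdin: set False to skip attaching a StdinFile
    },
}
```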
@@ -283,6 +307,7 @@ class Daemon:
             pidfile=self.pid_lock,
             stdout=self.rotating_log,
             stderr=self.rotating_log,
+            stdin=(self.stdin_file if capture_stdin else None),
             working_directory=os.getcwd(),
             detach_process=True,
             files_preserve=list(self.rotating_log.subfile_objects.values()),

@@ -291,18 +316,14 @@ class Daemon:
             },
         )

-
-
-        log_refresh_seconds = get_config('jobs', 'logs', 'refresh_files_seconds')
-        self._log_refresh_timer = RepeatTimer(
-            log_refresh_seconds,
-            partial(self.rotating_log.refresh_files, start_interception=True),
-        )
+        if capture_stdin and sys.stdin is None:
+            raise OSError("Cannot daemonize without stdin.")

         try:
             os.environ['LINES'], os.environ['COLUMNS'] = str(int(lines)), str(int(columns))
             with self._daemon_context:
-
+                if capture_stdin:
+                    sys.stdin = self.stdin_file
                 _ = os.environ.pop(STATIC_CONFIG['environment']['systemd_stdin_path'], None)
                 os.environ[STATIC_CONFIG['environment']['daemon_id']] = self.daemon_id
                 os.environ['PYTHONUNBUFFERED'] = '1'

@@ -360,8 +381,15 @@ class Daemon:

         except Exception:
             daemon_error = traceback.format_exc()
+            from meerschaum.config.paths import DAEMON_ERROR_LOG_PATH
             with open(DAEMON_ERROR_LOG_PATH, 'a+', encoding='utf-8') as f:
-                f.write(
+                f.write(
+                    f"Error in Daemon '{self}':\n\n"
+                    f"{sys.stdin=}\n"
+                    f"{self.stdin_file_path=}\n"
+                    f"{self.stdin_file_path.exists()=}\n\n"
+                    f"{daemon_error}\n\n"
+                )
             warn(f"Encountered an error while running the daemon '{self}':\n{daemon_error}")

     def _capture_process_timestamp(

@@ -396,6 +424,8 @@ class Daemon:
         self,
         keep_daemon_output: bool = True,
         allow_dirty_run: bool = False,
+        wait: bool = False,
+        timeout: Union[int, float] = 4,
         debug: bool = False,
     ) -> SuccessTuple:
         """Run the daemon as a child process and continue executing the parent.

@@ -410,6 +440,12 @@ class Daemon:
             This option is dangerous because if the same `daemon_id` runs concurrently,
             the last to finish will overwrite the output of the first.

+        wait: bool, default True
+            If `True`, block until `Daemon.status` is running (or the timeout expires).
+
+        timeout: Union[int, float], default 4
+            If `wait` is `True`, block for up to `timeout` seconds before returning a failure.
+
         Returns
         -------
         A SuccessTuple indicating success.

@@ -433,20 +469,44 @@ class Daemon:
             return _write_pickle_success_tuple

         _launch_daemon_code = (
-            "from meerschaum.utils.daemon import Daemon; "
-
-
-
+            "from meerschaum.utils.daemon import Daemon, _daemons; "
+            f"daemon = Daemon(daemon_id='{self.daemon_id}'); "
+            f"_daemons['{self.daemon_id}'] = daemon; "
+            f"daemon._run_exit(keep_daemon_output={keep_daemon_output}, "
+            "allow_dirty_run=True)"
         )
         env = dict(os.environ)
-        env[STATIC_CONFIG['environment']['noninteractive']] = 'true'
         _launch_success_bool = venv_exec(_launch_daemon_code, debug=debug, venv=None, env=env)
         msg = (
             "Success"
             if _launch_success_bool
             else f"Failed to start daemon '{self.daemon_id}'."
         )
-
+        if not wait or not _launch_success_bool:
+            return _launch_success_bool, msg
+
+        timeout = self.get_timeout_seconds(timeout)
+        check_timeout_interval = self.get_check_timeout_interval_seconds()
+
+        if not timeout:
+            success = self.status == 'running'
+            msg = "Success" if success else f"Failed to run daemon '{self.daemon_id}'."
+            if success:
+                self._capture_process_timestamp('began')
+            return success, msg
+
+        begin = time.perf_counter()
+        while (time.perf_counter() - begin) < timeout:
+            if self.status == 'running':
+                self._capture_process_timestamp('began')
+                return True, "Success"
+            time.sleep(check_timeout_interval)
+
+        return False, (
+            f"Failed to start daemon '{self.daemon_id}' within {timeout} second"
+            + ('s' if timeout != 1 else '') + '.'
+        )
+

     def kill(self, timeout: Union[int, float, None] = 8) -> SuccessTuple:
         """
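With the new `wait` and `timeout` parameters, the launch routine can poll `Daemon.status` until the child reports `'running'`. The method's name is not visible in this hunk; assuming it is the daemon's public run method, a hedged usage sketch might look like this:

```python
# Block for up to 10 seconds until the child process reports 'running'.
success, msg = daemon.run(wait=True, timeout=10)
if not success:
    print(f"Daemon failed to start: {msg}")
```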
@@ -466,10 +526,14 @@ class Daemon:
         success, msg = self._send_signal(signal.SIGTERM, timeout=timeout)
         if success:
             self._write_stop_file('kill')
+            self.stdin_file.close()
+            self._remove_blocking_stdin_file()
             return success, msg

         if self.status == 'stopped':
             self._write_stop_file('kill')
+            self.stdin_file.close()
+            self._remove_blocking_stdin_file()
             return True, "Process has already stopped."

         psutil = attempt_import('psutil')

@@ -494,6 +558,8 @@ class Daemon:
             pass

         self._write_stop_file('kill')
+        self.stdin_file.close()
+        self._remove_blocking_stdin_file()
         return True, "Success"

     def quit(self, timeout: Union[int, float, None] = None) -> SuccessTuple:

@@ -504,6 +570,8 @@ class Daemon:
         signal_success, signal_msg = self._send_signal(signal.SIGINT, timeout=timeout)
         if signal_success:
             self._write_stop_file('quit')
+            self.stdin_file.close()
+            self._remove_blocking_stdin_file()
         return signal_success, signal_msg

     def pause(

@@ -526,6 +594,8 @@ class Daemon:
         -------
         A `SuccessTuple` indicating whether the `Daemon` process was successfully suspended.
         """
+        self._remove_blocking_stdin_file()
+
         if self.process is None:
             return False, f"Daemon '{self.daemon_id}' is not running and cannot be paused."

@@ -533,6 +603,8 @@ class Daemon:
             return True, f"Daemon '{self.daemon_id}' is already paused."

         self._write_stop_file('pause')
+        self.stdin_file.close()
+        self._remove_blocking_stdin_file()
         try:
             self.process.suspend()
         except Exception as e:

@@ -598,6 +670,9 @@ class Daemon:

         self._remove_stop_file()
         try:
+            if self.process is None:
+                return False, f"Cannot resume daemon '{self.daemon_id}'."
+
             self.process.resume()
         except Exception as e:
             return False, f"Failed to resume daemon '{self.daemon_id}':\n{e}"

@@ -671,6 +746,18 @@ class Daemon:
         except Exception:
             return {}

+    def _remove_blocking_stdin_file(self) -> mrsm.SuccessTuple:
+        """
+        Remove the blocking STDIN file if it exists.
+        """
+        try:
+            if self.blocking_stdin_file_path.exists():
+                self.blocking_stdin_file_path.unlink()
+        except Exception as e:
+            return False, str(e)
+
+        return True, "Success"
+
     def _handle_sigterm(self, signal_number: int, stack_frame: 'frame') -> None:
         """
         Handle `SIGTERM` within the `Daemon` context.

@@ -799,7 +886,7 @@ class Daemon:
         if self.process is None:
             return 'stopped'

-        psutil = attempt_import('psutil')
+        psutil = attempt_import('psutil', lazy=False)
         try:
             if self.process.status() == 'stopped':
                 return 'paused'

@@ -820,6 +907,7 @@ class Daemon:
         """
         Return a Daemon's path from its `daemon_id`.
         """
+        from meerschaum.config.paths import DAEMON_RESOURCES_PATH
         return DAEMON_RESOURCES_PATH / daemon_id

     @property

@@ -855,7 +943,12 @@ class Daemon:
         """
         Return the log path.
         """
-
+        logs_cf = self.properties.get('logs', None) or {}
+        if 'path' not in logs_cf:
+            from meerschaum.config.paths import LOGS_RESOURCES_PATH
+            return LOGS_RESOURCES_PATH / (self.daemon_id + '.log')
+
+        return pathlib.Path(logs_cf['path'])

     @property
     def stdin_file_path(self) -> pathlib.Path:

@@ -874,13 +967,33 @@ class Daemon:

         return self.path / 'input.stdin.block'

+    @property
+    def prompt_kwargs_file_path(self) -> pathlib.Path:
+        """
+        Return the file path to the kwargs for the invoking `prompt()`.
+        """
+        return self.path / 'prompt_kwargs.json'
+
     @property
     def log_offset_path(self) -> pathlib.Path:
         """
         Return the log offset file path.
         """
+        from meerschaum.config.paths import LOGS_RESOURCES_PATH
         return LOGS_RESOURCES_PATH / ('.' + self.daemon_id + '.log.offset')

+    @property
+    def log_offset_lock(self) -> 'fasteners.InterProcessLock':
+        """
+        Return the process lock context manager.
+        """
+        if '_log_offset_lock' in self.__dict__:
+            return self._log_offset_lock
+
+        fasteners = attempt_import('fasteners')
+        self._log_offset_lock = fasteners.InterProcessLock(self.log_offset_path)
+        return self._log_offset_lock
+
     @property
     def rotating_log(self) -> RotatingFile:
         """
@@ -889,17 +1002,32 @@ class Daemon:
         if '_rotating_log' in self.__dict__:
             return self._rotating_log

-
-
-        )
+        logs_cf = self.properties.get('logs', None) or {}
+        write_timestamps = logs_cf.get('write_timestamps', None)
         if write_timestamps is None:
             write_timestamps = get_config('jobs', 'logs', 'timestamps', 'enabled')

+        timestamp_format = logs_cf.get('timestamp_format', None)
+        if timestamp_format is None:
+            timestamp_format = get_config('jobs', 'logs', 'timestamps', 'format')
+
+        num_files_to_keep = logs_cf.get('num_files_to_keep', None)
+        if num_files_to_keep is None:
+            num_files_to_keep = get_config('jobs', 'logs', 'num_files_to_keep')
+
+        max_file_size = logs_cf.get('max_file_size', None)
+        if max_file_size is None:
+            max_file_size = get_config('jobs', 'logs', 'max_file_size')
+
+        redirect_streams = logs_cf.get('redirect_streams', True)
+
         self._rotating_log = RotatingFile(
             self.log_path,
-            redirect_streams=
+            redirect_streams=redirect_streams,
             write_timestamps=write_timestamps,
-            timestamp_format=
+            timestamp_format=timestamp_format,
+            num_files_to_keep=num_files_to_keep,
+            max_file_size=max_file_size,
         )
         return self._rotating_log

@@ -908,8 +1036,8 @@ class Daemon:
         """
         Return the file handler for the stdin file.
         """
-        if (
-            return
+        if (_stdin_file := self.__dict__.get('_stdin_file', None)):
+            return _stdin_file

         self._stdin_file = StdinFile(
             self.stdin_file_path,

@@ -918,17 +1046,34 @@ class Daemon:
         return self._stdin_file

     @property
-    def log_text(self) ->
+    def log_text(self) -> Union[str, None]:
         """
         Read the log files and return their contents.
         Returns `None` if the log file does not exist.
         """
+        logs_cf = self.properties.get('logs', None) or {}
+        write_timestamps = logs_cf.get('write_timestamps', None)
+        if write_timestamps is None:
+            write_timestamps = get_config('jobs', 'logs', 'timestamps', 'enabled')
+
+        timestamp_format = logs_cf.get('timestamp_format', None)
+        if timestamp_format is None:
+            timestamp_format = get_config('jobs', 'logs', 'timestamps', 'format')
+
+        num_files_to_keep = logs_cf.get('num_files_to_keep', None)
+        if num_files_to_keep is None:
+            num_files_to_keep = get_config('jobs', 'logs', 'num_files_to_keep')
+
+        max_file_size = logs_cf.get('max_file_size', None)
+        if max_file_size is None:
+            max_file_size = get_config('jobs', 'logs', 'max_file_size')
+
         new_rotating_log = RotatingFile(
             self.rotating_log.file_path,
-            num_files_to_keep
-            max_file_size
-            write_timestamps
-            timestamp_format
+            num_files_to_keep=num_files_to_keep,
+            max_file_size=max_file_size,
+            write_timestamps=write_timestamps,
+            timestamp_format=timestamp_format,
         )
         return new_rotating_log.read()

@@ -953,20 +1098,25 @@ class Daemon:
         if not self.log_offset_path.exists():
             return 0, 0

-
-
-
-
-
+        try:
+            with open(self.log_offset_path, 'r', encoding='utf-8') as f:
+                cursor_text = f.read()
+            cursor_parts = cursor_text.split(' ')
+            subfile_index, subfile_position = int(cursor_parts[0]), int(cursor_parts[1])
+            return subfile_index, subfile_position
+        except Exception as e:
+            warn(f"Failed to read cursor:\n{e}")
+            return 0, 0

     def _write_log_offset(self) -> None:
         """
         Write the current log offset file.
         """
-        with
-
-
-
+        with self.log_offset_lock:
+            with open(self.log_offset_path, 'w+', encoding='utf-8') as f:
+                subfile_index = self.rotating_log._cursor[0]
+                subfile_position = self.rotating_log._cursor[1]
+                f.write(f"{subfile_index} {subfile_position}")

     @property
     def pid(self) -> Union[int, None]:
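The offset is persisted as two space-separated integers: the subfile index and the position within that subfile. A round-trip sketch of that format (the path is illustrative):

```python
import pathlib

offset_path = pathlib.Path('/tmp/.example-daemon.log.offset')  # illustrative path

# Write the cursor as "<subfile_index> <subfile_position>", as _write_log_offset does.
offset_path.write_text(f"{2} {4096}", encoding='utf-8')

# Read it back the way the reader above splits on a single space.
index_str, position_str = offset_path.read_text(encoding='utf-8').split(' ')
subfile_index, subfile_position = int(index_str), int(position_str)
assert (subfile_index, subfile_position) == (2, 4096)
```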
@@ -1124,6 +1274,10 @@ class Daemon:
         import pickle
         import traceback
         from meerschaum.utils.misc import generate_password
+
+        if not self.pickle:
+            return True, "Success"
+
         backup_path = self.pickle_path.parent / (generate_password(7) + '.pkl')
         try:
             self.path.mkdir(parents=True, exist_ok=True)

meerschaum/utils/daemon/RotatingFile.py

@@ -14,12 +14,13 @@ import traceback
 import sys
 import atexit
 from datetime import datetime, timezone
-from typing import List, Optional, Tuple
+from typing import List, Optional, Tuple, Callable
 from meerschaum.config import get_config
 from meerschaum.utils.warnings import warn
 from meerschaum.utils.daemon.FileDescriptorInterceptor import FileDescriptorInterceptor
 from meerschaum.utils.threading import Thread
 import meerschaum as mrsm
+import threading
 daemon = mrsm.attempt_import('daemon')

 class RotatingFile(io.IOBase):

@@ -38,6 +39,7 @@ class RotatingFile(io.IOBase):
         redirect_streams: bool = False,
         write_timestamps: bool = False,
         timestamp_format: Optional[str] = None,
+        write_callback: Optional[Callable[[str], None]] = None,
     ):
         """
         Create a file-like object which manages other files.

@@ -66,6 +68,9 @@ class RotatingFile(io.IOBase):
         timestamp_format: str, default None
             If `write_timestamps` is `True`, use this format for the timestamps.
             Defaults to `'%Y-%m-%d %H:%M'`.
+
+        write_callback: Optional[Callable[[str], None]], default None
+            If provided, execute this callback with the data to be written.
         """
         self.file_path = pathlib.Path(file_path)
         if num_files_to_keep is None:

@@ -74,17 +79,18 @@ class RotatingFile(io.IOBase):
             max_file_size = get_config('jobs', 'logs', 'max_file_size')
         if timestamp_format is None:
             timestamp_format = get_config('jobs', 'logs', 'timestamps', 'format')
-        if num_files_to_keep <
-            raise ValueError("At least
-        if max_file_size <
-            raise ValueError("Subfiles must contain at least
+        if num_files_to_keep < 1:
+            raise ValueError("At least 1 file must be kept.")
+        if max_file_size < 100:
+            raise ValueError("Subfiles must contain at least 100 bytes.")

         self.num_files_to_keep = num_files_to_keep
         self.max_file_size = max_file_size
         self.redirect_streams = redirect_streams
         self.write_timestamps = write_timestamps
         self.timestamp_format = timestamp_format
-        self.
+        self.write_callback = write_callback
+        self.subfile_regex_pattern = re.compile(r'(.*)\.log(?:\.\d+)?')

         ### When subfiles are opened, map from their index to the file objects.
         self.subfile_objects = {}
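`RotatingFile` now accepts a `write_callback`, which the `write()` hunk further down invokes with each chunk of data before it is written to the subfiles. A minimal sketch using the constructor signature shown here; the log path and callback are illustrative:

```python
from meerschaum.utils.daemon.RotatingFile import RotatingFile

def mirror_chunk(data: str) -> None:
    """Hypothetical callback: mirror each written chunk elsewhere."""
    print(f"log chunk: {data!r}")

rotating_log = RotatingFile(
    '/tmp/example.log',           # illustrative path
    write_callback=mirror_chunk,  # invoked with the data before each write
)
rotating_log.write("hello from the daemon\n")
rotating_log.flush()
```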
@@ -186,7 +192,7 @@ class RotatingFile(io.IOBase):
         """
         try:
             return int(subfile_name.replace(self.file_path.name + '.', ''))
-        except Exception
+        except Exception:
             return -1


@@ -272,7 +278,7 @@ class RotatingFile(io.IOBase):
         try:
             daemon.daemon.redirect_stream(sys.stdout, self._current_file_obj)
             daemon.daemon.redirect_stream(sys.stderr, self._current_file_obj)
-        except OSError
+        except OSError:
             warn(
                 f"Encountered an issue when redirecting streams:\n{traceback.format_exc()}"
             )

@@ -287,30 +293,36 @@ class RotatingFile(io.IOBase):
             self.is_subfile_too_large(latest_subfile_index, potential_new_len)
         )
         if create_new_file:
-
-            new_subfile_index = old_subfile_index + 1
-            new_file_path = self.get_subfile_path_from_index(new_subfile_index)
-            self._previous_file_obj = self._current_file_obj
-            self._current_file_obj = open(new_file_path, 'a+', encoding='utf-8')
-            self.subfile_objects[new_subfile_index] = self._current_file_obj
-            self.flush()
-
-            if self._previous_file_obj is not None:
-                if self.redirect_streams:
-                    self._redirected_subfile_objects[old_subfile_index] = self._previous_file_obj
-                    daemon.daemon.redirect_stream(self._previous_file_obj, self._current_file_obj)
-                    daemon.daemon.redirect_stream(sys.stdout, self._current_file_obj)
-                    daemon.daemon.redirect_stream(sys.stderr, self._current_file_obj)
-                self.close(unused_only=True)
+            self.increment_subfiles()

-
-        if self._previous_file_obj is self._current_file_obj:
-            self._previous_file_obj = None
+        return self._current_file_obj

-
+    def increment_subfiles(self, increment_by: int = 1):
+        """
+        Create a new subfile and switch the file pointer over.
+        """
+        latest_subfile_index = self.get_latest_subfile_index()
+        old_subfile_index = latest_subfile_index
+        new_subfile_index = old_subfile_index + increment_by
+        new_file_path = self.get_subfile_path_from_index(new_subfile_index)
+        self._previous_file_obj = self._current_file_obj
+        self._current_file_obj = open(new_file_path, 'a+', encoding='utf-8')
+        self.subfile_objects[new_subfile_index] = self._current_file_obj
+        self.flush()

-
+        if self.redirect_streams:
+            if self._previous_file_obj is not None:
+                self._redirected_subfile_objects[old_subfile_index] = self._previous_file_obj
+                daemon.daemon.redirect_stream(self._previous_file_obj, self._current_file_obj)
+                daemon.daemon.redirect_stream(sys.stdout, self._current_file_obj)
+                daemon.daemon.redirect_stream(sys.stderr, self._current_file_obj)
+            self.close(unused_only=True)
+
+        ### Sanity check in case writing somehow fails.
+        if self._previous_file_obj is self._current_file_obj:
+            self._previous_file_obj = None

+        self.delete(unused_only=True)

     def close(self, unused_only: bool = False) -> None:
         """

@@ -330,7 +342,7 @@ class RotatingFile(io.IOBase):
             try:
                 if not subfile_object.closed:
                     subfile_object.close()
-            except Exception
+            except Exception:
                 warn(f"Failed to close an open subfile:\n{traceback.format_exc()}")

             _ = self.subfile_objects.pop(subfile_index, None)

@@ -359,6 +371,12 @@ class RotatingFile(io.IOBase):
         As such, if data is larger than max_file_size, then the corresponding subfile
         may exceed this limit.
         """
+        try:
+            if callable(self.write_callback):
+                self.write_callback(data)
+        except Exception:
+            warn(f"Failed to execute write callback:\n{traceback.format_exc()}")
+
         try:
             self.file_path.parent.mkdir(exist_ok=True, parents=True)
             if isinstance(data, bytes):

@@ -379,7 +397,7 @@ class RotatingFile(io.IOBase):
         except BrokenPipeError:
             warn("BrokenPipeError encountered. The daemon may have been terminated.")
             return
-        except Exception
+        except Exception:
             warn(f"Failed to write to subfile:\n{traceback.format_exc()}")
         self.flush()
         self.delete(unused_only=True)

@@ -410,11 +428,10 @@ class RotatingFile(io.IOBase):
         )
         for subfile_path_to_delete in existing_subfile_paths[0:end_ix]:
             subfile_index = self.get_index_from_subfile_name(subfile_path_to_delete.name)
-            subfile_object = self.subfile_objects.get(subfile_index, None)

             try:
                 subfile_path_to_delete.unlink()
-            except Exception
+            except Exception:
                 warn(
                     f"Unable to delete subfile '{subfile_path_to_delete}':\n"
                     + f"{traceback.format_exc()}"

@@ -586,20 +603,21 @@ class RotatingFile(io.IOBase):
             if not subfile_object.closed:
                 try:
                     subfile_object.flush()
-                except Exception
+                except Exception:
                     warn(f"Failed to flush subfile {subfile_index}:\n{traceback.format_exc()}")
+
         if self.redirect_streams:
             try:
                 sys.stdout.flush()
             except BrokenPipeError:
                 pass
-            except Exception
+            except Exception:
                 warn(f"Failed to flush STDOUT:\n{traceback.format_exc()}")
             try:
                 sys.stderr.flush()
             except BrokenPipeError:
                 pass
-            except Exception
+            except Exception:
                 warn(f"Failed to flush STDERR:\n{traceback.format_exc()}")


@@ -665,10 +683,19 @@ class RotatingFile(io.IOBase):
         for thread in interceptor_threads[:end_ix]:
             try:
                 thread.join()
-            except Exception
+            except Exception:
                 warn(f"Failed to join interceptor threads:\n{traceback.format_exc()}")
         del interceptor_threads[:end_ix]

+    def touch(self):
+        """
+        Touch the latest subfile.
+        """
+        subfile_path = self.get_latest_subfile_path()
+        subfile_path.touch()
+
+    def isatty(self) -> bool:
+        return True

     def __repr__(self) -> str:
         """