meerschaum 2.2.0rc1__py3-none-any.whl → 2.2.0rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. meerschaum/__main__.py +1 -1
  2. meerschaum/actions/show.py +68 -43
  3. meerschaum/api/dash/callbacks/dashboard.py +2 -7
  4. meerschaum/api/dash/pipes.py +33 -9
  5. meerschaum/api/dash/plugins.py +25 -9
  6. meerschaum/api/resources/templates/termpage.html +3 -0
  7. meerschaum/api/routes/_login.py +5 -4
  8. meerschaum/api/routes/_plugins.py +6 -3
  9. meerschaum/config/_dash.py +11 -0
  10. meerschaum/config/_default.py +3 -1
  11. meerschaum/config/_jobs.py +10 -4
  12. meerschaum/config/_paths.py +1 -0
  13. meerschaum/config/_sync.py +2 -3
  14. meerschaum/config/_version.py +1 -1
  15. meerschaum/config/stack/__init__.py +6 -6
  16. meerschaum/config/stack/grafana/__init__.py +1 -1
  17. meerschaum/config/static/__init__.py +3 -1
  18. meerschaum/connectors/sql/_plugins.py +0 -2
  19. meerschaum/core/User/_User.py +156 -16
  20. meerschaum/core/User/__init__.py +1 -1
  21. meerschaum/plugins/_Plugin.py +1 -1
  22. meerschaum/utils/daemon/Daemon.py +23 -12
  23. meerschaum/utils/daemon/FileDescriptorInterceptor.py +46 -4
  24. meerschaum/utils/daemon/RotatingFile.py +76 -57
  25. meerschaum/utils/daemon/__init__.py +1 -0
  26. meerschaum/utils/threading.py +1 -0
  27. {meerschaum-2.2.0rc1.dist-info → meerschaum-2.2.0rc2.dist-info}/METADATA +1 -1
  28. {meerschaum-2.2.0rc1.dist-info → meerschaum-2.2.0rc2.dist-info}/RECORD +34 -33
  29. {meerschaum-2.2.0rc1.dist-info → meerschaum-2.2.0rc2.dist-info}/LICENSE +0 -0
  30. {meerschaum-2.2.0rc1.dist-info → meerschaum-2.2.0rc2.dist-info}/NOTICE +0 -0
  31. {meerschaum-2.2.0rc1.dist-info → meerschaum-2.2.0rc2.dist-info}/WHEEL +0 -0
  32. {meerschaum-2.2.0rc1.dist-info → meerschaum-2.2.0rc2.dist-info}/entry_points.txt +0 -0
  33. {meerschaum-2.2.0rc1.dist-info → meerschaum-2.2.0rc2.dist-info}/top_level.txt +0 -0
  34. {meerschaum-2.2.0rc1.dist-info → meerschaum-2.2.0rc2.dist-info}/zip-safe +0 -0
--- a/meerschaum/core/User/_User.py
+++ b/meerschaum/core/User/_User.py
@@ -7,22 +7,155 @@ User class definition
 """
 
 from __future__ import annotations
-from meerschaum.utils.typing import Optional, Dict, Any
-
-pwd_context = None
-def get_pwd_context():
-    global pwd_context
-    if pwd_context is None:
-        from meerschaum.config.static import STATIC_CONFIG
-        from meerschaum.utils.packages import attempt_import
-        hash_config = STATIC_CONFIG['users']['password_hash']
-        passlib_context = attempt_import('passlib.context')
-        pwd_context = passlib_context.CryptContext(
-            schemes = hash_config['schemes'],
-            default = hash_config['default'],
-            pbkdf2_sha256__default_rounds = hash_config['pbkdf2_sha256__default_rounds']
+import os
+import hashlib
+import hmac
+from binascii import b2a_base64, a2b_base64, Error as _BinAsciiError
+from meerschaum.utils.typing import Optional, Dict, Any, Tuple
+from meerschaum.config.static import STATIC_CONFIG
+from meerschaum.utils.warnings import warn
+
+
+__all__ = ('hash_password', 'verify_password', 'User')
+
+def hash_password(
+        password: str,
+        salt: Optional[bytes] = None,
+        rounds: Optional[int] = None,
+    ) -> str:
+    """
+    Return an encoded hash string from the given password.
+
+    Parameters
+    ----------
+    password: str
+        The password to be hashed.
+
+    salt: Optional[str], default None
+        If provided, use these bytes for the salt in the hash.
+        Otherwise defaults to 16 random bytes.
+
+    rounds: Optional[int], default None
+        If provided, use this number of rounds to generate the hash.
+        Defaults to 3,000,000.
+
+    Returns
+    -------
+    An encoded hash string to be stored in a database.
+    See the `passlib` documentation on the string format:
+    https://passlib.readthedocs.io/en/stable/lib/passlib.hash.pbkdf2_digest.html#format-algorithm
+    """
+    hash_config = STATIC_CONFIG['users']['password_hash']
+    if password is None:
+        password = ''
+    if salt is None:
+        salt = os.urandom(hash_config['salt_bytes'])
+    if rounds is None:
+        rounds = hash_config['pbkdf2_sha256__default_rounds']
+
+    pw_hash = hashlib.pbkdf2_hmac(
+        hash_config['algorithm_name'],
+        password.encode('utf-8'),
+        salt,
+        rounds,
+    )
+    return (
+        f"$pbkdf2-{hash_config['algorithm_name']}"
+        + f"${hash_config['pbkdf2_sha256__default_rounds']}"
+        + '$' + ab64_encode(salt).decode('utf-8')
+        + '$' + ab64_encode(pw_hash).decode('utf-8')
+    )
+
+
+def verify_password(
+        password: str,
+        password_hash: str,
+    ) -> bool:
+    """
+    Return `True` if the password matches the provided hash.
+
+    Parameters
+    ----------
+    password: str
+        The password to be checked.
+
+    password_hash: str
+        The encoded hash string as generated from `hash_password()`.
+
+    Returns
+    -------
+    A `bool` indicating whether `password` matches `password_hash`.
+    """
+    hash_config = STATIC_CONFIG['users']['password_hash']
+    try:
+        digest, rounds_str, encoded_salt, encoded_checksum = password_hash.split('$')[1:]
+        algorithm_name = digest.split('-')[-1]
+        salt = ab64_decode(encoded_salt)
+        checksum = ab64_decode(encoded_checksum)
+        rounds = int(rounds_str)
+    except Exception as e:
+        warn(f"Failed to extract context from password hash '{password_hash}'. Is it corrupted?")
+        return False
+
+    return hmac.compare_digest(
+        checksum,
+        hashlib.pbkdf2_hmac(
+            algorithm_name,
+            password.encode('utf-8'),
+            salt,
+            rounds,
         )
-    return pwd_context
+    )
+
+_BASE64_STRIP = b"=\n"
+_BASE64_PAD1 = b"="
+_BASE64_PAD2 = b"=="
+
+def ab64_encode(data):
+    return b64s_encode(data).replace(b"+", b".")
+
+def ab64_decode(data):
+    """
+    decode from shortened base64 format which omits padding & whitespace.
+    uses custom ``./`` altchars, but supports decoding normal ``+/`` altchars as well.
+    """
+    if isinstance(data, str):
+        # needs bytes for replace() call, but want to accept ascii-unicode ala a2b_base64()
+        try:
+            data = data.encode("ascii")
+        except UnicodeEncodeError:
+            raise ValueError("string argument should contain only ASCII characters")
+    return b64s_decode(data.replace(b".", b"+"))
+
+
+def b64s_encode(data):
+    return b2a_base64(data).rstrip(_BASE64_STRIP)
+
+def b64s_decode(data):
+    """
+    decode from shortened base64 format which omits padding & whitespace.
+    uses default ``+/`` altchars.
+    """
+    if isinstance(data, str):
+        # needs bytes for replace() call, but want to accept ascii-unicode ala a2b_base64()
+        try:
+            data = data.encode("ascii")
+        except UnicodeEncodeError as ue:
+            raise ValueError("string argument should contain only ASCII characters") from ue
+    off = len(data) & 3
+    if off == 0:
+        pass
+    elif off == 2:
+        data += _BASE64_PAD2
+    elif off == 3:
+        data += _BASE64_PAD1
+    else: # off == 1
+        raise ValueError("Invalid base64 input")
+    try:
+        return a2b_base64(data)
+    except _BinAsciiError as err:
+        raise TypeError(err) from err
+
 
 class User:
     """
@@ -42,7 +175,6 @@ class User:
         if password is None:
             password = ''
         self.password = password
-        self.password_hash = get_pwd_context().hash(password)
         self.username = username
         self.email = email
         self.type = type
@@ -80,3 +212,11 @@ class User:
     @user_id.setter
     def user_id(self, user_id):
         self._user_id = user_id
+
+    @property
+    def password_hash(self):
+        _password_hash = self.__dict__.get('_password_hash', None)
+        if _password_hash is not None:
+            return _password_hash
+        self._password_hash = hash_password(self.password)
+        return self._password_hash
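
Taken together, the `_User.py` hunks replace the lazily built passlib `CryptContext` with a standalone PBKDF2 implementation on top of `hashlib` and `hmac`, keep passlib's `$pbkdf2-<digest>$<rounds>$<salt>$<checksum>` string encoding, and defer hashing until `password_hash` is first accessed. A minimal round-trip sketch of the new module-level API (the password and sample output below are invented):

from meerschaum.core.User import hash_password, verify_password

# Hash with a random salt and the configured default rounds.
encoded = hash_password('hunter2')
# e.g. '$pbkdf2-sha256$3000000$<ab64 salt>$<ab64 checksum>'

# verify_password() parses the digest, rounds, and salt back out of the string.
assert verify_password('hunter2', encoded)
assert not verify_password('wrong-password', encoded)

One quirk worth noting: the encoded string records `hash_config['pbkdf2_sha256__default_rounds']` rather than the `rounds` argument, so a hash generated with a non-default `rounds` value would not verify.
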
--- a/meerschaum/core/User/__init__.py
+++ b/meerschaum/core/User/__init__.py
@@ -6,4 +6,4 @@
 Manager users' metadata via the User class
 """
 
-from meerschaum.core.User._User import User
+from meerschaum.core.User._User import User, hash_password, verify_password
--- a/meerschaum/plugins/_Plugin.py
+++ b/meerschaum/plugins/_Plugin.py
@@ -209,7 +209,7 @@ class Plugin:
         def parse_gitignore() -> 'Set[str]':
             gitignore_path = pathlib.Path(path) / '.gitignore'
             if not gitignore_path.exists():
-                return set()
+                return set(default_patterns_to_ignore)
             with open(gitignore_path, 'r', encoding='utf-8') as f:
                 gitignore_text = f.read()
             return set(pathspec.PathSpec.from_lines(
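
The fallback means a plugin without a `.gitignore` still ignores `default_patterns_to_ignore` (defined in the surrounding method, not shown in this hunk). A hedged sketch of how such patterns behave under `pathspec` (the pattern values are illustrative, not the plugin's actual defaults):

import pathspec

# Hypothetical stand-ins for default_patterns_to_ignore.
patterns = ['.git/', '__pycache__/', '*.pyc']

spec = pathspec.PathSpec.from_lines('gitwildmatch', patterns)
print(spec.match_file('plugin/__pycache__/mod.pyc'))  # True
print(spec.match_file('plugin/setup.py'))             # False
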
--- a/meerschaum/utils/daemon/Daemon.py
+++ b/meerschaum/utils/daemon/Daemon.py
@@ -15,6 +15,7 @@ import signal
 import sys
 import time
 import traceback
+from functools import partial
 from datetime import datetime, timezone
 from meerschaum.utils.typing import Optional, Dict, Any, SuccessTuple, Callable, List, Union
 from meerschaum.config import get_config
@@ -139,11 +140,12 @@ class Daemon:
         Nothing — this will exit the parent process.
         """
         import platform, sys, os, traceback
-        from meerschaum.config._paths import LOGS_RESOURCES_PATH
+        from meerschaum.config._paths import DAEMON_ERROR_LOG_PATH
         from meerschaum.utils.warnings import warn
-        daemons_error_log_path = LOGS_RESOURCES_PATH / 'daemons_error.log'
-
+        from meerschaum.config import get_config
         daemon = attempt_import('daemon')
+        lines = get_config('jobs', 'terminal', 'lines')
+        columns = get_config('jobs','terminal', 'columns')
 
         if platform.system() == 'Windows':
             return False, "Windows is no longer supported."
@@ -164,10 +166,14 @@ class Daemon:
         )
 
         log_refresh_seconds = get_config('jobs', 'logs', 'refresh_files_seconds')
-        self._log_refresh_timer = RepeatTimer(log_refresh_seconds, self.rotating_log.refresh_files)
-
+        self._log_refresh_timer = RepeatTimer(
+            log_refresh_seconds,
+            partial(self.rotating_log.refresh_files, start_interception=True),
+        )
         try:
+            os.environ['LINES'], os.environ['COLUMNS'] = str(int(lines)), str(int(columns))
             with self._daemon_context:
+                self.rotating_log.refresh_files(start_interception=True)
                 try:
                     with open(self.pid_path, 'w+', encoding='utf-8') as f:
                         f.write(str(os.getpid()))
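
The refresh timer now re-arms stream interception on every tick by binding `start_interception=True` ahead of time with `functools.partial`, since the timer invokes its callback with no arguments. A small sketch of that binding pattern with a stand-in callback:

from functools import partial

def refresh_files(potential_new_len=0, start_interception=False):
    # Stand-in for RotatingFile.refresh_files().
    print(f"refreshing (start_interception={start_interception})")

callback = partial(refresh_files, start_interception=True)
callback()  # prints: refreshing (start_interception=True)
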
@@ -192,11 +198,11 @@ class Daemon:
             return result
         except Exception as e:
             daemon_error = traceback.format_exc()
-            with open(LOGS_RESOURCES_PATH, 'a+', encoding='utf-8') as f:
+            with open(DAEMON_ERROR_LOG_PATH, 'a+', encoding='utf-8') as f:
                 f.write(daemon_error)
 
         if daemon_error:
-            warn("Encountered an error while starting the daemon '{self}':\n{daemon_error}")
+            warn(f"Encountered an error while starting the daemon '{self}':\n{daemon_error}")
 
 
     def _capture_process_timestamp(
@@ -464,10 +470,9 @@ class Daemon:
         if daemon_context is not None:
             daemon_context.close()
 
-        self.rotating_log.stop_log_fd_interception()
-
         _close_pools()
-        raise SystemExit(0)
+        self.rotating_log.stop_log_fd_interception()
+        raise KeyboardInterrupt()
 
 
     def _handle_sigterm(self, signal_number: int, stack_frame: 'frame') -> None:
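
Switching the SIGINT handler from `SystemExit(0)` to `KeyboardInterrupt()` means an interrupted job unwinds the same way a foreground Ctrl-C would, so `except KeyboardInterrupt` blocks in the daemon's target function still fire. A generic sketch of the pattern (not meerschaum's actual handler):

import signal

def _handle_sigint(signal_number, stack_frame):
    # Clean up here, then re-raise as a KeyboardInterrupt so the
    # main loop's except-blocks observe a normal interrupt.
    raise KeyboardInterrupt()

signal.signal(signal.SIGINT, _handle_sigint)

try:
    signal.raise_signal(signal.SIGINT)
except KeyboardInterrupt:
    print('interrupted cleanly')
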
@@ -483,8 +488,6 @@ class Daemon:
         if daemon_context is not None:
             daemon_context.close()
 
-        self.rotating_log.stop_log_fd_interception()
-
         _close_pools()
         raise SystemExit(1)
 
@@ -667,6 +670,7 @@ class Daemon:
             self.log_path,
             redirect_streams = True,
             write_timestamps = True,
+            timestamp_format = get_config('jobs', 'logs', 'timestamp_format'),
         )
         return self._rotating_log
 
@@ -905,6 +909,13 @@ class Daemon:
             return False, msg
         if not keep_logs:
             self.rotating_log.delete()
+            try:
+                if self.log_offset_path.exists():
+                    self.log_offset_path.unlink()
+            except Exception as e:
+                msg = f"Failed to remove offset file for '{self.daemon_id}':\n{e}"
+                warn(msg)
+                return False, msg
         return True, "Success"
 
 
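
The `timestamp_format` keyword added to the `rotating_log` property above comes straight from configuration. A hedged sketch of the wiring, assuming the `jobs:logs:timestamp_format` key resolves to a strftime pattern and using a hypothetical log path:

import pathlib
from meerschaum.config import get_config
from meerschaum.utils.daemon import RotatingFile

log_path = pathlib.Path('example_daemon.log')  # hypothetical path
rotating_log = RotatingFile(
    log_path,
    write_timestamps = True,
    timestamp_format = get_config('jobs', 'logs', 'timestamp_format'),
)
rotating_log.write('hello from the daemon')
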
--- a/meerschaum/utils/daemon/FileDescriptorInterceptor.py
+++ b/meerschaum/utils/daemon/FileDescriptorInterceptor.py
@@ -7,8 +7,12 @@ Intercept OS-level file descriptors.
 """
 
 import os
+import traceback
 from datetime import datetime
 from meerschaum.utils.typing import Callable
+from meerschaum.utils.warnings import warn
+
+FD_CLOSED: int = 9
 
 class FileDescriptorInterceptor:
     """
@@ -40,12 +44,28 @@ class FileDescriptorInterceptor:
 
         NOTE: This is blocking and is meant to be run in a thread.
         """
+        is_first_read = True
         while True:
             data = os.read(self.read_pipe, 1024)
             if not data:
                 break
+
+            first_char_is_newline = data[0] == b'\n'
+            last_char_is_newline = data[-1] == b'\n'
+
             injected_str = self.injection_hook()
-            modified_data = data.replace(b'\n', f'\n{injected_str}'.encode('utf-8'))
+            injected_bytes = injected_str.encode('utf-8')
+
+            if is_first_read:
+                data = b'\n' + data
+                is_first_read = False
+
+            modified_data = (
+                (data[:-1].replace(b'\n', b'\n' + injected_bytes) + b'\n')
+                if last_char_is_newline
+                else data.replace(b'\n', b'\n' + injected_bytes)
+            )
+
             os.write(self.new_file_descriptor, modified_data)
 
     def stop_interception(self):
@@ -54,7 +74,29 @@ class FileDescriptorInterceptor:
         """
         try:
             os.dup2(self.new_file_descriptor, self.original_file_descriptor)
-            os.close(self.read_pipe)
+            # os.close(self.new_file_descriptor)
+        except OSError as e:
+            if e.errno != FD_CLOSED:
+                warn(
+                    f"Error while trying to close the duplicated file descriptor:\n"
+                    + f"{traceback.format_exc()}"
+                )
+
+        try:
             os.close(self.write_pipe)
-        except OSError:
-            pass
+        except OSError as e:
+            if e.errno != FD_CLOSED:
+                warn(
+                    f"Error while trying to close the write-pipe "
+                    + "to the intercepted file descriptor:\n"
+                    + f"{traceback.format_exc()}"
+                )
+        try:
+            os.close(self.read_pipe)
+        except OSError as e:
+            if e.errno != FD_CLOSED:
+                warn(
+                    f"Error while trying to close the read-pipe "
+                    + "to the intercepted file descriptor:\n"
+                    + f"{traceback.format_exc()}"
+                )
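
`FD_CLOSED` is errno 9 (`EBADF`), so the reworked `stop_interception()` stays quiet about descriptors that are already closed and warns about anything else. The interception itself works by pointing a file descriptor at a pipe and pumping the pipe's contents onward, with the hook's output injected after each newline. A self-contained sketch of that dup2-and-pipe technique, independent of the class above:

import os
import sys
import threading

def intercept_fd(fd: int, injection_hook) -> None:
    # Keep a duplicate of the real destination, then route `fd` into a pipe.
    saved_fd = os.dup(fd)
    read_pipe, write_pipe = os.pipe()
    os.dup2(write_pipe, fd)

    def pump():
        while True:
            data = os.read(read_pipe, 1024)
            if not data:
                break
            injected = injection_hook().encode('utf-8')
            # Prefix whatever follows each newline, as in the class above.
            os.write(saved_fd, data.replace(b'\n', b'\n' + injected))

    threading.Thread(target=pump, daemon=True).start()

intercept_fd(sys.stdout.fileno(), lambda: '2024-01-01 00:00 | ')
os.write(sys.stdout.fileno(), b'hello\nworld\n')

The `last_char_is_newline` branch in the hunk above is meant to avoid this sketch's dangling prefix after a trailing newline, though note that indexing `bytes` yields an `int` in Python, so `data[-1] == b'\n'` always evaluates to `False`; an int-to-int comparison such as `data[-1] == b'\n'[0]` would be needed for the branch to trigger.
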
--- a/meerschaum/utils/daemon/RotatingFile.py
+++ b/meerschaum/utils/daemon/RotatingFile.py
@@ -38,7 +38,7 @@ class RotatingFile(io.IOBase):
             max_file_size: Optional[int] = None,
             redirect_streams: bool = False,
             write_timestamps: bool = False,
-            timestamps_format: str = '%Y-%m-%d %H:%M | ',
+            timestamp_format: str = '%Y-%m-%d %H:%M',
         ):
         """
         Create a file-like object which manages other files.
@@ -78,7 +78,7 @@ class RotatingFile(io.IOBase):
         self.max_file_size = max_file_size
         self.redirect_streams = redirect_streams
         self.write_timestamps = write_timestamps
-        self.timestamps_format = timestamps_format
+        self.timestamp_format = timestamp_format
         self.subfile_regex_pattern = re.compile(
             r'^'
             + self.file_path.name
@@ -98,34 +98,14 @@ class RotatingFile(io.IOBase):
         atexit.register(self.close)
 
 
-
     def fileno(self):
         """
         Return the file descriptor for the latest subfile.
         """
-        import inspect
-        stack = inspect.stack()
-        parent_level = stack[1]
-        parent_module = parent_level[0].f_globals.get('__file__')
-        # if parent_module.endswith('daemon.py'):
-        #     self._monkey_patch_os_write()
-        self.refresh_files()
+        self.refresh_files(start_interception=False)
         return self._current_file_obj.fileno()
 
 
-    def _monkey_patch_os_write(self):
-        import os
-        import sys
-        import pathlib
-        path = pathlib.Path('/home/bmeares/test1.log')
-        original_write = os.write
-        def intercept(*args, **kwargs):
-            with open(path, 'w', encoding='utf-8') as f:
-                f.write(str(args))
-            original_write(*args, **kwargs)
-        os.write = intercept
-
-
     def get_latest_subfile_path(self) -> pathlib.Path:
         """
         Return the path for the latest subfile to which to write into.
@@ -252,7 +232,11 @@ class RotatingFile(io.IOBase):
         ]
 
 
-    def refresh_files(self, potential_new_len: int = 0) -> '_io.TextUIWrapper':
+    def refresh_files(
+            self,
+            potential_new_len: int = 0,
+            start_interception: bool = False,
+        ) -> '_io.TextUIWrapper':
         """
         Check the state of the subfiles.
         If the latest subfile is too large, create a new file and delete old ones.
@@ -260,6 +244,9 @@ class RotatingFile(io.IOBase):
         Parameters
         ----------
         potential_new_len: int, default 0
+
+        start_interception: bool, default False
+            If `True`, kick off the file interception threads.
         """
         self.flush()
 
@@ -278,10 +265,15 @@ class RotatingFile(io.IOBase):
         if is_first_run_with_logs or lost_latest_handle:
             self._current_file_obj = open(latest_subfile_path, 'a+', encoding='utf-8')
             if self.redirect_streams:
-                self.stop_log_fd_interception()
-                daemon.daemon.redirect_stream(sys.stdout, self._current_file_obj)
-                daemon.daemon.redirect_stream(sys.stderr, self._current_file_obj)
-                self.start_log_fd_interception()
+                try:
+                    daemon.daemon.redirect_stream(sys.stdout, self._current_file_obj)
+                    daemon.daemon.redirect_stream(sys.stderr, self._current_file_obj)
+                except OSError as e:
+                    warn(
+                        f"Encountered an issue when redirecting streams:\n{traceback.format_exc()}"
+                    )
+                if start_interception:
+                    self.start_log_fd_interception()
 
         create_new_file = (
             (latest_subfile_index == -1)
@@ -302,20 +294,17 @@ class RotatingFile(io.IOBase):
         if self._previous_file_obj is not None:
             if self.redirect_streams:
                 self._redirected_subfile_objects[old_subfile_index] = self._previous_file_obj
-                self.stop_log_fd_interception()
                 daemon.daemon.redirect_stream(self._previous_file_obj, self._current_file_obj)
                 daemon.daemon.redirect_stream(sys.stdout, self._current_file_obj)
                 daemon.daemon.redirect_stream(sys.stderr, self._current_file_obj)
-                self.start_log_fd_interception()
             self.close(unused_only=True)
 
         ### Sanity check in case writing somehow fails.
         if self._previous_file_obj is self._current_file_obj:
-            self._previous_file_obj is None
+            self._previous_file_obj = None
 
         self.delete(unused_only=True)
 
-
         return self._current_file_obj
 
 
@@ -328,6 +317,7 @@ class RotatingFile(io.IOBase):
         unused_only: bool, default False
             If `True`, only close file descriptors not currently in use.
         """
+        self.stop_log_fd_interception(unused_only=unused_only)
         subfile_indices = sorted(self.subfile_objects.keys())
         for subfile_index in subfile_indices:
             subfile_object = self.subfile_objects[subfile_index]
@@ -335,14 +325,14 @@ class RotatingFile(io.IOBase):
                 continue
             try:
                 if not subfile_object.closed:
-                    # subfile_object.flush()
                     subfile_object.close()
-                _ = self.subfile_objects.pop(subfile_index, None)
-                if self.redirect_streams:
-                    _ = self._redirected_subfile_objects.pop(subfile_index, None)
             except Exception as e:
                 warn(f"Failed to close an open subfile:\n{traceback.format_exc()}")
 
+            _ = self.subfile_objects.pop(subfile_index, None)
+            if self.redirect_streams:
+                _ = self._redirected_subfile_objects.pop(subfile_index, None)
+
         if not unused_only:
             self._previous_file_obj = None
             self._current_file_obj = None
@@ -352,7 +342,7 @@ class RotatingFile(io.IOBase):
         """
         Return the current minute prefixm string.
         """
-        return datetime.now(timezone.utc).strftime(self.timestamps_format)
+        return datetime.now(timezone.utc).strftime(self.timestamp_format) + ' | '
 
 
     def write(self, data: str) -> None:
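
With the ' | ' separator moved out of the configurable value, `jobs:logs:timestamp_format` stays a pure strftime pattern and the prefix helper appends the separator itself. Roughly:

from datetime import datetime, timezone

timestamp_format = '%Y-%m-%d %H:%M'  # the new default
prefix = datetime.now(timezone.utc).strftime(timestamp_format) + ' | '
print(prefix + 'daemon output')  # e.g. '2024-05-01 13:37 | daemon output'
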
@@ -371,7 +361,10 @@ class RotatingFile(io.IOBase):
 
         prefix_str = self.get_timestamp_prefix_str() if self.write_timestamps else ""
         suffix_str = "\n" if self.write_timestamps else ""
-        self.refresh_files(potential_new_len=len(prefix_str + data + suffix_str))
+        self.refresh_files(
+            potential_new_len = len(prefix_str + data + suffix_str),
+            start_interception = True,
+        )
         try:
             if prefix_str:
                 self._current_file_obj.write(prefix_str)
@@ -582,16 +575,23 @@ class RotatingFile(io.IOBase):
             try:
                 subfile_object.flush()
             except Exception as e:
-                warn(f"Failed to flush subfile:\n{traceback.format_exc()}")
+                warn(f"Failed to flush subfile {subfile_index}:\n{traceback.format_exc()}")
         if self.redirect_streams:
-            sys.stdout.flush()
-            sys.stderr.flush()
+            try:
+                sys.stdout.flush()
+            except Exception as e:
+                warn(f"Failed to flush STDOUT:\n{traceback.format_exc()}")
+            try:
+                sys.stderr.flush()
+            except Exception as e:
+                warn(f"Failed to flush STDERR:\n{traceback.format_exc()}")
 
 
     def start_log_fd_interception(self):
         """
         Start the file descriptor monitoring threads.
         """
+        threads = self.__dict__.get('_interceptor_threads', [])
         self._stdout_interceptor = FileDescriptorInterceptor(
             sys.stdout.fileno(),
             self.get_timestamp_prefix_str,
@@ -600,29 +600,48 @@ class RotatingFile(io.IOBase):
             sys.stderr.fileno(),
             self.get_timestamp_prefix_str,
         )
-        self._stdout_interceptor_thread = Thread(target=self._stdout_interceptor.start_interception)
-        self._stderr_interceptor_thread = Thread(target=self._stderr_interceptor.start_interception)
+
+        self._stdout_interceptor_thread = Thread(
+            target = self._stdout_interceptor.start_interception,
+            daemon = True,
+        )
+        self._stderr_interceptor_thread = Thread(
+            target = self._stderr_interceptor.start_interception,
+            daemon = True,
+        )
         self._stdout_interceptor_thread.start()
         self._stderr_interceptor_thread.start()
+        self._intercepting = True
 
+        if '_interceptor_threads' not in self.__dict__:
+            self._interceptor_threads = []
+        if '_interceptors' not in self.__dict__:
+            self._interceptors = []
+        self._interceptor_threads.extend([
+            self._stdout_interceptor_thread,
+            self._stderr_interceptor_thread,
+        ])
+        self.stop_log_fd_interception(unused_only=True)
 
-    def stop_log_fd_interception(self):
+    def stop_log_fd_interception(self, unused_only: bool = False):
         """
         Stop the file descriptor monitoring threads.
         """
-        stdout_interceptor = self.__dict__.get('_stdout_interceptor', None)
-        stderr_interceptor = self.__dict__.get('_stderr_interceptor', None)
-        stdout_interceptor_thread = self.__dict__.get('_stdout_interceptor_thread', None)
-        stderr_interceptor_thread = self.__dict__.get('_stderr_interceptor_thread', None)
-        if stdout_interceptor is None:
-            return
-        stdout_interceptor.stop_interception()
-        stderr_interceptor.stop_interception()
-        try:
-            stdout_interceptor_thread.join()
-            stderr_interceptor_thread.join()
-        except Exception:
-            pass
+        interceptors = self.__dict__.get('_interceptors', [])
+        interceptor_threads = self.__dict__.get('_interceptor_threads', [])
+
+        end_ix = len(interceptors) if not unused_only else -2
+
+        for interceptor in interceptors[:end_ix]:
+            interceptor.stop_interception()
+        del interceptors[:end_ix]
+
+        for thread in interceptor_threads[:end_ix]:
+            try:
+                thread.join()
+            except Exception as e:
+                warn(f"Failed to join interceptor threads:\n{traceback.format_exc()}")
+        del interceptor_threads[:end_ix]
 
 
     def __repr__(self) -> str:
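
Interceptors and their threads are now tracked in parallel lists, so `stop_log_fd_interception()` can either tear everything down or, with `unused_only=True`, stop all but the most recent stdout/stderr pair (the `end_ix` of -2 excludes the last two entries from the slice). A minimal sketch of that bookkeeping with stand-in values:

# Stand-ins for FileDescriptorInterceptor instances.
interceptors = ['old_stdout', 'old_stderr', 'new_stdout', 'new_stderr']

def stop_interception(unused_only: bool = False):
    # An end_ix of -2 keeps the newest stdout/stderr pair alive.
    end_ix = len(interceptors) if not unused_only else -2
    for interceptor in interceptors[:end_ix]:
        print(f"stopping {interceptor}")
    del interceptors[:end_ix]

stop_interception(unused_only=True)  # stops old_stdout, old_stderr
print(interceptors)                  # ['new_stdout', 'new_stderr']
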
--- a/meerschaum/utils/daemon/__init__.py
+++ b/meerschaum/utils/daemon/__init__.py
@@ -12,6 +12,7 @@ from meerschaum.utils.typing import SuccessTuple, List, Optional, Callable, Any,
 from meerschaum.config._paths import DAEMON_RESOURCES_PATH
 from meerschaum.utils.daemon.Daemon import Daemon
 from meerschaum.utils.daemon.RotatingFile import RotatingFile
+from meerschaum.utils.daemon.FileDescriptorInterceptor import FileDescriptorInterceptor
 
 
 def daemon_entry(sysargs: Optional[List[str]] = None) -> SuccessTuple:
--- a/meerschaum/utils/threading.py
+++ b/meerschaum/utils/threading.py
@@ -10,6 +10,7 @@ from __future__ import annotations
 from meerschaum.utils.typing import Optional
 
 import threading
+import traceback
 Lock = threading.Lock
 RLock = threading.RLock
 Event = threading.Event
--- a/meerschaum-2.2.0rc1.dist-info/METADATA
+++ b/meerschaum-2.2.0rc2.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: meerschaum
-Version: 2.2.0rc1
+Version: 2.2.0rc2
 Summary: Sync Time-Series Pipes with Meerschaum
 Home-page: https://meerschaum.io
 Author: Bennett Meares