meerschaum 2.2.0rc1__py3-none-any.whl → 2.2.0rc3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. meerschaum/__main__.py +1 -1
  2. meerschaum/actions/show.py +68 -43
  3. meerschaum/api/__init__.py +16 -11
  4. meerschaum/api/dash/callbacks/dashboard.py +2 -7
  5. meerschaum/api/dash/pipes.py +33 -9
  6. meerschaum/api/dash/plugins.py +25 -9
  7. meerschaum/api/resources/templates/termpage.html +3 -0
  8. meerschaum/api/routes/_login.py +5 -4
  9. meerschaum/api/routes/_plugins.py +6 -3
  10. meerschaum/config/_dash.py +11 -0
  11. meerschaum/config/_default.py +3 -1
  12. meerschaum/config/_jobs.py +10 -4
  13. meerschaum/config/_paths.py +2 -0
  14. meerschaum/config/_sync.py +2 -3
  15. meerschaum/config/_version.py +1 -1
  16. meerschaum/config/stack/__init__.py +6 -6
  17. meerschaum/config/stack/grafana/__init__.py +1 -1
  18. meerschaum/config/static/__init__.py +4 -1
  19. meerschaum/connectors/__init__.py +2 -0
  20. meerschaum/connectors/sql/SQLConnector.py +4 -2
  21. meerschaum/connectors/sql/_create_engine.py +4 -4
  22. meerschaum/connectors/sql/_instance.py +3 -1
  23. meerschaum/connectors/sql/_pipes.py +53 -38
  24. meerschaum/connectors/sql/_plugins.py +0 -2
  25. meerschaum/connectors/sql/_sql.py +7 -9
  26. meerschaum/core/User/_User.py +158 -16
  27. meerschaum/core/User/__init__.py +1 -1
  28. meerschaum/plugins/_Plugin.py +1 -1
  29. meerschaum/plugins/__init__.py +23 -1
  30. meerschaum/utils/daemon/Daemon.py +38 -12
  31. meerschaum/utils/daemon/FileDescriptorInterceptor.py +90 -10
  32. meerschaum/utils/daemon/RotatingFile.py +82 -59
  33. meerschaum/utils/daemon/__init__.py +3 -0
  34. meerschaum/utils/packages/__init__.py +10 -4
  35. meerschaum/utils/packages/_packages.py +7 -8
  36. meerschaum/utils/process.py +13 -10
  37. meerschaum/utils/schedule.py +15 -1
  38. meerschaum/utils/threading.py +1 -0
  39. {meerschaum-2.2.0rc1.dist-info → meerschaum-2.2.0rc3.dist-info}/METADATA +19 -21
  40. {meerschaum-2.2.0rc1.dist-info → meerschaum-2.2.0rc3.dist-info}/RECORD +46 -45
  41. {meerschaum-2.2.0rc1.dist-info → meerschaum-2.2.0rc3.dist-info}/LICENSE +0 -0
  42. {meerschaum-2.2.0rc1.dist-info → meerschaum-2.2.0rc3.dist-info}/NOTICE +0 -0
  43. {meerschaum-2.2.0rc1.dist-info → meerschaum-2.2.0rc3.dist-info}/WHEEL +0 -0
  44. {meerschaum-2.2.0rc1.dist-info → meerschaum-2.2.0rc3.dist-info}/entry_points.txt +0 -0
  45. {meerschaum-2.2.0rc1.dist-info → meerschaum-2.2.0rc3.dist-info}/top_level.txt +0 -0
  46. {meerschaum-2.2.0rc1.dist-info → meerschaum-2.2.0rc3.dist-info}/zip-safe +0 -0
meerschaum/utils/daemon/FileDescriptorInterceptor.py
@@ -7,8 +7,15 @@ Intercept OS-level file descriptors.
 """
 
 import os
+import select
+import traceback
+from threading import Event
 from datetime import datetime
 from meerschaum.utils.typing import Callable
+from meerschaum.utils.warnings import warn
+
+FD_CLOSED: int = 9
+STOP_READING_FD_EVENT: Event = Event()
 
 class FileDescriptorInterceptor:
     """
@@ -28,10 +35,12 @@ class FileDescriptorInterceptor:
         injection_hook: Callable[[], str]
             A callable which returns a string to be injected into the written data.
         """
+        self.stop_event = Event()
         self.injection_hook = injection_hook
         self.original_file_descriptor = file_descriptor
         self.new_file_descriptor = os.dup(file_descriptor)
         self.read_pipe, self.write_pipe = os.pipe()
+        self.signal_read_pipe, self.signal_write_pipe = os.pipe()
         os.dup2(self.write_pipe, file_descriptor)
 
     def start_interception(self):
@@ -40,21 +49,92 @@ class FileDescriptorInterceptor:
 
         NOTE: This is blocking and is meant to be run in a thread.
         """
-        while True:
-            data = os.read(self.read_pipe, 1024)
-            if not data:
-                break
+        os.set_blocking(self.read_pipe, False)
+        os.set_blocking(self.signal_read_pipe, False)
+        is_first_read = True
+        while not self.stop_event.is_set():
+            try:
+                rlist, _, _ = select.select([self.read_pipe, self.signal_read_pipe], [], [], 0.1)
+                if self.signal_read_pipe in rlist:
+                    break
+                if not rlist:
+                    continue
+                data = os.read(self.read_pipe, 1024)
+                if not data:
+                    break
+            except BlockingIOError:
+                continue
+            except OSError as e:
+                continue
+
+            first_char_is_newline = data[0] == b'\n'
+            last_char_is_newline = data[-1] == b'\n'
+
             injected_str = self.injection_hook()
-            modified_data = data.replace(b'\n', f'\n{injected_str}'.encode('utf-8'))
+            injected_bytes = injected_str.encode('utf-8')
+
+            if is_first_read:
+                data = b'\n' + data
+                is_first_read = False
+
+            modified_data = (
+                (data[:-1].replace(b'\n', b'\n' + injected_bytes) + b'\n')
+                if last_char_is_newline
+                else data.replace(b'\n', b'\n' + injected_bytes)
+            )
             os.write(self.new_file_descriptor, modified_data)
 
+
     def stop_interception(self):
         """
-        Restore the file descriptors and close the new pipes.
+        Close the new file descriptors.
         """
+        self.stop_event.set()
+        os.write(self.signal_write_pipe, b'\0')
+        try:
+            os.close(self.new_file_descriptor)
+        except OSError as e:
+            if e.errno != FD_CLOSED:
+                warn(
+                    f"Error while trying to close the duplicated file descriptor:\n"
+                    + f"{traceback.format_exc()}"
+                )
+
         try:
-            os.dup2(self.new_file_descriptor, self.original_file_descriptor)
-            os.close(self.read_pipe)
             os.close(self.write_pipe)
-        except OSError:
-            pass
+        except OSError as e:
+            if e.errno != FD_CLOSED:
+                warn(
+                    f"Error while trying to close the write-pipe "
+                    + "to the intercepted file descriptor:\n"
+                    + f"{traceback.format_exc()}"
+                )
+        try:
+            os.close(self.read_pipe)
+        except OSError as e:
+            if e.errno != FD_CLOSED:
+                warn(
+                    f"Error while trying to close the read-pipe "
+                    + "to the intercepted file descriptor:\n"
+                    + f"{traceback.format_exc()}"
+                )
+
+        try:
+            os.close(self.signal_read_pipe)
+        except OSError as e:
+            if e.errno != FD_CLOSED:
+                warn(
+                    f"Error while trying to close the signal-read-pipe "
+                    + "to the intercepted file descriptor:\n"
+                    + f"{traceback.format_exc()}"
+                )
+
+        try:
+            os.close(self.signal_write_pipe)
+        except OSError as e:
+            if e.errno != FD_CLOSED:
+                warn(
+                    f"Error while trying to close the signal-write-pipe "
+                    + "to the intercepted file descriptor:\n"
+                    + f"{traceback.format_exc()}"
+                )
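
The rewritten start_interception()/stop_interception() pair above relies on a dedicated signal pipe so the blocking select() call can be woken immediately at shutdown. A minimal standalone sketch of that pattern, with hypothetical names that are not part of the package:

    import os
    import select
    from threading import Event, Thread

    stop_event = Event()
    read_fd, write_fd = os.pipe()       # data pipe being monitored
    signal_r, signal_w = os.pipe()      # wake-up pipe used only for shutdown

    def reader():
        os.set_blocking(read_fd, False)
        while not stop_event.is_set():
            rlist, _, _ = select.select([read_fd, signal_r], [], [], 0.1)
            if signal_r in rlist:       # shutdown requested: exit the loop
                break
            if read_fd in rlist and not os.read(read_fd, 1024):
                break                   # writer closed the pipe

    thread = Thread(target=reader, daemon=True)
    thread.start()
    os.write(write_fd, b'hello\n')
    stop_event.set()
    os.write(signal_w, b'\0')           # unblocks select() without waiting for the timeout
    thread.join()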

meerschaum/utils/daemon/RotatingFile.py
@@ -38,7 +38,7 @@ class RotatingFile(io.IOBase):
         max_file_size: Optional[int] = None,
         redirect_streams: bool = False,
         write_timestamps: bool = False,
-        timestamps_format: str = '%Y-%m-%d %H:%M | ',
+        timestamp_format: str = '%Y-%m-%d %H:%M',
     ):
         """
         Create a file-like object which manages other files.
@@ -78,7 +78,7 @@ class RotatingFile(io.IOBase):
         self.max_file_size = max_file_size
         self.redirect_streams = redirect_streams
         self.write_timestamps = write_timestamps
-        self.timestamps_format = timestamps_format
+        self.timestamp_format = timestamp_format
         self.subfile_regex_pattern = re.compile(
             r'^'
             + self.file_path.name
@@ -98,34 +98,14 @@ class RotatingFile(io.IOBase):
         atexit.register(self.close)
 
 
-
     def fileno(self):
         """
         Return the file descriptor for the latest subfile.
         """
-        import inspect
-        stack = inspect.stack()
-        parent_level = stack[1]
-        parent_module = parent_level[0].f_globals.get('__file__')
-        # if parent_module.endswith('daemon.py'):
-            # self._monkey_patch_os_write()
-        self.refresh_files()
+        self.refresh_files(start_interception=False)
         return self._current_file_obj.fileno()
 
 
-    def _monkey_patch_os_write(self):
-        import os
-        import sys
-        import pathlib
-        path = pathlib.Path('/home/bmeares/test1.log')
-        original_write = os.write
-        def intercept(*args, **kwargs):
-            with open(path, 'w', encoding='utf-8') as f:
-                f.write(str(args))
-            original_write(*args, **kwargs)
-        os.write = intercept
-
-
     def get_latest_subfile_path(self) -> pathlib.Path:
         """
         Return the path for the latest subfile to which to write into.
@@ -252,7 +232,11 @@ class RotatingFile(io.IOBase):
         ]
 
 
-    def refresh_files(self, potential_new_len: int = 0) -> '_io.TextUIWrapper':
+    def refresh_files(
+        self,
+        potential_new_len: int = 0,
+        start_interception: bool = False,
+    ) -> '_io.TextUIWrapper':
         """
         Check the state of the subfiles.
         If the latest subfile is too large, create a new file and delete old ones.
@@ -260,6 +244,9 @@ class RotatingFile(io.IOBase):
         Parameters
         ----------
         potential_new_len: int, default 0
+
+        start_interception: bool, default False
+            If `True`, kick off the file interception threads.
         """
         self.flush()
 
@@ -278,10 +265,15 @@ class RotatingFile(io.IOBase):
         if is_first_run_with_logs or lost_latest_handle:
             self._current_file_obj = open(latest_subfile_path, 'a+', encoding='utf-8')
             if self.redirect_streams:
-                self.stop_log_fd_interception()
-                daemon.daemon.redirect_stream(sys.stdout, self._current_file_obj)
-                daemon.daemon.redirect_stream(sys.stderr, self._current_file_obj)
-                self.start_log_fd_interception()
+                try:
+                    daemon.daemon.redirect_stream(sys.stdout, self._current_file_obj)
+                    daemon.daemon.redirect_stream(sys.stderr, self._current_file_obj)
+                except OSError as e:
+                    warn(
+                        f"Encountered an issue when redirecting streams:\n{traceback.format_exc()}"
+                    )
+                if start_interception:
+                    self.start_log_fd_interception()
 
         create_new_file = (
             (latest_subfile_index == -1)
@@ -302,20 +294,17 @@ class RotatingFile(io.IOBase):
             if self._previous_file_obj is not None:
                 if self.redirect_streams:
                     self._redirected_subfile_objects[old_subfile_index] = self._previous_file_obj
-                    self.stop_log_fd_interception()
                     daemon.daemon.redirect_stream(self._previous_file_obj, self._current_file_obj)
                     daemon.daemon.redirect_stream(sys.stdout, self._current_file_obj)
                     daemon.daemon.redirect_stream(sys.stderr, self._current_file_obj)
-                    self.start_log_fd_interception()
                 self.close(unused_only=True)
 
         ### Sanity check in case writing somehow fails.
         if self._previous_file_obj is self._current_file_obj:
-            self._previous_file_obj is None
+            self._previous_file_obj = None
 
         self.delete(unused_only=True)
 
-
         return self._current_file_obj
 
 
@@ -328,6 +317,7 @@ class RotatingFile(io.IOBase):
         unused_only: bool, default False
             If `True`, only close file descriptors not currently in use.
         """
+        self.stop_log_fd_interception(unused_only=unused_only)
         subfile_indices = sorted(self.subfile_objects.keys())
         for subfile_index in subfile_indices:
             subfile_object = self.subfile_objects[subfile_index]
@@ -335,14 +325,14 @@ class RotatingFile(io.IOBase):
                 continue
             try:
                 if not subfile_object.closed:
-                    # subfile_object.flush()
                     subfile_object.close()
-                _ = self.subfile_objects.pop(subfile_index, None)
-                if self.redirect_streams:
-                    _ = self._redirected_subfile_objects.pop(subfile_index, None)
             except Exception as e:
                 warn(f"Failed to close an open subfile:\n{traceback.format_exc()}")
 
+            _ = self.subfile_objects.pop(subfile_index, None)
+            if self.redirect_streams:
+                _ = self._redirected_subfile_objects.pop(subfile_index, None)
+
         if not unused_only:
             self._previous_file_obj = None
             self._current_file_obj = None
@@ -352,7 +342,7 @@ class RotatingFile(io.IOBase):
         """
         Return the current minute prefixm string.
         """
-        return datetime.now(timezone.utc).strftime(self.timestamps_format)
+        return datetime.now(timezone.utc).strftime(self.timestamp_format) + ' | '
 
 
     def write(self, data: str) -> None:
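
With timestamps_format renamed to timestamp_format, the ' | ' separator is no longer part of the format string; get_timestamp_prefix_str() appends it after formatting. Roughly (value shown is illustrative):

    from datetime import datetime, timezone

    timestamp_format = '%Y-%m-%d %H:%M'
    prefix = datetime.now(timezone.utc).strftime(timestamp_format) + ' | '
    # e.g. '2024-05-01 12:34 | '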
@@ -371,7 +361,10 @@ class RotatingFile(io.IOBase):
 
         prefix_str = self.get_timestamp_prefix_str() if self.write_timestamps else ""
         suffix_str = "\n" if self.write_timestamps else ""
-        self.refresh_files(potential_new_len=len(prefix_str + data + suffix_str))
+        self.refresh_files(
+            potential_new_len = len(prefix_str + data + suffix_str),
+            start_interception = True,
+        )
         try:
             if prefix_str:
                 self._current_file_obj.write(prefix_str)
@@ -582,16 +575,23 @@ class RotatingFile(io.IOBase):
             try:
                 subfile_object.flush()
             except Exception as e:
-                warn(f"Failed to flush subfile:\n{traceback.format_exc()}")
+                warn(f"Failed to flush subfile {subfile_index}:\n{traceback.format_exc()}")
         if self.redirect_streams:
-            sys.stdout.flush()
-            sys.stderr.flush()
+            try:
+                sys.stdout.flush()
+            except Exception as e:
+                warn(f"Failed to flush STDOUT:\n{traceback.format_exc()}")
+            try:
+                sys.stderr.flush()
+            except Exception as e:
+                warn(f"Failed to flush STDERR:\n{traceback.format_exc()}")
 
 
     def start_log_fd_interception(self):
         """
         Start the file descriptor monitoring threads.
         """
+        threads = self.__dict__.get('_interceptor_threads', [])
         self._stdout_interceptor = FileDescriptorInterceptor(
             sys.stdout.fileno(),
             self.get_timestamp_prefix_str,
@@ -600,29 +600,52 @@ class RotatingFile(io.IOBase):
             sys.stderr.fileno(),
             self.get_timestamp_prefix_str,
         )
-        self._stdout_interceptor_thread = Thread(target=self._stdout_interceptor.start_interception)
-        self._stderr_interceptor_thread = Thread(target=self._stderr_interceptor.start_interception)
+
+        self._stdout_interceptor_thread = Thread(
+            target = self._stdout_interceptor.start_interception,
+            daemon = True,
+        )
+        self._stderr_interceptor_thread = Thread(
+            target = self._stderr_interceptor.start_interception,
+            daemon = True,
+        )
         self._stdout_interceptor_thread.start()
         self._stderr_interceptor_thread.start()
-
-
-    def stop_log_fd_interception(self):
+        self._intercepting = True
+
+        if '_interceptor_threads' not in self.__dict__:
+            self._interceptor_threads = []
+        if '_interceptors' not in self.__dict__:
+            self._interceptors = []
+        self._interceptor_threads.extend([
+            self._stdout_interceptor_thread,
+            self._stderr_interceptor_thread,
+        ])
+        self._interceptors.extend([
+            self._stdout_interceptor,
+            self._stderr_interceptor,
+        ])
+        self.stop_log_fd_interception(unused_only=True)
+
+    def stop_log_fd_interception(self, unused_only: bool = False):
         """
         Stop the file descriptor monitoring threads.
         """
-        stdout_interceptor = self.__dict__.get('_stdout_interceptor', None)
-        stderr_interceptor = self.__dict__.get('_stderr_interceptor', None)
-        stdout_interceptor_thread = self.__dict__.get('_stdout_interceptor_thread', None)
-        stderr_interceptor_thread = self.__dict__.get('_stderr_interceptor_thread', None)
-        if stdout_interceptor is None:
-            return
-        stdout_interceptor.stop_interception()
-        stderr_interceptor.stop_interception()
-        try:
-            stdout_interceptor_thread.join()
-            stderr_interceptor_thread.join()
-        except Exception:
-            pass
+        interceptors = self.__dict__.get('_interceptors', [])
+        interceptor_threads = self.__dict__.get('_interceptor_threads', [])
+
+        end_ix = len(interceptors) if not unused_only else -2
+
+        for interceptor in interceptors[:end_ix]:
+            interceptor.stop_interception()
+        del interceptors[:end_ix]
+
+        for thread in interceptor_threads[:end_ix]:
+            try:
+                thread.join()
+            except Exception as e:
+                warn(f"Failed to join interceptor threads:\n{traceback.format_exc()}")
+        del interceptor_threads[:end_ix]
 
 
     def __repr__(self) -> str:
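
The new unused_only flag makes stop_log_fd_interception() stop and join every interceptor except the two most recently started ones (the active stdout/stderr pair), because end_ix becomes -2 in that case. A small sketch of the slicing with placeholder values:

    interceptors = ['old_stdout', 'old_stderr', 'new_stdout', 'new_stderr']
    end_ix = -2                         # unused_only=True
    to_stop = interceptors[:end_ix]     # ['old_stdout', 'old_stderr']
    del interceptors[:end_ix]           # keeps ['new_stdout', 'new_stderr']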

meerschaum/utils/daemon/__init__.py
@@ -12,6 +12,7 @@ from meerschaum.utils.typing import SuccessTuple, List, Optional, Callable, Any,
 from meerschaum.config._paths import DAEMON_RESOURCES_PATH
 from meerschaum.utils.daemon.Daemon import Daemon
 from meerschaum.utils.daemon.RotatingFile import RotatingFile
+from meerschaum.utils.daemon.FileDescriptorInterceptor import FileDescriptorInterceptor
 
 
 def daemon_entry(sysargs: Optional[List[str]] = None) -> SuccessTuple:
@@ -63,6 +64,8 @@ def daemon_entry(sysargs: Optional[List[str]] = None) -> SuccessTuple:
 
     ### Only run if the kwargs equal or no actions are provided.
     if existing_kwargs == _args or not _args.get('action', []):
+        if daemon.status == 'running':
+            return True, f"Daemon '{daemon}' is already running."
         return daemon.run(
             debug = debug,
             allow_dirty_run = True,

meerschaum/utils/packages/__init__.py
@@ -829,8 +829,11 @@ def pip_install(
            check_wheel = False, debug = debug,
        ):
            warn(
-                f"Failed to install `setuptools` and `wheel` for virtual environment '{venv}'.",
-                color=False,
+                (
+                    "Failed to install `setuptools` and `wheel` for virtual "
+                    + f"environment '{venv}'."
+                ),
+                color = False,
            )
 
    if requirements_file_path is not None:
@@ -893,13 +896,16 @@ def pip_install(
            f"Failed to clean up package '{_install_no_version}'.",
        )
 
-    success = run_python_package(
+    rc = run_python_package(
        'pip',
        _args + _packages,
        venv = venv,
        env = _get_pip_os_env(),
        debug = debug,
-    ) == 0
+    )
+    if debug:
+        print(f"{rc=}")
+    success = rc == 0
 
    msg = (
        "Successfully " + ('un' if _uninstall else '') + "installed packages." if success

meerschaum/utils/packages/_packages.py
@@ -60,7 +60,7 @@ packages: Dict[str, Dict[str, str]] = {
        'pymysql' : 'PyMySQL>=0.9.0',
        'aiomysql' : 'aiomysql>=0.0.21',
        'sqlalchemy_cockroachdb' : 'sqlalchemy-cockroachdb>=2.0.0',
-        'duckdb' : 'duckdb>=0.9.0',
+        'duckdb' : 'duckdb<0.10.0',
        'duckdb_engine' : 'duckdb-engine>=0.9.2',
    },
    '_drivers': {
@@ -120,8 +120,8 @@ packages: Dict[str, Dict[str, str]] = {
 packages['sql'] = {
    'numpy' : 'numpy>=1.18.5',
    'pandas' : 'pandas[parquet]>=2.0.1',
-    'pyarrow' : 'pyarrow>=7.0.0',
-    'dask' : 'dask>=2023.5.0',
+    'pyarrow' : 'pyarrow>=16.1.0',
+    'dask' : 'dask[dataframe]>=2024.5.1',
    'pytz' : 'pytz',
    'joblib' : 'joblib>=0.17.0',
    'sqlalchemy' : 'SQLAlchemy>=2.0.5',
@@ -142,14 +142,13 @@ packages['dash'] = {
    'tornado' : 'tornado>=6.1.0',
 }
 packages['api'] = {
-    'uvicorn' : 'uvicorn[standard]>=0.22.0',
-    'gunicorn' : 'gunicorn>=20.1.0',
+    'uvicorn' : 'uvicorn[standard]>=0.29.0',
+    'gunicorn' : 'gunicorn>=22.0.0',
    'dotenv' : 'python-dotenv>=0.20.0',
    'websockets' : 'websockets>=11.0.3',
-    'fastapi' : 'fastapi>=0.100.0',
-    'passlib' : 'passlib>=1.7.4',
+    'fastapi' : 'fastapi>=0.111.0',
    'fastapi_login' : 'fastapi-login>=1.7.2',
-    'multipart' : 'python-multipart>=0.0.5',
+    'multipart' : 'python-multipart>=0.0.9',
    'httpx' : 'httpx>=0.24.1',
    'websockets' : 'websockets>=11.0.3',
 }

meerschaum/utils/process.py
@@ -11,6 +11,7 @@ See `meerschaum.utils.pool` for multiprocessing and
 from __future__ import annotations
 import os, signal, subprocess, sys, platform
 from meerschaum.utils.typing import Union, Optional, Any, Callable, Dict, Tuple
+from meerschaum.config.static import STATIC_CONFIG
 
 def run_process(
     *args,
@@ -68,9 +69,18 @@ def run_process(
     if platform.system() == 'Windows':
         foreground = False
 
-    if line_callback is not None:
+    def print_line(line):
+        sys.stdout.write(line.decode('utf-8'))
+        sys.stdout.flush()
+
+    if capture_output or line_callback is not None:
+        kw['stdout'] = subprocess.PIPE
+        kw['stderr'] = subprocess.STDOUT
+    elif os.environ.get(STATIC_CONFIG['environment']['daemon_id']):
         kw['stdout'] = subprocess.PIPE
         kw['stderr'] = subprocess.STDOUT
+        if line_callback is None:
+            line_callback = print_line
 
     if 'env' not in kw:
         kw['env'] = os.environ
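
run_process() now merges stderr into stdout and pipes the output whenever output is captured, a line_callback is given, or the process runs inside a daemon (detected through the daemon_id environment variable), falling back to the print_line echo. A standalone sketch of that piped path, using a hypothetical POSIX command:

    import subprocess
    import sys

    def print_line(line: bytes) -> None:
        sys.stdout.write(line.decode('utf-8'))
        sys.stdout.flush()

    proc = subprocess.Popen(
        ['echo', 'hello'],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,       # merge stderr into the stdout pipe
    )
    for line in iter(proc.stdout.readline, b''):
        print_line(line)                # echo each line as it arrives
    proc.wait()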
@@ -112,15 +122,6 @@ def run_process(
         kw['preexec_fn'] = new_pgid
 
     try:
-        # fork the child
-        # stdout, stderr = (
-            # (sys.stdout, sys.stderr) if not capture_output
-            # else (subprocess.PIPE, subprocess.PIPE)
-        # )
-        if capture_output:
-            kw['stdout'] = subprocess.PIPE
-            kw['stderr'] = subprocess.PIPE
-
         child = subprocess.Popen(*args, **kw)
 
         # we can't set the process group id from the parent since the child
@@ -197,6 +198,8 @@ def poll_process(
     while proc.poll() is None:
         line = proc.stdout.readline()
         line_callback(line)
+
     if timeout_seconds is not None:
         watchdog_thread.cancel()
+
     return proc.poll()

meerschaum/utils/schedule.py
@@ -278,7 +278,21 @@ def parse_start_time(schedule: str, now: Optional[datetime] = None) -> datetime:
     starting_str = ('now' if len(starting_parts) == 1 else starting_parts[-1]).strip()
     now = now or round_time(datetime.now(timezone.utc), timedelta(minutes=1))
     try:
-        starting_ts = now if starting_str == 'now' else dateutil_parser.parse(starting_str)
+        if starting_str == 'now':
+            starting_ts = now
+        elif 'tomorrow' in starting_str or 'today' in starting_str:
+            today = round_time(now, timedelta(days=1))
+            tomorrow = today + timedelta(days=1)
+            is_tomorrow = 'tomorrow' in starting_str
+            time_str = starting_str.replace('tomorrow', '').replace('today', '').strip()
+            time_ts = dateutil_parser.parse(time_str) if time_str else today
+            starting_ts = (
+                (tomorrow if is_tomorrow else today)
+                + timedelta(hours=time_ts.hour)
+                + timedelta(minutes=time_ts.minute)
+            )
+        else:
+            starting_ts = dateutil_parser.parse(starting_str)
         schedule_parse_error = None
     except Exception as e:
         warn(f"Unable to parse starting time from '{starting_str}'.", stack=False)

meerschaum/utils/threading.py
@@ -10,6 +10,7 @@ from __future__ import annotations
 from meerschaum.utils.typing import Optional
 
 import threading
+import traceback
 Lock = threading.Lock
 RLock = threading.RLock
 Event = threading.Event