meerschaum 2.2.0.dev3__py3-none-any.whl → 2.2.0rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. meerschaum/__main__.py +1 -1
  2. meerschaum/_internal/entry.py +1 -1
  3. meerschaum/actions/show.py +128 -42
  4. meerschaum/api/dash/callbacks/dashboard.py +2 -7
  5. meerschaum/api/dash/pipes.py +33 -9
  6. meerschaum/api/dash/plugins.py +25 -9
  7. meerschaum/api/resources/templates/termpage.html +3 -0
  8. meerschaum/api/routes/_login.py +5 -4
  9. meerschaum/api/routes/_plugins.py +6 -3
  10. meerschaum/config/_dash.py +11 -0
  11. meerschaum/config/_default.py +3 -1
  12. meerschaum/config/_jobs.py +10 -4
  13. meerschaum/config/_paths.py +1 -0
  14. meerschaum/config/_sync.py +2 -3
  15. meerschaum/config/_version.py +1 -1
  16. meerschaum/config/stack/__init__.py +6 -6
  17. meerschaum/config/stack/grafana/__init__.py +1 -1
  18. meerschaum/config/static/__init__.py +3 -1
  19. meerschaum/connectors/sql/_plugins.py +0 -2
  20. meerschaum/core/User/_User.py +156 -16
  21. meerschaum/core/User/__init__.py +1 -1
  22. meerschaum/plugins/_Plugin.py +1 -1
  23. meerschaum/utils/daemon/Daemon.py +63 -34
  24. meerschaum/utils/daemon/FileDescriptorInterceptor.py +102 -0
  25. meerschaum/utils/daemon/RotatingFile.py +120 -14
  26. meerschaum/utils/daemon/__init__.py +1 -0
  27. meerschaum/utils/packages/__init__.py +9 -2
  28. meerschaum/utils/packages/_packages.py +3 -3
  29. meerschaum/utils/schedule.py +41 -47
  30. meerschaum/utils/threading.py +1 -0
  31. {meerschaum-2.2.0.dev3.dist-info → meerschaum-2.2.0rc2.dist-info}/METADATA +10 -9
  32. {meerschaum-2.2.0.dev3.dist-info → meerschaum-2.2.0rc2.dist-info}/RECORD +38 -36
  33. {meerschaum-2.2.0.dev3.dist-info → meerschaum-2.2.0rc2.dist-info}/WHEEL +1 -1
  34. {meerschaum-2.2.0.dev3.dist-info → meerschaum-2.2.0rc2.dist-info}/LICENSE +0 -0
  35. {meerschaum-2.2.0.dev3.dist-info → meerschaum-2.2.0rc2.dist-info}/NOTICE +0 -0
  36. {meerschaum-2.2.0.dev3.dist-info → meerschaum-2.2.0rc2.dist-info}/entry_points.txt +0 -0
  37. {meerschaum-2.2.0.dev3.dist-info → meerschaum-2.2.0rc2.dist-info}/top_level.txt +0 -0
  38. {meerschaum-2.2.0.dev3.dist-info → meerschaum-2.2.0rc2.dist-info}/zip-safe +0 -0

meerschaum/utils/daemon/RotatingFile.py
@@ -13,9 +13,13 @@ import pathlib
  import traceback
  import sys
  import atexit
+ from datetime import datetime, timezone, timedelta
  from typing import List, Union, Optional, Tuple
  from meerschaum.config import get_config
  from meerschaum.utils.warnings import warn
+ from meerschaum.utils.misc import round_time
+ from meerschaum.utils.daemon.FileDescriptorInterceptor import FileDescriptorInterceptor
+ from meerschaum.utils.threading import Thread
  import meerschaum as mrsm
  daemon = mrsm.attempt_import('daemon')

@@ -33,6 +37,8 @@ class RotatingFile(io.IOBase):
  num_files_to_keep: Optional[int] = None,
  max_file_size: Optional[int] = None,
  redirect_streams: bool = False,
+ write_timestamps: bool = False,
+ timestamp_format: str = '%Y-%m-%d %H:%M',
  ):
  """
  Create a file-like object which manages other files.
@@ -54,6 +60,9 @@ class RotatingFile(io.IOBase):

  NOTE: Only set this to `True` if you are entering into a daemon context.
  Doing so will redirect `sys.stdout` and `sys.stderr` into the log files.
+
+ write_timestamps: bool, default False
+ If `True`, prepend the current UTC timestamp to each line of the file.
  """
  self.file_path = pathlib.Path(file_path)
  if num_files_to_keep is None:
@@ -68,6 +77,8 @@ class RotatingFile(io.IOBase):
  self.num_files_to_keep = num_files_to_keep
  self.max_file_size = max_file_size
  self.redirect_streams = redirect_streams
+ self.write_timestamps = write_timestamps
+ self.timestamp_format = timestamp_format
  self.subfile_regex_pattern = re.compile(
  r'^'
  + self.file_path.name
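
For context, the new constructor options would be used along these lines. This is only an illustrative sketch (the log path and sizes are made up, not taken from the package); the parameter names come from the signature in the hunk above, and the import path matches the one added to meerschaum/utils/daemon/__init__.py further down.

    from meerschaum.utils.daemon.RotatingFile import RotatingFile

    # Hypothetical usage of the new timestamp options.
    log = RotatingFile(
        '/tmp/example-daemon.log',      # illustrative path
        num_files_to_keep = 5,
        max_file_size = 100_000,
        write_timestamps = True,        # new in this release
        timestamp_format = '%Y-%m-%d %H:%M',
    )

    # With write_timestamps=True, each write() call is prefixed with the
    # current UTC minute plus ' | ', e.g. '2024-04-01 12:34 | starting sync'.
    log.write('starting sync')
    log.close()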
@@ -91,7 +102,7 @@ class RotatingFile(io.IOBase):
  """
  Return the file descriptor for the latest subfile.
  """
- self.refresh_files()
+ self.refresh_files(start_interception=False)
  return self._current_file_obj.fileno()


@@ -221,7 +232,11 @@ class RotatingFile(io.IOBase):
  ]


- def refresh_files(self, potential_new_len: int = 0) -> '_io.TextUIWrapper':
+ def refresh_files(
+ self,
+ potential_new_len: int = 0,
+ start_interception: bool = False,
+ ) -> '_io.TextUIWrapper':
  """
  Check the state of the subfiles.
  If the latest subfile is too large, create a new file and delete old ones.
@@ -229,6 +244,9 @@ class RotatingFile(io.IOBase):
  Parameters
  ----------
  potential_new_len: int, default 0
+
+ start_interception: bool, default False
+ If `True`, kick off the file interception threads.
  """
  self.flush()

@@ -247,8 +265,15 @@ class RotatingFile(io.IOBase):
  if is_first_run_with_logs or lost_latest_handle:
  self._current_file_obj = open(latest_subfile_path, 'a+', encoding='utf-8')
  if self.redirect_streams:
- daemon.daemon.redirect_stream(sys.stdout, self._current_file_obj)
- daemon.daemon.redirect_stream(sys.stderr, self._current_file_obj)
+ try:
+ daemon.daemon.redirect_stream(sys.stdout, self._current_file_obj)
+ daemon.daemon.redirect_stream(sys.stderr, self._current_file_obj)
+ except OSError as e:
+ warn(
+ f"Encountered an issue when redirecting streams:\n{traceback.format_exc()}"
+ )
+ if start_interception:
+ self.start_log_fd_interception()

  create_new_file = (
  (latest_subfile_index == -1)
@@ -276,9 +301,10 @@ class RotatingFile(io.IOBase):

  ### Sanity check in case writing somehow fails.
  if self._previous_file_obj is self._current_file_obj:
- self._previous_file_obj is None
+ self._previous_file_obj = None

  self.delete(unused_only=True)
+
  return self._current_file_obj


@@ -291,6 +317,7 @@ class RotatingFile(io.IOBase):
  unused_only: bool, default False
  If `True`, only close file descriptors not currently in use.
  """
+ self.stop_log_fd_interception(unused_only=unused_only)
  subfile_indices = sorted(self.subfile_objects.keys())
  for subfile_index in subfile_indices:
  subfile_object = self.subfile_objects[subfile_index]
@@ -298,19 +325,26 @@ class RotatingFile(io.IOBase):
  continue
  try:
  if not subfile_object.closed:
- # subfile_object.flush()
  subfile_object.close()
- _ = self.subfile_objects.pop(subfile_index, None)
- if self.redirect_streams:
- _ = self._redirected_subfile_objects.pop(subfile_index, None)
  except Exception as e:
  warn(f"Failed to close an open subfile:\n{traceback.format_exc()}")

+ _ = self.subfile_objects.pop(subfile_index, None)
+ if self.redirect_streams:
+ _ = self._redirected_subfile_objects.pop(subfile_index, None)
+
  if not unused_only:
  self._previous_file_obj = None
  self._current_file_obj = None


+ def get_timestamp_prefix_str(self) -> str:
+ """
+ Return the current minute prefixm string.
+ """
+ return datetime.now(timezone.utc).strftime(self.timestamp_format) + ' | '
+
+
  def write(self, data: str) -> None:
  """
  Write the given text into the latest subfile.
@@ -325,9 +359,18 @@ class RotatingFile(io.IOBase):
  if isinstance(data, bytes):
  data = data.decode('utf-8')

- self.refresh_files(potential_new_len=len(data))
+ prefix_str = self.get_timestamp_prefix_str() if self.write_timestamps else ""
+ suffix_str = "\n" if self.write_timestamps else ""
+ self.refresh_files(
+ potential_new_len = len(prefix_str + data + suffix_str),
+ start_interception = True,
+ )
  try:
+ if prefix_str:
+ self._current_file_obj.write(prefix_str)
  self._current_file_obj.write(data)
+ if suffix_str:
+ self._current_file_obj.write(suffix_str)
  except Exception as e:
  warn(f"Failed to write to subfile:\n{traceback.format_exc()}")
  self.flush()
@@ -471,7 +514,7 @@ class RotatingFile(io.IOBase):
  subfile_object = self.subfile_objects[subfile_index]
  for i in range(self.SEEK_BACK_ATTEMPTS):
  try:
- subfile_object.seek(max(seek_ix - i), 0)
+ subfile_object.seek(max((seek_ix - i), 0))
  subfile_lines = subfile_object.readlines()
  except UnicodeDecodeError:
  continue
@@ -532,10 +575,73 @@ class RotatingFile(io.IOBase):
  try:
  subfile_object.flush()
  except Exception as e:
- warn(f"Failed to flush subfile:\n{traceback.format_exc()}")
+ warn(f"Failed to flush subfile {subfile_index}:\n{traceback.format_exc()}")
  if self.redirect_streams:
- sys.stdout.flush()
- sys.stderr.flush()
+ try:
+ sys.stdout.flush()
+ except Exception as e:
+ warn(f"Failed to flush STDOUT:\n{traceback.format_exc()}")
+ try:
+ sys.stderr.flush()
+ except Exception as e:
+ warn(f"Failed to flush STDERR:\n{traceback.format_exc()}")
+
+
+ def start_log_fd_interception(self):
+ """
+ Start the file descriptor monitoring threads.
+ """
+ threads = self.__dict__.get('_interceptor_threads', [])
+ self._stdout_interceptor = FileDescriptorInterceptor(
+ sys.stdout.fileno(),
+ self.get_timestamp_prefix_str,
+ )
+ self._stderr_interceptor = FileDescriptorInterceptor(
+ sys.stderr.fileno(),
+ self.get_timestamp_prefix_str,
+ )
+
+ self._stdout_interceptor_thread = Thread(
+ target = self._stdout_interceptor.start_interception,
+ daemon = True,
+ )
+ self._stderr_interceptor_thread = Thread(
+ target = self._stderr_interceptor.start_interception,
+ daemon = True,
+ )
+ self._stdout_interceptor_thread.start()
+ self._stderr_interceptor_thread.start()
+ self._intercepting = True
+
+ if '_interceptor_threads' not in self.__dict__:
+ self._interceptor_threads = []
+ if '_interceptors' not in self.__dict__:
+ self._interceptors = []
+ self._interceptor_threads.extend([
+ self._stdout_interceptor_thread,
+ self._stderr_interceptor_thread,
+ ])
+ self.stop_log_fd_interception(unused_only=True)
+
+ def stop_log_fd_interception(self, unused_only: bool = False):
+ """
+ Stop the file descriptor monitoring threads.
+ """
+ interceptors = self.__dict__.get('_interceptors', [])
+ interceptor_threads = self.__dict__.get('_interceptor_threads', [])
+
+ end_ix = len(interceptors) if not unused_only else -2
+
+ for interceptor in interceptors[:end_ix]:
+ interceptor.stop_interception()
+ del interceptors[:end_ix]
+
+ for thread in interceptor_threads[:end_ix]:
+ try:
+ thread.join()
+ except Exception as e:
+ warn(f"Failed to join interceptor threads:\n{traceback.format_exc()}")
+ del interceptor_threads[:end_ix]


  def __repr__(self) -> str:
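
The FileDescriptorInterceptor referenced above lives in the new meerschaum/utils/daemon/FileDescriptorInterceptor.py (+102 lines, not shown in this diff). From the call sites it is constructed with a file descriptor plus the `get_timestamp_prefix_str` callable and exposes `start_interception()` / `stop_interception()`. The snippet below is only a generic sketch of the underlying technique (swap the descriptor for a pipe and prepend a prefix from a reader thread), not the package's actual implementation.

    import os
    import threading
    from typing import Callable

    def intercept_fd(fd: int, get_prefix: Callable[[], str]) -> Callable[[], None]:
        """
        Illustrative only: route writes to `fd` through a pipe and copy them
        back to the original target with `get_prefix()` prepended per line.
        Returns a callable which restores the descriptor.
        """
        original_fd = os.dup(fd)           # keep a handle on the real target
        read_fd, write_fd = os.pipe()
        os.dup2(write_fd, fd)              # writes to `fd` now land in the pipe

        def pump():
            buffer = b''
            while True:
                try:
                    chunk = os.read(read_fd, 4096)
                except OSError:
                    break
                if not chunk:
                    break
                buffer += chunk
                while b'\n' in buffer:
                    line, buffer = buffer.split(b'\n', 1)
                    os.write(original_fd, get_prefix().encode() + line + b'\n')

        threading.Thread(target=pump, daemon=True).start()

        def stop():
            os.dup2(original_fd, fd)       # restore the original descriptor
            os.close(write_fd)

        return stop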

meerschaum/utils/daemon/__init__.py
@@ -12,6 +12,7 @@ from meerschaum.utils.typing import SuccessTuple, List, Optional, Callable, Any,
  from meerschaum.config._paths import DAEMON_RESOURCES_PATH
  from meerschaum.utils.daemon.Daemon import Daemon
  from meerschaum.utils.daemon.RotatingFile import RotatingFile
+ from meerschaum.utils.daemon.FileDescriptorInterceptor import FileDescriptorInterceptor


  def daemon_entry(sysargs: Optional[List[str]] = None) -> SuccessTuple:
meerschaum/utils/packages/__init__.py
@@ -35,6 +35,7 @@ _locks = {
  }
  _checked_for_updates = set()
  _is_installed_first_check: Dict[str, bool] = {}
+ _MRSM_PACKAGE_ARCHIVES_PREFIX: str = "https://meerschaum.io/files/archives/"

  def get_module_path(
  import_name: str,
@@ -640,9 +641,15 @@ def need_update(

  ### We might be depending on a prerelease.
  ### Sanity check that the required version is not greater than the installed version.
+ required_version = (
+ required_version.replace(_MRSM_PACKAGE_ARCHIVES_PREFIX, '')
+ .replace(' @ ', '').replace('wheels', '').replace('+mrsm', '').replace('/-', '')
+ .replace('-py3-none-any.whl', '')
+ )
+
  if 'a' in required_version:
- required_version = required_version.replace('a', '-dev')
- version = version.replace('a', '-dev')
+ required_version = required_version.replace('a', '-dev').replace('+mrsm', '')
+ version = version.replace('a', '-dev').replace('+mrsm', '')
  try:
  return (
  (not semver.Version.parse(version).match(required_version))
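
The normalization above exists so that PEP 440 prerelease pins (e.g. `APScheduler>=4.0.0a5`) and archive URLs can still be compared with the `semver` library, which only understands semver-style prereleases. A rough standalone illustration of the `'a'` to `'-dev'` mapping (the helper name here is made up for the example):

    import semver

    def meets_requirement(installed: str, required: str) -> bool:
        # Mirror the mapping above: '2.2.0a5' becomes '2.2.0-dev5' so that
        # the semver library can parse and compare it.
        required = required.replace('a', '-dev').replace('+mrsm', '')
        installed = installed.replace('a', '-dev').replace('+mrsm', '')
        return semver.Version.parse(installed).match(required)

    print(meets_requirement('4.0.0-dev5', '>=4.0.0a4'))   # True
    print(meets_requirement('2.2.0a1', '>=2.2.0a3'))      # False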
meerschaum/utils/packages/_packages.py
@@ -49,10 +49,10 @@ packages: Dict[str, Dict[str, str]] = {
  'daemon' : 'python-daemon>=0.2.3',
  'fasteners' : 'fasteners>=0.18.0',
  'psutil' : 'psutil>=5.8.0',
- 'watchgod' : 'watchgod>=0.7.0',
+ 'watchfiles' : 'watchfiles>=0.21.0',
  'dill' : 'dill>=0.3.3',
  'virtualenv' : 'virtualenv>=20.1.0',
- 'apscheduler' : 'apscheduler>=4.0.0a4',
+ 'apscheduler' : 'APScheduler>=4.0.0a5',
  },
  'drivers': {
  'cryptography' : 'cryptography>=38.0.1',
@@ -89,6 +89,7 @@ packages: Dict[str, Dict[str, str]] = {
  'pytest' : 'pytest>=6.2.2',
  'pytest_xdist' : 'pytest-xdist>=3.2.1',
  'heartrate' : 'heartrate>=0.2.1',
+ 'build' : 'build>=1.2.1',
  },
  'setup': {
  },
@@ -149,7 +150,6 @@ packages['api'] = {
  'passlib' : 'passlib>=1.7.4',
  'fastapi_login' : 'fastapi-login>=1.7.2',
  'multipart' : 'python-multipart>=0.0.5',
- # 'pydantic' : 'pydantic>2.0.0',
  'httpx' : 'httpx>=0.24.1',
  'websockets' : 'websockets>=11.0.3',
  }
meerschaum/utils/schedule.py
@@ -12,7 +12,8 @@ from datetime import datetime, timezone, timedelta, timedelta
  import meerschaum as mrsm
  from meerschaum.utils.typing import Callable, Any, Optional, List, Dict

- INTERVAL_UNITS: List[str] = ['months', 'weeks', 'days', 'hours', 'minutes', 'seconds']
+ STARTING_KEYWORD: str = 'starting'
+ INTERVAL_UNITS: List[str] = ['months', 'weeks', 'days', 'hours', 'minutes', 'seconds', 'years']
  FREQUENCY_ALIASES: Dict[str, str] = {
  'daily': 'every 1 day',
  'hourly': 'every 1 hour',
@@ -20,6 +21,7 @@ FREQUENCY_ALIASES: Dict[str, str] = {
  'weekly': 'every 1 week',
  'monthly': 'every 1 month',
  'secondly': 'every 1 second',
+ 'yearly': 'every 1 year',
  }
  LOGIC_ALIASES: Dict[str, str] = {
  'and': '&',
@@ -27,7 +29,7 @@ LOGIC_ALIASES: Dict[str, str] = {
  ' through ': '-',
  ' thru ': '-',
  ' - ': '-',
- 'beginning': 'starting',
+ 'beginning': STARTING_KEYWORD,
  }
  CRON_DAYS_OF_WEEK: List[str] = ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']
  CRON_DAYS_OF_WEEK_ALIASES: Dict[str, str] = {
@@ -65,8 +67,8 @@ SCHEDULE_ALIASES: Dict[str, str] = {
  **CRON_DAYS_OF_WEEK_ALIASES,
  **CRON_MONTHS_ALIASES,
  }
- STARTING_KEYWORD: str = 'starting'

+ _scheduler = None
  def schedule_function(
  function: Callable[[Any], Any],
  schedule: str,
@@ -87,23 +89,35 @@ def schedule_function(
  The frequency schedule at which `function` should be executed (e.g. `'daily'`).

  """
- import warnings
+ import asyncio
  from meerschaum.utils.warnings import warn
  from meerschaum.utils.misc import filter_keywords, round_time
+ global _scheduler
  kw['debug'] = debug
  kw = filter_keywords(function, **kw)

  apscheduler = mrsm.attempt_import('apscheduler', lazy=False)
  now = round_time(datetime.now(timezone.utc), timedelta(minutes=1))
  trigger = parse_schedule(schedule, now=now)
+ _scheduler = apscheduler.AsyncScheduler()
+ try:
+ loop = asyncio.get_running_loop()
+ except RuntimeError:
+ loop = asyncio.new_event_loop()
+
+ async def run_scheduler():
+ async with _scheduler:
+ job = await _scheduler.add_schedule(function, trigger, args=args, kwargs=kw)
+ try:
+ await _scheduler.run_until_stopped()
+ except (KeyboardInterrupt, SystemExit) as e:
+ await _stop_scheduler()
+ raise e

- with apscheduler.Scheduler() as scheduler:
- job = scheduler.add_schedule(function, trigger, args=args, kwargs=kw)
- try:
- scheduler.run_until_stopped()
- except KeyboardInterrupt as e:
- scheduler.stop()
- scheduler.wait_until_stopped()
+ try:
+ loop.run_until_complete(run_scheduler())
+ except (KeyboardInterrupt, SystemExit) as e:
+ loop.run_until_complete(_stop_scheduler())


  def parse_schedule(schedule: str, now: Optional[datetime] = None):
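
For reference, the AsyncScheduler pattern that the rewritten `schedule_function` adopts looks roughly like this in standalone form. This is a hedged sketch against APScheduler >= 4.0.0a5 (the alpha API may still change), with a placeholder job standing in for the user's function:

    import asyncio
    from apscheduler import AsyncScheduler
    from apscheduler.triggers.interval import IntervalTrigger

    async def tick():
        # Placeholder job; schedule_function() schedules the caller's function instead.
        print('tick')

    async def main():
        async with AsyncScheduler() as scheduler:
            await scheduler.add_schedule(tick, IntervalTrigger(seconds=10))
            try:
                await scheduler.run_until_stopped()
            except (KeyboardInterrupt, SystemExit):
                await scheduler.stop()
                await scheduler.wait_until_stopped()

    if __name__ == '__main__':
        asyncio.run(main())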
@@ -134,7 +148,7 @@ def parse_schedule(schedule: str, now: Optional[datetime] = None):

  ### TODO Allow for combining `and` + `or` logic.
  if '&' in schedule and '|' in schedule:
- error(f"Cannot accept both 'and' + 'or' logic in the schedule frequency.", ValueError)
+ raise ValueError(f"Cannot accept both 'and' + 'or' logic in the schedule frequency.")

  join_str = '|' if '|' in schedule else '&'
  join_trigger = (
@@ -152,12 +166,6 @@

  has_seconds = 'second' in schedule
  has_minutes = 'minute' in schedule
- has_days = 'day' in schedule
- has_weeks = 'week' in schedule
- has_hours = 'hour' in schedule
- num_hourly_intervals = schedule.count('hour')
- divided_days = False
- divided_hours = False

  for schedule_part in schedule_parts:

@@ -168,10 +176,9 @@
  )
  schedule_unit = schedule_unit.rstrip('s') + 's'
  if schedule_unit not in INTERVAL_UNITS:
- error(
+ raise ValueError(
  f"Invalid interval '{schedule_unit}'.\n"
- + f" Accepted values are {items_str(INTERVAL_UNITS)}.",
- ValueError,
+ + f" Accepted values are {items_str(INTERVAL_UNITS)}."
  )

  schedule_num = (
@@ -180,29 +187,6 @@
  else float(schedule_num_str)
  )

- ### NOTE: When combining days or weeks with other schedules,
- ### we must divide one of the day-schedules by 2.
- ### TODO Remove this when APScheduler is patched.
- if (
- join_str == '&'
- and (has_days or has_weeks)
- and len(schedule_parts) > 1
- and not divided_days
- ):
- schedule_num /= 2
- divided_days = True
-
- ### NOTE: When combining multiple hourly intervals,
- ### one must be divided by 2.
- if (
- join_str == '&'
- # and num_hourly_intervals > 1
- and len(schedule_parts) > 1
- and not divided_hours
- ):
- schedule_num /= 2
- # divided_hours = True
-
  trigger = (
  apscheduler_triggers_interval.IntervalTrigger(
  **{
@@ -210,12 +194,12 @@
  'start_time': starting_ts,
  }
  )
- if schedule_unit != 'months' else (
+ if schedule_unit not in ('months', 'years') else (
  apscheduler_triggers_calendarinterval.CalendarIntervalTrigger(
  **{
  schedule_unit: schedule_num,
  'start_date': starting_ts,
- # 'timezone': starting_ts.tzinfo, TODO Re-enable once APScheduler updates.
+ 'timezone': starting_ts.tzinfo,
  }
  )
  )
@@ -223,12 +207,15 @@

  ### Determine whether this is a pure cron string or a cron subset (e.g. 'may-aug')_.
  else:
- first_three_prefix = schedule_part[:3]
+ first_three_prefix = schedule_part[:3].lower()
+ first_four_prefix = schedule_part[:4].lower()
  cron_kw = {}
  if first_three_prefix in CRON_DAYS_OF_WEEK:
  cron_kw['day_of_week'] = schedule_part
  elif first_three_prefix in CRON_MONTHS:
  cron_kw['month'] = schedule_part
+ elif is_int(first_four_prefix) and len(first_four_prefix) == 4:
+ cron_kw['year'] = int(first_four_prefix)
  trigger = (
  apscheduler_triggers_cron.CronTrigger(
  **{
@@ -301,3 +288,10 @@ def parse_start_time(schedule: str, now: Optional[datetime] = None) -> datetime:
  if not starting_ts.tzinfo:
  starting_ts = starting_ts.replace(tzinfo=timezone.utc)
  return starting_ts
+
+
+ async def _stop_scheduler():
+ if _scheduler is None:
+ return
+ await _scheduler.stop()
+ await _scheduler.wait_until_stopped()
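
Taken together, the alias and trigger changes above extend the accepted schedule strings: 'yearly' now aliases to 'every 1 year' (handled by a CalendarIntervalTrigger), and a four-digit token in a cron-style part is treated as a year constraint. The calls below are illustrative only; exactly which strings parse depends on parts of schedule.py not shown in this diff.

    from meerschaum.utils.schedule import parse_schedule

    # New 'yearly' alias (maps to 'every 1 year').
    yearly_trigger = parse_schedule('yearly')

    # 'starting' still anchors the start time ('daily' is the docstring's example).
    daily_trigger = parse_schedule('daily starting 00:00')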

meerschaum/utils/threading.py
@@ -10,6 +10,7 @@ from __future__ import annotations
  from meerschaum.utils.typing import Optional

  import threading
+ import traceback
  Lock = threading.Lock
  RLock = threading.RLock
  Event = threading.Event

{meerschaum-2.2.0.dev3.dist-info → meerschaum-2.2.0rc2.dist-info}/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: meerschaum
- Version: 2.2.0.dev3
+ Version: 2.2.0rc2
  Summary: Sync Time-Series Pipes with Meerschaum
  Home-page: https://meerschaum.io
  Author: Bennett Meares
@@ -53,10 +53,10 @@ Requires-Dist: more-itertools >=8.7.0 ; extra == '_required'
  Requires-Dist: python-daemon >=0.2.3 ; extra == '_required'
  Requires-Dist: fasteners >=0.18.0 ; extra == '_required'
  Requires-Dist: psutil >=5.8.0 ; extra == '_required'
- Requires-Dist: watchgod >=0.7.0 ; extra == '_required'
+ Requires-Dist: watchfiles >=0.21.0 ; extra == '_required'
  Requires-Dist: dill >=0.3.3 ; extra == '_required'
  Requires-Dist: virtualenv >=20.1.0 ; extra == '_required'
- Requires-Dist: apscheduler >=4.0.0a4 ; extra == '_required'
+ Requires-Dist: APScheduler >=4.0.0a5 ; extra == '_required'
  Provides-Extra: api
  Requires-Dist: uvicorn[standard] >=0.22.0 ; extra == 'api'
  Requires-Dist: gunicorn >=20.1.0 ; extra == 'api'
@@ -102,10 +102,10 @@ Requires-Dist: more-itertools >=8.7.0 ; extra == 'api'
  Requires-Dist: python-daemon >=0.2.3 ; extra == 'api'
  Requires-Dist: fasteners >=0.18.0 ; extra == 'api'
  Requires-Dist: psutil >=5.8.0 ; extra == 'api'
- Requires-Dist: watchgod >=0.7.0 ; extra == 'api'
+ Requires-Dist: watchfiles >=0.21.0 ; extra == 'api'
  Requires-Dist: dill >=0.3.3 ; extra == 'api'
  Requires-Dist: virtualenv >=20.1.0 ; extra == 'api'
- Requires-Dist: apscheduler >=4.0.0a4 ; extra == 'api'
+ Requires-Dist: APScheduler >=4.0.0a5 ; extra == 'api'
  Requires-Dist: pprintpp >=0.4.0 ; extra == 'api'
  Requires-Dist: asciitree >=0.3.3 ; extra == 'api'
  Requires-Dist: typing-extensions >=4.7.1 ; extra == 'api'
@@ -148,6 +148,7 @@ Requires-Dist: mypy >=0.812.0 ; extra == 'dev-tools'
  Requires-Dist: pytest >=6.2.2 ; extra == 'dev-tools'
  Requires-Dist: pytest-xdist >=3.2.1 ; extra == 'dev-tools'
  Requires-Dist: heartrate >=0.2.1 ; extra == 'dev-tools'
+ Requires-Dist: build >=1.2.1 ; extra == 'dev-tools'
  Provides-Extra: docs
  Requires-Dist: mkdocs >=1.1.2 ; extra == 'docs'
  Requires-Dist: mkdocs-material >=6.2.5 ; extra == 'docs'
@@ -208,10 +209,10 @@ Requires-Dist: more-itertools >=8.7.0 ; extra == 'full'
  Requires-Dist: python-daemon >=0.2.3 ; extra == 'full'
  Requires-Dist: fasteners >=0.18.0 ; extra == 'full'
  Requires-Dist: psutil >=5.8.0 ; extra == 'full'
- Requires-Dist: watchgod >=0.7.0 ; extra == 'full'
+ Requires-Dist: watchfiles >=0.21.0 ; extra == 'full'
  Requires-Dist: dill >=0.3.3 ; extra == 'full'
  Requires-Dist: virtualenv >=20.1.0 ; extra == 'full'
- Requires-Dist: apscheduler >=4.0.0a4 ; extra == 'full'
+ Requires-Dist: APScheduler >=4.0.0a5 ; extra == 'full'
  Requires-Dist: cryptography >=38.0.1 ; extra == 'full'
  Requires-Dist: psycopg[binary] >=3.1.18 ; extra == 'full'
  Requires-Dist: PyMySQL >=0.9.0 ; extra == 'full'
@@ -292,10 +293,10 @@ Requires-Dist: more-itertools >=8.7.0 ; extra == 'sql'
  Requires-Dist: python-daemon >=0.2.3 ; extra == 'sql'
  Requires-Dist: fasteners >=0.18.0 ; extra == 'sql'
  Requires-Dist: psutil >=5.8.0 ; extra == 'sql'
- Requires-Dist: watchgod >=0.7.0 ; extra == 'sql'
+ Requires-Dist: watchfiles >=0.21.0 ; extra == 'sql'
  Requires-Dist: dill >=0.3.3 ; extra == 'sql'
  Requires-Dist: virtualenv >=20.1.0 ; extra == 'sql'
- Requires-Dist: apscheduler >=4.0.0a4 ; extra == 'sql'
+ Requires-Dist: APScheduler >=4.0.0a5 ; extra == 'sql'
  Provides-Extra: stack
  Requires-Dist: docker-compose >=1.29.2 ; extra == 'stack'