meerschaum 2.2.0rc1__py3-none-any.whl → 2.2.0rc3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. meerschaum/__main__.py +1 -1
  2. meerschaum/actions/show.py +68 -43
  3. meerschaum/api/__init__.py +16 -11
  4. meerschaum/api/dash/callbacks/dashboard.py +2 -7
  5. meerschaum/api/dash/pipes.py +33 -9
  6. meerschaum/api/dash/plugins.py +25 -9
  7. meerschaum/api/resources/templates/termpage.html +3 -0
  8. meerschaum/api/routes/_login.py +5 -4
  9. meerschaum/api/routes/_plugins.py +6 -3
  10. meerschaum/config/_dash.py +11 -0
  11. meerschaum/config/_default.py +3 -1
  12. meerschaum/config/_jobs.py +10 -4
  13. meerschaum/config/_paths.py +2 -0
  14. meerschaum/config/_sync.py +2 -3
  15. meerschaum/config/_version.py +1 -1
  16. meerschaum/config/stack/__init__.py +6 -6
  17. meerschaum/config/stack/grafana/__init__.py +1 -1
  18. meerschaum/config/static/__init__.py +4 -1
  19. meerschaum/connectors/__init__.py +2 -0
  20. meerschaum/connectors/sql/SQLConnector.py +4 -2
  21. meerschaum/connectors/sql/_create_engine.py +4 -4
  22. meerschaum/connectors/sql/_instance.py +3 -1
  23. meerschaum/connectors/sql/_pipes.py +53 -38
  24. meerschaum/connectors/sql/_plugins.py +0 -2
  25. meerschaum/connectors/sql/_sql.py +7 -9
  26. meerschaum/core/User/_User.py +158 -16
  27. meerschaum/core/User/__init__.py +1 -1
  28. meerschaum/plugins/_Plugin.py +1 -1
  29. meerschaum/plugins/__init__.py +23 -1
  30. meerschaum/utils/daemon/Daemon.py +38 -12
  31. meerschaum/utils/daemon/FileDescriptorInterceptor.py +90 -10
  32. meerschaum/utils/daemon/RotatingFile.py +82 -59
  33. meerschaum/utils/daemon/__init__.py +3 -0
  34. meerschaum/utils/packages/__init__.py +10 -4
  35. meerschaum/utils/packages/_packages.py +7 -8
  36. meerschaum/utils/process.py +13 -10
  37. meerschaum/utils/schedule.py +15 -1
  38. meerschaum/utils/threading.py +1 -0
  39. {meerschaum-2.2.0rc1.dist-info → meerschaum-2.2.0rc3.dist-info}/METADATA +19 -21
  40. {meerschaum-2.2.0rc1.dist-info → meerschaum-2.2.0rc3.dist-info}/RECORD +46 -45
  41. {meerschaum-2.2.0rc1.dist-info → meerschaum-2.2.0rc3.dist-info}/LICENSE +0 -0
  42. {meerschaum-2.2.0rc1.dist-info → meerschaum-2.2.0rc3.dist-info}/NOTICE +0 -0
  43. {meerschaum-2.2.0rc1.dist-info → meerschaum-2.2.0rc3.dist-info}/WHEEL +0 -0
  44. {meerschaum-2.2.0rc1.dist-info → meerschaum-2.2.0rc3.dist-info}/entry_points.txt +0 -0
  45. {meerschaum-2.2.0rc1.dist-info → meerschaum-2.2.0rc3.dist-info}/top_level.txt +0 -0
  46. {meerschaum-2.2.0rc1.dist-info → meerschaum-2.2.0rc3.dist-info}/zip-safe +0 -0
meerschaum/connectors/sql/SQLConnector.py
@@ -128,8 +128,10 @@ class SQLConnector(Connector):
         """
         if 'uri' in kw:
             uri = kw['uri']
-            if uri.startswith('postgres://'):
-                uri = uri.replace('postgres://', 'postgresql://', 1)
+            if uri.startswith('postgres') and not uri.startswith('postgresql'):
+                uri = uri.replace('postgres', 'postgresql', 1)
+            if uri.startswith('postgresql') and not uri.startswith('postgresql+'):
+                uri = uri.replace('postgresql://', 'postgresql+psycopg', 1)
             if uri.startswith('timescaledb://'):
                 uri = uri.replace('timescaledb://', 'postgresql://', 1)
                 flavor = 'timescaledb'
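
Note: the two rewrites above normalize legacy `postgres://` URIs onto SQLAlchemy's psycopg 3 dialect. A minimal standalone sketch of the intended normalization follows; the helper name is hypothetical, and it assumes the target prefix keeps the `://` separator (i.e. `postgresql+psycopg://`), which the shown replacement string does not spell out.

# Hypothetical helper illustrating the URI normalization above; not part of the package.
def normalize_postgres_uri(uri: str) -> str:
    if uri.startswith('postgres') and not uri.startswith('postgresql'):
        uri = uri.replace('postgres', 'postgresql', 1)
    if uri.startswith('postgresql') and not uri.startswith('postgresql+'):
        # Assumption: the replacement keeps the '://' separator.
        uri = uri.replace('postgresql://', 'postgresql+psycopg://', 1)
    return uri

print(normalize_postgres_uri('postgres://user:pass@localhost:5432/db'))
# postgresql+psycopg://user:pass@localhost:5432/db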
meerschaum/connectors/sql/_create_engine.py
@@ -28,7 +28,7 @@ default_create_engine_args = {
 }
 flavor_configs = {
     'timescaledb'      : {
-        'engine'            : 'postgresql',
+        'engine'            : 'postgresql+psycopg',
         'create_engine'     : default_create_engine_args,
        'omit_create_engine': {'method',},
        'to_sql'            : {},
@@ -38,7 +38,7 @@ flavor_configs = {
         },
     },
     'postgresql'       : {
-        'engine'            : 'postgresql',
+        'engine'            : 'postgresql+psycopg',
         'create_engine'     : default_create_engine_args,
        'omit_create_engine': {'method',},
        'to_sql'            : {},
@@ -48,7 +48,7 @@ flavor_configs = {
         },
     },
     'citus'            : {
-        'engine'            : 'postgresql',
+        'engine'            : 'postgresql+psycopg',
         'create_engine'     : default_create_engine_args,
        'omit_create_engine': {'method',},
        'to_sql'            : {},
@@ -242,7 +242,7 @@ def create_engine(
 
     ### Sometimes the timescaledb:// flavor can slip in.
     if _uri and self.flavor in ('timescaledb',) and self.flavor in _uri:
-        engine_str = engine_str.replace(f'{self.flavor}://', 'postgresql://')
+        engine_str = engine_str.replace(f'{self.flavor}', 'postgresql', 1)
 
     if debug:
         dprint(
meerschaum/connectors/sql/_instance.py
@@ -155,7 +155,9 @@ def _drop_old_temporary_tables(
     temp_tables_table = get_tables(mrsm_instance=self, create=False, debug=debug)['temp_tables']
     last_check = getattr(self, '_stale_temporary_tables_check_timestamp', 0)
     now_ts = time.perf_counter()
-    if refresh or not last_check or (now_ts - last_check) > 60:
+    if not last_check:
+        self._stale_temporary_tables_check_timestamp = 0
+    if refresh or (now_ts - last_check) < 60:
         self._stale_temporary_tables_check_timestamp = now_ts
         return self._drop_temporary_tables(debug=debug)
 
meerschaum/connectors/sql/_pipes.py
@@ -752,7 +752,7 @@ def get_pipe_data(
         debug = debug,
         **kw
     )
-
+
     if is_dask:
         index_col = pipe.columns.get('datetime', None)
         kw['index_col'] = index_col
@@ -1478,43 +1478,11 @@ def sync_pipe_inplace(
     from meerschaum.utils.misc import generate_password
     from meerschaum.utils.debug import dprint
 
-    sqlalchemy, sqlalchemy_orm = mrsm.attempt_import('sqlalchemy', 'sqlalchemy.orm')
-    metadef = self.get_pipe_metadef(
-        pipe,
-        params = params,
-        begin = begin,
-        end = end,
-        check_existing = check_existing,
-        debug = debug,
-    )
-    pipe_name = sql_item_name(pipe.target, self.flavor, self.get_pipe_schema(pipe))
-    upsert = pipe.parameters.get('upsert', False) and f'{self.flavor}-upsert' in update_queries
-    internal_schema = self.internal_schema
-    database = getattr(self, 'database', self.parse_uri(self.URI).get('database', None))
-
-    if not pipe.exists(debug=debug):
-        create_pipe_query = get_create_table_query(
-            metadef,
-            pipe.target,
-            self.flavor,
-            schema = self.get_pipe_schema(pipe),
-        )
-        result = self.exec(create_pipe_query, debug=debug)
-        if result is None:
-            return False, f"Could not insert new data into {pipe} from its SQL query definition."
-        if not self.create_indices(pipe, debug=debug):
-            warn(f"Failed to create indices for {pipe}. Continuing...")
-
-        rowcount = pipe.get_rowcount(debug=debug)
-        return True, f"Inserted {rowcount}, updated 0 rows."
-
-    session = sqlalchemy_orm.Session(self.engine)
-    connectable = session if self.flavor != 'duckdb' else self
-
     transact_id = generate_password(3)
     def get_temp_table_name(label: str) -> str:
         return '-' + transact_id + '_' + label + '_' + pipe.target
 
+    internal_schema = self.internal_schema
     temp_table_roots = ['backtrack', 'new', 'delta', 'joined', 'unseen', 'update']
     temp_tables = {
         table_root: get_temp_table_name(table_root)
@@ -1528,6 +1496,17 @@ def sync_pipe_inplace(
         )
         for table_root, table_name_raw in temp_tables.items()
     }
+    metadef = self.get_pipe_metadef(
+        pipe,
+        params = params,
+        begin = begin,
+        end = end,
+        check_existing = check_existing,
+        debug = debug,
+    )
+    pipe_name = sql_item_name(pipe.target, self.flavor, self.get_pipe_schema(pipe))
+    upsert = pipe.parameters.get('upsert', False) and f'{self.flavor}-upsert' in update_queries
+    database = getattr(self, 'database', self.parse_uri(self.URI).get('database', None))
 
     def clean_up_temp_tables(ready_to_drop: bool = False):
         log_success, log_msg = self._log_temporary_tables_creation(
@@ -1541,6 +1520,36 @@ def sync_pipe_inplace(
         )
         if not log_success:
             warn(log_msg)
+        drop_stale_success, drop_stale_msg = self._drop_old_temporary_tables(
+            refresh = False,
+            debug = debug,
+        )
+        if not drop_stale_success:
+            warn(drop_stale_msg)
+        return drop_stale_success, drop_stale_msg
+
+    sqlalchemy, sqlalchemy_orm = mrsm.attempt_import('sqlalchemy', 'sqlalchemy.orm')
+    if not pipe.exists(debug=debug):
+        create_pipe_query = get_create_table_query(
+            metadef,
+            pipe.target,
+            self.flavor,
+            schema = self.get_pipe_schema(pipe),
+        )
+        result = self.exec(create_pipe_query, debug=debug)
+        if result is None:
+            _ = clean_up_temp_tables()
+            return False, f"Could not insert new data into {pipe} from its SQL query definition."
+
+        if not self.create_indices(pipe, debug=debug):
+            warn(f"Failed to create indices for {pipe}. Continuing...")
+
+        rowcount = pipe.get_rowcount(debug=debug)
+        _ = clean_up_temp_tables()
+        return True, f"Inserted {rowcount}, updated 0 rows."
+
+    session = sqlalchemy_orm.Session(self.engine)
+    connectable = session if self.flavor != 'duckdb' else self
 
     create_new_query = get_create_table_query(
         metadef,
@@ -1908,10 +1917,6 @@ def sync_pipe_inplace(
     )
     _ = clean_up_temp_tables(ready_to_drop=True)
 
-    drop_stale_success, drop_stale_msg = self._drop_old_temporary_tables(refresh=False, debug=debug)
-    if not drop_stale_success:
-        warn(drop_stale_msg)
-
     return True, msg
 
 
@@ -2372,6 +2377,16 @@ def get_pipe_columns_types(
     """
     if not pipe.exists(debug=debug):
         return {}
+
+    if self.flavor == 'duckdb':
+        from meerschaum.utils.sql import get_table_cols_types
+        return get_table_cols_types(
+            pipe.target,
+            self,
+            flavor = self.flavor,
+            schema = self.schema,
+        )
+
     table_columns = {}
     try:
         pipe_table = self.get_pipe_table(pipe, debug=debug)
meerschaum/connectors/sql/_plugins.py
@@ -108,9 +108,7 @@ def get_plugin_version(
     plugins_tbl = get_tables(mrsm_instance=self, debug=debug)['plugins']
     from meerschaum.utils.packages import attempt_import
     sqlalchemy = attempt_import('sqlalchemy')
-
     query = sqlalchemy.select(plugins_tbl.c.version).where(plugins_tbl.c.plugin_name == plugin.name)
-
     return self.value(query, debug=debug)
 
 def get_plugin_user_id(
meerschaum/connectors/sql/_sql.py
@@ -943,17 +943,15 @@ def psql_insert_copy(
         ) for row in data_iter
     )
 
+    table_name = sql_item_name(table.name, 'postgresql', table.schema)
+    columns = ', '.join(f'"{k}"' for k in keys)
+    sql = f"COPY {table_name} ({columns}) FROM STDIN WITH CSV NULL '\\N'"
+
     dbapi_conn = conn.connection
     with dbapi_conn.cursor() as cur:
-        s_buf = StringIO()
-        writer = csv.writer(s_buf)
-        writer.writerows(data_iter)
-        s_buf.seek(0)
-
-        columns = ', '.join(f'"{k}"' for k in keys)
-        table_name = sql_item_name(table.name, 'postgresql', table.schema)
-        sql = f"COPY {table_name} ({columns}) FROM STDIN WITH CSV NULL '\\N'"
-        cur.copy_expert(sql=sql, file=s_buf)
+        with cur.copy(sql) as copy:
+            writer = csv.writer(copy)
+            writer.writerows(data_iter)
 
 
 def format_sql_query_for_dask(query: str) -> 'sqlalchemy.sql.selectable.Select':
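
Note: `psql_insert_copy` now streams rows straight through psycopg 3's `cursor.copy()` context manager instead of buffering a whole CSV in memory for psycopg2's `copy_expert()`. A minimal sketch of the same pattern, assuming psycopg 3 is installed; the DSN and table are hypothetical.

# Minimal sketch of COPY ... FROM STDIN via psycopg 3 (hypothetical DSN/table).
import csv
import psycopg

rows = [(1, 'alpha'), (2, 'beta')]
with psycopg.connect('postgresql://user:pass@localhost:5432/db') as conn:
    with conn.cursor() as cur:
        # The Copy object accepts str writes, so csv.writer can write into it directly.
        with cur.copy('COPY "my_table" ("id", "label") FROM STDIN WITH CSV') as copy:
            writer = csv.writer(copy)
            writer.writerows(rows)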
meerschaum/core/User/_User.py
@@ -7,22 +7,157 @@ User class definition
 """
 
 from __future__ import annotations
-from meerschaum.utils.typing import Optional, Dict, Any
-
-pwd_context = None
-def get_pwd_context():
-    global pwd_context
-    if pwd_context is None:
-        from meerschaum.config.static import STATIC_CONFIG
-        from meerschaum.utils.packages import attempt_import
-        hash_config = STATIC_CONFIG['users']['password_hash']
-        passlib_context = attempt_import('passlib.context')
-        pwd_context = passlib_context.CryptContext(
-            schemes = hash_config['schemes'],
-            default = hash_config['default'],
-            pbkdf2_sha256__default_rounds = hash_config['pbkdf2_sha256__default_rounds']
+import os
+import hashlib
+import hmac
+from binascii import b2a_base64, a2b_base64, Error as _BinAsciiError
+from meerschaum.utils.typing import Optional, Dict, Any, Tuple
+from meerschaum.config.static import STATIC_CONFIG
+from meerschaum.utils.warnings import warn
+
+
+__all__ = ('hash_password', 'verify_password', 'User')
+
+def hash_password(
+        password: str,
+        salt: Optional[bytes] = None,
+        rounds: Optional[int] = None,
+    ) -> str:
+    """
+    Return an encoded hash string from the given password.
+
+    Parameters
+    ----------
+    password: str
+        The password to be hashed.
+
+    salt: Optional[str], default None
+        If provided, use these bytes for the salt in the hash.
+        Otherwise defaults to 16 random bytes.
+
+    rounds: Optional[int], default None
+        If provided, use this number of rounds to generate the hash.
+        Defaults to 3,000,000.
+
+    Returns
+    -------
+    An encoded hash string to be stored in a database.
+    See the `passlib` documentation on the string format:
+    https://passlib.readthedocs.io/en/stable/lib/passlib.hash.pbkdf2_digest.html#format-algorithm
+    """
+    hash_config = STATIC_CONFIG['users']['password_hash']
+    if password is None:
+        password = ''
+    if salt is None:
+        salt = os.urandom(hash_config['salt_bytes'])
+    if rounds is None:
+        rounds = hash_config['pbkdf2_sha256__default_rounds']
+
+    pw_hash = hashlib.pbkdf2_hmac(
+        hash_config['algorithm_name'],
+        password.encode('utf-8'),
+        salt,
+        rounds,
+    )
+    return (
+        f"$pbkdf2-{hash_config['algorithm_name']}"
+        + f"${hash_config['pbkdf2_sha256__default_rounds']}"
+        + '$' + ab64_encode(salt).decode('utf-8')
+        + '$' + ab64_encode(pw_hash).decode('utf-8')
+    )
+
+
+def verify_password(
+        password: str,
+        password_hash: str,
+    ) -> bool:
+    """
+    Return `True` if the password matches the provided hash.
+
+    Parameters
+    ----------
+    password: str
+        The password to be checked.
+
+    password_hash: str
+        The encoded hash string as generated from `hash_password()`.
+
+    Returns
+    -------
+    A `bool` indicating whether `password` matches `password_hash`.
+    """
+    if password is None or password_hash is None:
+        return False
+    hash_config = STATIC_CONFIG['users']['password_hash']
+    try:
+        digest, rounds_str, encoded_salt, encoded_checksum = password_hash.split('$')[1:]
+        algorithm_name = digest.split('-')[-1]
+        salt = ab64_decode(encoded_salt)
+        checksum = ab64_decode(encoded_checksum)
+        rounds = int(rounds_str)
+    except Exception as e:
+        warn(f"Failed to extract context from password hash '{password_hash}'. Is it corrupted?")
+        return False
+
+    return hmac.compare_digest(
+        checksum,
+        hashlib.pbkdf2_hmac(
+            algorithm_name,
+            password.encode('utf-8'),
+            salt,
+            rounds,
         )
-    return pwd_context
+    )
+
+_BASE64_STRIP = b"=\n"
+_BASE64_PAD1 = b"="
+_BASE64_PAD2 = b"=="
+
+def ab64_encode(data):
+    return b64s_encode(data).replace(b"+", b".")
+
+def ab64_decode(data):
+    """
+    decode from shortened base64 format which omits padding & whitespace.
+    uses custom ``./`` altchars, but supports decoding normal ``+/`` altchars as well.
+    """
+    if isinstance(data, str):
+        # needs bytes for replace() call, but want to accept ascii-unicode ala a2b_base64()
+        try:
+            data = data.encode("ascii")
+        except UnicodeEncodeError:
+            raise ValueError("string argument should contain only ASCII characters")
+    return b64s_decode(data.replace(b".", b"+"))
+
+
+def b64s_encode(data):
+    return b2a_base64(data).rstrip(_BASE64_STRIP)
+
+def b64s_decode(data):
+    """
+    decode from shortened base64 format which omits padding & whitespace.
+    uses default ``+/`` altchars.
+    """
+    if isinstance(data, str):
+        # needs bytes for replace() call, but want to accept ascii-unicode ala a2b_base64()
+        try:
+            data = data.encode("ascii")
+        except UnicodeEncodeError as ue:
+            raise ValueError("string argument should contain only ASCII characters") from ue
+    off = len(data) & 3
+    if off == 0:
+        pass
+    elif off == 2:
+        data += _BASE64_PAD2
+    elif off == 3:
+        data += _BASE64_PAD1
+    else: # off == 1
+        raise ValueError("Invalid base64 input")
+    try:
+        return a2b_base64(data)
+    except _BinAsciiError as err:
+        raise TypeError(err) from err
+
 
 
 class User:
@@ -42,7 +177,6 @@ class User:
         if password is None:
             password = ''
         self.password = password
-        self.password_hash = get_pwd_context().hash(password)
         self.username = username
         self.email = email
         self.type = type
@@ -80,3 +214,11 @@ class User:
     @user_id.setter
     def user_id(self, user_id):
         self._user_id = user_id
+
+    @property
+    def password_hash(self):
+        _password_hash = self.__dict__.get('_password_hash', None)
+        if _password_hash is not None:
+            return _password_hash
+        self._password_hash = hash_password(self.password)
+        return self._password_hash
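
Note: the passlib dependency is replaced by a self-contained PBKDF2 implementation on top of `hashlib` and `hmac`, and `password_hash` becomes a lazily computed property. A minimal usage sketch, assuming meerschaum 2.2.0rc3 is installed; the credentials are hypothetical.

# Sketch of the new hashing helpers exported from meerschaum.core.User.
from meerschaum.core.User import User, hash_password, verify_password

encoded = hash_password('hunter2')            # '$pbkdf2-<algorithm>$<rounds>$<salt>$<checksum>' style string
assert verify_password('hunter2', encoded)
assert not verify_password('wrong', encoded)

user = User('alice', 'hunter2')               # hypothetical username/password
print(user.password_hash)                     # computed on first access via hash_password()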
meerschaum/core/User/__init__.py
@@ -6,4 +6,4 @@
 Manager users' metadata via the User class
 """
 
-from meerschaum.core.User._User import User
+from meerschaum.core.User._User import User, hash_password, verify_password
meerschaum/plugins/_Plugin.py
@@ -209,7 +209,7 @@ class Plugin:
         def parse_gitignore() -> 'Set[str]':
             gitignore_path = pathlib.Path(path) / '.gitignore'
             if not gitignore_path.exists():
-                return set()
+                return set(default_patterns_to_ignore)
             with open(gitignore_path, 'r', encoding='utf-8') as f:
                 gitignore_text = f.read()
             return set(pathspec.PathSpec.from_lines(
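
Note: `parse_gitignore()` now falls back to the default ignore patterns instead of an empty set when a plugin has no `.gitignore`. A small sketch of the `pathspec` matching it feeds into; the pattern list here is hypothetical.

# Sketch of gitignore-style matching with pathspec (hypothetical default patterns).
import pathspec

default_patterns_to_ignore = ['.git/', '__pycache__/', '*.pyc']
spec = pathspec.PathSpec.from_lines('gitwildmatch', default_patterns_to_ignore)
print(spec.match_file('plugin/module.pyc'))   # True
print(spec.match_file('plugin/module.py'))    # False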
meerschaum/plugins/__init__.py
@@ -247,6 +247,26 @@ def sync_plugins_symlinks(debug: bool = False, warn: bool = True) -> None:
             _warn(f"Unable to create lockfile {PLUGINS_INTERNAL_LOCK_PATH}:\n{e}")
 
     with _locks['internal_plugins']:
+
+        try:
+            from importlib.metadata import entry_points
+        except ImportError:
+            importlib_metadata = attempt_import('importlib_metadata', lazy=False)
+            entry_points = importlib_metadata.entry_points
+
+        ### NOTE: Allow plugins to be installed via `pip`.
+        packaged_plugin_paths = []
+        discovered_packaged_plugins_eps = entry_points(group='meerschaum.plugins')
+        for ep in discovered_packaged_plugins_eps:
+            module_name = ep.name
+            for package_file_path in ep.dist.files:
+                if package_file_path.suffix != '.py':
+                    continue
+                if str(package_file_path) == f'{module_name}.py':
+                    packaged_plugin_paths.append(package_file_path.locate())
+                elif str(package_file_path) == f'{module_name}/__init__.py':
+                    packaged_plugin_paths.append(package_file_path.locate().parent)
+
         if is_symlink(PLUGINS_RESOURCES_PATH) or not PLUGINS_RESOURCES_PATH.exists():
             try:
                 PLUGINS_RESOURCES_PATH.unlink()
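
Note: the hunk above lets `sync_plugins_symlinks()` discover plugins installed with `pip` by scanning the `meerschaum.plugins` entry-point group. A minimal sketch of how such a package might declare itself, using a hypothetical package and module name (a setuptools `setup.py` is shown for illustration; an equivalent entry-points table in `pyproject.toml` would also be picked up).

# Hypothetical setup.py exposing the module 'example_plugin' to the
# 'meerschaum.plugins' entry-point group scanned above.
from setuptools import setup

setup(
    name='example-mrsm-plugin',
    version='0.1.0',
    py_modules=['example_plugin'],
    entry_points={
        'meerschaum.plugins': [
            'example_plugin = example_plugin',
        ],
    },
)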
@@ -255,7 +275,6 @@ def sync_plugins_symlinks(debug: bool = False, warn: bool = True) -> None:
 
         PLUGINS_RESOURCES_PATH.mkdir(exist_ok=True)
 
-
         existing_symlinked_paths = [
             (PLUGINS_RESOURCES_PATH / item)
             for item in os.listdir(PLUGINS_RESOURCES_PATH)
@@ -275,6 +294,7 @@ def sync_plugins_symlinks(debug: bool = False, warn: bool = True) -> None:
                 for plugins_path in PLUGINS_DIR_PATHS
             ]
         ))
+        plugins_to_be_symlinked.extend(packaged_plugin_paths)
 
         ### Check for duplicates.
         seen_plugins = defaultdict(lambda: 0)
@@ -538,6 +558,8 @@ def get_plugins(*to_load, try_import: bool = True) -> Union[Tuple[Plugin], Plugin]:
         ]
     plugins = tuple(plugin for plugin in _plugins if plugin.is_installed(try_import=try_import))
     if len(to_load) == 1:
+        if len(plugins) == 0:
+            raise ValueError(f"Plugin '{to_load[0]}' is not installed.")
         return plugins[0]
     return plugins
 
meerschaum/utils/daemon/Daemon.py
@@ -15,9 +15,11 @@ import signal
 import sys
 import time
 import traceback
+from functools import partial
 from datetime import datetime, timezone
 from meerschaum.utils.typing import Optional, Dict, Any, SuccessTuple, Callable, List, Union
 from meerschaum.config import get_config
+from meerschaum.config.static import STATIC_CONFIG
 from meerschaum.config._paths import DAEMON_RESOURCES_PATH, LOGS_RESOURCES_PATH
 from meerschaum.config._patch import apply_patch_to_config
 from meerschaum.utils.warnings import warn, error
@@ -139,11 +141,12 @@ class Daemon:
         Nothing — this will exit the parent process.
         """
         import platform, sys, os, traceback
-        from meerschaum.config._paths import LOGS_RESOURCES_PATH
+        from meerschaum.config._paths import DAEMON_ERROR_LOG_PATH
         from meerschaum.utils.warnings import warn
-        daemons_error_log_path = LOGS_RESOURCES_PATH / 'daemons_error.log'
-
+        from meerschaum.config import get_config
         daemon = attempt_import('daemon')
+        lines = get_config('jobs', 'terminal', 'lines')
+        columns = get_config('jobs','terminal', 'columns')
 
         if platform.system() == 'Windows':
             return False, "Windows is no longer supported."
@@ -164,10 +167,16 @@ class Daemon:
         )
 
         log_refresh_seconds = get_config('jobs', 'logs', 'refresh_files_seconds')
-        self._log_refresh_timer = RepeatTimer(log_refresh_seconds, self.rotating_log.refresh_files)
+        self._log_refresh_timer = RepeatTimer(
+            log_refresh_seconds,
+            partial(self.rotating_log.refresh_files, start_interception=True),
+        )
 
         try:
+            os.environ['LINES'], os.environ['COLUMNS'] = str(int(lines)), str(int(columns))
             with self._daemon_context:
+                os.environ[STATIC_CONFIG['environment']['daemon_id']] = self.daemon_id
+                self.rotating_log.refresh_files(start_interception=True)
                 try:
                     with open(self.pid_path, 'w+', encoding='utf-8') as f:
                         f.write(str(os.getpid()))
@@ -192,11 +201,11 @@ class Daemon:
                     return result
         except Exception as e:
             daemon_error = traceback.format_exc()
-            with open(LOGS_RESOURCES_PATH, 'a+', encoding='utf-8') as f:
+            with open(DAEMON_ERROR_LOG_PATH, 'a+', encoding='utf-8') as f:
                 f.write(daemon_error)
 
         if daemon_error:
-            warn("Encountered an error while starting the daemon '{self}':\n{daemon_error}")
+            warn(f"Encountered an error while starting the daemon '{self}':\n{daemon_error}")
 
 
     def _capture_process_timestamp(
@@ -456,6 +465,9 @@ class Daemon:
         Handle `SIGINT` within the Daemon context.
         This method is injected into the `DaemonContext`.
         """
+        # from meerschaum.utils.daemon.FileDescriptorInterceptor import STOP_READING_FD_EVENT
+        # STOP_READING_FD_EVENT.set()
+        self.rotating_log.stop_log_fd_interception(unused_only=False)
         timer = self.__dict__.get('_log_refresh_timer', None)
         if timer is not None:
             timer.cancel()
@@ -464,10 +476,18 @@ class Daemon:
         if daemon_context is not None:
             daemon_context.close()
 
-        self.rotating_log.stop_log_fd_interception()
-
         _close_pools()
-        raise SystemExit(0)
+        import threading
+        for thread in threading.enumerate():
+            if thread.name == 'MainThread':
+                continue
+            try:
+                if thread.is_alive():
+                    stack = traceback.format_stack(sys._current_frames()[thread.ident])
+                    thread.join()
+            except Exception as e:
+                warn(traceback.format_exc())
+        raise KeyboardInterrupt()
 
 
     def _handle_sigterm(self, signal_number: int, stack_frame: 'frame') -> None:
@@ -483,10 +503,8 @@
         if daemon_context is not None:
             daemon_context.close()
 
-        self.rotating_log.stop_log_fd_interception()
-
         _close_pools()
-        raise SystemExit(1)
+        raise SystemExit(0)
 
 
     def _send_signal(
@@ -667,6 +685,7 @@ class Daemon:
             self.log_path,
             redirect_streams = True,
             write_timestamps = True,
+            timestamp_format = get_config('jobs', 'logs', 'timestamp_format'),
         )
         return self._rotating_log
 
@@ -905,6 +924,13 @@ class Daemon:
             return False, msg
         if not keep_logs:
             self.rotating_log.delete()
+            try:
+                if self.log_offset_path.exists():
+                    self.log_offset_path.unlink()
+            except Exception as e:
+                msg = f"Failed to remove offset file for '{self.daemon_id}':\n{e}"
+                warn(msg)
+                return False, msg
         return True, "Success"
 
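
Note: the log-refresh timer above now binds `start_interception=True` into its callback with `functools.partial`. A stdlib-only sketch of the same pattern, with `threading.Timer` standing in for meerschaum's `RepeatTimer` and a hypothetical callback:

# Stdlib sketch: bind keyword arguments into a timer callback with functools.partial.
import threading
from functools import partial

def refresh_files(start_interception: bool = False):
    print(f"refreshing log files (start_interception={start_interception})")

timer = threading.Timer(5.0, partial(refresh_files, start_interception=True))
timer.start()   # fires once after 5 seconds; a RepeatTimer would re-arm itself
timer.cancel()  # cancel before it fires, mirroring Daemon._handle_sigint()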