meerschaum-2.2.0rc2-py3-none-any.whl → meerschaum-2.2.0rc4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. meerschaum/actions/show.py +2 -2
  2. meerschaum/api/__init__.py +24 -14
  3. meerschaum/api/_oauth2.py +4 -4
  4. meerschaum/api/dash/callbacks/dashboard.py +3 -3
  5. meerschaum/api/dash/keys.py +1 -1
  6. meerschaum/api/dash/pages/dashboard.py +14 -4
  7. meerschaum/config/_jobs.py +6 -3
  8. meerschaum/config/_paths.py +1 -0
  9. meerschaum/config/_version.py +1 -1
  10. meerschaum/config/static/__init__.py +1 -0
  11. meerschaum/connectors/__init__.py +2 -0
  12. meerschaum/connectors/sql/SQLConnector.py +4 -2
  13. meerschaum/connectors/sql/_create_engine.py +4 -4
  14. meerschaum/connectors/sql/_instance.py +3 -1
  15. meerschaum/connectors/sql/_pipes.py +54 -38
  16. meerschaum/connectors/sql/_sql.py +7 -9
  17. meerschaum/core/User/_User.py +2 -0
  18. meerschaum/plugins/__init__.py +23 -1
  19. meerschaum/utils/daemon/Daemon.py +21 -5
  20. meerschaum/utils/daemon/FileDescriptorInterceptor.py +46 -8
  21. meerschaum/utils/daemon/RotatingFile.py +12 -2
  22. meerschaum/utils/daemon/__init__.py +2 -0
  23. meerschaum/utils/packages/__init__.py +10 -4
  24. meerschaum/utils/packages/_packages.py +11 -13
  25. meerschaum/utils/process.py +13 -10
  26. meerschaum/utils/schedule.py +15 -1
  27. {meerschaum-2.2.0rc2.dist-info → meerschaum-2.2.0rc4.dist-info}/METADATA +45 -47
  28. {meerschaum-2.2.0rc2.dist-info → meerschaum-2.2.0rc4.dist-info}/RECORD +34 -34
  29. {meerschaum-2.2.0rc2.dist-info → meerschaum-2.2.0rc4.dist-info}/LICENSE +0 -0
  30. {meerschaum-2.2.0rc2.dist-info → meerschaum-2.2.0rc4.dist-info}/NOTICE +0 -0
  31. {meerschaum-2.2.0rc2.dist-info → meerschaum-2.2.0rc4.dist-info}/WHEEL +0 -0
  32. {meerschaum-2.2.0rc2.dist-info → meerschaum-2.2.0rc4.dist-info}/entry_points.txt +0 -0
  33. {meerschaum-2.2.0rc2.dist-info → meerschaum-2.2.0rc4.dist-info}/top_level.txt +0 -0
  34. {meerschaum-2.2.0rc2.dist-info → meerschaum-2.2.0rc4.dist-info}/zip-safe +0 -0
@@ -589,8 +589,8 @@ def _show_logs(
589
589
  if not ANSI:
590
590
  info = print
591
591
  colors = get_config('jobs', 'logs', 'colors')
592
- timestamp_format = get_config('jobs', 'logs', 'timestamp_format')
593
- follow_timestamp_format = get_config('jobs', 'logs', 'follow_timestamp_format')
592
+ timestamp_format = get_config('jobs', 'logs', 'timestamps', 'format')
593
+ follow_timestamp_format = get_config('jobs', 'logs', 'timestamps', 'follow_format')
594
594
  daemons = get_filtered_daemons(action)
595
595
  now = datetime.now(timezone.utc)
596
596
  now_str = now.strftime(timestamp_format)
@@ -30,25 +30,35 @@ _locks = {'pipes': RLock(), 'connector': RLock(), 'uvicorn_config': RLock()}
30
30
  CHECK_UPDATE = os.environ.get(STATIC_CONFIG['environment']['runtime'], None) != 'docker'
31
31
 
32
32
  endpoints = STATIC_CONFIG['api']['endpoints']
33
- aiofiles = attempt_import('aiofiles', lazy=False, check_update=CHECK_UPDATE)
34
- typing_extensions = attempt_import(
35
- 'typing_extensions', lazy=False, check_update=CHECK_UPDATE,
36
- venv = None,
37
- )
38
- pydantic_dataclasses = attempt_import(
39
- 'pydantic.dataclasses', lazy=False, check_update=CHECK_UPDATE,
33
+
34
+ (
35
+ fastapi,
36
+ aiofiles,
37
+ starlette_responses,
38
+ multipart,
39
+ packaging_version,
40
+ ) = attempt_import(
41
+ 'fastapi',
42
+ 'aiofiles',
43
+ 'starlette.responses',
44
+ 'multipart',
45
+ 'packaging.version',
46
+ lazy = False,
47
+ check_update = CHECK_UPDATE,
40
48
  )
41
- fastapi = attempt_import('fastapi', lazy=False, check_update=CHECK_UPDATE)
42
- starlette_reponses = attempt_import(
43
- 'starlette.responses', warn=False, lazy=False,
44
- check_update=CHECK_UPDATE,
49
+ (
50
+ typing_extensions,
51
+ uvicorn_workers,
52
+ ) = attempt_import(
53
+ 'typing_extensions',
54
+ 'uvicorn.workers',
55
+ lazy = False,
56
+ check_update = CHECK_UPDATE,
57
+ venv = None,
45
58
  )
46
- python_multipart = attempt_import('multipart', lazy=False, check_update=CHECK_UPDATE)
47
- packaging_version = attempt_import('packaging.version', check_update=CHECK_UPDATE)
48
59
  from meerschaum.api._chain import check_allow_chaining, DISALLOW_CHAINING_MESSAGE
49
60
  uvicorn_config_path = API_UVICORN_RESOURCES_PATH / SERVER_ID / 'config.json'
50
61
 
51
- uvicorn_workers = attempt_import('uvicorn.workers', venv=None, check_update=CHECK_UPDATE)
52
62
  uvicorn_config = None
53
63
  sys_config = get_config('system', 'api')
54
64
  permissions_config = get_config('system', 'api', 'permissions')
meerschaum/api/_oauth2.py CHANGED
@@ -7,11 +7,11 @@ Define JWT authorization here.
7
7
  """
8
8
 
9
9
  import os
10
- from meerschaum.api import app, endpoints
10
+ from meerschaum.api import app, endpoints, CHECK_UPDATE
11
11
  from meerschaum.utils.packages import attempt_import
12
- fastapi = attempt_import('fastapi', lazy=False)
13
- fastapi_responses = attempt_import('fastapi.responses', lazy=False)
14
- fastapi_login = attempt_import('fastapi_login')
12
+ fastapi = attempt_import('fastapi', lazy=False, check_update=CHECK_UPDATE)
13
+ fastapi_responses = attempt_import('fastapi.responses', lazy=False, check_update=CHECK_UPDATE)
14
+ fastapi_login = attempt_import('fastapi_login', check_update=CHECK_UPDATE)
15
15
 
16
16
  LoginManager = fastapi_login.LoginManager
17
17
  def generate_secret_key() -> str:
@@ -713,11 +713,11 @@ def download_pipe_csv(n_clicks):
713
713
  pipe = pipe_from_ctx(ctx, 'n_clicks')
714
714
  if pipe is None:
715
715
  raise PreventUpdate
716
- filename = str(pipe.target) + '.csv'
717
716
  bounds = pipe.get_chunk_bounds(bounded=True, debug=debug)
718
- begin, _ = bounds[-1]
717
+ begin, end = bounds[-1]
718
+ filename = str(pipe.target) + f" {begin} - {end}.csv"
719
719
  try:
720
- df = pipe.get_data(begin=begin, end=None, debug=debug)
720
+ df = pipe.get_data(begin=begin, end=end, debug=debug)
721
721
  except Exception as e:
722
722
  df = None
723
723
  if df is not None:
@@ -108,7 +108,7 @@ action_dropdown_row = html.Div(
108
108
  id = 'flags-dropdown',
109
109
  multi = True,
110
110
  placeholder = 'Boolean flags',
111
- options = [],
111
+ options = ['--yes'],
112
112
  value = ['--yes'],
113
113
  ),
114
114
  id = 'flags-dropdown-div',
@@ -11,12 +11,22 @@ from meerschaum.config import __doc__ as doc, get_config
11
11
  from meerschaum.utils.misc import get_connector_labels
12
12
  from meerschaum.utils.packages import attempt_import, import_html, import_dcc, import_pandas
13
13
  from meerschaum.api import endpoints, CHECK_UPDATE
14
- dex = attempt_import('dash_extensions', lazy=False, check_update=CHECK_UPDATE)
15
- dbc = attempt_import('dash_bootstrap_components', lazy=False, check_update=CHECK_UPDATE)
14
+ (
15
+ dex,
16
+ px,
17
+ daq,
18
+ dbc,
19
+ ) = attempt_import(
20
+ 'dash_extensions',
21
+ 'plotly.express',
22
+ 'dash_daq',
23
+ 'dash_bootstrap_components',
24
+ lazy = False,
25
+ warn = False,
26
+ check_update = CHECK_UPDATE,
27
+ )
16
28
  html, dcc = import_html(check_update=CHECK_UPDATE), import_dcc(check_update=CHECK_UPDATE)
17
29
  pd = import_pandas(check_update=CHECK_UPDATE)
18
- px = attempt_import('plotly.express', warn=False, check_update=CHECK_UPDATE)
19
- daq = attempt_import('dash_daq', warn=False, check_update=CHECK_UPDATE)
20
30
 
21
31
  from meerschaum.api.dash.components import (
22
32
  go_button,
@@ -14,13 +14,16 @@ default_jobs_config = {
14
14
  'columns': 70,
15
15
  },
16
16
  'logs': {
17
+ 'timestamps': {
18
+ 'enabled': True,
19
+ 'format': '%Y-%m-%d %H:%M',
20
+ 'follow_format': '%H:%M',
21
+ },
17
22
  'num_files_to_keep': 5,
18
23
  'max_file_size': 100_000,
19
24
  'lines_to_show': 30,
20
25
  'refresh_files_seconds': 5,
21
- 'min_buffer_len': 10,
22
- 'timestamp_format': '%Y-%m-%d %H:%M',
23
- 'follow_timestamp_format': '%H:%M',
26
+ 'min_buffer_len': 5,
24
27
  'colors': [
25
28
  'cyan',
26
29
  'magenta',
@@ -129,6 +129,7 @@ paths = {
129
129
 
130
130
  'PLUGINS_RESOURCES_PATH' : ('{INTERNAL_RESOURCES_PATH}', 'plugins'),
131
131
  'PLUGINS_INTERNAL_LOCK_PATH' : ('{INTERNAL_RESOURCES_PATH}', 'plugins.lock'),
132
+ 'PLUGINS_PACKAGES_INTERNAL_PATH' : ('{INTERNAL_RESOURCES_PATH}', 'packaged_plugins'),
132
133
  'PLUGINS_ARCHIVES_RESOURCES_PATH': ('{PLUGINS_RESOURCES_PATH}', '.archives'),
133
134
  'PLUGINS_TEMP_RESOURCES_PATH' : ('{PLUGINS_RESOURCES_PATH}', '.tmp'),
134
135
  'PLUGINS_INIT_PATH' : ('{PLUGINS_RESOURCES_PATH}', '__init__.py'),
@@ -2,4 +2,4 @@
2
2
  Specify the Meerschaum release version.
3
3
  """
4
4
 
5
- __version__ = "2.2.0rc2"
5
+ __version__ = "2.2.0rc4"
@@ -60,6 +60,7 @@ STATIC_CONFIG: Dict[str, Any] = {
60
60
  'gid': 'MRSM_GID',
61
61
  'noask': 'MRSM_NOASK',
62
62
  'id': 'MRSM_SERVER_ID',
63
+ 'daemon_id': 'MRSM_DAEMON_ID',
63
64
  'uri_regex': r'MRSM_([a-zA-Z0-9]*)_(\d*[a-zA-Z][a-zA-Z0-9-_+]*$)',
64
65
  'prefix': 'MRSM_',
65
66
  },
@@ -317,6 +317,8 @@ def load_plugin_connectors():
317
317
  from meerschaum.plugins import get_plugins, import_plugins
318
318
  to_import = []
319
319
  for plugin in get_plugins():
320
+ if plugin is None:
321
+ continue
320
322
  with open(plugin.__file__, encoding='utf-8') as f:
321
323
  text = f.read()
322
324
  if 'make_connector' in text:
@@ -128,8 +128,10 @@ class SQLConnector(Connector):
128
128
  """
129
129
  if 'uri' in kw:
130
130
  uri = kw['uri']
131
- if uri.startswith('postgres://'):
132
- uri = uri.replace('postgres://', 'postgresql://', 1)
131
+ if uri.startswith('postgres') and not uri.startswith('postgresql'):
132
+ uri = uri.replace('postgres', 'postgresql', 1)
133
+ if uri.startswith('postgresql') and not uri.startswith('postgresql+'):
134
+ uri = uri.replace('postgresql://', 'postgresql+psycopg', 1)
133
135
  if uri.startswith('timescaledb://'):
134
136
  uri = uri.replace('timescaledb://', 'postgresql://', 1)
135
137
  flavor = 'timescaledb'
@@ -28,7 +28,7 @@ default_create_engine_args = {
28
28
  }
29
29
  flavor_configs = {
30
30
  'timescaledb' : {
31
- 'engine' : 'postgresql',
31
+ 'engine' : 'postgresql+psycopg',
32
32
  'create_engine' : default_create_engine_args,
33
33
  'omit_create_engine': {'method',},
34
34
  'to_sql' : {},
@@ -38,7 +38,7 @@ flavor_configs = {
38
38
  },
39
39
  },
40
40
  'postgresql' : {
41
- 'engine' : 'postgresql',
41
+ 'engine' : 'postgresql+psycopg',
42
42
  'create_engine' : default_create_engine_args,
43
43
  'omit_create_engine': {'method',},
44
44
  'to_sql' : {},
@@ -48,7 +48,7 @@ flavor_configs = {
48
48
  },
49
49
  },
50
50
  'citus' : {
51
- 'engine' : 'postgresql',
51
+ 'engine' : 'postgresql+psycopg',
52
52
  'create_engine' : default_create_engine_args,
53
53
  'omit_create_engine': {'method',},
54
54
  'to_sql' : {},
@@ -242,7 +242,7 @@ def create_engine(
242
242
 
243
243
  ### Sometimes the timescaledb:// flavor can slip in.
244
244
  if _uri and self.flavor in ('timescaledb',) and self.flavor in _uri:
245
- engine_str = engine_str.replace(f'{self.flavor}://', 'postgresql://')
245
+ engine_str = engine_str.replace(f'{self.flavor}', 'postgresql', 1)
246
246
 
247
247
  if debug:
248
248
  dprint(
@@ -155,7 +155,9 @@ def _drop_old_temporary_tables(
155
155
  temp_tables_table = get_tables(mrsm_instance=self, create=False, debug=debug)['temp_tables']
156
156
  last_check = getattr(self, '_stale_temporary_tables_check_timestamp', 0)
157
157
  now_ts = time.perf_counter()
158
- if refresh or not last_check or (now_ts - last_check) > 60:
158
+ if not last_check:
159
+ self._stale_temporary_tables_check_timestamp = 0
160
+ if refresh or (now_ts - last_check) < 60:
159
161
  self._stale_temporary_tables_check_timestamp = now_ts
160
162
  return self._drop_temporary_tables(debug=debug)
161
163
 
@@ -752,7 +752,7 @@ def get_pipe_data(
752
752
  debug = debug,
753
753
  **kw
754
754
  )
755
-
755
+
756
756
  if is_dask:
757
757
  index_col = pipe.columns.get('datetime', None)
758
758
  kw['index_col'] = index_col
@@ -763,6 +763,7 @@ def get_pipe_data(
763
763
  if typ == 'numeric' and col in dtypes
764
764
  ]
765
765
  kw['coerce_float'] = kw.get('coerce_float', (len(numeric_columns) == 0))
766
+
766
767
  df = self.read(
767
768
  query,
768
769
  dtype = dtypes,
@@ -1478,43 +1479,11 @@ def sync_pipe_inplace(
1478
1479
  from meerschaum.utils.misc import generate_password
1479
1480
  from meerschaum.utils.debug import dprint
1480
1481
 
1481
- sqlalchemy, sqlalchemy_orm = mrsm.attempt_import('sqlalchemy', 'sqlalchemy.orm')
1482
- metadef = self.get_pipe_metadef(
1483
- pipe,
1484
- params = params,
1485
- begin = begin,
1486
- end = end,
1487
- check_existing = check_existing,
1488
- debug = debug,
1489
- )
1490
- pipe_name = sql_item_name(pipe.target, self.flavor, self.get_pipe_schema(pipe))
1491
- upsert = pipe.parameters.get('upsert', False) and f'{self.flavor}-upsert' in update_queries
1492
- internal_schema = self.internal_schema
1493
- database = getattr(self, 'database', self.parse_uri(self.URI).get('database', None))
1494
-
1495
- if not pipe.exists(debug=debug):
1496
- create_pipe_query = get_create_table_query(
1497
- metadef,
1498
- pipe.target,
1499
- self.flavor,
1500
- schema = self.get_pipe_schema(pipe),
1501
- )
1502
- result = self.exec(create_pipe_query, debug=debug)
1503
- if result is None:
1504
- return False, f"Could not insert new data into {pipe} from its SQL query definition."
1505
- if not self.create_indices(pipe, debug=debug):
1506
- warn(f"Failed to create indices for {pipe}. Continuing...")
1507
-
1508
- rowcount = pipe.get_rowcount(debug=debug)
1509
- return True, f"Inserted {rowcount}, updated 0 rows."
1510
-
1511
- session = sqlalchemy_orm.Session(self.engine)
1512
- connectable = session if self.flavor != 'duckdb' else self
1513
-
1514
1482
  transact_id = generate_password(3)
1515
1483
  def get_temp_table_name(label: str) -> str:
1516
1484
  return '-' + transact_id + '_' + label + '_' + pipe.target
1517
1485
 
1486
+ internal_schema = self.internal_schema
1518
1487
  temp_table_roots = ['backtrack', 'new', 'delta', 'joined', 'unseen', 'update']
1519
1488
  temp_tables = {
1520
1489
  table_root: get_temp_table_name(table_root)
@@ -1528,6 +1497,17 @@ def sync_pipe_inplace(
1528
1497
  )
1529
1498
  for table_root, table_name_raw in temp_tables.items()
1530
1499
  }
1500
+ metadef = self.get_pipe_metadef(
1501
+ pipe,
1502
+ params = params,
1503
+ begin = begin,
1504
+ end = end,
1505
+ check_existing = check_existing,
1506
+ debug = debug,
1507
+ )
1508
+ pipe_name = sql_item_name(pipe.target, self.flavor, self.get_pipe_schema(pipe))
1509
+ upsert = pipe.parameters.get('upsert', False) and f'{self.flavor}-upsert' in update_queries
1510
+ database = getattr(self, 'database', self.parse_uri(self.URI).get('database', None))
1531
1511
 
1532
1512
  def clean_up_temp_tables(ready_to_drop: bool = False):
1533
1513
  log_success, log_msg = self._log_temporary_tables_creation(
@@ -1541,6 +1521,36 @@ def sync_pipe_inplace(
1541
1521
  )
1542
1522
  if not log_success:
1543
1523
  warn(log_msg)
1524
+ drop_stale_success, drop_stale_msg = self._drop_old_temporary_tables(
1525
+ refresh = False,
1526
+ debug = debug,
1527
+ )
1528
+ if not drop_stale_success:
1529
+ warn(drop_stale_msg)
1530
+ return drop_stale_success, drop_stale_msg
1531
+
1532
+ sqlalchemy, sqlalchemy_orm = mrsm.attempt_import('sqlalchemy', 'sqlalchemy.orm')
1533
+ if not pipe.exists(debug=debug):
1534
+ create_pipe_query = get_create_table_query(
1535
+ metadef,
1536
+ pipe.target,
1537
+ self.flavor,
1538
+ schema = self.get_pipe_schema(pipe),
1539
+ )
1540
+ result = self.exec(create_pipe_query, debug=debug)
1541
+ if result is None:
1542
+ _ = clean_up_temp_tables()
1543
+ return False, f"Could not insert new data into {pipe} from its SQL query definition."
1544
+
1545
+ if not self.create_indices(pipe, debug=debug):
1546
+ warn(f"Failed to create indices for {pipe}. Continuing...")
1547
+
1548
+ rowcount = pipe.get_rowcount(debug=debug)
1549
+ _ = clean_up_temp_tables()
1550
+ return True, f"Inserted {rowcount}, updated 0 rows."
1551
+
1552
+ session = sqlalchemy_orm.Session(self.engine)
1553
+ connectable = session if self.flavor != 'duckdb' else self
1544
1554
 
1545
1555
  create_new_query = get_create_table_query(
1546
1556
  metadef,
@@ -1908,10 +1918,6 @@ def sync_pipe_inplace(
1908
1918
  )
1909
1919
  _ = clean_up_temp_tables(ready_to_drop=True)
1910
1920
 
1911
- drop_stale_success, drop_stale_msg = self._drop_old_temporary_tables(refresh=False, debug=debug)
1912
- if not drop_stale_success:
1913
- warn(drop_stale_msg)
1914
-
1915
1921
  return True, msg
1916
1922
 
1917
1923
 
@@ -2372,6 +2378,16 @@ def get_pipe_columns_types(
2372
2378
  """
2373
2379
  if not pipe.exists(debug=debug):
2374
2380
  return {}
2381
+
2382
+ if self.flavor == 'duckdb':
2383
+ from meerschaum.utils.sql import get_table_cols_types
2384
+ return get_table_cols_types(
2385
+ pipe.target,
2386
+ self,
2387
+ flavor = self.flavor,
2388
+ schema = self.schema,
2389
+ )
2390
+
2375
2391
  table_columns = {}
2376
2392
  try:
2377
2393
  pipe_table = self.get_pipe_table(pipe, debug=debug)
@@ -943,17 +943,15 @@ def psql_insert_copy(
943
943
  ) for row in data_iter
944
944
  )
945
945
 
946
+ table_name = sql_item_name(table.name, 'postgresql', table.schema)
947
+ columns = ', '.join(f'"{k}"' for k in keys)
948
+ sql = f"COPY {table_name} ({columns}) FROM STDIN WITH CSV NULL '\\N'"
949
+
946
950
  dbapi_conn = conn.connection
947
951
  with dbapi_conn.cursor() as cur:
948
- s_buf = StringIO()
949
- writer = csv.writer(s_buf)
950
- writer.writerows(data_iter)
951
- s_buf.seek(0)
952
-
953
- columns = ', '.join(f'"{k}"' for k in keys)
954
- table_name = sql_item_name(table.name, 'postgresql', table.schema)
955
- sql = f"COPY {table_name} ({columns}) FROM STDIN WITH CSV NULL '\\N'"
956
- cur.copy_expert(sql=sql, file=s_buf)
952
+ with cur.copy(sql) as copy:
953
+ writer = csv.writer(copy)
954
+ writer.writerows(data_iter)
957
955
 
958
956
 
959
957
  def format_sql_query_for_dask(query: str) -> 'sqlalchemy.sql.selectable.Select':
@@ -86,6 +86,8 @@ def verify_password(
86
86
  -------
87
87
  A `bool` indicating whether `password` matches `password_hash`.
88
88
  """
89
+ if password is None or password_hash is None:
90
+ return False
89
91
  hash_config = STATIC_CONFIG['users']['password_hash']
90
92
  try:
91
93
  digest, rounds_str, encoded_salt, encoded_checksum = password_hash.split('$')[1:]
@@ -247,6 +247,26 @@ def sync_plugins_symlinks(debug: bool = False, warn: bool = True) -> None:
247
247
  _warn(f"Unable to create lockfile {PLUGINS_INTERNAL_LOCK_PATH}:\n{e}")
248
248
 
249
249
  with _locks['internal_plugins']:
250
+
251
+ try:
252
+ from importlib.metadata import entry_points
253
+ except ImportError:
254
+ importlib_metadata = attempt_import('importlib_metadata', lazy=False)
255
+ entry_points = importlib_metadata.entry_points
256
+
257
+ ### NOTE: Allow plugins to be installed via `pip`.
258
+ packaged_plugin_paths = []
259
+ discovered_packaged_plugins_eps = entry_points(group='meerschaum.plugins')
260
+ for ep in discovered_packaged_plugins_eps:
261
+ module_name = ep.name
262
+ for package_file_path in ep.dist.files:
263
+ if package_file_path.suffix != '.py':
264
+ continue
265
+ if str(package_file_path) == f'{module_name}.py':
266
+ packaged_plugin_paths.append(package_file_path.locate())
267
+ elif str(package_file_path) == f'{module_name}/__init__.py':
268
+ packaged_plugin_paths.append(package_file_path.locate().parent)
269
+
250
270
  if is_symlink(PLUGINS_RESOURCES_PATH) or not PLUGINS_RESOURCES_PATH.exists():
251
271
  try:
252
272
  PLUGINS_RESOURCES_PATH.unlink()
@@ -255,7 +275,6 @@ def sync_plugins_symlinks(debug: bool = False, warn: bool = True) -> None:
255
275
 
256
276
  PLUGINS_RESOURCES_PATH.mkdir(exist_ok=True)
257
277
 
258
-
259
278
  existing_symlinked_paths = [
260
279
  (PLUGINS_RESOURCES_PATH / item)
261
280
  for item in os.listdir(PLUGINS_RESOURCES_PATH)
@@ -275,6 +294,7 @@ def sync_plugins_symlinks(debug: bool = False, warn: bool = True) -> None:
275
294
  for plugins_path in PLUGINS_DIR_PATHS
276
295
  ]
277
296
  ))
297
+ plugins_to_be_symlinked.extend(packaged_plugin_paths)
278
298
 
279
299
  ### Check for duplicates.
280
300
  seen_plugins = defaultdict(lambda: 0)
@@ -538,6 +558,8 @@ def get_plugins(*to_load, try_import: bool = True) -> Union[Tuple[Plugin], Plugi
538
558
  ]
539
559
  plugins = tuple(plugin for plugin in _plugins if plugin.is_installed(try_import=try_import))
540
560
  if len(to_load) == 1:
561
+ if len(plugins) == 0:
562
+ raise ValueError(f"Plugin '{to_load[0]}' is not installed.")
541
563
  return plugins[0]
542
564
  return plugins
543
565
 
@@ -19,6 +19,7 @@ from functools import partial
19
19
  from datetime import datetime, timezone
20
20
  from meerschaum.utils.typing import Optional, Dict, Any, SuccessTuple, Callable, List, Union
21
21
  from meerschaum.config import get_config
22
+ from meerschaum.config.static import STATIC_CONFIG
22
23
  from meerschaum.config._paths import DAEMON_RESOURCES_PATH, LOGS_RESOURCES_PATH
23
24
  from meerschaum.config._patch import apply_patch_to_config
24
25
  from meerschaum.utils.warnings import warn, error
@@ -170,9 +171,11 @@ class Daemon:
170
171
  log_refresh_seconds,
171
172
  partial(self.rotating_log.refresh_files, start_interception=True),
172
173
  )
174
+
173
175
  try:
174
176
  os.environ['LINES'], os.environ['COLUMNS'] = str(int(lines)), str(int(columns))
175
177
  with self._daemon_context:
178
+ os.environ[STATIC_CONFIG['environment']['daemon_id']] = self.daemon_id
176
179
  self.rotating_log.refresh_files(start_interception=True)
177
180
  try:
178
181
  with open(self.pid_path, 'w+', encoding='utf-8') as f:
@@ -462,6 +465,9 @@ class Daemon:
462
465
  Handle `SIGINT` within the Daemon context.
463
466
  This method is injected into the `DaemonContext`.
464
467
  """
468
+ # from meerschaum.utils.daemon.FileDescriptorInterceptor import STOP_READING_FD_EVENT
469
+ # STOP_READING_FD_EVENT.set()
470
+ self.rotating_log.stop_log_fd_interception(unused_only=False)
465
471
  timer = self.__dict__.get('_log_refresh_timer', None)
466
472
  if timer is not None:
467
473
  timer.cancel()
@@ -471,7 +477,16 @@ class Daemon:
471
477
  daemon_context.close()
472
478
 
473
479
  _close_pools()
474
- self.rotating_log.stop_log_fd_interception()
480
+ import threading
481
+ for thread in threading.enumerate():
482
+ if thread.name == 'MainThread':
483
+ continue
484
+ try:
485
+ if thread.is_alive():
486
+ stack = traceback.format_stack(sys._current_frames()[thread.ident])
487
+ thread.join()
488
+ except Exception as e:
489
+ warn(traceback.format_exc())
475
490
  raise KeyboardInterrupt()
476
491
 
477
492
 
@@ -489,7 +504,7 @@ class Daemon:
489
504
  daemon_context.close()
490
505
 
491
506
  _close_pools()
492
- raise SystemExit(1)
507
+ raise SystemExit(0)
493
508
 
494
509
 
495
510
  def _send_signal(
@@ -669,8 +684,8 @@ class Daemon:
669
684
  self._rotating_log = RotatingFile(
670
685
  self.log_path,
671
686
  redirect_streams = True,
672
- write_timestamps = True,
673
- timestamp_format = get_config('jobs', 'logs', 'timestamp_format'),
687
+ write_timestamps = get_config('jobs', 'logs', 'timestamps', 'enabled'),
688
+ timestamp_format = get_config('jobs', 'logs', 'timestamps', 'format'),
674
689
  )
675
690
  return self._rotating_log
676
691
 
@@ -684,7 +699,8 @@ class Daemon:
684
699
  self.rotating_log.file_path,
685
700
  num_files_to_keep = self.rotating_log.num_files_to_keep,
686
701
  max_file_size = self.rotating_log.max_file_size,
687
- write_timestamps = True,
702
+ write_timestamps = get_config('jobs', 'logs', 'timestamps', 'enabled'),
703
+ timestamp_format = get_config('jobs', 'logs', 'timestamps', 'format'),
688
704
  )
689
705
  return new_rotating_log.read()
690
706
 
@@ -7,12 +7,15 @@ Intercept OS-level file descriptors.
7
7
  """
8
8
 
9
9
  import os
10
+ import select
10
11
  import traceback
12
+ from threading import Event
11
13
  from datetime import datetime
12
14
  from meerschaum.utils.typing import Callable
13
15
  from meerschaum.utils.warnings import warn
14
16
 
15
17
  FD_CLOSED: int = 9
18
+ STOP_READING_FD_EVENT: Event = Event()
16
19
 
17
20
  class FileDescriptorInterceptor:
18
21
  """
@@ -32,10 +35,12 @@ class FileDescriptorInterceptor:
32
35
  injection_hook: Callable[[], str]
33
36
  A callable which returns a string to be injected into the written data.
34
37
  """
38
+ self.stop_event = Event()
35
39
  self.injection_hook = injection_hook
36
40
  self.original_file_descriptor = file_descriptor
37
41
  self.new_file_descriptor = os.dup(file_descriptor)
38
42
  self.read_pipe, self.write_pipe = os.pipe()
43
+ self.signal_read_pipe, self.signal_write_pipe = os.pipe()
39
44
  os.dup2(self.write_pipe, file_descriptor)
40
45
 
41
46
  def start_interception(self):
@@ -44,11 +49,23 @@ class FileDescriptorInterceptor:
44
49
 
45
50
  NOTE: This is blocking and is meant to be run in a thread.
46
51
  """
52
+ os.set_blocking(self.read_pipe, False)
53
+ os.set_blocking(self.signal_read_pipe, False)
47
54
  is_first_read = True
48
- while True:
49
- data = os.read(self.read_pipe, 1024)
50
- if not data:
51
- break
55
+ while not self.stop_event.is_set():
56
+ try:
57
+ rlist, _, _ = select.select([self.read_pipe, self.signal_read_pipe], [], [], 0.1)
58
+ if self.signal_read_pipe in rlist:
59
+ break
60
+ if not rlist:
61
+ continue
62
+ data = os.read(self.read_pipe, 1024)
63
+ if not data:
64
+ break
65
+ except BlockingIOError:
66
+ continue
67
+ except OSError as e:
68
+ continue
52
69
 
53
70
  first_char_is_newline = data[0] == b'\n'
54
71
  last_char_is_newline = data[-1] == b'\n'
@@ -65,16 +82,17 @@ class FileDescriptorInterceptor:
65
82
  if last_char_is_newline
66
83
  else data.replace(b'\n', b'\n' + injected_bytes)
67
84
  )
68
-
69
85
  os.write(self.new_file_descriptor, modified_data)
70
86
 
87
+
71
88
  def stop_interception(self):
72
89
  """
73
- Restore the file descriptors and close the new pipes.
90
+ Close the new file descriptors.
74
91
  """
92
+ self.stop_event.set()
93
+ os.write(self.signal_write_pipe, b'\0')
75
94
  try:
76
- os.dup2(self.new_file_descriptor, self.original_file_descriptor)
77
- # os.close(self.new_file_descriptor)
95
+ os.close(self.new_file_descriptor)
78
96
  except OSError as e:
79
97
  if e.errno != FD_CLOSED:
80
98
  warn(
@@ -100,3 +118,23 @@ class FileDescriptorInterceptor:
100
118
  + "to the intercepted file descriptor:\n"
101
119
  + f"{traceback.format_exc()}"
102
120
  )
121
+
122
+ try:
123
+ os.close(self.signal_read_pipe)
124
+ except OSError as e:
125
+ if e.errno != FD_CLOSED:
126
+ warn(
127
+ f"Error while trying to close the signal-read-pipe "
128
+ + "to the intercepted file descriptor:\n"
129
+ + f"{traceback.format_exc()}"
130
+ )
131
+
132
+ try:
133
+ os.close(self.signal_write_pipe)
134
+ except OSError as e:
135
+ if e.errno != FD_CLOSED:
136
+ warn(
137
+ f"Error while trying to close the signal-write-pipe "
138
+ + "to the intercepted file descriptor:\n"
139
+ + f"{traceback.format_exc()}"
140
+ )