meerschaum 2.1.5__py3-none-any.whl → 2.1.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -11,11 +11,7 @@ from copy import deepcopy
  from meerschaum.utils.typing import Union, SuccessTuple, Any, Callable, Optional, List, Dict
  from meerschaum.utils.packages import attempt_import
  from meerschaum.config import __doc__, __version__ as version, get_config
- cmd_import_name = get_config('shell', 'cmd')
- cmd_venv = None if cmd_import_name == 'cmd' else 'mrsm'
- cmd = attempt_import(cmd_import_name, venv=cmd_venv, warn=False, lazy=False)
- if cmd is None or isinstance(cmd, dict):
-     cmd = attempt_import('cmd', lazy=False, warn=False)
+ import cmd
  _old_input = cmd.__builtins__['input']
  prompt_toolkit = attempt_import('prompt_toolkit', lazy=False, warn=False, install=True)
  (
@@ -53,7 +49,6 @@ hidden_commands = {
      'os',
      'sh',
      'pass',
-     'exit',
      'quit',
      'eof',
      'exit',
meerschaum/actions/api.py CHANGED
@@ -44,7 +44,7 @@ def api(
          sysargs = []
      if len(action) == 0:
          info(api.__doc__)
-         return False, "Please provide a command to excecute (see above)."
+         return False, "Please provide a command to execute (see above)."

      boot_keywords = {'start', 'boot', 'init'}
      if action[0] in boot_keywords:
@@ -9,7 +9,7 @@ NOTE: `sync` required a SQL connection and is not intended for client use
  """

  from __future__ import annotations
- from datetime import timedelta
+ from datetime import timedelta, datetime, timezone
  import meerschaum as mrsm
  from meerschaum.utils.typing import SuccessTuple, Any, List, Optional, Tuple, Union

@@ -39,7 +39,7 @@ def sync(

  def _pipes_lap(
      workers: Optional[int] = None,
-     debug: bool = None,
+     debug: Optional[bool] = None,
      unblock: bool = False,
      force: bool = False,
      min_seconds: int = 1,
@@ -52,7 +52,7 @@ def _pipes_lap(
      nopretty: bool = False,
      _progress: Optional['rich.progress.Progress'] = None,
      **kw: Any
- ) -> Tuple[List[meerschaum.Pipe], List[meerschaum.Pipe]]:
+ ) -> Tuple[List[mrsm.Pipe], List[mrsm.Pipe]]:
      """
      Do a lap of syncing pipes.
      """
@@ -402,11 +402,20 @@ def _wrap_pipe(
      Wrapper function for handling exceptions.
      """
      import time
+     import traceback
+     from datetime import datetime, timedelta, timezone
+     import meerschaum as mrsm
+     from meerschaum.utils.typing import is_success_tuple, SuccessTuple
      from meerschaum.connectors import get_connector_plugin
      from meerschaum.utils.venv import Venv
      from meerschaum.plugins import _pre_sync_hooks, _post_sync_hooks
      from meerschaum.utils.misc import filter_keywords
+     from meerschaum.utils.pool import get_pool
+     from meerschaum.utils.warnings import warn
+
+     pool = get_pool(workers=workers)

+     sync_timestamp = datetime.now(timezone.utc)
      sync_start = time.perf_counter()
      sync_kwargs = {k: v for k, v in kw.items() if k != 'blocking'}
      sync_kwargs.update({
@@ -415,8 +424,9 @@ def _wrap_pipe(
          'debug': debug,
          'min_seconds': min_seconds,
          'workers': workers,
-         'bounded': 'bounded',
+         'bounded': bounded,
          'chunk_interval': chunk_interval,
+         'sync_timestamp': sync_timestamp,
      })
      if not verify and not deduplicate:
          sync_method = pipe.sync
@@ -427,12 +437,32 @@ def _wrap_pipe(
          sync_kwargs['deduplicate'] = deduplicate
      sync_kwargs['sync_method'] = sync_method

-     for module_name, pre_sync_hooks in _pre_sync_hooks.items():
-         plugin_name = module_name.split('.')[-1] if module_name.startswith('plugins.') else None
+     def call_sync_hook(plugin_name: str, sync_hook) -> SuccessTuple:
          plugin = mrsm.Plugin(plugin_name) if plugin_name else None
-         with Venv(plugin):
-             for pre_sync_hook in pre_sync_hooks:
-                 _ = pre_sync_hook(pipe, **filter_keywords(pre_sync_hook, **sync_kwargs))
+         with mrsm.Venv(plugin):
+             try:
+                 sync_hook_result = sync_hook(pipe, **filter_keywords(sync_hook, **sync_kwargs))
+                 if is_success_tuple(sync_hook_result):
+                     return sync_hook_result
+             except Exception as e:
+                 msg = (
+                     f"Failed to execute sync hook '{sync_hook.__name__}' "
+                     + f"from plugin '{plugin}':\n{traceback.format_exc()}"
+                 )
+                 warn(msg, stack=False)
+                 return False, msg
+         return True, "Success"
+
+     hook_results = []
+     def apply_hooks(is_pre_sync: bool):
+         _sync_hooks = (_pre_sync_hooks if is_pre_sync else _post_sync_hooks)
+         for module_name, sync_hooks in _sync_hooks.items():
+             plugin_name = module_name.split('.')[-1] if module_name.startswith('plugins.') else None
+             for sync_hook in sync_hooks:
+                 hook_result = pool.apply_async(call_sync_hook, (plugin_name, sync_hook))
+                 hook_results.append(hook_result)
+
+     apply_hooks(True)

      try:
          with Venv(get_connector_plugin(pipe.connector), debug=debug):
@@ -444,18 +474,16 @@ def _wrap_pipe(
          return_tuple = (False, f"Failed to sync {pipe} with exception:" + "\n" + str(e))

      duration = time.perf_counter() - sync_start
-     sync_kwargs['duration'] = duration
-     for module_name, post_sync_hooks in _post_sync_hooks.items():
-         plugin_name = module_name.split('.')[-1] if module_name.startswith('plugins.') else None
-         plugin = mrsm.Plugin(plugin_name) if plugin_name else None
-         with Venv(plugin):
-             for post_sync_hook in post_sync_hooks:
-                 _ = post_sync_hook(
-                     pipe,
-                     return_tuple,
-                     **filter_keywords(post_sync_hook, **sync_kwargs)
-                 )
-
+     sync_kwargs.update({
+         'success_tuple': return_tuple,
+         'sync_duration': duration,
+         'sync_complete_timestamp': datetime.now(timezone.utc),
+     })
+     apply_hooks(False)
+     for hook_result in hook_results:
+         hook_success, hook_msg = hook_result.get()
+         mrsm.pprint((hook_success, hook_msg))
+
      return return_tuple


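Note: the hunks above change how sync hooks are dispatched. `_wrap_pipe()` now submits each registered hook to a worker pool via `pool.apply_async()`, passes the new keyword arguments (`sync_timestamp`, `success_tuple`, `sync_duration`, `sync_complete_timestamp`), and prints any `SuccessTuple` a hook returns. As an illustration only (not part of this diff), a plugin hook compatible with the new keywords might look like the sketch below; the `@post_sync_hook` decorator name is assumed here as the mechanism that populates `_post_sync_hooks`.

import meerschaum as mrsm
from meerschaum.plugins import post_sync_hook
from meerschaum.utils.typing import SuccessTuple

@post_sync_hook
def log_sync_result(
    pipe: mrsm.Pipe,
    success_tuple: SuccessTuple = (False, "No result."),
    sync_duration: float = 0.0,
    **kwargs
) -> SuccessTuple:
    # Returning a SuccessTuple lets _wrap_pipe() print this alongside the sync result.
    success, msg = success_tuple
    return True, f"{pipe} synced in {sync_duration:.2f} seconds: {msg}"
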
@@ -126,7 +126,6 @@ default_shell_config = {
      'timeout' : 60,
      'max_history' : 1000,
      'clear_screen' : True,
-     'cmd' : default_cmd,
      'bottom_toolbar' : {
          'enabled' : True,
      },
@@ -2,4 +2,4 @@
  Specify the Meerschaum release version.
  """

- __version__ = "2.1.5"
+ __version__ = "2.1.7"
@@ -174,9 +174,6 @@ def get_pipe_metadef(
      )


-     if 'order by' in definition.lower() and 'over' not in definition.lower():
-         error("Cannot fetch with an ORDER clause in the definition")
-
      apply_backtrack = begin == '' and check_existing
      backtrack_interval = pipe.get_backtrack_interval(check_existing=check_existing, debug=debug)
      btm = (
@@ -308,9 +305,9 @@ def _simple_fetch_query(pipe, debug: bool=False, **kw) -> str:
      def_name = 'definition'
      definition = get_pipe_query(pipe)
      return (
-         f"WITH {def_name} AS ({definition}) SELECT * FROM {def_name}"
+         f"WITH {def_name} AS (\n{definition}\n) SELECT * FROM {def_name}"
          if pipe.connector.flavor not in ('mysql', 'mariadb')
-         else f"SELECT * FROM ({definition}) AS {def_name}"
+         else f"SELECT * FROM (\n{definition}\n) AS {def_name}"
      )

  def _join_fetch_query(
@@ -363,10 +360,10 @@ def _join_fetch_query(
              )
              + f") AS {id_remote_name}, "
              + dateadd_str(
-                 flavor=pipe.connector.flavor,
-                 begin=_st,
-                 datepart='minute',
-                 number=pipe.parameters.get('fetch', {}).get('backtrack_minutes', 0)
+                 flavor = pipe.connector.flavor,
+                 begin = _st,
+                 datepart = 'minute',
+                 number = pipe.parameters.get('fetch', {}).get('backtrack_minutes', 0)
              ) + " AS " + dt_remote_name + "\nUNION ALL\n"
          )
      _sync_times_q = _sync_times_q[:(-1 * len('UNION ALL\n'))] + ")"
@@ -374,13 +371,13 @@ def _join_fetch_query(
      definition = get_pipe_query(pipe)
      query = (
          f"""
-         WITH definition AS ({definition}){_sync_times_q}
+         WITH definition AS (\n{definition}\n){_sync_times_q}
          SELECT definition.*
          FROM definition"""
          if pipe.connector.flavor not in ('mysql', 'mariadb')
          else (
              f"""
-             SELECT * FROM ({definition}) AS definition"""
+             SELECT * FROM (\n{definition}\n) AS definition"""
          )
      ) + f"""
      LEFT OUTER JOIN {sync_times_remote_name} AS st
@@ -385,7 +385,13 @@ get_create_index_queries(
      -------
      A dictionary of column names mapping to lists of queries.
      """
-     from meerschaum.utils.sql import sql_item_name, get_distinct_col_count, update_queries
+     from meerschaum.utils.sql import (
+         sql_item_name,
+         get_distinct_col_count,
+         update_queries,
+         get_null_replacement,
+         COALESCE_UNIQUE_INDEX_FLAVORS,
+     )
      from meerschaum.config import get_config
      index_queries = {}

@@ -497,15 +503,37 @@ def get_create_index_queries(
              if ix and ix in existing_cols_types
          ]
      )
+     coalesce_indices_cols_str = ', '.join(
+         [
+             (
+                 "COALESCE("
+                 + sql_item_name(ix, self.flavor)
+                 + ", "
+                 + get_null_replacement(existing_cols_types[ix], self.flavor)
+                 + ") "
+             ) if ix_key != 'datetime' else (sql_item_name(ix, self.flavor))
+             for ix_key, ix in pipe.columns.items()
+             if ix and ix in existing_cols_types
+         ]
+     )
+     unique_index_name = sql_item_name(pipe.target + '_unique_index', self.flavor)
      constraint_name = sql_item_name(pipe.target + '_constraint', self.flavor)
-     constraint_query = (
+     add_constraint_query = (
          f"ALTER TABLE {_pipe_name} ADD CONSTRAINT {constraint_name} UNIQUE ({indices_cols_str})"
-         if self.flavor != 'sqlite'
-         else f"CREATE UNIQUE INDEX {constraint_name} ON {_pipe_name} ({indices_cols_str})"
      )
+     unique_index_cols_str = (
+         indices_cols_str
+         if self.flavor not in COALESCE_UNIQUE_INDEX_FLAVORS
+         else coalesce_indices_cols_str
+     )
+     create_unique_index_query = (
+         f"CREATE UNIQUE INDEX {unique_index_name} ON {_pipe_name} ({unique_index_cols_str})"
+     )
+     constraint_queries = [create_unique_index_query]
+     if self.flavor != 'sqlite':
+         constraint_queries.append(add_constraint_query)
      if upsert and indices_cols_str:
-         index_queries[constraint_name] = [constraint_query]
-
+         index_queries[unique_index_name] = constraint_queries
      return index_queries


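Note: for flavors in the new COALESCE_UNIQUE_INDEX_FLAVORS set, the unique index wraps each non-datetime index column in COALESCE(col, <null replacement>) so rows with NULL index values still collide on upsert. As a rough illustration only (hypothetical table and column names; actual quoting and replacement values come from sql_item_name() and get_null_replacement()):

# Approximate shape of the generated DDL for a pipe with indices
# 'datetime' -> 'dt' and 'id' -> 'station_id' on a 'plugin_temps' table.
create_unique_index_query = (
    'CREATE UNIQUE INDEX "plugin_temps_unique_index" ON "plugin_temps" '
    '("dt", COALESCE("station_id", -9999))'   # placeholder null replacement
)
print(create_unique_index_query)
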
@@ -1074,7 +1102,7 @@ def get_pipe_attributes(
  def sync_pipe(
      self,
      pipe: mrsm.Pipe,
-     df: Union[pandas.DataFrame, str, Dict[Any, Any], None] = None,
+     df: Union[pd.DataFrame, str, Dict[Any, Any], None] = None,
      begin: Optional[datetime] = None,
      end: Optional[datetime] = None,
      chunksize: Optional[int] = -1,
@@ -1154,7 +1182,12 @@ def sync_pipe(
          dprint("Fetched data:\n" + str(df))

      if not isinstance(df, pd.DataFrame):
-         df = pipe.enforce_dtypes(df, chunksize=chunksize, debug=debug)
+         df = pipe.enforce_dtypes(
+             df,
+             chunksize = chunksize,
+             safe_copy = kw.get('safe_copy', False),
+             debug = debug,
+         )

      ### if table does not exist, create it with indices
      is_new = False
@@ -1198,6 +1231,7 @@ def sync_pipe(
      upsert = pipe.parameters.get('upsert', False) and (self.flavor + '-upsert') in update_queries
      if upsert:
          check_existing = False
+         kw['safe_copy'] = kw.get('safe_copy', False)

      unseen_df, update_df, delta_df = (
          pipe.filter_existing(
@@ -1286,7 +1320,11 @@ def sync_pipe(
          temp_pipe = Pipe(
              pipe.connector_keys.replace(':', '_') + '_', pipe.metric_key, pipe.location_key,
              instance = pipe.instance_keys,
-             columns = pipe.columns,
+             columns = {
+                 ix_key: ix
+                 for ix_key, ix in pipe.columns.items()
+                 if ix and ix in update_df.columns
+             },
              dtypes = pipe.dtypes,
              target = temp_target,
              temporary = True,
@@ -214,22 +214,6 @@ def create_tables(
      from meerschaum.utils.sql import get_rename_table_queries, table_exists
      _tables = tables if tables is not None else get_tables(conn)

-     rename_queries = []
-     for table_key, table in _tables.items():
-         if table_exists(
-             table_key,
-             conn,
-             schema = conn.instance_schema,
-         ):
-             rename_queries.extend(get_rename_table_queries(
-                 table_key,
-                 table.name,
-                 schema = conn.instance_schema,
-                 flavor = conn.flavor,
-             ))
-     if rename_queries:
-         conn.exec_queries(rename_queries)
-
      try:
          conn.metadata.create_all(bind=conn.engine)
      except Exception as e:
@@ -8,7 +8,7 @@ Retrieve Pipes' data from instances.

  from __future__ import annotations
  from datetime import datetime, timedelta
- from meerschaum.utils.typing import Optional, Dict, Any, Union, Generator, List, Tuple
+ from meerschaum.utils.typing import Optional, Dict, Any, Union, Generator, List, Tuple, Iterator
  from meerschaum.config import get_config

  def get_data(
@@ -247,7 +247,7 @@ def _get_data_as_iterator(
      fresh: bool = False,
      debug: bool = False,
      **kw: Any
- ) -> Generator['pd.DataFrame']:
+ ) -> Iterator['pd.DataFrame']:
      """
      Return a pipe's data as a generator.
      """
@@ -267,16 +267,17 @@ def _get_data_as_iterator(

      _ = kw.pop('as_chunks', None)
      _ = kw.pop('as_iterator', None)
+     dt_col = self.columns.get('datetime', None)
      min_dt = (
          begin
          if begin is not None
          else self.get_sync_time(round_down=False, newest=False, params=params, debug=debug)
-     )
+     ) if dt_col else None
      max_dt = (
          end
          if end is not None
          else self.get_sync_time(round_down=False, newest=True, params=params, debug=debug)
-     )
+     ) if dt_col else None

      ### We want to search just past the maximum value.
      if end is None:
@@ -14,6 +14,7 @@ def enforce_dtypes(
      self,
      df: 'pd.DataFrame',
      chunksize: Optional[int] = -1,
+     safe_copy: bool = True,
      debug: bool = False,
  ) -> 'pd.DataFrame':
      """
@@ -71,7 +72,7 @@ def enforce_dtypes(
          )
          return df

-     return _enforce_dtypes(df, pipe_dtypes, debug=debug)
+     return _enforce_dtypes(df, pipe_dtypes, safe_copy=safe_copy, debug=debug)


  def infer_dtypes(self, persist: bool=False, debug: bool=False) -> Dict[str, Any]:
@@ -12,6 +12,7 @@ import json
  import time
  import threading
  import multiprocessing
+ import functools
  from datetime import datetime, timedelta

  from meerschaum.utils.typing import (
@@ -518,6 +519,8 @@ def exists(
  def filter_existing(
      self,
      df: 'pd.DataFrame',
+     safe_copy: bool = True,
+     date_bound_only: bool = False,
      chunksize: Optional[int] = -1,
      debug: bool = False,
      **kw
@@ -530,6 +533,14 @@ def filter_existing(
      df: 'pd.DataFrame'
          The dataframe to inspect and filter.

+     safe_copy: bool, default True
+         If `True`, create a copy before comparing and modifying the dataframes.
+         Setting to `False` may mutate the DataFrames.
+         See `meerschaum.utils.dataframe.filter_unseen_df`.
+
+     date_bound_only: bool, default False
+         If `True`, only use the datetime index to fetch the sample dataframe.
+
      chunksize: Optional[int], default -1
          The `chunksize` used when fetching existing data.

@@ -567,7 +578,8 @@ def filter_existing(
      else:
          merge = pd.merge
          NA = pd.NA
-
+     if df is None:
+         return df, df, df
      if (df.empty if not is_dask else len(df) == 0):
          return df, df, df

@@ -617,7 +629,7 @@ def filter_existing(
              traceback.print_exc()
              max_dt = None

-     if not ('datetime' in str(type(max_dt))) or str(min_dt) == 'NaT':
+     if ('datetime' not in str(type(max_dt))) or str(min_dt) == 'NaT':
          if 'int' not in str(type(max_dt)).lower():
              max_dt = None

@@ -645,7 +657,7 @@ def filter_existing(
          col: df[col].unique()
          for col in self.columns
          if col in df.columns and col != dt_col
-     }
+     } if not date_bound_only else {}
      filter_params_index_limit = get_config('pipes', 'sync', 'filter_params_index_limit')
      _ = kw.pop('params', None)
      params = {
@@ -655,7 +667,7 @@ def filter_existing(
          ]
          for col, unique_vals in unique_index_vals.items()
          if len(unique_vals) <= filter_params_index_limit
-     }
+     } if not date_bound_only else {}

      if debug:
          dprint(f"Looking at data between '{begin}' and '{end}':", **kw)
@@ -698,18 +710,23 @@ def filter_existing(
                  col: to_pandas_dtype(typ)
                  for col, typ in self_dtypes.items()
              },
+             safe_copy = safe_copy,
              debug = debug
          ),
          on_cols_dtypes,
      )

      ### Cast dicts or lists to strings so we can merge.
+     serializer = functools.partial(json.dumps, sort_keys=True, separators=(',', ':'), default=str)
+     def deserializer(x):
+         return json.loads(x) if isinstance(x, str) else x
+
      unhashable_delta_cols = get_unhashable_cols(delta_df)
      unhashable_backtrack_cols = get_unhashable_cols(backtrack_df)
      for col in unhashable_delta_cols:
-         delta_df[col] = delta_df[col].apply(json.dumps)
+         delta_df[col] = delta_df[col].apply(serializer)
      for col in unhashable_backtrack_cols:
-         backtrack_df[col] = backtrack_df[col].apply(json.dumps)
+         backtrack_df[col] = backtrack_df[col].apply(serializer)
      casted_cols = set(unhashable_delta_cols + unhashable_backtrack_cols)

      joined_df = merge(
@@ -722,13 +739,9 @@ def filter_existing(
      ) if on_cols else delta_df
      for col in casted_cols:
          if col in joined_df.columns:
-             joined_df[col] = joined_df[col].apply(
-                 lambda x: (
-                     json.loads(x)
-                     if isinstance(x, str)
-                     else x
-                 )
-             )
+             joined_df[col] = joined_df[col].apply(deserializer)
+         if col in delta_df.columns:
+             delta_df[col] = delta_df[col].apply(deserializer)

      ### Determine which rows are completely new.
      new_rows_mask = (joined_df['_merge'] == 'left_only') if on_cols else None
@@ -7,9 +7,10 @@ Utility functions for working with DataFrames.
  """

  from __future__ import annotations
+ from datetime import datetime
  from meerschaum.utils.typing import (
      Optional, Dict, Any, List, Hashable, Generator,
-     Iterator, Iterable, Union,
+     Iterator, Iterable, Union, Tuple,
  )


@@ -71,6 +72,7 @@ def add_missing_cols_to_df(df: 'pd.DataFrame', dtypes: Dict[str, Any]) -> pd.Dat
  def filter_unseen_df(
      old_df: 'pd.DataFrame',
      new_df: 'pd.DataFrame',
+     safe_copy: bool = True,
      dtypes: Optional[Dict[str, Any]] = None,
      debug: bool = False,
  ) -> 'pd.DataFrame':
@@ -84,6 +86,10 @@ def filter_unseen_df(

      new_df: 'pd.DataFrame'
          The fetched (source) dataframe. Rows that are contained in `old_df` are removed.
+
+     safe_copy: bool, default True
+         If `True`, create a copy before comparing and modifying the dataframes.
+         Setting to `False` may mutate the DataFrames.

      dtypes: Optional[Dict[str, Any]], default None
          Optionally specify the datatypes of the dataframe.
@@ -111,6 +117,10 @@ def filter_unseen_df(
      if old_df is None:
          return new_df

+     if safe_copy:
+         old_df = old_df.copy()
+         new_df = new_df.copy()
+
      import json
      import functools
      import traceback
@@ -118,6 +128,7 @@ def filter_unseen_df(
      from meerschaum.utils.warnings import warn
      from meerschaum.utils.packages import import_pandas, attempt_import
      from meerschaum.utils.dtypes import to_pandas_dtype, are_dtypes_equal, attempt_cast_to_numeric
+     from meerschaum.utils.debug import dprint
      pd = import_pandas(debug=debug)
      is_dask = 'dask' in new_df.__module__
      if is_dask:
@@ -243,12 +254,7 @@ def filter_unseen_df(
          indicator = True,
      )
      changed_rows_mask = (joined_df['_merge'] == 'left_only')
-
-     delta_df = joined_df[
-         list(new_df_dtypes.keys())
-     ][
-         changed_rows_mask
-     ].reset_index(drop=True)
+     delta_df = joined_df[list(new_df_dtypes.keys())][changed_rows_mask].reset_index(drop=True)

      for json_col in json_cols:
          if json_col not in delta_df.columns:
@@ -535,6 +541,8 @@ def get_numeric_cols(df: 'pd.DataFrame') -> List[str]:
  def enforce_dtypes(
      df: 'pd.DataFrame',
      dtypes: Dict[str, str],
+     safe_copy: bool = True,
+     coerce_numeric: bool = True,
      debug: bool = False,
  ) -> 'pd.DataFrame':
      """
@@ -548,6 +556,14 @@ def enforce_dtypes(
      dtypes: Dict[str, str]
          The data types to attempt to enforce on the DataFrame.

+     safe_copy: bool, default True
+         If `True`, create a copy before comparing and modifying the dataframes.
+         Setting to `False` may mutate the DataFrames.
+         See `meerschaum.utils.dataframe.filter_unseen_df`.
+
+     coerce_numeric: bool, default True
+         If `True`, convert float and int collisions to numeric.
+
      debug: bool, default False
          Verbosity toggle.

@@ -569,6 +585,8 @@ def enforce_dtypes(
          is_dtype_numeric,
          attempt_cast_to_numeric,
      )
+     if safe_copy:
+         df = df.copy()
      df_dtypes = {c: str(t) for c, t in df.dtypes.items()}
      if len(df_dtypes) == 0:
          if debug:
@@ -674,7 +692,7 @@ def enforce_dtypes(
              explicitly_numeric
              or col in df_numeric_cols
              or (mixed_numeric_types and not explicitly_float)
-         )
+         ) and coerce_numeric
          if cast_to_numeric:
              common_dtypes[col] = attempt_cast_to_numeric
              common_diff_dtypes[col] = attempt_cast_to_numeric
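Note: the `safe_copy` flag added above lets callers skip the defensive `df.copy()` when they are willing to let `enforce_dtypes()` and `filter_unseen_df()` mutate the input frame (as `sync_pipe()` now does for upserts), and `coerce_numeric=False` skips promoting float/int collisions. A minimal usage sketch against the signatures shown in this hunk:

import pandas as pd
from meerschaum.utils.dataframe import enforce_dtypes

df = pd.DataFrame({'a': ['1', '2', None]})

# Default behavior: the input frame is copied before casting.
casted = enforce_dtypes(df, {'a': 'Int64'})

# safe_copy=False avoids the copy and may mutate `df` in place;
# coerce_numeric=False leaves mixed float/int columns alone.
casted_in_place = enforce_dtypes(df, {'a': 'Int64'}, safe_copy=False, coerce_numeric=False)
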
@@ -860,3 +878,160 @@ def get_first_valid_dask_partition(ddf: 'dask.dataframe.DataFrame') -> Union['pd
      if len(pdf) > 0:
          return pdf
      return ddf.compute()
+
+
+ def query_df(
+     df: 'pd.DataFrame',
+     params: Optional[Dict[str, Any]] = None,
+     begin: Union[datetime, int, None] = None,
+     end: Union[datetime, int, None] = None,
+     datetime_column: Optional[str] = None,
+     select_columns: Optional[List[str]] = None,
+     omit_columns: Optional[List[str]] = None,
+     inplace: bool = False,
+     reset_index: bool = False,
+     debug: bool = False,
+ ) -> 'pd.DataFrame':
+     """
+     Query the dataframe with the params dictionary.
+
+     Parameters
+     ----------
+     df: pd.DataFrame
+         The DataFrame to query against.
+
+     params: Optional[Dict[str, Any]], default None
+         The parameters dictionary to use for the query.
+
+     begin: Union[datetime, int, None], default None
+         If `begin` and `datetime_column` are provided, only return rows with a timestamp
+         greater than or equal to this value.
+
+     end: Union[datetime, int, None], default None
+         If `begin` and `datetime_column` are provided, only return rows with a timestamp
+         less than this value.
+
+     datetime_column: Optional[str], default None
+         A `datetime_column` must be provided to use `begin` and `end`.
+
+     select_columns: Optional[List[str]], default None
+         If provided, only return these columns.
+
+     omit_columns: Optional[List[str]], default None
+         If provided, do not include these columns in the result.
+
+     inplace: bool, default False
+         If `True`, modify the DataFrame inplace rather than creating a new DataFrame.
+
+     reset_index: bool, default True
+         If `True`, reset the index in the resulting DataFrame.
+
+     Returns
+     -------
+     A Pandas DataFrame query result.
+     """
+     if not params and not begin and not end:
+         return df
+
+     import json
+     import meerschaum as mrsm
+     from meerschaum.utils.debug import dprint
+     from meerschaum.utils.misc import get_in_ex_params
+     from meerschaum.utils.warnings import warn
+
+     dtypes = {col: str(typ) for col, typ in df.dtypes.items()}
+
+     if begin or end:
+         if not datetime_column or datetime_column not in df.columns:
+             warn(
+                 f"The datetime column '{datetime_column}' is not present in the Dataframe, "
+                 + "ignoring begin and end...",
+             )
+             begin, end = None, None
+
+     if debug:
+         dprint(f"Querying dataframe:\n{params=} {begin=} {end=} {datetime_column=}")
+
+     in_ex_params = get_in_ex_params(params)
+
+     def serialize(x: Any) -> str:
+         if isinstance(x, (dict, list, tuple)):
+             return json.dumps(x, sort_keys=True, separators=(',', ':'), default=str)
+         if hasattr(x, 'isoformat'):
+             return x.isoformat()
+         return str(x)
+
+     masks = [
+         (
+             (df[datetime_column] >= begin)
+             if begin is not None and datetime_column
+             else True
+         ) & (
+             (df[datetime_column] < end)
+             if end is not None and datetime_column
+             else True
+         )
+     ]
+
+     masks.extend([
+         (
+             (
+                 df[col].apply(serialize).isin(
+                     [
+                         serialize(_in_val)
+                         for _in_val in in_vals
+                     ]
+                 ) if in_vals else True
+             ) & (
+                 ~df[col].apply(serialize).isin(
+                     [
+                         serialize(_ex_val)
+                         for _ex_val in ex_vals
+                     ]
+                 ) if ex_vals else True
+             )
+         )
+         for col, (in_vals, ex_vals) in in_ex_params.items()
+         if col in df.columns
+     ])
+     query_mask = masks[0]
+     for mask in masks:
+         query_mask = query_mask & mask
+
+     if inplace:
+         df.where(query_mask, inplace=inplace)
+         df.dropna(how='all', inplace=inplace)
+         result_df = df
+     else:
+         result_df = df.where(query_mask).dropna(how='all')
+
+     if reset_index:
+         result_df.reset_index(drop=True, inplace=True)
+
+     result_df = enforce_dtypes(
+         result_df,
+         dtypes,
+         safe_copy = (not inplace),
+         debug = debug,
+         coerce_numeric = False,
+     )
+
+     if select_columns == ['*']:
+         select_columns = None
+
+     if not select_columns and not omit_columns:
+         return result_df
+
+     if select_columns:
+         for col in list(result_df.columns):
+             if col not in select_columns:
+                 del result_df[col]
+         return result_df
+
+     if omit_columns:
+         for col in list(result_df.columns):
+             if col in omit_columns:
+                 del result_df[col]
+     if debug:
+         dprint(f"{dtypes=}")
+     return result_df
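Note: the new `meerschaum.utils.dataframe.query_df()` shown above filters a DataFrame with the same `params` semantics used elsewhere (an underscore prefix negates a value, via `get_in_ex_params()`), plus optional `begin`/`end` bounds on a datetime column. A small usage sketch:

from datetime import datetime
import pandas as pd
from meerschaum.utils.dataframe import query_df

df = pd.DataFrame({
    'dt': [datetime(2023, 1, 1), datetime(2023, 1, 2), datetime(2023, 1, 3)],
    'color': ['red', 'blue', 'green'],
    'value': [1.0, 2.0, 3.0],
})

# Keep 'red' and 'green' rows ('_blue' negates), bounded to [2023-01-01, 2023-01-03).
result = query_df(
    df,
    params = {'color': ['red', 'green', '_blue']},
    begin = datetime(2023, 1, 1),
    end = datetime(2023, 1, 3),
    datetime_column = 'dt',
    select_columns = ['dt', 'color'],
)
# -> one row: 2023-01-01 / 'red'
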
@@ -9,7 +9,7 @@ Formatting functions for printing pipes
  from __future__ import annotations
  import json
  import meerschaum as mrsm
- from meerschaum.utils.typing import PipesDict, Dict, Union, Optional, SuccessTuple, Any
+ from meerschaum.utils.typing import PipesDict, Dict, Union, Optional, SuccessTuple, Any, List
  from meerschaum.config import get_config

  def pprint_pipes(pipes: PipesDict) -> None:
@@ -481,22 +481,54 @@ def print_pipes_results(
      )


- def extract_stats_from_message(message: str) -> Dict[str, int]:
+ def extract_stats_from_message(
+     message: str,
+     stat_keys: Optional[List[str]] = None,
+ ) -> Dict[str, int]:
      """
-     Given a sync message, return the insert, update stats from within.
+     Given a sync message, return the insert, update, upsert stats from within.
+
+     Parameters
+     ----------
+     message: str
+         The message to parse for statistics.
+
+     stat_keys: Optional[List[str]], default None
+         If provided, search for these words (case insensitive) in the message.
+         Defaults to `['inserted', 'updated', 'upserted']`.
+
+     Returns
+     -------
+     A dictionary mapping the stat keys to the total number of rows affected.
      """
-     stats = {
-         'inserted': 0,
-         'updated': 0,
-         'upserted': 0,
+     stat_keys = stat_keys or ['inserted', 'updated', 'upserted']
+     lines_stats = [extract_stats_from_line(line, stat_keys) for line in message.split('\n')]
+     message_stats = {
+         stat_key: sum(stats.get(stat_key, 0) for stats in lines_stats)
+         for stat_key in stat_keys
      }
+     return message_stats

-     for search_key in list(stats.keys()):
-         if search_key not in message.lower():
+
+ def extract_stats_from_line(
+     line: str,
+     stat_keys: List[str],
+ ) -> Dict[str, int]:
+     """
+     Return the insert, update, upsert stats from a single line.
+     """
+     stats = {key: 0 for key in stat_keys}
+
+     for stat_key in stat_keys:
+         search_key = stat_key.lower()
+         if search_key not in line.lower():
              continue

          ### stat_text starts with the digits we want.
-         stat_text = message.lower().split(search_key + ' ')[1]
+         try:
+             stat_text = line.lower().split(search_key + ' ')[1]
+         except IndexError:
+             continue

          ### find the first non-digit value.
          end_of_num_ix = -1
@@ -504,6 +536,8 @@ def extract_stats_from_message(message: str) -> Dict[str, int]:
              if not char.isdigit():
                  end_of_num_ix = i
                  break
+             if i == len(stat_text) - 1:
+                 end_of_num_ix = i + 1
          if end_of_num_ix == -1:
              continue

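Note: as restructured above, `extract_stats_from_message()` now sums per-line counts, so multi-chunk sync messages report totals instead of only the first match. For illustration (importing from the private `meerschaum.utils.formatting._pipes` module where these helpers live):

from meerschaum.utils.formatting._pipes import extract_stats_from_message

message = (
    "Inserted 100 rows into 'plugin_temps'.\n"
    "Updated 5 rows.\n"
    "Inserted 50 rows into 'plugin_temps'."
)
stats = extract_stats_from_message(message)
# -> totals per stat key, e.g. {'inserted': 150, 'updated': 5, 'upserted': 0}
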
meerschaum/utils/misc.py CHANGED
@@ -1234,7 +1234,7 @@ def truncate_string_sections(item: str, delimeter: str = '_', max_len: int = 128


  def separate_negation_values(
-     vals: List[str],
+     vals: Union[List[str], Tuple[str]],
      negation_prefix: Optional[str] = None,
  ) -> Tuple[List[str], List[str]]:
      """
@@ -1243,7 +1243,7 @@ def separate_negation_values(

      Parameters
      ----------
-     vals: List[str]
+     vals: Union[List[str], Tuple[str]]
          A list of strings to parse.

      negation_prefix: Optional[str], default None
@@ -1263,6 +1263,38 @@ def separate_negation_values(
      return _in_vals, _ex_vals


+ def get_in_ex_params(params: Optional[Dict[str, Any]]) -> Dict[str, Tuple[List[Any], List[Any]]]:
+     """
+     Translate a params dictionary into lists of include- and exclude-values.
+
+     Parameters
+     ----------
+     params: Optional[Dict[str, Any]]
+         A params query dictionary.
+
+     Returns
+     -------
+     A dictionary mapping keys to a tuple of lists for include and exclude values.
+
+     Examples
+     --------
+     >>> get_in_ex_params({'a': ['b', 'c', '_d', 'e', '_f']})
+     {'a': (['b', 'c', 'e'], ['d', 'f'])}
+     """
+     if not params:
+         return {}
+     return {
+         col: separate_negation_values(
+             (
+                 val
+                 if isinstance(val, (list, tuple))
+                 else [val]
+             )
+         )
+         for col, val in params.items()
+     }
+
+
  def flatten_list(list_: List[Any]) -> List[Any]:
      """
      Recursively flatten a list.
@@ -350,6 +350,7 @@ def determine_version(
      with _locks['import_versions']:
          if venv not in import_versions:
              import_versions[venv] = {}
+     import importlib.metadata
      import re, os
      old_cwd = os.getcwd()
      if debug:
@@ -1379,13 +1380,13 @@ def get_modules_from_package(
      Returns
      -------
      Either list of modules or tuple of lists.
-
      """
      from os.path import dirname, join, isfile, isdir, basename
      import glob

      pattern = '*' if recursive else '*.py'
-     module_names = glob.glob(join(dirname(package.__file__), pattern), recursive=recursive)
+     package_path = dirname(package.__file__ or package.__path__[0])
+     module_names = glob.glob(join(package_path, pattern), recursive=recursive)
      _all = [
          basename(f)[:-3] if isfile(f) else basename(f)
          for f in module_names
@@ -1410,7 +1411,7 @@ def get_modules_from_package(
              modules.append(m)
          except Exception as e:
              if debug:
-                 dprint(e)
+                 dprint(str(e))
          finally:
              if modules_venvs:
                  deactivate_venv(module_name.split('.')[-1], debug=debug)
meerschaum/utils/sql.py CHANGED
@@ -38,6 +38,7 @@ version_queries = {
      'oracle': "SELECT version from PRODUCT_COMPONENT_VERSION WHERE rownum = 1",
  }
  SKIP_IF_EXISTS_FLAVORS = {'mssql', 'oracle'}
+ COALESCE_UNIQUE_INDEX_FLAVORS = {'timescaledb', 'postgresql', 'citus'}
  update_queries = {
      'default': """
      UPDATE {target_table_name} AS f
@@ -53,25 +54,25 @@ update_queries = {
      INSERT INTO {target_table_name} ({patch_cols_str})
      SELECT {patch_cols_str}
      FROM {patch_table_name}
-     ON CONFLICT ({join_cols_str}) DO UPDATE {sets_subquery_none_excluded}
+     ON CONFLICT ({join_cols_str}) DO {update_or_nothing} {sets_subquery_none_excluded}
      """,
      'postgresql-upsert': """
      INSERT INTO {target_table_name} ({patch_cols_str})
      SELECT {patch_cols_str}
      FROM {patch_table_name}
-     ON CONFLICT ({join_cols_str}) DO UPDATE {sets_subquery_none_excluded}
+     ON CONFLICT ({join_cols_str}) DO {update_or_nothing} {sets_subquery_none_excluded}
      """,
      'citus-upsert': """
      INSERT INTO {target_table_name} ({patch_cols_str})
      SELECT {patch_cols_str}
      FROM {patch_table_name}
-     ON CONFLICT ({join_cols_str}) DO UPDATE {sets_subquery_none_excluded}
+     ON CONFLICT ({join_cols_str}) DO {update_or_nothing} {sets_subquery_none_excluded}
      """,
      'cockroachdb-upsert': """
      INSERT INTO {target_table_name} ({patch_cols_str})
      SELECT {patch_cols_str}
      FROM {patch_table_name}
-     ON CONFLICT ({join_cols_str}) DO UPDATE {sets_subquery_none_excluded}
+     ON CONFLICT ({join_cols_str}) DO {update_or_nothing} {sets_subquery_none_excluded}
      """,
      'mysql': """
      UPDATE {target_table_name} AS f
@@ -122,7 +123,7 @@ update_queries = {
      SELECT {patch_cols_str}
      FROM {patch_table_name}
      WHERE true
-     ON CONFLICT ({join_cols_str}) DO UPDATE {sets_subquery_none_excluded}
+     ON CONFLICT ({join_cols_str}) DO {update_or_nothing} {sets_subquery_none_excluded}
      """,
      'sqlite_delete_insert': [
      """
@@ -1084,7 +1085,7 @@ def get_update_queries(
              for col in patch_table_columns
          ]
      )
-     join_cols_str = ','.join(
+     join_cols_str = ', '.join(
          [
              sql_item_name(col, flavor)
              for col in join_cols
@@ -1109,10 +1110,27 @@ def get_update_queries(
      if debug:
          dprint(f"value_cols: {value_cols}")

-     if not value_cols or not join_cols_types:
+     if not join_cols_types:
+         return []
+     if not value_cols and not upsert:
          return []

+     coalesce_join_cols_str = ', '.join(
+         [
+             'COALESCE('
+             + sql_item_name(c_name, flavor)
+             + ', '
+             + get_null_replacement(c_type, flavor)
+             + ')'
+             for c_name, c_type in join_cols_types
+         ]
+     )
+
+     update_or_nothing = ('UPDATE' if value_cols else 'NOTHING')
+
      def sets_subquery(l_prefix: str, r_prefix: str):
+         if not value_cols:
+             return ''
          return 'SET ' + ',\n'.join([
              (
                  l_prefix + sql_item_name(c_name, flavor, None)
@@ -1169,6 +1187,8 @@ def get_update_queries(
              patch_cols_str = patch_cols_str,
              date_bounds_subquery = date_bounds_subquery,
              join_cols_str = join_cols_str,
+             coalesce_join_cols_str = coalesce_join_cols_str,
+             update_or_nothing = update_or_nothing,
          )
          for base_query in base_queries
      ]
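Note: with `update_or_nothing`, the upsert templates above degrade to `DO NOTHING` when a pipe has only join (index) columns and no value columns to update. Roughly, a rendered 'postgresql-upsert' query would look like the following (hypothetical table and column names, shown here as a Python string for illustration only):

# Illustrative rendering of the 'postgresql-upsert' template for a pipe with
# join columns ("dt", "id") and no value columns (update_or_nothing = 'NOTHING').
rendered_upsert = """
    INSERT INTO "plugin_temps" ("dt", "id")
    SELECT "dt", "id"
    FROM "plugin_temps_patch"
    ON CONFLICT ("dt", "id") DO NOTHING
"""
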
@@ -87,3 +87,14 @@ PipesDict = Dict[
      ]
  ]
  WebState = Dict[str, str]
+
+ def is_success_tuple(x: Any) -> bool:
+     """
+     Determine whether an object is a `SuccessTuple`.
+     """
+     return (
+         isinstance(x, tuple)
+         and len(x) == 2
+         and isinstance(x[0], bool)
+         and isinstance(x[1], str)
+     )
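Note: `is_success_tuple()` is what `_wrap_pipe()` uses above to decide whether a sync hook's return value should be treated as a result. For example:

from meerschaum.utils.typing import is_success_tuple

assert is_success_tuple((True, "Success"))
assert not is_success_tuple(("Success", True))   # wrong element types/order
assert not is_success_tuple(None)
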
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: meerschaum
- Version: 2.1.5
+ Version: 2.1.7
  Summary: Sync Time-Series Pipes with Meerschaum
  Home-page: https://meerschaum.io
  Author: Bennett Meares
@@ -12,7 +12,7 @@ meerschaum/_internal/gui/app/__init__.py,sha256=rKUa8hHk6Fai-PDF61tQcpT1myxKcfmv
  meerschaum/_internal/gui/app/_windows.py,sha256=-VHdjTzA3V596fVqnbmTxemONSp_80-sTNJ0CTB8FwU,2632
  meerschaum/_internal/gui/app/actions.py,sha256=rx37qXf3uoa7Ou0n1cISqNFZNL0nr4wO7vSUmWO8f2E,935
  meerschaum/_internal/gui/app/pipes.py,sha256=4nAQ0rrHb_2bNgDF0Ru2YlbPaCDDzAl5beOGU4Af-4A,1596
- meerschaum/_internal/shell/Shell.py,sha256=34lDd4FVTyvj17L4WdcSWwN60VXu0NpiVzY45IGTWq4,33102
+ meerschaum/_internal/shell/Shell.py,sha256=zcgajt0FGH5ou0kqmOrYAvNfunOsn_CN6LHFYFXBBlI,32826
  meerschaum/_internal/shell/ShellCompleter.py,sha256=bbG-mExNXO4pltWBOXdbMp8P2wLgy8_BgipIr5aGp5s,3114
  meerschaum/_internal/shell/ValidAutoSuggest.py,sha256=bARjOWMidz0dvMelLUe6yRPto5l3gcEHYHqFDjoh22I,1280
  meerschaum/_internal/shell/__init__.py,sha256=vXQoQPEVlYiUYai1b5AwQAlTnja6A2cSABnqXhzlS7I,281
@@ -21,7 +21,7 @@ meerschaum/_internal/term/TermPageHandler.py,sha256=Rt5S47Pr_3HLJc8xIXpZUczYE_Dw
  meerschaum/_internal/term/__init__.py,sha256=xIwEWXyq1qaU7Rx-AryTtANJPdm__fy3XSMzxaFn0wU,1594
  meerschaum/_internal/term/tools.py,sha256=bpYexJBDCQXfzz6ESMvmpSHM1AIy4qWsrAHl95tSW2I,716
  meerschaum/actions/__init__.py,sha256=7CNoKEqkqqafqMcChspJX9cR9OdgEWk9ggj0000Jl98,11360
- meerschaum/actions/api.py,sha256=StCJN-gCLNTDJhGxgux_9aMdeSFDTl-Yzsx4o9imw_I,12757
+ meerschaum/actions/api.py,sha256=mWhv4bn3Ap17_Gqf2Cx9bAsHKG-Zhy072pBbNzHLEJc,12756
  meerschaum/actions/bootstrap.py,sha256=JnIyJ4odw6cA4e0Cw7J8THkLavMcj68nRyGsQDAT8nc,13396
  meerschaum/actions/clear.py,sha256=OoFZE0bK5m8s3GLNZcixuVT0DMj1izXVxGCATcmUGbI,4851
  meerschaum/actions/copy.py,sha256=8g3ANXfVdvuyaoXcZjgTg3BxHTOhHGrzVDOOsTBrpSU,6213
@@ -43,7 +43,7 @@ meerschaum/actions/sql.py,sha256=wYofwk1vGO96U2ncigGEfMtYMZeprz2FR1PRRZhkAPI,431
  meerschaum/actions/stack.py,sha256=WMRMebyYwZGNlbnj6Ja09qvCSDNteFJOTa8_joHlnVo,5886
  meerschaum/actions/start.py,sha256=mNFWqxc_o9moavvDQWE4YoZF6b-SW2nKyw5MtwIj-90,18384
  meerschaum/actions/stop.py,sha256=KTBadAmJ6SbReqlltkwfqZW6EryB4kZXupl0ZyInI0Q,4311
- meerschaum/actions/sync.py,sha256=oMRZzT7q6eUs-7UrXcTpcem59EBwftu_6ug8XCen6dk,16235
+ meerschaum/actions/sync.py,sha256=Pzj0p53Aa4ZjP3SnlUyB6OeNvVUoJ0iCfWffMIIsd0k,17266
  meerschaum/actions/tag.py,sha256=SJf5qFW0ccLXjqlTdkK_0MCcrCMdg6xhYrhKdco0hdA,3053
  meerschaum/actions/uninstall.py,sha256=2fUd5ZK45VGGCI8V4NLmSnavdKjOv7cGM22x2WlTStw,6068
  meerschaum/actions/upgrade.py,sha256=VQKyjCGioEF2FYbQmldHh21imDqApNl0xal0rhxzrJk,6302
@@ -132,9 +132,9 @@ meerschaum/config/_patch.py,sha256=21N30q1ANmWMDQ-2RUjpMx7KafWfPQ3lKx9rrMqg1s4,1
  meerschaum/config/_paths.py,sha256=ORzpEpYE5OM06H6Hw_nh51R8fkk45MyGPI7mDd1QzqI,7983
  meerschaum/config/_preprocess.py,sha256=-AEA8m_--KivZwTQ1sWN6LTn5sio_fUr2XZ51BO6wLs,1220
  meerschaum/config/_read_config.py,sha256=WFZKIXZMDe_ca0ES7ivgM_mnwShvFxLdoeisT_X5-h0,14720
- meerschaum/config/_shell.py,sha256=k6PH0BEr2imhgURLYlR5p6s5gXfYpWoyZSV29U-SsXk,3589
+ meerschaum/config/_shell.py,sha256=s74cmJl8NrhM_Y1cB_P41_JDUYXV0g4WXnKFZWMtnrY,3551
  meerschaum/config/_sync.py,sha256=Q-sz5YcjL3CJS2Dyw4rVRQsz9th9GWa9o5F9D0Jrmn8,4120
- meerschaum/config/_version.py,sha256=5u15PU7pHH1p6K_hknjy0-rs4zQUg8VbND291EX5N4I,71
+ meerschaum/config/_version.py,sha256=FZCUdRGYa70_q2wO0dpOp-Rp_YnYgZU3PX2U_SBZLB4,71
  meerschaum/config/resources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  meerschaum/config/stack/__init__.py,sha256=4a_up1oxkitwgIylWWk0vA4XkGhEpWazUaENOPEdYQI,9034
  meerschaum/config/stack/grafana/__init__.py,sha256=wzuoch_AK49lcn7lH2qTSJ_PPbSagF4lcweeipz_XiE,2010
@@ -163,31 +163,31 @@ meerschaum/connectors/sql/SQLConnector.py,sha256=dPMgJXLghzBmqHaT9qxvn8pzerQLpzk
  meerschaum/connectors/sql/__init__.py,sha256=xwSYhYuketTXhQLXyD9pZ0NNBPboW5Oqv9zrKfjx0Ic,175
  meerschaum/connectors/sql/_cli.py,sha256=XaWjWZzGIfhMiYoXAs2FrwHUGNyZpxIzH4g4xugLKsw,4123
  meerschaum/connectors/sql/_create_engine.py,sha256=PrHS5xCvqBMdCl0-AsoEw_9Dt2WrH3Wim5qKeTiLyp4,10382
- meerschaum/connectors/sql/_fetch.py,sha256=BLcZBGeE8JV6utnCgvq1TX_y6dTjX5KAvB4xm4YKxiQ,13210
+ meerschaum/connectors/sql/_fetch.py,sha256=NYYWDoEd-aGIS337KwH-D9_3KVWVCZOHAspGLfdEuUE,13086
  meerschaum/connectors/sql/_instance.py,sha256=jAGq_qjz2WyBrjtnI7FyGRLOs2l3o630vonPM5Pp4Mg,6447
- meerschaum/connectors/sql/_pipes.py,sha256=L6ldoFLhhE0ap1Yf2aETzRIBby5TbkIWUb2_-B3_4-k,99687
+ meerschaum/connectors/sql/_pipes.py,sha256=4v1-2AwW8QqOTRjNUEoDJJH8KsJa6FxMNSekEDWJu6o,100925
  meerschaum/connectors/sql/_plugins.py,sha256=hS0cuJQxwd6jUfY136AQ33dGQw_MigP_OFC84KdSMhA,8323
  meerschaum/connectors/sql/_sql.py,sha256=jNdIcwQC2ZMNKT9W6ugavQPtimUpoTr8cBt05z_vHIo,34262
  meerschaum/connectors/sql/_uri.py,sha256=0BrhQtqQdzg9mR04gWBZINs_BbPFtSlTECXT_TCUwik,3460
  meerschaum/connectors/sql/_users.py,sha256=JkD6lKYJO8JVnpVySMqPM20NWUr-XzD_JO-lLMcAD_c,10100
  meerschaum/connectors/sql/tools.py,sha256=jz8huOaRCwGlYdtGfAqAh7SoK8uydYBrasKQba9FT38,187
- meerschaum/connectors/sql/tables/__init__.py,sha256=a4eFvbvGBoL4ZaTNzl6RV1Rj90LTeHFcPPT-j51pzFo,9471
+ meerschaum/connectors/sql/tables/__init__.py,sha256=e2dALTtThqbrq0soMNQ9QwgccyfTAjOrFkEClstLp3A,9001
  meerschaum/connectors/sql/tables/types.py,sha256=Jc_MTHIBM-KHpQt__Lckp39CeOo7tGOiAk5faDx-znY,1573
  meerschaum/core/__init__.py,sha256=tjASW10n9uLV6bYhcwP4rggh-ESXSJzgxpSBbVsuISs,251
  meerschaum/core/Pipe/__init__.py,sha256=1QOHqyUX2uJnfihVoGQV30BLm7JZZX2qjbsYIpCNFGI,16230
  meerschaum/core/Pipe/_attributes.py,sha256=XbSHfDsomaNymzf7-__UhbHqu6mlTTx20xprsw_L04I,13202
  meerschaum/core/Pipe/_bootstrap.py,sha256=sTbHUX8V0Kfn6vEErXzsVslSjQNfQ5MxXxxuRYslr4w,7613
  meerschaum/core/Pipe/_clear.py,sha256=hQVPztHiadzLB0c4_yFg6EETnf9MtFdJDCpO41Giuco,2261
- meerschaum/core/Pipe/_data.py,sha256=K4sZAPwBB8kb9mr5NqgQcCfgX1u-iYZ06uEPSJx5BqE,20783
+ meerschaum/core/Pipe/_data.py,sha256=uafKY7GGD5uGQ__-YcKlWOnRatAp1ExSbvz06LWPTsA,20880
  meerschaum/core/Pipe/_deduplicate.py,sha256=hXeNhz8p6Zgz2Y-A2W5AZPkVFUgsLqNvTQKKL0JHJ0A,10267
  meerschaum/core/Pipe/_delete.py,sha256=1geNp9BgrocXP1gt76dMbnlJWKYFMuSNqPFA4K4-hXE,2118
  meerschaum/core/Pipe/_drop.py,sha256=uf3MvMkCw9tVfJ2fuo8LqZ4vvMNa3xC3YoFGEuc-hH8,1052
- meerschaum/core/Pipe/_dtypes.py,sha256=RIJ5KwPWawV1zIwmsLz3lF33NZ-6zICf7XPscmaigB8,3641
+ meerschaum/core/Pipe/_dtypes.py,sha256=NcfVMJPyyA1VzIJOnPhyL36YTt2IXMRqQy4ZRDySSqg,3694
  meerschaum/core/Pipe/_edit.py,sha256=ZH2A0ZOpZKsVDnQxKzmXspNQKTEFUhkkZDjwOkmWtaY,8471
  meerschaum/core/Pipe/_fetch.py,sha256=zV3DzD7kfx08O6zda0I-9cX91m0Z_wO_on1ozHaqWnk,5234
  meerschaum/core/Pipe/_register.py,sha256=Sd5xaAW8H7uLTIoommcKb-6kHPRuHJLWNSbPnt2UbvA,2240
  meerschaum/core/Pipe/_show.py,sha256=nG50y8eBT9TVuKkRgAKtNDNIxysJvMNxfu__lkL1F9k,1352
- meerschaum/core/Pipe/_sync.py,sha256=761pNmLrH9wuemE2ZNqa1GtX-ZJeZ18aIEUQevB1Tvo,27330
+ meerschaum/core/Pipe/_sync.py,sha256=48qk1xvkcKOqfzzYf3QdA7ojsP80yrso1YU8QIGJxwE,28038
  meerschaum/core/Pipe/_verify.py,sha256=KSnthUzImRLjt9fxyBaLvArqDuOLRpKBfk0tnseJClc,14262
  meerschaum/core/Plugin/__init__.py,sha256=UXg64EvJPgI1PCxkY_KM02-ZmBm4FZpLPIQR_uSJJDc,137
  meerschaum/core/User/_User.py,sha256=waVdpH4SFZSXNYBgX5KFQ8csbCSxRLI5T2efAzVONks,2448
@@ -196,18 +196,18 @@ meerschaum/plugins/_Plugin.py,sha256=f0RDPazwzYSTZmD21rxwF48abUeVyFWKggBC9cWR7sw
  meerschaum/plugins/__init__.py,sha256=pVRgbBk1UMlqLrM5p1s7_x_mN70epdDBZOa4vrt6C6w,20760
  meerschaum/utils/__init__.py,sha256=QrK1K9hIbPCRCM5k2nZGFqGnrqhA0Eh-iSmCU7FG6Cs,612
  meerschaum/utils/_get_pipes.py,sha256=dlPckpYYyM0IwRZ2VL0u9DiEeYhr5Ho9gkzvWxzNVwI,11460
- meerschaum/utils/dataframe.py,sha256=wqSmyD_UAnGJYWVYF9obD1T_zW0ioFE4tmbVwe4Okps,26479
+ meerschaum/utils/dataframe.py,sha256=vxZ72ME7IWuadtktgjFZF5bc9fXW_0TuynjFlJljlLU,31955
  meerschaum/utils/debug.py,sha256=ry9UWf0ECelVIuBApwmKxPZ_IoL6UqjTSMpGNbjghVQ,3690
  meerschaum/utils/interactive.py,sha256=t-6jWozXSqL7lYGDHuwiOjTgr-UKhdcg61q_eR5mikI,3196
- meerschaum/utils/misc.py,sha256=aEohLCpf6u-eW1YWWFJed3077F7HJni1kXzeaeo3USg,42457
+ meerschaum/utils/misc.py,sha256=H26hLtCP8QHwXoHlvkxjWu6cPTwudDbbsbRkGw6ultg,43296
  meerschaum/utils/networking.py,sha256=Sr_eYUGW8_UV9-k9LqRFf7xLtbUcsDucODyLCRsFRUc,1006
  meerschaum/utils/pool.py,sha256=vkE42af4fjrTEJTxf6Ek3xGucm1MtEkpsSEiaVzNKHs,2655
  meerschaum/utils/process.py,sha256=tbEutHAg_Kn5UetOI-fduRjsafGOYX5tkLvpzqosgvc,7098
  meerschaum/utils/prompt.py,sha256=0mBFbgi_l9rCou9UnC_6qKTHkqyl1Z_jSRzfmc0xRXM,16490
  meerschaum/utils/schedule.py,sha256=lYra4f7QpGlmV4vAhO1UvA5TWQSPdLqw7eG7ekKZtwg,1951
- meerschaum/utils/sql.py,sha256=mWGZO-JXmwfELSRyT7w3L6A-_HJ-k9aPwIgnMGj9Bqs,45982
+ meerschaum/utils/sql.py,sha256=4sCNEpgUd6uFz6ySs4nnUMVaOT0YAvPM1ZlQYJTSF-0,46656
  meerschaum/utils/threading.py,sha256=fAXk7-FnbFvdU1FQ4vHKk5NeGbbTpTw7y9dRnlVayNI,2472
- meerschaum/utils/typing.py,sha256=mCWZ7QN5Oub5QtWtdLp3gL_PPbpbdRhI2DJJkv_dpTA,2535
+ meerschaum/utils/typing.py,sha256=-g_LN1HzBdYZml7iZNSoB4EbpV27D0S0pM_PhuDSuzI,2784
  meerschaum/utils/warnings.py,sha256=0b5O2DBbhEAGnu6RAB1hlHSVmwL_hcR3EiMkExXmBJ0,6535
  meerschaum/utils/yaml.py,sha256=vbCrFjdapKsZ9wRRaI9Ih8dVUwZ-KHpSzfGhRcpDBgQ,3162
  meerschaum/utils/daemon/Daemon.py,sha256=_dxHUM4SEIvTin_k0xMIFjdXTDj97ZE1Do4A_AXcAHA,31961
@@ -218,19 +218,19 @@ meerschaum/utils/dtypes/__init__.py,sha256=5GJUHZSJBc0lrynmvc2vgKAN2ybYlCs-YEi0r
  meerschaum/utils/dtypes/sql.py,sha256=IkEOyB63je-rCLHM6WwFzGbCerYk1zobL1cXkWqmTa4,14638
  meerschaum/utils/formatting/__init__.py,sha256=Dn-8EUTjz0s11bFaAr1HjqlM8IogCTxlpulwp_Kn4ok,13778
  meerschaum/utils/formatting/_jobs.py,sha256=s1lVcdMkzNj5Bqw-GsUhcguUFtahi5nQ-kg1fbp0Idw,3294
- meerschaum/utils/formatting/_pipes.py,sha256=vixqKIUYNC7yDtU-MoeSXnHug9IGkK1N2yqujxvJ-i0,18414
+ meerschaum/utils/formatting/_pipes.py,sha256=wy0iWJFsFl3X2VloaiA_gp9Yx9w6tD3FQZvAQAqef4A,19492
  meerschaum/utils/formatting/_pprint.py,sha256=tgrT3FyGyu5CWJYysqK3kX1xdZYorlbOk9fcU_vt9Qg,3096
  meerschaum/utils/formatting/_shell.py,sha256=ox75O7VHDAiwzSvdMSJZhXLadvAqYJVeihU6WeZ2Ogc,3677
- meerschaum/utils/packages/__init__.py,sha256=sUUtdkTNCXzMl21_8BN-fxuplVUuSx4gaKjXpC3XX6A,56445
+ meerschaum/utils/packages/__init__.py,sha256=yyf-3rAyRy5XaQkNFHjOC85VIkZt0o00o7rG3ykG7XU,56534
  meerschaum/utils/packages/_packages.py,sha256=gEWZHxFbYV6FZfM2uV1JOzS4eBl5OVoBZftMkLsU-4g,7967
  meerschaum/utils/packages/lazy_loader.py,sha256=VHnph3VozH29R4JnSSBfwtA5WKZYZQFT_GeQSShCnuc,2540
  meerschaum/utils/venv/_Venv.py,sha256=sBnlmxHdAh2bx8btfVoD79-H9-cYsv5lP02IIXkyECs,3553
  meerschaum/utils/venv/__init__.py,sha256=sj-n8scWH2NPDJGAxfpqzsYqVUt2jMEr-7Uq9G7YUNQ,23183
- meerschaum-2.1.5.dist-info/LICENSE,sha256=jG2zQEdRNt88EgHUWPpXVWmOrOduUQRx7MnYV9YIPaw,11359
- meerschaum-2.1.5.dist-info/METADATA,sha256=ig4PzucKdxot5IvSY8VeKrh54OVd7ZM8grPLZixLgiw,23976
- meerschaum-2.1.5.dist-info/NOTICE,sha256=OTA9Fcthjf5BRvWDDIcBC_xfLpeDV-RPZh3M-HQBRtQ,114
- meerschaum-2.1.5.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
- meerschaum-2.1.5.dist-info/entry_points.txt,sha256=5YBVzibw-0rNA_1VjB16z5GABsOGf-CDhW4yqH8C7Gc,88
- meerschaum-2.1.5.dist-info/top_level.txt,sha256=bNoSiDj0El6buocix-FRoAtJOeq1qOF5rRm2u9i7Q6A,11
- meerschaum-2.1.5.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
- meerschaum-2.1.5.dist-info/RECORD,,
+ meerschaum-2.1.7.dist-info/LICENSE,sha256=jG2zQEdRNt88EgHUWPpXVWmOrOduUQRx7MnYV9YIPaw,11359
+ meerschaum-2.1.7.dist-info/METADATA,sha256=AgC6CJaCcrlaqiGgfyG3pl2FtpqOX0ecLf8comQjt8w,23976
+ meerschaum-2.1.7.dist-info/NOTICE,sha256=OTA9Fcthjf5BRvWDDIcBC_xfLpeDV-RPZh3M-HQBRtQ,114
+ meerschaum-2.1.7.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+ meerschaum-2.1.7.dist-info/entry_points.txt,sha256=5YBVzibw-0rNA_1VjB16z5GABsOGf-CDhW4yqH8C7Gc,88
+ meerschaum-2.1.7.dist-info/top_level.txt,sha256=bNoSiDj0El6buocix-FRoAtJOeq1qOF5rRm2u9i7Q6A,11
+ meerschaum-2.1.7.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+ meerschaum-2.1.7.dist-info/RECORD,,