meerschaum 2.6.5__py3-none-any.whl → 2.6.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- meerschaum/_internal/docs/index.py +2 -1
- meerschaum/config/_version.py +1 -1
- meerschaum/connectors/sql/_SQLConnector.py +10 -7
- meerschaum/connectors/sql/_pipes.py +11 -7
- meerschaum/connectors/valkey/_pipes.py +58 -11
- meerschaum/core/Pipe/_attributes.py +10 -7
- meerschaum/core/Pipe/_drop.py +5 -0
- meerschaum/utils/dataframe.py +7 -3
- meerschaum/utils/dtypes/__init__.py +2 -2
- meerschaum/utils/packages/__init__.py +2 -3
- meerschaum/utils/packages/_packages.py +1 -0
- meerschaum/utils/sql.py +41 -3
- {meerschaum-2.6.5.dist-info → meerschaum-2.6.7.dist-info}/METADATA +2 -1
- {meerschaum-2.6.5.dist-info → meerschaum-2.6.7.dist-info}/RECORD +20 -20
- {meerschaum-2.6.5.dist-info → meerschaum-2.6.7.dist-info}/WHEEL +1 -1
- {meerschaum-2.6.5.dist-info → meerschaum-2.6.7.dist-info}/LICENSE +0 -0
- {meerschaum-2.6.5.dist-info → meerschaum-2.6.7.dist-info}/NOTICE +0 -0
- {meerschaum-2.6.5.dist-info → meerschaum-2.6.7.dist-info}/entry_points.txt +0 -0
- {meerschaum-2.6.5.dist-info → meerschaum-2.6.7.dist-info}/top_level.txt +0 -0
- {meerschaum-2.6.5.dist-info → meerschaum-2.6.7.dist-info}/zip-safe +0 -0
meerschaum/_internal/docs/index.py
CHANGED
@@ -741,10 +741,11 @@ def init_dash(dash_app):
 <li><code>meerschaum.utils.sql.get_null_replacement()</code></li>
 <li><code>meerschaum.utils.sql.get_db_version()</code></li>
 <li><code>meerschaum.utils.sql.get_rename_table_queries()</code></li>
-<li><code>meerschaum.utils.sql.
+<li><code>meerschaum.utils.sql.get_create_table_queries()</code></li>
 <li><code>meerschaum.utils.sql.wrap_query_with_cte()</code></li>
 <li><code>meerschaum.utils.sql.format_cte_subquery()</code></li>
 <li><code>meerschaum.utils.sql.session_execute()</code></li>
+<li><code>meerschaum.utils.sql.get_reset_autoincrement_queries()</code></li>
 </ul>
 </details>
 </ul>
meerschaum/config/_version.py
CHANGED
meerschaum/connectors/sql/_SQLConnector.py
CHANGED
@@ -11,7 +11,7 @@ import meerschaum as mrsm
 from meerschaum.utils.typing import Optional, Any, Union
 
 from meerschaum.connectors import Connector
-from meerschaum.utils.warnings import error
+from meerschaum.utils.warnings import error, warn
 
 
 class SQLConnector(Connector):
@@ -212,7 +212,6 @@ class SQLConnector(Connector):
 
 if connect:
 if not self.test_connection(debug=debug):
-from meerschaum.utils.warnings import warn
 warn(f"Failed to connect with connector '{self}'!", stack=False)
 
 @property
@@ -230,8 +229,11 @@ class SQLConnector(Connector):
 
 @property
 def engine(self):
-
-
+"""
+Return the SQLAlchemy engine connected to the configured database.
+"""
+import os
+import threading
 if '_engine' not in self.__dict__:
 self._engine, self._engine_str = self.create_engine(include_uri=True)
 
@@ -242,13 +244,14 @@
 if not same_process:
 self._pid = os.getpid()
 self._thread = threading.current_thread()
-
-warn(f"Different PID detected. Disposing of connections...")
+warn("Different PID detected. Disposing of connections...")
 self._engine.dispose()
 
 ### handle different threads
 if not same_thread:
-
+if self.flavor == 'duckdb':
+warn("Different thread detected.")
+self._engine.dispose()
 
 return self._engine
 
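The engine property hunks above add fork safety: when the current PID differs from the one that created the engine, the pooled connections are disposed of before reuse. A minimal, hypothetical sketch of the same pattern with plain SQLAlchemy (the URL and helper name are illustrative, not Meerschaum's API):

    import os
    import threading
    import sqlalchemy

    _engine = sqlalchemy.create_engine("sqlite://")  # hypothetical connection URL
    _pid = os.getpid()
    _thread = threading.current_thread()

    def get_engine():
        """Return the engine, disposing of stale pooled connections after a fork."""
        global _pid, _thread
        if os.getpid() != _pid:
            # Connections inherited across a fork must not be reused.
            _engine.dispose()
            _pid = os.getpid()
        if threading.current_thread() is not _thread:
            _thread = threading.current_thread()
        return _engine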
meerschaum/connectors/sql/_pipes.py
CHANGED
@@ -1349,9 +1349,9 @@ def create_pipe_table_from_df(
 for col, typ in df.dtypes.items()
 },
 **{
-col: 'int'
+col: str(df.dtypes.get(col, 'int'))
 for col_ix, col in pipe.columns.items()
-if col_ix != 'primary'
+if col and col_ix != 'primary'
 },
 **{
 col: 'uuid'
@@ -1696,7 +1696,9 @@ def sync_pipe(
 )
 for col, typ in update_df.dtypes.items()
 }
-
+now_ts = time.perf_counter()
+temp_pipe.__dict__['_columns_types_timestamp'] = now_ts
+temp_pipe.__dict__['_skip_check_indices'] = True
 temp_success, temp_msg = temp_pipe.sync(update_df, check_existing=False, debug=debug)
 if not temp_success:
 return temp_success, temp_msg
@@ -1867,6 +1869,7 @@ def sync_pipe_inplace(
 )
 pipe_name = sql_item_name(pipe.target, self.flavor, self.get_pipe_schema(pipe))
 upsert = pipe.parameters.get('upsert', False) and f'{self.flavor}-upsert' in update_queries
+static = pipe.parameters.get('static', False)
 database = getattr(self, 'database', self.parse_uri(self.URI).get('database', None))
 primary_key = pipe.columns.get('primary', None)
 autoincrement = pipe.parameters.get('autoincrement', False)
@@ -1944,7 +1947,7 @@ def sync_pipe_inplace(
 schema=internal_schema,
 database=database,
 debug=debug,
-)
+) if not static else pipe.get_columns_types(debug=debug)
 if not new_cols_types:
 return False, f"Failed to get new columns for {pipe}."
 
@@ -2049,7 +2052,7 @@ def sync_pipe_inplace(
 schema=internal_schema,
 database=database,
 debug=debug,
-) if not upsert else new_cols_types
+) if not (upsert or static) else new_cols_types
 
 common_cols = [col for col in new_cols if col in backtrack_cols_types]
 on_cols = {
@@ -2123,7 +2126,7 @@ def sync_pipe_inplace(
 schema=internal_schema,
 database=database,
 debug=debug,
-) if not upsert else new_cols_types
+) if not (upsert or static) else new_cols_types
 
 ### This is a weird bug on SQLite.
 ### Sometimes the backtrack dtypes are all empty strings.
@@ -2812,12 +2815,13 @@ def get_pipe_columns_indices(
 pipe: mrsm.Pipe
 The pipe to be queried against.
 
-
 Returns
 -------
 A dictionary mapping columns names to lists of dictionaries.
 The dictionaries in the lists contain the name and type of the indices.
 """
+if pipe.__dict__.get('_skip_check_indices', False):
+return {}
 from meerschaum.utils.sql import get_table_cols_indices
 return get_table_cols_indices(
 pipe.target,
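Several hunks above branch on the pipe's static parameter so a static pipe's cached schema is reused instead of re-inspecting the temporary tables during an in-place sync. A hedged sketch of declaring such a pipe (the connector keys and column names are invented for illustration; the constructor keyword arguments are assumptions about the usual mrsm.Pipe signature):

    import meerschaum as mrsm

    # Marking a pipe as static tells the instance connector its schema will not change,
    # so cached columns and dtypes may be reused instead of re-queried.
    pipe = mrsm.Pipe(
        'plugin:demo', 'temperature',
        instance='sql:local',
        columns={'datetime': 'dt', 'id': 'station'},
        parameters={'static': True},
    )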
meerschaum/connectors/valkey/_pipes.py
CHANGED
@@ -305,6 +305,14 @@ def drop_pipe(
 -------
 A `SuccessTuple` indicating success.
 """
+for chunk_begin, chunk_end in pipe.get_chunk_bounds(debug=debug):
+clear_chunk_success, clear_chunk_msg = pipe.clear(
+begin=chunk_begin,
+end=chunk_end,
+debug=debug,
+)
+if not clear_chunk_success:
+return clear_chunk_success, clear_chunk_msg
 try:
 self.drop_table(pipe.target, debug=debug)
 except Exception as e:
@@ -547,7 +555,7 @@ def sync_pipe(
 new_dtypes[col] = 'string'
 df[col] = df[col].astype('string')
 
-if new_dtypes and not static:
+if new_dtypes and (not static or not valkey_dtypes):
 valkey_dtypes.update(new_dtypes)
 if 'valkey' not in pipe.parameters:
 pipe.parameters['valkey'] = {}
@@ -560,19 +568,19 @@ def sync_pipe(
 unseen_df, update_df, delta_df = (
 pipe.filter_existing(df, include_unchanged_columns=True, debug=debug)
 if check_existing and not upsert
-else (
+else (None, df, df)
 )
 num_insert = len(unseen_df) if unseen_df is not None else 0
 num_update = len(update_df) if update_df is not None else 0
 msg = (
 f"Inserted {num_insert}, updated {num_update} rows."
 if not upsert
-else f"Upserted {
+else f"Upserted {num_update} rows."
 )
 if len(delta_df) == 0:
 return True, msg
 
-unseen_docs = unseen_df.to_dict(orient='records')
+unseen_docs = unseen_df.to_dict(orient='records') if unseen_df is not None else []
 unseen_indices_docs = _serialize_indices_docs(unseen_docs)
 unseen_ix_vals = {
 get_document_key(doc, indices, table_name): serialize_document(doc)
@@ -599,11 +607,53 @@ def sync_pipe(
 get_document_key(doc, indices, table_name): doc
 for doc in update_docs
 }
-
+existing_docs_data = {
+key: self.get(key)
+for key in update_ix_docs
+} if pipe.exists(debug=debug) else {}
+existing_docs = {
+key: json.loads(data)
+for key, data in existing_docs_data.items()
+if data
+}
+new_update_docs = {
+key: doc
+for key, doc in update_ix_docs.items()
+if key not in existing_docs
+}
+new_ix_vals = {
+get_document_key(doc, indices, table_name): serialize_document(doc)
+for doc in new_update_docs.values()
+}
+for key, val in new_ix_vals.items():
 try:
-
-
-
+self.set(key, val)
+except Exception as e:
+return False, f"Failed to set keys for {pipe}:\n{e}"
+
+old_update_docs = {
+key: {
+**existing_docs[key],
+**doc
+}
+for key, doc in update_ix_docs.items()
+if key in existing_docs
+}
+new_indices_docs = _serialize_indices_docs([doc for doc in new_update_docs.values()])
+try:
+if new_indices_docs:
+self.push_docs(
+new_indices_docs,
+pipe.target,
+datetime_column=dt_col,
+debug=debug,
+)
+except Exception as e:
+return False, f"Failed to upsert '{pipe.target}':\n{e}"
+
+for key, doc in old_update_docs.items():
+try:
+self.set(key, serialize_document(doc))
 except Exception as e:
 return False, f"Failed to set keys for {pipe}:\n{e}"
 
@@ -667,9 +717,6 @@ def clear_pipe(
 -------
 A `SuccessTuple` indicating success.
 """
-if begin is None and end is None and params is None:
-return self.drop_pipe(pipe, debug=debug)
-
 dt_col = pipe.columns.get('datetime', None)
 
 existing_df = pipe.get_data(
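The large sync_pipe hunk above splits incoming update documents into keys that already exist in Valkey and keys that do not, then merges each existing document with its patch before writing it back. A minimal standalone sketch of that merge pattern (keys and documents are made up for illustration):

    import json

    existing_docs = {
        'pipe:doc:1': {'dt': '2024-01-01', 'id': 1, 'val': 10.0},
    }
    update_ix_docs = {
        'pipe:doc:1': {'dt': '2024-01-01', 'id': 1, 'val': 99.9},  # patch for an existing doc
        'pipe:doc:2': {'dt': '2024-01-02', 'id': 2, 'val': 20.0},  # brand-new doc
    }

    new_update_docs = {k: d for k, d in update_ix_docs.items() if k not in existing_docs}
    old_update_docs = {
        k: {**existing_docs[k], **d}  # existing fields, overwritten by the patch
        for k, d in update_ix_docs.items()
        if k in existing_docs
    }

    print(json.dumps(old_update_docs['pipe:doc:1']))  # the merged document written back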
meerschaum/core/Pipe/_attributes.py
CHANGED
@@ -85,7 +85,7 @@ def columns(self) -> Union[Dict[str, str], None]:
 if not isinstance(cols, dict):
 cols = {}
 self.parameters['columns'] = cols
-return cols
+return {col_ix: col for col_ix, col in cols.items() if col}
 
 
 @columns.setter
@@ -127,11 +127,11 @@ def indices(self) -> Union[Dict[str, Union[str, List[str]]], None]:
 ) + [
 col
 for col_ix, col in _columns.items()
-if col_ix != 'datetime'
+if col and col_ix != 'datetime'
 ]))
 return {
 **({'unique': unique_cols} if len(unique_cols) > 1 else {}),
-**_columns,
+**{col_ix: col for col_ix, col in _columns.items() if col},
 **_indices
 }
 
@@ -371,8 +371,7 @@ def get_columns_types(
 
 now = time.perf_counter()
 cache_seconds = STATIC_CONFIG['pipes']['static_schema_cache_seconds']
-
-if not static:
+if not self.static:
 refresh = True
 if refresh:
 _ = self.__dict__.pop('_columns_types_timestamp', None)
@@ -416,7 +415,11 @@ def get_columns_indices(
 from meerschaum.utils.warnings import dprint
 
 now = time.perf_counter()
-
+cache_seconds = (
+STATIC_CONFIG['pipes']['static_schema_cache_seconds']
+if self.static
+else STATIC_CONFIG['pipes']['exists_timeout_seconds']
+)
 if refresh:
 _ = self.__dict__.pop('_columns_indices_timestamp', None)
 _ = self.__dict__.pop('_columns_indices', None)
@@ -425,7 +428,7 @@ def get_columns_indices(
 columns_indices_timestamp = self.__dict__.get('_columns_indices_timestamp', None)
 if columns_indices_timestamp is not None:
 delta = now - columns_indices_timestamp
-if delta <
+if delta < cache_seconds:
 if debug:
 dprint(
 f"Returning cached `columns_indices` for {self} "
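Both the columns property and indices now filter out falsy entries, so an index declared without a column (None or an empty string) no longer reaches index-creation logic downstream. The comprehension in isolation:

    cols = {'datetime': 'dt', 'id': None, 'primary': ''}
    print({col_ix: col for col_ix, col in cols.items() if col})
    # {'datetime': 'dt'}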
meerschaum/core/Pipe/_drop.py
CHANGED
@@ -9,6 +9,7 @@ Drop a Pipe's table but keep its registration
 from __future__ import annotations
 from meerschaum.utils.typing import SuccessTuple, Any
 
+
 def drop(
 self,
 debug: bool = False,
@@ -39,4 +40,8 @@ def drop(
 
 with Venv(get_connector_plugin(self.instance_connector)):
 result = self.instance_connector.drop_pipe(self, debug=debug, **kw)
+
+_ = self.__dict__.pop('_exists', None)
+_ = self.__dict__.pop('_exists_timestamp', None)
+
 return result
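Popping _exists and _exists_timestamp after the connector's drop_pipe() call invalidates the in-memory existence cache. A hedged usage sketch (the pipe's keys are illustrative):

    import meerschaum as mrsm

    pipe = mrsm.Pipe('plugin:demo', 'temperature', instance='sql:local')
    pipe.drop()
    # Because drop() now pops '_exists' and '_exists_timestamp',
    # this re-checks the instance instead of returning a stale cached True.
    print(pipe.exists())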
meerschaum/utils/dataframe.py
CHANGED
@@ -234,10 +234,13 @@ def filter_unseen_df(
 cast_dt_cols = True
 try:
 for col, typ in dt_dtypes.items():
+strip_utc = (
+(dtypes or {}).get(col, 'datetime') == 'datetime64[ns]'
+)
 if col in old_df.columns:
-old_df[col] = coerce_timezone(old_df[col])
+old_df[col] = coerce_timezone(old_df[col], strip_utc=strip_utc)
 if col in new_df.columns:
-new_df[col] = coerce_timezone(new_df[col])
+new_df[col] = coerce_timezone(new_df[col], strip_utc=strip_utc)
 cast_dt_cols = False
 except Exception as e:
 warn(f"Could not cast datetime columns:\n{e}")
@@ -1284,7 +1287,8 @@ def query_df(
 if debug:
 dprint(f"Casting column '{datetime_column}' to UTC...")
 df[datetime_column] = coerce_timezone(df[datetime_column], strip_utc=False)
-
+if debug:
+dprint(f"Using datetime bounds:\n{begin=}\n{end=}")
 
 in_ex_params = get_in_ex_params(params)
 
meerschaum/utils/dtypes/__init__.py
CHANGED
@@ -270,9 +270,9 @@ def coerce_timezone(
 pandas = mrsm.attempt_import('pandas')
 dd = mrsm.attempt_import('dask.dataframe') if is_dask else None
 dt_series = (
-pandas.to_datetime(dt, utc=True)
+pandas.to_datetime(dt, utc=True, format='ISO8601')
 if dd is None
-else dd.to_datetime(dt, utc=True)
+else dd.to_datetime(dt, utc=True, format='ISO8601')
 )
 if strip_utc:
 dt_series = dt_series.apply(lambda x: x.replace(tzinfo=None))
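coerce_timezone() now passes format='ISO8601', which lets pandas parse mixed-precision ISO strings without per-element format inference. A small standalone sketch of the parse-then-strip behavior, assuming pandas 2.x:

    import pandas as pd

    dt = pd.Series(['2024-01-01T00:00:00Z', '2024-01-01 01:30:00+00:00'])
    dt_series = pd.to_datetime(dt, utc=True, format='ISO8601')

    # With strip_utc=True, the tzinfo is dropped while the UTC wall-clock values are kept.
    naive = dt_series.apply(lambda x: x.replace(tzinfo=None))
    print(naive.dtype)  # datetime64[ns]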
meerschaum/utils/packages/__init__.py
CHANGED
@@ -1467,7 +1467,6 @@ def import_pandas(
 """
 Quality-of-life function to attempt to import the configured version of `pandas`.
 """
-import sys
 pandas_module_name = pandas_name()
 global emitted_pandas_warning
 
@@ -1482,11 +1481,11 @@
 + f"'{pandas_module_name}'"
 + "\n Features may not work as expected."
 ),
-stack
+stack=False,
 )
 
 pytz = attempt_import('pytz', debug=debug, lazy=False, **kw)
-pandas = attempt_import('pandas', debug=debug, lazy=False, **kw)
+pandas, pyarrow = attempt_import('pandas', 'pyarrow', debug=debug, lazy=False, **kw)
 pd = attempt_import(pandas_module_name, debug=debug, lazy=lazy, **kw)
 return pd
 
meerschaum/utils/sql.py
CHANGED
@@ -87,9 +87,11 @@ update_queries = {
 WHERE {date_bounds_subquery}
 """,
 'mysql-upsert': """
-
+INSERT {ignore}INTO {target_table_name} ({patch_cols_str})
 SELECT {patch_cols_str}
 FROM {patch_table_name}
+{on_duplicate_key_update}
+{cols_equal_values}
 """,
 'mariadb': """
 UPDATE {target_table_name} AS f
@@ -99,9 +101,11 @@ update_queries = {
 WHERE {date_bounds_subquery}
 """,
 'mariadb-upsert': """
-
+INSERT {ignore}INTO {target_table_name} ({patch_cols_str})
 SELECT {patch_cols_str}
 FROM {patch_table_name}
+{on_duplicate_key_update}
+{cols_equal_values}
 """,
 'mssql': """
 MERGE {target_table_name} f
@@ -1578,6 +1582,19 @@ def get_update_queries(
 f" UPDATE {sets_subquery('', 'p.')}"
 )
 
+cols_equal_values = '\n,'.join(
+[
+f"{sql_item_name(c_name, flavor)} = VALUES({sql_item_name(c_name, flavor)})"
+for c_name, c_type in value_cols
+]
+)
+on_duplicate_key_update = (
+"ON DUPLICATE KEY UPDATE"
+if value_cols
+else ""
+)
+ignore = "IGNORE " if not value_cols else ""
+
 return [
 base_query.format(
 sets_subquery_none=sets_subquery('', 'p.'),
@@ -1594,6 +1611,9 @@
 coalesce_join_cols_str=coalesce_join_cols_str,
 update_or_nothing=update_or_nothing,
 when_matched_update_sets_subquery_none=when_matched_update_sets_subquery_none,
+cols_equal_values=cols_equal_values,
+on_duplicate_key_update=on_duplicate_key_update,
+ignore=ignore,
 )
 for base_query in base_queries
 ]
@@ -2179,7 +2199,25 @@ def get_reset_autoincrement_queries(
 debug: bool = False,
 ) -> List[str]:
 """
-Return a list of queries to reset a table's auto-increment counter.
+Return a list of queries to reset a table's auto-increment counter to the next largest value.
+
+Parameters
+----------
+table: str
+The name of the table on which the auto-incrementing column exists.
+
+column: str
+The name of the auto-incrementing column.
+
+connector: mrsm.connectors.SQLConnector
+The SQLConnector to the database on which the table exists.
+
+schema: Optional[str], default None
+The schema of the table. Defaults to `connector.schema`.
+
+Returns
+-------
+A list of queries to be executed to reset the auto-incrementing column.
 """
 if not table_exists(table, connector, schema=schema, debug=debug):
 return []
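Once the new placeholders are filled in by get_update_queries(), the mysql-upsert template renders roughly as below (table and column names invented for illustration); with no non-key value columns, {ignore} becomes "IGNORE " and the ON DUPLICATE KEY UPDATE clause is omitted, leaving a plain INSERT IGNORE:

    # Approximate rendering of the new mysql-upsert template after formatting:
    query = """
        INSERT INTO `target` (`dt`, `id`, `val`)
        SELECT `dt`, `id`, `val`
        FROM `target_patch`
        ON DUPLICATE KEY UPDATE
        `val` = VALUES(`val`)
    """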
{meerschaum-2.6.5.dist-info → meerschaum-2.6.7.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: meerschaum
-Version: 2.6.
+Version: 2.6.7
 Summary: Sync Time-Series Pipes with Meerschaum
 Home-page: https://meerschaum.io
 Author: Bennett Meares
@@ -152,6 +152,7 @@ Requires-Dist: pytest >=6.2.2 ; extra == 'dev-tools'
 Requires-Dist: pytest-xdist >=3.2.1 ; extra == 'dev-tools'
 Requires-Dist: heartrate >=0.2.1 ; extra == 'dev-tools'
 Requires-Dist: build >=1.2.1 ; extra == 'dev-tools'
+Requires-Dist: attrs >=24.2.0 ; extra == 'dev-tools'
 Provides-Extra: docs
 Requires-Dist: mkdocs >=1.1.2 ; extra == 'docs'
 Requires-Dist: mkdocs-material >=6.2.5 ; extra == 'docs'
{meerschaum-2.6.5.dist-info → meerschaum-2.6.7.dist-info}/RECORD
CHANGED
@@ -6,7 +6,7 @@ meerschaum/_internal/arguments/__init__.py,sha256=_nSKKVLXNsJeSv-buxEZsx8_c0BAbk
 meerschaum/_internal/arguments/_parse_arguments.py,sha256=H492J571CetGVIEzOZhwQVS3bcm4t6hjWg8Gsf6dw0Y,16340
 meerschaum/_internal/arguments/_parser.py,sha256=h0OQbfadr7h03Vyqw2B326BJ-feHRYZFWFbZ_ge9DIo,16486
 meerschaum/_internal/docs/__init__.py,sha256=ZQYHWo6n0kfLLkyG36YXqTYvv2Pc7it5HZHMylT6cBA,126
-meerschaum/_internal/docs/index.py,sha256=
+meerschaum/_internal/docs/index.py,sha256=ZkqXj-GhoLEpgMysy4ugvOlFhWVlnq7tFvzMhy43jUQ,24670
 meerschaum/_internal/gui/__init__.py,sha256=KF6Opae0aBOjIndMZ2txoPs7ozCXRlR-lcTsicLO7fc,1313
 meerschaum/_internal/gui/app/__init__.py,sha256=rKUa8hHk6Fai-PDF61tQcpT1myxKcfmvEMDHxThNp7o,1565
 meerschaum/_internal/gui/app/_windows.py,sha256=-VHdjTzA3V596fVqnbmTxemONSp_80-sTNJ0CTB8FwU,2632
@@ -143,7 +143,7 @@ meerschaum/config/_preprocess.py,sha256=-AEA8m_--KivZwTQ1sWN6LTn5sio_fUr2XZ51BO6
 meerschaum/config/_read_config.py,sha256=RLC3HHi_1ndj7ITVDKLD9_uULY3caGRwSz3ATYE-ixA,15014
 meerschaum/config/_shell.py,sha256=46_m49Txc5q1rGfCgO49ca48BODx45DQJi8D0zz1R18,4245
 meerschaum/config/_sync.py,sha256=jHcWRkxd82_BgX8Xo8agsWvf7BSbv3qHLWmYl6ehp_0,4242
-meerschaum/config/_version.py,sha256=
+meerschaum/config/_version.py,sha256=_eF-9HJ3QXep9cgyfLQDypSNV_Vgz5zFRjzI9xg_YZg,71
 meerschaum/config/paths.py,sha256=JjibeGN3YAdSNceRwsd42aNmeUrIgM6ndzC8qZAmNI0,621
 meerschaum/config/resources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 meerschaum/config/stack/__init__.py,sha256=2UukC0Lmk-aVL1o1qXzumqmuIrw3vu9fD7iCuz4XD4I,10544
@@ -170,13 +170,13 @@ meerschaum/connectors/api/_uri.py,sha256=HWxqGx4R1cHZ3ywy9Ro9ePbFxxusw4RLaC3hpGt
 meerschaum/connectors/api/_users.py,sha256=kzb7ENgXwQ19OJYKOuuWzx2rwVuUZCly9dTnyvVuT2Q,5275
 meerschaum/connectors/plugin/PluginConnector.py,sha256=aQ1QaB7MordCFimZqoGLb0R12PfDUN_nWks2J5mzeAs,2084
 meerschaum/connectors/plugin/__init__.py,sha256=pwF7TGY4WNz2_HaVdmK4rPQ9ZwTOEuPHgzOqsGcoXJw,198
-meerschaum/connectors/sql/_SQLConnector.py,sha256=
+meerschaum/connectors/sql/_SQLConnector.py,sha256=g9SFK30CZp7CTJI-SdpOanL1NQUBFQeUng7FSGacJA4,11985
 meerschaum/connectors/sql/__init__.py,sha256=3cqYiDkVasn7zWdtOTAZbT4bo95AuvGOmDD2TkaAxtw,205
 meerschaum/connectors/sql/_cli.py,sha256=1SgnWeMIAihoxp4FzbNrcq1npXf0dSOQnCntpU9hUXA,4405
 meerschaum/connectors/sql/_create_engine.py,sha256=zqeu1xHOw3n3Zgfjx-diy2aoynfdOlfOjwFuRrzB028,10452
 meerschaum/connectors/sql/_fetch.py,sha256=A2R1aLgdEkiIet8P2BZ13OXy31I5e8BzWee74Bj_v4k,13141
 meerschaum/connectors/sql/_instance.py,sha256=3KJI3ImwWAJkUfdZIrSL24pcW6Nic8wo5IUeGth9HP4,6459
-meerschaum/connectors/sql/_pipes.py,sha256=
+meerschaum/connectors/sql/_pipes.py,sha256=PZlcCb-dIsL2cfCCYpRarTAAdYcom-nSsM_tHphzCjc,117386
 meerschaum/connectors/sql/_plugins.py,sha256=wbxcNxqTtjfDsxPvdVGTllasYf6NHHzODaQ72hEUSBQ,8135
 meerschaum/connectors/sql/_sql.py,sha256=vdSslLKm8ftzIzfCs-0mL3q9zY2pmhZPTpzXSbKROag,37598
 meerschaum/connectors/sql/_uri.py,sha256=0BrhQtqQdzg9mR04gWBZINs_BbPFtSlTECXT_TCUwik,3460
@@ -187,19 +187,19 @@ meerschaum/connectors/sql/tables/types.py,sha256=Jc_MTHIBM-KHpQt__Lckp39CeOo7tGO
 meerschaum/connectors/valkey/_ValkeyConnector.py,sha256=xHld4OCnt0SXmAWH8Yintb3931F-MEgXfWerhTthsXc,15849
 meerschaum/connectors/valkey/__init__.py,sha256=jkVutsygQCvGPLN17cP6wHAjHajxVycnQJbm2eVMuY0,187
 meerschaum/connectors/valkey/_fetch.py,sha256=MjeE0h3YI4M3LCzy7axQAc_fX_l82vUqX4WXcYoppxE,1920
-meerschaum/connectors/valkey/_pipes.py,sha256=
+meerschaum/connectors/valkey/_pipes.py,sha256=V86tln3TE4SZwvLCgliw8kjQG9K0qk-NS4_-6UItObw,24324
 meerschaum/connectors/valkey/_plugins.py,sha256=ZqiEW4XZCOpw4G8DUK2IKY6Qrph4mYfTjgXWimgakYY,6267
 meerschaum/connectors/valkey/_users.py,sha256=AS1vLarrkDA9yPK644GWwRiQiTZVa9x3nlLpyntq40g,7730
 meerschaum/core/__init__.py,sha256=tjASW10n9uLV6bYhcwP4rggh-ESXSJzgxpSBbVsuISs,251
 meerschaum/core/Pipe/__init__.py,sha256=knkz0gVpIrHb8au2b_YxvnSC9eXMgPKr5b7TxVQE9O0,18529
-meerschaum/core/Pipe/_attributes.py,sha256=
+meerschaum/core/Pipe/_attributes.py,sha256=hvnt1kCdqn9UQU86jMTJ9kDYXJ4hC3Cc48wVX8u9Z7w,21446
 meerschaum/core/Pipe/_bootstrap.py,sha256=evyi07kkzAVMj66HfZkbYdcWk_oHUDsl6f13EnSPMYs,7723
 meerschaum/core/Pipe/_clear.py,sha256=LghXabgyyc1tD7FNQrh9ExT71ipcg2poM9FDA3k9e4M,2230
 meerschaum/core/Pipe/_copy.py,sha256=YDclAapf_spm9phpFr4-CALyYyw7nUsyKyiaLM1cnm4,2965
 meerschaum/core/Pipe/_data.py,sha256=BTmk2WIO3Ad9mrq47aIJzUD6cd5jj9UBjNm--6m9RaM,24259
 meerschaum/core/Pipe/_deduplicate.py,sha256=xthUdsDxGO2t3m0XGDm9K3F6dpaZoemtjNi8gyKm0e0,10177
 meerschaum/core/Pipe/_delete.py,sha256=1geNp9BgrocXP1gt76dMbnlJWKYFMuSNqPFA4K4-hXE,2118
-meerschaum/core/Pipe/_drop.py,sha256=
+meerschaum/core/Pipe/_drop.py,sha256=qj5L0obDh2_dKAg7LD3WLI3_L0Q5YECTfTmPc8zZUxI,1135
 meerschaum/core/Pipe/_dtypes.py,sha256=VohahiZk44Uw3PNsTLyqH8m9wRdB-mkPgYvgCb5hlA0,4114
 meerschaum/core/Pipe/_edit.py,sha256=HrKWe9vhqKaNOjOcJzW5BNbaUBPIbgNAhJEK8OMsy7c,8416
 meerschaum/core/Pipe/_fetch.py,sha256=Q_LncNi1nv-YwvRPbh1QK0hf6hflL7Hn9v9lT3oQgF4,5451
@@ -219,7 +219,7 @@ meerschaum/plugins/__init__.py,sha256=6krcqaMKyzuVqesXMqEL0XEy2SJQ4xfNt2-oI_fJ6v
 meerschaum/plugins/bootstrap.py,sha256=VwjpZAuYdqPJW0YoVgAoM_taHkdQHqP902-8T7OWWCI,11339
 meerschaum/utils/__init__.py,sha256=QrK1K9hIbPCRCM5k2nZGFqGnrqhA0Eh-iSmCU7FG6Cs,612
 meerschaum/utils/_get_pipes.py,sha256=tu4xKPoDn79Dz2kWM13cXTP4DSCkn-3G9M8KiLftopw,11073
-meerschaum/utils/dataframe.py,sha256=
+meerschaum/utils/dataframe.py,sha256=ZLv5QZHtAixaBWUmBCILFoA0ArFPEwrwlIbtL2FzJHM,43419
 meerschaum/utils/debug.py,sha256=GyIzJmunkoPnOcZNYVQdT4Sgd-aOb5MI2VbIgATOjIQ,3695
 meerschaum/utils/interactive.py,sha256=t-6jWozXSqL7lYGDHuwiOjTgr-UKhdcg61q_eR5mikI,3196
 meerschaum/utils/misc.py,sha256=soGmUooT216Dl15KbcUTzf8E-aC6uNM6Zvy1PiUT_Y4,47089
@@ -228,7 +228,7 @@ meerschaum/utils/pool.py,sha256=vkE42af4fjrTEJTxf6Ek3xGucm1MtEkpsSEiaVzNKHs,2655
 meerschaum/utils/process.py,sha256=9O8PPPJjY9Q5W2f39I3B3lFU6TlSiRiI3bgrzdOOyOw,7843
 meerschaum/utils/prompt.py,sha256=6J--mZJ_NcEdSX6KMjtY4fXXezyILLHP24VdxFFqOIc,18985
 meerschaum/utils/schedule.py,sha256=9BQGEzDbInLAU1aFO-FvL3wKu9XCTUpS0V_aQID6xzc,11228
-meerschaum/utils/sql.py,sha256=
+meerschaum/utils/sql.py,sha256=TAsHcUXg2RN3UfwMh3V8U5MtBrTn1VCIvikUAwsw3q4,72164
 meerschaum/utils/threading.py,sha256=3N8JXPAnwqJiSjuQcbbJg3Rv9-CCUMJpeQRfKFR7MaA,2489
 meerschaum/utils/typing.py,sha256=U3MC347sh1umpa3Xr1k71eADyDmk4LB6TnVCpq8dVzI,2830
 meerschaum/utils/warnings.py,sha256=n-phr3BftNNgyPnvnXC_VMSjtCvjiCZ-ewmVfcROhkc,6611
@@ -239,23 +239,23 @@ meerschaum/utils/daemon/RotatingFile.py,sha256=ePm_svjwyFDWh6V1k-bp1RHXCSWlyxDtl
 meerschaum/utils/daemon/StdinFile.py,sha256=J6tyUReM8NEp3bBQAxMfe8mjJG5mWi6CzHN4x86VQBI,3237
 meerschaum/utils/daemon/__init__.py,sha256=o9jWb4lRTIyny4EPt7fPXFgV_vIf1mUofsTwoE1ZecA,8751
 meerschaum/utils/daemon/_names.py,sha256=d2ZwTxBoTAqXZkCfZ5LuX2XrkQmLNUq1OTlUqfoH5dA,4515
-meerschaum/utils/dtypes/__init__.py,sha256=
+meerschaum/utils/dtypes/__init__.py,sha256=b9Qv7JNlHRDmh9SGRSEGu8fUyGhmOE3EmvDwo34Hnd4,8061
 meerschaum/utils/dtypes/sql.py,sha256=IQihwQy4OKSbRjvJy6ky6SszFKR7W1iMs-ruZDsf2js,18701
 meerschaum/utils/formatting/__init__.py,sha256=GpJQWeqkdWw5IuDmW4Rgmapjzv-KkI4jhBZllJi4QIg,15999
 meerschaum/utils/formatting/_jobs.py,sha256=izsqPJhTtUkXUUtWnbXtReYsUYwulXtci3pBj72Ne64,6637
 meerschaum/utils/formatting/_pipes.py,sha256=840O5rg2aHhQoraCDOh2ZtBo43_W2W6R60yYufEoXp8,19494
 meerschaum/utils/formatting/_pprint.py,sha256=tgrT3FyGyu5CWJYysqK3kX1xdZYorlbOk9fcU_vt9Qg,3096
 meerschaum/utils/formatting/_shell.py,sha256=XH7VFLteNv7NGtWhJl7FdIGt80sKeTiDoJokGSDAwBM,3761
-meerschaum/utils/packages/__init__.py,sha256=
-meerschaum/utils/packages/_packages.py,sha256=
+meerschaum/utils/packages/__init__.py,sha256=Op93VJkAX3OL4H-js_p3dAaa_PT82jvjCna27aHOsUk,64199
+meerschaum/utils/packages/_packages.py,sha256=IFcQ4MzmTqjdWkqOsUa25xUNmG246TFqe2iZ0TCRPmI,8801
 meerschaum/utils/packages/lazy_loader.py,sha256=VHnph3VozH29R4JnSSBfwtA5WKZYZQFT_GeQSShCnuc,2540
 meerschaum/utils/venv/_Venv.py,sha256=sBnlmxHdAh2bx8btfVoD79-H9-cYsv5lP02IIXkyECs,3553
 meerschaum/utils/venv/__init__.py,sha256=f3oi67lXYPLKJrnRW9lae7M3A8SFiC7DzaMoBdCVUFs,24609
-meerschaum-2.6.
-meerschaum-2.6.
-meerschaum-2.6.
-meerschaum-2.6.
-meerschaum-2.6.
-meerschaum-2.6.
-meerschaum-2.6.
-meerschaum-2.6.
+meerschaum-2.6.7.dist-info/LICENSE,sha256=jG2zQEdRNt88EgHUWPpXVWmOrOduUQRx7MnYV9YIPaw,11359
+meerschaum-2.6.7.dist-info/METADATA,sha256=jfFW7xnLFU6XBON2LGdhq2gvpFF5s-8fmNwrI30xetE,24757
+meerschaum-2.6.7.dist-info/NOTICE,sha256=OTA9Fcthjf5BRvWDDIcBC_xfLpeDV-RPZh3M-HQBRtQ,114
+meerschaum-2.6.7.dist-info/WHEEL,sha256=a7TGlA-5DaHMRrarXjVbQagU3Man_dCnGIWMJr5kRWo,91
+meerschaum-2.6.7.dist-info/entry_points.txt,sha256=5YBVzibw-0rNA_1VjB16z5GABsOGf-CDhW4yqH8C7Gc,88
+meerschaum-2.6.7.dist-info/top_level.txt,sha256=bNoSiDj0El6buocix-FRoAtJOeq1qOF5rRm2u9i7Q6A,11
+meerschaum-2.6.7.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+meerschaum-2.6.7.dist-info/RECORD,,