meerschaum 2.1.0rc2__py3-none-any.whl → 2.1.1rc1__py3-none-any.whl
This diff compares the contents of two publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registries.
- meerschaum/actions/bootstrap.py +1 -2
- meerschaum/actions/delete.py +15 -1
- meerschaum/actions/sync.py +4 -4
- meerschaum/api/routes/_pipes.py +7 -11
- meerschaum/config/__init__.py +0 -2
- meerschaum/config/_default.py +3 -0
- meerschaum/config/_version.py +1 -1
- meerschaum/config/static/__init__.py +4 -0
- meerschaum/connectors/sql/SQLConnector.py +43 -3
- meerschaum/connectors/sql/_cli.py +27 -3
- meerschaum/connectors/sql/_instance.py +164 -0
- meerschaum/connectors/sql/_pipes.py +344 -304
- meerschaum/connectors/sql/_sql.py +52 -14
- meerschaum/connectors/sql/tables/__init__.py +65 -13
- meerschaum/connectors/sql/tables/pipes.py +9 -0
- meerschaum/core/Pipe/__init__.py +1 -1
- meerschaum/core/Pipe/_data.py +3 -4
- meerschaum/core/Pipe/_delete.py +12 -2
- meerschaum/core/Pipe/_sync.py +2 -5
- meerschaum/utils/dataframe.py +20 -4
- meerschaum/utils/dtypes/__init__.py +15 -1
- meerschaum/utils/dtypes/sql.py +1 -0
- meerschaum/utils/sql.py +485 -64
- {meerschaum-2.1.0rc2.dist-info → meerschaum-2.1.1rc1.dist-info}/METADATA +1 -1
- {meerschaum-2.1.0rc2.dist-info → meerschaum-2.1.1rc1.dist-info}/RECORD +31 -29
- {meerschaum-2.1.0rc2.dist-info → meerschaum-2.1.1rc1.dist-info}/LICENSE +0 -0
- {meerschaum-2.1.0rc2.dist-info → meerschaum-2.1.1rc1.dist-info}/NOTICE +0 -0
- {meerschaum-2.1.0rc2.dist-info → meerschaum-2.1.1rc1.dist-info}/WHEEL +0 -0
- {meerschaum-2.1.0rc2.dist-info → meerschaum-2.1.1rc1.dist-info}/entry_points.txt +0 -0
- {meerschaum-2.1.0rc2.dist-info → meerschaum-2.1.1rc1.dist-info}/top_level.txt +0 -0
- {meerschaum-2.1.0rc2.dist-info → meerschaum-2.1.1rc1.dist-info}/zip-safe +0 -0
meerschaum/connectors/sql/_sql.py
CHANGED
@@ -167,7 +167,7 @@ def read(
    if debug:
        import time
        start = time.perf_counter()
-        dprint(query_or_table)
+        dprint(f"[{self}]\n{query_or_table}")
        dprint(f"[{self}] Fetching with chunksize: {chunksize}")

    ### This might be sqlalchemy object or the string of a table name.

@@ -432,9 +432,9 @@ def value(
    result, connection = self.exec(
        query,
        *args,
-        with_connection=True,
-        close=False,
-        commit=_commit,
+        with_connection = True,
+        close = False,
+        commit = _commit,
        **kw
    )
    first = result.first() if result is not None else None

@@ -547,7 +547,7 @@ def exec(
            if debug:
                dprint(f"[{self}] Failed to execute query:\n\n{query}\n\n{e}")
            if not silent:
-                warn(str(e))
+                warn(str(e), stacklevel=3)
            result = None
            if _commit:
                transaction.rollback()

@@ -563,7 +563,12 @@ def exec(

def exec_queries(
    self,
-    queries: List[
+    queries: List[
+        Union[
+            str,
+            Tuple[str, Callable[['sqlalchemy.orm.session.Session'], List[str]]]
+        ]
+    ],
    break_on_error: bool = False,
    rollback: bool = True,
    silent: bool = False,

@@ -574,8 +579,16 @@ def exec_queries(

    Parameters
    ----------
-    queries: List[
+    queries: List[
+        Union[
+            str,
+            Tuple[str, Callable[[], List[str]]]
+        ]
+    ]
        The queries in the transaction to be executed.
+        If a query is a tuple, the second item of the tuple
+        will be considered a callable hook that returns a list of queries to be executed
+        before the next item in the list.

    break_on_error: bool, default True
        If `True`, stop executing when a query fails.

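The tuple form lets callers interleave follow-up statements that depend on the state of the session. A minimal usage sketch, not taken from the package itself: the connector label, table name, and hook below are hypothetical, and `conn` is assumed to be a configured SQLConnector.

import meerschaum as mrsm

conn = mrsm.get_connector('sql', 'main')

def insert_rows(session) -> list:
    # Hook: receives the active SQLAlchemy session and returns follow-up
    # queries to run before the next item in the list.
    return ["INSERT INTO example_tmp (id) VALUES (1)"]

results = conn.exec_queries(
    [
        ("CREATE TABLE example_tmp (id INT)", insert_rows),
        "DROP TABLE example_tmp",
    ],
    break_on_error=True,
    debug=True,
)
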
@@ -593,18 +606,27 @@ def exec_queries(
    from meerschaum.utils.warnings import warn
    from meerschaum.utils.debug import dprint
    from meerschaum.utils.packages import attempt_import
-    sqlalchemy = attempt_import('sqlalchemy')
+    sqlalchemy, sqlalchemy_orm = attempt_import('sqlalchemy', 'sqlalchemy.orm')
+    session = sqlalchemy_orm.Session(self.engine)

+    result = None
    results = []
-    with
+    with session.begin():
        for query in queries:
-
-
+            hook = None
+            result = None
+
+            if isinstance(query, tuple):
+                query, hook = query
            if isinstance(query, str):
                query = sqlalchemy.text(query)

+            if debug:
+                dprint(f"[{self}]\n" + str(query))
+
            try:
-                result =
+                result = session.execute(query)
+                session.flush()
            except Exception as e:
                msg = (f"Encountered error while executing:\n{e}")
                if not silent:

@@ -612,11 +634,24 @@ def exec_queries(
                elif debug:
                    dprint(f"[{self}]\n" + str(msg))
                result = None
-            results.append(result)
            if result is None and break_on_error:
                if rollback:
-
+                    session.rollback()
                break
+            elif result is not None and hook is not None:
+                hook_queries = hook(session)
+                if hook_queries:
+                    hook_results = self.exec_queries(
+                        hook_queries,
+                        break_on_error = break_on_error,
+                        rollback = rollback,
+                        silent = silent,
+                        debug = debug,
+                    )
+                    result = (result, hook_results)
+
+            results.append(result)
+
    return results


@@ -875,6 +910,9 @@ def psql_insert_copy(
    data_iter: Iterable[Any]
        Iterable that iterates the values to be inserted

+    schema: Optional[str], default None
+        Optionally specify the schema of the table to be inserted into.
+
    Returns
    -------
    None

meerschaum/connectors/sql/tables/__init__.py
CHANGED
@@ -7,7 +7,8 @@ Define SQLAlchemy tables
"""

from __future__ import annotations
-from meerschaum.utils.typing import Optional, Dict, Union, InstanceConnector
+from meerschaum.utils.typing import Optional, Dict, Union, InstanceConnector, List
+from meerschaum.utils.warnings import error, warn

### store a tables dict for each connector
connector_tables = {}

@@ -42,7 +43,6 @@ def get_tables(
    """
    from meerschaum.utils.debug import dprint
    from meerschaum.utils.formatting import pprint
-    from meerschaum.utils.warnings import error
    from meerschaum.connectors.parse import parse_instance_keys
    from meerschaum.utils.packages import attempt_import
    from meerschaum.utils.sql import json_flavors

@@ -99,15 +99,18 @@ def get_tables(
            kw.update({'server_default': sequences[k].next_value()})

    _tables = {
-        'users'
-        '
+        'users': sqlalchemy.Table(
+            'mrsm_users',
            conn.metadata,
            sqlalchemy.Column(
                *id_col_args['user_id'],
                **id_col_kw['user_id'],
            ),
            sqlalchemy.Column(
-                'username',
+                'username',
+                sqlalchemy.String(256),
+                index = index_names,
+                nullable = False,
            ),
            sqlalchemy.Column('password_hash', sqlalchemy.String(1024)),
            sqlalchemy.Column('email', sqlalchemy.String(256)),

@@ -115,9 +118,9 @@ def get_tables(
            sqlalchemy.Column('attributes', params_type),
            extend_existing = True,
        ),
-        'plugins'
+        'plugins': sqlalchemy.Table(
            *([
-                '
+                'mrsm_plugins',
                conn.metadata,
                sqlalchemy.Column(
                    *id_col_args['plugin_id'],

@@ -130,26 +133,37 @@ def get_tables(
                sqlalchemy.Column('version', sqlalchemy.String(256)),
                sqlalchemy.Column('attributes', params_type),
            ] + ([
-                sqlalchemy.ForeignKeyConstraint(['user_id'], ['
+                sqlalchemy.ForeignKeyConstraint(['user_id'], ['mrsm_users.user_id']),
            ] if conn.flavor != 'duckdb' else [])),
            extend_existing = True,
        ),
    }

    _tables['pipes'] = sqlalchemy.Table(
-        "
+        "mrsm_pipes",
        conn.metadata,
        sqlalchemy.Column(
            *id_col_args['pipe_id'],
            **id_col_kw['pipe_id'],
        ),
        sqlalchemy.Column(
-            "connector_keys",
+            "connector_keys",
+            sqlalchemy.String(256),
+            index = index_names,
+            nullable = False,
        ),
        sqlalchemy.Column(
-            "metric_key",
+            "metric_key",
+            sqlalchemy.String(256),
+            index = index_names,
+            nullable = False,
+        ),
+        sqlalchemy.Column(
+            "location_key",
+            sqlalchemy.String(256),
+            index = index_names,
+            nullable = True,
        ),
-        sqlalchemy.Column("location_key", sqlalchemy.String(256), index=index_names),
        sqlalchemy.Column("parameters", params_type),
        extend_existing = True,
    )

@@ -157,6 +171,11 @@ def get_tables(
    ### store the table dict for reuse (per connector)
    connector_tables[conn] = _tables
    if create:
+        create_schemas(
+            conn,
+            schemas = [conn.internal_schema],
+            debug = debug,
+        )
        create_tables(conn, tables=_tables)

    return connector_tables[conn]

@@ -169,7 +188,6 @@ def create_tables(
    """
    Create the tables on the database.
    """
-    from meerschaum.utils.warnings import warn
    _tables = tables if tables is not None else get_tables(conn)
    try:
        conn.metadata.create_all(bind=conn.engine)

@@ -181,3 +199,37 @@
    return True


+def create_schemas(
+    conn: 'meerschaum.connectors.SQLConnector',
+    schemas: List[str],
+    debug: bool = False,
+) -> bool:
+    """
+    Create the internal Meerschaum schema on the database.
+    """
+    from meerschaum.config.static import STATIC_CONFIG
+    from meerschaum.utils.packages import attempt_import
+    from meerschaum.utils.sql import sql_item_name, NO_SCHEMA_FLAVORS, SKIP_IF_EXISTS_FLAVORS
+    if conn.flavor in NO_SCHEMA_FLAVORS:
+        return True
+
+    sqlalchemy_schema = attempt_import('sqlalchemy.schema')
+    successes = {}
+    skip_if_not_exists = conn.flavor in SKIP_IF_EXISTS_FLAVORS
+    if_not_exists_str = ("IF NOT EXISTS " if not skip_if_not_exists else "")
+    with conn.engine.connect() as connection:
+        for schema in schemas:
+            if not schema:
+                continue
+            schema_name = sql_item_name(schema, conn.flavor)
+            schema_exists = conn.engine.dialect.has_schema(connection, schema)
+            if schema_exists:
+                continue
+
+            create_schema_query = f"CREATE SCHEMA {if_not_exists_str}{schema_name}"
+            try:
+                result = conn.exec(create_schema_query, debug=debug)
+                successes[schema] = (result is not None)
+            except Exception as e:
+                warn(f"Failed to create internal schema '{schema}':\n{e}")
+    return all(successes.values())

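The new create_schemas() helper above is what get_tables() now calls with the connector's internal_schema before creating tables. A hedged sketch of invoking it directly, assuming an existing SQLConnector (the connector label is illustrative):

import meerschaum as mrsm
from meerschaum.connectors.sql.tables import create_schemas

conn = mrsm.get_connector('sql', 'main')

# No-op on flavors without schema support (NO_SCHEMA_FLAVORS); otherwise
# issues CREATE SCHEMA (with IF NOT EXISTS where supported) for missing schemas.
success = create_schemas(conn, schemas=[conn.internal_schema], debug=True)
print(success)
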
meerschaum/core/Pipe/__init__.py
CHANGED
@@ -140,7 +140,7 @@ class Pipe:
        metric: str = '',
        location: Optional[str] = None,
        parameters: Optional[Dict[str, Any]] = None,
-        columns:
+        columns: Union[Dict[str, str], List[str], None] = None,
        tags: Optional[List[str]] = None,
        target: Optional[str] = None,
        dtypes: Optional[Dict[str, str]] = None,

meerschaum/core/Pipe/_data.py
CHANGED
@@ -536,10 +536,9 @@ def get_chunk_interval(
        return timedelta(minutes=chunk_minutes)

    dt_dtype = self.dtypes.get(dt_col, 'datetime64[ns]')
-    if '
-        return
-
-    return chunk_minutes
+    if 'int' in dt_dtype.lower():
+        return chunk_minutes
+    return timedelta(minutes=chunk_minutes)


def get_chunk_bounds(

meerschaum/core/Pipe/_delete.py
CHANGED
@@ -10,6 +10,7 @@ from meerschaum.utils.typing import SuccessTuple

def delete(
    self,
+    drop: bool = True,
    debug: bool = False,
    **kw
) -> SuccessTuple:

@@ -18,7 +19,10 @@ def delete(

    Parameters
    ----------
-
+    drop: bool, default True
+        If `True`, drop the pipes' target table.
+
+    debug : bool, default False
        Verbosity toggle.

    Returns

@@ -49,11 +53,17 @@ def delete(
        except Exception as e:
            warn(f"Could not delete cache file '{_cache_db_path}' for {self}:\n{e}")

+    if drop:
+        drop_success, drop_msg = self.drop(debug=debug)
+        if not drop_success:
+            warn(f"Failed to drop {self}:\n{drop_msg}")
+
    with Venv(get_connector_plugin(self.instance_connector)):
        result = self.instance_connector.delete_pipe(self, debug=debug, **kw)

    if not isinstance(result, tuple):
-        return False, f"Received unexpected result from '{self.instance_connector}': {result}"
+        return False, f"Received an unexpected result from '{self.instance_connector}': {result}"
+
    if result[0]:
        to_delete = ['_id']
        for member in to_delete:

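A brief usage sketch of the new drop flag (the connector and metric keys are illustrative, and the pipe is assumed to be registered on the instance):

import meerschaum as mrsm

pipe = mrsm.Pipe('sql:main', 'example_metric', instance='sql:main')

# Deleting now drops the pipe's target table by default;
# pass drop=False to keep the table and only remove the registration.
success, msg = pipe.delete(drop=False, debug=True)
print(success, msg)
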
meerschaum/core/Pipe/_sync.py
CHANGED
@@ -548,6 +548,7 @@ def filter_existing(
    )
    from meerschaum.utils.dtypes import (
        to_pandas_dtype,
+        none_if_null,
    )
    from meerschaum.config import get_config
    pd = import_pandas()

@@ -645,11 +646,7 @@ def filter_existing(
    _ = kw.pop('params', None)
    params = {
        col: [
-            (
-                val
-                if str(val).lower() not in ('none', 'na', 'nan')
-                else None
-            )
+            none_if_null(val)
            for val in unique_vals
        ]
        for col, unique_vals in unique_index_vals.items()

meerschaum/utils/dataframe.py
CHANGED
@@ -174,14 +174,22 @@ def filter_unseen_df(
            f"Was not able to cast the new DataFrame to the given dtypes.\n{e}"
        )

+    new_numeric_cols_existing = get_numeric_cols(new_df)
+    old_numeric_cols = get_numeric_cols(old_df)
    for col, typ in {k: v for k, v in dtypes.items()}.items():
        if not are_dtypes_equal(new_df_dtypes.get(col, 'None'), old_df_dtypes.get(col, 'None')):
            new_is_float = are_dtypes_equal(new_df_dtypes.get(col, 'None'), 'float')
            new_is_int = are_dtypes_equal(new_df_dtypes.get(col, 'None'), 'int')
+            new_is_numeric = col in new_numeric_cols_existing
            old_is_float = are_dtypes_equal(old_df_dtypes.get(col, 'None'), 'float')
            old_is_int = are_dtypes_equal(old_df_dtypes.get(col, 'None'), 'int')
+            old_is_numeric = col in old_numeric_cols

-            if (
+            if (
+                (new_is_float or new_is_int or new_is_numeric)
+                and
+                (old_is_float or old_is_int or old_is_numeric)
+            ):
                dtypes[col] = attempt_cast_to_numeric
                cast_cols = True
                continue

@@ -199,7 +207,11 @@ def filter_unseen_df(
        for col, dtype in dtypes.items():
            if col in new_df.columns:
                try:
-                    new_df[col] =
+                    new_df[col] = (
+                        new_df[col].astype(dtype)
+                        if not callable(dtype)
+                        else new_df[col].apply(dtype)
+                    )
                except Exception as e:
                    warn(f"Was not able to cast column '{col}' to dtype '{dtype}'.\n{e}")

@@ -213,7 +225,6 @@ def filter_unseen_df(
        new_df[json_col] = new_df[json_col].apply(serializer)

    new_numeric_cols = get_numeric_cols(new_df)
-    old_numeric_cols = get_numeric_cols(old_df)
    numeric_cols = set(new_numeric_cols + old_numeric_cols)
    for numeric_col in old_numeric_cols:
        old_df[numeric_col] = old_df[numeric_col].apply(

@@ -578,6 +589,7 @@ def enforce_dtypes(
        for col, typ in dtypes.items()
        if typ == 'numeric'
    ]
+    df_numeric_cols = get_numeric_cols(df)
    if debug:
        dprint(f"Desired data types:")
        pprint(dtypes)

@@ -658,7 +670,11 @@ def enforce_dtypes(
            mixed_numeric_types = (is_dtype_numeric(typ) and is_dtype_numeric(previous_typ))
            explicitly_float = are_dtypes_equal(dtypes.get(col, 'object'), 'float')
            explicitly_numeric = dtypes.get(col, 'numeric') == 'numeric'
-            cast_to_numeric =
+            cast_to_numeric = (
+                explicitly_numeric
+                or col in df_numeric_cols
+                or (mixed_numeric_types and not explicitly_float)
+            )
            if cast_to_numeric:
                common_dtypes[col] = attempt_cast_to_numeric
                common_diff_dtypes[col] = attempt_cast_to_numeric

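In effect, a column is coerced to Decimal-backed 'numeric' values when it is explicitly typed 'numeric', already holds Decimal values, or mixes int and float dtypes without being explicitly float. A hedged sketch assuming the call signature enforce_dtypes(df, dtypes); the comment shows expected behavior, not captured output:

import pandas as pd
from meerschaum.utils.dataframe import enforce_dtypes

df = pd.DataFrame({'price': [1, 2]})
result = enforce_dtypes(df, {'price': 'numeric'})
# Expect Decimal objects rather than plain ints, e.g. [Decimal('1'), Decimal('2')].
print(result['price'].tolist())
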
meerschaum/utils/dtypes/__init__.py
CHANGED
@@ -170,13 +170,27 @@ def attempt_cast_to_numeric(value: Any) -> Any:
    try:
        return (
            Decimal(str(value))
-            if
+            if not value_is_null(value)
            else Decimal('NaN')
        )
    except Exception as e:
        return value


+def value_is_null(value: Any) -> Any:
+    """
+    Determine if a value is a null-like string.
+    """
+    return str(value).lower() in ('none', 'nan', 'na', 'nat', '', '<na>')
+
+
+def none_if_null(value: Any) -> Any:
+    """
+    Return `None` if a value is a null-like string.
+    """
+    return (None if value_is_null(value) else value)
+
+
def quantize_decimal(x: Decimal, scale: int, precision: int) -> Decimal:
    """
    Quantize a given `Decimal` to a known scale and precision.

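Since the two new helpers are small, pure functions, their behavior can be illustrated directly (a quick sketch; expected results are noted in comments):

from meerschaum.utils.dtypes import attempt_cast_to_numeric, none_if_null, value_is_null

print(value_is_null('NaT'))             # True: 'nat' is treated as a null-like string
print(value_is_null(3.14))              # False
print(none_if_null('<NA>'))             # None
print(none_if_null('2023-01-01'))       # '2023-01-01', returned unchanged
print(attempt_cast_to_numeric('1.50'))  # Decimal('1.50')
print(attempt_cast_to_numeric(None))    # Decimal('NaN'), since None is null-like
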
meerschaum/utils/dtypes/sql.py
CHANGED
@@ -19,6 +19,7 @@ NUMERIC_PRECISION_FLAVORS: Dict[str, Tuple[int, int]] = {

DB_TO_PD_DTYPES: Dict[str, Union[str, Dict[str, str]]] = {
    'FLOAT': 'float64[pyarrow]',
+    'REAL': 'float64[pyarrow]',
    'DOUBLE_PRECISION': 'float64[pyarrow]',
    'DOUBLE': 'float64[pyarrow]',
    'DECIMAL': 'numeric',