meerschaum 2.6.16__py3-none-any.whl → 2.7.0__py3-none-any.whl
This diff compares the contents of package versions that were publicly released to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.
- meerschaum/_internal/arguments/_parse_arguments.py +1 -1
- meerschaum/actions/delete.py +65 -69
- meerschaum/actions/edit.py +22 -2
- meerschaum/actions/install.py +1 -2
- meerschaum/actions/sync.py +2 -3
- meerschaum/api/routes/_pipes.py +7 -8
- meerschaum/config/_default.py +1 -1
- meerschaum/config/_paths.py +2 -1
- meerschaum/config/_version.py +1 -1
- meerschaum/connectors/api/_pipes.py +18 -21
- meerschaum/connectors/sql/_create_engine.py +3 -3
- meerschaum/connectors/sql/_instance.py +11 -12
- meerschaum/connectors/sql/_pipes.py +143 -91
- meerschaum/connectors/sql/_sql.py +43 -8
- meerschaum/connectors/valkey/_pipes.py +12 -1
- meerschaum/core/Pipe/__init__.py +23 -13
- meerschaum/core/Pipe/_attributes.py +25 -1
- meerschaum/core/Pipe/_dtypes.py +23 -16
- meerschaum/core/Pipe/_sync.py +59 -31
- meerschaum/core/Pipe/_verify.py +8 -7
- meerschaum/jobs/_Job.py +4 -1
- meerschaum/plugins/_Plugin.py +11 -14
- meerschaum/utils/daemon/Daemon.py +22 -15
- meerschaum/utils/dataframe.py +178 -16
- meerschaum/utils/dtypes/__init__.py +149 -14
- meerschaum/utils/dtypes/sql.py +41 -7
- meerschaum/utils/misc.py +8 -8
- meerschaum/utils/packages/_packages.py +1 -1
- meerschaum/utils/schedule.py +8 -3
- meerschaum/utils/sql.py +180 -100
- meerschaum/utils/venv/_Venv.py +4 -4
- meerschaum/utils/venv/__init__.py +53 -20
- {meerschaum-2.6.16.dist-info → meerschaum-2.7.0.dist-info}/METADATA +2 -2
- {meerschaum-2.6.16.dist-info → meerschaum-2.7.0.dist-info}/RECORD +40 -40
- {meerschaum-2.6.16.dist-info → meerschaum-2.7.0.dist-info}/LICENSE +0 -0
- {meerschaum-2.6.16.dist-info → meerschaum-2.7.0.dist-info}/NOTICE +0 -0
- {meerschaum-2.6.16.dist-info → meerschaum-2.7.0.dist-info}/WHEEL +0 -0
- {meerschaum-2.6.16.dist-info → meerschaum-2.7.0.dist-info}/entry_points.txt +0 -0
- {meerschaum-2.6.16.dist-info → meerschaum-2.7.0.dist-info}/top_level.txt +0 -0
- {meerschaum-2.6.16.dist-info → meerschaum-2.7.0.dist-info}/zip-safe +0 -0
meerschaum/utils/sql.py
CHANGED
@@ -7,6 +7,7 @@ Flavor-specific SQL tools.
 """

 from __future__ import annotations
+
 from datetime import datetime, timezone, timedelta
 import meerschaum as mrsm
 from meerschaum.utils.typing import Optional, Dict, Any, Union, List, Iterable, Tuple
@@ -50,10 +51,12 @@ update_queries = {
 {sets_subquery_none}
 FROM {target_table_name} AS t
 INNER JOIN (SELECT DISTINCT {patch_cols_str} FROM {patch_table_name}) AS p
-ON
+ON
+{and_subquery_t}
 WHERE
 {and_subquery_f}
-AND
+AND
+{date_bounds_subquery}
 """,
 'timescaledb-upsert': """
 INSERT INTO {target_table_name} ({patch_cols_str})
@@ -82,9 +85,11 @@ update_queries = {
 'mysql': """
 UPDATE {target_table_name} AS f
 JOIN (SELECT DISTINCT {patch_cols_str} FROM {patch_table_name}) AS p
-ON
+ON
+{and_subquery_f}
 {sets_subquery_f}
-WHERE
+WHERE
+{date_bounds_subquery}
 """,
 'mysql-upsert': """
 INSERT {ignore}INTO {target_table_name} ({patch_cols_str})
@@ -96,9 +101,11 @@ update_queries = {
 'mariadb': """
 UPDATE {target_table_name} AS f
 JOIN (SELECT DISTINCT {patch_cols_str} FROM {patch_table_name}) AS p
-ON
+ON
+{and_subquery_f}
 {sets_subquery_f}
-WHERE
+WHERE
+{date_bounds_subquery}
 """,
 'mariadb-upsert': """
 INSERT {ignore}INTO {target_table_name} ({patch_cols_str})
@@ -108,34 +115,56 @@ update_queries = {
 {cols_equal_values}
 """,
 'mssql': """
+{with_temp_date_bounds}
 MERGE {target_table_name} f
-USING (SELECT
-ON
-
+USING (SELECT {patch_cols_str} FROM {patch_table_name}) p
+ON
+{and_subquery_f}
+AND
+{date_bounds_subquery}
 WHEN MATCHED THEN
 UPDATE
 {sets_subquery_none};
 """,
-'mssql-upsert':
+'mssql-upsert': [
+"{identity_insert_on}",
+"""
+{with_temp_date_bounds}
 MERGE {target_table_name} f
-USING (SELECT
-ON
-
-
+USING (SELECT {patch_cols_str} FROM {patch_table_name}) p
+ON
+{and_subquery_f}
+AND
+{date_bounds_subquery}{when_matched_update_sets_subquery_none}
 WHEN NOT MATCHED THEN
 INSERT ({patch_cols_str})
 VALUES ({patch_cols_prefixed_str});
-
+""",
+"{identity_insert_off}",
+],
 'oracle': """
 MERGE INTO {target_table_name} f
-USING (SELECT
+USING (SELECT {patch_cols_str} FROM {patch_table_name}) p
 ON (
 {and_subquery_f}
-AND
+AND
+{date_bounds_subquery}
 )
-
-
-
+WHEN MATCHED THEN
+UPDATE
+{sets_subquery_none}
+""",
+'oracle-upsert': """
+MERGE INTO {target_table_name} f
+USING (SELECT {patch_cols_str} FROM {patch_table_name}) p
+ON (
+{and_subquery_f}
+AND
+{date_bounds_subquery}
+){when_matched_update_sets_subquery_none}
+WHEN NOT MATCHED THEN
+INSERT ({patch_cols_str})
+VALUES ({patch_cols_prefixed_str})
 """,
 'sqlite-upsert': """
 INSERT INTO {target_table_name} ({patch_cols_str})
@@ -323,7 +352,11 @@ columns_indices_queries = {
 CASE
 WHEN kc.type = 'PK' THEN 'PRIMARY KEY'
 ELSE 'INDEX'
-END AS [index_type]
+END AS [index_type],
+CASE
+WHEN i.type = 1 THEN CAST(1 AS BIT)
+ELSE CAST(0 AS BIT)
+END AS [clustered]
 FROM
 sys.schemas s
 INNER JOIN sys.tables t
@@ -425,20 +458,10 @@ reset_autoincrement_queries: Dict[str, Union[str, List[str]]] = {
 SET seq = {val}
 WHERE name = '{table}'
 """,
-'oracle':
-""
-
-
-current_val NUMBER;
-BEGIN
-SELECT {table_seq_name}.NEXTVAL INTO current_val FROM dual;
-
-WHILE current_val < max_id LOOP
-SELECT {table_seq_name}.NEXTVAL INTO current_val FROM dual;
-END LOOP;
-END;
-""",
-],
+'oracle': (
+"ALTER TABLE {table_name} MODIFY {column_name} "
+"GENERATED BY DEFAULT ON NULL AS IDENTITY (START WITH {val_plus_1})"
+),
 }
 table_wrappers = {
 'default' : ('"', '"'),
@@ -499,7 +522,8 @@ def dateadd_str(
 flavor: str = 'postgresql',
 datepart: str = 'day',
 number: Union[int, float] = 0,
-begin: Union[str, datetime, int] = 'now'
+begin: Union[str, datetime, int] = 'now',
+db_type: Optional[str] = None,
 ) -> str:
 """
 Generate a `DATEADD` clause depending on database flavor.
@@ -538,6 +562,10 @@ def dateadd_str(
 begin: Union[str, datetime], default `'now'`
 Base datetime to which to add dateparts.

+db_type: Optional[str], default None
+If provided, cast the datetime string as the type.
+Otherwise, infer this from the input datetime value.
+
 Returns
 -------
 The appropriate `DATEADD` string for the corresponding database flavor.
@@ -549,7 +577,7 @@ def dateadd_str(
 ... begin = datetime(2022, 1, 1, 0, 0),
 ... number = 1,
 ... )
-"DATEADD(day, 1, CAST('2022-01-01 00:00:00' AS
+"DATEADD(day, 1, CAST('2022-01-01 00:00:00' AS DATETIME2))"
 >>> dateadd_str(
 ... flavor = 'postgresql',
 ... begin = datetime(2022, 1, 1, 0, 0),
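Illustrative note: a minimal sketch of how the new `db_type` parameter could be used to pin the cast type instead of letting `dateadd_str` infer it from the input value. The exact rendered SQL depends on the flavor's type mapping, and the `DATETIMEOFFSET` value below is only an assumption for illustration.

    from datetime import datetime
    from meerschaum.utils.sql import dateadd_str

    # Inferred cast: a naive datetime maps to the flavor's default datetime type
    # (DATETIME2 on MSSQL, per the docstring example above).
    inferred = dateadd_str(
        flavor='mssql',
        datepart='day',
        number=1,
        begin=datetime(2022, 1, 1),
    )

    # Pinned cast: the caller supplies the database type explicitly
    # (hypothetical choice of DATETIMEOFFSET).
    pinned = dateadd_str(
        flavor='mssql',
        datepart='day',
        number=1,
        begin=datetime(2022, 1, 1),
        db_type='DATETIMEOFFSET',
    )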
@@ -592,7 +620,7 @@ def dateadd_str(
 )

 dt_is_utc = begin_time.tzinfo is not None if begin_time is not None else '+' in str(begin)
-db_type = get_db_type_from_pd_type(
+db_type = db_type or get_db_type_from_pd_type(
 ('datetime64[ns, UTC]' if dt_is_utc else 'datetime64[ns]'),
 flavor=flavor,
 )
@@ -717,7 +745,7 @@ def get_distinct_col_count(
 result = connector.value(_meta_query, debug=debug)
 try:
 return int(result)
-except Exception
+except Exception:
 return None


@@ -727,12 +755,15 @@ def sql_item_name(item: str, flavor: str, schema: Optional[str] = None) -> str:

 Parameters
 ----------
-item: str
+item: str
 The database item (table, view, etc.) in need of quotes.

-flavor: str
+flavor: str
 The database flavor (`'postgresql'`, `'mssql'`, `'sqllite'`, etc.).

+schema: Optional[str], default None
+If provided, prefix the table name with the schema.
+
 Returns
 -------
 A `str` which contains the input `item` wrapped in the corresponding escape characters.
@@ -764,6 +795,8 @@ def sql_item_name(item: str, flavor: str, schema: Optional[str] = None) -> str:
 ### NOTE: SQLite does not support schemas.
 if flavor == 'sqlite':
 schema = None
+elif flavor == 'mssql' and str(item).startswith('#'):
+schema = None

 schema_prefix = (
 (wrappers[0] + schema + wrappers[1] + '.')
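As a rough illustration of the schema handling above (the result strings are expectations inferred from the quoting logic, not captured output), MSSQL temporary tables now skip the schema prefix:

    from meerschaum.utils.sql import sql_item_name

    sql_item_name('weather', 'postgresql', schema='public')   # expected: "public"."weather"
    sql_item_name('#weather_patch', 'mssql', schema='dbo')    # temp table: schema prefix is dropped
    sql_item_name('weather', 'sqlite', schema='main')         # SQLite: the schema is always ignored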
@@ -1066,7 +1099,7 @@ def get_sqlalchemy_table(
 connector.metadata,
 **table_kwargs
 )
-except sqlalchemy.exc.NoSuchTableError
+except sqlalchemy.exc.NoSuchTableError:
 warn(f"Table '{truncated_table_name}' does not exist in '{connector}'.")
 return None
 return tables[truncated_table_name]
@@ -1119,6 +1152,7 @@ def get_table_cols_types(
 -------
 A dictionary mapping column names to data types.
 """
+import textwrap
 from meerschaum.connectors import SQLConnector
 sqlalchemy = mrsm.attempt_import('sqlalchemy')
 flavor = flavor or getattr(connectable, 'flavor', None)
@@ -1144,7 +1178,7 @@ def get_table_cols_types(
 )

 cols_types_query = sqlalchemy.text(
-columns_types_queries.get(
+textwrap.dedent(columns_types_queries.get(
 flavor,
 columns_types_queries['default']
 ).format(
@@ -1155,7 +1189,7 @@ def get_table_cols_types(
 table_upper=table_upper,
 table_upper_trunc=table_upper_trunc,
 db_prefix=db_prefix,
-)
+)).lstrip().rstrip()
 )

 cols = ['database', 'schema', 'table', 'column', 'type']
@@ -1269,6 +1303,7 @@ def get_table_cols_indices(
 -------
 A dictionary mapping column names to a list of indices.
 """
+import textwrap
 from collections import defaultdict
 from meerschaum.connectors import SQLConnector
 sqlalchemy = mrsm.attempt_import('sqlalchemy')
@@ -1295,7 +1330,7 @@ def get_table_cols_indices(
 )

 cols_indices_query = sqlalchemy.text(
-columns_indices_queries.get(
+textwrap.dedent(columns_indices_queries.get(
 flavor,
 columns_indices_queries['default']
 ).format(
@@ -1307,10 +1342,12 @@ def get_table_cols_indices(
 table_upper_trunc=table_upper_trunc,
 db_prefix=db_prefix,
 schema=schema,
-)
+)).lstrip().rstrip()
 )

 cols = ['database', 'schema', 'table', 'column', 'index', 'index_type']
+if flavor == 'mssql':
+cols.append('clustered')
 result_cols_ix = dict(enumerate(cols))

 debug_kwargs = {'debug': debug} if isinstance(connectable, SQLConnector) else {}
@@ -1351,7 +1388,6 @@ def get_table_cols_indices(
 )
 )
 ]
-
 ### NOTE: This may return incorrect columns if the schema is not explicitly stated.
 if cols_types_docs and not cols_types_docs_filtered:
 cols_types_docs_filtered = cols_types_docs
@@ -1367,12 +1403,13 @@ def get_table_cols_indices(
 else doc['column']
 )
 )
-
-
-
-
-
-
+index_doc = {
+'name': doc.get('index', None),
+'type': doc.get('index_type', None)
+}
+if flavor == 'mssql':
+index_doc['clustered'] = doc.get('clustered', None)
+cols_indices[col].append(index_doc)

 return dict(cols_indices)
 except Exception as e:
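Based on the loop above, each per-column entry is now a small dictionary, with a `clustered` flag included only for MSSQL. A hedged sketch of the shape `get_table_cols_indices()` might return for an MSSQL table (names and values are made up, and the `clustered` value depends on what the driver returns for the BIT cast):

    {
        'id': [
            {'name': 'PK_weather', 'type': 'PRIMARY KEY', 'clustered': True},
        ],
        'timestamp': [
            {'name': 'IX_weather_timestamp', 'type': 'INDEX', 'clustered': False},
        ],
    }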
@@ -1393,6 +1430,7 @@ def get_update_queries(
 datetime_col: Optional[str] = None,
 schema: Optional[str] = None,
 patch_schema: Optional[str] = None,
+identity_insert: bool = False,
 debug: bool = False,
 ) -> List[str]:
 """
@@ -1430,6 +1468,10 @@ def get_update_queries(
 If provided, use this schema when quoting the patch table.
 Defaults to `schema`.

+identity_insert: bool, default False
+If `True`, include `SET IDENTITY_INSERT` queries before and after the update queries.
+Only applies for MSSQL upserts.
+
 debug: bool, default False
 Verbosity toggle.

@@ -1437,9 +1479,11 @@ def get_update_queries(
 -------
 A list of query strings to perform the update operation.
 """
+import textwrap
 from meerschaum.connectors import SQLConnector
 from meerschaum.utils.debug import dprint
-from meerschaum.utils.dtypes
+from meerschaum.utils.dtypes import are_dtypes_equal
+from meerschaum.utils.dtypes.sql import DB_FLAVORS_CAST_DTYPES, get_pd_type_from_db_type
 flavor = flavor or (connectable.flavor if isinstance(connectable, SQLConnector) else None)
 if not flavor:
 raise ValueError("Provide a flavor if using a SQLAlchemy session.")
@@ -1532,21 +1576,35 @@ def get_update_queries(
 def sets_subquery(l_prefix: str, r_prefix: str):
 if not value_cols:
 return ''
+
+cast_func_cols = {
+c_name: (
+('', '', '')
+if (
+flavor == 'oracle'
+and are_dtypes_equal(get_pd_type_from_db_type(c_type), 'bytes')
+)
+else (
+('CAST(', f" AS {c_type.replace('_', ' ')}", ')')
+if flavor != 'sqlite'
+else ('', '', '')
+)
+)
+for c_name, c_type in value_cols
+}
 return 'SET ' + ',\n'.join([
 (
 l_prefix + sql_item_name(c_name, flavor, None)
 + ' = '
-+
-+ r_prefix
-+
-+
-+ (c_type.replace('_', ' ') if flavor != 'sqlite' else '')
-+ (')' if flavor != 'sqlite' else '')
++ cast_func_cols[c_name][0]
++ r_prefix + sql_item_name(c_name, flavor, None)
++ cast_func_cols[c_name][1]
++ cast_func_cols[c_name][2]
 ) for c_name, c_type in value_cols
 ])

 def and_subquery(l_prefix: str, r_prefix: str):
-return '\
+return '\n AND\n '.join([
 (
 "COALESCE("
 + l_prefix
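The `cast_func_cols` lookup above wraps each value column in a `CAST(...)` except on SQLite (no cast) and for Oracle binary columns (no cast). A hypothetical rendered `SET` clause for PostgreSQL, shown as a Python string (the column names and types are made up):

    # Hypothetical output of sets_subquery('', 'p.') for flavor='postgresql':
    example_sets_subquery = (
        'SET "value" = CAST(p."value" AS DOUBLE PRECISION),\n'
        '"station" = CAST(p."station" AS TEXT)'
    )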
@@ -1554,7 +1612,7 @@ def get_update_queries(
 + ", "
 + get_null_replacement(c_type, flavor)
 + ")"
-+ '
++ '\n =\n '
 + "COALESCE("
 + r_prefix
 + sql_item_name(c_name, flavor, None)
@@ -1564,22 +1622,39 @@ def get_update_queries(
 ) for c_name, c_type in join_cols_types
 ])

+skip_query_val = ""
 target_table_name = sql_item_name(target, flavor, schema)
 patch_table_name = sql_item_name(patch, flavor, patch_schema)
 dt_col_name = sql_item_name(datetime_col, flavor, None) if datetime_col else None
+date_bounds_table = patch_table_name if flavor != 'mssql' else '[date_bounds]'
+min_dt_col_name = f"MIN({dt_col_name})" if flavor != 'mssql' else '[Min_dt]'
+max_dt_col_name = f"MAX({dt_col_name})" if flavor != 'mssql' else '[Max_dt]'
 date_bounds_subquery = (
-f"""
-
-
-"""
+f"""f.{dt_col_name} >= (SELECT {min_dt_col_name} FROM {date_bounds_table})
+AND
+f.{dt_col_name} <= (SELECT {max_dt_col_name} FROM {date_bounds_table})"""
 if datetime_col
 else "1 = 1"
 )
+with_temp_date_bounds = f"""WITH [date_bounds] AS (
+SELECT MIN({dt_col_name}) AS {min_dt_col_name}, MAX({dt_col_name}) AS {max_dt_col_name}
+FROM {patch_table_name}
+)""" if datetime_col else ""
+identity_insert_on = (
+f"SET IDENTITY_INSERT {target_table_name} ON"
+if identity_insert
+else skip_query_val
+)
+identity_insert_off = (
+f"SET IDENTITY_INSERT {target_table_name} OFF"
+if identity_insert
+else skip_query_val
+)

 ### NOTE: MSSQL upserts must exclude the update portion if only upserting indices.
 when_matched_update_sets_subquery_none = "" if not value_cols else (
-"WHEN MATCHED THEN"
-f"
+"\n WHEN MATCHED THEN\n"
+f" UPDATE {sets_subquery('', 'p.')}"
 )

 cols_equal_values = '\n,'.join(
@@ -1595,8 +1670,8 @@ def get_update_queries(
 )
 ignore = "IGNORE " if not value_cols else ""

-
-base_query.format(
+formatted_queries = [
+textwrap.dedent(base_query.format(
 sets_subquery_none=sets_subquery('', 'p.'),
 sets_subquery_none_excluded=sets_subquery('', 'EXCLUDED.'),
 sets_subquery_f=sets_subquery('f.', 'p.'),
@@ -1614,10 +1689,16 @@ def get_update_queries(
 cols_equal_values=cols_equal_values,
 on_duplicate_key_update=on_duplicate_key_update,
 ignore=ignore,
-
+with_temp_date_bounds=with_temp_date_bounds,
+identity_insert_on=identity_insert_on,
+identity_insert_off=identity_insert_off,
+)).lstrip().rstrip()
 for base_query in base_queries
 ]

+### NOTE: Allow for skipping some queries.
+return [query for query in formatted_queries if query]
+

 def get_null_replacement(typ: str, flavor: str) -> str:
 """
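Putting the pieces together: for an MSSQL upsert, the `'mssql-upsert'` template list now renders to up to three statements, and the blank entries produced when `identity_insert=False` are dropped by the final filter. A rough sketch of the returned list (table names, columns, and whitespace are illustrative only; the real MERGE body is rendered from the templates above):

    # Hypothetical result of get_update_queries(..., identity_insert=True) on MSSQL:
    [
        "SET IDENTITY_INSERT [dbo].[weather] ON",
        "WITH [date_bounds] AS (...) MERGE [dbo].[weather] f USING (...) p ON ... "
        "WHEN MATCHED THEN UPDATE ... WHEN NOT MATCHED THEN INSERT ...;",
        "SET IDENTITY_INSERT [dbo].[weather] OFF",
    ]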
@@ -1655,11 +1736,14 @@ def get_null_replacement(typ: str, flavor: str) -> str:
 )
 return f'CAST({val_to_cast} AS {bool_typ})'
 if 'time' in typ.lower() or 'date' in typ.lower():
-
+db_type = typ if typ.isupper() else None
+return dateadd_str(flavor=flavor, begin='1900-01-01', db_type=db_type)
 if 'float' in typ.lower() or 'double' in typ.lower() or typ.lower() in ('decimal',):
 return '-987654321.0'
 if flavor == 'oracle' and typ.lower().split('(', maxsplit=1)[0] == 'char':
 return "'-987654321'"
+if flavor == 'oracle' and typ.lower() in ('blob', 'bytes'):
+return '00'
 if typ.lower() in ('uniqueidentifier', 'guid', 'uuid'):
 magic_val = 'DEADBEEF-ABBA-BABE-CAFE-DECAFC0FFEE5'
 if flavor == 'mssql':
@@ -1867,7 +1951,17 @@ def _get_create_table_query_from_dtypes(

 table_name = sql_item_name(new_table, schema=schema, flavor=flavor)
 primary_key_name = sql_item_name(primary_key, flavor) if primary_key else None
+primary_key_constraint_name = (
+sql_item_name(f'PK_{new_table}', flavor, None)
+if primary_key
+else None
+)
 datetime_column_name = sql_item_name(datetime_column, flavor) if datetime_column else None
+primary_key_clustered = (
+"CLUSTERED"
+if not datetime_column or datetime_column == primary_key
+else "NONCLUSTERED"
+)
 query = f"CREATE TABLE {table_name} ("
 if primary_key:
 col_db_type = cols_types[0][1]
@@ -1887,6 +1981,8 @@ def _get_create_table_query_from_dtypes(
 query += f"\n {col_name} {col_db_type} {auto_increment_str} PRIMARY KEY,"
 elif flavor == 'timescaledb' and datetime_column and datetime_column != primary_key:
 query += f"\n {col_name} {col_db_type}{auto_increment_str} NOT NULL,"
+elif flavor == 'mssql':
+query += f"\n {col_name} {col_db_type}{auto_increment_str} NOT NULL,"
 else:
 query += f"\n {col_name} {col_db_type} PRIMARY KEY{auto_increment_str} NOT NULL,"

@@ -1902,6 +1998,10 @@ def _get_create_table_query_from_dtypes(
 and datetime_column != primary_key
 ):
 query += f"\n PRIMARY KEY({datetime_column_name}, {primary_key_name}),"
+
+if flavor == 'mssql' and primary_key:
+query += f"\n CONSTRAINT {primary_key_constraint_name} PRIMARY KEY {primary_key_clustered} ({primary_key_name}),"
+
 query = query[:-1]
 query += "\n)"

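Under these changes, a CREATE TABLE emitted for MSSQL appears to declare the primary key as a named constraint, clustered unless a separate datetime column takes precedence. A hypothetical rendering shown as a Python string (column types and the identity clause are placeholders, not actual output):

    # Hypothetical output of _get_create_table_query_from_dtypes() for flavor='mssql'
    # with primary_key='id' and datetime_column='timestamp':
    example_create_table = """CREATE TABLE [dbo].[weather] (
        [id] INT NOT NULL,
        [timestamp] DATETIME2,
        [value] FLOAT,
        CONSTRAINT [PK_weather] PRIMARY KEY NONCLUSTERED ([id])
    )"""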
@@ -1922,12 +2022,11 @@ def _get_create_table_query_from_cte(
 Create a new table from a CTE query.
 """
 import textwrap
-from meerschaum.utils.dtypes.sql import AUTO_INCREMENT_COLUMN_FLAVORS
 create_cte = 'create_query'
 create_cte_name = sql_item_name(create_cte, flavor, None)
 new_table_name = sql_item_name(new_table, flavor, schema)
 primary_key_constraint_name = (
-sql_item_name(f'
+sql_item_name(f'PK_{new_table}', flavor, None)
 if primary_key
 else None
 )
@@ -1936,6 +2035,7 @@ def _get_create_table_query_from_cte(
 if primary_key
 else None
 )
+primary_key_clustered = "CLUSTERED" if not datetime_column else "NONCLUSTERED"
 datetime_column_name = (
 sql_item_name(datetime_column, flavor)
 if datetime_column
@@ -1943,7 +2043,7 @@ def _get_create_table_query_from_cte(
 )
 if flavor in ('mssql',):
 query = query.lstrip()
-if
+if query.lower().startswith('with '):
 final_select_ix = query.lower().rfind('select')
 create_table_query = (
 query[:final_select_ix].rstrip() + ',\n'
@@ -1961,7 +2061,7 @@ def _get_create_table_query_from_cte(

 alter_type_query = f"""
 ALTER TABLE {new_table_name}
-ADD CONSTRAINT {primary_key_constraint_name} PRIMARY KEY ({primary_key_name})
+ADD CONSTRAINT {primary_key_constraint_name} PRIMARY KEY {primary_key_clustered} ({primary_key_name})
 """
 elif flavor in (None,):
 create_table_query = f"""
@@ -2009,11 +2109,11 @@ def _get_create_table_query_from_cte(
 ADD PRIMARY KEY ({primary_key_name})
 """

-create_table_query = textwrap.dedent(create_table_query)
+create_table_query = textwrap.dedent(create_table_query).lstrip().rstrip()
 if not primary_key:
 return [create_table_query]

-alter_type_query = textwrap.dedent(alter_type_query)
+alter_type_query = textwrap.dedent(alter_type_query).lstrip().rstrip()

 return [
 create_table_query,
@@ -2225,29 +2325,8 @@ def get_reset_autoincrement_queries(
 schema = schema or connector.schema
 max_id_name = sql_item_name('max_id', connector.flavor)
 table_name = sql_item_name(table, connector.flavor, schema)
-table_trunc = truncate_item_name(table, connector.flavor)
 table_seq_name = sql_item_name(table + '_' + column + '_seq', connector.flavor, schema)
 column_name = sql_item_name(column, connector.flavor)
-if connector.flavor == 'oracle':
-potential_table_names = set([
-f"'{table_trunc.upper()}'",
-f"'{table_trunc}'",
-f"'{table_name}'",
-f"'{table_name.upper()}'",
-])
-df = connector.read(
-"""
-SELECT SEQUENCE_NAME
-FROM ALL_TAB_IDENTITY_COLS
-WHERE TABLE_NAME IN ("""
-+ ", ".join([name for name in potential_table_names])
-+ """)
-""",
-debug=debug
-)
-if len(df) > 0:
-table_seq_name = df['sequence_name'][0]
-
 max_id = connector.value(
 f"""
 SELECT COALESCE(MAX({column_name}), 0) AS {max_id_name}
@@ -2272,7 +2351,8 @@ def get_reset_autoincrement_queries(
 table=table,
 table_name=table_name,
 table_seq_name=table_seq_name,
-val=
+val=max_id,
+val_plus_1=(max_id + 1),
 )
 for query in reset_queries
 ]
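The Oracle branch no longer walks the sequence with NEXTVAL; it appears to simply re-anchor the identity column at the next value. A sketch of the single statement the new template would render (the table and column names are illustrative, with MAX(id) assumed to be 42):

    # Hypothetical rendering of the new 'oracle' reset-autoincrement template:
    example_reset_query = (
        'ALTER TABLE "weather" MODIFY "id" '
        'GENERATED BY DEFAULT ON NULL AS IDENTITY (START WITH 43)'
    )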
meerschaum/utils/venv/_Venv.py
CHANGED
@@ -34,10 +34,10 @@ class Venv:
 """

 def __init__(
-
-
-
-
+self,
+venv: Union[str, 'meerschaum.plugins.Plugin', None] = 'mrsm',
+debug: bool = False,
+) -> None:
 from meerschaum.utils.venv import activate_venv, deactivate_venv, active_venvs
 ### For some weird threading issue,
 ### we can't use `isinstance` here.