dcs-sdk 1.6.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data_diff/__init__.py +221 -0
- data_diff/__main__.py +517 -0
- data_diff/abcs/__init__.py +13 -0
- data_diff/abcs/compiler.py +27 -0
- data_diff/abcs/database_types.py +402 -0
- data_diff/config.py +141 -0
- data_diff/databases/__init__.py +38 -0
- data_diff/databases/_connect.py +323 -0
- data_diff/databases/base.py +1417 -0
- data_diff/databases/bigquery.py +376 -0
- data_diff/databases/clickhouse.py +217 -0
- data_diff/databases/databricks.py +262 -0
- data_diff/databases/duckdb.py +207 -0
- data_diff/databases/mssql.py +343 -0
- data_diff/databases/mysql.py +189 -0
- data_diff/databases/oracle.py +238 -0
- data_diff/databases/postgresql.py +293 -0
- data_diff/databases/presto.py +222 -0
- data_diff/databases/redis.py +93 -0
- data_diff/databases/redshift.py +233 -0
- data_diff/databases/snowflake.py +222 -0
- data_diff/databases/sybase.py +720 -0
- data_diff/databases/trino.py +73 -0
- data_diff/databases/vertica.py +174 -0
- data_diff/diff_tables.py +489 -0
- data_diff/errors.py +17 -0
- data_diff/format.py +369 -0
- data_diff/hashdiff_tables.py +1026 -0
- data_diff/info_tree.py +76 -0
- data_diff/joindiff_tables.py +434 -0
- data_diff/lexicographic_space.py +253 -0
- data_diff/parse_time.py +88 -0
- data_diff/py.typed +0 -0
- data_diff/queries/__init__.py +13 -0
- data_diff/queries/api.py +213 -0
- data_diff/queries/ast_classes.py +811 -0
- data_diff/queries/base.py +38 -0
- data_diff/queries/extras.py +43 -0
- data_diff/query_utils.py +70 -0
- data_diff/schema.py +67 -0
- data_diff/table_segment.py +583 -0
- data_diff/thread_utils.py +112 -0
- data_diff/utils.py +1022 -0
- data_diff/version.py +15 -0
- dcs_core/__init__.py +13 -0
- dcs_core/__main__.py +17 -0
- dcs_core/__version__.py +15 -0
- dcs_core/cli/__init__.py +13 -0
- dcs_core/cli/cli.py +165 -0
- dcs_core/core/__init__.py +19 -0
- dcs_core/core/common/__init__.py +13 -0
- dcs_core/core/common/errors.py +50 -0
- dcs_core/core/common/models/__init__.py +13 -0
- dcs_core/core/common/models/configuration.py +284 -0
- dcs_core/core/common/models/dashboard.py +24 -0
- dcs_core/core/common/models/data_source_resource.py +75 -0
- dcs_core/core/common/models/metric.py +160 -0
- dcs_core/core/common/models/profile.py +75 -0
- dcs_core/core/common/models/validation.py +216 -0
- dcs_core/core/common/models/widget.py +44 -0
- dcs_core/core/configuration/__init__.py +13 -0
- dcs_core/core/configuration/config_loader.py +139 -0
- dcs_core/core/configuration/configuration_parser.py +262 -0
- dcs_core/core/configuration/configuration_parser_arc.py +328 -0
- dcs_core/core/datasource/__init__.py +13 -0
- dcs_core/core/datasource/base.py +62 -0
- dcs_core/core/datasource/manager.py +112 -0
- dcs_core/core/datasource/search_datasource.py +421 -0
- dcs_core/core/datasource/sql_datasource.py +1094 -0
- dcs_core/core/inspect.py +163 -0
- dcs_core/core/logger/__init__.py +13 -0
- dcs_core/core/logger/base.py +32 -0
- dcs_core/core/logger/default_logger.py +94 -0
- dcs_core/core/metric/__init__.py +13 -0
- dcs_core/core/metric/base.py +220 -0
- dcs_core/core/metric/combined_metric.py +98 -0
- dcs_core/core/metric/custom_metric.py +34 -0
- dcs_core/core/metric/manager.py +137 -0
- dcs_core/core/metric/numeric_metric.py +403 -0
- dcs_core/core/metric/reliability_metric.py +90 -0
- dcs_core/core/profiling/__init__.py +13 -0
- dcs_core/core/profiling/datasource_profiling.py +136 -0
- dcs_core/core/profiling/numeric_field_profiling.py +72 -0
- dcs_core/core/profiling/text_field_profiling.py +67 -0
- dcs_core/core/repository/__init__.py +13 -0
- dcs_core/core/repository/metric_repository.py +77 -0
- dcs_core/core/utils/__init__.py +13 -0
- dcs_core/core/utils/log.py +29 -0
- dcs_core/core/utils/tracking.py +105 -0
- dcs_core/core/utils/utils.py +44 -0
- dcs_core/core/validation/__init__.py +13 -0
- dcs_core/core/validation/base.py +230 -0
- dcs_core/core/validation/completeness_validation.py +153 -0
- dcs_core/core/validation/custom_query_validation.py +24 -0
- dcs_core/core/validation/manager.py +282 -0
- dcs_core/core/validation/numeric_validation.py +276 -0
- dcs_core/core/validation/reliability_validation.py +91 -0
- dcs_core/core/validation/uniqueness_validation.py +61 -0
- dcs_core/core/validation/validity_validation.py +738 -0
- dcs_core/integrations/__init__.py +13 -0
- dcs_core/integrations/databases/__init__.py +13 -0
- dcs_core/integrations/databases/bigquery.py +187 -0
- dcs_core/integrations/databases/databricks.py +51 -0
- dcs_core/integrations/databases/db2.py +652 -0
- dcs_core/integrations/databases/elasticsearch.py +61 -0
- dcs_core/integrations/databases/mssql.py +829 -0
- dcs_core/integrations/databases/mysql.py +409 -0
- dcs_core/integrations/databases/opensearch.py +64 -0
- dcs_core/integrations/databases/oracle.py +719 -0
- dcs_core/integrations/databases/postgres.py +482 -0
- dcs_core/integrations/databases/redshift.py +53 -0
- dcs_core/integrations/databases/snowflake.py +48 -0
- dcs_core/integrations/databases/spark_df.py +111 -0
- dcs_core/integrations/databases/sybase.py +1069 -0
- dcs_core/integrations/storage/__init__.py +13 -0
- dcs_core/integrations/storage/local_file.py +149 -0
- dcs_core/integrations/utils/__init__.py +13 -0
- dcs_core/integrations/utils/utils.py +36 -0
- dcs_core/report/__init__.py +13 -0
- dcs_core/report/dashboard.py +211 -0
- dcs_core/report/models.py +88 -0
- dcs_core/report/static/assets/fonts/DMSans-Bold.ttf +0 -0
- dcs_core/report/static/assets/fonts/DMSans-Medium.ttf +0 -0
- dcs_core/report/static/assets/fonts/DMSans-Regular.ttf +0 -0
- dcs_core/report/static/assets/fonts/DMSans-SemiBold.ttf +0 -0
- dcs_core/report/static/assets/images/docs.svg +6 -0
- dcs_core/report/static/assets/images/github.svg +4 -0
- dcs_core/report/static/assets/images/logo.svg +7 -0
- dcs_core/report/static/assets/images/slack.svg +13 -0
- dcs_core/report/static/index.js +2 -0
- dcs_core/report/static/index.js.LICENSE.txt +3971 -0
- dcs_sdk/__init__.py +13 -0
- dcs_sdk/__main__.py +18 -0
- dcs_sdk/__version__.py +15 -0
- dcs_sdk/cli/__init__.py +13 -0
- dcs_sdk/cli/cli.py +163 -0
- dcs_sdk/sdk/__init__.py +58 -0
- dcs_sdk/sdk/config/__init__.py +13 -0
- dcs_sdk/sdk/config/config_loader.py +491 -0
- dcs_sdk/sdk/data_diff/__init__.py +13 -0
- dcs_sdk/sdk/data_diff/data_differ.py +821 -0
- dcs_sdk/sdk/rules/__init__.py +15 -0
- dcs_sdk/sdk/rules/rules_mappping.py +31 -0
- dcs_sdk/sdk/rules/rules_repository.py +214 -0
- dcs_sdk/sdk/rules/schema_rules.py +65 -0
- dcs_sdk/sdk/utils/__init__.py +13 -0
- dcs_sdk/sdk/utils/serializer.py +25 -0
- dcs_sdk/sdk/utils/similarity_score/__init__.py +13 -0
- dcs_sdk/sdk/utils/similarity_score/base_provider.py +153 -0
- dcs_sdk/sdk/utils/similarity_score/cosine_similarity_provider.py +39 -0
- dcs_sdk/sdk/utils/similarity_score/jaccard_provider.py +24 -0
- dcs_sdk/sdk/utils/similarity_score/levenshtein_distance_provider.py +31 -0
- dcs_sdk/sdk/utils/table.py +475 -0
- dcs_sdk/sdk/utils/themes.py +40 -0
- dcs_sdk/sdk/utils/utils.py +349 -0
- dcs_sdk-1.6.5.dist-info/METADATA +150 -0
- dcs_sdk-1.6.5.dist-info/RECORD +159 -0
- dcs_sdk-1.6.5.dist-info/WHEEL +4 -0
- dcs_sdk-1.6.5.dist-info/entry_points.txt +4 -0
|
@@ -0,0 +1,262 @@
|
|
|
1
|
+
# Copyright 2022-present, the Waterdip Labs Pvt. Ltd.
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
|
|
15
|
+
import logging
|
|
16
|
+
import math
|
|
17
|
+
from typing import Any, ClassVar, Dict, Sequence, Type
|
|
18
|
+
|
|
19
|
+
import attrs
|
|
20
|
+
|
|
21
|
+
from data_diff.abcs.database_types import (
|
|
22
|
+
Boolean,
|
|
23
|
+
ColType,
|
|
24
|
+
Date,
|
|
25
|
+
DbPath,
|
|
26
|
+
Decimal,
|
|
27
|
+
Float,
|
|
28
|
+
Integer,
|
|
29
|
+
NumericType,
|
|
30
|
+
TemporalType,
|
|
31
|
+
Text,
|
|
32
|
+
Timestamp,
|
|
33
|
+
UnknownColType,
|
|
34
|
+
)
|
|
35
|
+
from data_diff.databases.base import (
|
|
36
|
+
CHECKSUM_HEXDIGITS,
|
|
37
|
+
CHECKSUM_OFFSET,
|
|
38
|
+
MD5_HEXDIGITS,
|
|
39
|
+
BaseDialect,
|
|
40
|
+
ThreadedDatabase,
|
|
41
|
+
import_helper,
|
|
42
|
+
parse_table_name,
|
|
43
|
+
)
|
|
44
|
+
from data_diff.schema import RawColumnInfo
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
@import_helper(text="You can install it using 'pip install databricks-sql-connector'")
def import_databricks():
    """Lazily import the Databricks SQL connector.

    Loading ``databricks.sql`` ensures the ``sql`` submodule is initialized;
    the top-level ``databricks`` package object is returned so callers can
    reach ``databricks.sql.connect`` and ``databricks.sql.exc``.
    """
    import importlib

    importlib.import_module("databricks.sql")
    return importlib.import_module("databricks")
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
@attrs.define(frozen=False)
class Dialect(BaseDialect):
    """SQL dialect for Databricks (Spark SQL flavor).

    Generates backtick-quoted identifiers and Spark SQL expressions for
    normalizing timestamps, numbers and booleans into comparable strings.
    """

    name = "Databricks"
    ROUNDS_ON_PREC_LOSS = True
    # Mapping of Databricks type names (as reported by the cursor metadata)
    # to data-diff column-type classes.
    TYPE_CLASSES = {
        # Numbers
        "INT": Integer,
        "SMALLINT": Integer,
        "TINYINT": Integer,
        "BIGINT": Integer,
        "FLOAT": Float,
        "DOUBLE": Float,
        "DECIMAL": Decimal,
        # Timestamps
        "TIMESTAMP": Timestamp,
        "TIMESTAMP_NTZ": Timestamp,
        "DATE": Date,
        # Text
        "STRING": Text,
        "VARCHAR": Text,
        # Boolean
        "BOOLEAN": Boolean,
    }

    def type_repr(self, t) -> str:
        """Return the Databricks SQL name for a Python type (str -> STRING)."""
        try:
            return {str: "STRING"}[t]
        except KeyError:
            return super().type_repr(t)

    def quote(self, s: str, is_table: bool = False) -> str:
        # Databricks quotes identifiers with backticks, not double quotes.
        return f"`{s}`"

    def to_string(self, s: str) -> str:
        return f"cast({s} as string)"

    def _convert_db_precision_to_digits(self, p: int) -> int:
        # Subtracting 2 due to wierd precision issues
        return max(super()._convert_db_precision_to_digits(p) - 2, 0)

    def set_timezone_to_utc(self) -> str:
        return "SET TIME ZONE 'UTC'"

    def parse_table_name(self, name: str) -> DbPath:
        """Parse a dotted table name, dropping missing (None) path components."""
        path = parse_table_name(name)
        return tuple(i for i in path if i is not None)

    def md5_as_int(self, s: str) -> str:
        """SQL expression for the last CHECKSUM_HEXDIGITS of md5(s) as a signed number."""
        return f"cast(conv(substr(md5({s}), {1+MD5_HEXDIGITS-CHECKSUM_HEXDIGITS}), 16, 10) as decimal(38, 0)) - {CHECKSUM_OFFSET}"

    def md5_as_hex(self, s: str) -> str:
        return f"md5({s})"

    def normalize_timestamp(self, value: str, coltype: TemporalType) -> str:
        """Databricks timestamp contains no more than 6 digits in precision"""
        # Not every TemporalType carries `is_date`; default to False.
        # (Was a bare `except:`, which also swallowed KeyboardInterrupt/SystemExit.)
        is_date = getattr(coltype, "is_date", False)
        if isinstance(coltype, Date) or is_date:
            return f"date_format({value}, 'yyyy-MM-dd')"
        if coltype.rounds:
            # cast to timestamp due to unix_micros() requiring timestamp
            timestamp = f"cast(round(unix_micros(cast({value} as timestamp)) / 1000000, {coltype.precision}) * 1000000 as bigint)"
            return f"date_format(timestamp_micros({timestamp}), 'yyyy-MM-dd HH:mm:ss.SSSSSS')"

        # Truncate: keep `precision` fractional digits, zero-pad the rest to 6.
        precision_format = "S" * coltype.precision + "0" * (6 - coltype.precision)
        return f"date_format({value}, 'yyyy-MM-dd HH:mm:ss.{precision_format}')"

    def normalize_number(self, value: str, coltype: NumericType) -> str:
        """Render a numeric column as a plain decimal string with fixed scale."""
        value = f"cast({value} as decimal(38, {coltype.precision}))"
        if coltype.precision > 0:
            value = f"format_number({value}, {coltype.precision})"
        # format_number inserts thousands separators; strip them.
        return f"replace({self.to_string(value)}, ',', '')"

    def normalize_boolean(self, value: str, _coltype: Boolean) -> str:
        # Booleans compare as "0"/"1" strings.
        return self.to_string(f"cast ({value} as int)")
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
@attrs.define(frozen=False, init=False, kw_only=True)
class Databricks(ThreadedDatabase):
    """Databricks connector (via the ``databricks-sql-connector`` package).

    Schema information is read through the cursor metadata API rather than
    INFORMATION_SCHEMA, which is unavailable on Databricks SQL endpoints.
    """

    DIALECT_CLASS: ClassVar[Type[BaseDialect]] = Dialect
    CONNECT_URI_HELP = "databricks://:<access_token>@<host>/<http_path>"
    CONNECT_URI_PARAMS = ["catalog", "schema"]

    # Default catalog used when the table path omits one.
    catalog: str
    # Raw connect kwargs: expects "host", "http_path", "access_token".
    _args: Dict[str, Any]

    def __init__(self, *, thread_count, **kw) -> None:
        super().__init__(thread_count=thread_count)
        # The connector logs verbosely at INFO; keep only warnings and up.
        logging.getLogger("databricks.sql").setLevel(logging.WARNING)

        self._args = kw
        self.default_schema = kw.get("schema", "default")
        self.catalog = kw.get("catalog", "hive_metastore")

    def create_connection(self):
        """Open a new connection; connector errors surface as ConnectionError."""
        databricks = import_databricks()

        try:
            return databricks.sql.connect(
                server_hostname=self._args["host"],
                http_path=self._args["http_path"],
                access_token=self._args["access_token"],
                catalog=self.catalog,
            )
        except databricks.sql.exc.Error as e:
            raise ConnectionError(*e.args) from e

    def query_table_schema(self, path: DbPath) -> Dict[str, RawColumnInfo]:
        """Fetch raw column metadata for `path` via the cursor's columns() API.

        Raises RuntimeError when the table does not exist or has no columns.
        """
        # Databricks has INFORMATION_SCHEMA only for Databricks Runtime, not for Databricks SQL.
        # https://docs.databricks.com/spark/latest/spark-sql/language-manual/information-schema/columns.html
        # So, to obtain information about schema, we should use another approach.

        conn = self.create_connection()

        catalog, schema, table = self._normalize_table_path(path)
        # Fix: close the cursor (via the `with` block) BEFORE the connection.
        # Previously conn.close() ran in a `finally` inside the cursor context,
        # so the cursor's own cleanup executed against a closed connection.
        try:
            with conn.cursor() as cursor:
                cursor.columns(catalog_name=catalog, schema_name=schema, table_name=table)
                rows = cursor.fetchall()
        finally:
            conn.close()

        if not rows:
            raise RuntimeError(f"{self.name}: Table '{'.'.join(path)}' does not exist, or has no columns")

        d = {
            r.COLUMN_NAME: RawColumnInfo(
                column_name=r.COLUMN_NAME, data_type=r.TYPE_NAME, numeric_precision=r.DECIMAL_DIGITS
            )
            for r in rows
        }
        # Duplicate column names would collapse into one dict entry.
        assert len(d) == len(rows)
        return d

    def _process_table_schema(
        self, path: DbPath, raw_schema: Dict[str, RawColumnInfo], filter_columns: Sequence[str], where: str = None
    ):
        """Normalize raw cursor metadata into ColType objects for the selected columns."""
        accept = {i.lower() for i in filter_columns}
        col_infos = [row for name, row in raw_schema.items() if name.lower() in accept]

        resulted_rows = []
        for info in col_infos:
            raw_data_type = info.data_type
            # Strip any parenthesized suffix, e.g. "DECIMAL(10,2)" -> "DECIMAL".
            row_type = info.data_type.split("(")[0]
            info = attrs.evolve(info, data_type=row_type)
            type_cls = self.dialect.TYPE_CLASSES.get(row_type, UnknownColType)

            if issubclass(type_cls, Integer):
                info = attrs.evolve(info, numeric_scale=0)

            elif issubclass(type_cls, Float):
                # Convert binary (bit) precision to decimal digits.
                numeric_precision = math.ceil(info.numeric_precision / math.log(2, 10))
                info = attrs.evolve(info, numeric_precision=numeric_precision)

            elif issubclass(type_cls, Decimal):
                # raw_data_type is "DECIMAL(p,s)"; skip the 8-char "DECIMAL(" prefix.
                items = raw_data_type[8:].rstrip(")").split(",")
                numeric_precision, numeric_scale = int(items[0]), int(items[1])
                info = attrs.evolve(
                    info,
                    numeric_precision=numeric_precision,
                    numeric_scale=numeric_scale,
                )

            elif issubclass(type_cls, Timestamp):
                # For timestamps the cursor reports fractional-second digits
                # in DECIMAL_DIGITS, which we stored as numeric_precision.
                info = attrs.evolve(
                    info,
                    datetime_precision=info.numeric_precision,
                    numeric_precision=None,
                )

            else:
                info = attrs.evolve(info, numeric_precision=None)

            resulted_rows.append(info)

        col_dict: Dict[str, ColType] = {info.column_name: self.dialect.parse_type(path, info) for info in resulted_rows}

        self._refine_coltypes(path, col_dict, where)
        return col_dict

    @property
    def is_autocommit(self) -> bool:
        # Databricks SQL endpoints auto-commit; no transaction management needed.
        return True

    def _normalize_table_path(self, path: DbPath) -> DbPath:
        """Expand a 1- or 2-part table path to (catalog, schema, table)."""
        if len(path) == 1:
            return self.catalog, self.default_schema, path[0]
        elif len(path) == 2:
            return self.catalog, path[0], path[1]
        elif len(path) == 3:
            return path

        raise ValueError(
            f"{self.name}: Bad table path for {self}: '{'.'.join(path)}'. Expected format: table, schema.table, or catalog.schema.table"
        )
|
|
@@ -0,0 +1,207 @@
|
|
|
1
|
+
# Copyright 2022-present, the Waterdip Labs Pvt. Ltd.
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
|
|
15
|
+
from typing import Any, ClassVar, Dict, Type, Union
|
|
16
|
+
|
|
17
|
+
import attrs
|
|
18
|
+
from packaging.version import parse as parse_version
|
|
19
|
+
|
|
20
|
+
from data_diff.abcs.database_types import (
|
|
21
|
+
Boolean,
|
|
22
|
+
ColType,
|
|
23
|
+
DbPath,
|
|
24
|
+
Decimal,
|
|
25
|
+
Float,
|
|
26
|
+
FractionalType,
|
|
27
|
+
Integer,
|
|
28
|
+
Native_UUID,
|
|
29
|
+
TemporalType,
|
|
30
|
+
Text,
|
|
31
|
+
Timestamp,
|
|
32
|
+
TimestampTZ,
|
|
33
|
+
)
|
|
34
|
+
from data_diff.databases.base import (
|
|
35
|
+
CHECKSUM_HEXDIGITS,
|
|
36
|
+
CHECKSUM_OFFSET,
|
|
37
|
+
MD5_HEXDIGITS,
|
|
38
|
+
TIMESTAMP_PRECISION_POS,
|
|
39
|
+
BaseDialect,
|
|
40
|
+
ConnectError,
|
|
41
|
+
Database,
|
|
42
|
+
ThreadLocalInterpreter,
|
|
43
|
+
import_helper,
|
|
44
|
+
)
|
|
45
|
+
from data_diff.schema import RawColumnInfo
|
|
46
|
+
from data_diff.utils import match_regexps
|
|
47
|
+
from data_diff.version import __version__
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
@import_helper("duckdb")
def import_duckdb():
    """Lazily import and return the third-party ``duckdb`` module."""
    import importlib

    return importlib.import_module("duckdb")
|
55
|
+
|
|
56
|
+
|
|
57
|
+
@attrs.define(frozen=False)
class Dialect(BaseDialect):
    """SQL dialect for DuckDB.

    Produces double-quoted identifiers and DuckDB expressions for normalizing
    timestamps, numbers and booleans into comparable strings.
    """

    name = "DuckDB"
    ROUNDS_ON_PREC_LOSS = False
    SUPPORTS_PRIMARY_KEY = True
    SUPPORTS_INDEXES = True

    # https://duckdb.org/docs/sql/data_types/numeric#fixed-point-decimals
    # The default WIDTH and SCALE is DECIMAL(18, 3), if none are specified.
    DEFAULT_NUMERIC_PRECISION = 3

    # Mapping of DuckDB type names to data-diff column-type classes.
    TYPE_CLASSES = {
        # Timestamps
        "TIMESTAMP WITH TIME ZONE": TimestampTZ,
        "TIMESTAMP": Timestamp,
        # Numbers
        "DOUBLE": Float,
        "FLOAT": Float,
        "DECIMAL": Decimal,
        "INTEGER": Integer,
        "BIGINT": Integer,
        # Text
        "VARCHAR": Text,
        "TEXT": Text,
        # UUID
        "UUID": Native_UUID,
        # Bool
        "BOOLEAN": Boolean,
    }

    def quote(self, s: str, is_table: bool = False):
        return f'"{s}"'

    def to_string(self, s: str):
        return f"{s}::VARCHAR"

    def _convert_db_precision_to_digits(self, p: int) -> int:
        # Subtracting 2 due to wierd precision issues in PostgreSQL
        # Fix: clamp at 0 (as the Databricks dialect does) so very small
        # precisions cannot produce a negative digit count.
        return max(super()._convert_db_precision_to_digits(p) - 2, 0)

    def parse_type(self, table_path: DbPath, info: RawColumnInfo) -> ColType:
        """Parse DECIMAL(p,s) types directly; delegate everything else to the base."""
        regexps = {
            r"DECIMAL\((\d+),(\d+)\)": Decimal,
        }

        for m, t_cls in match_regexps(regexps, info.data_type):
            # For decimals, data-diff "precision" means the scale (digits after the dot).
            precision = int(m.group(2))
            return t_cls(precision=precision)

        return super().parse_type(table_path, info)

    def set_timezone_to_utc(self) -> str:
        return "SET GLOBAL TimeZone='UTC'"

    def current_timestamp(self) -> str:
        return "current_timestamp"

    def md5_as_int(self, s: str) -> str:
        """SQL expression for the last CHECKSUM_HEXDIGITS of md5(s) as a signed BIGINT."""
        return f"('0x' || SUBSTRING(md5({s}), {1+MD5_HEXDIGITS-CHECKSUM_HEXDIGITS},{CHECKSUM_HEXDIGITS}))::BIGINT - {CHECKSUM_OFFSET}"

    def md5_as_hex(self, s: str) -> str:
        return f"md5({s})"

    def normalize_timestamp(self, value: str, coltype: TemporalType) -> str:
        # It's precision 6 by default. If precision is less than 6 -> we remove the trailing numbers.
        if coltype.rounds and coltype.precision > 0:
            return f"CONCAT(SUBSTRING(STRFTIME({value}::TIMESTAMP, '%Y-%m-%d %H:%M:%S.'),1,23), LPAD(((ROUND(strftime({value}::timestamp, '%f')::DECIMAL(15,7)/100000,{coltype.precision-1})*100000)::INT)::VARCHAR,6,'0'))"

        return f"rpad(substring(strftime({value}::timestamp, '%Y-%m-%d %H:%M:%S.%f'),1,{TIMESTAMP_PRECISION_POS+coltype.precision}),26,'0')"

    def normalize_number(self, value: str, coltype: FractionalType) -> str:
        # Fix the scale before stringifying so values compare textually.
        return self.to_string(f"{value}::DECIMAL(38, {coltype.precision})")

    def normalize_boolean(self, value: str, _coltype: Boolean) -> str:
        # Booleans compare as "0"/"1" strings.
        return self.to_string(f"{value}::INTEGER")
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
@attrs.define(frozen=False, init=False, kw_only=True)
class DuckDB(Database):
    """Connector for an in-process DuckDB database file.

    Unlike the networked databases, DuckDB runs embedded, so a single shared
    connection (``_conn``) is opened eagerly in ``__init__`` and reused.
    """

    DIALECT_CLASS: ClassVar[Type[BaseDialect]] = Dialect
    SUPPORTS_UNIQUE_CONSTAINT = False  # Temporary, until we implement it
    CONNECT_URI_HELP = "duckdb://<dbname>@<filepath>"
    CONNECT_URI_PARAMS = ["database", "dbpath"]

    # Raw connect kwargs (create_connection reads "filepath") and the live connection.
    _args: Dict[str, Any] = attrs.field(init=False)
    _conn: Any = attrs.field(init=False)

    def __init__(self, **kw) -> None:
        """Store connect kwargs and open the database file immediately."""
        super().__init__()
        self._args = kw
        self._conn = self.create_connection()
        self.default_schema = "main"

    @property
    def is_autocommit(self) -> bool:
        # DuckDB auto-commits; no explicit transaction handling is needed.
        return True

    def _query(self, sql_code: Union[str, ThreadLocalInterpreter]):
        "Uses the standard SQL cursor interface"
        return self._query_conn(self._conn, sql_code)

    def close(self):
        # Release base-class resources first, then the shared connection.
        super().close()
        self._conn.close()

    def create_connection(self):
        """Open the DuckDB file named by ``_args["filepath"]``.

        On duckdb >= 0.9.2 a custom user agent is set and then verified via
        ``PRAGMA USER_AGENT``; a failed verification (AssertionError) and any
        OperationalError are both re-raised as ConnectError.
        """
        ddb = import_duckdb()
        try:
            # custom_user_agent is only available in duckdb >= 0.9.2
            if parse_version(ddb.__version__) >= parse_version("0.9.2"):
                custom_user_agent = f"dcs-diff/v{__version__}"
                config = {"custom_user_agent": custom_user_agent}
                connection = ddb.connect(database=self._args["filepath"], config=config)
                # Verify the agent string round-trips through the engine.
                custom_user_agent_results = connection.sql("PRAGMA USER_AGENT;").fetchall()
                custom_user_agent_filtered = custom_user_agent_results[0][0]
                assert custom_user_agent in custom_user_agent_filtered
            else:
                connection = ddb.connect(database=self._args["filepath"])
            return connection
        except ddb.OperationalError as e:
            raise ConnectError(*e.args) from e
        except AssertionError:
            raise ConnectError("Assertion failed: Custom user agent is invalid.") from None

    def select_table_schema(self, path: DbPath) -> str:
        """Build the information_schema query returning (name, type, precisions, scale, collation, length)."""
        database, schema, table = self._normalize_table_path(path)

        info_schema_path = ["information_schema", "columns"]

        if database:
            info_schema_path.insert(0, database)
            dynamic_database_clause = f"'{database}'"
        else:
            # No catalog in the path: restrict to the current one at query time.
            dynamic_database_clause = "current_catalog()"

        return (
            f"SELECT column_name, data_type, datetime_precision, numeric_precision, numeric_scale, collation_name, character_maximum_length FROM {'.'.join(info_schema_path)} "
            f"WHERE table_name = '{table}' AND table_schema = '{schema}' and table_catalog = {dynamic_database_clause}"
        )

    def _normalize_table_path(self, path: DbPath) -> DbPath:
        """Expand a 1- or 2-part table path to (database, schema, table); database may be None."""
        if len(path) == 1:
            return None, self.default_schema, path[0]
        elif len(path) == 2:
            return None, path[0], path[1]
        elif len(path) == 3:
            return path

        raise ValueError(
            f"{self.name}: Bad table path for {self}: '{'.'.join(path)}'. Expected format: table, schema.table, or database.schema.table"
        )
|