dcs_sdk-1.6.5-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data_diff/__init__.py +221 -0
- data_diff/__main__.py +517 -0
- data_diff/abcs/__init__.py +13 -0
- data_diff/abcs/compiler.py +27 -0
- data_diff/abcs/database_types.py +402 -0
- data_diff/config.py +141 -0
- data_diff/databases/__init__.py +38 -0
- data_diff/databases/_connect.py +323 -0
- data_diff/databases/base.py +1417 -0
- data_diff/databases/bigquery.py +376 -0
- data_diff/databases/clickhouse.py +217 -0
- data_diff/databases/databricks.py +262 -0
- data_diff/databases/duckdb.py +207 -0
- data_diff/databases/mssql.py +343 -0
- data_diff/databases/mysql.py +189 -0
- data_diff/databases/oracle.py +238 -0
- data_diff/databases/postgresql.py +293 -0
- data_diff/databases/presto.py +222 -0
- data_diff/databases/redis.py +93 -0
- data_diff/databases/redshift.py +233 -0
- data_diff/databases/snowflake.py +222 -0
- data_diff/databases/sybase.py +720 -0
- data_diff/databases/trino.py +73 -0
- data_diff/databases/vertica.py +174 -0
- data_diff/diff_tables.py +489 -0
- data_diff/errors.py +17 -0
- data_diff/format.py +369 -0
- data_diff/hashdiff_tables.py +1026 -0
- data_diff/info_tree.py +76 -0
- data_diff/joindiff_tables.py +434 -0
- data_diff/lexicographic_space.py +253 -0
- data_diff/parse_time.py +88 -0
- data_diff/py.typed +0 -0
- data_diff/queries/__init__.py +13 -0
- data_diff/queries/api.py +213 -0
- data_diff/queries/ast_classes.py +811 -0
- data_diff/queries/base.py +38 -0
- data_diff/queries/extras.py +43 -0
- data_diff/query_utils.py +70 -0
- data_diff/schema.py +67 -0
- data_diff/table_segment.py +583 -0
- data_diff/thread_utils.py +112 -0
- data_diff/utils.py +1022 -0
- data_diff/version.py +15 -0
- dcs_core/__init__.py +13 -0
- dcs_core/__main__.py +17 -0
- dcs_core/__version__.py +15 -0
- dcs_core/cli/__init__.py +13 -0
- dcs_core/cli/cli.py +165 -0
- dcs_core/core/__init__.py +19 -0
- dcs_core/core/common/__init__.py +13 -0
- dcs_core/core/common/errors.py +50 -0
- dcs_core/core/common/models/__init__.py +13 -0
- dcs_core/core/common/models/configuration.py +284 -0
- dcs_core/core/common/models/dashboard.py +24 -0
- dcs_core/core/common/models/data_source_resource.py +75 -0
- dcs_core/core/common/models/metric.py +160 -0
- dcs_core/core/common/models/profile.py +75 -0
- dcs_core/core/common/models/validation.py +216 -0
- dcs_core/core/common/models/widget.py +44 -0
- dcs_core/core/configuration/__init__.py +13 -0
- dcs_core/core/configuration/config_loader.py +139 -0
- dcs_core/core/configuration/configuration_parser.py +262 -0
- dcs_core/core/configuration/configuration_parser_arc.py +328 -0
- dcs_core/core/datasource/__init__.py +13 -0
- dcs_core/core/datasource/base.py +62 -0
- dcs_core/core/datasource/manager.py +112 -0
- dcs_core/core/datasource/search_datasource.py +421 -0
- dcs_core/core/datasource/sql_datasource.py +1094 -0
- dcs_core/core/inspect.py +163 -0
- dcs_core/core/logger/__init__.py +13 -0
- dcs_core/core/logger/base.py +32 -0
- dcs_core/core/logger/default_logger.py +94 -0
- dcs_core/core/metric/__init__.py +13 -0
- dcs_core/core/metric/base.py +220 -0
- dcs_core/core/metric/combined_metric.py +98 -0
- dcs_core/core/metric/custom_metric.py +34 -0
- dcs_core/core/metric/manager.py +137 -0
- dcs_core/core/metric/numeric_metric.py +403 -0
- dcs_core/core/metric/reliability_metric.py +90 -0
- dcs_core/core/profiling/__init__.py +13 -0
- dcs_core/core/profiling/datasource_profiling.py +136 -0
- dcs_core/core/profiling/numeric_field_profiling.py +72 -0
- dcs_core/core/profiling/text_field_profiling.py +67 -0
- dcs_core/core/repository/__init__.py +13 -0
- dcs_core/core/repository/metric_repository.py +77 -0
- dcs_core/core/utils/__init__.py +13 -0
- dcs_core/core/utils/log.py +29 -0
- dcs_core/core/utils/tracking.py +105 -0
- dcs_core/core/utils/utils.py +44 -0
- dcs_core/core/validation/__init__.py +13 -0
- dcs_core/core/validation/base.py +230 -0
- dcs_core/core/validation/completeness_validation.py +153 -0
- dcs_core/core/validation/custom_query_validation.py +24 -0
- dcs_core/core/validation/manager.py +282 -0
- dcs_core/core/validation/numeric_validation.py +276 -0
- dcs_core/core/validation/reliability_validation.py +91 -0
- dcs_core/core/validation/uniqueness_validation.py +61 -0
- dcs_core/core/validation/validity_validation.py +738 -0
- dcs_core/integrations/__init__.py +13 -0
- dcs_core/integrations/databases/__init__.py +13 -0
- dcs_core/integrations/databases/bigquery.py +187 -0
- dcs_core/integrations/databases/databricks.py +51 -0
- dcs_core/integrations/databases/db2.py +652 -0
- dcs_core/integrations/databases/elasticsearch.py +61 -0
- dcs_core/integrations/databases/mssql.py +829 -0
- dcs_core/integrations/databases/mysql.py +409 -0
- dcs_core/integrations/databases/opensearch.py +64 -0
- dcs_core/integrations/databases/oracle.py +719 -0
- dcs_core/integrations/databases/postgres.py +482 -0
- dcs_core/integrations/databases/redshift.py +53 -0
- dcs_core/integrations/databases/snowflake.py +48 -0
- dcs_core/integrations/databases/spark_df.py +111 -0
- dcs_core/integrations/databases/sybase.py +1069 -0
- dcs_core/integrations/storage/__init__.py +13 -0
- dcs_core/integrations/storage/local_file.py +149 -0
- dcs_core/integrations/utils/__init__.py +13 -0
- dcs_core/integrations/utils/utils.py +36 -0
- dcs_core/report/__init__.py +13 -0
- dcs_core/report/dashboard.py +211 -0
- dcs_core/report/models.py +88 -0
- dcs_core/report/static/assets/fonts/DMSans-Bold.ttf +0 -0
- dcs_core/report/static/assets/fonts/DMSans-Medium.ttf +0 -0
- dcs_core/report/static/assets/fonts/DMSans-Regular.ttf +0 -0
- dcs_core/report/static/assets/fonts/DMSans-SemiBold.ttf +0 -0
- dcs_core/report/static/assets/images/docs.svg +6 -0
- dcs_core/report/static/assets/images/github.svg +4 -0
- dcs_core/report/static/assets/images/logo.svg +7 -0
- dcs_core/report/static/assets/images/slack.svg +13 -0
- dcs_core/report/static/index.js +2 -0
- dcs_core/report/static/index.js.LICENSE.txt +3971 -0
- dcs_sdk/__init__.py +13 -0
- dcs_sdk/__main__.py +18 -0
- dcs_sdk/__version__.py +15 -0
- dcs_sdk/cli/__init__.py +13 -0
- dcs_sdk/cli/cli.py +163 -0
- dcs_sdk/sdk/__init__.py +58 -0
- dcs_sdk/sdk/config/__init__.py +13 -0
- dcs_sdk/sdk/config/config_loader.py +491 -0
- dcs_sdk/sdk/data_diff/__init__.py +13 -0
- dcs_sdk/sdk/data_diff/data_differ.py +821 -0
- dcs_sdk/sdk/rules/__init__.py +15 -0
- dcs_sdk/sdk/rules/rules_mappping.py +31 -0
- dcs_sdk/sdk/rules/rules_repository.py +214 -0
- dcs_sdk/sdk/rules/schema_rules.py +65 -0
- dcs_sdk/sdk/utils/__init__.py +13 -0
- dcs_sdk/sdk/utils/serializer.py +25 -0
- dcs_sdk/sdk/utils/similarity_score/__init__.py +13 -0
- dcs_sdk/sdk/utils/similarity_score/base_provider.py +153 -0
- dcs_sdk/sdk/utils/similarity_score/cosine_similarity_provider.py +39 -0
- dcs_sdk/sdk/utils/similarity_score/jaccard_provider.py +24 -0
- dcs_sdk/sdk/utils/similarity_score/levenshtein_distance_provider.py +31 -0
- dcs_sdk/sdk/utils/table.py +475 -0
- dcs_sdk/sdk/utils/themes.py +40 -0
- dcs_sdk/sdk/utils/utils.py +349 -0
- dcs_sdk-1.6.5.dist-info/METADATA +150 -0
- dcs_sdk-1.6.5.dist-info/RECORD +159 -0
- dcs_sdk-1.6.5.dist-info/WHEEL +4 -0
- dcs_sdk-1.6.5.dist-info/entry_points.txt +4 -0
data_diff/__main__.py
ADDED
@@ -0,0 +1,517 @@
# Copyright 2022-present, the Waterdip Labs Pvt. Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json
import logging
import os
import sys
import time
from copy import deepcopy
from datetime import datetime
from itertools import islice
from typing import Dict, List, Optional, Set, Tuple, Union

import click
import rich
from rich.logging import RichHandler

from data_diff import Database, DbPath
from data_diff.config import apply_config_from_file
from data_diff.databases._connect import connect
from data_diff.diff_tables import Algorithm, TableDiffer
from data_diff.hashdiff_tables import (
    DEFAULT_BISECTION_FACTOR,
    DEFAULT_BISECTION_THRESHOLD,
    HashDiffer,
)
from data_diff.joindiff_tables import TABLE_WRITE_LIMIT, JoinDiffer
from data_diff.parse_time import UNITS_STR, ParseError, parse_time_before
from data_diff.queries.api import current_timestamp
from data_diff.schema import RawColumnInfo, create_schema
from data_diff.table_segment import TableSegment
from data_diff.utils import (
    eval_name_template,
    match_like,
    remove_password_from_url,
    safezip,
    set_entrypoint_name,
)
from data_diff.version import __version__

COLOR_SCHEME = {
    "+": "green",
    "-": "red",
}

set_entrypoint_name(os.getenv("DATADIFF_TRIGGERED_BY", "CLI"))


def _remove_passwords_in_dict(d: dict) -> None:
    for k, v in d.items():
        if k == "password":
            d[k] = "*" * len(v)
        elif k == "filepath":
            if "motherduck_token=" in v:
                d[k] = v.split("motherduck_token=")[0] + "motherduck_token=**********"
        elif isinstance(v, dict):
            _remove_passwords_in_dict(v)
        elif k.startswith("database"):
            d[k] = remove_password_from_url(v)


def _get_schema(pair: Tuple[Database, DbPath]) -> Dict[str, RawColumnInfo]:
    db, table_path = pair
    return db.query_table_schema(table_path)


def diff_schemas(table1, table2, schema1, schema2, columns) -> None:
    logging.info("Diffing schemas...")
    attrs = "name", "type", "datetime_precision", "numeric_precision", "numeric_scale"
    for c in columns:
        if c is None:  # Skip for convenience
            continue
        diffs = []

        if c not in schema1:
            cols = ", ".join(schema1)
            raise ValueError(f"Column '{c}' not found in table 1, named '{table1}'. Columns: {cols}")
        if c not in schema2:
            cols = ", ".join(schema1)
            raise ValueError(f"Column '{c}' not found in table 2, named '{table2}'. Columns: {cols}")

        col1 = schema1[c]
        col2 = schema2[c]

        for attr, v1, v2 in safezip(attrs, col1, col2):
            if v1 != v2:
                diffs.append(f"{attr}:({v1} != {v2})")
        if diffs:
            logging.warning(f"Schema mismatch in column '{c}': {', '.join(diffs)}")


class MyHelpFormatter(click.HelpFormatter):
    def __init__(self, **kwargs) -> None:
        super().__init__(self, **kwargs)
        self.indent_increment = 6

    def write_usage(self, prog: str, args: str = "", prefix: Optional[str] = None) -> None:
        self.write(f"dcs-diff v{__version__} - efficiently diff rows across database tables.\n\n")
        self.write("Usage:\n")
        self.write(f" * In-db diff: {prog} <database_a> <table_a> <table_b> [OPTIONS]\n")
        self.write(f" * Cross-db diff: {prog} <database_a> <table_a> <database_b> <table_b> [OPTIONS]\n")
        self.write(f" * Using config: {prog} --conf PATH [--run NAME] [OPTIONS]\n")


click.Context.formatter_class = MyHelpFormatter


@click.command(no_args_is_help=True)
@click.argument("database1", required=False)
@click.argument("table1", required=False)
@click.argument("database2", required=False)
@click.argument("table2", required=False)
@click.option(
    "-k", "--key-columns", default=[], multiple=True, help="Names of primary key columns. Default='id'.", metavar="NAME"
)
@click.option("-t", "--update-column", default=None, help="Name of updated_at/last_updated column", metavar="NAME")
@click.option(
    "-c",
    "--columns",
    default=[],
    multiple=True,
    help="Names of extra columns to compare."
    "Can be used more than once in the same command. "
    "Accepts a name or a pattern like in SQL. Example: -c col% -c another_col",
    metavar="NAME",
)
@click.option("-l", "--limit", default=None, help="Maximum number of differences to find", metavar="NUM")
@click.option(
    "--bisection-factor",
    default=None,
    help=f"Segments per iteration. Default={DEFAULT_BISECTION_FACTOR}.",
    metavar="NUM",
)
@click.option(
    "--bisection-threshold",
    default=None,
    help=f"Minimal bisection threshold. Below it, dcs-diff will download the data and compare it locally. Default={DEFAULT_BISECTION_THRESHOLD}.",
    metavar="NUM",
)
@click.option(
    "-m",
    "--materialize-to-table",
    default=None,
    metavar="TABLE_NAME",
    help="(joindiff only) Materialize the diff results into a new table in the database. If a table exists by that name, it will be replaced.",
)
@click.option(
    "--min-age",
    default=None,
    help="Considers only rows older than specified. Useful for specifying replication lag."
    "Example: --min-age=5min ignores rows from the last 5 minutes. "
    f"\nValid units: {UNITS_STR}",
    metavar="AGE",
)
@click.option(
    "--max-age", default=None, help="Considers only rows younger than specified. See --min-age.", metavar="AGE"
)
@click.option("-s", "--stats", is_flag=True, help="Print stats instead of a detailed diff")
@click.option("-d", "--debug", is_flag=True, help="Print debug info")
@click.option("--json", "json_output", is_flag=True, help="Print JSONL output for machine readability")
@click.option("-v", "--verbose", is_flag=True, help="Print extra info")
@click.option("--version", is_flag=True, help="Print version info and exit")
@click.option("-i", "--interactive", is_flag=True, help="Confirm queries, implies --debug")
@click.option(
    "--case-sensitive",
    is_flag=True,
    help="Column names are treated as case-sensitive. Otherwise, dcs-diff corrects their case according to schema.",
)
@click.option(
    "--assume-unique-key",
    is_flag=True,
    help="Skip validating the uniqueness of the key column during joindiff, which is costly in non-cloud dbs.",
)
@click.option(
    "--sample-exclusive-rows",
    is_flag=True,
    help="Sample several rows that only appear in one of the tables, but not the other. (joindiff only)",
)
@click.option(
    "--materialize-all-rows",
    is_flag=True,
    help="Materialize every row, even if they are the same, instead of just the differing rows. (joindiff only)",
)
@click.option(
    "--table-write-limit",
    default=TABLE_WRITE_LIMIT,
    help=f"Maximum number of rows to write when creating materialized or sample tables, per thread. Default={TABLE_WRITE_LIMIT}",
    metavar="COUNT",
)
@click.option(
    "-j",
    "--threads",
    default=None,
    help="Number of worker threads to use per database. Default=1. "
    "A higher number will increase performance, but take more capacity from your database. "
    "'serial' guarantees a single-threaded execution of the algorithm (useful for debugging).",
    metavar="COUNT",
)
@click.option(
    "-w",
    "--where",
    default=None,
    help="An additional 'where' expression to restrict the search space. Beware of SQL Injection!",
    metavar="EXPR",
)
@click.option("-a", "--algorithm", default=Algorithm.AUTO.value, type=click.Choice([i.value for i in Algorithm]))
@click.option(
    "--conf",
    default=None,
    help="Path to a configuration.toml file, to provide a default configuration, and a list of possible runs.",
    metavar="PATH",
)
@click.option(
    "--run",
    default=None,
    help="Name of run-configuration to run. If used, CLI arguments for database and table must be omitted.",
    metavar="NAME",
)
def main(conf, run, **kw) -> None:
    if kw["table2"] is None and kw["database2"]:
        # Use the "database table table" form
        kw["table2"] = kw["database2"]
        kw["database2"] = kw["database1"]

    if kw["version"]:
        print(f"v{__version__}")
        return

    if conf:
        kw = apply_config_from_file(conf, run, kw)

    # if kw["no_tracking"]:
    #     disable_tracking()

    if kw.get("interactive"):
        kw["debug"] = True

    try:
        _data_diff(**kw)
    except Exception as e:
        logging.error(e)
        raise


def _get_dbs(
    threads: int, database1: str, threads1: int, database2: str, threads2: int, interactive: bool
) -> Tuple[Database, Database]:
    db1 = connect(database1, threads1 or threads)
    if database1 == database2:
        db2 = db1
    else:
        db2 = connect(database2, threads2 or threads)

    if interactive:
        db1.enable_interactive()
        db2.enable_interactive()

    return db1, db2


def _set_age(options: dict, min_age: Optional[str], max_age: Optional[str], db: Database) -> None:
    if min_age or max_age:
        now: datetime = db.query(current_timestamp(), datetime).replace(tzinfo=None)
        try:
            if max_age:
                options["min_update"] = parse_time_before(now, max_age)
            if min_age:
                options["max_update"] = parse_time_before(now, min_age)
        except ParseError as e:
            logging.error(f"Error while parsing age expression: {e}")


def _get_table_differ(
    algorithm: str,
    db1: Database,
    db2: Database,
    threaded: bool,
    threads: int,
    assume_unique_key: bool,
    sample_exclusive_rows: bool,
    materialize_all_rows: bool,
    table_write_limit: int,
    materialize_to_table: Optional[str],
    bisection_factor: Optional[int],
    bisection_threshold: Optional[int],
) -> TableDiffer:
    algorithm = Algorithm(algorithm)
    if algorithm == Algorithm.AUTO:
        algorithm = Algorithm.JOINDIFF if db1 == db2 else Algorithm.HASHDIFF

    logging.info(f"Using algorithm '{algorithm.name.lower()}'.")

    if algorithm == Algorithm.JOINDIFF:
        return JoinDiffer(
            threaded=threaded,
            max_threadpool_size=threads and threads * 2,
            validate_unique_key=not assume_unique_key,
            sample_exclusive_rows=sample_exclusive_rows,
            materialize_all_rows=materialize_all_rows,
            table_write_limit=table_write_limit,
            materialize_to_table=(
                materialize_to_table and db1.dialect.parse_table_name(eval_name_template(materialize_to_table))
            ),
        )

    assert algorithm == Algorithm.HASHDIFF
    return HashDiffer(
        bisection_factor=DEFAULT_BISECTION_FACTOR if bisection_factor is None else bisection_factor,
        bisection_threshold=DEFAULT_BISECTION_THRESHOLD if bisection_threshold is None else bisection_threshold,
        threaded=threaded,
        max_threadpool_size=threads and threads * 2,
    )


def _print_result(stats, json_output, diff_iter) -> None:
    if stats:
        if json_output:
            rich.print(json.dumps(diff_iter.get_stats_dict()))
        else:
            rich.print(diff_iter.get_stats_string())

    else:
        for op, values in diff_iter:
            color = COLOR_SCHEME.get(op, "grey62")

            if json_output:
                jsonl = json.dumps([op, list(values)])
                rich.print(f"[{color}]{jsonl}[/{color}]")
            else:
                text = f"{op} {', '.join(map(str, values))}"
                rich.print(f"[{color}]{text}[/{color}]")

            sys.stdout.flush()


def _get_expanded_columns(
    columns: List[str],
    case_sensitive: bool,
    mutual: Set[str],
    db1: Database,
    schema1: dict,
    table1: str,
    db2: Database,
    schema2: dict,
    table2: str,
) -> Set[str]:
    expanded_columns: Set[str] = set()
    for c in columns:
        cc = c if case_sensitive else c.lower()
        match = set(match_like(cc, mutual))
        if not match:
            m1 = None if any(match_like(cc, schema1.keys())) else f"{db1}/{table1}"
            m2 = None if any(match_like(cc, schema2.keys())) else f"{db2}/{table2}"
            not_matched = ", ".join(m for m in [m1, m2] if m)
            raise ValueError(f"Column '{c}' not found in: {not_matched}")

        expanded_columns |= match
    return expanded_columns


def _get_threads(threads: Union[int, str, None], threads1: Optional[int], threads2: Optional[int]) -> Tuple[bool, int]:
    threaded = True
    if threads is None:
        threads = 1
    elif isinstance(threads, str) and threads.lower() == "serial":
        assert not (threads1 or threads2)
        threaded = False
        threads = 1
    else:
        try:
            threads = int(threads)
        except ValueError:
            logging.error("Error: threads must be a number, or 'serial'.")
            raise

        if threads < 1:
            logging.error("Error: threads must be >= 1")
            raise ValueError("Error: threads must be >= 1")

    return threaded, threads


def _data_diff(
    database1,
    table1,
    database2,
    table2,
    key_columns,
    update_column,
    columns,
    limit,
    algorithm,
    bisection_factor,
    bisection_threshold,
    min_age,
    max_age,
    stats,
    debug,
    verbose,
    version,
    interactive,
    threads,
    case_sensitive,
    json_output,
    where,
    assume_unique_key,
    sample_exclusive_rows,
    materialize_all_rows,
    table_write_limit,
    materialize_to_table,
    threads1=None,
    threads2=None,
    __conf__=None,
) -> None:
    if limit and stats:
        logging.error("Cannot specify a limit when using the -s/--stats switch")
        return

    key_columns = key_columns or ("id",)
    threaded, threads = _get_threads(threads, threads1, threads2)
    start = time.monotonic()

    if database1 is None or database2 is None:
        logging.error(
            f"Error: Databases not specified. Got {database1} and {database2}. Use --help for more information."
        )
        return

    db1: Database
    db2: Database
    db1, db2 = _get_dbs(threads, database1, threads1, database2, threads2, interactive)
    with db1, db2:
        options = {
            "case_sensitive": case_sensitive,
            "where": where,
        }

        _set_age(options, min_age, max_age, db1)
        dbs: Tuple[Database, Database] = db1, db2

        differ = _get_table_differ(
            algorithm,
            db1,
            db2,
            threaded,
            threads,
            assume_unique_key,
            sample_exclusive_rows,
            materialize_all_rows,
            table_write_limit,
            materialize_to_table,
            bisection_factor,
            bisection_threshold,
        )

        table_names = table1, table2
        table_paths = [db.dialect.parse_table_name(t) for db, t in safezip(dbs, table_names)]

        schemas = list(differ._thread_map(_get_schema, safezip(dbs, table_paths)))
        schema1, schema2 = schemas = [
            create_schema(db.name, table_path, schema, case_sensitive)
            for db, table_path, schema in safezip(dbs, table_paths, schemas)
        ]

        mutual = schema1.keys() & schema2.keys()  # Case-aware, according to case_sensitive
        logging.debug(f"Available mutual columns: {mutual}")

        expanded_columns = _get_expanded_columns(
            columns, case_sensitive, mutual, db1, schema1, table1, db2, schema2, table2
        )
        columns = tuple(expanded_columns - {*key_columns, update_column})

        if db1 == db2:
            diff_schemas(
                table_names[0],
                table_names[1],
                schema1,
                schema2,
                (
                    *key_columns,
                    update_column,
                    *columns,
                ),
            )

        logging.info(f"Diffing using columns: key={key_columns} update={update_column} extra={columns}.")

        segments = [
            TableSegment(db, table_path, key_columns, update_column, columns, **options)._with_raw_schema(raw_schema)
            for db, table_path, raw_schema in safezip(dbs, table_paths, schemas)
        ]

        diff_iter = differ.diff_tables(*segments)

        if limit:
            assert not stats
            diff_iter = islice(diff_iter, int(limit))

        _print_result(stats, json_output, diff_iter)

    end = time.monotonic()
    logging.info(f"Duration: {end-start:.2f} seconds.")


if __name__ == "__main__":
    main()
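Editor's note: the snippet below is not part of the package diff. It is a minimal sketch of how the Click command defined in data_diff/__main__.py above could be exercised in-process with Click's test runner; the connection URIs and table names are hypothetical placeholders, not values from this release.

# Illustrative sketch only -- not part of dcs-sdk 1.6.5.
from click.testing import CliRunner

from data_diff.__main__ import main

runner = CliRunner()

# Equivalent to `dcs-diff --version` on the command line.
print(runner.invoke(main, ["--version"]).output)

# Cross-db form: <database_a> <table_a> <database_b> <table_b>,
# keyed on `id` and limited to the first 10 differing rows.
# The URIs below are placeholders and will not connect anywhere.
result = runner.invoke(
    main,
    [
        "postgresql://user:password@host/db_a",
        "public.orders",
        "snowflake://user:password@account/db_b/schema",
        "ORDERS",
        "-k", "id",
        "--limit", "10",
    ],
)
print(result.output)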
data_diff/abcs/__init__.py
ADDED
@@ -0,0 +1,13 @@
# Copyright 2022-present, the Waterdip Labs Pvt. Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
data_diff/abcs/compiler.py
ADDED
@@ -0,0 +1,27 @@
# Copyright 2022-present, the Waterdip Labs Pvt. Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from abc import ABC

import attrs


@attrs.define(frozen=False)
class AbstractCompiler(ABC):
    pass


@attrs.define(frozen=False, eq=False)
class Compilable(ABC):
    pass
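Editor's note: the snippet below is not part of the package diff. It sketches how an attrs-based node could extend the Compilable marker class shown above (presumably the pattern used by the AST classes in data_diff/queries/ast_classes.py); `ColumnRef` is a hypothetical name invented for this example.

# Illustrative sketch only -- not part of dcs-sdk 1.6.5.
import attrs

from data_diff.abcs.compiler import Compilable


@attrs.define(frozen=False, eq=False)
class ColumnRef(Compilable):
    # A made-up node carrying a table and column name.
    table: str
    name: str


ref = ColumnRef(table="orders", name="id")
print(ref)  # ColumnRef(table='orders', name='id')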