dvt_core-0.1.8.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dvt_core-0.1.8/MANIFEST.in +4 -0
- dvt_core-0.1.8/PKG-INFO +287 -0
- dvt_core-0.1.8/README.md +236 -0
- dvt_core-0.1.8/dbt/__init__.py +7 -0
- dvt_core-0.1.8/dbt/_pydantic_shim.py +26 -0
- dvt_core-0.1.8/dbt/artifacts/__init__.py +0 -0
- dvt_core-0.1.8/dbt/artifacts/exceptions/__init__.py +1 -0
- dvt_core-0.1.8/dbt/artifacts/exceptions/schemas.py +31 -0
- dvt_core-0.1.8/dbt/artifacts/resources/__init__.py +116 -0
- dvt_core-0.1.8/dbt/artifacts/resources/base.py +67 -0
- dvt_core-0.1.8/dbt/artifacts/resources/types.py +93 -0
- dvt_core-0.1.8/dbt/artifacts/resources/v1/analysis.py +10 -0
- dvt_core-0.1.8/dbt/artifacts/resources/v1/catalog.py +23 -0
- dvt_core-0.1.8/dbt/artifacts/resources/v1/components.py +274 -0
- dvt_core-0.1.8/dbt/artifacts/resources/v1/config.py +277 -0
- dvt_core-0.1.8/dbt/artifacts/resources/v1/documentation.py +11 -0
- dvt_core-0.1.8/dbt/artifacts/resources/v1/exposure.py +51 -0
- dvt_core-0.1.8/dbt/artifacts/resources/v1/function.py +52 -0
- dvt_core-0.1.8/dbt/artifacts/resources/v1/generic_test.py +31 -0
- dvt_core-0.1.8/dbt/artifacts/resources/v1/group.py +21 -0
- dvt_core-0.1.8/dbt/artifacts/resources/v1/hook.py +11 -0
- dvt_core-0.1.8/dbt/artifacts/resources/v1/macro.py +29 -0
- dvt_core-0.1.8/dbt/artifacts/resources/v1/metric.py +172 -0
- dvt_core-0.1.8/dbt/artifacts/resources/v1/model.py +145 -0
- dvt_core-0.1.8/dbt/artifacts/resources/v1/owner.py +10 -0
- dvt_core-0.1.8/dbt/artifacts/resources/v1/saved_query.py +111 -0
- dvt_core-0.1.8/dbt/artifacts/resources/v1/seed.py +41 -0
- dvt_core-0.1.8/dbt/artifacts/resources/v1/semantic_layer_components.py +72 -0
- dvt_core-0.1.8/dbt/artifacts/resources/v1/semantic_model.py +314 -0
- dvt_core-0.1.8/dbt/artifacts/resources/v1/singular_test.py +14 -0
- dvt_core-0.1.8/dbt/artifacts/resources/v1/snapshot.py +91 -0
- dvt_core-0.1.8/dbt/artifacts/resources/v1/source_definition.py +84 -0
- dvt_core-0.1.8/dbt/artifacts/resources/v1/sql_operation.py +10 -0
- dvt_core-0.1.8/dbt/artifacts/resources/v1/unit_test_definition.py +77 -0
- dvt_core-0.1.8/dbt/artifacts/schemas/__init__.py +0 -0
- dvt_core-0.1.8/dbt/artifacts/schemas/base.py +191 -0
- dvt_core-0.1.8/dbt/artifacts/schemas/batch_results.py +24 -0
- dvt_core-0.1.8/dbt/artifacts/schemas/catalog/__init__.py +11 -0
- dvt_core-0.1.8/dbt/artifacts/schemas/catalog/v1/__init__.py +0 -0
- dvt_core-0.1.8/dbt/artifacts/schemas/catalog/v1/catalog.py +59 -0
- dvt_core-0.1.8/dbt/artifacts/schemas/freshness/__init__.py +1 -0
- dvt_core-0.1.8/dbt/artifacts/schemas/freshness/v3/__init__.py +0 -0
- dvt_core-0.1.8/dbt/artifacts/schemas/freshness/v3/freshness.py +158 -0
- dvt_core-0.1.8/dbt/artifacts/schemas/manifest/__init__.py +2 -0
- dvt_core-0.1.8/dbt/artifacts/schemas/manifest/v12/__init__.py +0 -0
- dvt_core-0.1.8/dbt/artifacts/schemas/manifest/v12/manifest.py +211 -0
- dvt_core-0.1.8/dbt/artifacts/schemas/results.py +147 -0
- dvt_core-0.1.8/dbt/artifacts/schemas/run/__init__.py +2 -0
- dvt_core-0.1.8/dbt/artifacts/schemas/run/v5/__init__.py +0 -0
- dvt_core-0.1.8/dbt/artifacts/schemas/run/v5/run.py +184 -0
- dvt_core-0.1.8/dbt/artifacts/schemas/upgrades/__init__.py +4 -0
- dvt_core-0.1.8/dbt/artifacts/schemas/upgrades/upgrade_manifest.py +174 -0
- dvt_core-0.1.8/dbt/artifacts/schemas/upgrades/upgrade_manifest_dbt_version.py +2 -0
- dvt_core-0.1.8/dbt/artifacts/utils/validation.py +153 -0
- dvt_core-0.1.8/dbt/cli/__init__.py +1 -0
- dvt_core-0.1.8/dbt/cli/context.py +17 -0
- dvt_core-0.1.8/dbt/cli/exceptions.py +57 -0
- dvt_core-0.1.8/dbt/cli/flags.py +558 -0
- dvt_core-0.1.8/dbt/cli/main.py +1429 -0
- dvt_core-0.1.8/dbt/cli/option_types.py +121 -0
- dvt_core-0.1.8/dbt/cli/options.py +80 -0
- dvt_core-0.1.8/dbt/cli/params.py +804 -0
- dvt_core-0.1.8/dbt/cli/requires.py +434 -0
- dvt_core-0.1.8/dbt/cli/resolvers.py +55 -0
- dvt_core-0.1.8/dbt/cli/types.py +40 -0
- dvt_core-0.1.8/dbt/clients/__init__.py +0 -0
- dvt_core-0.1.8/dbt/clients/checked_load.py +83 -0
- dvt_core-0.1.8/dbt/clients/git.py +164 -0
- dvt_core-0.1.8/dbt/clients/jinja.py +206 -0
- dvt_core-0.1.8/dbt/clients/jinja_static.py +245 -0
- dvt_core-0.1.8/dbt/clients/registry.py +192 -0
- dvt_core-0.1.8/dbt/clients/yaml_helper.py +68 -0
- dvt_core-0.1.8/dbt/compilation.py +833 -0
- dvt_core-0.1.8/dbt/compute/__init__.py +9 -0
- dvt_core-0.1.8/dbt/compute/arrow_bridge.py +308 -0
- dvt_core-0.1.8/dbt/compute/engines/__init__.py +11 -0
- dvt_core-0.1.8/dbt/compute/engines/duckdb_engine.py +214 -0
- dvt_core-0.1.8/dbt/compute/engines/spark_engine.py +311 -0
- dvt_core-0.1.8/dbt/compute/federated_executor.py +356 -0
- dvt_core-0.1.8/dbt/compute/smart_selector.py +272 -0
- dvt_core-0.1.8/dbt/config/__init__.py +4 -0
- dvt_core-0.1.8/dbt/config/catalogs.py +94 -0
- dvt_core-0.1.8/dbt/config/compute.py +284 -0
- dvt_core-0.1.8/dbt/config/dvt_profile.py +363 -0
- dvt_core-0.1.8/dbt/config/profile.py +434 -0
- dvt_core-0.1.8/dbt/config/project.py +873 -0
- dvt_core-0.1.8/dbt/config/renderer.py +231 -0
- dvt_core-0.1.8/dbt/config/runtime.py +531 -0
- dvt_core-0.1.8/dbt/config/selectors.py +208 -0
- dvt_core-0.1.8/dbt/config/utils.py +77 -0
- dvt_core-0.1.8/dbt/constants.py +28 -0
- dvt_core-0.1.8/dbt/context/__init__.py +0 -0
- dvt_core-0.1.8/dbt/context/base.py +745 -0
- dvt_core-0.1.8/dbt/context/configured.py +135 -0
- dvt_core-0.1.8/dbt/context/context_config.py +349 -0
- dvt_core-0.1.8/dbt/context/docs.py +82 -0
- dvt_core-0.1.8/dbt/context/exceptions_jinja.py +178 -0
- dvt_core-0.1.8/dbt/context/macro_resolver.py +195 -0
- dvt_core-0.1.8/dbt/context/macros.py +171 -0
- dvt_core-0.1.8/dbt/context/manifest.py +72 -0
- dvt_core-0.1.8/dbt/context/providers.py +2208 -0
- dvt_core-0.1.8/dbt/context/query_header.py +13 -0
- dvt_core-0.1.8/dbt/context/secret.py +58 -0
- dvt_core-0.1.8/dbt/context/target.py +74 -0
- dvt_core-0.1.8/dbt/contracts/__init__.py +0 -0
- dvt_core-0.1.8/dbt/contracts/files.py +413 -0
- dvt_core-0.1.8/dbt/contracts/graph/__init__.py +0 -0
- dvt_core-0.1.8/dbt/contracts/graph/manifest.py +1904 -0
- dvt_core-0.1.8/dbt/contracts/graph/metrics.py +97 -0
- dvt_core-0.1.8/dbt/contracts/graph/model_config.py +70 -0
- dvt_core-0.1.8/dbt/contracts/graph/node_args.py +42 -0
- dvt_core-0.1.8/dbt/contracts/graph/nodes.py +1806 -0
- dvt_core-0.1.8/dbt/contracts/graph/semantic_manifest.py +232 -0
- dvt_core-0.1.8/dbt/contracts/graph/unparsed.py +811 -0
- dvt_core-0.1.8/dbt/contracts/project.py +417 -0
- dvt_core-0.1.8/dbt/contracts/results.py +53 -0
- dvt_core-0.1.8/dbt/contracts/selection.py +23 -0
- dvt_core-0.1.8/dbt/contracts/sql.py +85 -0
- dvt_core-0.1.8/dbt/contracts/state.py +68 -0
- dvt_core-0.1.8/dbt/contracts/util.py +46 -0
- dvt_core-0.1.8/dbt/deprecations.py +346 -0
- dvt_core-0.1.8/dbt/deps/__init__.py +0 -0
- dvt_core-0.1.8/dbt/deps/base.py +152 -0
- dvt_core-0.1.8/dbt/deps/git.py +195 -0
- dvt_core-0.1.8/dbt/deps/local.py +79 -0
- dvt_core-0.1.8/dbt/deps/registry.py +130 -0
- dvt_core-0.1.8/dbt/deps/resolver.py +149 -0
- dvt_core-0.1.8/dbt/deps/tarball.py +120 -0
- dvt_core-0.1.8/dbt/docs/source/_ext/dbt_click.py +119 -0
- dvt_core-0.1.8/dbt/docs/source/conf.py +32 -0
- dvt_core-0.1.8/dbt/env_vars.py +64 -0
- dvt_core-0.1.8/dbt/event_time/event_time.py +40 -0
- dvt_core-0.1.8/dbt/event_time/sample_window.py +60 -0
- dvt_core-0.1.8/dbt/events/__init__.py +15 -0
- dvt_core-0.1.8/dbt/events/base_types.py +36 -0
- dvt_core-0.1.8/dbt/events/core_types_pb2.py +2 -0
- dvt_core-0.1.8/dbt/events/logging.py +108 -0
- dvt_core-0.1.8/dbt/events/types.py +2516 -0
- dvt_core-0.1.8/dbt/exceptions.py +1486 -0
- dvt_core-0.1.8/dbt/flags.py +89 -0
- dvt_core-0.1.8/dbt/graph/__init__.py +11 -0
- dvt_core-0.1.8/dbt/graph/cli.py +247 -0
- dvt_core-0.1.8/dbt/graph/graph.py +172 -0
- dvt_core-0.1.8/dbt/graph/queue.py +214 -0
- dvt_core-0.1.8/dbt/graph/selector.py +374 -0
- dvt_core-0.1.8/dbt/graph/selector_methods.py +975 -0
- dvt_core-0.1.8/dbt/graph/selector_spec.py +222 -0
- dvt_core-0.1.8/dbt/graph/thread_pool.py +18 -0
- dvt_core-0.1.8/dbt/hooks.py +21 -0
- dvt_core-0.1.8/dbt/include/README.md +49 -0
- dvt_core-0.1.8/dbt/include/__init__.py +3 -0
- dvt_core-0.1.8/dbt/include/starter_project/.gitignore +4 -0
- dvt_core-0.1.8/dbt/include/starter_project/README.md +15 -0
- dvt_core-0.1.8/dbt/include/starter_project/__init__.py +3 -0
- dvt_core-0.1.8/dbt/include/starter_project/analyses/.gitkeep +0 -0
- dvt_core-0.1.8/dbt/include/starter_project/dbt_project.yml +36 -0
- dvt_core-0.1.8/dbt/include/starter_project/macros/.gitkeep +0 -0
- dvt_core-0.1.8/dbt/include/starter_project/models/example/my_first_dbt_model.sql +27 -0
- dvt_core-0.1.8/dbt/include/starter_project/models/example/my_second_dbt_model.sql +6 -0
- dvt_core-0.1.8/dbt/include/starter_project/models/example/schema.yml +21 -0
- dvt_core-0.1.8/dbt/include/starter_project/seeds/.gitkeep +0 -0
- dvt_core-0.1.8/dbt/include/starter_project/snapshots/.gitkeep +0 -0
- dvt_core-0.1.8/dbt/include/starter_project/tests/.gitkeep +0 -0
- dvt_core-0.1.8/dbt/internal_deprecations.py +26 -0
- dvt_core-0.1.8/dbt/jsonschemas/__init__.py +3 -0
- dvt_core-0.1.8/dbt/jsonschemas/jsonschemas.py +309 -0
- dvt_core-0.1.8/dbt/jsonschemas/project/0.0.110.json +4717 -0
- dvt_core-0.1.8/dbt/jsonschemas/project/0.0.85.json +2015 -0
- dvt_core-0.1.8/dbt/jsonschemas/resources/0.0.110.json +2636 -0
- dvt_core-0.1.8/dbt/jsonschemas/resources/0.0.85.json +2536 -0
- dvt_core-0.1.8/dbt/jsonschemas/resources/latest.json +6773 -0
- dvt_core-0.1.8/dbt/links.py +4 -0
- dvt_core-0.1.8/dbt/materializations/__init__.py +0 -0
- dvt_core-0.1.8/dbt/materializations/incremental/__init__.py +0 -0
- dvt_core-0.1.8/dbt/materializations/incremental/microbatch.py +236 -0
- dvt_core-0.1.8/dbt/mp_context.py +8 -0
- dvt_core-0.1.8/dbt/node_types.py +37 -0
- dvt_core-0.1.8/dbt/parser/__init__.py +23 -0
- dvt_core-0.1.8/dbt/parser/analysis.py +21 -0
- dvt_core-0.1.8/dbt/parser/base.py +548 -0
- dvt_core-0.1.8/dbt/parser/common.py +266 -0
- dvt_core-0.1.8/dbt/parser/docs.py +52 -0
- dvt_core-0.1.8/dbt/parser/fixtures.py +51 -0
- dvt_core-0.1.8/dbt/parser/functions.py +30 -0
- dvt_core-0.1.8/dbt/parser/generic_test.py +100 -0
- dvt_core-0.1.8/dbt/parser/generic_test_builders.py +333 -0
- dvt_core-0.1.8/dbt/parser/hooks.py +118 -0
- dvt_core-0.1.8/dbt/parser/macros.py +137 -0
- dvt_core-0.1.8/dbt/parser/manifest.py +2204 -0
- dvt_core-0.1.8/dbt/parser/models.py +573 -0
- dvt_core-0.1.8/dbt/parser/partial.py +1178 -0
- dvt_core-0.1.8/dbt/parser/read_files.py +445 -0
- dvt_core-0.1.8/dbt/parser/schema_generic_tests.py +422 -0
- dvt_core-0.1.8/dbt/parser/schema_renderer.py +111 -0
- dvt_core-0.1.8/dbt/parser/schema_yaml_readers.py +935 -0
- dvt_core-0.1.8/dbt/parser/schemas.py +1466 -0
- dvt_core-0.1.8/dbt/parser/search.py +149 -0
- dvt_core-0.1.8/dbt/parser/seeds.py +28 -0
- dvt_core-0.1.8/dbt/parser/singular_test.py +20 -0
- dvt_core-0.1.8/dbt/parser/snapshots.py +44 -0
- dvt_core-0.1.8/dbt/parser/sources.py +558 -0
- dvt_core-0.1.8/dbt/parser/sql.py +62 -0
- dvt_core-0.1.8/dbt/parser/unit_tests.py +621 -0
- dvt_core-0.1.8/dbt/plugins/__init__.py +20 -0
- dvt_core-0.1.8/dbt/plugins/contracts.py +9 -0
- dvt_core-0.1.8/dbt/plugins/exceptions.py +2 -0
- dvt_core-0.1.8/dbt/plugins/manager.py +163 -0
- dvt_core-0.1.8/dbt/plugins/manifest.py +21 -0
- dvt_core-0.1.8/dbt/profiler.py +20 -0
- dvt_core-0.1.8/dbt/py.typed +1 -0
- dvt_core-0.1.8/dbt/query_analyzer.py +362 -0
- dvt_core-0.1.8/dbt/runners/__init__.py +2 -0
- dvt_core-0.1.8/dbt/runners/exposure_runner.py +7 -0
- dvt_core-0.1.8/dbt/runners/no_op_runner.py +45 -0
- dvt_core-0.1.8/dbt/runners/saved_query_runner.py +7 -0
- dvt_core-0.1.8/dbt/selected_resources.py +8 -0
- dvt_core-0.1.8/dbt/task/__init__.py +0 -0
- dvt_core-0.1.8/dbt/task/base.py +503 -0
- dvt_core-0.1.8/dbt/task/build.py +197 -0
- dvt_core-0.1.8/dbt/task/clean.py +56 -0
- dvt_core-0.1.8/dbt/task/clone.py +161 -0
- dvt_core-0.1.8/dbt/task/compile.py +150 -0
- dvt_core-0.1.8/dbt/task/debug.py +505 -0
- dvt_core-0.1.8/dbt/task/deps.py +280 -0
- dvt_core-0.1.8/dbt/task/docs/__init__.py +3 -0
- dvt_core-0.1.8/dbt/task/docs/generate.py +434 -0
- dvt_core-0.1.8/dbt/task/docs/index.html +250 -0
- dvt_core-0.1.8/dbt/task/docs/serve.py +29 -0
- dvt_core-0.1.8/dbt/task/freshness.py +322 -0
- dvt_core-0.1.8/dbt/task/function.py +121 -0
- dvt_core-0.1.8/dbt/task/group_lookup.py +46 -0
- dvt_core-0.1.8/dbt/task/init.py +353 -0
- dvt_core-0.1.8/dbt/task/list.py +236 -0
- dvt_core-0.1.8/dbt/task/printer.py +175 -0
- dvt_core-0.1.8/dbt/task/retry.py +175 -0
- dvt_core-0.1.8/dbt/task/run.py +1209 -0
- dvt_core-0.1.8/dbt/task/run_operation.py +141 -0
- dvt_core-0.1.8/dbt/task/runnable.py +758 -0
- dvt_core-0.1.8/dbt/task/seed.py +103 -0
- dvt_core-0.1.8/dbt/task/show.py +149 -0
- dvt_core-0.1.8/dbt/task/snapshot.py +56 -0
- dvt_core-0.1.8/dbt/task/sql.py +110 -0
- dvt_core-0.1.8/dbt/task/test.py +464 -0
- dvt_core-0.1.8/dbt/tests/fixtures/__init__.py +1 -0
- dvt_core-0.1.8/dbt/tests/fixtures/project.py +620 -0
- dvt_core-0.1.8/dbt/tests/util.py +651 -0
- dvt_core-0.1.8/dbt/tracking.py +529 -0
- dvt_core-0.1.8/dbt/utils/__init__.py +3 -0
- dvt_core-0.1.8/dbt/utils/artifact_upload.py +151 -0
- dvt_core-0.1.8/dbt/utils/utils.py +408 -0
- dvt_core-0.1.8/dbt/version.py +268 -0
- dvt_core-0.1.8/dvt_core.egg-info/PKG-INFO +287 -0
- dvt_core-0.1.8/dvt_core.egg-info/SOURCES.txt +507 -0
- dvt_core-0.1.8/dvt_core.egg-info/dependency_links.txt +1 -0
- dvt_core-0.1.8/dvt_core.egg-info/entry_points.txt +3 -0
- dvt_core-0.1.8/dvt_core.egg-info/not-zip-safe +1 -0
- dvt_core-0.1.8/dvt_core.egg-info/requires.txt +25 -0
- dvt_core-0.1.8/dvt_core.egg-info/top_level.txt +1 -0
- dvt_core-0.1.8/pyproject.toml +126 -0
- dvt_core-0.1.8/setup.cfg +4 -0
- dvt_core-0.1.8/setup.py +26 -0
dvt_core-0.1.8/PKG-INFO
ADDED
@@ -0,0 +1,287 @@
Metadata-Version: 2.4
Name: dvt-core
Version: 0.1.8
Summary: DVT (Data Virtualization Tool) - Multi-source data federation and transformation with intelligent query pushdown and compute layer integration.
Author: DVT Contributors
Maintainer: DVT Contributors
License-Expression: Apache-2.0
Project-URL: Homepage, https://github.com/dvt-core/dvt-core
Project-URL: Documentation, https://github.com/dvt-core/dvt-core#readme
Project-URL: Repository, https://github.com/dvt-core/dvt-core.git
Project-URL: Issues, https://github.com/dvt-core/dvt-core/issues
Keywords: data,virtualization,federation,multi-source,dbt,analytics,transform,duckdb,spark,pyarrow
Classifier: Development Status :: 4 - Beta
Classifier: Operating System :: Microsoft :: Windows
Classifier: Operating System :: MacOS :: MacOS X
Classifier: Operating System :: POSIX :: Linux
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Requires-Python: >=3.10
Description-Content-Type: text/markdown
Requires-Dist: agate<1.10,>=1.7.0
Requires-Dist: Jinja2<4,>=3.1.3
Requires-Dist: mashumaro[msgpack]<3.15,>=3.9
Requires-Dist: click<9.0,>=8.0.2
Requires-Dist: jsonschema<5.0,>=4.19.1
Requires-Dist: networkx<4.0,>=2.3
Requires-Dist: protobuf<7.0,>=6.0
Requires-Dist: requests<3.0.0
Requires-Dist: snowplow-tracker<2.0,>=1.0.2
Requires-Dist: pathspec<0.13,>=0.9
Requires-Dist: sqlparse<0.6.0,>=0.5.0
Requires-Dist: dbt-extractor<=0.6,>=0.5.0
Requires-Dist: dbt-semantic-interfaces<0.10,>=0.9.0
Requires-Dist: dbt-common<2.0,>=1.27.0
Requires-Dist: dbt-adapters<2.0,>=1.15.5
Requires-Dist: dbt-protos<2.0,>=1.0.375
Requires-Dist: pydantic<3
Requires-Dist: packaging>20.9
Requires-Dist: pytz>=2015.7
Requires-Dist: pyyaml>=6.0
Requires-Dist: daff>=1.3.46
Requires-Dist: typing-extensions>=4.4
Requires-Dist: pyarrow>=14.0.0
Requires-Dist: duckdb>=0.9.0
Requires-Dist: pyspark>=3.4.0

# DVT-Core: Data Virtualization Tool

**DVT-Core** is a multi-source data federation and transformation platform built on dbt-core architecture. Query and transform data across multiple heterogeneous data sources with intelligent query pushdown and compute layer integration.

## Features

- 🔄 **Multi-Source Queries**: Join data from PostgreSQL, Snowflake, BigQuery, MySQL, and more in a single query
- 🧠 **Intelligent Routing**: Automatically pushes down queries when possible, uses compute layer when needed
- ⚡ **Zero-Copy Performance**: PyArrow-based data transfer for maximum efficiency
- 🔧 **Familiar Workflow**: Same dbt commands, same project structure, enhanced capabilities
- 🎯 **Smart Compute Selection**: Automatically chooses DuckDB (fast, embedded) or Spark (scalable, distributed)
- 🎛️ **Full Control**: Override everything with `target=` and `compute=` config options
- ✅ **100% Compatible**: Works with existing dbt projects and all dbt adapters

## Quick Start

### Installation

```bash
pip install dvt-core
```

Or with uv:

```bash
uv pip install dvt-core
```

### Configure Multi-Connection Profile

```yaml
# profiles.yml
my_project:
  connections:
    postgres_prod:
      type: postgres
      host: prod-db.example.com
      port: 5432
      user: prod_user
      password: "{{ env_var('POSTGRES_PASSWORD') }}"
      database: analytics
      schema: public
      threads: 4

    snowflake_warehouse:
      type: snowflake
      account: abc123
      user: snow_user
      password: "{{ env_var('SNOWFLAKE_PASSWORD') }}"
      database: warehouse
      schema: public
      warehouse: compute_wh
      threads: 8

  default_target: snowflake_warehouse
  threads: 4
```

### Define Sources with Connections

```yaml
# models/sources.yml
sources:
  - name: postgres_data
    connection: postgres_prod
    tables:
      - name: orders
      - name: customers

  - name: snowflake_data
    connection: snowflake_warehouse
    tables:
      - name: products
```

### Create Multi-Source Model

```sql
-- models/combined_sales.sql
{{ config(
    materialized='table',
    target='snowflake_warehouse',  -- Optional: override materialization target
    compute='duckdb'               -- Optional: force compute engine
) }}

SELECT
    o.order_id,
    o.order_date,
    c.customer_name,
    p.product_name,
    o.quantity * p.price as total_amount
FROM {{ source('postgres_data', 'orders') }} o
JOIN {{ source('postgres_data', 'customers') }} c
    ON o.customer_id = c.customer_id
JOIN {{ source('snowflake_data', 'products') }} p
    ON o.product_id = p.product_id
WHERE o.order_date >= '2024-01-01'
```

### Run DVT

```bash
# Standard dbt commands work
dvt run --select combined_sales

# DVT automatically:
# 1. Analyzes query (sees postgres + snowflake sources)
# 2. Determines federated execution needed
# 3. Selects compute engine (DuckDB or Spark based on workload)
# 4. Loads data from postgres and snowflake via adapters
# 5. Executes join in compute engine
# 6. Materializes result to target (snowflake)
```

## Architecture

```
┌─────────────┐     ┌──────────┐     ┌─────────────┐     ┌──────────┐     ┌──────────────┐
│ Source DBs  │────▶│ Adapters │────▶│   PyArrow   │────▶│ Compute  │────▶│   Adapters   │
│ (Postgres,  │     │  (Read)  │     │  (Bridge)   │     │ (DuckDB/ │     │   (Write)    │
│ MySQL, etc.)│     │          │     │             │     │  Spark)  │     │              │
└─────────────┘     └──────────┘     └─────────────┘     └──────────┘     └──────────────┘
                                                                                 │
                                                                                 ▼
                                                                          ┌──────────────┐
                                                                          │  Target DB   │
                                                                          │ (Snowflake,  │
                                                                          │  BigQuery)   │
                                                                          └──────────────┘
```

## Execution Strategies

### Pushdown (Homogeneous Sources)

When all sources come from the same connection, DVT executes the query directly on the source database:

```sql
-- All sources from same connection → Execute on source database
SELECT * FROM {{ source('postgres', 'orders') }}
JOIN {{ source('postgres', 'customers') }} USING (customer_id)
-- Executed directly in PostgreSQL (no data movement)
```

### Federated (Heterogeneous Sources)

When sources come from different connections, DVT uses the compute layer:

```sql
-- Sources from different connections → Use compute layer
SELECT * FROM {{ source('postgres', 'orders') }}
JOIN {{ source('mysql', 'products') }} USING (product_id)
-- Data loaded into DuckDB/Spark, join executed there
```

## CLI Commands

### Standard dbt Commands

All dbt commands work unchanged:

```bash
dvt run
dvt test
dvt build
dvt docs generate
dvt docs serve
```

### DVT-Specific Commands

Manage external Spark clusters:

```bash
# Register external Spark cluster
dvt compute register prod_cluster --master spark://master:7077

# List registered clusters
dvt compute list

# Remove cluster
dvt compute remove prod_cluster
```

## Configuration Options

### Model Configuration

```sql
{{ config(
    materialized='table',
    target='snowflake_analytics',  -- Where to write results
    compute='spark'                -- Force Spark for processing
) }}
```

### Smart Compute Selection

DVT automatically selects the optimal compute engine:

- **DuckDB**: Small/medium workloads (< 1GB), fast in-process execution
- **Spark**: Large workloads (> 1GB), distributed processing, many sources

Override with `compute='duckdb'` or `compute='spark'` in config.

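The rule of thumb above can be pictured as a small routing function. The sketch below illustrates only that rule; the `SourceEstimate` type, the byte estimates, and the 1 GiB cutoff are assumptions for illustration, not the actual `dbt/compute/smart_selector.py` logic.

```python
# Illustrative only: a toy version of the DuckDB-vs-Spark routing rule described
# above, not the actual dbt/compute/smart_selector.py implementation.
from dataclasses import dataclass
from typing import List, Optional

ONE_GIB = 1024 ** 3  # the "< 1GB" rule of thumb from the bullets above


@dataclass
class SourceEstimate:
    name: str
    estimated_bytes: int  # rough size of the data that must be moved


def choose_compute(sources: List[SourceEstimate], override: Optional[str] = None) -> str:
    """Pick a compute engine: an explicit override wins, otherwise use the size heuristic."""
    if override in ("duckdb", "spark"):
        return override  # user configuration always wins
    total = sum(s.estimated_bytes for s in sources)
    # Small/medium federated workloads stay in-process; large ones go distributed.
    return "duckdb" if total < ONE_GIB else "spark"


# ~200 MB of postgres rows joined with ~50 MB of snowflake rows -> "duckdb"
print(choose_compute([SourceEstimate("orders", 200 * 1024**2),
                      SourceEstimate("products", 50 * 1024**2)]))
```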
## Key Principles

1. **Adapters for I/O only** - Read from sources, write to targets
2. **Compute engines for processing only** - Never materialize
3. **PyArrow as universal data format** - Zero-copy transfer (see the sketch after this list)
4. **Backward compatibility** - All dbt projects work unchanged
5. **User configuration always wins** - Override any automatic decision

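To make the zero-copy hand-off concrete, here is a minimal sketch of the Arrow-to-DuckDB pattern using the public `pyarrow` and `duckdb` APIs. It illustrates the pattern only, with made-up rows standing in for adapter output; it is not DVT's `dbt/compute/arrow_bridge.py`.

```python
# Illustration of the Arrow-based hand-off: rows fetched from a source adapter
# become a pyarrow.Table, which DuckDB can query in place (no row-by-row copy).
import duckdb
import pyarrow as pa

# Pretend these rows came back from source adapters (hypothetical data).
orders = pa.table({
    "order_id": [1, 2, 3],
    "product_id": [10, 10, 20],
    "quantity": [2, 1, 5],
})
products = pa.table({
    "product_id": [10, 20],
    "price": [9.99, 4.50],
})

con = duckdb.connect()            # in-memory compute engine
con.register("orders", orders)    # zero-copy: DuckDB scans the Arrow buffers directly
con.register("products", products)

result = con.execute("""
    SELECT o.order_id, o.quantity * p.price AS total_amount
    FROM orders o JOIN products p USING (product_id)
""").fetch_arrow_table()

# `result` is again an Arrow table, ready to be handed to a write adapter.
print(result.to_pydict())
```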
## Requirements

- Python 3.10+
- dbt-compatible adapters for your data sources
- PyArrow, DuckDB, PySpark (installed automatically)

## License

Apache License 2.0 (same as dbt-core)

## Acknowledgments

Built on [dbt-core](https://github.com/dbt-labs/dbt-core) architecture. DVT extends dbt's capabilities while preserving its excellent design patterns and developer experience.

## Links

- [Documentation](https://github.com/dvt-core/dvt-core#readme)
- [Issues](https://github.com/dvt-core/dvt-core/issues)
- [Repository](https://github.com/dvt-core/dvt-core)

---

**Transform data across any source, materialize to any target, with intelligent query optimization.**
dvt_core-0.1.8/README.md
ADDED
@@ -0,0 +1,236 @@
(Identical to the long-description portion of PKG-INFO above, i.e. PKG-INFO lines 52-287.)
dvt_core-0.1.8/dbt/__init__.py
ADDED
@@ -0,0 +1,7 @@
# N.B.
# This will add to the package’s __path__ all subdirectories of directories on sys.path named after the package which effectively combines both modules into a single namespace (dbt.adapters)
# The matching statement is in plugins/postgres/dbt/__init__.py

from pkgutil import extend_path

__path__ = extend_path(__path__, __name__)
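As a standalone illustration of the comment above (throwaway stub packages in a temp directory, not part of dvt-core): two directories on `sys.path` each ship a `dbt/` package, and `extend_path` merges their submodules into a single namespace.

```python
# Standalone sketch of the pkgutil.extend_path mechanism referenced above.
# It builds two throwaway "distributions" that both ship a dbt/ package and
# shows that their submodules end up importable under the same namespace.
import os
import sys
import tempfile

INIT = "from pkgutil import extend_path\n__path__ = extend_path(__path__, __name__)\n"

root = tempfile.mkdtemp()
for dist, stub in [("core_dist", "core_stub"), ("plugin_dist", "plugin_stub")]:
    pkg_dir = os.path.join(root, dist, "dbt")
    os.makedirs(pkg_dir)
    with open(os.path.join(pkg_dir, "__init__.py"), "w") as f:
        f.write(INIT)
    with open(os.path.join(pkg_dir, f"{stub}.py"), "w") as f:
        f.write(f"NAME = '{stub}'\n")
    sys.path.insert(0, os.path.join(root, dist))

import dbt.core_stub    # provided by core_dist
import dbt.plugin_stub  # provided by plugin_dist, same "dbt" namespace

print(dbt.core_stub.NAME, dbt.plugin_stub.NAME)  # core_stub plugin_stub
```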
dvt_core-0.1.8/dbt/_pydantic_shim.py
ADDED
@@ -0,0 +1,26 @@
# type: ignore

"""Shim to allow support for both Pydantic 1 and Pydantic 2.

dbt-core must support both major versions of Pydantic because dbt-core users might be using an environment with
either version, and we can't restrict them to one or the other. Here, we essentially import all Pydantic objects
from version 1 that we use. Throughout the repo, we import these objects from this file instead of from Pydantic
directly, meaning that we essentially only use Pydantic 1 in dbt-core currently, but without forcing that restriction
on dbt users. The development environment for this repo should be pinned to Pydantic 1 to ensure devs get appropriate
type hints.
"""

from importlib.metadata import version

pydantic_version = version("pydantic")
# Pydantic uses semantic versioning, i.e. <major>.<minor>.<patch>, and we need to know the major
pydantic_major = pydantic_version.split(".")[0]

if pydantic_major == "1":
    from pydantic import BaseSettings  # noqa: F401
elif pydantic_major == "2":
    from pydantic.v1 import BaseSettings  # noqa: F401
else:
    raise RuntimeError(
        f"Currently only pydantic 1 and 2 are supported, found pydantic {pydantic_version}"
    )
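Downstream modules are expected to import these names from the shim rather than from `pydantic` directly, so the same code runs under either major version. A minimal usage sketch (the settings class here is hypothetical, not part of dvt-core):

```python
# Hypothetical consumer code: import BaseSettings from the shim, not from pydantic,
# so it resolves to the v1 API under both Pydantic 1.x and 2.x.
from dbt._pydantic_shim import BaseSettings


class TelemetrySettings(BaseSettings):  # hypothetical settings class, for illustration only
    send_anonymous_usage_stats: bool = True
    endpoint: str = "https://example.invalid/telemetry"

    class Config:
        env_prefix = "DVT_"  # e.g. DVT_SEND_ANONYMOUS_USAGE_STATS=0


settings = TelemetrySettings()
print(settings.send_anonymous_usage_stats, settings.endpoint)
```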
dvt_core-0.1.8/dbt/artifacts/__init__.py
File without changes
dvt_core-0.1.8/dbt/artifacts/exceptions/__init__.py
ADDED
@@ -0,0 +1 @@
from dbt.artifacts.exceptions.schemas import IncompatibleSchemaError
dvt_core-0.1.8/dbt/artifacts/exceptions/schemas.py
ADDED
@@ -0,0 +1,31 @@
from typing import Optional

from dbt_common.exceptions import DbtRuntimeError


class IncompatibleSchemaError(DbtRuntimeError):
    def __init__(self, expected: str, found: Optional[str] = None) -> None:
        self.expected = expected
        self.found = found
        self.filename = "input file"

        super().__init__(msg=self.get_message())

    def add_filename(self, filename: str):
        self.filename = filename
        self.msg = self.get_message()

    def get_message(self) -> str:
        found_str = "nothing"
        if self.found is not None:
            found_str = f'"{self.found}"'

        msg = (
            f'Expected a schema version of "{self.expected}" in '
            f"{self.filename}, but found {found_str}. Are you running with a "
            f"different version of dbt?"
        )
        return msg

    CODE = 10014
    MESSAGE = "Incompatible Schema"