dvt_core-0.52.2-cp310-cp310-macosx_10_9_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of dvt-core might be problematic.
- dbt/__init__.py +7 -0
- dbt/_pydantic_shim.py +26 -0
- dbt/artifacts/__init__.py +0 -0
- dbt/artifacts/exceptions/__init__.py +1 -0
- dbt/artifacts/exceptions/schemas.py +31 -0
- dbt/artifacts/resources/__init__.py +116 -0
- dbt/artifacts/resources/base.py +67 -0
- dbt/artifacts/resources/types.py +93 -0
- dbt/artifacts/resources/v1/analysis.py +10 -0
- dbt/artifacts/resources/v1/catalog.py +23 -0
- dbt/artifacts/resources/v1/components.py +274 -0
- dbt/artifacts/resources/v1/config.py +277 -0
- dbt/artifacts/resources/v1/documentation.py +11 -0
- dbt/artifacts/resources/v1/exposure.py +51 -0
- dbt/artifacts/resources/v1/function.py +52 -0
- dbt/artifacts/resources/v1/generic_test.py +31 -0
- dbt/artifacts/resources/v1/group.py +21 -0
- dbt/artifacts/resources/v1/hook.py +11 -0
- dbt/artifacts/resources/v1/macro.py +29 -0
- dbt/artifacts/resources/v1/metric.py +172 -0
- dbt/artifacts/resources/v1/model.py +145 -0
- dbt/artifacts/resources/v1/owner.py +10 -0
- dbt/artifacts/resources/v1/saved_query.py +111 -0
- dbt/artifacts/resources/v1/seed.py +41 -0
- dbt/artifacts/resources/v1/semantic_layer_components.py +72 -0
- dbt/artifacts/resources/v1/semantic_model.py +314 -0
- dbt/artifacts/resources/v1/singular_test.py +14 -0
- dbt/artifacts/resources/v1/snapshot.py +91 -0
- dbt/artifacts/resources/v1/source_definition.py +84 -0
- dbt/artifacts/resources/v1/sql_operation.py +10 -0
- dbt/artifacts/resources/v1/unit_test_definition.py +77 -0
- dbt/artifacts/schemas/__init__.py +0 -0
- dbt/artifacts/schemas/base.py +191 -0
- dbt/artifacts/schemas/batch_results.py +24 -0
- dbt/artifacts/schemas/catalog/__init__.py +11 -0
- dbt/artifacts/schemas/catalog/v1/__init__.py +0 -0
- dbt/artifacts/schemas/catalog/v1/catalog.py +59 -0
- dbt/artifacts/schemas/freshness/__init__.py +1 -0
- dbt/artifacts/schemas/freshness/v3/__init__.py +0 -0
- dbt/artifacts/schemas/freshness/v3/freshness.py +158 -0
- dbt/artifacts/schemas/manifest/__init__.py +2 -0
- dbt/artifacts/schemas/manifest/v12/__init__.py +0 -0
- dbt/artifacts/schemas/manifest/v12/manifest.py +211 -0
- dbt/artifacts/schemas/results.py +147 -0
- dbt/artifacts/schemas/run/__init__.py +2 -0
- dbt/artifacts/schemas/run/v5/__init__.py +0 -0
- dbt/artifacts/schemas/run/v5/run.py +184 -0
- dbt/artifacts/schemas/upgrades/__init__.py +4 -0
- dbt/artifacts/schemas/upgrades/upgrade_manifest.py +174 -0
- dbt/artifacts/schemas/upgrades/upgrade_manifest_dbt_version.py +2 -0
- dbt/artifacts/utils/validation.py +153 -0
- dbt/cli/__init__.py +1 -0
- dbt/cli/context.py +17 -0
- dbt/cli/exceptions.py +57 -0
- dbt/cli/flags.py +560 -0
- dbt/cli/main.py +2039 -0
- dbt/cli/option_types.py +121 -0
- dbt/cli/options.py +80 -0
- dbt/cli/params.py +804 -0
- dbt/cli/requires.py +490 -0
- dbt/cli/resolvers.py +50 -0
- dbt/cli/types.py +40 -0
- dbt/clients/__init__.py +0 -0
- dbt/clients/checked_load.py +83 -0
- dbt/clients/git.py +164 -0
- dbt/clients/jinja.py +206 -0
- dbt/clients/jinja_static.py +245 -0
- dbt/clients/registry.py +192 -0
- dbt/clients/yaml_helper.py +68 -0
- dbt/compilation.py +876 -0
- dbt/compute/__init__.py +14 -0
- dbt/compute/engines/__init__.py +12 -0
- dbt/compute/engines/spark_engine.py +624 -0
- dbt/compute/federated_executor.py +837 -0
- dbt/compute/filter_pushdown.cpython-310-darwin.so +0 -0
- dbt/compute/filter_pushdown.py +273 -0
- dbt/compute/jar_provisioning.cpython-310-darwin.so +0 -0
- dbt/compute/jar_provisioning.py +255 -0
- dbt/compute/java_compat.cpython-310-darwin.so +0 -0
- dbt/compute/java_compat.py +689 -0
- dbt/compute/jdbc_utils.cpython-310-darwin.so +0 -0
- dbt/compute/jdbc_utils.py +678 -0
- dbt/compute/smart_selector.cpython-310-darwin.so +0 -0
- dbt/compute/smart_selector.py +311 -0
- dbt/compute/strategies/__init__.py +54 -0
- dbt/compute/strategies/base.py +165 -0
- dbt/compute/strategies/dataproc.py +207 -0
- dbt/compute/strategies/emr.py +203 -0
- dbt/compute/strategies/local.py +364 -0
- dbt/compute/strategies/standalone.py +262 -0
- dbt/config/__init__.py +4 -0
- dbt/config/catalogs.py +94 -0
- dbt/config/compute.cpython-310-darwin.so +0 -0
- dbt/config/compute.py +547 -0
- dbt/config/dvt_profile.cpython-310-darwin.so +0 -0
- dbt/config/dvt_profile.py +342 -0
- dbt/config/profile.py +422 -0
- dbt/config/project.py +873 -0
- dbt/config/project_utils.py +28 -0
- dbt/config/renderer.py +231 -0
- dbt/config/runtime.py +553 -0
- dbt/config/selectors.py +208 -0
- dbt/config/utils.py +77 -0
- dbt/constants.py +28 -0
- dbt/context/__init__.py +0 -0
- dbt/context/base.py +745 -0
- dbt/context/configured.py +135 -0
- dbt/context/context_config.py +382 -0
- dbt/context/docs.py +82 -0
- dbt/context/exceptions_jinja.py +178 -0
- dbt/context/macro_resolver.py +195 -0
- dbt/context/macros.py +171 -0
- dbt/context/manifest.py +72 -0
- dbt/context/providers.py +2249 -0
- dbt/context/query_header.py +13 -0
- dbt/context/secret.py +58 -0
- dbt/context/target.py +74 -0
- dbt/contracts/__init__.py +0 -0
- dbt/contracts/files.py +413 -0
- dbt/contracts/graph/__init__.py +0 -0
- dbt/contracts/graph/manifest.py +1904 -0
- dbt/contracts/graph/metrics.py +97 -0
- dbt/contracts/graph/model_config.py +70 -0
- dbt/contracts/graph/node_args.py +42 -0
- dbt/contracts/graph/nodes.py +1806 -0
- dbt/contracts/graph/semantic_manifest.py +232 -0
- dbt/contracts/graph/unparsed.py +811 -0
- dbt/contracts/project.py +417 -0
- dbt/contracts/results.py +53 -0
- dbt/contracts/selection.py +23 -0
- dbt/contracts/sql.py +85 -0
- dbt/contracts/state.py +68 -0
- dbt/contracts/util.py +46 -0
- dbt/deprecations.py +346 -0
- dbt/deps/__init__.py +0 -0
- dbt/deps/base.py +152 -0
- dbt/deps/git.py +195 -0
- dbt/deps/local.py +79 -0
- dbt/deps/registry.py +130 -0
- dbt/deps/resolver.py +149 -0
- dbt/deps/tarball.py +120 -0
- dbt/docs/source/_ext/dbt_click.py +119 -0
- dbt/docs/source/conf.py +32 -0
- dbt/env_vars.py +64 -0
- dbt/event_time/event_time.py +40 -0
- dbt/event_time/sample_window.py +60 -0
- dbt/events/__init__.py +15 -0
- dbt/events/base_types.py +36 -0
- dbt/events/core_types_pb2.py +2 -0
- dbt/events/logging.py +108 -0
- dbt/events/types.py +2516 -0
- dbt/exceptions.py +1486 -0
- dbt/flags.py +89 -0
- dbt/graph/__init__.py +11 -0
- dbt/graph/cli.py +247 -0
- dbt/graph/graph.py +172 -0
- dbt/graph/queue.py +214 -0
- dbt/graph/selector.py +374 -0
- dbt/graph/selector_methods.py +975 -0
- dbt/graph/selector_spec.py +222 -0
- dbt/graph/thread_pool.py +18 -0
- dbt/hooks.py +21 -0
- dbt/include/README.md +49 -0
- dbt/include/__init__.py +3 -0
- dbt/include/starter_project/.gitignore +4 -0
- dbt/include/starter_project/README.md +15 -0
- dbt/include/starter_project/__init__.py +3 -0
- dbt/include/starter_project/analyses/.gitkeep +0 -0
- dbt/include/starter_project/dbt_project.yml +36 -0
- dbt/include/starter_project/macros/.gitkeep +0 -0
- dbt/include/starter_project/models/example/my_first_dbt_model.sql +27 -0
- dbt/include/starter_project/models/example/my_second_dbt_model.sql +6 -0
- dbt/include/starter_project/models/example/schema.yml +21 -0
- dbt/include/starter_project/seeds/.gitkeep +0 -0
- dbt/include/starter_project/snapshots/.gitkeep +0 -0
- dbt/include/starter_project/tests/.gitkeep +0 -0
- dbt/internal_deprecations.py +26 -0
- dbt/jsonschemas/__init__.py +3 -0
- dbt/jsonschemas/jsonschemas.py +309 -0
- dbt/jsonschemas/project/0.0.110.json +4717 -0
- dbt/jsonschemas/project/0.0.85.json +2015 -0
- dbt/jsonschemas/resources/0.0.110.json +2636 -0
- dbt/jsonschemas/resources/0.0.85.json +2536 -0
- dbt/jsonschemas/resources/latest.json +6773 -0
- dbt/links.py +4 -0
- dbt/materializations/__init__.py +0 -0
- dbt/materializations/incremental/__init__.py +0 -0
- dbt/materializations/incremental/microbatch.py +236 -0
- dbt/mp_context.py +8 -0
- dbt/node_types.py +37 -0
- dbt/parser/__init__.py +23 -0
- dbt/parser/analysis.py +21 -0
- dbt/parser/base.py +548 -0
- dbt/parser/common.py +266 -0
- dbt/parser/docs.py +52 -0
- dbt/parser/fixtures.py +51 -0
- dbt/parser/functions.py +30 -0
- dbt/parser/generic_test.py +100 -0
- dbt/parser/generic_test_builders.py +333 -0
- dbt/parser/hooks.py +118 -0
- dbt/parser/macros.py +137 -0
- dbt/parser/manifest.py +2204 -0
- dbt/parser/models.py +573 -0
- dbt/parser/partial.py +1178 -0
- dbt/parser/read_files.py +445 -0
- dbt/parser/schema_generic_tests.py +422 -0
- dbt/parser/schema_renderer.py +111 -0
- dbt/parser/schema_yaml_readers.py +935 -0
- dbt/parser/schemas.py +1466 -0
- dbt/parser/search.py +149 -0
- dbt/parser/seeds.py +28 -0
- dbt/parser/singular_test.py +20 -0
- dbt/parser/snapshots.py +44 -0
- dbt/parser/sources.py +558 -0
- dbt/parser/sql.py +62 -0
- dbt/parser/unit_tests.py +621 -0
- dbt/plugins/__init__.py +20 -0
- dbt/plugins/contracts.py +9 -0
- dbt/plugins/exceptions.py +2 -0
- dbt/plugins/manager.py +163 -0
- dbt/plugins/manifest.py +21 -0
- dbt/profiler.py +20 -0
- dbt/py.typed +1 -0
- dbt/query_analyzer.cpython-310-darwin.so +0 -0
- dbt/query_analyzer.py +410 -0
- dbt/runners/__init__.py +2 -0
- dbt/runners/exposure_runner.py +7 -0
- dbt/runners/no_op_runner.py +45 -0
- dbt/runners/saved_query_runner.py +7 -0
- dbt/selected_resources.py +8 -0
- dbt/task/__init__.py +0 -0
- dbt/task/base.py +503 -0
- dbt/task/build.py +197 -0
- dbt/task/clean.py +56 -0
- dbt/task/clone.py +161 -0
- dbt/task/compile.py +150 -0
- dbt/task/compute.py +454 -0
- dbt/task/debug.py +505 -0
- dbt/task/deps.py +280 -0
- dbt/task/docs/__init__.py +3 -0
- dbt/task/docs/generate.py +660 -0
- dbt/task/docs/index.html +250 -0
- dbt/task/docs/serve.py +29 -0
- dbt/task/freshness.py +322 -0
- dbt/task/function.py +121 -0
- dbt/task/group_lookup.py +46 -0
- dbt/task/init.py +553 -0
- dbt/task/java.py +316 -0
- dbt/task/list.py +236 -0
- dbt/task/printer.py +175 -0
- dbt/task/retry.py +175 -0
- dbt/task/run.py +1306 -0
- dbt/task/run_operation.py +141 -0
- dbt/task/runnable.py +758 -0
- dbt/task/seed.py +103 -0
- dbt/task/show.py +149 -0
- dbt/task/snapshot.py +56 -0
- dbt/task/spark.py +414 -0
- dbt/task/sql.py +110 -0
- dbt/task/target_sync.py +759 -0
- dbt/task/test.py +464 -0
- dbt/tests/fixtures/__init__.py +1 -0
- dbt/tests/fixtures/project.py +620 -0
- dbt/tests/util.py +651 -0
- dbt/tracking.py +529 -0
- dbt/utils/__init__.py +3 -0
- dbt/utils/artifact_upload.py +151 -0
- dbt/utils/utils.py +408 -0
- dbt/version.py +268 -0
- dvt_cli/__init__.py +72 -0
- dvt_core-0.52.2.dist-info/METADATA +286 -0
- dvt_core-0.52.2.dist-info/RECORD +275 -0
- dvt_core-0.52.2.dist-info/WHEEL +5 -0
- dvt_core-0.52.2.dist-info/entry_points.txt +2 -0
- dvt_core-0.52.2.dist-info/top_level.txt +2 -0
dbt/task/docs/generate.py
@@ -0,0 +1,660 @@
import os
import shutil
from dataclasses import replace
from datetime import datetime, timezone
from itertools import chain
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple

import agate

import dbt.compilation
import dbt.exceptions
import dbt.utils
import dbt_common.utils.formatting
from dbt.adapters.events.types import (
    BuildingCatalog,
    CannotGenerateDocs,
    CatalogWritten,
    WriteCatalogFailure,
)
from dbt.adapters.factory import get_adapter
from dbt.artifacts.schemas.catalog import (
    CatalogArtifact,
    CatalogKey,
    CatalogResults,
    CatalogTable,
    ColumnMetadata,
    PrimitiveDict,
    StatsDict,
    StatsItem,
    TableMetadata,
)
from dbt.artifacts.schemas.results import NodeStatus
from dbt.constants import CATALOG_FILENAME, MANIFEST_FILE_NAME
from dbt.context.providers import generate_runtime_macro_context
from dbt.contracts.graph.manifest import Manifest
from dbt.contracts.graph.nodes import ResultNode
from dbt.events.types import ArtifactWritten
from dbt.exceptions import AmbiguousCatalogMatchError
from dbt.graph import ResourceTypeSelector
from dbt.graph.graph import UniqueId
from dbt.node_types import EXECUTABLE_NODE_TYPES, NodeType
from dbt.parser.manifest import write_manifest
from dbt.task.compile import CompileTask
from dbt.task.docs import DOCS_INDEX_FILE_PATH
from dbt.utils.artifact_upload import add_artifact_produced
from dbt_common.clients.system import load_file_contents
from dbt_common.dataclass_schema import ValidationError
from dbt_common.events.functions import fire_event
from dbt_common.exceptions import DbtInternalError

def get_stripped_prefix(source: Dict[str, Any], prefix: str) -> Dict[str, Any]:
    """Go through the source, extracting every key/value pair where the key starts
    with the given prefix.
    """
    cut = len(prefix)
    return {k[cut:]: v for k, v in source.items() if k.startswith(prefix)}


def build_catalog_table(data, adapter_type: Optional[str] = None) -> CatalogTable:
    # build the new table's metadata + stats
    metadata = TableMetadata.from_dict(get_stripped_prefix(data, "table_"))
    stats = format_stats(get_stripped_prefix(data, "stats:"))

    # DVT v0.4.3: Add adapter type metadata for visualization
    # This enables adapter logos and connection badges in dbt docs
    if adapter_type:
        # Add adapter type to metadata comment for catalog display
        comment_text = metadata.comment or ""
        if comment_text and not comment_text.endswith(' '):
            comment_text += " "
        metadata = replace(
            metadata,
            comment=f"{comment_text}[adapter:{adapter_type}]"
        )

    return CatalogTable(
        metadata=metadata,
        stats=stats,
        columns={},
    )

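# Editor's illustration (not part of the package): get_stripped_prefix fans a flat
# catalog row out into per-entity dicts, which is how build_catalog_table above
# separates table metadata from stats. Assuming a row shaped like the adapter
# results:
#
#     row = {"table_schema": "analytics", "table_name": "orders", "column_name": "id"}
#     get_stripped_prefix(row, "table_")   # -> {"schema": "analytics", "name": "orders"}
#     get_stripped_prefix(row, "column_")  # -> {"name": "id"}
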
# keys are database name, schema name, table name
class Catalog(Dict[CatalogKey, CatalogTable]):
    def __init__(self, columns: List[PrimitiveDict]) -> None:
        super().__init__()
        for col in columns:
            self.add_column(col)

    def get_table(self, data: PrimitiveDict, adapter_type: Optional[str] = None) -> CatalogTable:
        database = data.get("table_database")
        if database is None:
            dkey: Optional[str] = None
        else:
            dkey = str(database)

        try:
            key = CatalogKey(
                dkey,
                str(data["table_schema"]),
                str(data["table_name"]),
            )
        except KeyError as exc:
            raise dbt_common.exceptions.CompilationError(
                "Catalog information missing required key {} (got {})".format(exc, data)
            )
        table: CatalogTable
        if key in self:
            table = self[key]
        else:
            table = build_catalog_table(data, adapter_type)
            self[key] = table
        return table

    def add_column(self, data: PrimitiveDict):
        table = self.get_table(data)
        column_data = get_stripped_prefix(data, "column_")
        # the index should really never be that big so it's ok to end up
        # serializing this to JSON (2^53 is the max safe value there)
        column_data["index"] = int(column_data["index"])

        column = ColumnMetadata.from_dict(column_data)
        table.columns[column.name] = column

    def make_unique_id_map(
        self, manifest: Manifest, selected_node_ids: Optional[Set[UniqueId]] = None
    ) -> Tuple[Dict[str, CatalogTable], Dict[str, CatalogTable]]:
        """
        Create mappings between CatalogKeys and CatalogTables for nodes and sources,
        filtered by selected_node_ids.

        By default, selected_node_ids is None and all nodes and sources defined in
        the manifest are included in the mappings.
        """
        nodes: Dict[str, CatalogTable] = {}
        sources: Dict[str, CatalogTable] = {}

        node_map, source_map = get_unique_id_mapping(manifest)
        table: CatalogTable
        for table in self.values():
            key = table.key()
            if key in node_map:
                unique_id = node_map[key]
                if selected_node_ids is None or unique_id in selected_node_ids:
                    # DVT v0.4.3: Add comprehensive adapter and connection metadata for nodes
                    node = manifest.nodes.get(unique_id)
                    connection_name = None
                    adapter_type = None
                    compute_engine = None

                    if node:
                        # Get target connection name
                        if hasattr(node.config, 'target') and node.config.target:
                            connection_name = node.config.target

                        # Get compute engine if specified
                        if hasattr(node.config, 'compute') and node.config.compute:
                            compute_engine = node.config.compute

                    # Build metadata tags for catalog display
                    comment_text = table.metadata.comment or ""
                    tags = []

                    if connection_name:
                        tags.append(f"target:{connection_name}")
                    if compute_engine:
                        tags.append(f"compute:{compute_engine}")

                    if tags:
                        if comment_text and not comment_text.endswith(' '):
                            comment_text += " "
                        comment_text += f"[{' | '.join(tags)}]"

                    # Create updated metadata with enriched info
                    updated_metadata = replace(
                        table.metadata,
                        comment=comment_text if tags else table.metadata.comment
                    )
                    nodes[unique_id] = replace(table, unique_id=unique_id, metadata=updated_metadata)

            unique_ids = source_map.get(table.key(), set())
            for unique_id in unique_ids:
                if unique_id in sources:
                    raise AmbiguousCatalogMatchError(
                        unique_id,
                        sources[unique_id].to_dict(omit_none=True),
                        table.to_dict(omit_none=True),
                    )
                elif selected_node_ids is None or unique_id in selected_node_ids:
                    # DVT v0.4.3: Add comprehensive adapter and connection metadata for sources
                    source = manifest.sources.get(unique_id)
                    connection_name = None
                    adapter_type = None

                    if source:
                        # Get connection name for source
                        if hasattr(source, 'connection') and source.connection:
                            connection_name = source.connection

                        # Determining the adapter type from the connection would
                        # require access to the RuntimeConfig's profile info. For
                        # now, add just the connection tag and let the dbt docs UI
                        # handle it.

                    # Build metadata tags for catalog display
                    comment_text = table.metadata.comment or ""
                    tags = []

                    if connection_name:
                        tags.append(f"source:{connection_name}")

                    if tags:
                        if comment_text and not comment_text.endswith(' '):
                            comment_text += " "
                        comment_text += f"[{' | '.join(tags)}]"

                    # Create updated metadata with enriched info
                    updated_metadata = replace(
                        table.metadata,
                        comment=comment_text if tags else table.metadata.comment
                    )
                    sources[unique_id] = replace(table, unique_id=unique_id, metadata=updated_metadata)
        return nodes, sources

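# Editor's illustration (not part of the package): the enrichment above only appends
# bracketed tags to whatever comment the table already has. A table comment of
# "Raw orders" on a model configured with a hypothetical target "warehouse_b" and
# compute "spark" would surface in catalog.json as:
#
#     "Raw orders [target:warehouse_b | compute:spark]"
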
def format_stats(stats: PrimitiveDict) -> StatsDict:
    """Given a dictionary following this layout:

    {
        'encoded:label': 'Encoded',
        'encoded:value': 'Yes',
        'encoded:description': 'Indicates if the column is encoded',
        'encoded:include': True,

        'size:label': 'Size',
        'size:value': 128,
        'size:description': 'Size of the table in MB',
        'size:include': True,
    }

    format_stats will convert the dict into a StatsDict with keys of 'encoded'
    and 'size'.
    """
    stats_collector: StatsDict = {}

    base_keys = {k.split(":")[0] for k in stats}
    for key in base_keys:
        dct: PrimitiveDict = {"id": key}
        for subkey in ("label", "value", "description", "include"):
            dct[subkey] = stats["{}:{}".format(key, subkey)]

        try:
            stats_item = StatsItem.from_dict(dct)
        except ValidationError:
            continue
        if stats_item.include:
            stats_collector[key] = stats_item

    # we always have a 'has_stats' field, it's never included
    has_stats = StatsItem(
        id="has_stats",
        label="Has Stats?",
        value=len(stats_collector) > 0,
        description="Indicates whether there are statistics for this table",
        include=False,
    )
    stats_collector["has_stats"] = has_stats
    return stats_collector

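# Editor's illustration (not part of the package): for the docstring's example
# input, format_stats returns a StatsDict along these lines, plus the synthetic
# 'has_stats' marker that is always present but never included:
#
#     {
#         "encoded": StatsItem(id="encoded", label="Encoded", value="Yes", ..., include=True),
#         "size": StatsItem(id="size", label="Size", value=128, ..., include=True),
#         "has_stats": StatsItem(id="has_stats", label="Has Stats?", value=True, include=False),
#     }
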
def mapping_key(node: ResultNode) -> CatalogKey:
    dkey = dbt_common.utils.formatting.lowercase(node.database)
    return CatalogKey(dkey, node.schema.lower(), node.identifier.lower())


def get_unique_id_mapping(
    manifest: Manifest,
) -> Tuple[Dict[CatalogKey, str], Dict[CatalogKey, Set[str]]]:
    # A single relation could have multiple unique IDs pointing to it if a
    # source were also a node.
    node_map: Dict[CatalogKey, str] = {}
    source_map: Dict[CatalogKey, Set[str]] = {}
    for unique_id, node in manifest.nodes.items():
        key = mapping_key(node)
        node_map[key] = unique_id

    for unique_id, source in manifest.sources.items():
        key = mapping_key(source)
        if key not in source_map:
            source_map[key] = set()
        source_map[key].add(unique_id)
    return node_map, source_map

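# Editor's note (illustration, not part of the package): mapping_key lowercases the
# database, schema, and identifier, so catalog rows returned by case-insensitive
# warehouses still match manifest entries. Assuming a node materialized at
# ANALYTICS.PUBLIC.ORDERS:
#
#     mapping_key(node)
#     # -> CatalogKey(database="analytics", schema="public", name="orders")
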
class GenerateTask(CompileTask):
    def run(self) -> CatalogArtifact:
        compile_results = None
        if self.args.compile:
            compile_results = CompileTask.run(self)
            if any(r.status == NodeStatus.Error for r in compile_results):
                fire_event(CannotGenerateDocs())
                return CatalogArtifact.from_results(
                    nodes={},
                    sources={},
                    generated_at=datetime.now(timezone.utc).replace(tzinfo=None),
                    errors=None,
                    compile_results=compile_results,
                )

        shutil.copyfile(
            DOCS_INDEX_FILE_PATH, os.path.join(self.config.project_target_path, "index.html")
        )

        for asset_path in self.config.asset_paths:
            to_asset_path = os.path.join(self.config.project_target_path, asset_path)

            if os.path.exists(to_asset_path):
                shutil.rmtree(to_asset_path)

            from_asset_path = os.path.join(self.config.project_root, asset_path)

            if os.path.exists(from_asset_path):
                shutil.copytree(from_asset_path, to_asset_path)

        if self.manifest is None:
            raise DbtInternalError("self.manifest was None in run!")

        selected_node_ids: Optional[Set[UniqueId]] = None
        if self.args.empty_catalog:
            catalog_table: agate.Table = agate.Table([])
            exceptions: List[Exception] = []
            selected_node_ids = set()
        else:
            # DVT v0.4.4: Multi-adapter catalog generation
            # Group catalogable nodes by their connection/adapter to avoid cross-db errors
            fire_event(BuildingCatalog())

            # Get selected nodes if applicable
            relations = None
            if self.job_queue is not None:
                selected_node_ids = self.job_queue.get_selected_nodes()
                selected_nodes = self._get_nodes_from_ids(self.manifest, selected_node_ids)

                # Source selection is handled separately
                selected_source_ids = self._get_selected_source_ids()
                selected_source_nodes = self._get_nodes_from_ids(
                    self.manifest, selected_source_ids
                )
                selected_node_ids.update(selected_source_ids)
                selected_nodes.extend(selected_source_nodes)

            # Group all catalogable nodes by their connection/adapter
            catalogable_nodes = chain(
                [
                    node
                    for node in self.manifest.nodes.values()
                    if (node.is_relational and not node.is_ephemeral_model)
                ],
                self.manifest.sources.values(),
            )

            # Group nodes by connection name
            from collections import defaultdict

            from dbt.contracts.graph.nodes import SourceDefinition

            nodes_by_connection: Dict[str, List] = defaultdict(list)
            for node in catalogable_nodes:
                # Determine which connection/adapter this node uses
                if isinstance(node, SourceDefinition):
                    # Sources use their 'connection' field or meta.connection
                    connection_name = (
                        node.connection
                        or (node.meta.get('connection') if node.meta else None)
                        or self.config.target_name
                    )
                elif hasattr(node, 'config') and hasattr(node.config, 'target') and node.config.target:
                    # Models use config.target override
                    connection_name = node.config.target
                else:
                    # Default to target connection
                    connection_name = self.config.target_name

                nodes_by_connection[connection_name].append(node)

            # Query catalog for each connection with its appropriate adapter
            all_catalog_tables: List[agate.Table] = []
            exceptions: List[Exception] = []

            for connection_name, nodes in nodes_by_connection.items():
                # Initialized up front so the fallback handler below can still use
                # it if an earlier step raises
                connection_schemas: Set[Tuple[str, str]] = set()
                try:
                    # Get adapter for this connection
                    adapter = self.config.get_adapter(connection_name)

                    # DVT v0.4.7: Set macro resolver and context generator for adapter
                    adapter.set_macro_resolver(self.manifest)
                    adapter.set_macro_context_generator(generate_runtime_macro_context)

                    with adapter.connection_named(f"generate_catalog_{connection_name}"):
                        # Build relations set for this connection if we have selected nodes
                        connection_relations = None
                        if self.job_queue is not None and selected_node_ids:
                            connection_relations = {
                                adapter.Relation.create_from(adapter.config, node)
                                for node in nodes
                                if node.unique_id in selected_node_ids
                            }

                        # Get schemas used by this connection's nodes
                        for node in nodes:
                            if hasattr(node, 'schema') and node.schema:
                                if hasattr(node, 'database') and node.database:
                                    connection_schemas.add((node.database, node.schema))

                        # Query catalog for this connection's nodes
                        catalog_table_part, connection_exceptions = adapter.get_filtered_catalog(
                            nodes, connection_schemas, connection_relations
                        )

                        all_catalog_tables.append(catalog_table_part)

                        # DVT v0.4.7: Filter out "not implemented" errors from Snowflake/other
                        # adapters that don't support catalog generation
                        filtered_exceptions = [
                            e for e in connection_exceptions
                            if not ("not implemented" in str(e).lower()
                                    and isinstance(e, dbt.exceptions.CompilationError))
                        ]
                        exceptions.extend(filtered_exceptions)

                except dbt.exceptions.CompilationError as e:
                    # DVT v0.4.9: Universal fallback for adapters without get_catalog_relations
                    if "not implemented" in str(e).lower():
                        try:
                            # Try INFORMATION_SCHEMA fallback (works for most SQL databases)
                            catalog_table_part = self._get_catalog_via_information_schema(
                                adapter, connection_name, connection_schemas
                            )
                            if catalog_table_part and len(catalog_table_part) > 0:
                                all_catalog_tables.append(catalog_table_part)
                                fire_event(BuildingCatalog())  # Log success
                        except Exception as fallback_ex:
                            # DVT v0.4.9: Log fallback errors for debugging
                            import traceback

                            fire_event(
                                CannotGenerateDocs(
                                    msg=f"INFORMATION_SCHEMA fallback failed for '{connection_name}': {str(fallback_ex)}\n{traceback.format_exc()}"
                                )
                            )
                    else:
                        # Other compilation errors should be reported
                        exceptions.append(e)
                except Exception as e:
                    # Log error but continue with other connections
                    exceptions.append(e)

            # Merge all catalog tables into one
            if all_catalog_tables:
                # Merge by concatenating rows from all tables
                if len(all_catalog_tables) == 1:
                    catalog_table = all_catalog_tables[0]
                else:
                    # Combine all tables - they should have the same columns
                    catalog_table = agate.Table.merge(all_catalog_tables)
            else:
                catalog_table = agate.Table([])

        catalog_data: List[PrimitiveDict] = [
            dict(zip(catalog_table.column_names, map(dbt.utils._coerce_decimal, row)))
            for row in catalog_table
        ]

        catalog = Catalog(catalog_data)

        errors: Optional[List[str]] = None
        if exceptions:
            errors = [str(e) for e in exceptions]

        nodes, sources = catalog.make_unique_id_map(self.manifest, selected_node_ids)
        results = self.get_catalog_results(
            nodes=nodes,
            sources=sources,
            generated_at=datetime.now(timezone.utc).replace(tzinfo=None),
            compile_results=compile_results,
            errors=errors,
        )

        catalog_path = os.path.join(self.config.project_target_path, CATALOG_FILENAME)
        results.write(catalog_path)
        add_artifact_produced(catalog_path)
        fire_event(
            ArtifactWritten(artifact_type=results.__class__.__name__, artifact_path=catalog_path)
        )

        if self.args.compile:
            write_manifest(self.manifest, self.config.project_target_path)

        if self.args.static:
            # Read manifest.json and catalog.json
            read_manifest_data = load_file_contents(
                os.path.join(self.config.project_target_path, MANIFEST_FILE_NAME)
            )
            read_catalog_data = load_file_contents(catalog_path)

            # Create new static index file contents
            index_data = load_file_contents(DOCS_INDEX_FILE_PATH)
            index_data = index_data.replace('"MANIFEST.JSON INLINE DATA"', read_manifest_data)
            index_data = index_data.replace('"CATALOG.JSON INLINE DATA"', read_catalog_data)

            # Write out the new index file
            static_index_path = os.path.join(self.config.project_target_path, "static_index.html")
            with open(static_index_path, "wb") as static_index_file:
                static_index_file.write(bytes(index_data, "utf8"))

        if exceptions:
            fire_event(WriteCatalogFailure(num_exceptions=len(exceptions)))
        fire_event(CatalogWritten(path=os.path.abspath(catalog_path)))
        return results

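    # Editor's sketch (not part of the package) of the connection-resolution order
    # run() applies when grouping nodes, assuming a hypothetical connection
    # "warehouse_b" and a default target "warehouse_a":
    #
    #     source with connection: warehouse_b            -> "warehouse_b"
    #     source with meta: {connection: warehouse_b}    -> "warehouse_b"
    #     model with config.target: warehouse_b          -> "warehouse_b"
    #     anything else                                  -> self.config.target_name ("warehouse_a")
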
    def get_node_selector(self) -> ResourceTypeSelector:
        if self.manifest is None or self.graph is None:
            raise DbtInternalError("manifest and graph must be set to perform node selection")
        return ResourceTypeSelector(
            graph=self.graph,
            manifest=self.manifest,
            previous_state=self.previous_state,
            resource_types=EXECUTABLE_NODE_TYPES,
            include_empty_nodes=True,
        )

    def get_catalog_results(
        self,
        nodes: Dict[str, CatalogTable],
        sources: Dict[str, CatalogTable],
        generated_at: datetime,
        compile_results: Optional[Any],
        errors: Optional[List[str]],
    ) -> CatalogArtifact:
        return CatalogArtifact.from_results(
            generated_at=generated_at,
            nodes=nodes,
            sources=sources,
            compile_results=compile_results,
            errors=errors,
        )

    @classmethod
    def interpret_results(cls, results: Optional[CatalogResults]) -> bool:
        if results is None:
            return False
        if results.errors:
            return False
        compile_results = results._compile_results
        if compile_results is None:
            return True

        return super().interpret_results(compile_results)

    @staticmethod
    def _get_nodes_from_ids(manifest: Manifest, node_ids: Iterable[str]) -> List[ResultNode]:
        selected: List[ResultNode] = []
        for unique_id in node_ids:
            if unique_id in manifest.nodes:
                node = manifest.nodes[unique_id]
                if node.is_relational and not node.is_ephemeral_model:
                    selected.append(node)
            elif unique_id in manifest.sources:
                source = manifest.sources[unique_id]
                selected.append(source)
        return selected

    def _get_selected_source_ids(self) -> Set[UniqueId]:
        if self.manifest is None or self.graph is None:
            raise DbtInternalError("manifest and graph must be set to perform node selection")

        source_selector = ResourceTypeSelector(
            graph=self.graph,
            manifest=self.manifest,
            previous_state=self.previous_state,
            resource_types=[NodeType.Source],
        )

        return source_selector.get_graph_queue(self.get_selection_spec()).get_selected_nodes()

    def _get_catalog_via_information_schema(
        self, adapter, connection_name: str, schemas: Set[Tuple[str, str]]
    ) -> agate.Table:
        """
        DVT v0.4.8: Universal fallback for catalog generation using INFORMATION_SCHEMA.

        Works for most SQL databases (Postgres, MySQL, Snowflake, Redshift, BigQuery,
        SQL Server). Falls back gracefully for databases without INFORMATION_SCHEMA
        (Oracle, DB2).

        :param adapter: Database adapter
        :param connection_name: Connection name for logging
        :param schemas: Set of (database, schema) tuples to query
        :return: agate.Table with catalog data
        """
        if not schemas:
            return agate.Table([])

        # Build WHERE clause for schemas
        schema_conditions = []
        for database, schema in schemas:
            # Most databases only need schema filter, some need database too
            schema_conditions.append(f"table_schema = '{schema}'")

        where_clause = " OR ".join(schema_conditions)

        # Universal INFORMATION_SCHEMA query (works for most SQL databases)
        query = f"""
            SELECT
                table_catalog as table_database,
                table_schema,
                table_name,
                column_name,
                data_type,
                ordinal_position as column_index
            FROM information_schema.columns
            WHERE {where_clause}
            ORDER BY table_schema, table_name, ordinal_position
        """

        try:
            # Execute query using adapter's connection
            _, result = adapter.execute(query, auto_begin=False, fetch=True)

            # Convert to agate.Table format expected by catalog
            if result and len(result) > 0:
                # Transform result into catalog format
                catalog_data = []
                for row in result:
                    catalog_data.append({
                        'table_database': row[0],
                        'table_schema': row[1],
                        'table_name': row[2],
                        'column_name': row[3],
                        'column_type': row[4],
                        'column_index': row[5],
                    })

                # Create agate.Table with proper column types
                return agate.Table(catalog_data)
            else:
                return agate.Table([])

        except Exception as e:
            # Fallback failed - database might not support INFORMATION_SCHEMA
            # (e.g., Oracle, DB2, or permission issues)
            fire_event(
                CannotGenerateDocs(
                    msg=f"INFORMATION_SCHEMA fallback failed for '{connection_name}': {str(e)}"
                )
            )
            return agate.Table([])
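For reference (an editor's illustration with hypothetical schema names, not package content): given schemas = {("analytics_db", "analytics"), ("analytics_db", "staging")}, the fallback above renders a query of this shape:

    SELECT
        table_catalog as table_database,
        table_schema,
        table_name,
        column_name,
        data_type,
        ordinal_position as column_index
    FROM information_schema.columns
    WHERE table_schema = 'analytics' OR table_schema = 'staging'
    ORDER BY table_schema, table_name, ordinal_position

Note that the database element of each tuple is not used in the filter, so identically named schemas in different databases would be conflated by this fallback.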