dbt-cube-sync 0.1.0a6__py3-none-any.whl → 0.1.0a7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of dbt-cube-sync might be problematic. Click here for more details.
- dbt_cube_sync/core/db_inspector.py +69 -8
- {dbt_cube_sync-0.1.0a6.dist-info → dbt_cube_sync-0.1.0a7.dist-info}/METADATA +1 -1
- {dbt_cube_sync-0.1.0a6.dist-info → dbt_cube_sync-0.1.0a7.dist-info}/RECORD +5 -5
- {dbt_cube_sync-0.1.0a6.dist-info → dbt_cube_sync-0.1.0a7.dist-info}/WHEEL +0 -0
- {dbt_cube_sync-0.1.0a6.dist-info → dbt_cube_sync-0.1.0a7.dist-info}/entry_points.txt +0 -0
|
@@ -2,7 +2,7 @@
|
|
|
2
2
|
Database inspector - fetches column types using SQLAlchemy
|
|
3
3
|
"""
|
|
4
4
|
from typing import Dict, Optional
|
|
5
|
-
from sqlalchemy import create_engine, inspect, MetaData, Table
|
|
5
|
+
from sqlalchemy import create_engine, inspect, MetaData, Table, text
|
|
6
6
|
from sqlalchemy.engine import Engine
|
|
7
7
|
|
|
8
8
|
|
|
@@ -16,8 +16,17 @@ class DatabaseInspector:
|
|
|
16
16
|
Args:
|
|
17
17
|
sqlalchemy_uri: SQLAlchemy connection URI (e.g., postgresql://user:pass@host:port/db)
|
|
18
18
|
"""
|
|
19
|
-
|
|
19
|
+
# Add connect_args for Redshift compatibility
|
|
20
|
+
if 'redshift' in sqlalchemy_uri:
|
|
21
|
+
self.engine: Engine = create_engine(
|
|
22
|
+
sqlalchemy_uri,
|
|
23
|
+
connect_args={'sslmode': 'prefer'}
|
|
24
|
+
)
|
|
25
|
+
else:
|
|
26
|
+
self.engine: Engine = create_engine(sqlalchemy_uri)
|
|
27
|
+
|
|
20
28
|
self.inspector = inspect(self.engine)
|
|
29
|
+
self.is_redshift = 'redshift' in sqlalchemy_uri.lower()
|
|
21
30
|
|
|
22
31
|
def get_table_columns(self, schema: str, table_name: str) -> Dict[str, str]:
    """
    Fetch the column name -> type mapping for a table.

    Args:
        schema: Database schema name
        table_name: Table name

    Returns:
        Dictionary mapping column names to their data-type strings.
        Empty when the table cannot be inspected (a warning is printed).
    """
    columns: Dict[str, str] = {}

    try:
        if self.is_redshift:
            # Redshift: go through direct SQL to avoid pg_catalog
            # reflection issues with SQLAlchemy's inspector.
            columns = self._get_redshift_columns(schema, table_name)
        else:
            # Other databases: SQLAlchemy's inspector handles reflection.
            for col in self.inspector.get_columns(table_name, schema=schema):
                columns[col['name']] = str(col['type'])
    except Exception as e:
        # Best-effort: inspection failures are reported, not raised.
        print(f"Warning: Could not inspect table {schema}.{table_name}: {e}")

    return columns
|
62
|
+
def _get_redshift_columns(self, schema: str, table_name: str) -> Dict[str, str]:
|
|
63
|
+
"""
|
|
64
|
+
Get columns for Redshift using direct SQL query
|
|
65
|
+
|
|
66
|
+
Args:
|
|
67
|
+
schema: Database schema name
|
|
68
|
+
table_name: Table name
|
|
69
|
+
|
|
70
|
+
Returns:
|
|
71
|
+
Dictionary mapping column names to data types
|
|
72
|
+
"""
|
|
73
|
+
columns = {}
|
|
74
|
+
|
|
75
|
+
try:
|
|
76
|
+
# Query Redshift's pg_table_def view which is more reliable
|
|
77
|
+
query = text("""
|
|
78
|
+
SELECT column_name, data_type
|
|
79
|
+
FROM pg_table_def
|
|
80
|
+
WHERE schemaname = :schema
|
|
81
|
+
AND tablename = :table_name
|
|
82
|
+
ORDER BY column_name
|
|
83
|
+
""")
|
|
84
|
+
|
|
85
|
+
with self.engine.connect() as conn:
|
|
86
|
+
result = conn.execute(query, {"schema": schema, "table_name": table_name})
|
|
87
|
+
for row in result:
|
|
88
|
+
columns[row[0]] = row[1]
|
|
89
|
+
|
|
90
|
+
except Exception as e:
|
|
91
|
+
# Fallback to information_schema if pg_table_def fails
|
|
92
|
+
try:
|
|
93
|
+
query = text("""
|
|
94
|
+
SELECT column_name, data_type
|
|
95
|
+
FROM information_schema.columns
|
|
96
|
+
WHERE table_schema = :schema
|
|
97
|
+
AND table_name = :table_name
|
|
98
|
+
ORDER BY ordinal_position
|
|
99
|
+
""")
|
|
100
|
+
|
|
101
|
+
with self.engine.connect() as conn:
|
|
102
|
+
result = conn.execute(query, {"schema": schema, "table_name": table_name})
|
|
103
|
+
for row in result:
|
|
104
|
+
columns[row[0]] = row[1]
|
|
105
|
+
except Exception as fallback_error:
|
|
106
|
+
print(f"Warning: Could not query Redshift table {schema}.{table_name}: {fallback_error}")
|
|
107
|
+
|
|
108
|
+
return columns
|
|
109
|
+
|
|
def close(self):
    """Close the database connection.

    Disposes the SQLAlchemy engine, which closes all pooled
    connections; the inspector becomes unusable afterwards.
    """
    self.engine.dispose()
|
@@ -8,10 +8,10 @@ dbt_cube_sync/connectors/superset.py,sha256=5YEqadVZRPFAJkgvhqkse3JuGJkQHfyvT88j
|
|
|
8
8
|
dbt_cube_sync/connectors/tableau.py,sha256=jKve1zErzTbgPOtmPB92ZwZl4I6uEySedM51JiwlGrE,1261
|
|
9
9
|
dbt_cube_sync/core/__init__.py,sha256=kgsawtU5dqEvnHz6dU8qwJbH3rtIV7QlK2MhtYVDCaY,46
|
|
10
10
|
dbt_cube_sync/core/cube_generator.py,sha256=o_-fa09F3RQADueIgou8EFhmxKd7PbQ-hCJmXvRuvWM,10839
|
|
11
|
-
dbt_cube_sync/core/db_inspector.py,sha256=
|
|
11
|
+
dbt_cube_sync/core/db_inspector.py,sha256=HK7hpU56X5ED-i_vXGB9rVs79eAwgEXS_SMdk08PHs0,3850
|
|
12
12
|
dbt_cube_sync/core/dbt_parser.py,sha256=vQEUO19WYdeFNnulU2_PD4hdHUtTO-Y9BXfHuH6ZVnM,10192
|
|
13
13
|
dbt_cube_sync/core/models.py,sha256=JjiFAO0vbfVZkKOd6NcZb_JMGSVMTMfQiYjHcZbKtnI,2811
|
|
14
|
-
dbt_cube_sync-0.1.
|
|
15
|
-
dbt_cube_sync-0.1.
|
|
16
|
-
dbt_cube_sync-0.1.
|
|
17
|
-
dbt_cube_sync-0.1.
|
|
14
|
+
dbt_cube_sync-0.1.0a7.dist-info/METADATA,sha256=2cn4bF41UTmo9nv817Xmd2I5LjGmzNYC9gNX_tlmz3Y,8274
|
|
15
|
+
dbt_cube_sync-0.1.0a7.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
|
|
16
|
+
dbt_cube_sync-0.1.0a7.dist-info/entry_points.txt,sha256=iEAB_nZ1AoSeFwSHPY2tr02xmTHLVFKp5CJeFh0AfCw,56
|
|
17
|
+
dbt_cube_sync-0.1.0a7.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|