dbt-cube-sync 0.1.0a2__tar.gz → 0.1.0a3__tar.gz
This diff shows the changes between package versions as published to a supported public registry. It is provided for informational purposes only.
- {dbt_cube_sync-0.1.0a2 → dbt_cube_sync-0.1.0a3}/PKG-INFO +1 -1
- {dbt_cube_sync-0.1.0a2 → dbt_cube_sync-0.1.0a3}/dbt_cube_sync/connectors/superset.py +10 -36
- {dbt_cube_sync-0.1.0a2 → dbt_cube_sync-0.1.0a3}/pyproject.toml +1 -1
- {dbt_cube_sync-0.1.0a2 → dbt_cube_sync-0.1.0a3}/README.md +0 -0
- {dbt_cube_sync-0.1.0a2 → dbt_cube_sync-0.1.0a3}/dbt_cube_sync/__init__.py +0 -0
- {dbt_cube_sync-0.1.0a2 → dbt_cube_sync-0.1.0a3}/dbt_cube_sync/cli.py +0 -0
- {dbt_cube_sync-0.1.0a2 → dbt_cube_sync-0.1.0a3}/dbt_cube_sync/config.py +0 -0
- {dbt_cube_sync-0.1.0a2 → dbt_cube_sync-0.1.0a3}/dbt_cube_sync/connectors/__init__.py +0 -0
- {dbt_cube_sync-0.1.0a2 → dbt_cube_sync-0.1.0a3}/dbt_cube_sync/connectors/base.py +0 -0
- {dbt_cube_sync-0.1.0a2 → dbt_cube_sync-0.1.0a3}/dbt_cube_sync/connectors/powerbi.py +0 -0
- {dbt_cube_sync-0.1.0a2 → dbt_cube_sync-0.1.0a3}/dbt_cube_sync/connectors/tableau.py +0 -0
- {dbt_cube_sync-0.1.0a2 → dbt_cube_sync-0.1.0a3}/dbt_cube_sync/core/__init__.py +0 -0
- {dbt_cube_sync-0.1.0a2 → dbt_cube_sync-0.1.0a3}/dbt_cube_sync/core/cube_generator.py +0 -0
- {dbt_cube_sync-0.1.0a2 → dbt_cube_sync-0.1.0a3}/dbt_cube_sync/core/dbt_parser.py +0 -0
- {dbt_cube_sync-0.1.0a2 → dbt_cube_sync-0.1.0a3}/dbt_cube_sync/core/models.py +0 -0
dbt_cube_sync/connectors/superset.py

@@ -191,19 +191,7 @@ class SupersetConnector(BaseConnector):
 
         cube_name = cube_name_match.group(1)
 
-        # Use public schema and cube name for Superset dataset
-        schema_name = "public"
-        table_name = cube_name  # Use cube name (not database table name)
-
-        # Extract actual database table for reference (but don't use it for dataset)
-        sql_match = re.search(r'sql:\s*[`"\']\s*SELECT\s+.*FROM\s+(\w+\.\w+)', content, re.IGNORECASE)
-        actual_db_table = None
-        if sql_match:
-            actual_db_table = sql_match.group(1)
-
         print(f"  Cube: {cube_name}")
-        print(f"  Schema: {schema_name}")
-        print(f"  Table: {table_name}")
 
         # Parse dimensions
         dimensions = self._parse_dimensions(content)
@@ -213,9 +201,8 @@ class SupersetConnector(BaseConnector):
 
         return {
             'cube_name': cube_name,
-            'schema': schema_name,
-            'table_name': table_name,
-            'actual_db_table': actual_db_table,  # This is the real DB table
+            'schema': 'public',  # Always use public schema for Cube.js
+            'table_name': cube_name,  # Use cube name as table name (e.g., CoursePerformanceSummary)
             'dimensions': dimensions,
             'measures': measures
         }
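Taken together, the two hunks above mean the connector no longer extracts the underlying database table from the cube's `sql:` expression: every Superset dataset is keyed by the cube name under the `public` schema. A minimal standalone sketch of the resulting behaviour follows (the function name and the cube-name regex are illustrative assumptions, not taken from the package; dimension and measure parsing are omitted):

import re

def parse_cube_schema(content: str):
    """Sketch of the 0.1.0a3 behaviour: the Superset dataset always uses the
    'public' schema and the cube name as the table name."""
    # Hypothetical regex; assumes the Cube.js schema declares cube(`Name`, { ... })
    cube_name_match = re.search(r"cube\s*\(\s*[`'\"](\w+)[`'\"]", content)
    if not cube_name_match:
        return None
    cube_name = cube_name_match.group(1)
    return {
        'cube_name': cube_name,
        'schema': 'public',        # always the public schema for Cube.js
        'table_name': cube_name,   # e.g. "CoursePerformanceSummary"
    }

schema = "cube(`CoursePerformanceSummary`, { sql: `SELECT * FROM analytics.course_performance` })"
print(parse_cube_schema(schema))
# {'cube_name': 'CoursePerformanceSummary', 'schema': 'public', 'table_name': 'CoursePerformanceSummary'}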
@@ -409,27 +396,14 @@ class SupersetConnector(BaseConnector):
         """Create a new dataset in Superset"""
         dataset_url = f"{self.base_url}/api/v1/dataset/"
 
-        #
-
-
-
-
-
-
-
-                "sql": sql_query,
-                "normalize_columns": False,
-                "always_filter_main_dttm": False
-            }
-        else:
-            # Fallback to direct table reference
-            payload = {
-                "database": self.database_id,
-                "schema": schema_info['schema'],
-                "table_name": schema_info['table_name'],
-                "normalize_columns": False,
-                "always_filter_main_dttm": False
-            }
+        # Create a simple table dataset (Cube.js will handle the actual data source)
+        payload = {
+            "database": self.database_id,
+            "schema": schema_info['schema'],  # "public"
+            "table_name": schema_info['table_name'],  # cube name like "CoursePerformanceSummary"
+            "normalize_columns": False,
+            "always_filter_main_dttm": False
+        }
 
         print(f"\n📊 Creating new dataset: {schema_info['table_name']}")
         response = self.session.post(dataset_url, json=payload)
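With the SQL/virtual-dataset branch removed, dataset creation reduces to a single POST against Superset's dataset API. The sketch below assumes a requests.Session that already carries the Superset auth headers (access token and CSRF token); the helper name and signature are illustrative, not the package's API:

import requests

def create_dataset(session: requests.Session, base_url: str,
                   database_id: int, schema_info: dict) -> dict:
    # One plain table dataset per cube; no SQL/virtual-dataset branch anymore.
    payload = {
        "database": database_id,
        "schema": schema_info['schema'],          # "public"
        "table_name": schema_info['table_name'],  # cube name, e.g. "CoursePerformanceSummary"
        "normalize_columns": False,
        "always_filter_main_dttm": False,
    }
    response = session.post(f"{base_url}/api/v1/dataset/", json=payload)
    response.raise_for_status()  # Superset answers 201 with the new dataset id on success
    return response.json()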