dbt_cube_sync-0.1.0a1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of dbt-cube-sync might be problematic.
- dbt_cube_sync/__init__.py +5 -0
- dbt_cube_sync/cli.py +135 -0
- dbt_cube_sync/config.py +121 -0
- dbt_cube_sync/connectors/__init__.py +1 -0
- dbt_cube_sync/connectors/base.py +95 -0
- dbt_cube_sync/connectors/powerbi.py +34 -0
- dbt_cube_sync/connectors/superset.py +556 -0
- dbt_cube_sync/connectors/tableau.py +34 -0
- dbt_cube_sync/core/__init__.py +1 -0
- dbt_cube_sync/core/cube_generator.py +188 -0
- dbt_cube_sync/core/dbt_parser.py +178 -0
- dbt_cube_sync/core/models.py +66 -0
- dbt_cube_sync-0.1.0a1.dist-info/METADATA +230 -0
- dbt_cube_sync-0.1.0a1.dist-info/RECORD +16 -0
- dbt_cube_sync-0.1.0a1.dist-info/WHEEL +4 -0
- dbt_cube_sync-0.1.0a1.dist-info/entry_points.txt +3 -0
dbt_cube_sync/cli.py
ADDED
@@ -0,0 +1,135 @@
"""
CLI interface for dbt-cube-sync tool
"""
import click
import sys
from pathlib import Path
from typing import Optional

from .core.dbt_parser import DbtParser
from .core.cube_generator import CubeGenerator
from .connectors.base import ConnectorRegistry
from .config import Config

# Import connectors to register them
from .connectors import superset, tableau, powerbi


class CustomGroup(click.Group):
    def get_command(self, ctx, cmd_name):
        # Handle common mistake of typing dbt-cube-sync twice
        if cmd_name == 'dbt-cube-sync':
            click.echo("❌ Error: You typed 'dbt-cube-sync' twice!")
            click.echo("💡 Just run: dbt-cube-sync <command>")
            click.echo("\nAvailable commands:")
            click.echo("  dbt-cube-sync --help      # Show help")
            click.echo("  dbt-cube-sync --version   # Show version")
            click.echo("  dbt-cube-sync dbt-to-cube -m manifest -c catalog -o output            # Generate Cube.js schemas")
            click.echo("  dbt-cube-sync cube-to-bi superset -c cubes -u url -n user -p pass -d Cube   # Sync to BI tool")
            ctx.exit(1)

        return super().get_command(ctx, cmd_name)


@click.group(cls=CustomGroup)
@click.version_option()
def main():
    """dbt-cube-sync: Synchronization tool for dbt models to Cube.js schemas and BI tools"""
    pass


@main.command()
@click.option('--manifest', '-m',
              required=True,
              help='Path to dbt manifest.json file')
@click.option('--catalog', '-c',
              required=True,
              help='Path to dbt catalog.json file')
@click.option('--output', '-o',
              required=True,
              help='Output directory for Cube.js files')
@click.option('--template-dir', '-t',
              default='./cube/templates',
              help='Directory containing Cube.js templates')
def dbt_to_cube(manifest: str, catalog: str, output: str, template_dir: str):
    """Generate Cube.js schemas from dbt models"""
    try:
        click.echo("🔄 Parsing dbt manifest...")
        parser = DbtParser(manifest, catalog)
        models = parser.parse_models()

        click.echo(f"📊 Found {len(models)} dbt models")

        click.echo("🏗️ Generating Cube.js schemas...")
        generator = CubeGenerator(template_dir, output)
        generated_files = generator.generate_cube_files(models)

        click.echo(f"✅ Generated {len(generated_files)} Cube.js files:")
        for file_path in generated_files:
            click.echo(f"   • {file_path}")

    except Exception as e:
        click.echo(f"❌ Error: {str(e)}", err=True)
        sys.exit(1)


@main.command()
@click.argument('bi_tool', type=click.Choice(['superset', 'tableau', 'powerbi']))
@click.option('--cube-files', '-c',
              required=True,
              help='Directory containing Cube.js metric files')
@click.option('--url', '-u',
              required=True,
              help='BI tool URL (e.g., http://localhost:8088)')
@click.option('--username', '-n',
              required=True,
              help='BI tool username')
@click.option('--password', '-p',
              required=True,
              help='BI tool password')
@click.option('--cube-connection-name', '-d',
              default='Cube',
              help='Name of the Cube database connection in the BI tool (default: Cube)')
def cube_to_bi(bi_tool: str, cube_files: str, url: str, username: str, password: str, cube_connection_name: str):
    """Sync Cube.js schemas to BI tool datasets"""
    try:
        click.echo(f"🔄 Connecting to {bi_tool.title()} at {url}...")

        # Create connector config from command line params
        connector_config = {
            'url': url,
            'username': username,
            'password': password,
            'database_name': cube_connection_name
        }

        connector_instance = ConnectorRegistry.get_connector(bi_tool, **connector_config)

        click.echo(f"📊 Syncing Cube.js schemas to {bi_tool.title()}...")
        results = connector_instance.sync_cube_schemas(cube_files)

        successful = sum(1 for r in results if r.status == 'success')
        failed = sum(1 for r in results if r.status == 'failed')

        click.echo(f"✅ Sync complete: {successful} successful, {failed} failed")

        # Show detailed results
        for result in results:
            status_emoji = "✅" if result.status == 'success' else "❌"
            click.echo(f"   {status_emoji} {result.file_or_dataset}: {result.message}")

    except Exception as e:
        click.echo(f"❌ Error: {str(e)}", err=True)
        sys.exit(1)


@main.command()
def version():
    """Show version information"""
    from . import __version__
    click.echo(f"dbt-cube-sync version {__version__}")


if __name__ == '__main__':
    main()
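The two commands above can also be exercised in-process with Click's test runner, which is handy for smoke-testing the CLI without installing the console entry point. A minimal sketch, not part of the package, assuming dbt artifacts at hypothetical target/ paths:

# Minimal sketch (not from the package): invoke the dbt-to-cube command
# in-process via Click's CliRunner. The manifest/catalog/output paths are
# hypothetical placeholders.
from click.testing import CliRunner

from dbt_cube_sync.cli import main

runner = CliRunner()
result = runner.invoke(main, [
    "dbt-to-cube",
    "--manifest", "target/manifest.json",   # hypothetical dbt artifact path
    "--catalog", "target/catalog.json",     # hypothetical dbt artifact path
    "--output", "cube/schema",              # hypothetical output directory
])
print(result.exit_code)  # 0 on success, 1 if the command hit an exception
print(result.output)     # the emoji-prefixed progress messages echoed above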
dbt_cube_sync/config.py
ADDED
@@ -0,0 +1,121 @@
"""
Configuration management for dbt-cube-sync
"""
import yaml
from typing import Dict, Any, Optional
from pathlib import Path
from pydantic import BaseModel, validator


class ConnectorConfig(BaseModel):
    """Configuration for a BI tool connector"""
    type: str
    url: Optional[str] = None
    username: Optional[str] = None
    password: Optional[str] = None
    database_name: Optional[str] = "Cube"

    @validator('type')
    def validate_type(cls, v):
        supported_types = ['superset', 'tableau', 'powerbi']
        if v not in supported_types:
            raise ValueError(f"Unsupported connector type: {v}. Supported: {supported_types}")
        return v


class Config(BaseModel):
    """Main configuration class"""
    connectors: Dict[str, ConnectorConfig] = {}

    @classmethod
    def load_from_file(cls, config_path: str) -> 'Config':
        """
        Load configuration from YAML file

        Args:
            config_path: Path to the YAML configuration file

        Returns:
            Config instance
        """
        config_file = Path(config_path)
        if not config_file.exists():
            raise FileNotFoundError(f"Configuration file not found: {config_path}")

        with open(config_file, 'r') as f:
            data = yaml.safe_load(f)

        # Parse connector configurations
        connectors = {}
        for name, connector_data in data.get('connectors', {}).items():
            connectors[name] = ConnectorConfig(**connector_data)

        return cls(connectors=connectors)

    def get_connector_config(self, connector_name: str) -> Dict[str, Any]:
        """
        Get configuration for a specific connector

        Args:
            connector_name: Name of the connector (e.g., 'superset')

        Returns:
            Dictionary with connector configuration
        """
        if connector_name not in self.connectors:
            available = list(self.connectors.keys())
            raise ValueError(f"Connector '{connector_name}' not found in config. Available: {available}")

        config = self.connectors[connector_name]
        return config.dict()

    def save_to_file(self, config_path: str) -> None:
        """
        Save configuration to YAML file

        Args:
            config_path: Path to save the configuration file
        """
        data = {
            'connectors': {
                name: config.dict() for name, config in self.connectors.items()
            }
        }

        config_file = Path(config_path)
        config_file.parent.mkdir(parents=True, exist_ok=True)

        with open(config_file, 'w') as f:
            yaml.dump(data, f, default_flow_style=False, indent=2)

    @classmethod
    def create_sample_config(cls, config_path: str) -> None:
        """
        Create a sample configuration file

        Args:
            config_path: Path to create the sample configuration file
        """
        sample_config = cls(
            connectors={
                'superset': ConnectorConfig(
                    type='superset',
                    url='http://localhost:8088',
                    username='admin',
                    password='admin',
                    database_name='Cube'
                ),
                'tableau': ConnectorConfig(
                    type='tableau',
                    url='https://your-tableau-server.com',
                    username='your-username',
                    password='your-password'
                ),
                'powerbi': ConnectorConfig(
                    type='powerbi',
                    # Add PowerBI specific configuration fields here
                )
            }
        )

        sample_config.save_to_file(config_path)
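A minimal sketch of the round trip this Config class supports: write the bundled sample config, load it back, and read the Superset connector settings. The file path below is a hypothetical placeholder:

# Sketch only: exercise Config.create_sample_config / load_from_file /
# get_connector_config as defined above. The YAML path is hypothetical.
from dbt_cube_sync.config import Config

Config.create_sample_config("config/dbt-cube-sync.yml")

config = Config.load_from_file("config/dbt-cube-sync.yml")
superset_settings = config.get_connector_config("superset")
# -> {'type': 'superset', 'url': 'http://localhost:8088',
#     'username': 'admin', 'password': 'admin', 'database_name': 'Cube'}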
dbt_cube_sync/connectors/__init__.py
ADDED
@@ -0,0 +1 @@
"""BI tool connectors for syncing Cube.js schemas"""
dbt_cube_sync/connectors/base.py
ADDED
@@ -0,0 +1,95 @@
"""
Abstract base connector for BI tools
"""
from abc import ABC, abstractmethod
from typing import List, Dict, Any
from pathlib import Path

from ..core.models import SyncResult


class BaseConnector(ABC):
    """Abstract base class for BI tool connectors"""

    def __init__(self, **config):
        """
        Initialize the connector with configuration

        Args:
            **config: Connector-specific configuration parameters
        """
        self.config = config
        self._validate_config()

    @abstractmethod
    def _validate_config(self) -> None:
        """Validate the provided configuration"""
        pass

    @abstractmethod
    def connect(self) -> None:
        """Establish connection to the BI tool"""
        pass

    @abstractmethod
    def sync_cube_schemas(self, cube_dir: str) -> List[SyncResult]:
        """
        Sync all Cube.js schemas from directory to BI tool

        Args:
            cube_dir: Directory containing Cube.js schema files

        Returns:
            List of SyncResult objects with status of each file
        """
        pass

    @abstractmethod
    def sync_single_schema(self, cube_file_path: str) -> SyncResult:
        """
        Sync a single Cube.js schema file to BI tool

        Args:
            cube_file_path: Path to the Cube.js schema file

        Returns:
            SyncResult object with status
        """
        pass

    def _get_cube_files(self, cube_dir: str) -> List[Path]:
        """Get all .js files from the cube directory"""
        cube_path = Path(cube_dir)
        if not cube_path.exists():
            raise FileNotFoundError(f"Cube directory not found: {cube_dir}")

        return list(cube_path.glob("*.js"))

    def get_connector_type(self) -> str:
        """Return the type of this connector"""
        return self.__class__.__name__.replace('Connector', '').lower()


class ConnectorRegistry:
    """Registry for managing available connectors"""

    _connectors: Dict[str, type] = {}

    @classmethod
    def register(cls, name: str, connector_class: type) -> None:
        """Register a connector class"""
        cls._connectors[name] = connector_class

    @classmethod
    def get_connector(cls, name: str, **config) -> BaseConnector:
        """Get an instance of a registered connector"""
        if name not in cls._connectors:
            available = list(cls._connectors.keys())
            raise ValueError(f"Unknown connector '{name}'. Available: {available}")

        return cls._connectors[name](**config)

    @classmethod
    def list_connectors(cls) -> List[str]:
        """List all registered connector names"""
        return list(cls._connectors.keys())
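To illustrate the registry pattern above, here is a hypothetical connector (not part of the package) that subclasses BaseConnector, registers itself, and is retrieved by name. The SyncResult field names are inferred from cli.py; its exact constructor lives in core/models.py, which is not shown here:

# Illustrative sketch: a hypothetical LookerConnector built on the abstract
# interface above. Not shipped with the package.
from typing import List

from dbt_cube_sync.connectors.base import BaseConnector, ConnectorRegistry
from dbt_cube_sync.core.models import SyncResult


class LookerConnector(BaseConnector):
    def _validate_config(self) -> None:
        if "url" not in self.config:
            raise ValueError("LookerConnector requires a 'url'")

    def connect(self) -> None:
        pass  # an API session would be established here

    def sync_cube_schemas(self, cube_dir: str) -> List[SyncResult]:
        # Sync every .js schema found by the base-class helper
        return [self.sync_single_schema(str(p)) for p in self._get_cube_files(cube_dir)]

    def sync_single_schema(self, cube_file_path: str) -> SyncResult:
        # Field names assumed from cli.py usage (status, file_or_dataset, message)
        return SyncResult(file_or_dataset=cube_file_path, status="success", message="ok")


ConnectorRegistry.register("looker", LookerConnector)
connector = ConnectorRegistry.get_connector("looker", url="https://looker.example.com")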
dbt_cube_sync/connectors/powerbi.py
ADDED
@@ -0,0 +1,34 @@
"""
PowerBI connector placeholder for future implementation
"""
from typing import List
from .base import BaseConnector, ConnectorRegistry
from ..core.models import SyncResult


class PowerBIConnector(BaseConnector):
    """Connector for Microsoft Power BI (placeholder implementation)"""

    def _validate_config(self) -> None:
        """Validate the provided configuration"""
        # TODO: Implement PowerBI-specific validation
        pass

    def connect(self) -> None:
        """Establish connection to Power BI"""
        # TODO: Implement PowerBI connection logic
        raise NotImplementedError("PowerBI connector not yet implemented")

    def sync_cube_schemas(self, cube_dir: str) -> List[SyncResult]:
        """Sync all Cube.js schemas from directory to Power BI"""
        # TODO: Implement PowerBI sync logic
        raise NotImplementedError("PowerBI connector not yet implemented")

    def sync_single_schema(self, cube_file_path: str) -> SyncResult:
        """Sync a single Cube.js schema file to Power BI"""
        # TODO: Implement single schema sync for PowerBI
        raise NotImplementedError("PowerBI connector not yet implemented")


# Register the PowerBI connector
ConnectorRegistry.register('powerbi', PowerBIConnector)
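Assuming superset.py and tableau.py register themselves the same way this placeholder does (cli.py imports all three modules "to register them"), importing the connectors package is enough to populate the registry. A short sketch under that assumption:

# Hedged sketch: after importing the connector modules, as cli.py does, the
# registry should list the built-in connectors, even though the PowerBI
# methods above currently raise NotImplementedError.
from dbt_cube_sync.connectors import superset, tableau, powerbi  # noqa: F401
from dbt_cube_sync.connectors.base import ConnectorRegistry

print(ConnectorRegistry.list_connectors())  # expected: ['superset', 'tableau', 'powerbi']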