dbt-cube-sync 0.1.0a1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dbt-cube-sync might be problematic.

@@ -0,0 +1,230 @@
Metadata-Version: 2.4
Name: dbt-cube-sync
Version: 0.1.0a1
Summary: Synchronization tool for dbt models to Cube.js schemas and BI tools
Author: Ponder
Requires-Python: >=3.9,<4.0
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Classifier: Programming Language :: Python :: 3.14
Requires-Dist: click (>=8.1.7,<9.0.0)
Requires-Dist: jinja2 (>=3.1.2,<4.0.0)
Requires-Dist: pydantic (>=2.5.0,<3.0.0)
Requires-Dist: pyyaml (>=6.0,<7.0)
Requires-Dist: requests (>=2.31.0,<3.0.0)
Description-Content-Type: text/markdown

# dbt-cube-sync

A powerful synchronization tool that creates a seamless pipeline from dbt models to Cube.js schemas and BI tools (Superset, Tableau, PowerBI).

## Features

- 🔄 **dbt → Cube.js**: Auto-generate Cube.js schemas from dbt models with metrics
- 📊 **Cube.js → BI Tools**: Sync schemas to multiple BI platforms
- 🏗️ **Extensible Architecture**: Plugin-based connector system for easy BI tool integration
- 🐳 **Docker Support**: Containerized execution with orchestration support
- 🎯 **CLI Interface**: Simple command-line tools for automation

## Supported BI Tools

- ✅ **Apache Superset** - Full implementation
- 🚧 **Tableau** - Placeholder (coming soon)
- 🚧 **PowerBI** - Placeholder (coming soon)

## Installation

### Using Poetry (Development)

```bash
cd dbt-cube-sync
poetry install
poetry run dbt-cube-sync --help
```

### Using Docker

```bash
docker build -t dbt-cube-sync .
docker run --rm dbt-cube-sync --help
```

## Quick Start

### 1. Create Configuration File

```bash
# Create sample config
dbt-cube-sync create-config sync-config.yaml

# Edit the config file with your BI tool credentials
```

### 2. Generate Cube.js Schemas

```bash
# Generate from dbt manifest
dbt-cube-sync generate-cubes \
  --dbt-manifest ./DbtEducationalDataProject/target/manifest.json \
  --output-dir ./cube/conf/cube_output
```

### 3. Sync to BI Tool

```bash
# Sync to Superset
dbt-cube-sync sync-bi superset \
  --cube-dir ./cube/conf/cube_output \
  --config-file ./sync-config.yaml
```

### 4. Full Pipeline

```bash
# Complete dbt → Cube.js → Superset pipeline
dbt-cube-sync full-sync \
  --dbt-manifest ./DbtEducationalDataProject/target/manifest.json \
  --cube-dir ./cube/conf/cube_output \
  --bi-connector superset \
  --config-file ./sync-config.yaml
```

## Configuration

### Sample Configuration (`sync-config.yaml`)

```yaml
connectors:
  superset:
    type: superset
    url: http://localhost:8088
    username: admin
    password: admin
    database_name: Cube

  tableau:
    type: tableau
    url: https://your-tableau-server.com
    username: your-username
    password: your-password

  powerbi:
    type: powerbi
    # PowerBI specific configuration
```
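
The same configuration can be created and read programmatically through the `Config` class in `config.py` (shown later in this diff). A minimal sketch; the file name is just an example:

```python
from dbt_cube_sync.config import Config

# Write a sample config file, then load it back as a validated Config object.
Config.create_sample_config("sync-config.yaml")
config = Config.load_from_file("sync-config.yaml")

# Settings for a single connector come back as a plain dict, e.g.
# {'type': 'superset', 'url': 'http://localhost:8088', ...}
print(config.get_connector_config("superset"))
```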

## CLI Commands

### `generate-cubes`
Generate Cube.js schema files from dbt models; a programmatic sketch follows the option list.

**Options:**
- `--dbt-manifest` / `-m`: Path to dbt manifest.json file
- `--output-dir` / `-o`: Output directory for Cube.js files
- `--template-dir` / `-t`: Directory containing Cube.js templates
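
The same step can be driven from Python by wiring the parser and generator together, mirroring how `cli.py` (shown later in this diff) uses these classes. A minimal sketch; the manifest, catalog, and template paths are placeholders:

```python
from dbt_cube_sync.core.dbt_parser import DbtParser
from dbt_cube_sync.core.cube_generator import CubeGenerator

# Parse dbt models from the manifest (the CLI also passes a catalog.json path).
parser = DbtParser("./target/manifest.json", "./target/catalog.json")
models = parser.parse_models()

# Render Cube.js schema files for the parsed models into the output directory.
generator = CubeGenerator("./cube/templates", "./cube/conf/cube_output")
generated_files = generator.generate_cube_files(models)
print(f"Generated {len(generated_files)} Cube.js files")
```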

### `sync-bi`
Sync Cube.js schemas to BI tool datasets; a programmatic sketch follows the option list.

**Arguments:**
- `connector`: BI tool type (`superset`, `tableau`, `powerbi`)

**Options:**
- `--cube-dir` / `-c`: Directory containing Cube.js files
- `--config-file` / `-f`: Configuration file for BI tool connection
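
Programmatically, this corresponds to looking a connector up in the registry and handing it the Cube.js directory, the same flow the bundled CLI uses. A minimal sketch that reuses the sample config from above; only the connection fields are passed on, mirroring how `cli.py` builds its connector settings:

```python
from dbt_cube_sync.config import Config
from dbt_cube_sync.connectors.base import ConnectorRegistry
from dbt_cube_sync.connectors import superset  # noqa: F401 -- importing registers the connector

settings = Config.load_from_file("./sync-config.yaml").get_connector_config("superset")

connector = ConnectorRegistry.get_connector(
    "superset",
    url=settings["url"],
    username=settings["username"],
    password=settings["password"],
    database_name=settings["database_name"],
)

results = connector.sync_cube_schemas("./cube/conf/cube_output")
for result in results:
    print(f"{result.status}: {result.file_or_dataset} - {result.message}")
```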

### `full-sync`
Complete pipeline: dbt models → Cube.js schemas → BI tool datasets.

**Options:**
- `--dbt-manifest` / `-m`: Path to dbt manifest.json file
- `--cube-dir` / `-c`: Directory for Cube.js files
- `--template-dir` / `-t`: Directory containing Cube.js templates
- `--bi-connector` / `-b`: BI tool to sync to
- `--config-file` / `-f`: Configuration file for BI tool connection

## Architecture

```
dbt models (with metrics)
        ↓
dbt-cube-sync generate-cubes
        ↓
Cube.js schemas
        ↓
dbt-cube-sync sync-bi [connector]
        ↓
BI Tool Datasets (Superset/Tableau/PowerBI)
```

### Project Structure

```
dbt-cube-sync/
├── dbt_cube_sync/
│   ├── cli.py                 # CLI interface
│   ├── config.py              # Configuration management
│   ├── core/
│   │   ├── dbt_parser.py      # dbt manifest parser
│   │   ├── cube_generator.py  # Cube.js generator
│   │   └── models.py          # Pydantic data models
│   └── connectors/
│       ├── base.py            # Abstract base connector
│       ├── superset.py        # Superset implementation
│       ├── tableau.py         # Tableau placeholder
│       └── powerbi.py         # PowerBI placeholder
├── Dockerfile                 # Container definition
├── pyproject.toml             # Poetry configuration
└── README.md
```

## Adding New BI Connectors

1. Create a new connector class inheriting from `BaseConnector`
2. Implement the required abstract methods
3. Register the connector using `ConnectorRegistry.register()`

Example:
```python
from .base import BaseConnector, ConnectorRegistry


class MyBIConnector(BaseConnector):
    def _validate_config(self):
        # Validation logic
        pass

    def connect(self):
        # Connection logic
        pass

    def sync_cube_schemas(self, cube_dir):
        # Sync implementation
        pass


# Register the connector
ConnectorRegistry.register('mybi', MyBIConnector)
```
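
Once registered, the connector can be looked up by that name through the same registry the bundled CLI uses. A short sketch; the keyword arguments are hypothetical and should match whatever your `_validate_config` expects:

```python
# The URL and credentials below are placeholder values for the hypothetical connector.
connector = ConnectorRegistry.get_connector(
    "mybi",
    url="https://my-bi.example.com",
    username="user",
    password="secret",
)
results = connector.sync_cube_schemas("./cube/conf/cube_output")
```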

## Docker Integration

The tool is designed to work in containerized environments with proper dependency orchestration:

1. **dbt docs**: Runs `dbt build`, then serves the documentation
2. **dbt-cube-sync**: Runs the sync pipeline once dbt and Cube.js are ready
3. **BI Tools**: Receive the synced datasets after the sync completes

## Contributing

1. Fork the repository
2. Create a feature branch
3. Implement your changes
4. Add tests if applicable
5. Submit a pull request

## License

MIT License - see LICENSE file for details.
@@ -0,0 +1,5 @@
"""
dbt-cube-sync: Synchronization tool for dbt models to Cube.js schemas and BI tools
"""

__version__ = "0.1.0"
@@ -0,0 +1,135 @@
"""
CLI interface for dbt-cube-sync tool
"""
import click
import sys
from pathlib import Path
from typing import Optional

from .core.dbt_parser import DbtParser
from .core.cube_generator import CubeGenerator
from .connectors.base import ConnectorRegistry
from .config import Config

# Import connectors to register them
from .connectors import superset, tableau, powerbi


class CustomGroup(click.Group):
    def get_command(self, ctx, cmd_name):
        # Handle common mistake of typing dbt-cube-sync twice
        if cmd_name == 'dbt-cube-sync':
            click.echo("❌ Error: You typed 'dbt-cube-sync' twice!")
            click.echo("💡 Just run: dbt-cube-sync <command>")
            click.echo("\nAvailable commands:")
            click.echo("  dbt-cube-sync --help     # Show help")
            click.echo("  dbt-cube-sync --version  # Show version")
            click.echo("  dbt-cube-sync dbt-to-cube -m manifest -c catalog -o output  # Generate Cube.js schemas")
            click.echo("  dbt-cube-sync cube-to-bi superset -c cubes -u url -n user -p pass -d Cube  # Sync to BI tool")
            ctx.exit(1)

        return super().get_command(ctx, cmd_name)


@click.group(cls=CustomGroup)
@click.version_option()
def main():
    """dbt-cube-sync: Synchronization tool for dbt models to Cube.js schemas and BI tools"""
    pass


@main.command()
@click.option('--manifest', '-m',
              required=True,
              help='Path to dbt manifest.json file')
@click.option('--catalog', '-c',
              required=True,
              help='Path to dbt catalog.json file')
@click.option('--output', '-o',
              required=True,
              help='Output directory for Cube.js files')
@click.option('--template-dir', '-t',
              default='./cube/templates',
              help='Directory containing Cube.js templates')
def dbt_to_cube(manifest: str, catalog: str, output: str, template_dir: str):
    """Generate Cube.js schemas from dbt models"""
    try:
        click.echo("🔄 Parsing dbt manifest...")
        parser = DbtParser(manifest, catalog)
        models = parser.parse_models()

        click.echo(f"📊 Found {len(models)} dbt models")

        click.echo("🏗️ Generating Cube.js schemas...")
        generator = CubeGenerator(template_dir, output)
        generated_files = generator.generate_cube_files(models)

        click.echo(f"✅ Generated {len(generated_files)} Cube.js files:")
        for file_path in generated_files:
            click.echo(f"  • {file_path}")

    except Exception as e:
        click.echo(f"❌ Error: {str(e)}", err=True)
        sys.exit(1)


@main.command()
@click.argument('bi_tool', type=click.Choice(['superset', 'tableau', 'powerbi']))
@click.option('--cube-files', '-c',
              required=True,
              help='Directory containing Cube.js metric files')
@click.option('--url', '-u',
              required=True,
              help='BI tool URL (e.g., http://localhost:8088)')
@click.option('--username', '-n',
              required=True,
              help='BI tool username')
@click.option('--password', '-p',
              required=True,
              help='BI tool password')
@click.option('--cube-connection-name', '-d',
              default='Cube',
              help='Name of the Cube database connection in the BI tool (default: Cube)')
def cube_to_bi(bi_tool: str, cube_files: str, url: str, username: str, password: str, cube_connection_name: str):
    """Sync Cube.js schemas to BI tool datasets"""
    try:
        click.echo(f"🔄 Connecting to {bi_tool.title()} at {url}...")

        # Create connector config from command line params
        connector_config = {
            'url': url,
            'username': username,
            'password': password,
            'database_name': cube_connection_name
        }

        connector_instance = ConnectorRegistry.get_connector(bi_tool, **connector_config)

        click.echo(f"📊 Syncing Cube.js schemas to {bi_tool.title()}...")
        results = connector_instance.sync_cube_schemas(cube_files)

        successful = sum(1 for r in results if r.status == 'success')
        failed = sum(1 for r in results if r.status == 'failed')

        click.echo(f"✅ Sync complete: {successful} successful, {failed} failed")

        # Show detailed results
        for result in results:
            status_emoji = "✅" if result.status == 'success' else "❌"
            click.echo(f"  {status_emoji} {result.file_or_dataset}: {result.message}")

    except Exception as e:
        click.echo(f"❌ Error: {str(e)}", err=True)
        sys.exit(1)


@main.command()
def version():
    """Show version information"""
    from . import __version__
    click.echo(f"dbt-cube-sync version {__version__}")


if __name__ == '__main__':
    main()
@@ -0,0 +1,121 @@
"""
Configuration management for dbt-cube-sync
"""
import yaml
from typing import Dict, Any, Optional
from pathlib import Path
from pydantic import BaseModel, validator


class ConnectorConfig(BaseModel):
    """Configuration for a BI tool connector"""
    type: str
    url: Optional[str] = None
    username: Optional[str] = None
    password: Optional[str] = None
    database_name: Optional[str] = "Cube"

    @validator('type')
    def validate_type(cls, v):
        supported_types = ['superset', 'tableau', 'powerbi']
        if v not in supported_types:
            raise ValueError(f"Unsupported connector type: {v}. Supported: {supported_types}")
        return v


class Config(BaseModel):
    """Main configuration class"""
    connectors: Dict[str, ConnectorConfig] = {}

    @classmethod
    def load_from_file(cls, config_path: str) -> 'Config':
        """
        Load configuration from YAML file

        Args:
            config_path: Path to the YAML configuration file

        Returns:
            Config instance
        """
        config_file = Path(config_path)
        if not config_file.exists():
            raise FileNotFoundError(f"Configuration file not found: {config_path}")

        with open(config_file, 'r') as f:
            data = yaml.safe_load(f)

        # Parse connector configurations
        connectors = {}
        for name, connector_data in data.get('connectors', {}).items():
            connectors[name] = ConnectorConfig(**connector_data)

        return cls(connectors=connectors)

    def get_connector_config(self, connector_name: str) -> Dict[str, Any]:
        """
        Get configuration for a specific connector

        Args:
            connector_name: Name of the connector (e.g., 'superset')

        Returns:
            Dictionary with connector configuration
        """
        if connector_name not in self.connectors:
            available = list(self.connectors.keys())
            raise ValueError(f"Connector '{connector_name}' not found in config. Available: {available}")

        config = self.connectors[connector_name]
        return config.dict()

    def save_to_file(self, config_path: str) -> None:
        """
        Save configuration to YAML file

        Args:
            config_path: Path to save the configuration file
        """
        data = {
            'connectors': {
                name: config.dict() for name, config in self.connectors.items()
            }
        }

        config_file = Path(config_path)
        config_file.parent.mkdir(parents=True, exist_ok=True)

        with open(config_file, 'w') as f:
            yaml.dump(data, f, default_flow_style=False, indent=2)

    @classmethod
    def create_sample_config(cls, config_path: str) -> None:
        """
        Create a sample configuration file

        Args:
            config_path: Path to create the sample configuration file
        """
        sample_config = cls(
            connectors={
                'superset': ConnectorConfig(
                    type='superset',
                    url='http://localhost:8088',
                    username='admin',
                    password='admin',
                    database_name='Cube'
                ),
                'tableau': ConnectorConfig(
                    type='tableau',
                    url='https://your-tableau-server.com',
                    username='your-username',
                    password='your-password'
                ),
                'powerbi': ConnectorConfig(
                    type='powerbi',
                    # Add PowerBI specific configuration fields here
                )
            }
        )

        sample_config.save_to_file(config_path)
@@ -0,0 +1 @@
"""BI tool connectors for syncing Cube.js schemas"""