dbt-cube-sync 0.1.0a10__py3-none-any.whl → 0.1.0a11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dbt-cube-sync might be problematic. Click here for more details.

dbt_cube_sync/cli.py CHANGED
@@ -357,21 +357,22 @@ def sync_all(
357
357
  modified_models = set()
358
358
  removed_models = set()
359
359
 
360
- # ============================================================
361
- # STEP 1: Incremental dbt → Cube.js sync
362
- # ============================================================
363
- click.echo("\n[1/3] dbt → Cube.js schemas")
364
- click.echo("-" * 40)
365
-
366
- # Initialize state manager
360
+ # Initialize state manager and load previous state
367
361
  state_manager = StateManager(state_path)
368
362
  previous_state = None
363
+ current_state = None
369
364
 
370
365
  if not force_full_sync:
371
366
  previous_state = state_manager.load_state()
372
367
  if previous_state:
373
368
  click.echo(f" Loaded state from {state_path}")
374
369
 
370
+ # ============================================================
371
+ # STEP 1: Incremental dbt → Cube.js sync
372
+ # ============================================================
373
+ click.echo("\n[1/3] dbt → Cube.js schemas")
374
+ click.echo("-" * 40)
375
+
375
376
  # Parse manifest
376
377
  parser = DbtParser(
377
378
  manifest_path=manifest,
@@ -414,65 +415,112 @@ def sync_all(
414
415
 
415
416
  # Generate Cube.js files for changed models
416
417
  generated_files = {}
417
- if node_ids_to_process:
418
- parsed_models = parser.parse_models(node_ids_filter=node_ids_to_process)
419
-
420
- if parsed_models:
421
- generator = CubeGenerator('./cube/templates', output)
422
- generated_files = generator.generate_cube_files(parsed_models)
423
- click.echo(f" Generated {len(generated_files)} Cube.js files")
424
-
425
- # Save state
418
+ cube_sync_error = None
419
+ try:
420
+ if node_ids_to_process:
421
+ parsed_models = parser.parse_models(node_ids_filter=node_ids_to_process)
422
+
423
+ if parsed_models:
424
+ generator = CubeGenerator('./cube/templates', output)
425
+ generated_files = generator.generate_cube_files(parsed_models)
426
+ click.echo(f" Generated {len(generated_files)} Cube.js files")
427
+ except Exception as e:
428
+ cube_sync_error = str(e)
429
+ click.echo(f" Error: {cube_sync_error}", err=True)
430
+
431
+ # Build/update state
426
432
  if changes_detected or force_full_sync:
427
433
  if previous_state and not force_full_sync:
428
- new_state = state_manager.merge_state(
434
+ current_state = state_manager.merge_state(
429
435
  previous_state, manifest, manifest_nodes, generated_files, removed_models
430
436
  )
431
437
  else:
432
- new_state = state_manager.create_state_from_results(
438
+ current_state = state_manager.create_state_from_results(
433
439
  manifest, manifest_nodes, generated_files
434
440
  )
435
- state_manager.save_state(new_state)
436
- click.echo(f" State saved to {state_path}")
441
+ else:
442
+ # No changes - use previous state or create empty one
443
+ current_state = previous_state or state_manager.create_state_from_results(
444
+ manifest, manifest_nodes, {}
445
+ )
446
+
447
+ # Update cube_sync step state
448
+ current_state = state_manager.update_step_state(
449
+ current_state,
450
+ 'cube_sync',
451
+ 'failed' if cube_sync_error else 'success',
452
+ cube_sync_error
453
+ )
454
+ state_manager.save_state(current_state)
455
+ click.echo(f" State saved to {state_path}")
437
456
 
438
457
  # ============================================================
439
458
  # STEP 2: Sync to Superset (if configured)
440
459
  # ============================================================
441
- if superset_url and superset_username and superset_password:
442
- click.echo("\n[2/3] Cube.js → Superset")
443
- click.echo("-" * 40)
460
+ click.echo("\n[2/3] Cube.js → Superset")
461
+ click.echo("-" * 40)
444
462
 
445
- if not changes_detected and not force_full_sync:
446
- click.echo(" Skipped - no changes detected")
447
- else:
448
- connector_config = {
449
- 'url': superset_url,
450
- 'username': superset_username,
451
- 'password': superset_password,
452
- 'database_name': cube_connection_name
453
- }
454
-
455
- connector = ConnectorRegistry.get_connector('superset', **connector_config)
456
- results = connector.sync_cube_schemas(output)
457
-
458
- successful = sum(1 for r in results if r.status == 'success')
459
- failed = sum(1 for r in results if r.status == 'failed')
460
- click.echo(f" Synced: {successful} successful, {failed} failed")
461
- else:
462
- click.echo("\n[2/3] Cube.js → Superset")
463
- click.echo("-" * 40)
463
+ if not superset_url or not superset_username or not superset_password:
464
464
  click.echo(" Skipped - no Superset credentials provided")
465
+ current_state = state_manager.update_step_state(current_state, 'superset_sync', 'skipped')
466
+ state_manager.save_state(current_state)
467
+ else:
468
+ should_run_superset = state_manager.should_run_step(
469
+ 'superset_sync', previous_state, changes_detected
470
+ ) or force_full_sync
471
+
472
+ if not should_run_superset:
473
+ click.echo(" Skipped - no changes and previous sync succeeded")
474
+ else:
475
+ superset_error = None
476
+ try:
477
+ connector_config = {
478
+ 'url': superset_url,
479
+ 'username': superset_username,
480
+ 'password': superset_password,
481
+ 'database_name': cube_connection_name
482
+ }
483
+
484
+ connector = ConnectorRegistry.get_connector('superset', **connector_config)
485
+ results = connector.sync_cube_schemas(output)
486
+
487
+ successful = sum(1 for r in results if r.status == 'success')
488
+ failed = sum(1 for r in results if r.status == 'failed')
489
+ click.echo(f" Synced: {successful} successful, {failed} failed")
490
+
491
+ if failed > 0:
492
+ superset_error = f"{failed} datasets failed to sync"
493
+ except Exception as e:
494
+ superset_error = str(e)
495
+ click.echo(f" Error: {superset_error}", err=True)
496
+
497
+ current_state = state_manager.update_step_state(
498
+ current_state,
499
+ 'superset_sync',
500
+ 'failed' if superset_error else 'success',
501
+ superset_error
502
+ )
503
+ state_manager.save_state(current_state)
465
504
 
466
505
  # ============================================================
467
506
  # STEP 3: Update RAG embeddings (if configured)
468
507
  # ============================================================
469
- if rag_api_url:
470
- click.echo("\n[3/3] Update RAG embeddings")
471
- click.echo("-" * 40)
508
+ click.echo("\n[3/3] Update RAG embeddings")
509
+ click.echo("-" * 40)
472
510
 
473
- if not changes_detected and not force_full_sync:
474
- click.echo(" Skipped - no changes detected")
511
+ if not rag_api_url:
512
+ click.echo(" Skipped - no RAG API URL provided")
513
+ current_state = state_manager.update_step_state(current_state, 'rag_sync', 'skipped')
514
+ state_manager.save_state(current_state)
515
+ else:
516
+ should_run_rag = state_manager.should_run_step(
517
+ 'rag_sync', previous_state, changes_detected
518
+ ) or force_full_sync
519
+
520
+ if not should_run_rag:
521
+ click.echo(" Skipped - no changes and previous sync succeeded")
475
522
  else:
523
+ rag_error = None
476
524
  try:
477
525
  # Call the RAG API to re-ingest embeddings
478
526
  response = requests.post(
@@ -485,13 +533,19 @@ def sync_all(
485
533
  result = response.json()
486
534
  click.echo(f" Ingested {result.get('schemas_ingested', 0)} schema documents")
487
535
  else:
488
- click.echo(f" Warning: RAG API returned {response.status_code}", err=True)
536
+ rag_error = f"RAG API returned {response.status_code}"
537
+ click.echo(f" Error: {rag_error}", err=True)
489
538
  except requests.RequestException as e:
490
- click.echo(f" Warning: Could not reach RAG API: {e}", err=True)
491
- else:
492
- click.echo("\n[3/3] Update RAG embeddings")
493
- click.echo("-" * 40)
494
- click.echo(" Skipped - no RAG API URL provided")
539
+ rag_error = str(e)
540
+ click.echo(f" Error: Could not reach RAG API: {rag_error}", err=True)
541
+
542
+ current_state = state_manager.update_step_state(
543
+ current_state,
544
+ 'rag_sync',
545
+ 'failed' if rag_error else 'success',
546
+ rag_error
547
+ )
548
+ state_manager.save_state(current_state)
495
549
 
496
550
  # ============================================================
497
551
  # Summary
@@ -500,12 +554,27 @@ def sync_all(
500
554
  click.echo("SYNC COMPLETE")
501
555
  click.echo("=" * 60)
502
556
 
557
+ # Show step statuses
558
+ click.echo(f" Cube sync: {current_state.cube_sync.status if current_state.cube_sync else 'unknown'}")
559
+ click.echo(f" Superset sync: {current_state.superset_sync.status if current_state.superset_sync else 'unknown'}")
560
+ click.echo(f" RAG sync: {current_state.rag_sync.status if current_state.rag_sync else 'unknown'}")
561
+
503
562
  if changes_detected or force_full_sync:
504
563
  click.echo(f" Models processed: {len(added_models) + len(modified_models)}")
505
564
  click.echo(f" Models removed: {len(removed_models)}")
506
565
  click.echo(f" Cube.js files generated: {len(generated_files)}")
507
566
  else:
508
- click.echo(" No changes - everything is up to date")
567
+ click.echo(" No model changes detected")
568
+
569
+ # Exit with error if any step failed
570
+ any_failed = (
571
+ (current_state.cube_sync and current_state.cube_sync.status == 'failed') or
572
+ (current_state.superset_sync and current_state.superset_sync.status == 'failed') or
573
+ (current_state.rag_sync and current_state.rag_sync.status == 'failed')
574
+ )
575
+ if any_failed:
576
+ click.echo("\n ⚠️ Some steps failed - they will be retried on next run")
577
+ sys.exit(1)
509
578
 
510
579
  except Exception as e:
511
580
  click.echo(f"Error: {str(e)}", err=True)
@@ -114,9 +114,20 @@ class ModelState(BaseModel):
114
114
  output_file: str
115
115
 
116
116
 
117
+ class StepState(BaseModel):
118
+ """Represents the state of a pipeline step"""
119
+ status: str # 'success', 'failed', 'skipped'
120
+ last_run: Optional[str] = None
121
+ error: Optional[str] = None
122
+
123
+
117
124
  class SyncState(BaseModel):
118
125
  """Represents the overall state for incremental sync"""
119
- version: str = "1.0"
126
+ version: str = "1.1"
120
127
  last_sync_timestamp: str
121
128
  manifest_path: str
122
- models: Dict[str, ModelState] = {}
129
+ models: Dict[str, ModelState] = {}
130
+ # Step states for tracking pipeline progress
131
+ cube_sync: Optional[StepState] = None
132
+ superset_sync: Optional[StepState] = None
133
+ rag_sync: Optional[StepState] = None
@@ -10,7 +10,7 @@ from datetime import datetime
10
10
  from pathlib import Path
11
11
  from typing import Dict, List, Optional, Set, Tuple
12
12
 
13
- from .models import ModelState, SyncState
13
+ from .models import ModelState, StepState, SyncState
14
14
 
15
15
 
16
16
  class StateManager:
@@ -219,3 +219,65 @@ class StateManager:
219
219
  files_to_delete.append(output_file)
220
220
 
221
221
  return files_to_delete
222
+
223
+ def should_run_step(
224
+ self,
225
+ step_name: str,
226
+ previous_state: Optional[SyncState],
227
+ changes_detected: bool,
228
+ ) -> bool:
229
+ """
230
+ Determine if a pipeline step should run.
231
+
232
+ A step should run if:
233
+ - There are changes detected, OR
234
+ - The previous run of this step failed
235
+
236
+ Args:
237
+ step_name: Name of the step ('cube_sync', 'superset_sync', 'rag_sync')
238
+ previous_state: Previous sync state
239
+ changes_detected: Whether model changes were detected
240
+
241
+ Returns:
242
+ True if the step should run
243
+ """
244
+ if changes_detected:
245
+ return True
246
+
247
+ if previous_state is None:
248
+ return True
249
+
250
+ step_state = getattr(previous_state, step_name, None)
251
+ if step_state is None:
252
+ return True
253
+
254
+ # Re-run if previous attempt failed
255
+ return step_state.status == 'failed'
256
+
257
+ def update_step_state(
258
+ self,
259
+ state: SyncState,
260
+ step_name: str,
261
+ status: str,
262
+ error: Optional[str] = None,
263
+ ) -> SyncState:
264
+ """
265
+ Update the state of a pipeline step.
266
+
267
+ Args:
268
+ state: Current sync state
269
+ step_name: Name of the step ('cube_sync', 'superset_sync', 'rag_sync')
270
+ status: Step status ('success', 'failed', 'skipped')
271
+ error: Error message if failed
272
+
273
+ Returns:
274
+ Updated SyncState
275
+ """
276
+ timestamp = datetime.utcnow().isoformat() + "Z"
277
+ step_state = StepState(
278
+ status=status,
279
+ last_run=timestamp,
280
+ error=error,
281
+ )
282
+ setattr(state, step_name, step_state)
283
+ return state
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: dbt-cube-sync
3
- Version: 0.1.0a10
3
+ Version: 0.1.0a11
4
4
  Summary: Synchronization tool for dbt models to Cube.js schemas and BI tools
5
5
  Author: Ponder
6
6
  Requires-Python: >=3.9,<4.0
@@ -1,5 +1,5 @@
1
1
  dbt_cube_sync/__init__.py,sha256=aifkfgUDRPL5v0LZzceH2LXu66YDkJjdpvKwXsdikbI,113
2
- dbt_cube_sync/cli.py,sha256=AxSVF3hJJqovk51mjA8Nyyte5NkfukSF3sAjk_VYJ6Y,20992
2
+ dbt_cube_sync/cli.py,sha256=bD7rOPstM9qESCTaovEbfdl80wz5yhk8GDnHnb-hBd0,24117
3
3
  dbt_cube_sync/config.py,sha256=qhGE7CxTmh0RhPizgd3x3Yj-3L2LoC00UQIDT0q9FlQ,3858
4
4
  dbt_cube_sync/connectors/__init__.py,sha256=NG6tYZ3CYD5bG_MfNLZrUM8YoBEKArG8-AOmJ8pwvQI,52
5
5
  dbt_cube_sync/connectors/base.py,sha256=JLzerxJdt34z0kWuyieL6UQhf5_dUYPGmwkiRWBuSPY,2802
@@ -10,9 +10,9 @@ dbt_cube_sync/core/__init__.py,sha256=kgsawtU5dqEvnHz6dU8qwJbH3rtIV7QlK2MhtYVDCa
10
10
  dbt_cube_sync/core/cube_generator.py,sha256=DtmaA_dtWmBVJnSWHVoQi-3KEsRc0axHZpCUEcKeYAk,11061
11
11
  dbt_cube_sync/core/db_inspector.py,sha256=V_cd12FBXj-1gB2JZeYmkQluUO-UYufy_tvfYoJXCGI,5073
12
12
  dbt_cube_sync/core/dbt_parser.py,sha256=KbhDoB0ULP6JDUPZPDVbm9yCtRKrW17ptGoJvVLtueY,12763
13
- dbt_cube_sync/core/models.py,sha256=2s5iZ9MEBGfSzkB4HJB5vG0mZqNXNJSfAD3Byw1IVe4,3203
14
- dbt_cube_sync/core/state_manager.py,sha256=7uXJtlZBIWj6s6XgAhNlP6UHdfhH0y461iyQlfidqGI,7233
15
- dbt_cube_sync-0.1.0a10.dist-info/METADATA,sha256=Foy8KI7-ILdZwTvejfjukArtnGNJJg85vwIlQoMS31w,10681
16
- dbt_cube_sync-0.1.0a10.dist-info/WHEEL,sha256=3ny-bZhpXrU6vSQ1UPG34FoxZBp3lVcvK0LkgUz6VLk,88
17
- dbt_cube_sync-0.1.0a10.dist-info/entry_points.txt,sha256=iEAB_nZ1AoSeFwSHPY2tr02xmTHLVFKp5CJeFh0AfCw,56
18
- dbt_cube_sync-0.1.0a10.dist-info/RECORD,,
13
+ dbt_cube_sync/core/models.py,sha256=KoYrIRjnUPwDigrJmvldv0kb0V6rY141XavjRKfgB3A,3578
14
+ dbt_cube_sync/core/state_manager.py,sha256=MoEzAkyVPvSEiiiPaBz_V8X5hqZqzZ4t1w-Sjf_3oUA,8963
15
+ dbt_cube_sync-0.1.0a11.dist-info/METADATA,sha256=Ayr_7pO1GjQpZ1btSdmsMsm8n4ueiviwuY5ot7roh2Q,10681
16
+ dbt_cube_sync-0.1.0a11.dist-info/WHEEL,sha256=3ny-bZhpXrU6vSQ1UPG34FoxZBp3lVcvK0LkgUz6VLk,88
17
+ dbt_cube_sync-0.1.0a11.dist-info/entry_points.txt,sha256=iEAB_nZ1AoSeFwSHPY2tr02xmTHLVFKp5CJeFh0AfCw,56
18
+ dbt_cube_sync-0.1.0a11.dist-info/RECORD,,