openms-insight 0.1.2__py3-none-any.whl → 0.1.3__py3-none-any.whl

This diff compares the contents of two package versions as they were publicly released to a supported registry; it is provided for informational purposes only.
@@ -5,15 +5,15 @@ This package provides reusable, interactive Streamlit components backed by Vue.j
  visualizations with cross-component selection state management.
  """
 
+ from .components.heatmap import Heatmap
+ from .components.lineplot import LinePlot
+ from .components.sequenceview import SequenceView, SequenceViewResult
+ from .components.table import Table
  from .core.base import BaseComponent
- from .core.state import StateManager
- from .core.registry import register_component, get_component_class
  from .core.cache import CacheMissError
-
- from .components.table import Table
- from .components.lineplot import LinePlot
- from .components.heatmap import Heatmap
- from .components.sequenceview import SequenceView
+ from .core.registry import get_component_class, register_component
+ from .core.state import StateManager
+ from .rendering.bridge import clear_component_annotations, get_component_annotations
 
  __version__ = "0.1.0"
 
@@ -29,4 +29,8 @@ __all__ = [
  "LinePlot",
  "Heatmap",
  "SequenceView",
+ "SequenceViewResult",
+ # Utilities
+ "get_component_annotations",
+ "clear_component_annotations",
  ]
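
Net effect on the public API: besides reordering imports, 0.1.3 re-exports SequenceViewResult and two annotation helpers from .rendering.bridge at the package root. A minimal import sketch; the top-level module name openms_insight is assumed from the distribution name and is not shown in the diff:

    # Names newly exported from the package root in 0.1.3 (per the __all__ additions above)
    from openms_insight import (
        SequenceViewResult,
        clear_component_annotations,
        get_component_annotations,
    )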
@@ -1,8 +1,8 @@
  """Visualization components."""
 
- from .table import Table
- from .lineplot import LinePlot
  from .heatmap import Heatmap
+ from .lineplot import LinePlot
+ from .table import Table
 
  __all__ = [
  "Table",
@@ -22,10 +22,10 @@ def _make_zoom_cache_key(zoom: Optional[Dict[str, Any]]) -> tuple:
  if zoom is None:
  return (None,)
  return (
- ('x0', zoom.get('xRange', [-1, -1])[0]),
- ('x1', zoom.get('xRange', [-1, -1])[1]),
- ('y0', zoom.get('yRange', [-1, -1])[0]),
- ('y1', zoom.get('yRange', [-1, -1])[1]),
+ ("x0", zoom.get("xRange", [-1, -1])[0]),
+ ("x1", zoom.get("xRange", [-1, -1])[1]),
+ ("y0", zoom.get("yRange", [-1, -1])[0]),
+ ("y1", zoom.get("yRange", [-1, -1])[1]),
  )
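
Only the quote style changes in _make_zoom_cache_key; its behavior is unchanged. For reference, a worked example of the hashable key it builds (the zoom values are made up):

    # Hypothetical zoom state -> flat, hashable cache key
    zoom = {"xRange": [120.5, 240.0], "yRange": [300.0, 900.0]}
    _make_zoom_cache_key(zoom)   # (("x0", 120.5), ("x1", 240.0), ("y0", 300.0), ("y1", 900.0))
    _make_zoom_cache_key(None)   # (None,)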
 
 
@@ -66,11 +66,11 @@ class Heatmap(BaseComponent):
  def __init__(
  self,
  cache_id: str,
- x_column: str,
- y_column: str,
+ x_column: Optional[str] = None,
+ y_column: Optional[str] = None,
  data: Optional[pl.LazyFrame] = None,
  data_path: Optional[str] = None,
- intensity_column: str = 'intensity',
+ intensity_column: Optional[str] = None,
  filters: Optional[Dict[str, str]] = None,
  filter_defaults: Optional[Dict[str, Any]] = None,
  interactivity: Optional[Dict[str, str]] = None,
@@ -79,15 +79,15 @@ class Heatmap(BaseComponent):
  min_points: int = 20000,
  x_bins: int = 400,
  y_bins: int = 50,
- zoom_identifier: str = 'heatmap_zoom',
+ zoom_identifier: str = "heatmap_zoom",
  title: Optional[str] = None,
  x_label: Optional[str] = None,
  y_label: Optional[str] = None,
- colorscale: str = 'Portland',
+ colorscale: str = "Portland",
  use_simple_downsample: bool = False,
  use_streaming: bool = True,
  categorical_filters: Optional[List[str]] = None,
- **kwargs
+ **kwargs,
  ):
  """
  Initialize the Heatmap component.
@@ -165,7 +165,7 @@ class Heatmap(BaseComponent):
  use_simple_downsample=use_simple_downsample,
  use_streaming=use_streaming,
  categorical_filters=categorical_filters,
- **kwargs
+ **kwargs,
  )
 
  def _get_cache_config(self) -> Dict[str, Any]:
@@ -176,17 +176,39 @@ class Heatmap(BaseComponent):
  Dict of config values that affect preprocessing
  """
  return {
- 'x_column': self._x_column,
- 'y_column': self._y_column,
- 'intensity_column': self._intensity_column,
- 'min_points': self._min_points,
- 'x_bins': self._x_bins,
- 'y_bins': self._y_bins,
- 'use_simple_downsample': self._use_simple_downsample,
- 'use_streaming': self._use_streaming,
- 'categorical_filters': sorted(self._categorical_filters),
+ "x_column": self._x_column,
+ "y_column": self._y_column,
+ "intensity_column": self._intensity_column,
+ "min_points": self._min_points,
+ "x_bins": self._x_bins,
+ "y_bins": self._y_bins,
+ "use_simple_downsample": self._use_simple_downsample,
+ "use_streaming": self._use_streaming,
+ "categorical_filters": sorted(self._categorical_filters),
+ "zoom_identifier": self._zoom_identifier,
+ "title": self._title,
+ "x_label": self._x_label,
+ "y_label": self._y_label,
+ "colorscale": self._colorscale,
  }
 
+ def _restore_cache_config(self, config: Dict[str, Any]) -> None:
+ """Restore component-specific configuration from cached config."""
+ self._x_column = config.get("x_column")
+ self._y_column = config.get("y_column")
+ self._intensity_column = config.get("intensity_column", "intensity")
+ self._min_points = config.get("min_points", 20000)
+ self._x_bins = config.get("x_bins", 400)
+ self._y_bins = config.get("y_bins", 50)
+ self._use_simple_downsample = config.get("use_simple_downsample", False)
+ self._use_streaming = config.get("use_streaming", True)
+ self._categorical_filters = config.get("categorical_filters", [])
+ self._zoom_identifier = config.get("zoom_identifier", "heatmap_zoom")
+ self._title = config.get("title")
+ self._x_label = config.get("x_label", self._x_column)
+ self._y_label = config.get("y_label", self._y_column)
+ self._colorscale = config.get("colorscale", "Portland")
+
  def get_state_dependencies(self) -> list:
  """
  Return list of state keys that affect this component's data.
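
This hunk widens _get_cache_config to include presentation settings (zoom identifier, title, axis labels, colorscale) and adds _restore_cache_config to reapply a cached config to a component. A sketch of the intended round trip, with hypothetical instances and column names (the example values are not from the diff):

    # Illustrative round trip between the two methods above
    # (heatmap and rehydrated_heatmap are hypothetical Heatmap instances)
    cfg = heatmap._get_cache_config()
    # cfg now also carries presentation settings, e.g.
    # {"x_column": "rt", "y_column": "mz", "colorscale": "Portland", "zoom_identifier": "heatmap_zoom", ...}
    rehydrated_heatmap._restore_cache_config(cfg)  # reapplies columns, bins, labels, and styling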
@@ -244,53 +266,71 @@ class Heatmap(BaseComponent):
  self._x_column,
  self._y_column,
  )
- self._preprocessed_data['x_range'] = x_range
- self._preprocessed_data['y_range'] = y_range
+ self._preprocessed_data["x_range"] = x_range
+ self._preprocessed_data["y_range"] = y_range
 
  # Get total count
  total = self._raw_data.select(pl.len()).collect().item()
- self._preprocessed_data['total'] = total
+ self._preprocessed_data["total"] = total
 
  # Store metadata about categorical filters
- self._preprocessed_data['has_categorical_filters'] = True
- self._preprocessed_data['categorical_filter_values'] = {}
+ self._preprocessed_data["has_categorical_filters"] = True
+ self._preprocessed_data["categorical_filter_values"] = {}
 
  # Process each categorical filter
  for filter_id in self._categorical_filters:
  if filter_id not in self._filters:
- print(f"[HEATMAP] Warning: categorical_filter '{filter_id}' not in filters, skipping", file=sys.stderr)
+ print(
+ f"[HEATMAP] Warning: categorical_filter '{filter_id}' not in filters, skipping",
+ file=sys.stderr,
+ )
  continue
 
  column_name = self._filters[filter_id]
 
  # Get unique values for this filter
  unique_values = (
- self._raw_data
- .select(pl.col(column_name))
+ self._raw_data.select(pl.col(column_name))
  .unique()
  .collect()
  .to_series()
  .to_list()
  )
- unique_values = sorted([v for v in unique_values if v is not None and v >= 0])
+ unique_values = sorted(
+ [v for v in unique_values if v is not None and v >= 0]
+ )
 
- print(f"[HEATMAP] Categorical filter '{filter_id}' ({column_name}): {len(unique_values)} unique values", file=sys.stderr)
+ print(
+ f"[HEATMAP] Categorical filter '{filter_id}' ({column_name}): {len(unique_values)} unique values",
+ file=sys.stderr,
+ )
 
- self._preprocessed_data['categorical_filter_values'][filter_id] = unique_values
+ self._preprocessed_data["categorical_filter_values"][filter_id] = (
+ unique_values
+ )
 
  # Create compression levels for each filter value
  for filter_value in unique_values:
  # Filter data to this value
- filtered_data = self._raw_data.filter(pl.col(column_name) == filter_value)
+ filtered_data = self._raw_data.filter(
+ pl.col(column_name) == filter_value
+ )
  filtered_total = filtered_data.select(pl.len()).collect().item()
 
  # Compute level sizes for this filtered subset
- level_sizes = compute_compression_levels(self._min_points, filtered_total)
+ level_sizes = compute_compression_levels(
+ self._min_points, filtered_total
+ )
 
- print(f"[HEATMAP] Value {filter_value}: {filtered_total:,} pts → levels {level_sizes}", file=sys.stderr)
+ print(
+ f"[HEATMAP] Value {filter_value}: {filtered_total:,} pts → levels {level_sizes}",
+ file=sys.stderr,
+ )
 
  # Store level sizes for this filter value
- self._preprocessed_data[f'cat_level_sizes_{filter_id}_{filter_value}'] = level_sizes
+ self._preprocessed_data[
+ f"cat_level_sizes_{filter_id}_{filter_value}"
+ ] = level_sizes
 
  # Build each compressed level
  for level_idx, target_size in enumerate(level_sizes):
@@ -319,22 +359,24 @@ class Heatmap(BaseComponent):
  # Sort by x, y for efficient range query predicate pushdown
  level = level.sort([self._x_column, self._y_column])
  # Store LazyFrame for streaming to disk
- level_key = f'cat_level_{filter_id}_{filter_value}_{level_idx}'
+ level_key = f"cat_level_{filter_id}_{filter_value}_{level_idx}"
  self._preprocessed_data[level_key] = level # Keep lazy
 
  # Add full resolution as final level (for zoom fallback)
  # Also sorted for consistent predicate pushdown behavior
  num_compressed = len(level_sizes)
- full_res_key = f'cat_level_{filter_id}_{filter_value}_{num_compressed}'
+ full_res_key = f"cat_level_{filter_id}_{filter_value}_{num_compressed}"
  self._preprocessed_data[full_res_key] = filtered_data.sort(
  [self._x_column, self._y_column]
  )
- self._preprocessed_data[f'cat_num_levels_{filter_id}_{filter_value}'] = num_compressed + 1
+ self._preprocessed_data[
+ f"cat_num_levels_{filter_id}_{filter_value}"
+ ] = num_compressed + 1
 
  # Also create global levels for when no categorical filter is selected
  # (fallback to standard behavior)
  level_sizes = compute_compression_levels(self._min_points, total)
- self._preprocessed_data['level_sizes'] = level_sizes
+ self._preprocessed_data["level_sizes"] = level_sizes
 
  for i, size in enumerate(level_sizes):
  # If target size equals total, skip downsampling - use all data
@@ -360,15 +402,15 @@ class Heatmap(BaseComponent):
  )
  # Sort by x, y for efficient range query predicate pushdown
  level = level.sort([self._x_column, self._y_column])
- self._preprocessed_data[f'level_{i}'] = level # Keep lazy
+ self._preprocessed_data[f"level_{i}"] = level # Keep lazy
 
  # Add full resolution as final level (for zoom fallback)
  # Also sorted for consistent predicate pushdown behavior
  num_compressed = len(level_sizes)
- self._preprocessed_data[f'level_{num_compressed}'] = self._raw_data.sort(
+ self._preprocessed_data[f"level_{num_compressed}"] = self._raw_data.sort(
  [self._x_column, self._y_column]
  )
- self._preprocessed_data['num_levels'] = num_compressed + 1
+ self._preprocessed_data["num_levels"] = num_compressed + 1
 
  def _preprocess_streaming(self) -> None:
  """
@@ -383,19 +425,19 @@ class Heatmap(BaseComponent):
  self._x_column,
  self._y_column,
  )
- self._preprocessed_data['x_range'] = x_range
- self._preprocessed_data['y_range'] = y_range
+ self._preprocessed_data["x_range"] = x_range
+ self._preprocessed_data["y_range"] = y_range
 
  # Get total count
  total = self._raw_data.select(pl.len()).collect().item()
- self._preprocessed_data['total'] = total
+ self._preprocessed_data["total"] = total
 
  # Compute target sizes for levels
  level_sizes = compute_compression_levels(self._min_points, total)
- self._preprocessed_data['level_sizes'] = level_sizes
+ self._preprocessed_data["level_sizes"] = level_sizes
 
  # Build and collect each level
- self._preprocessed_data['levels'] = []
+ self._preprocessed_data["levels"] = []
 
  for i, size in enumerate(level_sizes):
  # If target size equals total, skip downsampling - use all data
@@ -424,17 +466,17 @@ class Heatmap(BaseComponent):
  level = level.sort([self._x_column, self._y_column])
  # Store LazyFrame for streaming to disk
  # Base class will use sink_parquet() to stream without full materialization
- self._preprocessed_data[f'level_{i}'] = level # Keep lazy
+ self._preprocessed_data[f"level_{i}"] = level # Keep lazy
 
  # Add full resolution as final level (for zoom fallback)
  # Also sorted for consistent predicate pushdown behavior
  num_compressed = len(level_sizes)
- self._preprocessed_data[f'level_{num_compressed}'] = self._raw_data.sort(
+ self._preprocessed_data[f"level_{num_compressed}"] = self._raw_data.sort(
  [self._x_column, self._y_column]
  )
 
  # Store number of levels for reconstruction (includes full resolution)
- self._preprocessed_data['num_levels'] = num_compressed + 1
+ self._preprocessed_data["num_levels"] = num_compressed + 1
 
  def _preprocess_eager(self) -> None:
  """
@@ -450,16 +492,16 @@ class Heatmap(BaseComponent):
  self._x_column,
  self._y_column,
  )
- self._preprocessed_data['x_range'] = x_range
- self._preprocessed_data['y_range'] = y_range
+ self._preprocessed_data["x_range"] = x_range
+ self._preprocessed_data["y_range"] = y_range
 
  # Get total count
  total = self._raw_data.select(pl.len()).collect().item()
- self._preprocessed_data['total'] = total
+ self._preprocessed_data["total"] = total
 
  # Compute compression level target sizes
  level_sizes = compute_compression_levels(self._min_points, total)
- self._preprocessed_data['level_sizes'] = level_sizes
+ self._preprocessed_data["level_sizes"] = level_sizes
 
  # Build levels from largest to smallest
  if level_sizes:
@@ -493,21 +535,23 @@ class Heatmap(BaseComponent):
  # Store LazyFrame for streaming to disk
  level_idx = len(level_sizes) - 1 - i
  if isinstance(downsampled, pl.LazyFrame):
- self._preprocessed_data[f'level_{level_idx}'] = downsampled # Keep lazy
+ self._preprocessed_data[f"level_{level_idx}"] = (
+ downsampled # Keep lazy
+ )
  else:
  # DataFrame from downsample_2d - convert back to lazy
- self._preprocessed_data[f'level_{level_idx}'] = downsampled.lazy()
+ self._preprocessed_data[f"level_{level_idx}"] = downsampled.lazy()
  current = downsampled
 
  # Add full resolution as final level (for zoom fallback)
  # Also sorted for consistent predicate pushdown behavior
  num_compressed = len(level_sizes)
- self._preprocessed_data[f'level_{num_compressed}'] = self._raw_data.sort(
+ self._preprocessed_data[f"level_{num_compressed}"] = self._raw_data.sort(
  [self._x_column, self._y_column]
  )
 
  # Store number of levels for reconstruction (includes full resolution)
- self._preprocessed_data['num_levels'] = num_compressed + 1
+ self._preprocessed_data["num_levels"] = num_compressed + 1
 
  def _get_levels(self) -> list:
  """
@@ -516,11 +560,11 @@ class Heatmap(BaseComponent):
  Reconstructs the levels list from preprocessed data,
  adding full resolution at the end.
  """
- num_levels = self._preprocessed_data.get('num_levels', 0)
+ num_levels = self._preprocessed_data.get("num_levels", 0)
  levels = []
 
  for i in range(num_levels):
- level_data = self._preprocessed_data.get(f'level_{i}')
+ level_data = self._preprocessed_data.get(f"level_{i}")
  if level_data is not None:
  levels.append(level_data)
 
@@ -543,7 +587,7 @@ class Heatmap(BaseComponent):
  Returns ([], None) if no categorical levels exist for this filter
  """
  # Check if we have categorical levels for this filter/value
- num_levels_key = f'cat_num_levels_{filter_id}_{filter_value}'
+ num_levels_key = f"cat_num_levels_{filter_id}_{filter_value}"
  num_levels = self._preprocessed_data.get(num_levels_key, 0)
 
  if num_levels == 0:
@@ -551,14 +595,16 @@ class Heatmap(BaseComponent):
 
  levels = []
  for i in range(num_levels):
- level_key = f'cat_level_{filter_id}_{filter_value}_{i}'
+ level_key = f"cat_level_{filter_id}_{filter_value}_{i}"
  level_data = self._preprocessed_data.get(level_key)
  if level_data is not None:
  levels.append(level_data)
 
  return levels, None # Full resolution included in cached levels
 
- def _get_levels_for_state(self, state: Dict[str, Any]) -> Tuple[list, Optional[pl.LazyFrame]]:
+ def _get_levels_for_state(
+ self, state: Dict[str, Any]
+ ) -> Tuple[list, Optional[pl.LazyFrame]]:
  """
  Get appropriate compression levels based on current filter state.
 
@@ -573,8 +619,10 @@ class Heatmap(BaseComponent):
  Tuple of (levels list, raw data for full resolution)
  """
  # Check if we have categorical filters and a selected value
- if self._preprocessed_data.get('has_categorical_filters'):
- cat_filter_values = self._preprocessed_data.get('categorical_filter_values', {})
+ if self._preprocessed_data.get("has_categorical_filters"):
+ cat_filter_values = self._preprocessed_data.get(
+ "categorical_filter_values", {}
+ )
 
  for filter_id in self._categorical_filters:
  if filter_id not in cat_filter_values:
@@ -590,7 +638,9 @@ class Heatmap(BaseComponent):
 
  # Check if this value has per-filter levels
  if selected_value in cat_filter_values[filter_id]:
- levels, filtered_raw = self._get_categorical_levels(filter_id, selected_value)
+ levels, filtered_raw = self._get_categorical_levels(
+ filter_id, selected_value
+ )
  if levels:
  return levels, filtered_raw
 
@@ -599,22 +649,19 @@ class Heatmap(BaseComponent):
 
  def _get_vue_component_name(self) -> str:
  """Return the Vue component name."""
- return 'PlotlyHeatmap'
+ return "PlotlyHeatmap"
 
  def _get_data_key(self) -> str:
  """Return the key used to send primary data to Vue."""
- return 'heatmapData'
+ return "heatmapData"
 
  def _is_no_zoom(self, zoom: Optional[Dict[str, Any]]) -> bool:
  """Check if zoom state represents no zoom (full view)."""
  if zoom is None:
  return True
- x_range = zoom.get('xRange', [-1, -1])
- y_range = zoom.get('yRange', [-1, -1])
- return (
- x_range[0] < 0 and x_range[1] < 0 and
- y_range[0] < 0 and y_range[1] < 0
- )
+ x_range = zoom.get("xRange", [-1, -1])
+ y_range = zoom.get("yRange", [-1, -1])
+ return x_range[0] < 0 and x_range[1] < 0 and y_range[0] < 0 and y_range[1] < 0
 
  def _select_level_for_zoom(
  self,
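
The reflowed _is_no_zoom makes the zoom sentinel easier to read: None, or ranges that are entirely negative, mean the full unzoomed view. Illustrative calls (heatmap is a hypothetical Heatmap instance; the numbers are made up):

    heatmap._is_no_zoom(None)                                           # True: no zoom state at all
    heatmap._is_no_zoom({"xRange": [-1, -1], "yRange": [-1, -1]})       # True: sentinel for full view
    heatmap._is_no_zoom({"xRange": [100.0, 250.0], "yRange": [0, 60]})  # False: an actual zoom window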
@@ -641,8 +688,9 @@ class Heatmap(BaseComponent):
  Filtered Polars DataFrame at appropriate resolution
  """
  import sys
- x0, x1 = zoom['xRange']
- y0, y1 = zoom['yRange']
+
+ x0, x1 = zoom["xRange"]
+ y0, y1 = zoom["yRange"]
 
  # Add raw data as final level if available
  all_levels = list(levels)
@@ -658,10 +706,10 @@ class Heatmap(BaseComponent):
 
  # Filter to zoom range
  filtered_lazy = level_data.filter(
- (pl.col(self._x_column) >= x0) &
- (pl.col(self._x_column) <= x1) &
- (pl.col(self._y_column) >= y0) &
- (pl.col(self._y_column) <= y1)
+ (pl.col(self._x_column) >= x0)
+ & (pl.col(self._x_column) <= x1)
+ & (pl.col(self._y_column) >= y0)
+ & (pl.col(self._y_column) <= y1)
  )
 
  # Apply non-categorical filters if any
@@ -680,7 +728,10 @@ class Heatmap(BaseComponent):
 
  count = len(filtered)
  last_filtered = filtered
- print(f"[HEATMAP] Level {level_idx}: {count} pts in zoom range", file=sys.stderr)
+ print(
+ f"[HEATMAP] Level {level_idx}: {count} pts in zoom range",
+ file=sys.stderr,
+ )
 
  if count >= self._min_points:
  # This level has enough detail
@@ -740,6 +791,7 @@ class Heatmap(BaseComponent):
  Dict with heatmapData (pandas DataFrame) and _hash for change detection
  """
  import sys
+
  zoom = state.get(self._zoom_identifier)
 
  # Build columns to select
@@ -761,7 +813,9 @@ class Heatmap(BaseComponent):
 
  # Get levels based on current state (may use per-filter levels)
  levels, filtered_raw = self._get_levels_for_state(state)
- level_sizes = [len(l) if isinstance(l, pl.DataFrame) else '?' for l in levels]
+ level_sizes = [
+ len(lvl) if isinstance(lvl, pl.DataFrame) else "?" for lvl in levels
+ ]
 
  # Determine which filters still need to be applied at render time
  # (filters not in categorical_filters need runtime application)
@@ -775,12 +829,15 @@ class Heatmap(BaseComponent):
  # No zoom - use smallest level
  if not levels:
  # No levels available
- print(f"[HEATMAP] No levels available", file=sys.stderr)
- return {'heatmapData': pl.DataFrame().to_pandas(), '_hash': ''}
+ print("[HEATMAP] No levels available", file=sys.stderr)
+ return {"heatmapData": pl.DataFrame().to_pandas(), "_hash": ""}
 
  data = levels[0]
- using_cat = self._preprocessed_data.get('has_categorical_filters', False)
- print(f"[HEATMAP] No zoom → level 0 ({level_sizes[0]} pts), levels={level_sizes}, categorical={using_cat}", file=sys.stderr)
+ using_cat = self._preprocessed_data.get("has_categorical_filters", False)
+ print(
+ f"[HEATMAP] No zoom → level 0 ({level_sizes[0]} pts), levels={level_sizes}, categorical={using_cat}",
+ file=sys.stderr,
+ )
 
  # Ensure we have a LazyFrame
  if isinstance(data, pl.DataFrame):
@@ -796,7 +853,9 @@ class Heatmap(BaseComponent):
  filter_defaults=self._filter_defaults,
  )
  # Sort by intensity ascending so high-intensity points are drawn on top
- df_pandas = df_pandas.sort_values(self._intensity_column).reset_index(drop=True)
+ df_pandas = df_pandas.sort_values(self._intensity_column).reset_index(
+ drop=True
+ )
  else:
  # No filters to apply - levels already filtered by categorical filter
  schema_names = data.collect_schema().names()
@@ -817,13 +876,16 @@ class Heatmap(BaseComponent):
  df_polars = df_polars.select(available_cols)
  # Sort by intensity ascending so high-intensity points are drawn on top
  df_polars = df_polars.sort(self._intensity_column)
- print(f"[HEATMAP] Selected {len(df_polars)} pts for zoom, levels={level_sizes}", file=sys.stderr)
+ print(
+ f"[HEATMAP] Selected {len(df_polars)} pts for zoom, levels={level_sizes}",
+ file=sys.stderr,
+ )
  data_hash = compute_dataframe_hash(df_polars)
  df_pandas = df_polars.to_pandas()
 
  return {
- 'heatmapData': df_pandas,
- '_hash': data_hash,
+ "heatmapData": df_pandas,
+ "_hash": data_hash,
  }
 
  def _get_component_args(self) -> Dict[str, Any]:
@@ -834,19 +896,19 @@ class Heatmap(BaseComponent):
  Dict with all heatmap configuration for Vue
  """
  args: Dict[str, Any] = {
- 'componentType': self._get_vue_component_name(),
- 'xColumn': self._x_column,
- 'yColumn': self._y_column,
- 'intensityColumn': self._intensity_column,
- 'xLabel': self._x_label,
- 'yLabel': self._y_label,
- 'colorscale': self._colorscale,
- 'zoomIdentifier': self._zoom_identifier,
- 'interactivity': self._interactivity,
+ "componentType": self._get_vue_component_name(),
+ "xColumn": self._x_column,
+ "yColumn": self._y_column,
+ "intensityColumn": self._intensity_column,
+ "xLabel": self._x_label,
+ "yLabel": self._y_label,
+ "colorscale": self._colorscale,
+ "zoomIdentifier": self._zoom_identifier,
+ "interactivity": self._interactivity,
  }
 
  if self._title:
- args['title'] = self._title
+ args["title"] = self._title
 
  # Add any extra config options
  args.update(self._config)
@@ -858,7 +920,7 @@ class Heatmap(BaseComponent):
  colorscale: Optional[str] = None,
  x_label: Optional[str] = None,
  y_label: Optional[str] = None,
- ) -> 'Heatmap':
+ ) -> "Heatmap":
  """
  Update heatmap styling.