goad-py 0.8.0 (cp38-abi3-win_amd64.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of goad-py might be problematic.

@@ -0,0 +1,614 @@
+ """
+ PHIPS-specific convergence extension for GOAD.
+
+ This module provides convergence tracking for PHIPS detector DSCS values.
+ Tracking them requires Custom binning with the PHIPS detector geometry and
+ post-processing to compute the mean DSCS at each of the 20 PHIPS detectors.
+ """
+
+ import os
+ import random
+ from dataclasses import dataclass
+ from pathlib import Path
+ from typing import Dict, List, Optional, Tuple
+
+ import numpy as np
+ from rich.console import Console
+
+ from . import _goad_py as goad
+ from .convergence import ConvergenceResults
+ from .convergence_display import (
+     ArrayConvergenceVariable,
+     ConvergenceDisplay,
+ )
+
+
+ @dataclass
+ class PHIPSConvergable:
+     """Convergence criteria for PHIPS detector DSCS values."""
+
+     tolerance_type: str = "relative"  # 'relative' or 'absolute'
+     tolerance: float = 0.25  # Default 25% relative tolerance
+     detector_indices: Optional[List[int]] = (
+         None  # Specific detectors to check (None = all)
+     )
+
+     def __post_init__(self):
+         valid_types = {"relative", "absolute"}
+         if self.tolerance_type not in valid_types:
+             raise ValueError(
+                 f"Invalid tolerance_type '{self.tolerance_type}'. Must be one of {valid_types}"
+             )
+
+         if self.tolerance <= 0:
+             raise ValueError(f"Tolerance must be positive, got {self.tolerance}")
+
+         if self.detector_indices is not None:
+             if not isinstance(self.detector_indices, list):
+                 raise ValueError("detector_indices must be a list of integers")
+             if not all(0 <= idx < 20 for idx in self.detector_indices):
+                 raise ValueError("detector_indices must be in range [0, 19]")
+
+
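For reference, the criteria above can be constructed in either mode; a brief sketch (the absolute threshold and detector subset below are illustrative values, not package defaults):

# Relative criterion (the default): per-detector SEM must fall below 25% of the mean.
relative_criteria = PHIPSConvergable(tolerance_type="relative", tolerance=0.25)

# Absolute criterion restricted to three detectors (threshold and indices are illustrative).
absolute_criteria = PHIPSConvergable(
    tolerance_type="absolute",
    tolerance=1e-14,
    detector_indices=[5, 6, 7],
)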
+ class PHIPSConvergence:
+     """
+     Convergence study for PHIPS detector DSCS values.
+
+     Requires Custom binning with PHIPS detector geometry (phips_bins_edges.toml).
+     Computes mean DSCS at each of 20 PHIPS detectors and tracks convergence.
+     """
+
+     # PHIPS detector parameters (from phips_detector_angles.py)
+     NUM_DETECTORS = 20
+     THETA_START = 18.0  # degrees
+     THETA_END = 170.0  # degrees
+     DETECTOR_WIDTH = 7.0  # degrees (aperture)
+
+     def __init__(
+         self,
+         settings: goad.Settings,
+         convergable: PHIPSConvergable,
+         batch_size: int = 24,
+         max_orientations: int = 100_000,
+         min_batches: int = 10,
+         log_file: Optional[str] = None,
+     ):
+         """
+         Initialize a PHIPS convergence study.
+
+         Args:
+             settings: GOAD settings with Custom binning scheme
+             convergable: PHIPS convergence criteria
+             batch_size: Number of orientations per iteration
+             max_orientations: Maximum total orientations before stopping
+             min_batches: Minimum number of batches before allowing convergence
+             log_file: Optional path to log file for convergence progress
+         """
+         self.settings = settings
+         # Enable quiet mode to suppress Rust progress bars
+         self.settings.quiet = True
+         self.convergable = convergable
+         self.batch_size = batch_size
+         self.max_orientations = max_orientations
+         self.min_batches = min_batches
+
+         # Validate inputs
+         if batch_size <= 0:
+             raise ValueError(f"batch_size must be positive, got {batch_size}")
+
+         if max_orientations <= 0:
+             raise ValueError(
+                 f"max_orientations must be positive, got {max_orientations}"
+             )
+
+         if min_batches <= 0:
+             raise ValueError(f"min_batches must be positive, got {min_batches}")
+
+         # Initialize tracking variables
+         self.n_orientations = 0
+         self.convergence_history = []
+
+         # Batch-based statistics tracking
+         self.batch_data = []  # List of batch statistics
+
+         # PHIPS detector centers (20 detectors from 18° to 170°)
+         self.detector_centers = np.linspace(
+             self.THETA_START, self.THETA_END, self.NUM_DETECTORS
+         )
+         self.half_width = self.DETECTOR_WIDTH / 2.0
+
+         # Accumulated PHIPS DSCS for final average
+         self.phips_dscs_sum = None
+
+         # Rich console
+         self._console = Console()
+
+         # Create display variable for PHIPS DSCS
+         display_variable = ArrayConvergenceVariable(
+             name="phips_dscs",
+             tolerance=convergable.tolerance,
+             tolerance_type=convergable.tolerance_type,
+             indices=convergable.detector_indices,
+         )
+
+         # Initialize display system
+         self._display = ConvergenceDisplay(
+             variables=[display_variable],
+             batch_size=self.batch_size,
+             min_batches=self.min_batches,
+             convergence_type=self._get_convergence_type(),
+             console=self._console,
+             log_file=log_file,
+         )
+
+     def _compute_phips_dscs_from_mueller2d(self, results: goad.Results) -> np.ndarray:
+         """
+         Compute mean DSCS at each of 20 PHIPS detectors from Custom binning results.
+
+         Args:
+             results: Results from MultiProblem with Custom binning
+
+         Returns:
+             Array of shape (20,) with mean DSCS per detector (NaN if no bins in detector)
+         """
+         # Get mueller_2d from Custom binning
+         mueller_2d = np.array(results.mueller)  # Shape: (n_custom_bins, 16)
+         bins_2d = results.bins  # List of (theta_center, phi_center) tuples
+
+         # Extract theta angles from bin centers
+         theta_angles = np.array([bin_tuple[0] for bin_tuple in bins_2d])
+
+         # Extract S11 and convert to DSCS
+         s11_values = mueller_2d[:, 0]
+         k = 2 * np.pi / self.settings.wavelength
+         dscs_conversion_factor = 1e-12 / k**2
+         dscs_values = s11_values * dscs_conversion_factor
+
+         # Compute mean DSCS for each detector
+         detector_dscs = []
+         for bin_center_theta in self.detector_centers:
+             lower_bound = bin_center_theta - self.half_width
+             upper_bound = bin_center_theta + self.half_width
+
+             # Find custom bins within this detector's angular window
+             indices = np.where(
+                 (theta_angles >= lower_bound) & (theta_angles < upper_bound)
+             )[0]
+
+             if len(indices) > 0:
+                 # Mean DSCS over bins in this detector window
+                 mean_dscs = np.mean(dscs_values[indices])
+                 detector_dscs.append(mean_dscs)
+             else:
+                 # No bins in this detector window
+                 detector_dscs.append(np.nan)
+
+         return np.array(detector_dscs)  # Shape: (20,)
+
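To make the windowing above concrete: the 20 detector centres are evenly spaced from 18° to 170° (8° apart), and each detector averages the custom bins whose theta centre lies within ±3.5° of its own centre, so the 1° gaps between apertures can leave a detector without bins (hence the NaN). A standalone sketch using the same constants:

import numpy as np

centers = np.linspace(18.0, 170.0, 20)  # THETA_START, THETA_END, NUM_DETECTORS
half_width = 7.0 / 2.0                  # DETECTOR_WIDTH / 2

print(centers[1] - centers[0])                             # 8.0 degrees between centres
print(centers[0] - half_width, centers[0] + half_width)    # first window: 14.5 to 21.5
print(centers[-1] - half_width, centers[-1] + half_width)  # last window: 166.5 to 173.5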
+     def _update_statistics(self, results: goad.Results, batch_size: int):
+         """
+         Update statistics with new batch results.
+
+         Args:
+             results: Results from a MultiProblem run
+             batch_size: Number of orientations in this batch
+         """
+         # Compute PHIPS DSCS for this batch
+         phips_dscs = self._compute_phips_dscs_from_mueller2d(results)
+
+         # Store batch data
+         batch_info = {
+             "batch_size": batch_size,
+             "phips_dscs": phips_dscs,  # Shape: (20,)
+         }
+         self.batch_data.append(batch_info)
+
+         # Accumulate for final average
+         if self.phips_dscs_sum is None:
+             self.phips_dscs_sum = phips_dscs * batch_size
+         else:
+             self.phips_dscs_sum += phips_dscs * batch_size
+
+         # Update total orientation count
+         self.n_orientations += batch_size
+
+     def _calculate_phips_mean_and_sem(self) -> Tuple[np.ndarray, np.ndarray]:
+         """
+         Calculate mean and SEM arrays for PHIPS DSCS across detectors.
+
+         Returns:
+             Tuple of (mean_array, sem_array) where each is shape (20,)
+         """
+         if not self.batch_data:
+             return np.full(self.NUM_DETECTORS, np.nan), np.full(
+                 self.NUM_DETECTORS, np.inf
+             )
+
+         # Extract batch values: shape (n_batches, 20)
+         batch_arrays = np.array([batch["phips_dscs"] for batch in self.batch_data])
+         batch_sizes = np.array([batch["batch_size"] for batch in self.batch_data])
+
+         if len(batch_arrays) < 2:
+             # Can't estimate variance with < 2 batches
+             mean_array = batch_arrays[0]
+             sem_array = np.full(self.NUM_DETECTORS, np.inf)
+             return mean_array, sem_array
+
+         # Weighted mean per detector across batches. Note: np.average propagates
+         # NaN, so a detector with no bins in any batch stays NaN in the mean;
+         # the between-batch variance below uses nanvar, which ignores NaN batches.
+         mean_array = np.average(
+             batch_arrays, axis=0, weights=batch_sizes
+         )  # Shape: (20,)
+
+         # Variance between batches at each detector (ignoring NaNs)
+         batch_means_variance = np.nanvar(batch_arrays, axis=0, ddof=1)  # Shape: (20,)
+
+         # Scale up to estimate population variance
+         avg_batch_size = np.mean(batch_sizes)
+         estimated_population_variance = batch_means_variance * avg_batch_size
+
+         # Calculate SEM for total sample
+         total_n = np.sum(batch_sizes)
+         sem_array = np.sqrt(
+             estimated_population_variance / (total_n - 1)
+         )  # Shape: (20,)
+
+         return mean_array, sem_array
+
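The estimator above never stores per-orientation values: each batch mean is treated as one sample, the between-batch variance is scaled by the average batch size to approximate the per-orientation variance, and the result is divided by (total_n - 1). A self-contained sketch of the same arithmetic with made-up numbers for a single detector:

import numpy as np

batch_means = np.array([2.1e-14, 1.9e-14, 2.3e-14, 2.0e-14])  # hypothetical per-batch DSCS
batch_sizes = np.array([24, 24, 24, 24])

mean = np.average(batch_means, weights=batch_sizes)
population_variance = np.var(batch_means, ddof=1) * batch_sizes.mean()
sem = np.sqrt(population_variance / (batch_sizes.sum() - 1))

print(mean, sem, sem / abs(mean))  # the relative SEM is what the default criterion checks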
+     def _check_convergence(self) -> bool:
+         """
+         Check if PHIPS DSCS values have converged.
+
+         Returns:
+             True if converged, False otherwise
+         """
+         if len(self.batch_data) < self.min_batches:
+             return False
+
+         mean_dscs, sem_dscs = self._calculate_phips_mean_and_sem()
+
+         # Determine which detectors to check
+         if self.convergable.detector_indices is not None:
+             check_indices = self.convergable.detector_indices
+         else:
+             # Check all detectors that have data (not NaN)
+             check_indices = np.where(~np.isnan(mean_dscs))[0]
+
+         if len(check_indices) == 0:
+             return False  # No valid detectors to check
+
+         # Extract values for detectors to check
+         mean_subset = mean_dscs[check_indices]
+         sem_subset = sem_dscs[check_indices]
+
+         # Check convergence based on tolerance type
+         if self.convergable.tolerance_type == "relative":
+             # Relative SEM
+             with np.errstate(divide="ignore", invalid="ignore"):
+                 relative_sem = np.where(
+                     mean_subset != 0, sem_subset / np.abs(mean_subset), np.inf
+                 )
+             converged = np.all(relative_sem < self.convergable.tolerance)
+         else:  # absolute
+             converged = np.all(sem_subset < self.convergable.tolerance)
+
+         return converged
+
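With the relative tolerance, the check reduces to a single elementwise comparison over the selected detectors; a toy illustration with hypothetical numbers:

import numpy as np

mean_subset = np.array([2.0e-14, 5.0e-15])  # hypothetical detector means
sem_subset = np.array([3.0e-15, 2.0e-15])   # hypothetical detector SEMs

# Relative SEMs are 0.15 and 0.40, so the second detector blocks convergence at tolerance 0.25.
print(np.all(sem_subset / np.abs(mean_subset) < 0.25))  # False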
+     def _get_convergence_type(self) -> str:
+         """Get the convergence type name for display."""
+         class_name = self.__class__.__name__
+         if class_name == "PHIPSEnsembleConvergence":
+             return "PHIPS Ensemble"
+         elif class_name == "PHIPSConvergence":
+             return "PHIPS"
+         else:
+             return class_name
+
+     def _get_detector_angles(self, variable: str) -> np.ndarray:
+         """Get detector angles for PHIPS detectors."""
+         return self.detector_centers
+
+     def _get_phips_stats(self, variable: str) -> Tuple[float, float]:
+         """Get mean and SEM for a single PHIPS detector (not used for array display)."""
+         # This is not used since PHIPS uses array display, but required by interface
+         return 0.0, 0.0
+
+     def _update_convergence_history(self):
+         """Update convergence history with current worst-case SEM."""
+         mean_dscs, sem_dscs = self._calculate_phips_mean_and_sem()
+
+         if len(mean_dscs) > 0:
+             # Find worst-case detector
+             if self.convergable.tolerance_type == "relative":
+                 with np.errstate(divide="ignore", invalid="ignore"):
+                     relative_sem = np.where(
+                         mean_dscs != 0, sem_dscs / np.abs(mean_dscs), np.inf
+                     )
+                 worst_sem = np.max(relative_sem)
+             else:
+                 worst_sem = np.max(sem_dscs)
+
+             self.convergence_history.append(
+                 (self.n_orientations, "phips_dscs", worst_sem)
+             )
+
+     def run(self) -> ConvergenceResults:
+         """
+         Run convergence study until criteria are met or max orientations reached.
+
+         Returns:
+             ConvergenceResults with PHIPS DSCS values
+         """
+         iteration = 0
+         converged = False
+
+         # Create Live context for smooth updating display
+         with self._display.create_live_context() as live:
+             # Show initial display before first batch
+             initial_display = self._display.build_display(
+                 iteration=0,
+                 n_orientations=self.n_orientations,
+                 get_stats=self._get_phips_stats,
+                 get_array_stats=lambda var: self._calculate_phips_mean_and_sem(),
+                 get_bin_labels=self._get_detector_angles,
+                 power_ratio=None,
+                 geom_info=None,
+             )
+             live.update(initial_display)
+
+             while not converged and self.n_orientations < self.max_orientations:
+                 iteration += 1
+
+                 # Create orientations for this batch
+                 orientations = goad.create_uniform_orientation(self.batch_size)
+                 self.settings.orientation = orientations
+
+                 # Run MultiProblem with error handling for bad geometries
+                 try:
+                     mp = goad.MultiProblem(self.settings)
+                     mp.py_solve()
+                 except Exception as e:
+                     # Geometry loading failed (bad faces, degenerate geometry, etc.)
+                     # For single-geometry convergence, we can't skip - must raise error
+                     error_msg = (
+                         f"Failed to initialize MultiProblem with geometry '{self.settings.geom_path}': {e}\n"
+                         f"Please check geometry file for:\n"
+                         f" - Degenerate faces (area = 0)\n"
+                         f" - Non-planar geometry\n"
+                         f" - Faces that are too small\n"
+                         f" - Invalid mesh topology\n"
+                         f" - Geometry file corruption"
+                     )
+                     raise type(e)(error_msg) from e
+
+                 # Update statistics
+                 self._update_statistics(mp.results, self.batch_size)
+
+                 # Update convergence history
+                 self._update_convergence_history()
+
+                 # Check convergence
+                 converged = self._check_convergence()
+
+                 # Update live display
+                 display = self._display.build_display(
+                     iteration=iteration,
+                     n_orientations=self.n_orientations,
+                     get_stats=self._get_phips_stats,
+                     get_array_stats=lambda var: self._calculate_phips_mean_and_sem(),
+                     get_bin_labels=self._get_detector_angles,
+                     power_ratio=None,
+                     geom_info=None,
+                 )
+                 live.update(display)
+
+         # Compute final results
+         mean_dscs, sem_dscs = self._calculate_phips_mean_and_sem()
+
+         # Create results
+         results = ConvergenceResults(
+             converged=converged,
+             n_orientations=self.n_orientations,
+             values={"phips_dscs": mean_dscs},  # Array of 20 values
+             sem_values={"phips_dscs": sem_dscs},  # Array of 20 SEMs
+             mueller_1d=None,
+             mueller_2d=None,
+             convergence_history=self.convergence_history,
+             warning=None
+             if converged
+             else f"Did not converge within {self.max_orientations} orientations",
+         )
+
+         # Print final summary
+         if converged:
+             print(f"\nConverged after {self.n_orientations} orientations.")
+         else:
+             print(
+                 f"\nWarning: Did not converge within {self.max_orientations} orientations"
+             )
+
+         return results
+
+
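A minimal usage sketch for PHIPSConvergence as defined above. The import names, the no-argument Settings() construction, and the way the Custom binning scheme with phips_bins_edges.toml is attached are assumptions about the GOAD API rather than facts from this diff; only the attributes assigned below (geom_path, wavelength) and the PHIPSConvergence arguments appear in the code itself:

import goad_py as goad  # assumed import name for the goad-py package
from goad_py.phips_convergence import PHIPSConvergable, PHIPSConvergence  # assumed module path

settings = goad.Settings()  # hypothetical construction; configure per the GOAD documentation
settings.geom_path = "column.obj"  # example geometry file
settings.wavelength = 0.532  # example wavelength
# settings must also use the Custom binning scheme with the PHIPS bin edges (phips_bins_edges.toml)

study = PHIPSConvergence(
    settings=settings,
    convergable=PHIPSConvergable(tolerance_type="relative", tolerance=0.25),
    batch_size=24,
    min_batches=10,
    max_orientations=100_000,
    log_file="phips_convergence.log",
)
results = study.run()

print(results.converged, results.n_orientations)
print(results.values["phips_dscs"])      # mean DSCS per detector, shape (20,)
print(results.sem_values["phips_dscs"])  # SEM per detector, shape (20,)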
+ class PHIPSEnsembleConvergence(PHIPSConvergence):
+     """
+     Ensemble convergence study for PHIPS detector DSCS values.
+
+     Combines PHIPS detector DSCS tracking with ensemble geometry averaging.
+     Each batch randomly selects a geometry file and runs orientation averaging,
+     allowing convergence of DSCS values averaged over both orientations and geometries.
+     """
+
+     def __init__(
+         self,
+         settings: goad.Settings,
+         convergable: PHIPSConvergable,
+         geom_dir: str,
+         batch_size: int = 24,
+         max_orientations: int = 100_000,
+         min_batches: int = 10,
+         log_file: Optional[str] = None,
+     ):
+         """
+         Initialize a PHIPS ensemble convergence study.
+
+         Args:
+             settings: GOAD settings with Custom binning (geom_path will be overridden)
+             convergable: PHIPS convergence criteria
+             geom_dir: Directory containing .obj geometry files
+             batch_size: Number of orientations per iteration
+             max_orientations: Maximum total orientations before stopping
+             min_batches: Minimum number of batches before allowing convergence
+             log_file: Optional path to log file for convergence progress
+         """
+         # Discover all .obj files in directory
+         geom_path = Path(geom_dir)
+         if not geom_path.exists():
+             raise ValueError(f"Geometry directory does not exist: {geom_dir}")
+
+         if not geom_path.is_dir():
+             raise ValueError(f"Path is not a directory: {geom_dir}")
+
+         self.geom_files = sorted([f.name for f in geom_path.glob("*.obj")])
+
+         if not self.geom_files:
+             raise ValueError(f"No .obj files found in directory: {geom_dir}")
+
+         self.geom_dir = str(geom_path.resolve())
+
+         print(f"Found {len(self.geom_files)} geometry files in {self.geom_dir}")
+
+         # Call parent constructor
+         super().__init__(
+             settings=settings,
+             convergable=convergable,
+             batch_size=batch_size,
+             max_orientations=max_orientations,
+             min_batches=min_batches,
+             log_file=log_file,
+         )
+
+     def run(self) -> ConvergenceResults:
+         """
+         Run ensemble convergence study.
+
+         Each batch iteration randomly selects a geometry file from the
+         ensemble directory before running the orientation averaging.
+
+         Returns:
+             ConvergenceResults with ensemble-averaged PHIPS DSCS values
+         """
+         iteration = 0
+         converged = False
+         skipped_geometries = []  # Geometry files that failed to load (unique names)
+
+         # Create Live context for smooth updating display
+         with self._display.create_live_context() as live:
+             # Show initial display before first batch
+             initial_display = self._display.build_display(
+                 iteration=0,
+                 n_orientations=self.n_orientations,
+                 get_stats=self._get_phips_stats,
+                 get_array_stats=lambda var: self._calculate_phips_mean_and_sem(),
+                 get_bin_labels=self._get_detector_angles,
+                 power_ratio=None,
+                 geom_info=None,
+             )
+             live.update(initial_display)
+
+             while not converged and self.n_orientations < self.max_orientations:
+                 iteration += 1
+
+                 # Randomly select a geometry file for this batch
+                 geom_file = random.choice(self.geom_files)
+                 geom_path = os.path.join(self.geom_dir, geom_file)
+
+                 # Create orientations for this batch
+                 orientations = goad.create_uniform_orientation(self.batch_size)
+
+                 # Update settings with selected geometry and orientations
+                 self.settings.geom_path = geom_path
+                 self.settings.orientation = orientations
+
+                 # Run MultiProblem
+                 try:
+                     mp = goad.MultiProblem(self.settings)
+                     mp.py_solve()
+                 except Exception as e:
+                     # Geometry loading failed (bad faces, degenerate geometry, etc.)
+                     print(f"\nWarning: Skipping geometry '{geom_file}': {e}")
+                     if geom_file not in skipped_geometries:
+                         skipped_geometries.append(geom_file)
+
+                     # Check if all geometries have been skipped
+                     if len(skipped_geometries) >= len(self.geom_files):
+                         raise ValueError(
+                             f"All {len(self.geom_files)} geometry files failed to load. "
+                             "Please check geometry files for degenerate faces, non-planar geometry, "
+                             "or faces that are too small."
+                         )
+
+                     # Skip this iteration without updating statistics
+                     continue
+
+                 # Update statistics
+                 self._update_statistics(mp.results, self.batch_size)
+
+                 # Update convergence history
+                 self._update_convergence_history()
+
+                 # Check convergence
+                 converged = self._check_convergence()
+
+                 # Update live display with geometry info
+                 geom_info = f"Geom: {geom_file}"
+                 display = self._display.build_display(
+                     iteration=iteration,
+                     n_orientations=self.n_orientations,
+                     get_stats=self._get_phips_stats,
+                     get_array_stats=lambda var: self._calculate_phips_mean_and_sem(),
+                     get_bin_labels=self._get_detector_angles,
+                     power_ratio=None,
+                     geom_info=geom_info,
+                 )
+                 live.update(display)
+
+         # Compute final results
+         mean_dscs, sem_dscs = self._calculate_phips_mean_and_sem()
+
+         # Prepare warning message
+         warning = None
+         if not converged:
+             warning = f"Did not converge within {self.max_orientations} orientations"
+
+         # Add skipped geometries info to warning
+         if skipped_geometries:
+             skipped_msg = f"Skipped {len(skipped_geometries)} bad geometries"
+             warning = f"{warning} | {skipped_msg}" if warning else skipped_msg
+
+         # Create results
+         results = ConvergenceResults(
+             converged=converged,
+             n_orientations=self.n_orientations,
+             values={"phips_dscs": mean_dscs},
+             sem_values={"phips_dscs": sem_dscs},
+             mueller_1d=None,
+             mueller_2d=None,
+             convergence_history=self.convergence_history,
+             warning=warning,
+         )
+
+         # Print final summary
+         if converged:
+             print(f"\nEnsemble converged after {self.n_orientations} orientations.")
+         else:
+             print(
+                 f"\nWarning: Did not converge within {self.max_orientations} orientations"
+             )
+
+         # Report skipped geometries if any
+         if skipped_geometries:
+             print(
+                 f"Note: Skipped {len(skipped_geometries)} geometry file(s) due to errors"
+             )
+
+         return results
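For completeness, a sketch of the ensemble variant, which only adds the geom_dir argument and overrides settings.geom_path once per batch; the same API assumptions as in the previous sketch apply:

from goad_py.phips_convergence import PHIPSConvergable, PHIPSEnsembleConvergence  # assumed module path

ensemble = PHIPSEnsembleConvergence(
    settings=settings,  # configured as in the previous sketch; geom_path is overridden per batch
    convergable=PHIPSConvergable(tolerance_type="relative", tolerance=0.25),
    geom_dir="geometries/",  # directory of .obj files for the ensemble
    batch_size=24,
)
results = ensemble.run()
if results.warning:
    print(results.warning)  # e.g. non-convergence or skipped geometry files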