goad-py 0.7.0__cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.

@@ -0,0 +1,608 @@
"""
PHIPS-specific convergence extension for GOAD.

This module provides convergence tracking for PHIPS detector DSCS values,
which requires Custom binning with PHIPS detector geometry and post-processing
to compute mean DSCS at each of the 20 PHIPS detectors.
"""

import os
import random
from dataclasses import dataclass
from pathlib import Path
from typing import Dict, List, Optional, Tuple

import numpy as np
from rich.console import Console

from . import _goad_py as goad
from .convergence import ConvergenceResults
from .convergence_display import (
    ArrayConvergenceVariable,
    ConvergenceDisplay,
)

@dataclass
class PHIPSConvergable:
    """Convergence criteria for PHIPS detector DSCS values."""

    tolerance_type: str = "relative"  # 'relative' or 'absolute'
    tolerance: float = 0.25  # Default 25% relative tolerance
    detector_indices: Optional[List[int]] = (
        None  # Specific detectors to check (None = all)
    )

    def __post_init__(self):
        valid_types = {"relative", "absolute"}
        if self.tolerance_type not in valid_types:
            raise ValueError(
                f"Invalid tolerance_type '{self.tolerance_type}'. Must be one of {valid_types}"
            )

        if self.tolerance <= 0:
            raise ValueError(f"Tolerance must be positive, got {self.tolerance}")

        if self.detector_indices is not None:
            if not isinstance(self.detector_indices, list):
                raise ValueError("detector_indices must be a list of integers")
            if not all(0 <= idx < 20 for idx in self.detector_indices):
                raise ValueError("detector_indices must be in range [0, 19]")

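# A minimal construction sketch for the criteria above (field names and defaults come
# from the dataclass; the specific tolerance and detector indices here are illustrative):
#
#     # Converge when the relative SEM at detectors 5-9 falls below 10%.
#     criteria = PHIPSConvergable(
#         tolerance_type="relative",
#         tolerance=0.10,
#         detector_indices=[5, 6, 7, 8, 9],
#     )
#
#     # Or use an absolute tolerance on the DSCS SEM, checked at all detectors with data.
#     criteria = PHIPSConvergable(tolerance_type="absolute", tolerance=1e-14)
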
class PHIPSConvergence:
    """
    Convergence study for PHIPS detector DSCS values.

    Requires Custom binning with PHIPS detector geometry (phips_bins_edges.toml).
    Computes mean DSCS at each of 20 PHIPS detectors and tracks convergence.
    """

    # PHIPS detector parameters (from phips_detector_angles.py)
    NUM_DETECTORS = 20
    THETA_START = 18.0  # degrees
    THETA_END = 170.0  # degrees
    DETECTOR_WIDTH = 7.0  # degrees (aperture)

    def __init__(
        self,
        settings: goad.Settings,
        convergable: PHIPSConvergable,
        batch_size: int = 24,
        max_orientations: int = 100_000,
        min_batches: int = 10,
    ):
        """
        Initialize a PHIPS convergence study.

        Args:
            settings: GOAD settings with Custom binning scheme
            convergable: PHIPS convergence criteria
            batch_size: Number of orientations per iteration
            max_orientations: Maximum total orientations before stopping
            min_batches: Minimum number of batches before allowing convergence
        """
        self.settings = settings
        # Enable quiet mode to suppress Rust progress bars
        self.settings.quiet = True
        self.convergable = convergable
        self.batch_size = batch_size
        self.max_orientations = max_orientations
        self.min_batches = min_batches

        # Validate inputs
        if batch_size <= 0:
            raise ValueError(f"batch_size must be positive, got {batch_size}")

        if max_orientations <= 0:
            raise ValueError(
                f"max_orientations must be positive, got {max_orientations}"
            )

        if min_batches <= 0:
            raise ValueError(f"min_batches must be positive, got {min_batches}")

        # Initialize tracking variables
        self.n_orientations = 0
        self.convergence_history = []

        # Batch-based statistics tracking
        self.batch_data = []  # List of batch statistics

        # PHIPS detector centers (20 detectors from 18° to 170°)
        self.detector_centers = np.linspace(
            self.THETA_START, self.THETA_END, self.NUM_DETECTORS
        )
        self.half_width = self.DETECTOR_WIDTH / 2.0

        # Accumulated PHIPS DSCS for final average
        self.phips_dscs_sum = None

        # Rich console
        self._console = Console()

        # Create display variable for PHIPS DSCS
        display_variable = ArrayConvergenceVariable(
            name="phips_dscs",
            tolerance=convergable.tolerance,
            tolerance_type=convergable.tolerance_type,
            indices=convergable.detector_indices,
        )

        # Initialize display system
        self._display = ConvergenceDisplay(
            variables=[display_variable],
            batch_size=self.batch_size,
            min_batches=self.min_batches,
            convergence_type=self._get_convergence_type(),
            console=self._console,
        )

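    # A minimal usage sketch (assumes `settings` is a goad.Settings instance configured
    # with the Custom binning scheme and PHIPS detector geometry described in the class
    # docstring; the tolerance and batch size below are illustrative):
    #
    #     criteria = PHIPSConvergable(tolerance_type="relative", tolerance=0.25)
    #     study = PHIPSConvergence(settings, criteria, batch_size=24, min_batches=10)
    #     results = study.run()
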
    def _compute_phips_dscs_from_mueller2d(self, results: goad.Results) -> np.ndarray:
        """
        Compute mean DSCS at each of 20 PHIPS detectors from Custom binning results.

        Args:
            results: Results from MultiProblem with Custom binning

        Returns:
            Array of shape (20,) with mean DSCS per detector (NaN if no bins in detector)
        """
        # Get mueller_2d from Custom binning
        mueller_2d = np.array(results.mueller)  # Shape: (n_custom_bins, 16)
        bins_2d = results.bins  # List of (theta_center, phi_center) tuples

        # Extract theta angles from bin centers
        theta_angles = np.array([bin_tuple[0] for bin_tuple in bins_2d])

        # Extract S11 and convert to DSCS
        s11_values = mueller_2d[:, 0]
        k = 2 * np.pi / self.settings.wavelength
        dscs_conversion_factor = 1e-12 / k**2
        dscs_values = s11_values * dscs_conversion_factor

        # Compute mean DSCS for each detector
        detector_dscs = []
        for bin_center_theta in self.detector_centers:
            lower_bound = bin_center_theta - self.half_width
            upper_bound = bin_center_theta + self.half_width

            # Find custom bins within this detector's angular window
            indices = np.where(
                (theta_angles >= lower_bound) & (theta_angles < upper_bound)
            )[0]

            if len(indices) > 0:
                # Mean DSCS over bins in this detector window
                mean_dscs = np.mean(dscs_values[indices])
                detector_dscs.append(mean_dscs)
            else:
                # No bins in this detector window
                detector_dscs.append(np.nan)

        return np.array(detector_dscs)  # Shape: (20,)

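    # Worked sketch of the conversion above: the wavenumber is k = 2 * pi / wavelength,
    # and each S11 value is scaled by 1e-12 / k**2 to give a DSCS. The 20 detector
    # centres are np.linspace(18.0, 170.0, 20), i.e. spaced 8 degrees apart, and each
    # detector averages the custom bins whose theta centre lies within +/- 3.5 degrees
    # (half of the 7 degree aperture) of the detector centre. For a single bin value
    # s11 (illustrative wavelength, in whatever units the settings use):
    #
    #     k = 2 * np.pi / 0.532
    #     dscs = s11 * 1e-12 / k**2
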
    def _update_statistics(self, results: goad.Results, batch_size: int):
        """
        Update statistics with new batch results.

        Args:
            results: Results from a MultiProblem run
            batch_size: Number of orientations in this batch
        """
        # Compute PHIPS DSCS for this batch
        phips_dscs = self._compute_phips_dscs_from_mueller2d(results)

        # Store batch data
        batch_info = {
            "batch_size": batch_size,
            "phips_dscs": phips_dscs,  # Shape: (20,)
        }
        self.batch_data.append(batch_info)

        # Accumulate for final average
        if self.phips_dscs_sum is None:
            self.phips_dscs_sum = phips_dscs * batch_size
        else:
            self.phips_dscs_sum += phips_dscs * batch_size

        # Update total orientation count
        self.n_orientations += batch_size

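    # The running sum above stores the sum over batches of (batch mean * batch size);
    # dividing by the total number of orientations recovers the same weighted mean that
    # _calculate_phips_mean_and_sem computes from the per-batch records:
    #
    #     overall_mean = self.phips_dscs_sum / self.n_orientations  # shape (20,)
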
    def _calculate_phips_mean_and_sem(self) -> Tuple[np.ndarray, np.ndarray]:
        """
        Calculate mean and SEM arrays for PHIPS DSCS across detectors.

        Returns:
            Tuple of (mean_array, sem_array) where each is shape (20,)
        """
        if not self.batch_data:
            return np.full(self.NUM_DETECTORS, np.nan), np.full(
                self.NUM_DETECTORS, np.inf
            )

        # Extract batch values: shape (n_batches, 20)
        batch_arrays = np.array([batch["phips_dscs"] for batch in self.batch_data])
        batch_sizes = np.array([batch["batch_size"] for batch in self.batch_data])

        if len(batch_arrays) < 2:
            # Can't estimate variance with < 2 batches
            mean_array = batch_arrays[0]
            sem_array = np.full(self.NUM_DETECTORS, np.inf)
            return mean_array, sem_array

        # Weighted mean per detector; np.average propagates NaN for detectors with no
        # data, and those detectors are excluded later in _check_convergence
        mean_array = np.average(
            batch_arrays, axis=0, weights=batch_sizes
        )  # Shape: (20,)

        # Variance between batch means at each detector (ignoring NaNs)
        batch_means_variance = np.nanvar(batch_arrays, axis=0, ddof=1)  # Shape: (20,)

        # Scale up to estimate population variance
        avg_batch_size = np.mean(batch_sizes)
        estimated_population_variance = batch_means_variance * avg_batch_size

        # Calculate SEM for total sample
        total_n = np.sum(batch_sizes)
        sem_array = np.sqrt(
            estimated_population_variance / (total_n - 1)
        )  # Shape: (20,)

        return mean_array, sem_array

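    # Sketch of the batch-based SEM estimate above (numbers are illustrative): the
    # variance of the per-batch means is scaled by the average batch size to approximate
    # the per-orientation variance, and the SEM of the pooled sample of N orientations
    # is then sqrt(population_variance / (N - 1)). With 10 batches of 24 orientations:
    #
    #     var_between = np.nanvar(batch_arrays, axis=0, ddof=1)  # shape (20,)
    #     pop_var = var_between * 24.0
    #     sem = np.sqrt(pop_var / (240 - 1))
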
    def _check_convergence(self) -> bool:
        """
        Check if PHIPS DSCS values have converged.

        Returns:
            True if converged, False otherwise
        """
        if len(self.batch_data) < self.min_batches:
            return False

        mean_dscs, sem_dscs = self._calculate_phips_mean_and_sem()

        # Determine which detectors to check
        if self.convergable.detector_indices is not None:
            check_indices = self.convergable.detector_indices
        else:
            # Check all detectors that have data (not NaN)
            check_indices = np.where(~np.isnan(mean_dscs))[0]

        if len(check_indices) == 0:
            return False  # No valid detectors to check

        # Extract values for detectors to check
        mean_subset = mean_dscs[check_indices]
        sem_subset = sem_dscs[check_indices]

        # Check convergence based on tolerance type
        if self.convergable.tolerance_type == "relative":
            # Relative SEM
            with np.errstate(divide="ignore", invalid="ignore"):
                relative_sem = np.where(
                    mean_subset != 0, sem_subset / np.abs(mean_subset), np.inf
                )
            converged = np.all(relative_sem < self.convergable.tolerance)
        else:  # absolute
            converged = np.all(sem_subset < self.convergable.tolerance)

        return converged

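    # Example of the criterion above with the default relative tolerance of 0.25
    # (values illustrative): a detector with mean DSCS 4.0e-13 and SEM 0.8e-13 has
    # relative SEM 0.2 < 0.25 and passes; convergence is reported only once every
    # checked detector passes.
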
    def _get_convergence_type(self) -> str:
        """Get the convergence type name for display."""
        class_name = self.__class__.__name__
        if class_name == "PHIPSEnsembleConvergence":
            return "PHIPS Ensemble"
        elif class_name == "PHIPSConvergence":
            return "PHIPS"
        else:
            return class_name

    def _get_detector_angles(self, variable: str) -> np.ndarray:
        """Get detector angles for PHIPS detectors."""
        return self.detector_centers

    def _get_phips_stats(self, variable: str) -> Tuple[float, float]:
        """Get mean and SEM for a single PHIPS detector (not used for array display)."""
        # This is not used since PHIPS uses array display, but required by interface
        return 0.0, 0.0

    def _update_convergence_history(self):
        """Update convergence history with current worst-case SEM."""
        mean_dscs, sem_dscs = self._calculate_phips_mean_and_sem()

        if len(mean_dscs) > 0:
            # Find worst-case detector
            if self.convergable.tolerance_type == "relative":
                with np.errstate(divide="ignore", invalid="ignore"):
                    relative_sem = np.where(
                        mean_dscs != 0, sem_dscs / np.abs(mean_dscs), np.inf
                    )
                worst_sem = np.max(relative_sem)
            else:
                worst_sem = np.max(sem_dscs)

            self.convergence_history.append(
                (self.n_orientations, "phips_dscs", worst_sem)
            )

    def run(self) -> ConvergenceResults:
        """
        Run convergence study until criteria are met or max orientations reached.

        Returns:
            ConvergenceResults with PHIPS DSCS values
        """
        iteration = 0
        converged = False

        # Create Live context for smooth updating display
        with self._display.create_live_context() as live:
            # Show initial display before first batch
            initial_display = self._display.build_display(
                iteration=0,
                n_orientations=self.n_orientations,
                get_stats=self._get_phips_stats,
                get_array_stats=lambda var: self._calculate_phips_mean_and_sem(),
                get_bin_labels=self._get_detector_angles,
                power_ratio=None,
                geom_info=None,
            )
            live.update(initial_display)

            while not converged and self.n_orientations < self.max_orientations:
                iteration += 1

                # Create orientations for this batch
                orientations = goad.create_uniform_orientation(self.batch_size)
                self.settings.orientation = orientations

                # Run MultiProblem with error handling for bad geometries
                try:
                    mp = goad.MultiProblem(self.settings)
                    mp.py_solve()
                except Exception as e:
                    # Geometry loading failed (bad faces, degenerate geometry, etc.)
                    # For single-geometry convergence, we can't skip - must raise error
                    error_msg = (
                        f"Failed to initialize MultiProblem with geometry '{self.settings.geom_path}': {e}\n"
                        f"Please check geometry file for:\n"
                        f" - Degenerate faces (area = 0)\n"
                        f" - Non-planar geometry\n"
                        f" - Faces that are too small\n"
                        f" - Invalid mesh topology\n"
                        f" - Geometry file corruption"
                    )
                    raise type(e)(error_msg) from e

                # Update statistics
                self._update_statistics(mp.results, self.batch_size)

                # Update convergence history
                self._update_convergence_history()

                # Check convergence
                converged = self._check_convergence()

                # Update live display
                display = self._display.build_display(
                    iteration=iteration,
                    n_orientations=self.n_orientations,
                    get_stats=self._get_phips_stats,
                    get_array_stats=lambda var: self._calculate_phips_mean_and_sem(),
                    get_bin_labels=self._get_detector_angles,
                    power_ratio=None,
                    geom_info=None,
                )
                live.update(display)

        # Compute final results
        mean_dscs, sem_dscs = self._calculate_phips_mean_and_sem()

        # Create results
        results = ConvergenceResults(
            converged=converged,
            n_orientations=self.n_orientations,
            values={"phips_dscs": mean_dscs},  # Array of 20 values
            sem_values={"phips_dscs": sem_dscs},  # Array of 20 SEMs
            mueller_1d=None,
            mueller_2d=None,
            convergence_history=self.convergence_history,
            warning=None
            if converged
            else f"Did not converge within {self.max_orientations} orientations",
        )

        # Print final summary
        if converged:
            print(f"\nConverged after {self.n_orientations} orientations.")
        else:
            print(
                f"\nWarning: Did not converge within {self.max_orientations} orientations"
            )

        return results

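# Reading the results of a single-geometry study (a minimal sketch; assumes a configured
# goad.Settings as above, and the field names follow the ConvergenceResults construction
# in run()):
#
#     results = PHIPSConvergence(settings, PHIPSConvergable()).run()
#     mean_dscs = results.values["phips_dscs"]     # shape (20,), NaN where a detector saw no bins
#     sem_dscs = results.sem_values["phips_dscs"]  # shape (20,)
#     if not results.converged:
#         print(results.warning)
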
class PHIPSEnsembleConvergence(PHIPSConvergence):
    """
    Ensemble convergence study for PHIPS detector DSCS values.

    Combines PHIPS detector DSCS tracking with ensemble geometry averaging.
    Each batch randomly selects a geometry file and runs orientation averaging,
    allowing convergence of DSCS values averaged over both orientations and geometries.
    """

    def __init__(
        self,
        settings: goad.Settings,
        convergable: PHIPSConvergable,
        geom_dir: str,
        batch_size: int = 24,
        max_orientations: int = 100_000,
        min_batches: int = 10,
    ):
        """
        Initialize a PHIPS ensemble convergence study.

        Args:
            settings: GOAD settings with Custom binning (geom_path will be overridden)
            convergable: PHIPS convergence criteria
            geom_dir: Directory containing .obj geometry files
            batch_size: Number of orientations per iteration
            max_orientations: Maximum total orientations before stopping
            min_batches: Minimum number of batches before allowing convergence
        """
        # Discover all .obj files in directory
        geom_path = Path(geom_dir)
        if not geom_path.exists():
            raise ValueError(f"Geometry directory does not exist: {geom_dir}")

        if not geom_path.is_dir():
            raise ValueError(f"Path is not a directory: {geom_dir}")

        self.geom_files = sorted([f.name for f in geom_path.glob("*.obj")])

        if not self.geom_files:
            raise ValueError(f"No .obj files found in directory: {geom_dir}")

        self.geom_dir = str(geom_path.resolve())

        print(f"Found {len(self.geom_files)} geometry files in {self.geom_dir}")

        # Call parent constructor
        super().__init__(
            settings=settings,
            convergable=convergable,
            batch_size=batch_size,
            max_orientations=max_orientations,
            min_batches=min_batches,
        )

    def run(self) -> ConvergenceResults:
        """
        Run ensemble convergence study.

        Each batch iteration randomly selects a geometry file from the
        ensemble directory before running the orientation averaging.

        Returns:
            ConvergenceResults with ensemble-averaged PHIPS DSCS values
        """
        iteration = 0
        converged = False
        skipped_geometries = []  # Track skipped geometry files

        # Create Live context for smooth updating display
        with self._display.create_live_context() as live:
            # Show initial display before first batch
            initial_display = self._display.build_display(
                iteration=0,
                n_orientations=self.n_orientations,
                get_stats=self._get_phips_stats,
                get_array_stats=lambda var: self._calculate_phips_mean_and_sem(),
                get_bin_labels=self._get_detector_angles,
                power_ratio=None,
                geom_info=None,
            )
            live.update(initial_display)

            while not converged and self.n_orientations < self.max_orientations:
                iteration += 1

                # Randomly select a geometry file for this batch
                geom_file = random.choice(self.geom_files)
                geom_path = os.path.join(self.geom_dir, geom_file)

                # Create orientations for this batch
                orientations = goad.create_uniform_orientation(self.batch_size)

                # Update settings with selected geometry and orientations
                self.settings.geom_path = geom_path
                self.settings.orientation = orientations

                # Run MultiProblem
                try:
                    mp = goad.MultiProblem(self.settings)
                    mp.py_solve()
                except Exception as e:
                    # Geometry loading failed (bad faces, degenerate geometry, etc.)
                    print(f"\nWarning: Skipping geometry '{geom_file}': {e}")
                    skipped_geometries.append(geom_file)

                    # Check if all geometries have been skipped
                    if len(skipped_geometries) >= len(self.geom_files):
                        raise ValueError(
                            f"All {len(self.geom_files)} geometry files failed to load. "
                            "Please check geometry files for degenerate faces, non-planar geometry, "
                            "or faces that are too small."
                        )

                    # Skip this iteration without updating statistics
                    continue

                # Update statistics
                self._update_statistics(mp.results, self.batch_size)

                # Update convergence history
                self._update_convergence_history()

                # Check convergence
                converged = self._check_convergence()

                # Update live display with geometry info
                geom_info = f"Geom: {geom_file}"
                display = self._display.build_display(
                    iteration=iteration,
                    n_orientations=self.n_orientations,
                    get_stats=self._get_phips_stats,
                    get_array_stats=lambda var: self._calculate_phips_mean_and_sem(),
                    get_bin_labels=self._get_detector_angles,
                    power_ratio=None,
                    geom_info=geom_info,
                )
                live.update(display)

        # Compute final results
        mean_dscs, sem_dscs = self._calculate_phips_mean_and_sem()

        # Prepare warning message
        warning = None
        if not converged:
            warning = f"Did not converge within {self.max_orientations} orientations"

        # Add skipped geometries info to warning
        if skipped_geometries:
            skipped_msg = f"Skipped {len(skipped_geometries)} bad geometries"
            warning = f"{warning} | {skipped_msg}" if warning else skipped_msg

        # Create results
        results = ConvergenceResults(
            converged=converged,
            n_orientations=self.n_orientations,
            values={"phips_dscs": mean_dscs},
            sem_values={"phips_dscs": sem_dscs},
            mueller_1d=None,
            mueller_2d=None,
            convergence_history=self.convergence_history,
            warning=warning,
        )

        # Print final summary
        if converged:
            print(f"\nEnsemble converged after {self.n_orientations} orientations.")
        else:
            print(
                f"\nWarning: Did not converge within {self.max_orientations} orientations"
            )

        # Report skipped geometries if any
        if skipped_geometries:
            print(
                f"Note: Skipped {len(skipped_geometries)} geometry file(s) due to errors"
            )

        return results
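

# Ensemble usage sketch (the directory path is illustrative; `settings.geom_path` is
# overridden each batch with a randomly chosen .obj file from the directory):
#
#     criteria = PHIPSConvergable(tolerance_type="relative", tolerance=0.25)
#     ensemble = PHIPSEnsembleConvergence(
#         settings,
#         criteria,
#         geom_dir="path/to/geometries",
#         batch_size=24,
#     )
#     results = ensemble.run()
#     print(results.warning)  # reports non-convergence and/or skipped geometries, if any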