pyelq 1.1.4__py3-none-any.whl → 1.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. pyelq/__init__.py +1 -0
  2. pyelq/component/__init__.py +1 -0
  3. pyelq/component/background.py +19 -13
  4. pyelq/component/component.py +2 -1
  5. pyelq/component/error_model.py +2 -1
  6. pyelq/component/offset.py +2 -1
  7. pyelq/component/source_model.py +78 -29
  8. pyelq/coordinate_system.py +1 -0
  9. pyelq/data_access/__init__.py +1 -0
  10. pyelq/data_access/data_access.py +1 -1
  11. pyelq/dispersion_model/__init__.py +4 -3
  12. pyelq/dispersion_model/dispersion_model.py +202 -0
  13. pyelq/dispersion_model/finite_volume.py +1084 -0
  14. pyelq/dispersion_model/gaussian_plume.py +8 -189
  15. pyelq/dispersion_model/site_layout.py +97 -0
  16. pyelq/dlm.py +11 -15
  17. pyelq/gas_species.py +1 -0
  18. pyelq/meteorology/__init__.py +6 -0
  19. pyelq/{meteorology.py → meteorology/meteorology.py} +388 -387
  20. pyelq/meteorology/meteorology_windfield.py +180 -0
  21. pyelq/model.py +2 -1
  22. pyelq/plotting/__init__.py +1 -0
  23. pyelq/plotting/plot.py +1 -0
  24. pyelq/preprocessing.py +98 -38
  25. pyelq/sensor/__init__.py +1 -0
  26. pyelq/sensor/sensor.py +70 -5
  27. pyelq/source_map.py +1 -0
  28. pyelq/support_functions/__init__.py +1 -0
  29. pyelq/support_functions/post_processing.py +1 -0
  30. pyelq/support_functions/spatio_temporal_interpolation.py +1 -0
  31. {pyelq-1.1.4.dist-info → pyelq-1.2.0.dist-info}/METADATA +45 -44
  32. pyelq-1.2.0.dist-info/RECORD +37 -0
  33. {pyelq-1.1.4.dist-info → pyelq-1.2.0.dist-info}/WHEEL +1 -1
  34. pyelq-1.1.4.dist-info/RECORD +0 -32
  35. {pyelq-1.1.4.dist-info → pyelq-1.2.0.dist-info/licenses}/LICENSE.md +0 -0
  36. {pyelq-1.1.4.dist-info → pyelq-1.2.0.dist-info/licenses}/LICENSES/Apache-2.0.txt +0 -0
pyelq/__init__.py CHANGED
@@ -2,6 +2,7 @@
 #
 # SPDX-License-Identifier: Apache-2.0
 """Main pyELQ module."""
+
 __all__ = [
     "component",
     "data_access",
pyelq/component/__init__.py CHANGED
@@ -3,4 +3,5 @@
 # SPDX-License-Identifier: Apache-2.0
 
 """Components Module."""
+
 __all__ = ["background", "component", "error_model", "offset", "source_model"]
pyelq/component/background.py CHANGED
@@ -1,7 +1,6 @@
 # SPDX-FileCopyrightText: 2024 Shell Global Solutions International B.V. All Rights Reserved.
 #
 # SPDX-License-Identifier: Apache-2.0
-
 """Model components for background modelling."""
 
 from abc import abstractmethod
@@ -22,7 +21,7 @@ from sklearn.neighbors import NearestNeighbors
 from pyelq.component.component import Component
 from pyelq.coordinate_system import Coordinate
 from pyelq.gas_species import GasSpecies
-from pyelq.meteorology import MeteorologyGroup
+from pyelq.meteorology.meteorology import MeteorologyGroup
 from pyelq.sensor.beam import Beam
 from pyelq.sensor.sensor import SensorGroup
 
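Note: pyelq/meteorology.py becomes the pyelq/meteorology package in 1.2.0 (file 19 in the list above), so this import-path change repeats throughout the diff. A minimal sketch of the updated import, assuming downstream code only needs the classes shown here:

```python
# pyELQ >= 1.2.0: meteorology is now a package; the classes sit one level deeper.
from pyelq.meteorology.meteorology import Meteorology, MeteorologyGroup

# pyELQ <= 1.1.4 form, replaced throughout this diff:
# from pyelq.meteorology import Meteorology, MeteorologyGroup
```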
@@ -51,6 +50,7 @@ class Background(Component):
         initial_precision (float): initial value for the scalar precision parameter.
         basis_matrix (sparse.csr_array): [n_obs x n_time] matrix mapping the background model parameters on to the
             observations.
+        precision_time_0 (float): precision relating to the first time stamp in the model. Defaults to 0.01.
 
     """
 
@@ -65,6 +65,7 @@ class Background(Component):
     prior_precision_rate: float = 1e-3
     initial_precision: float = 1.0
     basis_matrix: sparse.csr_array = field(init=False)
+    precision_time_0: float = field(init=False, default=0.01)
 
     @abstractmethod
     def initialise(self, sensor_object: SensorGroup, meteorology: MeteorologyGroup, gas_species: GasSpecies):
@@ -168,6 +169,9 @@ class TemporalBackground(Background):
     def initialise(self, sensor_object: SensorGroup, meteorology: MeteorologyGroup, gas_species: GasSpecies):
        """Create temporal background model from sensor, meteorology and gas species inputs.
 
+        The precision matrix is made to be full rank by adjusting the precision at the first time point using the
+        precision_time_0 attribute.
+
         Args:
             sensor_object (SensorGroup): sensor data object.
             meteorology (MeteorologyGroup): meteorology data object.
@@ -180,6 +184,8 @@ class TemporalBackground(Background):
         self.n_parameter = len(self.time)
         self.basis_matrix = sparse.csr_array((np.ones(self.n_obs), (np.array(range(self.n_obs)), unique_inverse)))
         self.precision_matrix = gmrf.precision_temporal(time=self.time)
+        lam = self.precision_matrix[0, 0]
+        self.precision_matrix[0, 0] = lam * (2.0 - lam / (self.precision_time_0 + lam))
         if self.mean_bg is None:
             self.mean_bg = gas_species.global_background
 
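The two added lines mirror the adjustment that SpatioTemporalBackground already applies (visible as context further down); the precision_time_0 attribute now lives on the Background base class. A minimal numeric sketch, not pyELQ code, assuming gmrf.precision_temporal returns something like a first-order random-walk precision matrix:

```python
import numpy as np

# Sketch (not pyELQ code): a first-order random-walk precision matrix has rows
# that sum to zero, so it is singular (rank n - 1).
n, scale, precision_time_0 = 5, 2.0, 0.01
precision = scale * (2.0 * np.eye(n) - np.eye(n, k=1) - np.eye(n, k=-1))
precision[0, 0] = scale
precision[-1, -1] = scale
print(np.linalg.matrix_rank(precision))  # 4: rank deficient

# The adjustment in the hunk adds lam * precision_time_0 / (precision_time_0 + lam)
# to the first diagonal entry, anchoring the first time point and restoring full rank.
lam = precision[0, 0]
precision[0, 0] = lam * (2.0 - lam / (precision_time_0 + lam))
print(np.linalg.matrix_rank(precision))  # 5: full rank
```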
@@ -193,8 +199,8 @@ class SpatioTemporalBackground(Background):
     The background parameter is an [n_location * n_time x 1] (if self.spatial_dependence is True) or an [n_time x 1]
     vector (if self.spatial_dependence is False). In the spatio-temporal case, the background vector is assumed to
     unwrap over space and time as follows:
-    bg = [b_1(t_1), b_2(t_1),..., b_nlct(t_1),...,b_1(t_k),..., b_nlct(t_k),...].T
-    where nlct is the number of sensor locations.
+    bg = [b_1(t_1), b_2(t_1),..., b_n_lct(t_1),...,b_1(t_k),..., b_n_lct(t_k),...].T
+    where n_lct is the number of sensor locations.
     This unwrapping mechanism is chosen as it greatly speeds up the sparse matrix operations in the solver (vs. the
     alternative).
 
@@ -226,7 +232,6 @@ class SpatioTemporalBackground(Background):
         spatial_precision_matrix (np.ndarray): spatial component of the precision matrix. The full model precision
             matrix is the Kronecker product of this matrix with the self.temporal_precision_matrix. Simply set to 1 if
             self.spatial_dependence is False.
-        precision_time_0 (float): precision relating to the first time stamp in the model. Defaults to 0.01.
 
     """
 
@@ -238,7 +243,6 @@ class SpatioTemporalBackground(Background):
     location: Coordinate = field(init=False)
     temporal_precision_matrix: Union[np.ndarray, sparse.csc_matrix] = field(init=False)
     spatial_precision_matrix: np.ndarray = field(init=False)
-    precision_time_0: float = field(init=False, default=0.01)
 
     def initialise(self, sensor_object: SensorGroup, meteorology: MeteorologyGroup, gas_species: GasSpecies):
         """Take data inputs and extract relevant properties.
@@ -303,10 +307,9 @@ class SpatioTemporalBackground(Background):
             self.time = pd.array(np.unique(sensor_object.time), dtype="datetime64[ns]")
             self.n_time = len(self.time)
         else:
-            self.time = pd.array(
-                pd.date_range(start=np.min(sensor_object.time), end=np.max(sensor_object.time), periods=self.n_time),
-                dtype="datetime64[ns]",
-            )
+            self.time = pd.date_range(
+                start=np.min(sensor_object.time), end=np.max(sensor_object.time), periods=self.n_time
+            ).array
 
     def make_spatial_knots(self, sensor_object: SensorGroup):
         """Create the spatial grid for the model.
@@ -333,6 +336,9 @@ class SpatioTemporalBackground(Background):
 
         Defined as the Kronecker product of the temporal precision matrix and the spatial precision matrix.
 
+        The precision matrix is made to be full rank by adjusting the precision at the first time point using the
+        precision_time_0 attribute.
+
         """
         self.temporal_precision_matrix = gmrf.precision_temporal(time=self.time)
         lam = self.temporal_precision_matrix[0, 0]
@@ -386,6 +392,6 @@ class SpatioTemporalBackground(Background):
                 self.location.north[k] = np.mean(sensor.location.to_enu().north, axis=0)
                 self.location.up[k] = np.mean(sensor.location.to_enu().up, axis=0)
             else:
-                self.location.east[k] = sensor.location.to_enu().east
-                self.location.north[k] = sensor.location.to_enu().north
-                self.location.up[k] = sensor.location.to_enu().up
+                self.location.east[k] = sensor.location.to_enu().east.item()
+                self.location.north[k] = sensor.location.to_enu().north.item()
+                self.location.up[k] = sensor.location.to_enu().up.item()
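A plausible motivation for the new .item() calls (an assumption, not stated in the diff): NumPy 1.25+ deprecates implicitly converting a length-1 array to a scalar, which is what assigning an array into a single element position relies on. A minimal sketch:

```python
import numpy as np

east = np.zeros(3)
single_sensor_east = np.array([12.5])   # e.g. the ENU east coordinate of one sensor
east[0] = single_sensor_east.item()     # explicit, warning-free scalar extraction
```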
pyelq/component/component.py CHANGED
@@ -3,13 +3,14 @@
 # SPDX-License-Identifier: Apache-2.0
 
 """Superclass for model components."""
+
 from abc import ABC, abstractmethod
 from dataclasses import dataclass
 
 from openmcmc.model import Model
 
 from pyelq.gas_species import GasSpecies
-from pyelq.meteorology import MeteorologyGroup
+from pyelq.meteorology.meteorology import MeteorologyGroup
 from pyelq.sensor.sensor import SensorGroup
 
 
pyelq/component/error_model.py CHANGED
@@ -4,6 +4,7 @@
 
 # -*- coding: utf-8 -*-
 """Error model module."""
+
 from dataclasses import dataclass, field
 from typing import TYPE_CHECKING, Union
 
@@ -15,7 +16,7 @@ from openmcmc.sampler.sampler import NormalGamma
 
 from pyelq.component.component import Component
 from pyelq.gas_species import GasSpecies
-from pyelq.meteorology import MeteorologyGroup
+from pyelq.meteorology.meteorology import MeteorologyGroup
 from pyelq.sensor.sensor import Sensor, SensorGroup
 
 if TYPE_CHECKING:
pyelq/component/offset.py CHANGED
@@ -4,6 +4,7 @@
 
 # -*- coding: utf-8 -*-
 """Offset module."""
+
 from dataclasses import dataclass, field
 from typing import TYPE_CHECKING, Union
 
@@ -17,7 +18,7 @@ from scipy import sparse
 
 from pyelq.component.component import Component
 from pyelq.gas_species import GasSpecies
-from pyelq.meteorology import Meteorology
+from pyelq.meteorology.meteorology import Meteorology
 from pyelq.sensor.sensor import Sensor, SensorGroup
 
 if TYPE_CHECKING:
pyelq/component/source_model.py CHANGED
@@ -34,7 +34,7 @@ from pyelq.component.component import Component
 from pyelq.coordinate_system import ENU
 from pyelq.dispersion_model.gaussian_plume import GaussianPlume
 from pyelq.gas_species import GasSpecies
-from pyelq.meteorology import Meteorology
+from pyelq.meteorology.meteorology import Meteorology
 from pyelq.sensor.sensor import SensorGroup
 from pyelq.source_map import SourceMap
 
@@ -221,6 +221,7 @@ class NullGrouping(SourceGrouping):
 
         Args:
             store (dict): dictionary containing samples from the MCMC.
+
         """
         self.number_on_sources = np.count_nonzero(np.logical_not(np.isnan(store[self.map["source"]])), axis=0)
 
@@ -482,6 +483,8 @@ class SourceModel(Component, SourceGrouping, SourceDistribution):
 
         reversible_jump (bool): logical indicating whether the reversible jump algorithm for estimation of the number
             of sources and their locations should be run. Defaults to False.
+        distribution_number_sources (str): distribution for the number of sources in the solution. Can be either
+            "Poisson" or "Uniform". Defaults to "Poisson".
         random_walk_step_size (np.ndarray): (3 x 1) array specifying the standard deviations of the distributions
             from which the random walk sampler draws new source locations. Defaults to np.array([1.0, 1.0, 0.1]).
         site_limits (np.ndarray): (3 x 2) array specifying the lower (column 0) and upper (column 1) limits of the
@@ -489,7 +492,7 @@ class SourceModel(Component, SourceGrouping, SourceDistribution):
             the solution).
         rate_num_sources (int): specification for the parameter for the Poisson prior distribution for the total number
             of sources. Only relevant for cases where reversible_jump == True (where the number of sources in the
-            solution can change).
+            solution can change). Unused in the case of a Uniform prior (self.distribution_number_sources == "Uniform").
         n_sources_max (int): maximum number of sources that can feature in the solution. Only relevant for cases where
             reversible_jump == True (where the number of sources in the solution can change).
         emission_proposal_std (float): standard deviation of the truncated Gaussian distribution used to propose the
@@ -528,6 +531,7 @@
     gas_species: GasSpecies = field(init=False, default=None)
 
     reversible_jump: bool = False
+    distribution_number_sources: str = "Poisson"
    random_walk_step_size: np.ndarray = field(default_factory=lambda: np.array([1.0, 1.0, 0.1], ndmin=2).T)
     site_limits: np.ndarray = None
     rate_num_sources: int = 5
@@ -553,9 +557,9 @@
     def __post_init__(self):
         """Post-initialisation of the class.
 
-        This function is called after the class has been initialised,
-        and is used to set up the mapping dictionary for the class by applying the
-        append_string function to the mapping dictionary.
+        This function is called after the class has been initialised, and is used to set up the mapping dictionary for
+        the class by applying the append_string function to the mapping dictionary.
+
         """
         if self.label_string is not None:
             self.append_string(self.label_string)
@@ -589,9 +593,10 @@
         self.coupling = self.dispersion_model.compute_coupling(
             sensor_object, meteorology, gas_species, output_stacked=True
         )
+
+        self.sensor_object = sensor_object
         self.screen_coverage()
         if self.reversible_jump:
-            self.sensor_object = sensor_object
             self.meteorology = meteorology
             self.gas_species = gas_species
 
@@ -630,14 +635,45 @@
 
     def screen_coverage(self):
         """Screen the initial source map for coverage."""
-        in_coverage_area = self.dispersion_model.compute_coverage(
-            self.coupling, coverage_threshold=self.coverage_threshold, threshold_function=self.threshold_function
-        )
+        in_coverage_area = self.compute_coverage(self.coupling)
         self.coupling = self.coupling[:, in_coverage_area]
         all_locations = self.dispersion_model.source_map.location.to_array()
         screened_locations = all_locations[in_coverage_area, :]
         self.dispersion_model.source_map.location.from_array(screened_locations)
 
+    def compute_coverage(self, couplings: np.ndarray, **kwargs) -> np.ndarray:
+        """Returns a logical vector that indicates which sources in the couplings are, or are not, within the coverage.
+
+        The 'coverage' is the area inside which all sources are well covered by wind data. E.g. If wind exclusively
+        blows towards East, then all sources to the East of any sensor are 'invisible', and are not within the coverage.
+
+        Couplings are returned in hr/kg. Some threshold function defines the largest allowed coupling value. This is
+        used to calculate estimated emission rates in kg/hr. Any emissions which are greater than the value of
+        'self.coverage_threshold' are defined as not within the coverage.
+
+        If sensor_object.source_on is being used, only the parts where the coupling is computed are used in the coverage
+        check. This avoids threshold_function being affected by large amounts of zero values.
+
+        Args:
+            couplings (np.ndarray): Array of coupling values. Dimensions: n_data points x n_sources.
+            kwargs (dict, optional): Keyword arguments required for the threshold function.
+
+        Returns:
+            coverage (np.ndarray): A logical array specifying which sources are within the coverage.
+
+        """
+        if self.sensor_object.source_on is not None:
+            couplings = deepcopy(couplings)
+            index_keep = self.sensor_object.source_on > 0
+            couplings = couplings[index_keep]
+
+        coupling_threshold = self.threshold_function(couplings, **kwargs)
+        no_warning_threshold = np.where(coupling_threshold <= 1e-100, 1, coupling_threshold)
+        no_warning_estimated_emission_rates = np.where(coupling_threshold <= 1e-100, np.inf, 1 / no_warning_threshold)
+        coverage = no_warning_estimated_emission_rates < self.coverage_threshold
+
+        return coverage
+
     def update_coupling_column(self, state: dict, update_column: int) -> dict:
         """Update the coupling, based on changes to the source locations as part of inversion.
 
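A minimal numeric sketch of the coverage test implemented above (not pyELQ code; np.quantile stands in for whatever threshold_function the model is configured with):

```python
import numpy as np

# The threshold function reduces each source's couplings (hr/kg) to one value,
# its reciprocal is the smallest emission rate (kg/hr) the network could resolve,
# and sources whose rate exceeds coverage_threshold are flagged as not covered.
couplings = np.array([[0.2, 1e-120], [0.3, 1e-120]])         # n_obs x n_sources, hr/kg
coverage_threshold = 6.0                                      # kg/hr
coupling_threshold = np.quantile(couplings, 0.95, axis=0)     # stand-in threshold_function
safe_threshold = np.where(coupling_threshold <= 1e-100, 1, coupling_threshold)
estimated_rate = np.where(coupling_threshold <= 1e-100, np.inf, 1 / safe_threshold)
coverage = estimated_rate < coverage_threshold
print(coverage)  # [ True False]: the second source is effectively invisible
```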
@@ -710,15 +746,11 @@
             (i.e. log[p(current | proposed)])
 
         """
-        prop_state = self.update_coupling_column(prop_state, int(prop_state[self.map["number_sources"]]) - 1)
+        prop_state = self.update_coupling_column(prop_state, int(prop_state[self.map["number_sources"]].item()) - 1)
         prop_state[self.map["allocation"]] = np.concatenate(
             (prop_state[self.map["allocation"]], np.array([0], ndmin=2)), axis=0
         )
-        in_cov_area = self.dispersion_model.compute_coverage(
-            prop_state[self.map["coupling_matrix"]][:, -1],
-            coverage_threshold=self.coverage_threshold,
-            threshold_function=self.threshold_function,
-        )
+        in_cov_area = self.compute_coverage(prop_state[self.map["coupling_matrix"]][:, -1])
         if not in_cov_area:
             logp_pr_g_cr = 1e10
         else:
@@ -760,30 +792,34 @@
 
         return prop_state, logp_pr_g_cr, logp_cr_g_pr
 
-    def move_function(self, current_state: dict, update_column: int) -> dict:
+    def move_function(self, prop_state: dict, update_column: int) -> Tuple[dict, float, float]:
         """Re-compute the coupling after a source location move.
 
         Function first updates the coupling column, and then checks whether the location passes a coverage test. If the
-        location does not have good enough coverage, the state reverts to the coupling from the current state.
+        location does not have good enough coverage, we return a high log-probability of the move to reject.
 
         Args:
-            current_state (dict): dictionary containing parameters of the current state.
+            prop_state (dict): dictionary containing parameters of the proposed state.
             update_column (int): index of the coupling column to be updated.
 
         Returns:
-            dict: proposed state, with updated coupling matrix.
+            prop_state (dict): proposed state, with coupling matrix and source emission rate vector updated.
+            logp_pr_g_cr (float): log-transition density of the proposed state given the current state
+                (i.e. log[p(proposed | current)])
+            logp_cr_g_pr (float): log-transition density of the current state given the proposed state
+                (i.e. log[p(current | proposed)])
 
         """
-        prop_state = deepcopy(current_state)
         prop_state = self.update_coupling_column(prop_state, update_column)
-        in_cov_area = self.dispersion_model.compute_coverage(
-            prop_state[self.map["coupling_matrix"]][:, update_column],
-            coverage_threshold=self.coverage_threshold,
-            threshold_function=self.threshold_function,
-        )
+        in_cov_area = self.compute_coverage(prop_state[self.map["coupling_matrix"]][:, update_column])
+
         if not in_cov_area:
-            prop_state = deepcopy(current_state)
-            return prop_state
+            logp_pr_g_cr = 1e10
+        else:
+            logp_pr_g_cr = 0.0
+        logp_cr_g_pr = 0.0
+
+        return prop_state, logp_pr_g_cr, logp_cr_g_pr
 
     def make_model(self, model: list) -> list:
         """Take model list and append new elements from current model component.
@@ -813,7 +849,18 @@
                     domain_response_upper=self.site_limits[:, [1]],
                 )
             )
-            model.append(Poisson(response=self.map["number_sources"], rate=self.map["number_source_rate"]))
+            if self.distribution_number_sources == "Uniform":
+                model.append(
+                    Uniform(
+                        response=self.map["number_sources"],
+                        domain_response_lower=1,
+                        domain_response_upper=self.n_sources_max,
+                    )
+                )
+            elif self.distribution_number_sources == "Poisson":
+                model.append(Poisson(response=self.map["number_sources"], rate=self.map["number_source_rate"]))
+            else:
+                raise ValueError("Invalid distribution type for number of sources.")
         return model
 
     def make_sampler(self, model: Model, sampler_list: list) -> list:
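A minimal configuration sketch for the new prior option, assuming source_model is an already-constructed SourceModel subclass instance used in a reversible jump run (the constructor arguments are not shown in this diff):

```python
# Select the new Uniform prior on the number of sources; with this choice the
# Poisson rate_num_sources attribute is unused.
source_model.reversible_jump = True
source_model.distribution_number_sources = "Uniform"
source_model.n_sources_max = 10
```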
@@ -854,7 +901,9 @@
         state[self.map["precision_prior_rate"]] = np.ones_like(self.initial_precision) * self.prior_precision_rate
         if self.reversible_jump:
             state[self.map["source_location"]] = self.dispersion_model.source_map.location.to_array().T
-            state[self.map["number_sources"]] = state[self.map["source_location"]].shape[1]
+            state[self.map["number_sources"]] = np.array(
+                state[self.map["source_location"]].shape[1], ndmin=2, dtype=int
+            )
             state[self.map["number_source_rate"]] = self.rate_num_sources
         return state
 
pyelq/coordinate_system.py CHANGED
@@ -10,6 +10,7 @@ pyELQ. Each coordinate system has relevant methods for features that are commonly
 conversions between each of the systems, alongside some functionality for interpolation.
 
 """
+
 from abc import ABC, abstractmethod
 from copy import deepcopy
 from dataclasses import dataclass, field
pyelq/data_access/__init__.py CHANGED
@@ -2,4 +2,5 @@
 #
 # SPDX-License-Identifier: Apache-2.0
 """Data Access Module."""
+
 __all__ = ["data_access"]
pyelq/data_access/data_access.py CHANGED
@@ -16,7 +16,7 @@ from typing import Any, Union
 
 import pandas as pd
 
-from pyelq.meteorology import Meteorology, MeteorologyGroup
+from pyelq.meteorology.meteorology import Meteorology, MeteorologyGroup
 from pyelq.sensor.sensor import Sensor, SensorGroup
 
 
pyelq/dispersion_model/__init__.py CHANGED
@@ -1,5 +1,6 @@
-# SPDX-FileCopyrightText: 2024 Shell Global Solutions International B.V. All Rights Reserved.
+# SPDX-FileCopyrightText: 2026 Shell Global Solutions International B.V. All Rights Reserved.
 #
 # SPDX-License-Identifier: Apache-2.0
-"""Dispersion Model Module."""
-__all__ = ["gaussian_plume"]
+"""Data Access Module."""
+
+__all__ = ["dispersion_model", "finite_volume", "gaussian_plume", "site_layout"]
pyelq/dispersion_model/dispersion_model.py ADDED
@@ -0,0 +1,202 @@
+# SPDX-FileCopyrightText: 2026 Shell Global Solutions International B.V. All Rights Reserved.
+#
+# SPDX-License-Identifier: Apache-2.0
+
+# -*- coding: utf-8 -*-
+"""DispersionModel module.
+
+The super class for the Gaussian Plume and Finite Volume dispersion models used in pyELQ.
+
+The Mathematics of Atmospheric Dispersion Modeling, John M. Stockie, DOI. 10.1137/10080991X
+
+"""
+
+from abc import ABC
+from dataclasses import dataclass
+from typing import Union
+
+import numpy as np
+
+import pyelq.support_functions.spatio_temporal_interpolation as sti
+from pyelq.gas_species import GasSpecies
+from pyelq.meteorology.meteorology import Meteorology
+from pyelq.sensor.satellite import Satellite
+from pyelq.sensor.sensor import Sensor
+from pyelq.source_map import SourceMap
+
+
+@dataclass
+class DispersionModel(ABC):
+    """Defines the dispersion model class.
+
+    Attributes:
+        source_map (Sourcemap): SourceMap object used for the dispersion model.
+        minimum_contribution (float): All elements in the plume coupling smaller than this number will be set
+            to 0. Helps to speed up matrix multiplications/matrix inverses, also helps with stability.
+
+    """
+
+    source_map: SourceMap
+    minimum_contribution: float = 0
+
+    def calculate_gas_density(
+        self,
+        meteorology: Meteorology,
+        sensor_object: Sensor,
+        gas_object: Union[GasSpecies, None],
+        run_interpolation: bool = True,
+    ) -> np.ndarray:
+        """Helper function to calculate the gas density using ideal gas law.
+
+        https://en.wikipedia.org/wiki/Ideal_gas
+
+        When a gas object is passed as input we calculate the density according to that gas. We check if the
+        meteorology object has a temperature and/or pressure value and use those accordingly. Otherwise, we use Standard
+        Temperature and Pressure (STP).
+
+        If run_interpolation is True, we interpolate the temperature and pressure values to the source locations/times
+        such that this is consistent with the other calculations, i.e. we only do spatial interpolation when the sensor
+        is a Satellite object and temporal interpolation otherwise.
+
+        When no gas_object is passed in we just set the gas density value to 1.
+
+        Args:
+            meteorology (Meteorology): Meteorology object potentially containing temperature or pressure values
+            sensor_object (Sensor): Sensor object containing information about where to interpolate to
+            gas_object (Union[GasSpecies, None]): Gas species object which actually calculates the correct density
+            run_interpolation (bool): Flag indicating whether to run interpolation, defaults to True.
+
+        Returns:
+            gas_density (np.ndarray): Numpy array of shape [1 x nof_sources] (Satellite sensor)
+                or [nof_observations x 1] (otherwise) containing the gas density values to use
+
+        """
+        if not isinstance(gas_object, GasSpecies):
+            if isinstance(sensor_object, Satellite):
+                return np.ones((1, self.source_map.nof_sources))
+            return np.ones((sensor_object.nof_observations, 1))
+
+        if meteorology.temperature is None:
+            temperature = np.array([[273.15]])
+
+        elif run_interpolation:
+            temperature = self.interpolate_meteorology(
+                meteorology=meteorology, variable_name="temperature", sensor_object=sensor_object
+            )
+        else:
+            temperature = meteorology.temperature
+
+        if meteorology.pressure is None:
+            pressure = np.array([[101.325]])
+        elif run_interpolation:
+            pressure = self.interpolate_meteorology(
+                meteorology=meteorology, variable_name="pressure", sensor_object=sensor_object
+            )
+        else:
+            pressure = meteorology.pressure
+
+        gas_density = gas_object.gas_density(temperature=temperature, pressure=pressure)
+        return gas_density
+
+    def interpolate_all_meteorology(
+        self,
+        sensor_object: Sensor,
+        meteorology: Meteorology,
+        gas_object: Union[GasSpecies, None],
+        run_interpolation: bool,
+    ):
+        """Function which carries out interpolation of all meteorological information.
+
+        The flag run_interpolation determines whether the interpolation should be carried out. If this is set to be
+        False, the meteorological parameters are simply set to the values stored on the meteorology object (i.e. we
+        assume that the meteorology has already been interpolated). This functionality is required to avoid wasted
+        computation in the case of e.g. a reversible jump run.
+
+        Args:
+            sensor_object (Sensor): object containing locations/times onto which met information should
+                be interpolated.
+            meteorology (Meteorology): object containing meteorology information for interpolation.
+            gas_object (Union[GasSpecies, None]): object containing gas information.
+            run_interpolation (bool): logical indicating whether the meteorology information needs to be interpolated.
+
+        Returns:
+            gas_density (np.ndarray): numpy array of shape [n_data x 1] of gas densities.
+            u_interpolated (np.ndarray): numpy array of shape [n_data x 1] of northerly wind components.
+            v_interpolated (np.ndarray): numpy array of shape [n_data x 1] of easterly wind components.
+            wind_turbulence_horizontal (np.ndarray): numpy array of shape [n_data x 1] of horizontal turbulence
+                parameters.
+            wind_turbulence_vertical (np.ndarray): numpy array of shape [n_data x 1] of vertical turbulence
+                parameters.
+
+        """
+        if run_interpolation:
+            gas_density = self.calculate_gas_density(
+                meteorology=meteorology, sensor_object=sensor_object, gas_object=gas_object
+            )
+            u_interpolated = self.interpolate_meteorology(
+                meteorology=meteorology, variable_name="u_component", sensor_object=sensor_object
+            )
+            v_interpolated = self.interpolate_meteorology(
+                meteorology=meteorology, variable_name="v_component", sensor_object=sensor_object
+            )
+            wind_turbulence_horizontal = self.interpolate_meteorology(
+                meteorology=meteorology, variable_name="wind_turbulence_horizontal", sensor_object=sensor_object
+            )
+            wind_turbulence_vertical = self.interpolate_meteorology(
+                meteorology=meteorology, variable_name="wind_turbulence_vertical", sensor_object=sensor_object
+            )
+        else:
+            if gas_object is None:
+                gas_density = np.ones((meteorology.nof_observations, 1))
+            else:
+                gas_density = gas_object.gas_density(temperature=meteorology.temperature, pressure=meteorology.pressure)
+                gas_density = gas_density.reshape((gas_density.size, 1))
+            u_interpolated = meteorology.u_component.reshape((meteorology.u_component.size, 1))
+            v_interpolated = meteorology.v_component.reshape((meteorology.v_component.size, 1))
+            wind_turbulence_horizontal = meteorology.wind_turbulence_horizontal.reshape(
+                (meteorology.wind_turbulence_horizontal.size, 1)
+            )
+            wind_turbulence_vertical = meteorology.wind_turbulence_vertical.reshape(
+                (meteorology.wind_turbulence_vertical.size, 1)
+            )
+
+        return gas_density, u_interpolated, v_interpolated, wind_turbulence_horizontal, wind_turbulence_vertical
+
+    def interpolate_meteorology(
+        self, meteorology: Meteorology, variable_name: str, sensor_object: Sensor
+    ) -> Union[np.ndarray, None]:
+        """Helper function to interpolate meteorology variables.
+
+        This function interpolates meteorological variables to times in Sensor or Sources in sourcemap. It also
+        calculates the wind speed and mathematical angle between the u- and v-components which in turn gets used in the
+        calculation of the Gaussian plume.
+
+        When the input sensor object is a Satellite type we use spatial interpolation using the interpolation method
+        from the coordinate system class as this takes care of the coordinate systems.
+        When the input sensor object is of another type we use temporal interpolation (assumption is spatial uniformity
+        for all observations over a small(er) area).
+
+        Args:
+            meteorology (Meteorology): Meteorology object containing u- and v-components of wind including their
+                spatial location
+            variable_name (str): String name of an attribute in the meteorology input object which needs to be
+                interpolated
+            sensor_object (Sensor): Sensor object containing information about where to interpolate to
+
+        Returns:
+            variable_interpolated (np.ndarray): Interpolated values
+
+        """
+        variable = getattr(meteorology, variable_name)
+        if variable is None:
+            return None
+
+        if isinstance(sensor_object, Satellite):
+            variable_interpolated = meteorology.location.interpolate(variable, self.source_map.location)
+            variable_interpolated = variable_interpolated.reshape(1, self.source_map.nof_sources)
+        else:
+            variable_interpolated = sti.interpolate(
+                time_in=meteorology.time, values_in=variable, time_out=sensor_object.time
+            )
+            variable_interpolated = variable_interpolated.reshape(sensor_object.nof_observations, 1)
+        return variable_interpolated
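The STP defaults above (273.15 K, 101.325 kPa) feed an ideal-gas density calculation. A minimal sketch of that calculation for methane, not the pyELQ GasSpecies implementation (units and molar mass are illustrative assumptions):

```python
import numpy as np

# Ideal gas law: rho = p * M / (R * T), evaluated at the defaults used above.
R = 8.31446          # J / (mol K)
molar_mass = 16.04   # g/mol, methane (illustrative)
temperature = np.array([[273.15]])   # K
pressure = np.array([[101.325]])     # kPa

density = pressure * 1e3 * molar_mass / (R * temperature)  # g/m^3
print(density)  # ~716 g/m^3, i.e. ~0.716 kg/m^3 at STP
```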