pyelq-1.1.0-py3-none-any.whl

pyelq/preprocessing.py ADDED
@@ -0,0 +1,262 @@
+ # SPDX-FileCopyrightText: 2024 Shell Global Solutions International B.V. All Rights Reserved.
+ #
+ # SPDX-License-Identifier: Apache-2.0
+
+ """Class for performing preprocessing on the loaded data."""
+
+ from copy import deepcopy
+ from dataclasses import dataclass
+ from typing import Union
+
+ import numpy as np
+ import pandas as pd
+
+ from pyelq.meteorology import Meteorology, MeteorologyGroup
+ from pyelq.sensor.sensor import Sensor, SensorGroup
+ from pyelq.support_functions.spatio_temporal_interpolation import temporal_resampling
+
+
+ @dataclass
+ class Preprocessor:
+     """Class which implements generic functionality for pre-processing of sensor and meteorology information.
+
+     Attributes:
+         time_bin_edges (pd.arrays.DatetimeArray): edges of the time bins to be used for smoothing/interpolation.
+         sensor_object (SensorGroup): sensor group object containing raw data.
+         met_object (Meteorology): met object containing raw data.
+         aggregate_function (str): function to be used for aggregation of data. Defaults to mean.
+         sensor_fields (list): standard list of sensor attributes that we wish to regularize and/or filter.
+         met_fields (list): standard list of meteorology attributes that we wish to regularize/filter.
+
+     """
+
+     time_bin_edges: pd.arrays.DatetimeArray
+     sensor_object: SensorGroup
+     met_object: Union[Meteorology, MeteorologyGroup]
+     aggregate_function: str = "mean"
+     sensor_fields = ["time", "concentration", "source_on"]
+     met_fields = [
+         "time",
+         "wind_direction",
+         "wind_speed",
+         "pressure",
+         "temperature",
+         "u_component",
+         "v_component",
+         "w_component",
+         "wind_turbulence_horizontal",
+         "wind_turbulence_vertical",
+     ]
+
+     def __post_init__(self) -> None:
+         """Initialise the class.
+
+         Attaches the sensor and meteorology objects as attributes, and runs initial regularization and NaN filtering
+         steps.
+
+         Before running the regularization & NaN filtering, the function ensures that u_component and v_component are
+         present as fields on met_object. The post-smoothing wind speed and direction are then calculated from the
+         smoothed u and v components, to eliminate the need to take means of directions when binning.
+
+         The sensor and meteorology group objects attached to the class will have identical numbers of data points per
+         device, identical time stamps, and be free of NaNs.
+
+         """
+         self.met_object.calculate_uv_from_wind_speed_direction()
+
+         self.regularize_data()
+         self.met_object.calculate_wind_direction_from_uv()
+         self.met_object.calculate_wind_speed_from_uv()
+         self.filter_nans()
+
+     def regularize_data(self) -> None:
+         """Smoothing or interpolation of data onto a common set of time points.
+
+         Function which takes in sensor and meteorology objects containing raw data (on original time points), and
+         smooths or interpolates these onto a common set of time points.
+
+         When a SensorGroup object is supplied, the function will return a SensorGroup object with the same number of
+         sensors. When a MeteorologyGroup object is supplied, the function will return a MeteorologyGroup object with the
+         same number of objects. When a Meteorology object is supplied, the function will return a MeteorologyGroup
+         object with the same number of objects as there are sensors in the SensorGroup object. The individual
+         Meteorology objects will be identical.
+
+         Assumes that sensor_object and met_object attributes contain the RAW data, on the original time stamps, as
+         loaded from file/API using the relevant data access class.
+
+         After the function has been run, the sensor and meteorology group objects attached to the class as attributes
+         will have identical time stamps, but may still contain NaNs.
+
+         """
+         sensor_out = deepcopy(self.sensor_object)
+         for sns_new, sns_old in zip(sensor_out.values(), self.sensor_object.values()):
+             for field in self.sensor_fields:
+                 if (field != "time") and (getattr(sns_old, field) is not None):
+                     time_out, resampled_values = temporal_resampling(
+                         sns_old.time, getattr(sns_old, field), self.time_bin_edges, self.aggregate_function
+                     )
+                     setattr(sns_new, field, resampled_values)
+             sns_new.time = time_out
+
+         met_out = MeteorologyGroup()
+         if isinstance(self.met_object, Meteorology):
+             single_met_object = self.interpolate_single_met_object(met_in_object=self.met_object)
+             for key in sensor_out.keys():
+                 met_out[key] = single_met_object
+         else:
+             for key, temp_met_object in self.met_object.items():
+                 met_out[key] = self.interpolate_single_met_object(met_in_object=temp_met_object)
+
+         self.sensor_object = sensor_out
+         self.met_object = met_out
+
+     def filter_nans(self) -> None:
+         """Filter out data points where any of the specified sensor or meteorology fields has a NaN value.
+
+         Assumes that sensor_object and met_object attributes have first been passed through the regularize_data
+         function, and thus have fields on aligned time grids.
+
+         Function first works through all sensor and meteorology fields and finds indices of all times where there is a
+         NaN value in any field. Then, it uses the resulting index to filter all fields.
+
+         The result of this function is that the sensor_object and met_object attributes of the class are updated, any
+         NaN values having been removed.
+
+         """
+         for sns_key, met_key in zip(self.sensor_object, self.met_object):
+             sns_in = self.sensor_object[sns_key]
+             met_in = self.met_object[met_key]
+             filter_index = np.ones(sns_in.nof_observations, dtype=bool)
+             for field in self.sensor_fields:
+                 if (field != "time") and (getattr(sns_in, field) is not None):
+                     filter_index = np.logical_and(filter_index, np.logical_not(np.isnan(getattr(sns_in, field))))
+             for field in self.met_fields:
+                 if (field != "time") and (getattr(met_in, field) is not None):
+                     filter_index = np.logical_and(filter_index, np.logical_not(np.isnan(getattr(met_in, field))))
+
+             self.sensor_object[sns_key] = self.filter_object_fields(sns_in, self.sensor_fields, filter_index)
+             self.met_object[met_key] = self.filter_object_fields(met_in, self.met_fields, filter_index)
+
+     def filter_on_met(self, filter_variable: list, lower_limit: list = None, upper_limit: list = None) -> None:
+         """Filter the supplied data on given properties of the meteorological data.
+
+         Assumes that the SensorGroup and MeteorologyGroup objects attached as attributes have corresponding values (one
+         per sensor device), and have attributes that have been pre-smoothed/interpolated onto a common time grid per
+         device.
+
+         The result of this function is that the sensor_object and met_object attributes are updated with the filtered
+         versions.
+
+         Args:
+             filter_variable (list of str): list of meteorology variables that we wish to use for filtering.
+             lower_limit (list of float): list of lower limits associated with the variables in filter_variables.
+                 Defaults to None.
+             upper_limit (list of float): list of upper limits associated with the variables in filter_variables.
+                 Defaults to None.
+
+         """
+         if lower_limit is None:
+             lower_limit = [-np.inf] * len(filter_variable)
+         if upper_limit is None:
+             upper_limit = [np.inf] * len(filter_variable)
+
+         for vrb, low, high in zip(filter_variable, lower_limit, upper_limit):
+             for sns_key, met_key in zip(self.sensor_object, self.met_object):
+                 sns_in = self.sensor_object[sns_key]
+                 met_in = self.met_object[met_key]
+                 index_keep = np.logical_and(getattr(met_in, vrb) >= low, getattr(met_in, vrb) <= high)
+                 self.sensor_object[sns_key] = self.filter_object_fields(sns_in, self.sensor_fields, index_keep)
+                 self.met_object[met_key] = self.filter_object_fields(met_in, self.met_fields, index_keep)
+
+     def block_data(
+         self, time_edges: pd.arrays.DatetimeArray, data_object: Union[SensorGroup, MeteorologyGroup]
+     ) -> list:
+         """Break the supplied data group objects into time-blocked chunks.
+
+         Returns a list of sensor or meteorology group objects, one per time chunk.
+
+         If there is no data for a given device in a particular period, then that device is simply dropped from the group
+         object in that block.
+
+         Either a SensorGroup or a MeteorologyGroup object can be supplied, and the list of blocked objects returned will
+         be of the same type.
+
+         Args:
+             time_edges (pd.arrays.DatetimeArray): [(n_period + 1) x 1] array of edges of the time bins to be used for
+                 dividing the data into blocks.
+             data_object (SensorGroup or MeteorologyGroup): data object containing either sensor or meteorological data,
+                 to be divided into blocks.
+
+         Returns:
+             data_list (list): list of [n_period x 1] data objects, each list element being either a SensorGroup or
+                 MeteorologyGroup object (depending on the input) containing the data for the corresponding period.
+
+         """
+         data_list = []
+         nof_periods = len(time_edges) - 1
+         if isinstance(data_object, SensorGroup):
+             field_list = self.sensor_fields
+         elif isinstance(data_object, MeteorologyGroup):
+             field_list = self.met_fields
+         else:
+             raise TypeError("Data input must be either a SensorGroup or MeteorologyGroup.")
+
+         for k in range(nof_periods):
+             data_list.append(type(data_object)())
+             for key, dat in data_object.items():
+                 idx_time = (dat.time >= time_edges[k]) & (dat.time <= time_edges[k + 1])
+                 if np.any(idx_time):
+                     data_list[-1][key] = deepcopy(dat)
+                     data_list[-1][key] = self.filter_object_fields(data_list[-1][key], field_list, idx_time)
+         return data_list
+
+     @staticmethod
+     def filter_object_fields(
+         data_object: Union[Sensor, Meteorology], fields: list, index: np.ndarray
+     ) -> Union[Sensor, Meteorology]:
+         """Apply a filter index to all the fields in a given data object.
+
+         Can be used for either a Sensor or Meteorology object.
+
+         Args:
+             data_object (Union[Sensor, Meteorology]): sensor or meteorology object (corresponding to a single device)
+                 for which fields are to be filtered.
+             fields (list): list of field names to be filtered.
+             index (np.ndarray): filter index.
+
+         Returns:
+             Union[Sensor, Meteorology]: filtered data object.
+
+         """
+         return_object = deepcopy(data_object)
+         for field in fields:
+             if getattr(return_object, field) is not None:
+                 setattr(return_object, field, getattr(return_object, field)[index])
+         return return_object
+
+     def interpolate_single_met_object(self, met_in_object: Meteorology) -> Meteorology:
+         """Interpolate a single Meteorology object onto the time grid of the class.
+
+         Args:
+             met_in_object (Meteorology): Meteorology object to be interpolated onto the time grid of the class.
+
+         Returns:
+             met_out_object (Meteorology): interpolated Meteorology object.
+
+         """
+         met_out_object = Meteorology()
+         time_out = None
+         for field in self.met_fields:
+             if (field != "time") and (getattr(met_in_object, field) is not None):
+                 time_out, resampled_values = temporal_resampling(
+                     met_in_object.time,
+                     getattr(met_in_object, field),
+                     self.time_bin_edges,
+                     self.aggregate_function,
+                 )
+                 setattr(met_out_object, field, resampled_values)
+
+         if time_out is not None:
+             met_out_object.time = time_out
+
+         return met_out_object
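
For orientation, here is a minimal usage sketch of the Preprocessor class above. The SensorGroup and Meteorology objects are assumed to have been loaded elsewhere (e.g. via the pyelq data access classes); the placeholder variables, the 5-minute bin width and the wind-speed limits are illustrative only, not part of the package.

import pandas as pd

from pyelq.preprocessing import Preprocessor

# Raw data objects, assumed to be loaded elsewhere and left as placeholders here:
sensor_group = ...   # SensorGroup with raw concentration data on the original time stamps
met_data = ...       # Meteorology object with raw wind data on the original time stamps

# Common time grid: 5-minute bins spanning one hour (illustrative values).
time_bin_edges = pd.date_range("2024-01-01 00:00", "2024-01-01 01:00", freq="5min").array

# __post_init__ regularizes both objects onto the bin grid and removes NaNs.
preprocessor = Preprocessor(time_bin_edges=time_bin_edges, sensor_object=sensor_group, met_object=met_data)

# Optional extra filtering, e.g. keep only observations with wind speed between 1 and 10 m/s.
preprocessor.filter_on_met(filter_variable=["wind_speed"], lower_limit=[1.0], upper_limit=[10.0])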
pyelq/sensor/__init__.py ADDED
@@ -0,0 +1,5 @@
+ # SPDX-FileCopyrightText: 2024 Shell Global Solutions International B.V. All Rights Reserved.
+ #
+ # SPDX-License-Identifier: Apache-2.0
+ """Sensor Module."""
+ __all__ = ["satellite", "beam", "sensor"]
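
The __all__ list above simply exposes the three sensor modules; the corresponding imports, matching the module paths used elsewhere in this diff, look like:

from pyelq.sensor.beam import Beam
from pyelq.sensor.satellite import Satellite
from pyelq.sensor.sensor import Sensor, SensorGroup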
pyelq/sensor/beam.py ADDED
@@ -0,0 +1,55 @@
+ # SPDX-FileCopyrightText: 2024 Shell Global Solutions International B.V. All Rights Reserved.
+ #
+ # SPDX-License-Identifier: Apache-2.0
+
+ # -*- coding: utf-8 -*-
+ """Beam module.
+
+ Subclass of Sensor. Used for beam sensors.
+
+ """
+
+ from dataclasses import dataclass
+
+ import numpy as np
+
+ from pyelq.sensor.sensor import Sensor
+
+
+ @dataclass
+ class Beam(Sensor):
+     """Defines Beam sensor class.
+
+     Location attribute from superclass is assumed to be a Coordinate class object containing 2 locations, the first of
+     the sensor and the second of the retro.
+
+     Attributes:
+         n_beam_knots (int, optional): Number of beam knots to evaluate along a single beam
+
+     """
+
+     n_beam_knots: int = 50
+
+     @property
+     def midpoint(self) -> np.ndarray:
+         """np.ndarray: Midpoint of the beam."""
+         return np.mean(self.location.to_array(), axis=0)
+
+     def make_beam_knots(self, ref_latitude, ref_longitude, ref_altitude=0) -> np.ndarray:
+         """Create beam knot locations.
+
+         Creates beam knot locations based on the location attribute and the n_beam_knots attribute.
+         Results in an array of beam knot locations of shape [n_beam_knots x 3]. A reference point has to be provided in
+         order to create the beam knots in a local frame, spaced in meters.
+
+         Args:
+             ref_latitude (float): Reference latitude in degrees
+             ref_longitude (float): Reference longitude in degrees
+             ref_altitude (float, optional): Reference altitude in meters
+
+         """
+         temp_location = self.location.to_enu(
+             ref_latitude=ref_latitude, ref_longitude=ref_longitude, ref_altitude=ref_altitude
+         ).to_array()
+         beam_knot_array = np.linspace(temp_location[0, :], temp_location[1, :], num=self.n_beam_knots, endpoint=True)
+         return beam_knot_array
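
A short, hedged sketch of how the Beam class above might be used. How the LLA coordinate object is constructed and populated is an assumption here (the coordinate_system module is not part of this diff), and the coordinates themselves are made up.

import numpy as np

from pyelq.coordinate_system import LLA
from pyelq.sensor.beam import Beam

# Two points: the sensor location first, the retroreflector second (illustrative values).
# Assumes an LLA object can be created empty and populated attribute by attribute.
location = LLA()
location.latitude = np.array([52.000, 52.001])
location.longitude = np.array([4.000, 4.000])
location.altitude = np.array([2.0, 2.0])

beam = Beam()
beam.label = "beam_1"
beam.location = location

# Beam knot locations in a local ENU frame, shape [n_beam_knots x 3] (50 x 3 by default).
knots = beam.make_beam_knots(ref_latitude=52.0, ref_longitude=4.0, ref_altitude=0.0)
mid = beam.midpoint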
pyelq/sensor/satellite.py ADDED
@@ -0,0 +1,59 @@
+ # SPDX-FileCopyrightText: 2024 Shell Global Solutions International B.V. All Rights Reserved.
+ #
+ # SPDX-License-Identifier: Apache-2.0
+
+ # -*- coding: utf-8 -*-
+ """Satellite module.
+
+ Subclass of Sensor. Mainly used to accommodate the TROPOMI satellite sensor. See:
+ http://www.tropomi.eu/data-products/methane and
+ http://www.tropomi.eu/data-products/nitrogen-dioxide
+
+ """
+
+ from dataclasses import dataclass, field
+
+ import numpy as np
+
+ from pyelq.sensor.sensor import Sensor
+
+
+ @dataclass
+ class Satellite(Sensor):
+     """Defines Satellite sensor class.
+
+     Attributes:
+         qa_value (np.ndarray, optional): Array containing quality values associated with the observations.
+         precision (np.ndarray, optional): Array containing precision values associated with the observations.
+         precision_kernel (np.ndarray, optional): Array containing precision kernel values associated with the
+             observations.
+         ground_pixel (np.ndarray, optional): Array containing ground pixel values associated with the observations.
+             Ground pixels indicate the dimension perpendicular to the flight direction.
+         scanline (np.ndarray, optional): Array containing scanline values associated with the observations.
+             Scanlines indicate the dimension in the direction of flight.
+         orbit (np.ndarray, optional): Array containing orbit values associated with the observations.
+         pixel_bounds (np.ndarray, optional): Array containing Polygon features which define the pixel bounds.
+
+     """
+
+     qa_value: np.ndarray = field(init=False)
+     precision: np.ndarray = field(init=False)
+     precision_kernel: np.ndarray = field(init=False)
+     ground_pixel: np.ndarray = field(init=False)
+     scanline: np.ndarray = field(init=False)
+     orbit: np.ndarray = field(init=False, default=None)
+     pixel_bounds: np.ndarray = field(init=False)
+
+     def get_orbits(self) -> np.ndarray:
+         """Gets the unique orbits which are present in the data.
+
+         Raises:
+             ValueError: When the orbit attribute is None.
+
+         Returns:
+             np.ndarray: Unique orbits present in the data.
+
+         """
+         if self.orbit is None:
+             raise ValueError("Orbit attribute is None")
+         return np.unique(self.orbit)
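
A minimal sketch of the Satellite class above. Because most fields are declared with init=False, the object is created empty and arrays are attached afterwards; the label and values are invented for illustration.

import numpy as np

from pyelq.sensor.satellite import Satellite

satellite = Satellite()
satellite.label = "tropomi_ch4"                                # illustrative label
satellite.concentration = np.array([1850.0, 1900.0, 1875.0])   # e.g. methane mixing ratios
satellite.orbit = np.array([101, 101, 102])

print(satellite.nof_observations)   # 3, via the Sensor superclass property
print(satellite.get_orbits())       # [101 102]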
pyelq/sensor/sensor.py ADDED
@@ -0,0 +1,241 @@
+ # SPDX-FileCopyrightText: 2024 Shell Global Solutions International B.V. All Rights Reserved.
+ #
+ # SPDX-License-Identifier: Apache-2.0
+
+ # -*- coding: utf-8 -*-
+ """Sensor module.
+
+ The superclass for the sensor classes. This module provides the higher-level Sensor and SensorGroup classes. A Sensor
+ represents a single sensor, while a SensorGroup is a dictionary of Sensors, created to deal with the properties of all
+ sensors together.
+
+ """
+
+ from copy import deepcopy
+ from dataclasses import dataclass, field
+
+ import numpy as np
+ import pandas as pd
+ import plotly.express as px
+ import plotly.graph_objects as go
+ from pandas.arrays import DatetimeArray
+
+ from pyelq.coordinate_system import ECEF, ENU, LLA, Coordinate
+
+
+ @dataclass
+ class Sensor:
+     """Defines the properties and methods of the sensor class.
+
+     Attributes:
+         label (str, optional): String label for sensor
+         time (pandas.arrays.DatetimeArray, optional): Array containing time values associated with concentration
+             reading
+         location (Coordinate, optional): Coordinate object specifying the observation locations
+         concentration (np.ndarray, optional): Array containing concentration values associated with time reading
+         source_on (np.ndarray, optional): Array of size nof_observations containing boolean values indicating
+             whether a source is on or off for each observation, i.e. we are assuming the sensor can/can't see a source
+
+     """
+
+     label: str = field(init=False)
+     time: DatetimeArray = field(init=False, default=None)
+     location: Coordinate = field(init=False)
+     concentration: np.ndarray = field(default_factory=lambda: np.array([]))
+     source_on: np.ndarray = field(init=False, default=None)
+
+     @property
+     def nof_observations(self) -> int:
+         """Int: Number of observations contained in concentration array."""
+         return self.concentration.size
+
+     def plot_sensor_location(self, fig: go.Figure, color=None) -> go.Figure:
+         """Plotting the sensor location.
+
+         Args:
+             fig (go.Figure): Plotly figure object to add the trace to
+             color (`optional`): When specified, the color to be used
+
+         Returns:
+             fig (go.Figure): Plotly figure object with sensor location trace added to it
+
+         """
+         lla_object = self.location.to_lla()
+
+         marker_dict = {"size": 10, "opacity": 0.8}
+         if color is not None:
+             marker_dict["color"] = color
+
+         fig.add_trace(
+             go.Scattermap(
+                 mode="markers+lines",
+                 lat=np.array(lla_object.latitude),
+                 lon=np.array(lla_object.longitude),
+                 marker=marker_dict,
+                 line={"width": 3},
+                 name=self.label,
+             )
+         )
+         return fig
+
+     def plot_timeseries(self, fig: go.Figure, color=None, mode: str = "markers") -> go.Figure:
+         """Timeseries plot of the sensor concentration observations.
+
+         Args:
+             fig (go.Figure): Plotly figure object to add the trace to
+             color (`optional`): When specified, the color to be used
+             mode (str, optional): Mode used for plotting, i.e. markers, lines or markers+lines
+
+         Returns:
+             fig (go.Figure): Plotly figure object with sensor concentration timeseries trace added to it
+
+         """
+         marker_dict = {"size": 5, "opacity": 1}
+         if color is not None:
+             marker_dict["color"] = color
+
+         fig.add_trace(
+             go.Scatter(
+                 x=self.time,
+                 y=self.concentration.flatten(),
+                 mode=mode,
+                 marker=marker_dict,
+                 name=self.label,
+                 legendgroup=self.label,
+             )
+         )
+
+         return fig
+
+
+ @dataclass
+ class SensorGroup(dict):
+     """A dictionary containing multiple Sensors.
+
+     This class is used when we want to combine a collection of sensors and be able to store/access overall properties.
+
+     Attributes:
+         color_map (list, optional): Default colormap to use for plotting
+
+     """
+
+     color_map: list = field(default_factory=list, init=False)
+
+     def __post_init__(self):
+         self.color_map = px.colors.qualitative.Pastel
+
+     @property
+     def nof_observations(self) -> int:
+         """Int: The total number of observations across all the sensors."""
+         return int(np.sum([sensor.nof_observations for sensor in self.values()], axis=None))
+
+     @property
+     def concentration(self) -> np.ndarray:
+         """np.ndarray: Column vector of concentration values across all sensors, unwrapped per sensor."""
+         return np.concatenate([sensor.concentration.flatten() for sensor in self.values()], axis=0)
+
+     @property
+     def time(self) -> pd.arrays.DatetimeArray:
+         """DatetimeArray: Column vector of time values across all sensors."""
+         return pd.array(np.concatenate([sensor.time for sensor in self.values()]), dtype="datetime64[ns]")
+
+     @property
+     def location(self) -> Coordinate:
+         """Coordinate: Coordinate object containing observation locations from all sensors in the group."""
+         location_object = deepcopy(list(self.values())[0].location)
+         if isinstance(location_object, ENU):
+             attr_list = ["east", "north", "up"]
+         elif isinstance(location_object, LLA):
+             attr_list = ["latitude", "longitude", "altitude"]
+         elif isinstance(location_object, ECEF):
+             attr_list = ["x", "y", "z"]
+         else:
+             raise TypeError(
+                 f"Location object should be either ENU, LLA or ECEF, while currently it is {type(location_object)}"
+             )
+         for attr in attr_list:
+             setattr(
+                 location_object,
+                 attr,
+                 np.concatenate([np.array(getattr(sensor.location, attr), ndmin=1) for sensor in self.values()], axis=0),
+             )
+         return location_object
+
+     @property
+     def sensor_index(self) -> np.ndarray:
+         """np.ndarray: Column vector of integer indices linking concentration observation to a particular sensor."""
+         return np.concatenate(
+             [np.ones(sensor.nof_observations, dtype=int) * i for i, sensor in enumerate(self.values())]
+         )
+
+     @property
+     def source_on(self) -> np.ndarray:
+         """Column vector of booleans indicating whether sources are expected to be on, unwrapped over sensors.
+
+         Assumes source is on when None is specified for a specific sensor.
+
+         Returns:
+             np.ndarray: Source on attribute, unwrapped over sensors.
+
+         """
+         overall_idx = np.array([])
+         for curr_key in list(self.keys()):
+             if self[curr_key].source_on is None:
+                 temp_idx = np.ones(self[curr_key].nof_observations).astype(bool)
+             else:
+                 temp_idx = self[curr_key].source_on
+
+             overall_idx = np.concatenate([overall_idx, temp_idx])
+         return overall_idx.astype(bool)
+
+     @property
+     def nof_sensors(self) -> int:
+         """Int: Number of sensors contained in the SensorGroup."""
+         return len(self)
+
+     def add_sensor(self, sensor: Sensor):
+         """Add a sensor to the SensorGroup."""
+         self[sensor.label] = sensor
+
+     def plot_sensor_location(self, fig: go.Figure, color_map: list = None) -> go.Figure:
+         """Plotting of the locations of all sensors in the SensorGroup.
+
+         Args:
+             fig (go.Figure): Plotly figure object to add the trace to
+             color_map (list, optional): When specified, the colormap to be used, plotting will cycle through
+                 the colors
+
+         Returns:
+             fig (go.Figure): Plotly figure object with sensor location traces added to it
+
+         """
+         if color_map is None:
+             color_map = self.color_map
+
+         for i, sensor in enumerate(self.values()):
+             color_idx = i % len(color_map)
+             fig = sensor.plot_sensor_location(fig, color=color_map[color_idx])
+
+         return fig
+
+     def plot_timeseries(self, fig: go.Figure, color_map: list = None, mode: str = "markers") -> go.Figure:
+         """Plotting of the concentration timeseries of all sensors in the SensorGroup.
+
+         Args:
+             fig (go.Figure): Plotly figure object to add the trace to
+             color_map (list, optional): When specified, the colormap to be used, plotting will cycle through
+                 the colors
+             mode (str, optional): Mode used for plotting, i.e. markers, lines or markers+lines
+
+         Returns:
+             fig (go.Figure): Plotly figure object with sensor concentration time series traces added to it
+
+         """
+         if color_map is None:
+             color_map = self.color_map
+
+         for i, sensor in enumerate(self.values()):
+             color_idx = i % len(color_map)
+             fig = sensor.plot_timeseries(fig, color=color_map[color_idx], mode=mode)
+
+         return fig
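
Finally, a small sketch of how a SensorGroup is typically assembled from individual Sensor objects and plotted; the sensor names and concentration values are made up for illustration.

import numpy as np
import pandas as pd
import plotly.graph_objects as go

from pyelq.sensor.sensor import Sensor, SensorGroup

group = SensorGroup()
for name in ["sensor_A", "sensor_B"]:
    sensor = Sensor()
    sensor.label = name
    sensor.time = pd.date_range("2024-01-01", periods=3, freq="1min").array
    sensor.concentration = np.array([2.0, 2.1, 1.9])
    group.add_sensor(sensor)

print(group.nof_sensors)        # 2
print(group.nof_observations)   # 6
print(group.sensor_index)       # [0 0 0 1 1 1]

# Concentration timeseries for all sensors, cycling through the default color_map.
fig = group.plot_timeseries(go.Figure())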