EBC-Measurements 0.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,9 @@
1
+ Metadata-Version: 2.1
2
+ Name: EBC_Measurements
3
+ Version: 0.1
4
+ Summary: A tool for different measurement devices
5
+ Home-page: https://github.com/RWTH-EBC/EBC_Measurements
6
+ Author: RWTH Aachen University, E.ON Energy Research Center, Institute for Energy Efficient Buildings and Indoor Climate
7
+ Author-email: ebc-abos@eonerc.rwth-aachen.de
8
+ License: MIT
9
+ License-File: LICENSE
@@ -0,0 +1,19 @@
1
+ LICENSE
2
+ README.md
3
+ setup.py
4
+ EBC_Measurements.egg-info/PKG-INFO
5
+ EBC_Measurements.egg-info/SOURCES.txt
6
+ EBC_Measurements.egg-info/dependency_links.txt
7
+ EBC_Measurements.egg-info/top_level.txt
8
+ ebcmeasurements/__init__.py
9
+ ebcmeasurements/Base/Auxiliary.py
10
+ ebcmeasurements/Base/DataLogger.py
11
+ ebcmeasurements/Base/DataOutput.py
12
+ ebcmeasurements/Base/DataSource.py
13
+ ebcmeasurements/Base/DataSourceOutput.py
14
+ ebcmeasurements/Base/__init__.py
15
+ ebcmeasurements/Beckhoff/AdsDataSourceOutput.py
16
+ ebcmeasurements/Beckhoff/__init__.py
17
+ ebcmeasurements/Sensor_Electronic/SensoSysDataSource.py
18
+ ebcmeasurements/Sensor_Electronic/SensoSysDevices.py
19
+ ebcmeasurements/Sensor_Electronic/__init__.py
@@ -0,0 +1 @@
1
+ ebcmeasurements
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2024 RWTH Aachen University - E.ON Energy Research Center - Institute for Energy Efficient Buildings and Indoor Climate
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1,9 @@
1
+ Metadata-Version: 2.1
2
+ Name: EBC_Measurements
3
+ Version: 0.1
4
+ Summary: A tool for different measurement devices
5
+ Home-page: https://github.com/RWTH-EBC/EBC_Measurements
6
+ Author: RWTH Aachen University, E.ON Energy Research Center, Institute for Energy Efficient Buildings and Indoor Climate
7
+ Author-email: ebc-abos@eonerc.rwth-aachen.de
8
+ License: MIT
9
+ License-File: LICENSE
@@ -0,0 +1,2 @@
1
+ # EBC-Measurements
2
+ Measurement programs for different devices
@@ -0,0 +1,14 @@
1
+ import json
2
+
3
+
4
+ def load_json(file_name: str) -> list | dict:
5
+ """Read a json file"""
6
+ with open(file_name, 'r') as f:
7
+ content = json.load(f)
8
+ return content
9
+
10
+
11
+ def dump_json(content: list | dict, file_name: str):
12
+ """Dump a json file"""
13
+ with open(file_name, 'w') as f:
14
+ json.dump(content, f, indent=4)
@@ -0,0 +1,312 @@
1
+ """
2
+ Base module: DataLogger, incl. ABC of DataSource and DataOutput
3
+ """
4
+
5
+ from ebcmeasurements.Base import DataSource, DataOutput, DataSourceOutput
6
+ from abc import ABC, abstractmethod
7
+ import time
8
+ import logging
9
+ # Load logging configuration from file
10
+ logger = logging.getLogger(__name__)
11
+
12
+
13
class DataLoggerBase(ABC):
    """Abstract base class of a data logger that reads from multiple data sources and logs to multiple data outputs"""
    def __init__(
            self,
            data_sources_mapping: dict[str, DataSource.DataSourceBase],
            data_outputs_mapping: dict[str, DataOutput.DataOutputBase],
            data_rename_mapping: dict[str, dict[str, dict[str, str]]] | None = None,
            **kwargs
    ):
        """
        Initialize data logger instance

        The format of data_sources_mapping is as follows:
        {
            '<source1_name>': instance1 of DataSource,
            '<source2_name>': instance2 of DataSource,
            ...
        }

        The format of data_outputs_mapping is as follows:
        {
            '<output1_name>': instance1 of class DataOutput,
            '<output2_name>': instance2 of class DataOutput,
            ...
        }

        The format of data_rename_mapping is as follows:
        {
            '<source1_name>': {
                <'output1_name'>: {
                    <variable_name_in_source1>: <new_variable_name_in_output1>,
                    ...
                },
                <'output2_name'>: {
                    <variable_name_in_source1>: <new_variable_name_in_output2>,
                    ...
                },
            },
            '<source2_name>': {
                <'output1_name'>: {
                    <variable_name_in_source2>: <new_variable_name_in_output1>,
                    ...
                },
                <'output2_name'>: {
                    <variable_name_in_source2>: <new_variable_name_in_output2>,
                    ...
                },
            },
            ...
        }

        :param data_sources_mapping: Mapping of multiple data sources
        :param data_outputs_mapping: Mapping of multiple data outputs
        :param data_rename_mapping: Mapping of rename for data sources and data outputs, None to use default names
            provided by data sources
        :param **kwargs:
            'data_rename_mapping_explicit': bool: If set True, all variable keys in rename mapping will be checked, if
            they are available in data source
        :raises ValueError: If a source/output name in data_rename_mapping is unknown, or (with explicit checking
            activated) a variable key is not provided by its data source
        """
        # Extract all data sources and outputs to dict (values as instance(s)), also for nested class, e.g. Beckhoff
        # DataSourceOutput instances bundle both roles; unwrap the role needed here
        self._data_sources_mapping = {
            k: ds.data_source if isinstance(ds, DataSourceOutput.DataSourceOutputBase) else ds
            for k, ds in data_sources_mapping.items()
        }
        self._data_outputs_mapping = {
            k: do.data_output if isinstance(do, DataSourceOutput.DataSourceOutputBase) else do
            for k, do in data_outputs_mapping.items()
        }

        # Check rename mapping of data sources and outputs
        if data_rename_mapping is None:
            self._data_rename_mapping = None
        else:
            # Check data source name
            for ds_name, output_dict in data_rename_mapping.items():
                if ds_name in self._data_sources_mapping.keys():
                    # Check data output name
                    for do_name, mapping in output_dict.items():
                        if do_name in self._data_outputs_mapping.keys():
                            # Check mapping keys (only when explicit checking was requested via kwargs)
                            if kwargs.get('data_rename_mapping_explicit', False):
                                for key in mapping.keys():
                                    if key not in self._data_sources_mapping[ds_name].all_variable_names:
                                        raise ValueError(
                                            f"Explicit checking activated: Invalid variable name '{key}' for data "
                                            f"source '{ds_name}' data output '{do_name}' for rename mapping"
                                        )
                        else:
                            raise ValueError(f"Invalid data output name '{do_name}' for rename mapping")
                else:
                    raise ValueError(f"Invalid data source name '{ds_name}' for rename mapping")
            # Checking complete
            self._data_rename_mapping = data_rename_mapping
            logger.info(f"Data rename activated, using mapping: \n{self._data_rename_mapping}")

        # All variable names from all data sources, this will be set to DataOutput
        # Structure: {source_name: {output_name: (variable names for that output), ...}, ...}
        if self._data_rename_mapping is None:
            # Without rename
            self._all_variable_names_dict = {
                ds_name: {
                    do_name: tuple(ds.all_variable_names)  # Origin names without rename
                    for do_name in self._data_outputs_mapping.keys()
                }
                for ds_name, ds in self._data_sources_mapping.items()
            }
        else:
            # With rename; names not present in the mapping keep their original name (get(var, var))
            self._all_variable_names_dict = {
                ds_name: {
                    do_name: tuple(
                        self._data_rename_mapping.get(ds_name, {}).get(do_name, {}).get(var, var)  # Rename
                        for var in ds.all_variable_names
                    )
                    for do_name in self._data_outputs_mapping.keys()
                }
                for ds_name, ds in self._data_sources_mapping.items()
            }

        # Set all_variable_names for each DataOutput
        for do_name, do in self._data_outputs_mapping.items():
            # Collect variable names from all data sources for the current output
            all_data_sources_all_variable_names = tuple(
                var_name
                for ds_name in self._data_sources_mapping.keys()
                for var_name in self._all_variable_names_dict[ds_name][do_name]
            )

            if do.log_time_required:
                # With key of log time (timestamp column goes first)
                do.all_variable_names = (do.key_of_log_time,) + all_data_sources_all_variable_names
            else:
                # Without key of log time, only all variable names
                do.all_variable_names = all_data_sources_all_variable_names

        # Additional methods for DataOutput that must be initialized
        for do in self._data_outputs_mapping.values():
            # Csv output
            if isinstance(do, DataOutput.DataOutputCsv):
                # Write csv header line
                do.write_header_line()
            else:
                pass

    def read_data_all_sources(self) -> dict[str, dict]:
        """Read data from all data sources

        :return: Dict keyed by source name; each value is the dict returned by that source's read_data()
        """
        return {
            ds_name: ds.read_data()
            for ds_name, ds in self._data_sources_mapping.items()
        }

    def log_data_all_outputs(self, data: dict[str, dict], timestamp: str | None = None):
        """Log data to all data outputs

        :param data: Data keyed by source name, as returned by read_data_all_sources()
        :param timestamp: Log time string; required if any output has log_time_required
        :raises ValueError: If an output requires a log time but timestamp is None
        """
        for do_name, do in self._data_outputs_mapping.items():
            # Unzip and rename key for the current output
            # NOTE(review): flattening across sources means identical variable names from different sources
            # overwrite each other — presumably names are unique per logger; confirm with callers
            if self._data_rename_mapping is None:
                unzipped_data = {
                    var: value
                    for ds_name, ds_data in data.items()
                    for var, value in ds_data.items()
                }
            else:
                unzipped_data = {
                    self._data_rename_mapping.get(ds_name, {}).get(do_name, {}).get(var, var): value
                    for ds_name, ds_data in data.items()
                    for var, value in ds_data.items()
                }
            # Add log time as settings
            if do.log_time_required:
                # This data output requires log time
                if timestamp is None:
                    raise ValueError(f"The data output '{do}' requires timestamp but got None")
                else:
                    # Add timestamp to data
                    unzipped_data[do.key_of_log_time] = timestamp
            # Log data to this output
            logger.debug(f"Logging data: {unzipped_data} to {do}")
            do.log_data(unzipped_data)  # Log to output

    @abstractmethod
    def run_data_logging(self, **kwargs):
        """Run data logging (trigger strategy is defined by the child class)"""
        pass

    @property
    def data_sources_mapping(self) -> dict:
        # Mapping of source names to DataSource instances (after unwrapping DataSourceOutput systems)
        return self._data_sources_mapping

    @property
    def data_outputs_mapping(self) -> dict:
        # Mapping of output names to DataOutput instances (after unwrapping DataSourceOutput systems)
        return self._data_outputs_mapping
202
+
203
+
204
class DataLoggerTimeTrigger(DataLoggerBase):
    """Data logger that reads and logs at a fixed time interval"""
    def __init__(
            self,
            data_sources_mapping: dict[str, DataSource.DataSourceBase],
            data_outputs_mapping: dict[str, DataOutput.DataOutputBase],
            data_rename_mapping: dict[str, dict[str, dict[str, str]]] | None = None,
            **kwargs
    ):
        """Time triggered data logger

        :param data_sources_mapping: See class DataLoggerBase
        :param data_outputs_mapping: See class DataLoggerBase
        :param data_rename_mapping: See class DataLoggerBase
        """
        logger.info("Initializing DataLoggerTimeTrigger ...")
        super().__init__(data_sources_mapping, data_outputs_mapping, data_rename_mapping, **kwargs)

    def run_data_logging(self, interval: int | float, duration: int | float | None = None):
        """
        Run data logging until the duration expires or KeyboardInterrupt stops it
        :param interval: Log interval in second, must be greater than 0
        :param duration: Log duration in second, if None (default), the duration is infinite
        :raises ValueError: If interval is not positive, or duration is neither None nor positive
        """
        # Check the input
        if interval <= 0:
            raise ValueError(f"Logging interval '{interval}' should be greater than 0")
        if duration is not None and duration <= 0:
            raise ValueError(f"Logging duration '{duration}' should be 'None' or a value greater than 0")

        # Init time values
        start_time = time.time()
        end_time = None if duration is None else start_time + duration
        next_log_time = start_time  # Init next logging time
        log_count = 0  # Init count of logging

        logger.info(f"Starting data logging at time {time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(start_time))}")
        if end_time is None:
            logger.info("Estimated end time: infinite")
        else:
            logger.info(f"Estimated end time {time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(end_time))}")

        # Logging data
        try:
            while end_time is None or time.time() < end_time:
                # Schedule the next tick relative to the previous one (not to 'now') to avoid drift
                next_log_time += interval

                # Get timestamp
                timestamp = self.get_timestamp_now()

                # Get data from all sources
                data = self.read_data_all_sources()

                # Log count
                log_count += 1  # Update log counter
                print(f"Logging count(s): {log_count}")  # Print log counter to console

                # Log data to each output
                self.log_data_all_outputs(data, timestamp)

                # Calculate the time to sleep to maintain the interval
                sleep_time = next_log_time - time.time()
                if sleep_time > 0:
                    logger.debug(f"sleep_time = {sleep_time}")
                    time.sleep(sleep_time)
                else:
                    # Reading/logging took longer than the interval; continue immediately
                    logger.warning(f"sleep_time = {sleep_time} is negative")

            # Finish data logging
            logger.info("Data logging completed")
        except KeyboardInterrupt:
            logger.warning("Data logging stopped manually")

    @staticmethod
    def get_timestamp_now() -> str:
        """Get the current timestamp, formatted as 'YYYY-MM-DD HH:MM:SS' in local time"""
        return time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())
277
+
278
+
279
if __name__ == "__main__":
    # Demo: log two random sources to two csv outputs for 10 seconds at a 2-second interval
    sources = {
        'Sou1': DataSource.RandomDataSource(size=5, key_missing_rate=0, value_missing_rate=0.5),
        'Sou2': DataSource.RandomStringSource(size=5, str_length=5, key_missing_rate=0.5, value_missing_rate=0.5),
    }
    outputs = {
        'Log1': DataOutput.DataOutputCsv(file_name='Test/csv_logger_1.csv'),
        'Log2': DataOutput.DataOutputCsv(file_name='Test/csv_logger_2.csv', csv_writer_settings={'delimiter': '\t'}),
    }
    # Rename selected variables per output; unnamed variables keep their source names
    rename = {
        'Sou1': {
            'Log1': {'RandData0': 'RandData0InLog1'},
            'Log2': {'RandData1': 'RandData1InLog2', 'RandData2': 'RandData2InLog2'},
        },
        'Sou2': {
            'Log2': {'RandStr0': 'RandStr000'},
        }
    }

    data_logger = DataLoggerTimeTrigger(
        data_sources_mapping=sources,
        data_outputs_mapping=outputs,
        data_rename_mapping=rename,
    )
    print(f"Data sources mapping: {data_logger.data_sources_mapping}")
    print(f"Data outputs mapping: {data_logger.data_outputs_mapping}")
    data_logger.run_data_logging(interval=2, duration=10)
@@ -0,0 +1,124 @@
1
+ """
2
+ Module Data Output
3
+
4
+ Data output module will always receive data in type 'dict' from data logger, with keys of variable names.
5
+ """
6
+
7
+ from abc import ABC, abstractmethod
8
+ from typing import TypedDict
9
+ import csv
10
+ import os
11
+ import logging.config
12
+ # Load logging configuration from file
13
+ logger = logging.getLogger(__name__)
14
+
15
+
16
class DataOutputBase(ABC):
    """Abstract base class of a data output that receives data as dict (keyed by variable names) from a DataLogger"""
    # Class attribute: key's name for the logged time
    key_of_log_time = 'LogTime'

    def __init__(self, log_time_required: bool):
        """
        Initialize data output base
        :param log_time_required: If this output requires a log time (timestamp) for each logged record
        """
        # Internal variable for property 'all_variable_names'
        # It should be set by a DataLogger instance via property setter
        self._all_variable_names: tuple[str, ...] = ()

        # Internal variable for property 'log_time_required'
        # It should be defined during the initialization
        self._log_time_required = log_time_required

    @abstractmethod
    def log_data(self, data: dict):
        """
        Log data to output

        This method must be implemented in child class and will be used by the DataLogger to log data to the output
        """
        pass

    @staticmethod
    def generate_dir_of_file(file_name: str):
        """Generate a directory to save file if it does not exist

        :param file_name: File path whose parent directory should exist afterwards
        """
        dir_path = os.path.dirname(file_name)
        # dir_path is '' for a bare file name (no directory part); os.makedirs('') would raise, so skip it.
        # exist_ok=True both handles the already-exists case and avoids a race between check and creation.
        if dir_path:
            os.makedirs(dir_path, exist_ok=True)

    @property
    def all_variable_names(self) -> tuple[str, ...]:
        """
        All possible variable names of this data output

        This property returns a tuple containing the names of all variables that this data output can potentially
        contain.
        """
        return self._all_variable_names

    @all_variable_names.setter
    def all_variable_names(self, names: tuple[str, ...]):
        self._all_variable_names = names

    @property
    def log_time_required(self) -> bool:
        """If this data output requires log time by data logging"""
        return self._log_time_required
63
+
64
+
65
class DataOutputCsv(DataOutputBase):
    """Data output that logs each record as one row of a csv file"""

    class CsvWriterSettings(TypedDict):
        """Typed dict for csv writer settings"""
        delimiter: str

    def __init__(
            self,
            file_name: str,
            csv_writer_settings: dict | None = None
    ):
        """
        Initialize data output instance for csv data
        :param file_name: File name to save csv data with full path
        :param csv_writer_settings: Settings of csv writer, supported keys: 'delimiter', if None, use default settings
        :raises ValueError: If csv_writer_settings contains an unsupported key
        """
        logger.info("Initializing DataOutputCsv ...")

        super().__init__(log_time_required=True)  # csv file always requires log time
        self.file_name = file_name
        self.generate_dir_of_file(self.file_name)  # Generate file path if not exists

        # Start from the defaults; user-provided settings are merged on top below
        self.csv_writer_settings: 'DataOutputCsv.CsvWriterSettings' = {
            'delimiter': ';'  # Delimiter of csv-file
        }

        if csv_writer_settings is None:
            logger.info(f"Using default csv writer settings: {self.csv_writer_settings}")
        else:
            # Reject any key that is not part of the supported settings before merging
            supported_keys = self.csv_writer_settings.keys()
            for key in csv_writer_settings.keys():
                if key not in supported_keys:
                    raise ValueError(f"Invalid key in csv_writer_settings: '{key}'")
            self.csv_writer_settings.update(csv_writer_settings)
            logger.info(f"Using csv writer settings: {self.csv_writer_settings}")

    def log_data(self, data: dict):
        """Log data to csv"""
        # Order values by all_variable_names; variables absent from data become None (empty cell)
        row = [data.get(name) for name in self._all_variable_names]
        self._append_to_csv(row)

    def write_header_line(self):
        """Write header line as the first row of csv, this method must be called by initializing DataLogger"""
        self._write_to_csv(list(self._all_variable_names))

    def _write_to_csv(self, row: list):
        """Write a csv, the existing content in the file is erased as soon as the file is opened"""
        self._write_row(row, mode='w')

    def _append_to_csv(self, row: list):
        """Append a new line to csv, the existing content in the file is preserved"""
        self._write_row(row, mode='a')

    def _write_row(self, row: list, mode: str):
        """Open the csv file in the given mode and write a single row"""
        # newline='' is required so the csv module controls line endings itself
        with open(self.file_name, mode, newline='') as f:
            csv.writer(f, **self.csv_writer_settings).writerow(row)
@@ -0,0 +1,103 @@
1
+ """
2
+ Module Data source
3
+
4
+ Data source module must always provide data in type 'dict', with keys of variable names.
5
+ """
6
+
7
+ from abc import ABC, abstractmethod
8
+ import random
9
+
10
+
11
class DataSourceBase(ABC):
    """Base class of data source"""
    def __init__(self):
        # Internal variable for property 'all_variable_names'
        # It should be defined during the initialization, e.g. from a configuration file, from inside the class, or
        # from reading parameters of all devices. Using tuple to ensure the elements are immutable.
        self._all_variable_names: tuple[str, ...] = ()

    @abstractmethod
    def read_data(self) -> dict:
        """
        Read data from source

        This method must be implemented in child classes and will be used by the DataLogger to retrieve data.
        The returned dict maps variable names to their current values.
        """
        pass

    @property
    def all_variable_names(self) -> tuple[str, ...]:
        """
        All possible variable names provided by this data source

        This property returns a tuple containing the names of all variables that this data source can potentially
        provide. A single read_data() call may return only a subset of these names.
        """
        return self._all_variable_names
37
+
38
+
39
class RandomDataSource(DataSourceBase):
    """Data source that emits random floats, with configurable missing keys and missing values"""

    def __init__(self, size: int = 10, key_missing_rate: float = 0.5, value_missing_rate: float = 0.5):
        """
        Random data source to simulate data generation
        :param size: Number of variables to generate
        :param key_missing_rate: Probability of a key being excluded from the final dictionary
        :param value_missing_rate: Probability of assigning None to a value instead of a random float
        :raises ValueError: If a rate lies outside [0.0, 1.0]
        """
        super().__init__()
        # Both rates are probabilities; validate key rate first, then value rate (order matters for the error raised)
        for label, rate in (('key_missing_rate', key_missing_rate), ('value_missing_rate', value_missing_rate)):
            if not (0.0 <= rate <= 1.0):
                raise ValueError(f"{label} '{rate}' must be between 0.0 and 1.0")

        self.size = size
        self.key_missing_rate = key_missing_rate
        self.value_missing_rate = value_missing_rate
        self._all_variable_names = tuple(f'RandData{n}' for n in range(self.size))  # Define all data names

    def read_data(self) -> dict:
        """Generate random data for each variable name, randomly drop some keys, and randomly insert None values"""
        data = {}
        for name in self._all_variable_names:
            if random.random() < self.key_missing_rate:
                continue  # Simulate a missing key
            value_missing = random.random() < self.value_missing_rate
            data[name] = None if value_missing else random.uniform(0.0, 100.0)
        return data
65
+
66
+
67
class RandomStringSource(RandomDataSource):
    """Data source that emits random strings, with configurable missing keys and missing values"""

    def __init__(
            self, size: int = 10, str_length: int = 5, key_missing_rate: float = 0.5, value_missing_rate: float = 0.5):
        """
        Random string source to simulate data generation
        :param size: Number of variables to generate
        :param str_length: Length of each random string
        :param key_missing_rate: Probability of a key being excluded from the final dictionary
        :param value_missing_rate: Probability of assigning None to a value instead of a random string
        """
        super().__init__(size, key_missing_rate, value_missing_rate)
        self.str_length = str_length
        self._all_variable_names = tuple(f'RandStr{n}' for n in range(self.size))  # Re-define all data names

    @staticmethod
    def _random_string(length: int) -> str:
        """Generate random string with defined length"""
        chars = '1234567890AaBbCcDdEeFf'
        return ''.join(random.choice(chars) for _ in range(length))

    def read_data(self) -> dict:
        """Generate random strings for each variable name, randomly drop some keys, and randomly insert None values"""
        data = {}
        for name in self._all_variable_names:
            if random.random() >= self.key_missing_rate:
                value_missing = random.random() < self.value_missing_rate
                data[name] = None if value_missing else self._random_string(self.str_length)
        return data
92
+
93
+
94
if __name__ == "__main__":
    # Demo both random sources: print their variable names and ten sample readings each.
    # (Constructors draw no randomness, so building both up front keeps the output sequence identical.)
    for label, source in (
            ('random data source', RandomDataSource(size=10, key_missing_rate=0.5, value_missing_rate=0.5)),
            ('random string source', RandomStringSource(size=10, str_length=8, key_missing_rate=0.5, value_missing_rate=0.5)),
    ):
        print(f"All variable names of {label}: {source.all_variable_names}")
        for _ in range(10):
            print(source.read_data())
@@ -0,0 +1,50 @@
1
+ """
2
+ Module Data source output
3
+
4
+ Data source output is for system that contains source (for read data) and output (for log data) via one system
5
+ interface, e.g. Beckhoff PLC (ADS interface), MQTT (client interface)
6
+ """
7
+
8
+ from abc import ABC
9
+ from ebcmeasurements.Base import DataSource, DataOutput
10
+ from typing import Optional
11
+ import os
12
+ import logging.config
13
+ # Load logging configuration from file
14
+ logger = logging.getLogger(__name__)
15
+
16
+
17
class DataSourceOutputBase(ABC):
    """Base class of data source output"""
    class SystemDataSource(DataSource.DataSourceBase, ABC):
        """Nested class for data source operation"""
        def __init__(self, system: Optional[object]):
            # system: the shared interface object (e.g. an ADS or MQTT client) used to read data
            super().__init__()
            self.system = system

    class SystemDataOutput(DataOutput.DataOutputBase, ABC):
        """Nested class for data output operation"""
        def __init__(self, system: Optional[object], log_time_required: Optional[bool]):
            # system: the shared interface object used to log data
            # log_time_required is forwarded unchanged to DataOutputBase
            super().__init__(log_time_required)
            self.system = system

    def __init__(self):
        # NOTE(review): presumably child classes assign the concrete interface object to self.system
        # before data_source/data_output are first accessed — confirm with subclasses (e.g. Beckhoff ADS)
        self.system: object = None  # System of data source and data output
        # Backing fields for the lazily created source/output wrappers below
        self._data_source: Optional[DataSourceOutputBase.SystemDataSource] = None
        self._data_output: Optional[DataSourceOutputBase.SystemDataOutput] = None

    @property
    def data_source(self) -> 'DataSourceOutputBase.SystemDataSource':
        """Instance of SystemDataSource, initialized on first access"""
        if self._data_source is None:
            # Lazy initialization with properties; subsequent accesses return the same instance
            self._data_source = self.SystemDataSource(system=self.system)
        return self._data_source

    @property
    def data_output(self) -> 'DataSourceOutputBase.SystemDataOutput':
        """Instance of SystemDataOutput, initialized on first access"""
        if self._data_output is None:
            # Lazy initialization with properties; log_time_required is left as None here
            self._data_output = self.SystemDataOutput(system=self.system, log_time_required=None)
        return self._data_output