pycoustic 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pycoustic-0.1.0/PKG-INFO +16 -0
- pycoustic-0.1.0/README.md +0 -0
- pycoustic-0.1.0/pycoustic/__init__.py +0 -0
- pycoustic-0.1.0/pycoustic/log.py +358 -0
- pycoustic-0.1.0/pycoustic/survey.py +328 -0
- pycoustic-0.1.0/pyproject.toml +17 -0
pycoustic-0.1.0/PKG-INFO
ADDED
@@ -0,0 +1,16 @@
Metadata-Version: 2.1
Name: pycoustic
Version: 0.1.0
Summary:
Author: thumpercastle
Author-email: tony.ryb@gmail.com
Requires-Python: >=3.10,<4.0
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Requires-Dist: numpy (==2.2.6)
Requires-Dist: openpyxl (==3.1.5)
Requires-Dist: pandas (==2.2.3)
Description-Content-Type: text/markdown
pycoustic-0.1.0/README.md
File without changes
pycoustic-0.1.0/pycoustic/__init__.py
File without changes
pycoustic-0.1.0/pycoustic/log.py
ADDED
@@ -0,0 +1,358 @@
import pandas as pd
import numpy as np
import datetime as dt
import warnings


class Log:
    def __init__(self, path=""):
        """
        The Log class is used to store the measured noise data from one data logger.
        The data must be entered in a .csv file with headings in the specific format "Leq A", "L90 125" etc.
        :param path: the file path for the .csv noise data
        """
        self._filepath = path
        self._master = pd.read_csv(path, index_col="Time", parse_dates=["Time"], dayfirst=True)
        self._master.index = pd.to_datetime(self._master.index)
        self._master = self._master.sort_index(axis=1)
        self._start = self._master.index.min()
        self._end = self._master.index.max()
        self._assign_header()

        # Assign day, evening, night periods
        self._night_start = None
        self._day_start = None
        self._evening_start = None
        self._init_periods()

        # Prepare night-time indices and antilogs
        self._antilogs = self._prep_antilogs()  # Use the antilogs dataframe as input to Leq calculations
        self._master = self._append_night_idx(data=self._master)
        self._antilogs = self._append_night_idx(data=self._antilogs)

        self._decimals = 1

    def _assign_header(self):
        csv_headers = self._master.columns.to_list()
        superheaders = [item.split(" ")[0] for item in csv_headers]
        subheaders = [item.split(" ")[1] for item in csv_headers]
        # Convert numerical subheaders to ints
        for i in range(len(subheaders)):
            try:
                subheaders[i] = float(subheaders[i])
            except Exception:
                continue
        self._master.columns = [superheaders, subheaders]
        self._master.sort_index(axis=1, level=1, inplace=True)

    def _init_periods(self):
        times = {"day": (7, 0), "evening": (23, 0), "night": (23, 0)}
        self._day_start = dt.time(times["day"][0], times["day"][1])
        self._evening_start = dt.time(times["evening"][0], times["evening"][1])
        self._night_start = dt.time(times["night"][0], times["night"][1])

    def _prep_antilogs(self):
        """
        Private method creates a copy dataframe of master, but with dB sound pressure levels presented as antilogs.
        This antilogs dataframe should be used if you want to undertake calculations of Leqs and similar.
        :return:
        """
        return self._master.copy().apply(lambda x: np.power(10, (x / 10)))

    def _append_night_idx(self, data=None):
        """
        Private method appends an additional column of the measurement date and time, but with the early morning
        dates set to the day before.
        e.g.
        the measurement at 16-12-2024 23:57 would stay as is, but
        the measurement at 17-12-2024 00:02 would have a night index of 16-12-2024 00:02
        The logic behind this is that it allows us to process a night-time as one contiguous period, whereas
        Pandas would otherwise treat the two measurements as separate because of their differing dates.
        :param data:
        :return:
        """
        night_indices = data.index.to_list()
        if self._night_start > self._day_start:
            for i in range(len(night_indices)):
                if night_indices[i].time() < self._day_start:
                    night_indices[i] += dt.timedelta(days=-1)
        data["Night idx"] = night_indices
        return data

    def _return_as_night_idx(self, data=None):
        """
        Private method to set the dataframe index as the night_idx. This is used when undertaking data processing for
        night-time periods.
        :param data:
        :return:
        """
        if ("Night idx", "") not in data.columns:
            raise Exception("No night indices in current DataFrame")
        return data.set_index("Night idx")

    def _none_if_zero(self, df):
        if len(df) == 0:
            return None
        else:
            return df

    def _recompute_leq(self, data=None, t="15min", cols=None):
        """
        Private method to recompute shorter Leq measurements as longer ones.
        :param data: Input data (should be in antilog format)
        :param t: The desired Leq period
        :param cols: Which columns of the input data do you wish to recompute?
        :return:
        """
        # Set default mutable args
        if data is None:
            data = self._antilogs
        if cols is None:
            cols = ["Leq", "L90"]
        # Loop through column superheaders and recompute as a longer Leq
        recomputed = pd.DataFrame(columns=data.columns)
        for idx in cols:
            if idx in data.columns:
                recomputed[idx] = data[idx].resample(t).mean().\
                    apply(lambda x: np.round((10 * np.log10(x)), self._decimals))
        return self._none_if_zero(recomputed)

    def _recompute_night_idx(self, data=None, t="15min"):
        """
        Internal method to recompute night index column.
        :param data: input dataframe to be recomputed
        :param t: desired measurement period
        :return: dataframe with night index column recomputed to the desired period
        """
        if data is None:
            raise Exception("No DataFrame provided for night idx")
        if ("Night idx", "") in data.columns:
            data["Night idx"] = data["Night idx"].resample(t).asfreq()
        else:
            data["Night idx"] = self._master["Night idx"].resample(t).asfreq()
        return data

    def _recompute_max(self, data=None, t="15min", pivot_cols=None, hold_spectrum=False):
        """
        Private method to recompute max readings from shorter to longer periods.
        :param data: input data, usually self._master
        :param t: desired measurement period
        :param pivot_cols: how to choose the highest value - this will usually be "Lmax A". This is especially
        important when you want to get specific octave band data for an Lmax event. If you wanted to recompute maxes
        as the events with the highest values at 500 Hz, you could enter [("Lmax", 500)]. Caution: This functionality
        has not been tested
        :param hold_spectrum: if hold_spectrum, the dataframe returned will contain the highest value at each octave
        band over the new measurement period, i.e. like the Lmax Hold setting on a sound level meter.
        If hold_spectrum=false, the dataframe will contain the spectrum for the highest event around the pivot column,
        i.e. the spectrum for that specific LAmax event
        :return: returns a dataframe with the values recomputed to the desired measurement period.
        """
        # Set default mutable args
        if pivot_cols is None:
            pivot_cols = [("Lmax", "A")]
        if data is None:
            data = self._master
        # Loop through column superheaders and recompute over a longer period
        combined = pd.DataFrame(columns=data.columns)
        if hold_spectrum:  # Hold the highest value, in given period per frequency band
            for col in pivot_cols:
                if col in combined.columns:
                    max_hold = data.resample(t)[col[0]].max()
                    combined[col[0]] = max_hold
        else:  # Event spectrum (octave band data corresponding to the highest A-weighted event)
            for col in pivot_cols:
                if col in combined.columns:
                    idx = data[col[0]].groupby(pd.Grouper(freq=t)).max()
                    combined[col[0]] = idx
        return combined

    def _as_multiindex(self, df=None, super=None, name1="Date", name2="Num"):
        subs = df.index.to_list()  # List of subheaders
        # Super will likely be the date
        tuples = [(super, sub) for sub in subs]
        idx = pd.MultiIndex.from_tuples(tuples, names=[name1, name2])
        if isinstance(df, pd.Series):
            df = pd.DataFrame(data=df)
        return df.set_index(idx, inplace=False)

    def _get_period(self, data=None, period="days", night_idx=True):
        """
        Private method to get data for daytime, evening or night-time periods.
        :param data: Input data, usually master
        :param period: string, "days", "evenings" or "nights"
        :param night_idx: Bool. Needs to be True if you want to compute contiguous night-time periods. If False,
        it will consider early morning measurements as part of the following day, i.e. the cut-off becomes midnight.
        :return:
        """
        if data is None:
            data = self._master
        if period == "days":
            return data.between_time(self._day_start, self._evening_start, inclusive="left")
        elif period == "evenings":
            return data.between_time(self._evening_start, self._night_start, inclusive="left")
        elif period == "nights":
            if night_idx:
                data = self._return_as_night_idx(data=data)
            return data.between_time(self._night_start, self._day_start, inclusive="left")

    def _leq_by_date(self, data, cols=None):
        """
        Private method to undertake Leq calculations organised by date. For contiguous night-time periods crossing
        over midnight (e.g. from 23:00 to 07:00), the input data needs to have a night-time index.
        This method is normally used for calculating Leq over a specific daytime, evening or night-time period, hence
        it is usually passed the output of _get_period()
        :param data: Input data. Must be antilogs, and usually with night-time index
        :param cols: Which columns do you wish to recalculate? If ["Leq"] it will calculate for all subcolumns within
        that heading, i.e. all frequency bands and A-weighted. If you want an individual column, use [("Leq", "A")] for
        example.
        :return: A dataframe of the calculated Leq for the data, organised by dates
        """
        if cols is None:
            cols = ["Leq"]
        return data[cols].groupby(data.index.date).mean().apply(lambda x: np.round((10 * np.log10(x)), self._decimals))

    # ###########################---PUBLIC---######################################
    # ss++
    def get_data(self):
        """
        # Returns a dataframe of the loaded csv
        """
        return self._master
    #ss--

    def get_antilogs(self):
        return self._antilogs

    def as_interval(self, data=None, antilogs=None, t="15min", leq_cols=None, max_pivots=None,
                    hold_spectrum=False):
        """
        Returns a dataframe recomputed as longer periods. This implements the private leq and max recalculations
        :param data: input dataframe, usually master
        :param antilogs: antilog dataframe, used for leq calcs
        :param t: desired output period
        :param leq_cols: which Leq columns to include
        :param max_pivots: which value to pivot the Lmax recalculation on
        :param hold_spectrum: True will be Lmax hold, False will be Lmax event
        :return: a dataframe recalculated to the desired period, with the desired columns
        """
        # Set defaults for mutable args
        if data is None:
            data = self._master
        if antilogs is None:
            antilogs = self._antilogs
        if leq_cols is None:
            leq_cols = ["Leq", "L90"]
        if max_pivots is None:
            max_pivots = [("Lmax", "A")]
        leq = self._recompute_leq(data=antilogs, t=t, cols=leq_cols)
        maxes = self._recompute_max(data=data, t=t, pivot_cols=max_pivots, hold_spectrum=hold_spectrum)
        conc = pd.concat([leq, maxes], axis=1).sort_index(axis=1).dropna(axis=1, how="all")
        conc = self._append_night_idx(data=conc)  # Re-append night indices
        return conc.dropna(axis=0, how="all")

    def get_nth_high_low(self, n=10, data=None, pivot_col=None, all_cols=False, high=True):
        """
        Return a dataframe with the nth-highest or nth-lowest values for the specified parameters.
        This is useful for calculating the 10th-highest or 15th-highest Lmax values, but can be used for other purposes
        :param n: The nth-highest or nth-lowest values to return
        :param data: Input dataframe, usually a night-time dataframe with night-time indices
        :param pivot_col: Tuple of strings,
        Which column to use for the highest-lowest computation. Other columns in the row will follow.
        :param all_cols: Perform this operation over all columns?
        :param high: True for high, False for low
        :return: dataframe with the nth-highest or -lowest values for the specified parameters.
        """
        if data is None:
            data = self._master
        if pivot_col is None:
            pivot_col = ("Lmax", "A")
        nth = None
        if high:
            nth = data.sort_values(by=pivot_col, ascending=False)
        if not high:
            nth = data.sort_values(by=pivot_col, ascending=True)
        nth["Time"] = nth.index.time
        if all_cols:
            return nth.groupby(by=nth.index.date).nth(n-1)
        else:
            return nth[[pivot_col[0], "Time"]].groupby(by=nth.index.date).nth(n-1)

    def get_modal(self, data=None, by_date=True, cols=None, round_decimals=True):
        """
        Return a dataframe with the modal values
        :param data: Input dataframe, usually master
        :param by_date: Bool. Group the modal values by date, as opposed to an overall modal value (currently not
        implemented).
        :param cols: List of tuples of the desired columns. e.g. [("L90", "A"), ("Leq", "A")]
        :param round_decimals: Bool. Round the values to 0 decimal places.
        :return: A dataframe with the modal values for the desired columns, either grouped by date or overall.
        """
        if data is None:
            data = self._master
        if round_decimals:
            data = data.round()
        if cols is None:
            cols = [("L90", "A")]
        if by_date:
            dates = np.unique(data.index.date)
            modes_by_date = pd.DataFrame()
            for date in range(len(dates)):
                date_str = dates[date].strftime("%Y-%m-%d")
                mode_by_date = data[cols].loc[date_str].mode()
                mode_by_date = self._as_multiindex(df=mode_by_date, super=date_str)
                modes_by_date = pd.concat([modes_by_date, mode_by_date])
            return modes_by_date
        else:
            return data[cols].mode()

    def counts(self, data=None, cols=None, round_decimals=True):
        if data is None:
            data = self._master
        if round_decimals:
            data = data.round()
        if cols is None:
            cols = [("L90", "A")]
        return data[cols].value_counts()

    def set_periods(self, times=None):
        """
        Set the daytime, night-time and evening periods. To disable evening periods, simply set it the same
        as night-time.
        :param times: A dictionary with strings as keys and integer tuples as values.
        The first value in the tuple represents the hour of the day that period starts at (24hr clock), and the
        second value represents the minutes past the hour.
        e.g. for daytime from 07:00 to 19:00, evening 19:00 to 23:00 and night-time 23:00 to 07:00,
        times = {"day": (7, 0), "evening": (19, 0), "night": (23, 0)}
        NOTES:
        Night-time must cross over midnight. (TBC experimentally).
        Evening must be between daytime and night-time. To
        :return: None.
        """
        if times is None:
            times = {"day": (7, 0), "evening": (23, 0), "night": (23, 0)}
        self._day_start = dt.time(times["day"][0], times["day"][1])
        self._evening_start = dt.time(times["evening"][0], times["evening"][1])
        self._night_start = dt.time(times["night"][0], times["night"][1])
        # Recompute night indices
        self._master.drop(labels="Night idx", axis=1, inplace=True)
        self._antilogs.drop(labels="Night idx", axis=1, inplace=True)
        self._master = self._append_night_idx(data=self._master)
        self._antilogs = self._append_night_idx(data=self._antilogs)

    def get_period_times(self):
        """
        :return: the tuples of period start times.
        """
        return self._day_start, self._evening_start, self._night_start

    def is_evening(self):
        """
        Check if evening periods are enabled.
        :return: True if evening periods are enabled, False otherwise.
        """
        if self._evening_start == self._night_start:
            return False
        else:
            return True
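For orientation, below is a minimal usage sketch of the Log class in this release. Everything specific in it is an assumption made for illustration: the file name, the 5-minute logging interval, the random values and the particular column set are invented; only the "Leq A" / "Lmax A" / "L90 A" heading convention and the method names come from log.py above, and the import path assumes the modules are imported directly (the packaged __init__.py is empty).

import numpy as np
import pandas as pd

from pycoustic.log import Log

# Hypothetical 5-minute survey data written in the "Leq A", "L90 125"-style
# heading convention that Log expects; values are random, for illustration only.
idx = pd.date_range("2024-12-16 22:00", periods=120, freq="5min", name="Time")
rng = np.random.default_rng(0)
pd.DataFrame(
    {
        "Leq A": rng.uniform(45, 60, len(idx)).round(1),
        "Lmax A": rng.uniform(55, 75, len(idx)).round(1),
        "L90 A": rng.uniform(35, 45, len(idx)).round(1),
    },
    index=idx,
).to_csv("demo_log.csv")

log = Log(path="demo_log.csv")            # parses "Time", splits headers, appends night indices
print(log.get_data().head())              # raw measurements as a MultiIndex-columned DataFrame
print(log.as_interval(t="15min").head())  # Leq/L90 recomputed per 15 min, Lmax taken per 15 min
print(log.get_nth_high_low(n=10))         # 10th-highest LAmax (and its time) per date

Note that as_interval() feeds the Leq and L90 columns from the antilogs frame prepared in _prep_antilogs(), i.e. it reports 10*log10 of the mean of 10^(L/10) over each new interval, while Lmax columns are carried across as straight maxima.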
pycoustic-0.1.0/pycoustic/survey.py
ADDED
@@ -0,0 +1,328 @@
import pandas as pd
import numpy as np


class Survey:
    """
    Survey Class is an overarching class which takes multiple Log objects and processes and summarises them together.
    This should be the main interface for user interaction with their survey data.
    """

    # ###########################---PRIVATE---######################################

    def __init__(self):
        self._logs = {}

    def _insert_multiindex(self, df=None, super=None, name1="Position", name2="Date"):
        subs = df.index.to_list()  # List of subheaders (dates)
        # Super should be the position name (key from master dictionary)
        tuples = [(super, sub) for sub in subs]
        idx = pd.MultiIndex.from_tuples(tuples, names=[name1, name2])
        return df.set_index(idx, inplace=False)

    def _insert_header(self, df=None, new_head_list=None, header_idx=None):
        cols = df.columns.to_list()
        new_cols = [list(c) for c in zip(*cols)]
        new_cols.insert(header_idx, new_head_list)
        df.columns = new_cols
        return df

    # ###########################---PUBLIC---######################################

    def set_periods(self, times=None):
        """
        Set the daytime, evening and night-time periods of all Log objects in the Survey.
        To disable evening periods, simply set it the same as night-time.
        :param times: A dictionary with strings as keys and integer tuples as values.
        The first value in the tuple represents the hour of the day that period starts at (24hr clock), and the
        second value represents the minutes past the hour.
        e.g. for daytime from 07:00 to 19:00, evening 19:00 to 23:00 and night-time 23:00 to 07:00,
        times = {"day": (7, 0), "evening": (19, 0), "night": (23, 0)}
        NOTES:
        Night-time must cross over midnight. (TBC experimentally).
        Evening must be between daytime and night-time. To
        :return: None.
        """
        if times is None:
            times = {"day": (7, 0), "evening": (23, 0), "night": (23, 0)}
        for key in self._logs.keys():
            self._logs[key].set_periods(times=times)

    def add_log(self, data=None, name=""):
        """
        Add a Log object to the Survey object.
        :param data: Initialised Log object
        :param name: Name of the position, e.g. "A1"
        :return: None.
        """
        self._logs[name] = data

    def get_periods(self):
        """
        Check the currently-set daytime, evening and night-time periods for each Log object in the Survey.
        :return: Tuples of start times.
        """
        periods = {}
        for key in self._logs.keys():
            periods[key] = self._logs[key].get_period_times()
        return periods

    def resi_summary(self, leq_cols=None, max_cols=None, lmax_n=10, lmax_t="2min"):
        """
        Get a dataframe summarising the parameters relevant to assessment of internal ambient noise levels in
        UK residential property assessments. Daytime and night-time Leqs, and nth-highest Lmax values all presented
        in a succinct table. These will be summarised as per the daytime, evening and night-time periods set (default
        daytime 07:00 to 23:00 and night-time 23:00 to 07:00).
        The date of the Lmax values are presented for the night-time period beginning on that date. i.e. an Lmax
        on 20/12/2024 would have occurred in the night-time period starting on that date and ending the following
        morning.
        :param leq_cols: List of tuples. The columns on which to perform Leq calculations. This can include L90
        columns, or spectral values. e.g. leq_cols = [("Leq", "A"), ("L90", "125")]
        :param max_cols: List of tuples. The columns on which to get the nth-highest values.
        Default max_cols = [("Lmax", "A")]
        :param lmax_n: Int. The nth-highest value for max_cols. Default 10 for 10th-highest.
        :param lmax_t: String. This is the time period over which to compute nth-highest Lmax values.
        e.g. "2min" computes the nth-highest Lmaxes over 2-minute periods. Note that the chosen period must be
        equal to or more than the measurement period. So you cannot measure in 5-minute periods and request 2-minute
        Lmaxes.
        :return: A dataframe presenting a summary of the Leq and Lmax values requested.
        """
        combi = pd.DataFrame()
        if leq_cols is None:
            leq_cols = [("Leq", "A")]
        if max_cols is None:
            max_cols = [("Lmax", "A")]
        for key in self._logs.keys():
            log = self._logs[key]
            combined_list = []
            # Day
            days = log._leq_by_date(log._get_period(data=log.get_antilogs(), period="days"), cols=leq_cols)
            days.sort_index(inplace=True)
            combined_list.append(days)
            period_headers = ["Daytime" for i in range(len(leq_cols))]
            # Evening
            if log.is_evening():
                evenings = log._leq_by_date(log._get_period(data=log.get_antilogs(), period="evenings"), cols=leq_cols)
                evenings.sort_index(inplace=True)
                combined_list.append(evenings)
                for i in range(len(leq_cols)):
                    period_headers.append("Evening")
            # Night Leq
            nights = log._leq_by_date(log._get_period(data=log.get_antilogs(), period="nights"), cols=leq_cols)
            nights.sort_index(inplace=True)
            combined_list.append(nights)
            for i in range(len(leq_cols)):
                period_headers.append("Night-time")
            # Night max
            maxes = log.as_interval(t=lmax_t)
            maxes = log._get_period(data=maxes, period="nights", night_idx=True)
            maxes = log.get_nth_high_low(n=lmax_n, data=maxes)[max_cols]
            maxes.sort_index(inplace=True)
            # +++
            # SS Feb2025 - Code changed to prevent exception
            #maxes.index = maxes.index.date
            try:
                maxes.index = pd.to_datetime(maxes.index)
                maxes.index = maxes.index.date
            except Exception as e:
                print(f"Error converting index to date: {e}")
            # SSS ---
            maxes.index.name = None
            combined_list.append(maxes)
            for i in range(len(max_cols)):
                period_headers.append("Night-time")
            summary = pd.concat(objs=combined_list, axis=1)
            summary = self._insert_multiindex(df=summary, super=key)
            combi = pd.concat(objs=[combi, summary], axis=0)
        combi = self._insert_header(df=combi, new_head_list=period_headers, header_idx=0)
        return combi

    def modal(self, cols=None, by_date=False, day_t="60min", evening_t="60min", night_t="15min"):
        """
        Get a dataframe summarising Modal L90 values for each time period, as suggested by BS 4142:2014.
        Currently, this method rounds the values to 0 decimal places by default and there is no alternative
        implementation.
        Note that this function will estimate L90s as a longer value by performing an Leq computation on them.
        The measured data in Logs must be smaller than or equal to the desired period, i.e. you can't measure in 15-
        minute periods and request 5-minute modal values.
        :param cols: List of tuples of the columns desired. This does not have to be L90s, but can be any column.
        :param by_date: Bool. If True, group the modal values by date. If False, present one modal value for each
        period.
        :param day_t: String. Measurement period T. i.e. daytime measurements will compute modal values of
        L90,60min by default.
        :param evening_t: String. Measurement period T. i.e. evening measurements will compute modal values of
        L90,60min by default, unless evenings are disabled (which they are by default).
        :param night_t: Measurement period T. i.e. night-time measurements will compute modal values of
        L90,15min by default.
        :return: A dataframe of modal values for each time period.
        """
        if cols is None:
            cols = [("L90", "A")]
        combi = pd.DataFrame()
        period_headers = []
        for key in self._logs.keys():
            # Key is the name of the measurement position
            log = self._logs[key]
            pos_summary = []
            # Daytime
            period_headers = ["Daytime"]
            days = log.get_modal(data=log._get_period(data=log.as_interval(t=day_t), period="days"), by_date=by_date, cols=cols)
            days.sort_index(inplace=True)
            pos_summary.append(days)
            # Evening
            if log.is_evening():
                period_headers.append("Evening")
                evenings = log.get_modal(data=log._get_period(data=log.as_interval(t=evening_t), period="evenings"), by_date=by_date, cols=cols)
                evenings.sort_index(inplace=True)
                pos_summary.append(evenings)
            # Night time
            nights = log.get_modal(data=log._get_period(data=log.as_interval(t=night_t), period="nights"), by_date=by_date, cols=cols)
            nights.sort_index(inplace=True)
            pos_summary.append(nights)
            period_headers.append("Night-time")
            pos_df = pd.concat(pos_summary, axis=1)
            pos_df = self._insert_multiindex(pos_df, super=key)
            combi = pd.concat([combi, pos_df], axis=0)
        combi = self._insert_header(df=combi, new_head_list=period_headers, header_idx=0)
        return combi

    def counts(self, cols=None, day_t="60min", evening_t="60min", night_t="15min"):
        if cols is None:
            cols = [("L90", "A")]
        combi = pd.DataFrame()
        period_headers = []
        for key in self._logs.keys():
            # Key is the name of the measurement position
            log = self._logs[key]
            pos_summary = []
            # Daytime
            period_headers = ["Daytime"]
            days = log.counts(data=log._get_period(data=log.as_interval(t=day_t), period="days"), cols=cols)
            days.sort_index(inplace=True)
            pos_summary.append(days)
            # Evening
            if log.is_evening():
                period_headers.append("Evening")
                evenings = log.counts(data=log._get_period(data=log.as_interval(t=evening_t), period="evenings"), cols=cols)
                evenings.sort_index(inplace=True)
                pos_summary.append(evenings)
            # Night time
            nights = log.counts(data=log._get_period(data=log.as_interval(t=night_t), period="nights"), cols=cols)
            nights.sort_index(inplace=True)
            pos_summary.append(nights)
            period_headers.append("Night-time")
            pos_df = pd.concat(pos_summary, axis=1)
            pos_df = self._insert_multiindex(pos_df, super=key)
            combi = pd.concat([combi, pos_df], axis=0)
        combi = self._insert_header(df=combi, new_head_list=period_headers, header_idx=0)
        return combi

    def lmax_spectra(self, n=10, t="2min", period="nights"):
        """
        Get spectral data for the nth-highest Lmax values during a given time period.
        This computes Lmax Event spectra. Lmax Hold spectra has not yet been implemented.
        Assumptions and inputs as per Survey.resi_summary() method.
        IMPORTANT: The dates of the Lmax values are presented for the night-time period beginning on that date.
        This means that for early morning timings, the date is behind by one day.
        e.g. an Lmax presented as occurring at 20/12/2024 at 01:22 would have occurred at 21/12/2024 at 01:22.
        :param n: Int. Nth-highest Lmax. Default 10th-highest.
        :param t: String. This is the time period over which to compute nth-highest Lmax values.
        e.g. "2min" computes the nth-highest Lmaxes over 2-minute periods. Note that the chosen period must be
        equal to or more than the measurement period. So you cannot measure in 5-minute periods and request 2-minute
        Lmaxes.
        :param period: String. "days", "evenings" or "nights"
        :return: Dataframe of nth-highest Lmax Event spectra.
        """
        combi = pd.DataFrame()
        # TODO: The night-time timestamp on this is sometimes out by a minute.
        for key in self._logs.keys():
            log = self._logs[key]
            combined_list = []
            maxes = log.get_nth_high_low(n=n, data=log._get_period(data=log.as_interval(t=t), period=period))[["Lmax", "Time"]]
            maxes.sort_index(inplace=True)
            combined_list.append(maxes)
            summary = pd.concat(objs=combined_list, axis=1)
            summary = self._insert_multiindex(df=summary, super=key)
            combi = pd.concat(objs=[combi, summary], axis=0)
        return combi

    # TODO: get_lowest_l90

    def leq_spectra(self, leq_cols=None):
        """
        Compute Leqs over daytime, evening and night-time periods.
        This is an overall Leq, and does not group Leqs by date.
        :param leq_cols: List of strings or List of Tuples.
        For all Leq columns, use ["Leq"]. For specific columns, use list of tuples [("Leq", "A"), ("Leq", 125)]
        :return: A dataframe with a continuous Leq computation across dates, for each time period.
        """
        #TODO: C:\Users\tonyr\PycharmProjects\src\tests.py:674: FutureWarning: The behavior of pd.concat with len(keys) != len(objs) is deprecated. In a future version this will raise instead of truncating to the smaller of the two sequences combi = pd.concat(all_pos, axis=1, keys=["UA1", "UA2"])
        all_pos = []
        if leq_cols is None:
            leq_cols = ["Leq"]
        for key in self._logs.keys():
            log = self._logs[key]
            # Day
            days = log._get_period(data=log.get_antilogs(), period="days")
            days = days[leq_cols].apply(lambda x: np.round(10*np.log10(np.mean(x)), DECIMALS))
            # Night-time
            nights = log._get_period(data=log.get_antilogs(), period="nights")
            nights = nights[leq_cols].apply(lambda x: np.round(10*np.log10(np.mean(x)), DECIMALS))
            df = pd.DataFrame
            # Evening
            if log.is_evening():
                evenings = log._get_period(data=log.get_antilogs(), period="evenings")
                evenings = evenings[leq_cols].apply(lambda x: np.round(10 * np.log10(np.mean(x)), DECIMALS))
                df = pd.concat([days, evenings, nights], axis=1, keys=["Daytime", "Evening", "Night-time"])
            else:
                df = pd.concat([days, nights], axis=1, keys=["Daytime", "Night-time"])
            all_pos.append(df)
        combi = pd.concat(all_pos, axis=1, keys=["UA1", "UA2"])
        combi = combi.transpose()
        return combi

    # def typical_leq_spectra(self, leq_cols=None):
    #     """
    #     DEPRECATED 2025/06/05. Replaced by .leq_spectra() **TT**
    #     Compute Leqs over daytime, evening and night-time periods.
    #     This is an overall Leq, and does not group Leqs by date.
    #     :param leq_cols: List of strings or List of Tuples.
    #     For all Leq columns, use ["Leq"]. For specific columns, use list of tuples [("Leq", "A"), ("Leq", 125)]
    #     :return: A dataframe with a continuous Leq computation across dates, for each time period.
    #     """
    #     combi = pd.DataFrame()
    #     if leq_cols is None:
    #         leq_cols = ["Leq"]
    #     for key in self._logs.keys():
    #         log = self._logs[key]
    #         combined_list = []
    #         # Day
    #         days = log._get_period(data=log.get_antilogs(), period="days")
    #         days = days[leq_cols].apply(lambda x: np.round(10*np.log10(np.mean(x)), DECIMALS))
    #         #days.sort_index(inplace=True)
    #         combined_list.append(days)
    #         period_headers = ["Daytime" for i in range(len(leq_cols))]
    #         # Evening
    #         if log.is_evening():
    #             evenings = log._get_period(data=log.get_antilogs(), period="evenings")
    #             evenings = evenings[leq_cols].apply(lambda x: np.round(10*np.log10(np.mean(x)), DECIMALS))
    #             evenings.sort_index(inplace=True)
    #             combined_list.append(evenings)
    #             for i in range(len(leq_cols)):
    #                 period_headers.append("Evening")
    #         # Night Leq
    #         nights = log._get_period(data=log.get_antilogs(), period="nights")
    #         nights = nights[leq_cols].apply(lambda x: np.round(10*np.log10(np.mean(x)), DECIMALS))
    #         nights.sort_index(inplace=True)
    #         combined_list.append(nights)
    #         for i in range(len(leq_cols)):
    #             period_headers.append("Night-time")
    #         summary = pd.concat(objs=combined_list, axis=1)
    #         summary = self._insert_multiindex(df=summary, super=key)
    #         combi = pd.concat(objs=[combi, summary], axis=0)
    #         new_head_dict = {}
    #         for i in range(len(period_headers)):
    #             new_head_dict[i] = period_headers[i]
    #         combi.rename(columns=new_head_dict, inplace=True)
    #         #combi = combi.transpose()
    #     return combi
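A minimal sketch of how the Survey class above might be driven. The CSV paths and position names ("A1", "A2") are placeholders; the period dictionary is the example given in the set_periods docstring, and the column tuples match the defaults used by resi_summary() and modal().

from pycoustic.log import Log
from pycoustic.survey import Survey

# Hypothetical measurement positions; the CSV paths are placeholders.
survey = Survey()
survey.add_log(data=Log(path="position_a1.csv"), name="A1")
survey.add_log(data=Log(path="position_a2.csv"), name="A2")

# Optionally enable a 19:00-23:00 evening period (evenings are disabled by default
# because the evening and night start times coincide).
survey.set_periods(times={"day": (7, 0), "evening": (19, 0), "night": (23, 0)})

# Daytime/evening/night-time Leq by date plus the 10th-highest night-time LAmax
# taken over 2-minute intervals, per position.
print(survey.resi_summary(leq_cols=[("Leq", "A")], max_cols=[("Lmax", "A")], lmax_n=10, lmax_t="2min"))

# Modal background (L90) values per period, as suggested by BS 4142:2014.
print(survey.modal(cols=[("L90", "A")], by_date=False))

resi_summary() stacks one block of rows per position (via _insert_multiindex) and prepends a Daytime/Evening/Night-time level to the column header (via _insert_header), which is where the summary table layout comes from.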
pycoustic-0.1.0/pyproject.toml
ADDED
@@ -0,0 +1,17 @@
[tool.poetry]
name = "pycoustic"
version = "0.1.0"
description = ""
authors = ["thumpercastle <tony.ryb@gmail.com>"]
readme = "README.md"

[tool.poetry.dependencies]
python = "^3.10"
pandas = "2.2.3"
numpy = "2.2.6"
openpyxl = "3.1.5"


[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"