pycoustic 0.1.9__py3-none-any.whl → 0.1.11__py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
pycoustic/survey.py CHANGED
@@ -1,6 +1,7 @@
+ import requests
  import pandas as pd
  import numpy as np
- from .weather import WeatherHistory
+ # from .weather import WeatherHistory


  DECIMALS=1
@@ -193,17 +194,17 @@ class Survey:
  pos_summary = []
  # Daytime
  period_headers = ["Daytime"]
- days = log.get_modal(data=log._get_period(data=log.as_interval(t=day_t), period="days"), by_date=by_date, cols=cols)
+ days = log.get_modal(data=log.get_period(data=log.as_interval(t=day_t), period="days"), by_date=by_date, cols=cols)
  days.sort_index(inplace=True)
  pos_summary.append(days)
  # Evening
  if log.is_evening():
  period_headers.append("Evening")
- evenings = log.get_modal(data=log._get_period(data=log.as_interval(t=evening_t), period="evenings"), by_date=by_date, cols=cols)
+ evenings = log.get_modal(data=log.get_period(data=log.as_interval(t=evening_t), period="evenings"), by_date=by_date, cols=cols)
  evenings.sort_index(inplace=True)
  pos_summary.append(evenings)
  # Night time
- nights = log.get_modal(data=log._get_period(data=log.as_interval(t=night_t), period="nights"), by_date=by_date, cols=cols)
+ nights = log.get_modal(data=log.get_period(data=log.as_interval(t=night_t), period="nights"), by_date=by_date, cols=cols)
  nights.sort_index(inplace=True)
  pos_summary.append(nights)
  period_headers.append("Night-time")
@@ -234,17 +235,17 @@ class Survey:
  pos_summary = []
  # Daytime
  period_headers = ["Daytime"]
- days = log.counts(data=log._get_period(data=log.as_interval(t=day_t), period="days"), cols=cols)
+ days = log.counts(data=log.get_period(data=log.as_interval(t=day_t), period="days"), cols=cols)
  days.sort_index(inplace=True)
  pos_summary.append(days)
  # Evening
  if log.is_evening():
  period_headers.append("Evening")
- evenings = log.counts(data=log._get_period(data=log.as_interval(t=evening_t), period="evenings"), cols=cols)
+ evenings = log.counts(data=log.get_period(data=log.as_interval(t=evening_t), period="evenings"), cols=cols)
  evenings.sort_index(inplace=True)
  pos_summary.append(evenings)
  # Night time
- nights = log.counts(data=log._get_period(data=log.as_interval(t=night_t), period="nights"), cols=cols)
+ nights = log.counts(data=log.get_period(data=log.as_interval(t=night_t), period="nights"), cols=cols)
  nights.sort_index(inplace=True)
  pos_summary.append(nights)
  period_headers.append("Night-time")
@@ -276,7 +277,7 @@ class Survey:
  for key in self._logs.keys():
  log = self._logs[key]
  combined_list = []
- maxes = log.get_nth_high_low(n=n, data=log._get_period(data=log.as_interval(t=t), period=period))[["Lmax", "Time"]]
+ maxes = log.get_nth_high_low(n=n, data=log.get_period(data=log.as_interval(t=t), period=period))[["Lmax", "Time"]]
  maxes.sort_index(inplace=True)
  combined_list.append(maxes)
  summary = pd.concat(objs=combined_list, axis=1)
@@ -299,15 +300,15 @@ class Survey:
  for key in self._logs.keys():
  log = self._logs[key]
  # Day
- days = log._get_period(data=log.get_antilogs(), period="days")
+ days = log.get_period(data=log.get_antilogs(), period="days")
  days = days[leq_cols].apply(lambda x: np.round(10*np.log10(np.mean(x)), DECIMALS))
  # Night-time
- nights = log._get_period(data=log.get_antilogs(), period="nights")
+ nights = log.get_period(data=log.get_antilogs(), period="nights")
  nights = nights[leq_cols].apply(lambda x: np.round(10*np.log10(np.mean(x)), DECIMALS))
  df = pd.DataFrame
  # Evening
  if log.is_evening():
- evenings = log._get_period(data=log.get_antilogs(), period="evenings")
+ evenings = log.get_period(data=log.get_antilogs(), period="evenings")
  evenings = evenings[leq_cols].apply(lambda x: np.round(10 * np.log10(np.mean(x)), DECIMALS))
  df = pd.concat([days, evenings, nights], axis=1, keys=["Daytime", "Evening", "Night-time"])
  else:
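
The four hunks above all make the same change: the private helper _get_period on the per-position log objects stored in Survey._logs is now called through the public get_period. A minimal before/after sketch of a direct call, assuming log is one of those log objects and day_t is an interval accepted by its as_interval method:

# pycoustic 0.1.9 - private helper
days = log._get_period(data=log.as_interval(t=day_t), period="days")

# pycoustic 0.1.11 - same arguments, public method
days = log.get_period(data=log.as_interval(t=day_t), period="days")
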
@@ -344,6 +345,101 @@ class Survey:
  index=["Min", "Max", "Mean"]).drop(columns=["dt"]).round(decimals=1)


+
+ appid = ""
+ # with open("tests/openweather_app_id.txt") as f:
+ #     appid = f.readlines()[0]
+
+ w_dict = {
+     "start": "2022-09-16 12:00:00",
+     "end": "2022-09-17 18:00:00",
+     "interval": 6,
+     "api_key": appid,
+     "country": "GB",
+     "postcode": "WC1",
+     "tz": "GB"
+ }
+
+
+ def test_weather_obj(weather_test_dict):
+     hist = WeatherHistory(start=w_dict["start"], end=w_dict["end"], interval=w_dict["interval"],
+                           api_key=w_dict["api_key"], country=w_dict["country"], postcode=w_dict["postcode"],
+                           tz=w_dict["tz"])
+     hist.compute_weather_history()
+     return hist
+
+ #TODO: Make this take the start and end times of a Survey object.
+ #TODO: Implement post codes instead of coordinates
+ #TODO: Implement the WeatherHistory as methods within Survey.
+ class WeatherHistory:
+     def __init__(self):
+         return
+
+     def reinit(self, start=None, end=None, interval=6, api_key="", country="GB", postcode="WC1", tz="",
+                units="metric"):
+         if api_key==None:
+             raise ValueError("API key is missing")
+         if type(start) == str:
+             self._start = dt.datetime.strptime(start, "%Y-%m-%d %H:%M:%S")
+         else:
+             self._start = start
+         if type(end) == str:
+             self._end = dt.datetime.strptime(end, "%Y-%m-%d %H:%M:%S")
+         else:
+             self._end = end
+         self._interval = interval
+         self._api_key = str(api_key)
+         self._lat, self._lon = self.get_latlon(api_key=api_key, country=country, postcode=postcode)
+         self._hist = None
+         self._units = units
+
+     def get_latlon(self, api_key="", country="GB", postcode=""):
+         query = str("http://api.openweathermap.org/geo/1.0/zip?zip=" + postcode + "," + country + "&appid=" + api_key)
+         resp = requests.get(query)
+         return resp.json()["lat"], resp.json()["lon"]
+
+     def _construct_api_call(self, timestamp):
+         base = "https://api.openweathermap.org/data/3.0/onecall/timemachine?"
+         query = str(base + "lat=" + str(self._lat) + "&" + "lon=" + str(self._lon) + "&" + "units=" + self._units + \
+                     "&" + "dt=" + str(timestamp) + "&" + "appid=" + self._api_key)
+         return query
+
+     def _construct_timestamps(self):
+         next_time = (self._start + dt.timedelta(hours=self._interval))
+         timestamps = [int(self._start.timestamp())]
+         while next_time < self._end:
+             timestamps.append(int(next_time.timestamp()))
+             next_time += dt.timedelta(hours=self._interval)
+         return timestamps
+
+     def _make_and_parse_api_call(self, query):
+         response = requests.get(query)
+         # This drops some unwanted cols like lat, lon, timezone and tz offset.
+         resp_dict = response.json()["data"][0]
+         del resp_dict["weather"]  # delete weather key as not useful.
+         # TODO: parse 'weather' nested dict.
+         return resp_dict
+
+     def compute_weather_history(self, drop_cols):
+         # construct timestamps
+         timestamps = self._construct_timestamps()
+         # make calls to API
+         responses = []
+         for ts in timestamps:
+             query = self._construct_api_call(timestamp=ts)
+             response_dict = self._make_and_parse_api_call(query=query)
+             responses.append(pd.Series(response_dict))
+         df = pd.concat(responses, axis=1).transpose()
+         for col in ["dt", "sunrise", "sunset"]:
+             df[col] = df[col].apply(lambda x: dt.datetime.fromtimestamp(int(x)))  # convert timestamp into datetime
+         df.drop(columns=drop_cols, inplace=True)
+         return df
+
+     def get_weather_history(self):
+         return self._hist
+
+
+
  # TODO: Fix this bug in weatherhist
  # survey.weather(api_key=r"eef3f749e018627b70c2ead1475a1a32", postcode="HA8")
  # dt temp pressure humidity clouds wind_speed wind_deg \
@@ -429,4 +525,10 @@ class Survey:
  # File "C:\Users\tonyr\PycharmProjects\pycoustic\.venv2\Lib\site-packages\numpy\_core\_methods.py", line 48, in _amin
  # return umr_minimum(a, axis, None, out, keepdims, initial, where)
  # ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- # TypeError: '<=' not supported between instances of 'dict' and 'dict'
+ # TypeError: '<=' not supported between instances of 'dict' and 'dict'
+
+
+ #TODO: Fix this error:
+ #
+ # C:\Users\tonyr\PycharmProjects\pycoustic\pycoustic\survey.py:316: FutureWarning:
+ # The behavior of pd.concat with len(keys) != len(objs) is deprecated. In a future version this will raise instead of truncating to the smaller of the two sequences
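
The large hunk above embeds a copy of the WeatherHistory helper (plus a test function) directly in survey.py, while the import from .weather is commented out at the top of the file. A minimal usage sketch under stated assumptions: the API key, postcode and drop_cols values are placeholders, and survey.py is assumed to have datetime available as dt, which this diff does not show being imported.

from pycoustic.survey import WeatherHistory    # the class now also lives in survey.py

hist = WeatherHistory()                        # __init__ takes no arguments
hist.reinit(
    start="2022-09-16 12:00:00",               # strings are parsed with "%Y-%m-%d %H:%M:%S"
    end="2022-09-17 18:00:00",
    interval=6,                                # hours between timemachine requests
    api_key="YOUR_OPENWEATHER_KEY",            # placeholder; geocoding and history calls need a real key
    country="GB",
    postcode="WC1",
)
# reinit() uses dt.datetime / dt.timedelta internally, so survey.py needs datetime imported as dt.
weather_df = hist.compute_weather_history(drop_cols=[])   # drop_cols is required; [] keeps every column
print(weather_df.head())

Note that compute_weather_history() returns the DataFrame rather than assigning it to self._hist, so get_weather_history() still returns None after this call.
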
{pycoustic-0.1.9.dist-info → pycoustic-0.1.11.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: pycoustic
- Version: 0.1.9
+ Version: 0.1.11
  Summary:
  Author: thumpercastle
  Author-email: tony.ryb@gmail.com
{pycoustic-0.1.9.dist-info → pycoustic-0.1.11.dist-info}/RECORD RENAMED
@@ -6,9 +6,9 @@ pycoustic/pycoustic_streamlit_gpt5.py,sha256=gpkPPBGwADt9HFI4S7YD1U-TjpLTMVwcBUJ
  pycoustic/streamlit-ai.py,sha256=OZdrQbGwQyVvA_4Q8bTOCZUZGdSlZG9NL9z3f16W-A8,16414
  pycoustic/streamlit-new.py,sha256=AR5dwQinMXugvGcyNvI_W59bfFRGj6E90Fqah9toKto,4885
  pycoustic/streamlit_pycoustic_gpt5_dead.py,sha256=sFUxLkvNUZoh2cVzruqsJJiLIlJxOQQpYYK6oHZfPlM,7309
- pycoustic/survey.py,sha256=6gC2sd0vOusx8bEyCwqmfSR5k04VeV93Ong0OdEVVks,24071
+ pycoustic/survey.py,sha256=GFbesunnfsDMP6rpEmVNBRxFkQ3c0NgHEVn0mGfeQCY,28120
  pycoustic/tkgui.py,sha256=YAy5f_qkXZ3yU8BvB-nIVQX1fYwPs_IkwmDEXHPMAa4,13997
  pycoustic/weather.py,sha256=q9FbDKjY0WaNvaYMHeDk7Bhbq0_Q7ehsTM_vUaCjeAk,3753
- pycoustic-0.1.9.dist-info/METADATA,sha256=2NDXL0ovNkEJKxx-P2ErBkdTHNA1AWL77RFAaKQdI6o,8515
- pycoustic-0.1.9.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
- pycoustic-0.1.9.dist-info/RECORD,,
+ pycoustic-0.1.11.dist-info/METADATA,sha256=ne7ZzFdkEEapdG6P9xl4zMIwUzFTnCcjuCmV32Fhz7s,8516
+ pycoustic-0.1.11.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+ pycoustic-0.1.11.dist-info/RECORD,,