ecopipeline 0.10.2.tar.gz → 0.11.0.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ecopipeline-0.10.2/src/ecopipeline.egg-info → ecopipeline-0.11.0}/PKG-INFO +1 -1
- {ecopipeline-0.10.2 → ecopipeline-0.11.0}/setup.cfg +1 -1
- {ecopipeline-0.10.2 → ecopipeline-0.11.0}/src/ecopipeline/__init__.py +1 -0
- ecopipeline-0.11.0/src/ecopipeline/utils/NOAADataDownloader.py +498 -0
- ecopipeline-0.11.0/src/ecopipeline/utils/__init__.py +2 -0
- {ecopipeline-0.10.2 → ecopipeline-0.11.0/src/ecopipeline.egg-info}/PKG-INFO +1 -1
- {ecopipeline-0.10.2 → ecopipeline-0.11.0}/src/ecopipeline.egg-info/SOURCES.txt +1 -0
- ecopipeline-0.10.2/src/ecopipeline/utils/__init__.py +0 -1
- {ecopipeline-0.10.2 → ecopipeline-0.11.0}/LICENSE +0 -0
- {ecopipeline-0.10.2 → ecopipeline-0.11.0}/README.md +0 -0
- {ecopipeline-0.10.2 → ecopipeline-0.11.0}/pyproject.toml +0 -0
- {ecopipeline-0.10.2 → ecopipeline-0.11.0}/setup.py +0 -0
- {ecopipeline-0.10.2 → ecopipeline-0.11.0}/src/ecopipeline/event_tracking/__init__.py +0 -0
- {ecopipeline-0.10.2 → ecopipeline-0.11.0}/src/ecopipeline/event_tracking/event_tracking.py +0 -0
- {ecopipeline-0.10.2 → ecopipeline-0.11.0}/src/ecopipeline/extract/__init__.py +0 -0
- {ecopipeline-0.10.2 → ecopipeline-0.11.0}/src/ecopipeline/extract/extract.py +0 -0
- {ecopipeline-0.10.2 → ecopipeline-0.11.0}/src/ecopipeline/load/__init__.py +0 -0
- {ecopipeline-0.10.2 → ecopipeline-0.11.0}/src/ecopipeline/load/load.py +0 -0
- {ecopipeline-0.10.2 → ecopipeline-0.11.0}/src/ecopipeline/transform/__init__.py +0 -0
- {ecopipeline-0.10.2 → ecopipeline-0.11.0}/src/ecopipeline/transform/bayview.py +0 -0
- {ecopipeline-0.10.2 → ecopipeline-0.11.0}/src/ecopipeline/transform/lbnl.py +0 -0
- {ecopipeline-0.10.2 → ecopipeline-0.11.0}/src/ecopipeline/transform/transform.py +0 -0
- {ecopipeline-0.10.2 → ecopipeline-0.11.0}/src/ecopipeline/utils/ConfigManager.py +0 -0
- {ecopipeline-0.10.2 → ecopipeline-0.11.0}/src/ecopipeline/utils/unit_convert.py +0 -0
- {ecopipeline-0.10.2 → ecopipeline-0.11.0}/src/ecopipeline.egg-info/dependency_links.txt +0 -0
- {ecopipeline-0.10.2 → ecopipeline-0.11.0}/src/ecopipeline.egg-info/requires.txt +0 -0
- {ecopipeline-0.10.2 → ecopipeline-0.11.0}/src/ecopipeline.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
 [metadata]
 name = ecopipeline
-version = 0.10.2
+version = 0.11.0
 authors = ["Carlos Bello, <bellocarlos@seattleu.edu>, Emil Fahrig <fahrigemil@seattleu.edu>, Casey Mang <cmang@seattleu.edu>, Julian Harris <harrisjulian@seattleu.edu>, Roger Tram <rtram@seattleu.edu>, Nolan Price <nolan@ecotope.com>"]
 description = Contains functions for use in Ecotope Datapipelines
 long_description = file: README.md
@@ -0,0 +1,498 @@
+import requests
+import pandas as pd
+# from datetime import datetime, timedelta
+# import os
+# import gzip
+# import urllib.request
+from io import StringIO
+
+class NOAADataDownloader:
+    def __init__(self, station_code, api_token=None):
+        """
+        Initialize downloader for a specific weather station
+
+        Args:
+            station_code (str): Airport code (e.g., 'KLAX', 'LAX', 'JFK', 'ORD')
+            api_token (str, optional): NOAA API token for daily data access
+        """
+        self.station_code = station_code.upper().strip()
+        self.api_token = api_token
+        self.base_url = "https://www.ncdc.noaa.gov/cdo-web/api/v2/"
+
+        # Clean airport code - add K if not present for US airports
+        if len(self.station_code) == 3 and not self.station_code.startswith('K'):
+            self.station_code = 'K' + self.station_code
+
+        # Find station information
+        self.station_info = self._find_station_info()
+
+        if not self.station_info:
+            raise ValueError(f"Could not find weather station for {station_code}")
+
+        print(f"Initialized downloader for: {self.station_info['name']}")
+        if self.station_info.get('usaf') and self.station_info.get('wban'):
+            print(f"ISD Station ID: {self.station_info['usaf']}-{self.station_info['wban']}")
+        if self.station_info.get('ghcn_id'):
+            print(f"GHCN-D Station ID: {self.station_info['ghcn_id']}")
+
+    def _find_station_info(self):
+        """Find station information for the given airport code"""
+
+        # First try common stations mapping
+        common_stations = self._get_common_stations()
+        if self.station_code in common_stations:
+            return common_stations[self.station_code]
+
+        # Try searching ISD station history
+        isd_station = self._search_isd_stations()
+        if isd_station:
+            return isd_station
+
+        # Try API search if token available
+        if self.api_token:
+            api_station = self._search_api_stations()
+            if api_station:
+                return api_station
+
+        return None
+
+    def _get_common_stations(self):
+        """Return mapping of common airport codes to station information"""
+        return {
+            'KLAX': {
+                'name': 'LOS ANGELES INTERNATIONAL AIRPORT',
+                'usaf': '722950',
+                'wban': '23174',
+                'ghcn_id': 'GHCND:USW00023174',
+                'latitude': 33.938,
+                'longitude': -118.389,
+                'elevation': 32.0
+            },
+            'KJFK': {
+                'name': 'JOHN F KENNEDY INTERNATIONAL AIRPORT',
+                'usaf': '744860',
+                'wban': '94789',
+                'ghcn_id': 'GHCND:USW00094789',
+                'latitude': 40.640,
+                'longitude': -73.779,
+                'elevation': 3.4
+            },
+            'KORD': {
+                'name': 'CHICAGO OHARE INTERNATIONAL AIRPORT',
+                'usaf': '725300',
+                'wban': '94846',
+                'ghcn_id': 'GHCND:USW00094846',
+                'latitude': 41.995,
+                'longitude': -87.934,
+                'elevation': 201.5
+            },
+            'KDEN': {
+                'name': 'DENVER INTERNATIONAL AIRPORT',
+                'usaf': '725650',
+                'wban': '03017',
+                'ghcn_id': 'GHCND:USW00003017',
+                'latitude': 39.833,
+                'longitude': -104.65,
+                'elevation': 1640.0
+            },
+            'KATL': {
+                'name': 'HARTSFIELD JACKSON ATLANTA INTERNATIONAL AIRPORT',
+                'usaf': '722190',
+                'wban': '13874',
+                'ghcn_id': 'GHCND:USW00013874',
+                'latitude': 33.640,
+                'longitude': -84.427,
+                'elevation': 308.5
+            },
+            'KMIA': {
+                'name': 'MIAMI INTERNATIONAL AIRPORT',
+                'usaf': '722020',
+                'wban': '12839',
+                'ghcn_id': 'GHCND:USW00012839',
+                'latitude': 25.793,
+                'longitude': -80.290,
+                'elevation': 11.0
+            },
+            'KSEA': {
+                'name': 'SEATTLE TACOMA INTERNATIONAL AIRPORT',
+                'usaf': '727930',
+                'wban': '24233',
+                'ghcn_id': 'GHCND:USW00024233',
+                'latitude': 47.449,
+                'longitude': -122.309,
+                'elevation': 131.1
+            },
+            'KBOS': {
+                'name': 'BOSTON LOGAN INTERNATIONAL AIRPORT',
+                'usaf': '725090',
+                'wban': '14739',
+                'ghcn_id': 'GHCND:USW00014739',
+                'latitude': 42.361,
+                'longitude': -71.020,
+                'elevation': 6.1
+            },
+            'KPHX': {
+                'name': 'PHOENIX SKY HARBOR INTERNATIONAL AIRPORT',
+                'usaf': '722780',
+                'wban': '23183',
+                'ghcn_id': 'GHCND:USW00023183',
+                'latitude': 33.434,
+                'longitude': -112.008,
+                'elevation': 337.1
+            },
+            'KLAS': {
+                'name': 'LAS VEGAS MCCARRAN INTERNATIONAL AIRPORT',
+                'usaf': '723860',
+                'wban': '23169',
+                'ghcn_id': 'GHCND:USW00023169',
+                'latitude': 36.080,
+                'longitude': -115.152,
+                'elevation': 664.1
+            }
+        }
+
+    def _search_isd_stations(self):
+        """Search ISD station history for the airport"""
+        try:
+            url = "https://www.ncei.noaa.gov/data/global-hourly/doc/isd-history.csv"
+            response = requests.get(url, timeout=10)
+            response.raise_for_status()
+
+            df = pd.read_csv(StringIO(response.text))
+
+            # Search for airport code in station name
+            search_terms = [
+                self.station_code.replace('K', ''),  # LAX from KLAX
+                self.station_code,  # KLAX
+                self.station_code + ' ',  # Exact match with space
+            ]
+
+            for term in search_terms:
+                mask = df['STATION NAME'].str.contains(term, case=False, na=False)
+                matches = df[mask]
+
+                if not matches.empty:
+                    # Take the first match with recent data
+                    best_match = matches.iloc[0]
+
+                    return {
+                        'name': best_match['STATION NAME'],
+                        'usaf': str(best_match['USAF']).zfill(6),
+                        'wban': str(best_match['WBAN']).zfill(5),
+                        'country': best_match['CTRY'],
+                        'state': best_match.get('STATE', ''),
+                        'latitude': best_match['LAT'],
+                        'longitude': best_match['LON'],
+                        'elevation': best_match['ELEV(M)'],
+                        'begin_date': str(best_match['BEGIN']),
+                        'end_date': str(best_match['END'])
+                    }
+
+            return None
+
+        except Exception as e:
+            print(f"ISD search failed: {e}")
+            return None
+
+    def _search_api_stations(self):
+        """Search for stations using NOAA API"""
+        if not self.api_token:
+            return None
+
+        try:
+            url = f"{self.base_url}stations"
+            params = {'limit': 100, 'format': 'json'}
+            headers = {"token": self.api_token}
+
+            response = requests.get(url, params=params, headers=headers, timeout=10)
+            response.raise_for_status()
+
+            data = response.json()
+            if 'results' in data:
+                search_terms = [self.station_code.replace('K', ''), self.station_code]
+
+                for station in data['results']:
+                    name = station.get('name', '').upper()
+                    for term in search_terms:
+                        if term in name:
+                            return {
+                                'name': station.get('name'),
+                                'ghcn_id': station.get('id'),
+                                'latitude': station.get('latitude'),
+                                'longitude': station.get('longitude'),
+                                'elevation': station.get('elevation'),
+                                'mindate': station.get('mindate'),
+                                'maxdate': station.get('maxdate')
+                            }
+
+            return None
+
+        except Exception as e:
+            print(f"API search failed: {e}")
+            return None
+
+    def get_station_info(self):
+        """Return station information"""
+        return self.station_info.copy()
+
+    # def download_hourly_data(self, start_date, end_date, data_types=None):
+    #     """
+    #     Download hourly weather data using NOAA's data access API
+
+    #     Args:
+    #         start_date (str or pd.Timestamp): Start date in YYYY-MM-DD format or pandas Timestamp or datetime
+    #         end_date (str or pd.Timestamp): End date in YYYY-MM-DD format or pandas Timestamp or datetime
+    #         data_types (list, optional): List of data types to download
+
+    #     Returns:
+    #         pandas.DataFrame: Hourly weather data
+    #     """
+    #     if not (self.station_info.get('usaf') and self.station_info.get('wban')):
+    #         raise ValueError("Station does not have ISD identifiers for hourly data")
+
+    #     # Convert pd.Timestamp to string format if needed
+    #     if isinstance(start_date, pd.Timestamp):
+    #         start_date = start_date.strftime('%Y-%m-%d')
+    #     elif hasattr(start_date, 'strftime'):  # datetime.datetime or similar
+    #         start_date = start_date.strftime('%Y-%m-%d')
+
+    #     if isinstance(end_date, pd.Timestamp):
+    #         end_date = end_date.strftime('%Y-%m-%d')
+    #     elif hasattr(end_date, 'strftime'):  # datetime.datetime or similar
+    #         end_date = end_date.strftime('%Y-%m-%d')
+
+    #     # Create station ID in format expected by the API
+    #     station_id = f"{self.station_info['usaf']}{self.station_info['wban']}"
+    #     # station_id = "USW00023174"#"USC00457180"
+    #     # print("station_id is ",station_id)
+    #     # Default data types for hourly weather data
+    #     if not data_types:
+    #         data_types = [
+    #             'TMP',  # Temperature
+    #             'DEW',  # Dew point
+    #             'SLP',  # Sea level pressure
+    #             'WND',  # Wind direction and speed
+    #             'VIS',  # Visibility
+    #             'AA1'   # Precipitation (if available)
+    #         ]
+
+    #     # NOAA's data access API endpoint
+    #     base_url = "https://www.ncei.noaa.gov/access/services/data/v1"
+
+    #     params = {
+    #         'dataset': 'global-hourly',
+    #         # 'dataTypes': 'TMP',#','.join(data_types),
+    #         'stations': station_id,
+    #         'startDate': start_date,
+    #         'endDate': end_date,
+    #         'format': 'json',
+    #         'includeAttributes': 'true',
+    #         'includeStationName': 'true',
+    #         'includeStationLocation': 'true'
+    #     }
+
+    #     try:
+    #         print(f"Downloading hourly data from {start_date} to {end_date}...")
+    #         print(f"Station: {station_id} ({self.station_info.get('name', 'Unknown')})")
+    #         full_url = requests.Request('GET', base_url, params=params).prepare().url
+    #         print(f"API Request URL:")
+    #         print(f"{full_url}")
+    #         print()
+    #         # https://www.ncei.noaa.gov/access/services/data/v1?dataset=global-hourly
+    #         # &dataTypes=TMP%2CDEW%2CSLP%2CWND%2CVIS%2CAA1&stations=USW00023174&startDate=2025-08-26&endDate=2025-09-18&format=json
+    #         # &includeAttributes=true&includeStationName=true&includeStationLocation=true
+
+    #         # https://www.ncei.noaa.gov/access/services/data/v1?dataset=global-summary-of-the-year
+    #         # &dataTypes=DP01,DP05,DP10,DSND,DSNW,DT00,DT32,DX32,DX70,DX90,SNOW,PRCP&stations=ASN00084027&startDate=1952-01-01&endDate=1970-12-31&includeAttributes=true&format=pdf
+
+    #         response = requests.get(base_url, params=params, timeout=60)
+    #         response.raise_for_status()
+
+    #         # Parse JSON response
+    #         data = response.json()
+
+    #         if not data:
+    #             print("No data returned from API")
+    #             return pd.DataFrame()
+
+    #         # Convert to DataFrame
+    #         df = pd.DataFrame(data)
+
+    #         if df.empty:
+    #             print("No hourly data found for the specified parameters")
+    #             return pd.DataFrame()
+
+    #         # Process the data
+    #         df = self._process_hourly_data(df)
+
+    #         print(f"Successfully downloaded {len(df)} hourly records")
+    #         return df
+
+    #     except requests.exceptions.RequestException as e:
+    #         print(f"API request failed: {e}")
+    #         if hasattr(e, 'response') and e.response is not None:
+    #             print(f"Response status: {e.response.status_code}")
+    #             print(f"Response text: {e.response.text[:500]}...")
+    #         return pd.DataFrame()
+    #     except Exception as e:
+    #         print(f"Failed to download hourly data: {e}")
+    #         return pd.DataFrame()
+
+    # def _process_hourly_data(self, df):
+    #     """Process and clean hourly data from NOAA API"""
+    #     try:
+    #         # Convert DATE to datetime
+    #         if 'DATE' in df.columns:
+    #             df['datetime'] = pd.to_datetime(df['DATE'], errors='coerce')
+    #             df = df.dropna(subset=['datetime'])
+    #             df = df.sort_values('datetime')
+
+    #         # Process temperature data (convert tenths of degrees C to C)
+    #         if 'TMP' in df.columns:
+    #             df['temperature_c'] = pd.to_numeric(df['TMP'], errors='coerce') / 10
+    #             df['temperature_f'] = df['temperature_c'] * 9/5 + 32
+
+    #         # Process dew point data
+    #         if 'DEW' in df.columns:
+    #             df['dewpoint_c'] = pd.to_numeric(df['DEW'], errors='coerce') / 10
+    #             df['dewpoint_f'] = df['dewpoint_c'] * 9/5 + 32
+
+    #         # Process sea level pressure (convert tenths of hPa to hPa)
+    #         if 'SLP' in df.columns:
+    #             df['pressure_hpa'] = pd.to_numeric(df['SLP'], errors='coerce') / 10
+
+    #         # Process wind data - format is typically "999,9" (direction,speed)
+    #         if 'WND' in df.columns:
+    #             wind_data = df['WND'].astype(str)
+
+    #             # Extract wind direction and speed
+    #             wind_direction = []
+    #             wind_speed = []
+
+    #             for wind_str in wind_data:
+    #                 try:
+    #                     if ',' in wind_str:
+    #                         dir_str, speed_str = wind_str.split(',')[:2]
+
+    #                         # Wind direction (degrees)
+    #                         direction = int(dir_str) if dir_str != '999' else None
+    #                         wind_direction.append(direction)
+
+    #                         # Wind speed (tenths of m/s to m/s)
+    #                         speed = float(speed_str) / 10 if speed_str != '9999' else None
+    #                         wind_speed.append(speed)
+    #                     else:
+    #                         wind_direction.append(None)
+    #                         wind_speed.append(None)
+    #                 except (ValueError, IndexError):
+    #                     wind_direction.append(None)
+    #                     wind_speed.append(None)
+
+    #             df['wind_direction'] = wind_direction
+    #             df['wind_speed_mps'] = wind_speed
+    #             df['wind_speed_kmh'] = pd.Series(wind_speed) * 3.6
+    #             df['wind_speed_mph'] = pd.Series(wind_speed) * 2.237
+
+    #         # Process visibility (meters)
+    #         if 'VIS' in df.columns:
+    #             df['visibility_m'] = pd.to_numeric(df['VIS'], errors='coerce')
+    #             df['visibility_km'] = df['visibility_m'] / 1000
+    #             df['visibility_mi'] = df['visibility_m'] / 1609.34
+
+    #         # Add station information columns
+    #         if 'STATION' in df.columns:
+    #             df['station_id'] = df['STATION']
+
+    #         if 'NAME' in df.columns:
+    #             df['station_name'] = df['NAME']
+
+    #         if 'LATITUDE' in df.columns:
+    #             df['latitude'] = pd.to_numeric(df['LATITUDE'], errors='coerce')
+
+    #         if 'LONGITUDE' in df.columns:
+    #             df['longitude'] = pd.to_numeric(df['LONGITUDE'], errors='coerce')
+
+    #         if 'ELEVATION' in df.columns:
+    #             df['elevation_m'] = pd.to_numeric(df['ELEVATION'], errors='coerce')
+
+    #         return df
+
+    #     except Exception as e:
+    #         print(f"Error processing hourly data: {e}")
+    #         return df
+
+
+    def download_daily_TAVG_data(self, start_date, end_date, convert_to_fahrenheit=True):
+        """
+        Download daily Average Temperature data using NOAA API
+
+        Args:
+            start_date (str or pd.Timestamp): Start date in YYYY-MM-DD format or pandas Timestamp or datetime
+            end_date (str or pd.Timestamp): End date in YYYY-MM-DD format or pandas Timestamp or datetime
+            convert_to_fahrenheit (bool): converts temperature values to Fahrenheit. Otherwise values stay in tenths of degrees Celsius (Celsius * 10)
+
+        Returns:
+            pandas.DataFrame: Daily weather data
+        """
+        if not self.api_token:
+            raise ValueError("API token required for daily data. Get one from https://www.ncdc.noaa.gov/cdo-web/token")
+
+        if not self.station_info.get('ghcn_id'):
+            raise ValueError("Station does not have GHCN-D identifier for daily data")
+
+        # Convert pd.Timestamp to string format if needed
+        if isinstance(start_date, pd.Timestamp):
+            start_date = start_date.strftime('%Y-%m-%d')
+        elif hasattr(start_date, 'strftime'):  # datetime.datetime or similar
+            start_date = start_date.strftime('%Y-%m-%d')
+
+        if isinstance(end_date, pd.Timestamp):
+            end_date = end_date.strftime('%Y-%m-%d')
+        elif hasattr(end_date, 'strftime'):  # datetime.datetime or similar
+            end_date = end_date.strftime('%Y-%m-%d')
+
+        # if not datatypes:
+        #     datatypes = ['TAVG']
+
+        url = f"{self.base_url}data"
+        params = {
+            'datasetid': 'GHCND',
+            'stationid': self.station_info['ghcn_id'],
+            'startdate': start_date,
+            'enddate': end_date,
+            'datatypeid': 'TAVG',
+            'limit': 1000,
+            'format': 'json'
+        }
+
+        try:
+            print(f"Downloading daily data from {start_date} to {end_date}...")
+
+            headers = {"token": self.api_token}
+            response = requests.get(url, params=params, headers=headers)
+            response.raise_for_status()
+
+            data = response.json()
+            if 'results' in data:
+                df = pd.DataFrame(data['results'])
+
+                if not df.empty:
+                    df['date'] = pd.to_datetime(df['date'])
+                    df = df.sort_values('date')
+                    if convert_to_fahrenheit:  # Convert value from tenths of Celsius to Fahrenheit
+                        df['value'] = (df['value'] / 10) * 9/5 + 32
+                    df = df.set_index('date')
+                    df = df[['value']].rename(columns={'value': 'OAT_NOAA'})
+
+                    print(f"Successfully downloaded {len(df)} daily records")
+                    return df
+                else:
+                    print("No daily data found for the specified parameters")
+                    return pd.DataFrame()
+            else:
+                print("No daily data found")
+                return pd.DataFrame()
+
+        except requests.exceptions.RequestException as e:
+            print(f"Daily data download failed: {e}")
+            return pd.DataFrame()
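For orientation, here is a minimal usage sketch of the NOAADataDownloader class added above. It is not part of the package diff; the station code, the token placeholder, and the date range are illustrative only.

from ecopipeline.utils.NOAADataDownloader import NOAADataDownloader

# 'SEA' resolves to KSEA via the built-in common-stations mapping; the token
# placeholder stands in for a real NOAA CDO API token (required for daily data).
downloader = NOAADataDownloader('SEA', api_token='YOUR_NOAA_CDO_TOKEN')
print(downloader.get_station_info())

# Daily average temperature (GHCND TAVG) indexed by date, returned as a single
# 'OAT_NOAA' column, in degrees Fahrenheit by default.
daily_oat = downloader.download_daily_TAVG_data('2024-01-01', '2024-01-31')
print(daily_oat.head())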
@@ -20,5 +20,6 @@ src/ecopipeline/transform/bayview.py
 src/ecopipeline/transform/lbnl.py
 src/ecopipeline/transform/transform.py
 src/ecopipeline/utils/ConfigManager.py
+src/ecopipeline/utils/NOAADataDownloader.py
 src/ecopipeline/utils/__init__.py
 src/ecopipeline/utils/unit_convert.py
@@ -1 +0,0 @@
-from .ConfigManager import *