loone-data-prep 1.2.4__py3-none-any.whl → 1.3.1__py3-none-any.whl

This diff reflects the changes between publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (28)
  1. loone_data_prep/GEOGLOWS_LOONE_DATA_PREP.py +47 -16
  2. loone_data_prep/LOONE_DATA_PREP.py +0 -1
  3. loone_data_prep/dbhydro_insights.py +195 -0
  4. loone_data_prep/flow_data/S65E_total.py +57 -57
  5. loone_data_prep/flow_data/forecast_bias_correction.py +1 -1
  6. loone_data_prep/flow_data/get_forecast_flows.py +19 -105
  7. loone_data_prep/flow_data/get_inflows.py +18 -8
  8. loone_data_prep/flow_data/get_outflows.py +16 -7
  9. loone_data_prep/flow_data/hydro.py +62 -91
  10. loone_data_prep/forecast_scripts/get_Chla_predicted.py +1 -1
  11. loone_data_prep/forecast_scripts/get_NO_Loads_predicted.py +1 -1
  12. loone_data_prep/forecast_scripts/new_combined_weather_forecast.py +220 -0
  13. loone_data_prep/utils.py +262 -32
  14. loone_data_prep/water_level_data/get_all.py +52 -44
  15. loone_data_prep/water_level_data/hydro.py +49 -68
  16. loone_data_prep/water_quality_data/get_inflows.py +69 -27
  17. loone_data_prep/water_quality_data/get_lake_wq.py +130 -33
  18. loone_data_prep/water_quality_data/wq.py +114 -88
  19. loone_data_prep/weather_data/get_all.py +5 -3
  20. loone_data_prep/weather_data/weather.py +117 -180
  21. {loone_data_prep-1.2.4.dist-info → loone_data_prep-1.3.1.dist-info}/METADATA +2 -8
  22. loone_data_prep-1.3.1.dist-info/RECORD +38 -0
  23. {loone_data_prep-1.2.4.dist-info → loone_data_prep-1.3.1.dist-info}/WHEEL +1 -1
  24. loone_data_prep/forecast_scripts/create_forecast_LOWs.py +0 -170
  25. loone_data_prep/forecast_scripts/weather_forecast.py +0 -199
  26. loone_data_prep-1.2.4.dist-info/RECORD +0 -38
  27. {loone_data_prep-1.2.4.dist-info → loone_data_prep-1.3.1.dist-info}/licenses/LICENSE +0 -0
  28. {loone_data_prep-1.2.4.dist-info → loone_data_prep-1.3.1.dist-info}/top_level.txt +0 -0
loone_data_prep-1.3.1.dist-info/RECORD
@@ -0,0 +1,38 @@
+ loone_data_prep/GEOGLOWS_LOONE_DATA_PREP.py,sha256=P1CV9UtePWCzsPmni_U881eb10BXugeeBaj2JDbmI0M,37569
+ loone_data_prep/LOONE_DATA_PREP.py,sha256=pk7AQMdsiq6nwOXlNClso2ICIasyDgUV16Wo9un99NE,69303
+ loone_data_prep/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ loone_data_prep/data_analyses_fns.py,sha256=BZ7famrSKoUfExQvZfbl72CyADHLb-zzgdWZ-kLJxcQ,4603
+ loone_data_prep/dbhydro_insights.py,sha256=4vG5V5mPrulY9RThOflqxxfFW6AwKGi-xAESD7omxIs,6495
+ loone_data_prep/herbie_utils.py,sha256=O5Lcn6lSWUKjJwWhak7U3eyUPxo4jH59xdfBgl5ExJQ,914
+ loone_data_prep/utils.py,sha256=89dSclB7eJKnXjEZUoI4YnvTM2pha_uWLbi2GMC6UjY,45357
+ loone_data_prep/flow_data/S65E_total.py,sha256=wGKRkihrx01u5wk7b2s1AFOj_PUTPolpjCMBkAvQVlc,3671
+ loone_data_prep/flow_data/__init__.py,sha256=u7fENFUZsJjyl13Bc9ZE47sHMKmjxtqXhV9t7vDTm7Y,93
+ loone_data_prep/flow_data/forecast_bias_correction.py,sha256=j71igE9hwY_lWAH2R7Pw25TnNyoSgN4ppSwVWnX50mY,11320
+ loone_data_prep/flow_data/get_forecast_flows.py,sha256=nW38zEGYqpM_xQRqA4qPZ5Cb-B48MNWAlWiCspis3Ok,12968
+ loone_data_prep/flow_data/get_inflows.py,sha256=fUEwF_nnNfzawRgHM2hxlkhhevcxEnpk_SlVY_p07Cc,7216
+ loone_data_prep/flow_data/get_outflows.py,sha256=ytsAsznaXYo6qI4ZBTEzmn-EO52DMp51_bIatv3ZwOc,6177
+ loone_data_prep/flow_data/hydro.py,sha256=kIbf_fu161TkZ8cSbAwV6NFnch-LER-vj_400RrbPoY,4072
+ loone_data_prep/forecast_scripts/Chla_merged.py,sha256=PxVEbTrqHEka6Jg0QjEC6qfFtPNzY-0_71WmlelAfPY,1225
+ loone_data_prep/forecast_scripts/forecast_stages.py,sha256=6S6aHlYi2_t6GAh901KBiBWPueYCwAzyb-AliHJexoU,1373
+ loone_data_prep/forecast_scripts/get_Chla_predicted.py,sha256=_P5_op8P_Z8d2dYWG4E5zGXcmw5TEwaK4CKdveoyPN4,4789
+ loone_data_prep/forecast_scripts/get_NO_Loads_predicted.py,sha256=7HdRe7acw1AAzBgSdv_i7zIDIrl7gy6ykXwzWn96LPI,4223
+ loone_data_prep/forecast_scripts/loone_q_predict.py,sha256=k8ndTnsRly4BxGS52Gznca75oX2wkPX6nkid6Ccb6aQ,5834
+ loone_data_prep/forecast_scripts/loone_wq_predict.py,sha256=xCiH6QScTYdeZyAhqoqNiJEDTFoXJPh-Yma9VGN_-GY,2123
+ loone_data_prep/forecast_scripts/new_combined_weather_forecast.py,sha256=6-_05pQ2Vj_I_218ROGrM5U5q7NZE8Wi6xfwv0DQQcY,9124
+ loone_data_prep/forecast_scripts/predict_PI.py,sha256=f0n2-gt5t9FKNdpJ5QGpyP2QBFLDGetYzfTYL95Vi_8,1937
+ loone_data_prep/forecast_scripts/trib_cond.py,sha256=LlMxD0a9jwtQ9grI4Ho0KpTgphl6VAjg1cBUtfXZ01A,4030
+ loone_data_prep/water_level_data/__init__.py,sha256=rgHDDkwccemsZnwUlw2M0h2ML4KmI89yPscmLoxbEHM,43
+ loone_data_prep/water_level_data/get_all.py,sha256=QsDULwm8D0bE4bvJ1o89LYgyx0w9bD-r_95WvsHuqz4,11329
+ loone_data_prep/water_level_data/hydro.py,sha256=cc_NWoQBwyLlmQtAyxWY7lIhrozIrDixNsgwdk-_8DI,3477
+ loone_data_prep/water_quality_data/__init__.py,sha256=PREV_pqo5welPDjgNvkKnRLLVV-uvhKVy6y6R3A2E78,57
+ loone_data_prep/water_quality_data/get_inflows.py,sha256=q1989ZKLezAzfDR5c50_7yCgdNCFiDinLhEYgLUdeS4,9703
+ loone_data_prep/water_quality_data/get_lake_wq.py,sha256=ew8O49XxdUvMi6B9VYOmuHxmmYkA4pJ8lN3Dq5XL9a4,12347
+ loone_data_prep/water_quality_data/wq.py,sha256=Ez_fLBiqJo1XrZAaSkGFZ9LZGUC7a4DaUUySnTZt1ig,6852
+ loone_data_prep/weather_data/__init__.py,sha256=TX58EPgGRzEK_LmLze79lC4L7kU_j3yZf5_iC4nOIP4,45
+ loone_data_prep/weather_data/get_all.py,sha256=rMvIZzjI1f68T24PkQuCr8VtHX0JkeTm3vO1x-gHemU,7064
+ loone_data_prep/weather_data/weather.py,sha256=vGcRcC36fBMvgYJqkKnzAuAZR5QFe2IIPsV11X8Xf28,9501
+ loone_data_prep-1.3.1.dist-info/licenses/LICENSE,sha256=rR1QKggtQUbAoYu2SW1ouI5xPqt9g4jvRRpZ0ZfnuqQ,1497
+ loone_data_prep-1.3.1.dist-info/METADATA,sha256=xY9LAFhZOtn4V9axjscAEdBdAUSa6q_8fCV1_Vbffkw,4206
+ loone_data_prep-1.3.1.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+ loone_data_prep-1.3.1.dist-info/top_level.txt,sha256=wDyJMJiCO5huTAuNmvxpjFxtvGaq_8Tr4hFFcXf8jLE,16
+ loone_data_prep-1.3.1.dist-info/RECORD,,
{loone_data_prep-1.2.4.dist-info → loone_data_prep-1.3.1.dist-info}/WHEEL
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (80.9.0)
+ Generator: setuptools (80.10.2)
  Root-Is-Purelib: true
  Tag: py3-none-any
loone_data_prep/forecast_scripts/create_forecast_LOWs.py
@@ -1,170 +0,0 @@
- import os
- from herbie import FastHerbie
- from datetime import datetime
- import pandas as pd
- from retry_requests import retry as retry_requests
- from retry import retry
- import warnings
- from typing import Tuple
- from loone_data_prep.herbie_utils import get_fast_herbie_object
-
-
- def generate_wind_forecasts(output_dir):
-     # Ensure output directory exists
-     warnings.filterwarnings("ignore", message="Will not remove GRIB file because it previously existed.")
-     os.makedirs(output_dir, exist_ok=True)
-
-     # Define points of interest
-     points = pd.DataFrame({
-         "longitude": [-80.7934, -80.9724, -80.7828, -80.7890],
-         "latitude": [27.1389, 26.9567, 26.8226, 26.9018]
-     })
-
-     # Station-specific file and column names
-     file_map = {
-         "Point_1": ("L001_WNDS_MPH_predicted.csv", "L001_WNDS_MPH"),
-         "Point_2": ("L005_WNDS_MPH_predicted.csv", "L005_WNDS_MPH"),
-         "Point_3": ("L006_WNDS_MPH_predicted.csv", "L006_WNDS_MPH"),
-         "Point_4": ("LZ40_WNDS_MPH_predicted.csv", "LZ40_WNDS_MPH")
-     }
-
-     today_str = datetime.today().strftime('%Y-%m-%d 00:00')
-     FH = get_fast_herbie_object(today_str)
-     print("FastHerbie initialized.")
-     dfs = []
-
-     variables = {
-         "10u": "10u",
-         "10v": "10v",
-         "2t": "2t",
-
-     }
-
-     # Loop through points and extract data
-     for index, point in points.iterrows():
-         print(f"\nProcessing Point {index + 1}: ({point.latitude}, {point.longitude})")
-
-         point_df = pd.DataFrame({
-             "longitude": [point.longitude],
-             "latitude": [point.latitude]
-         })
-
-         # Loop through variables for current point and extract data
-         for var_key, var_name in variables.items():
-             # Get the current variable data at the current point
-             print(f" Variable: {var_key}")
-             try:
-                 df, var_name_actual = _download_herbie_variable(FH, var_key, var_name, point_df)
-             except Exception as e:
-                 print(f"Error processing {var_key} for Point {index + 1} ({point.latitude}, {point.longitude}): {e}")
-                 print(f'Skipping {var_key}')
-                 continue
-
-             # Append the DataFrame and variable name to the list
-             if not df.empty:
-                 dfs.append((index, var_name_actual, df))
-
-     # Merge and process data per point
-     results = {}
-     for point_index in range(len(points)):
-         u_df = [df for idx, name, df in dfs if idx == point_index and name == "u10"][0]
-         v_df = [df for idx, name, df in dfs if idx == point_index and name == "v10"][0]
-         merged = u_df.merge(v_df, on="datetime", how="outer")
-
-         # Compute wind speed and correction
-         merged["wind_speed"] = (merged["u10"] ** 2 + merged["v10"] ** 2) ** 0.5
-         merged["wind_speed_corrected"] = 0.4167 * merged["wind_speed"] + 4.1868
-         merged["wind_speed_corrected"] = merged["wind_speed_corrected"] * 2.23694  # m/s to mph
-
-         results[f"Point_{point_index + 1}"] = merged
-
-     # Save outputs with station-specific column names
-     for key, (filename, new_col_name) in file_map.items():
-         df = results[key].copy()
-         df = df[["datetime", "wind_speed_corrected"]].rename(columns={
-             "wind_speed_corrected": new_col_name,
-             "datetime": "date"
-         })
-         filepath = os.path.join(output_dir, filename)
-         df.to_csv(filepath, index=False)
-     # Save 2-meter air temperature data
-     airt_file_map = {
-         "Point_1": "L001_AIRT_Degrees Celsius_forecast.csv",
-         "Point_2": "L005_AIRT_Degrees Celsius_forecast.csv",
-         "Point_3": "L006_AIRT_Degrees Celsius_forecast.csv",
-         "Point_4": "LZ40_AIRT_Degrees Celsius_forecast.csv"
-     }
-     airt_column_map = {
-         "Point_1": "L001_AIRT_Degrees Celsius",
-         "Point_2": "L005_AIRT_Degrees Celsius",
-         "Point_3": "L006_AIRT_Degrees Celsius",
-         "Point_4": "LZ40_AIRT_Degrees Celsius"
-     }
-
-     for key in airt_file_map:
-         point_index = int(key.split("_")[1]) - 1
-         df_airt = [df for idx, name, df in dfs if idx == point_index and name == "t2m"][0].copy()
-         df_airt["t2m"] = df_airt["t2m"] - 273.15  # Convert from Kelvin to Celsius
-         df_airt = df_airt.rename(columns={
-             "datetime": "date",
-             "t2m": airt_column_map[key]
-         })
-         filepath = os.path.join(output_dir, airt_file_map[key])
-         df_airt.to_csv(filepath, index=False)
-
-
- @retry(Exception, tries=5, delay=15, max_delay=60, backoff=2)
- def _download_herbie_variable(fast_herbie_object: FastHerbie, variable_key: str, variable_name: str, point_df: pd.DataFrame) -> Tuple[pd.DataFrame, str]:
-     """
-     Download a specific variable from the Herbie API.
-
-     Args:
-         fast_herbie_object: An instance of the FastHerbie class.
-         variable_key: The key of the variable to download.
-         variable_name: The name of the variable to download.
-         point_df: A DataFrame containing the point of interest (longitude and latitude).
-
-     Returns:
-         A DataFrame containing the downloaded variable data.
-
-     Example:
-         point_df = pd.DataFrame({"longitude": [-80.7934], "latitude": [27.1389]})
-         df, var_name_actual = _download_herbie_variable(FastHerbie('2020-05-16 00:00', model='ifs', fxx=range(0, 360, 3)), '10u', '10u', point_df)
-     """
-     # Download and load dataset
-     fast_herbie_object.download(f":{variable_key}")
-     ds = fast_herbie_object.xarray(f":{variable_key}", backend_kwargs={"decode_timedelta": True})
-
-     # Extract point data
-     dsi = ds.herbie.pick_points(point_df, method="nearest")
-
-     # Close and delete the original dataset to free up resources
-     ds.close()
-     del ds
-
-     # Get actual variable name
-     if variable_name == "10u":
-         var_name_actual = "u10"  # Map 10u to u10
-     elif variable_name == "10v":
-         var_name_actual = "v10"  # Map 10v to v10
-     elif variable_name == "2t":
-         var_name_actual = "t2m"  # TODO: check that this is correct
-
-     # Convert to DataFrame
-     time_series = dsi[var_name_actual].squeeze()
-     df = time_series.to_dataframe().reset_index()
-
-     # Handle datetime columns
-     if "valid_time" in df.columns:
-         df = df.rename(columns={"valid_time": "datetime"})
-     elif "step" in df.columns and "time" in dsi.coords:
-         df["datetime"] = dsi.time.values[0] + df["step"]
-
-     # Close and delete the intermediate dataset to free memory
-     dsi.close()
-     del dsi, time_series
-
-     # Retain necessary columns
-     df = df[["datetime", var_name_actual]].drop_duplicates()
-
-     return df, var_name_actual
loone_data_prep/forecast_scripts/weather_forecast.py
@@ -1,199 +0,0 @@
- from herbie import FastHerbie
- from datetime import datetime
- import pandas as pd
- import openmeteo_requests
- import argparse
- import requests_cache
- from retry_requests import retry as retry_requests
- from retry import retry
- import warnings
- from loone_data_prep.herbie_utils import get_fast_herbie_object
-
- warnings.filterwarnings("ignore", message="Will not remove GRIB file because it previously existed.")
-
-
- def download_weather_forecast(file_path):
-     # Get today's date in the required format
-     today_str = datetime.today().strftime('%Y-%m-%d 00:00')
-
-     # Define variables to download and extract
-     variables = {
-         "10u": "10u",
-         "ssrd": "ssrd",
-         "tp": "tp",
-         "10v": "10v",
-     }
-
-     # Initialize FastHerbie
-     FH = get_fast_herbie_object(today_str)
-     print("FastHerbie initialized.")
-
-     dfs = []
-
-     for var_key, var_name in variables.items():
-         # Download the current variable
-         print(f"Processing {var_key}...")
-         try:
-             df = _download_herbie_variable(FH, var_key, var_name)
-         except Exception as e:
-             print(f"Error processing {var_key}: {e}")
-             print(f'Skipping {var_key}')
-             continue
-
-         # Append to list
-         if not df.empty:
-             dfs.append(df)
-
-     try:
-         # Merge all variables into a single DataFrame
-         final_df = dfs[0]
-         for df in dfs[1:]:
-             final_df = final_df.merge(df, on="datetime", how="outer")
-         print(final_df)
-         # Calculate wind speed
-         final_df["wind_speed"] = (final_df["u10"] ** 2 + final_df["v10"] ** 2) ** 0.5
-
-         # rainfall corrected: OLS Regression Equation: Corrected Forecast = 0.7247 * Forecast + 0.1853
-         final_df["tp_corrected"] = 0.7247 * final_df["tp"] + 0.1853
-
-         # wind speed correction: Corrected Forecast = 0.4167 * Forecast + 4.1868
-         final_df["wind_speed_corrected"] = 0.4167 * final_df["wind_speed"] + 4.1868
-
-         # radiation correction will need to be fixed because it was done on fdir instead of ssdr
-         # radiation corrected: Corrected Forecast = 0.0553 * Forecast - 0.0081
-         final_df["ssrd_corrected"] = 0.0553 * final_df["ssrd"] - 0.0081
-     except Exception as e:
-         print(f'Error correcting herbie weather data: {e}')
-
-     try:
-         # Setup the Open-Meteo API client with cache and retry on error
-         cache_session = requests_cache.CachedSession('.cache', expire_after = 3600)
-         retry_session = retry_requests(cache_session, retries = 5, backoff_factor = 0.2)
-         openmeteo = openmeteo_requests.Client(session = retry_session)
-
-         # Make sure all required weather variables are listed here
-         # The order of variables in hourly or daily is important to assign them correctly below
-         url = "https://api.open-meteo.com/v1/forecast"
-         params = {
-             "latitude": 26.9690,
-             "longitude": -80.7976,
-             "hourly": "evapotranspiration",
-             "forecast_days": 16,
-             "models": "gfs_seamless"
-         }
-         responses = openmeteo.weather_api(url, params=params)
-
-
-         # Process first location. Add a for-loop for multiple locations or weather models
-         response = responses[0]
-
-         hourly = response.Hourly()
-         hourly_evapotranspiration = hourly.Variables(0).ValuesAsNumpy()
-
-         hourly_data = {"date": pd.date_range(
-             start = pd.to_datetime(hourly.Time(), unit = "s", utc = True),
-             end = pd.to_datetime(hourly.TimeEnd(), unit = "s", utc = True),
-             freq = pd.Timedelta(seconds = hourly.Interval()),
-             inclusive = "left"
-         )}
-
-         hourly_data["evapotranspiration"] = hourly_evapotranspiration
-
-         hourly_dataframe = pd.DataFrame(data = hourly_data)
-
-         # Convert datetime to date for merging
-         final_df['date'] = final_df['datetime']
-         # Ensure final_df['date'] is timezone-aware (convert to UTC)
-         final_df['date'] = pd.to_datetime(final_df['date'], utc=True)
-
-         # Ensure hourly_dataframe['date'] is also timezone-aware (convert to UTC)
-         hourly_dataframe['date'] = pd.to_datetime(hourly_dataframe['date'], utc=True)
-
-         # Merge while keeping only matching dates from final_df
-         merged_df = final_df.merge(hourly_dataframe, on='date', how='left')
-
-         # Print final combined DataFrame
-         merged_df.drop(columns=['date'], inplace=True)
-         # print(merged_df)
-
-         merged_df.to_csv(file_path, index=False)
-     except Exception as e:
-         print(f'Error retrieving openmeteo weather data: {e}')
-
-
- @retry(Exception, tries=5, delay=15, max_delay=60, backoff=2)
- def _download_herbie_variable(fast_herbie_object: FastHerbie, variable_key: str, variable_name: str) -> pd.DataFrame:
-     """
-     Download a specific variable from the Herbie API.
-
-     Args:
-         fast_herbie_object: An instance of the FastHerbie class.
-         variable_key: The key of the variable to download.
-         variable_name: The name of the variable to download.
-
-     Returns:
-         A DataFrame containing the downloaded variable data.
-
-     Example:
-         df = _download_herbie_variable(FastHerbie('2020-05-16 00:00', model='ifs', fxx=range(0, 360, 3)), '10u', '10u')
-     """
-     # Define point of interest
-     points = pd.DataFrame({"longitude": [-80.7976], "latitude": [26.9690]})
-
-     # Download and load the dataset
-     fast_herbie_object.download(f":{variable_key}")
-     ds = fast_herbie_object.xarray(f":{variable_key}", backend_kwargs={"decode_timedelta": True})
-
-     # Extract point data
-     dsi = ds.herbie.pick_points(points, method="nearest")
-
-     # Close and delete the original dataset to free up resources
-     ds.close()
-     del ds
-
-     # Extract the correct variable name dynamically
-     if variable_name == "10u":
-         var_name_actual = "u10"  # Map 10u to u10
-     elif variable_name == "10v":
-         var_name_actual = "v10"  # Map 10v to v10
-     else:
-         var_name_actual = variable_name  # For ssrd and tp, use the same name
-
-     # Extract time series
-     time_series = dsi[var_name_actual].squeeze()
-
-     # Convert to DataFrame
-     df = time_series.to_dataframe().reset_index()
-
-     # Convert `valid_time` to datetime
-     if "valid_time" in df.columns:
-         df = df.rename(columns={"valid_time": "datetime"})
-     elif "step" in df.columns and "time" in dsi.coords:
-         df["datetime"] = dsi.time.values[0] + df["step"]
-
-     # Keep only datetime and variable of interest
-     df = df[["datetime", var_name_actual]].drop_duplicates()
-
-     # Print extracted data
-     # print(df)
-
-     # Clean up intermediate datasets to free memory
-     del dsi, time_series
-
-     return df
-
-
- def main():
-     # Set up command-line argument parsing
-     parser = argparse.ArgumentParser(description="Download and process weather forecast data.")
-     parser.add_argument("file_path", help="Path to save the resulting CSV file.")
-
-     # Parse the arguments
-     args = parser.parse_args()
-
-     # Call the function with the provided file path
-     download_weather_forecast(args.file_path)
-
-
- if __name__ == "__main__":
-     main()
loone_data_prep-1.2.4.dist-info/RECORD
@@ -1,38 +0,0 @@
- loone_data_prep/GEOGLOWS_LOONE_DATA_PREP.py,sha256=gfpnaOTjZ-YhWqOEvOaDvear4_59IbqARpLyg2Y_c8U,35851
- loone_data_prep/LOONE_DATA_PREP.py,sha256=vEWcGHKN10ipLk9o5I5aKu_LPfDyFW3HBJ8GgqISYjA,69315
- loone_data_prep/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- loone_data_prep/data_analyses_fns.py,sha256=BZ7famrSKoUfExQvZfbl72CyADHLb-zzgdWZ-kLJxcQ,4603
- loone_data_prep/herbie_utils.py,sha256=O5Lcn6lSWUKjJwWhak7U3eyUPxo4jH59xdfBgl5ExJQ,914
- loone_data_prep/utils.py,sha256=UlNc84ofh3ZY3lYsgQmDsgGgohXIBwZ0bK9rX6SgGF4,35730
- loone_data_prep/flow_data/S65E_total.py,sha256=szNUfj0EyyyDzuKNhTGAZtWc5owiOpxYS55YTt4u19k,2835
- loone_data_prep/flow_data/__init__.py,sha256=u7fENFUZsJjyl13Bc9ZE47sHMKmjxtqXhV9t7vDTm7Y,93
- loone_data_prep/flow_data/forecast_bias_correction.py,sha256=pcMH7qR3RZvXOHoYOtP7azNn5rVuRPL9mwgoVk2NeLA,11378
- loone_data_prep/flow_data/get_forecast_flows.py,sha256=i8C-TsqOVGeB5DdtcxaTkuvVXMN1ayDVVk9cikbRLU4,16155
- loone_data_prep/flow_data/get_inflows.py,sha256=xKuSyJBdPrpjqMdRiyNDyxwdhYVIgLhiTP0k_1I1uWI,6456
- loone_data_prep/flow_data/get_outflows.py,sha256=x7aisIkbXoTkcubFQLDghX-P8lztPq-tU0dQzoVRTtQ,5620
- loone_data_prep/flow_data/hydro.py,sha256=5MwrzSUTCgPgeC_YGhz-en1CbOMp379Qf5zjpJlp-HM,5312
- loone_data_prep/forecast_scripts/Chla_merged.py,sha256=PxVEbTrqHEka6Jg0QjEC6qfFtPNzY-0_71WmlelAfPY,1225
- loone_data_prep/forecast_scripts/create_forecast_LOWs.py,sha256=xUYO0_9EbtVDX6LPBAfDFyvQQIFN7dNaNYFO4D5pe8Y,6591
- loone_data_prep/forecast_scripts/forecast_stages.py,sha256=6S6aHlYi2_t6GAh901KBiBWPueYCwAzyb-AliHJexoU,1373
- loone_data_prep/forecast_scripts/get_Chla_predicted.py,sha256=wnGFJlu2zyO1QSUiQ3W8iAcLOtkDZpLhuRr037Nmgb4,4759
- loone_data_prep/forecast_scripts/get_NO_Loads_predicted.py,sha256=MvJNgY7KPkjyot2BYInQCcp5lg8_N_D_SLSt8WpUmHQ,4200
- loone_data_prep/forecast_scripts/loone_q_predict.py,sha256=k8ndTnsRly4BxGS52Gznca75oX2wkPX6nkid6Ccb6aQ,5834
- loone_data_prep/forecast_scripts/loone_wq_predict.py,sha256=xCiH6QScTYdeZyAhqoqNiJEDTFoXJPh-Yma9VGN_-GY,2123
- loone_data_prep/forecast_scripts/predict_PI.py,sha256=f0n2-gt5t9FKNdpJ5QGpyP2QBFLDGetYzfTYL95Vi_8,1937
- loone_data_prep/forecast_scripts/trib_cond.py,sha256=LlMxD0a9jwtQ9grI4Ho0KpTgphl6VAjg1cBUtfXZ01A,4030
- loone_data_prep/forecast_scripts/weather_forecast.py,sha256=5RFA2Pg4j9Df3633SEt6vEAQH0HXjR3TVDgNqYqETEY,7108
- loone_data_prep/water_level_data/__init__.py,sha256=rgHDDkwccemsZnwUlw2M0h2ML4KmI89yPscmLoxbEHM,43
- loone_data_prep/water_level_data/get_all.py,sha256=arPSWpb0XfQm0GKZJmoWhWdLuuNDxtGVX6_6UuD1_Vs,10885
- loone_data_prep/water_level_data/hydro.py,sha256=PtsNdMXe1Y4e5CzEyLH6nJx_xv8sB90orGcSgxt7nao,3653
- loone_data_prep/water_quality_data/__init__.py,sha256=PREV_pqo5welPDjgNvkKnRLLVV-uvhKVy6y6R3A2E78,57
- loone_data_prep/water_quality_data/get_inflows.py,sha256=01wAVJaDSQiamc5qIOf4BIYCBkvW-bdJOpiPbOFAIl4,7295
- loone_data_prep/water_quality_data/get_lake_wq.py,sha256=gcideLf2oddFVl_vEdhFkXhwhhtI58ZafKWhlpQ23X4,7791
- loone_data_prep/water_quality_data/wq.py,sha256=sl6G3iDCk6QUzpHTXPHpRZNMBG0-wHuc6zdYbKI4eQk,5077
- loone_data_prep/weather_data/__init__.py,sha256=TX58EPgGRzEK_LmLze79lC4L7kU_j3yZf5_iC4nOIP4,45
- loone_data_prep/weather_data/get_all.py,sha256=aCufuxORU51XhXt7LN9wN_V4qtjNt1qRC1UKlI2b3Ko,6918
- loone_data_prep/weather_data/weather.py,sha256=hvceksrGSnDkCjheBVBuPgY1DrdR0ZAtrFB-K2tYTtk,12043
- loone_data_prep-1.2.4.dist-info/licenses/LICENSE,sha256=rR1QKggtQUbAoYu2SW1ouI5xPqt9g4jvRRpZ0ZfnuqQ,1497
- loone_data_prep-1.2.4.dist-info/METADATA,sha256=WIXZJw2ShnnkeaZGRYL7JtjE-yIIDerzWtFPCxt9SVQ,4343
- loone_data_prep-1.2.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- loone_data_prep-1.2.4.dist-info/top_level.txt,sha256=wDyJMJiCO5huTAuNmvxpjFxtvGaq_8Tr4hFFcXf8jLE,16
- loone_data_prep-1.2.4.dist-info/RECORD,,