hydroanomaly-0.1.0-py3-none-any.whl → hydroanomaly-0.2.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hydroanomaly/__init__.py +4 -3
- hydroanomaly/usgs_data.py +311 -0
- hydroanomaly-0.2.0.dist-info/METADATA +227 -0
- hydroanomaly-0.2.0.dist-info/RECORD +9 -0
- hydroanomaly-0.1.0.dist-info/METADATA +0 -60
- hydroanomaly-0.1.0.dist-info/RECORD +0 -8
- {hydroanomaly-0.1.0.dist-info → hydroanomaly-0.2.0.dist-info}/WHEEL +0 -0
- {hydroanomaly-0.1.0.dist-info → hydroanomaly-0.2.0.dist-info}/licenses/LICENSE +0 -0
- {hydroanomaly-0.1.0.dist-info → hydroanomaly-0.2.0.dist-info}/top_level.txt +0 -0
hydroanomaly/__init__.py
CHANGED
@@ -1,15 +1,16 @@
 """
 HydroAnomaly
 
-A Python package for hydro anomaly detection.
+A Python package for hydro anomaly detection and USGS data retrieval.
 """
 
-__version__ = "0.1.0"
+__version__ = "0.2.0"
 __author__ = "Your Name"
 __email__ = "your.email@example.com"
 
 # Import main modules for easy access
 from .hello import greet
 from .math_utils import add, multiply
+from .usgs_data import get_usgs_data, USGSDataRetriever
 
-__all__ = ["greet", "add", "multiply"]
+__all__ = ["greet", "add", "multiply", "get_usgs_data", "USGSDataRetriever"]
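The `__init__.py` change re-exports the new USGS helpers at package level. A minimal sketch of what that enables, reusing the site number, parameter code, and dates from the README's Quick Start (illustrative values, not package defaults):

```python
import hydroanomaly

# get_usgs_data and USGSDataRetriever are now part of hydroanomaly.__all__.
data = hydroanomaly.get_usgs_data(
    site_number="08158000",    # example site from the README (Colorado River at Austin, TX)
    parameter_code="00060",    # discharge, cubic feet per second
    start_date="2023-01-01",
    end_date="2023-01-31",
)
print(data.head())
```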
hydroanomaly/usgs_data.py
ADDED
@@ -0,0 +1,311 @@
+"""
+USGS Data Retrieval Module
+
+This module provides functionality to retrieve water data from the USGS Water Services API.
+Supports various water quality parameters and time series data.
+"""
+
+import pandas as pd
+import numpy as np
+import requests
+from io import StringIO
+from datetime import datetime
+from typing import Optional, Dict, Any
+import warnings
+
+
+class USGSDataRetriever:
+    """
+    A class to retrieve and process USGS water data.
+
+    This class handles the retrieval of time series data from USGS Water Services
+    and provides methods to clean, process, and validate the data.
+    """
+
+    def __init__(self):
+        """Initialize the USGS Data Retriever."""
+        self.base_url = "https://waterservices.usgs.gov/nwis/iv/"
+        self.last_request_url = None
+        self.last_response = None
+
+    def retrieve_data(
+        self,
+        site_number: str,
+        parameter_code: str,
+        start_date: str,
+        end_date: str,
+        create_synthetic: bool = True
+    ) -> pd.DataFrame:
+        """
+        Retrieve USGS water data for specified parameters.
+
+        Args:
+            site_number (str): USGS site number (e.g., "294643095035200")
+            parameter_code (str): USGS parameter code (e.g., "63680" for turbidity)
+            start_date (str): Start date in YYYY-MM-DD format
+            end_date (str): End date in YYYY-MM-DD format
+            create_synthetic (bool): Whether to create synthetic data if no data found
+
+        Returns:
+            pd.DataFrame: DataFrame with datetime and parameter columns
+
+        Raises:
+            ValueError: If invalid dates or parameters provided
+            requests.RequestException: If API request fails
+        """
+        # Validate inputs
+        self._validate_inputs(site_number, parameter_code, start_date, end_date)
+
+        # Construct URL
+        url = self._build_url(site_number, parameter_code, start_date, end_date)
+        self.last_request_url = url
+
+        print(f"🔄 Requesting data from USGS...")
+        print(f"📍 Site: {site_number}")
+        print(f"📊 Parameter: {parameter_code}")
+        print(f"📅 Period: {start_date} to {end_date}")
+
+        try:
+            # Make API request
+            response = requests.get(url, timeout=30)
+            self.last_response = response
+
+            if response.status_code == 200:
+                data = self._process_response(response.text, parameter_code)
+
+                if len(data) == 0 and create_synthetic:
+                    print("\n⚠️ No USGS data available. Creating synthetic data...")
+                    data = self._create_synthetic_data(start_date, end_date, parameter_code)
+
+                if len(data) > 0:
+                    print(f"✅ Successfully retrieved {len(data)} data points")
+                    return data
+                else:
+                    print("❌ No data available for the specified parameters")
+                    return pd.DataFrame(columns=["datetime", "value"])
+
+            else:
+                raise requests.RequestException(f"HTTP {response.status_code}: {response.reason}")
+
+        except requests.RequestException as e:
+            print(f"❌ Error retrieving data: {e}")
+            if create_synthetic:
+                print("🔄 Creating synthetic data as fallback...")
+                return self._create_synthetic_data(start_date, end_date, parameter_code)
+            else:
+                raise
+
+    def _validate_inputs(self, site_number: str, parameter_code: str, start_date: str, end_date: str):
+        """Validate input parameters."""
+        if not site_number or not isinstance(site_number, str):
+            raise ValueError("Site number must be a non-empty string")
+
+        if not parameter_code or not isinstance(parameter_code, str):
+            raise ValueError("Parameter code must be a non-empty string")
+
+        try:
+            start_dt = datetime.strptime(start_date, "%Y-%m-%d")
+            end_dt = datetime.strptime(end_date, "%Y-%m-%d")
+            if start_dt >= end_dt:
+                raise ValueError("Start date must be before end date")
+        except ValueError as e:
+            raise ValueError(f"Invalid date format. Use YYYY-MM-DD: {e}")
+
+    def _build_url(self, site_number: str, parameter_code: str, start_date: str, end_date: str) -> str:
+        """Build the USGS API URL."""
+        return (
+            f"{self.base_url}?sites={site_number}"
+            f"&parameterCd={parameter_code}"
+            f"&startDT={start_date}&endDT={end_date}"
+            f"&format=rdb"
+        )
+
+    def _process_response(self, content: str, parameter_code: str) -> pd.DataFrame:
+        """Process the USGS API response."""
+        if "No sites found matching" in content or "No data" in content:
+            print("⚠️ No data available for this site/parameter combination")
+            return pd.DataFrame(columns=["datetime", "value"])
+
+        try:
+            # Read the tab-separated data
+            data = pd.read_csv(StringIO(content), sep='\t', comment='#')
+
+            # Drop empty columns
+            data = data.dropna(axis=1, how='all')
+
+            # Clean column names
+            data.columns = data.columns.str.strip()
+
+            # Find datetime and parameter columns
+            datetime_cols = [col for col in data.columns if 'datetime' in col.lower()]
+            parameter_cols = [col for col in data.columns if parameter_code in col]
+
+            if not datetime_cols:
+                raise ValueError("No datetime column found in response")
+            if not parameter_cols:
+                raise ValueError(f"No column found for parameter {parameter_code}")
+
+            datetime_col = datetime_cols[0]
+            parameter_col = parameter_cols[0]
+
+            # Keep only relevant columns
+            data = data[[datetime_col, parameter_col]].copy()
+            data.columns = ['datetime', 'value']
+
+            # Convert and clean data
+            data['datetime'] = pd.to_datetime(data['datetime'], errors='coerce')
+            data['value'] = pd.to_numeric(data['value'], errors='coerce')
+
+            # Remove rows with missing data
+            initial_count = len(data)
+            data = data.dropna()
+            final_count = len(data)
+
+            if initial_count > final_count:
+                print(f"⚠️ Removed {initial_count - final_count} rows with missing data")
+
+            return data
+
+        except Exception as e:
+            print(f"❌ Error parsing USGS response: {e}")
+            return pd.DataFrame(columns=["datetime", "value"])
+
+    def _create_synthetic_data(self, start_date: str, end_date: str, parameter_code: str) -> pd.DataFrame:
+        """Create synthetic data as fallback."""
+        date_range = pd.date_range(start=start_date, end=end_date, freq='D')
+
+        # Create realistic synthetic data based on parameter type
+        if parameter_code == "63680": # Turbidity
+            base_value = 12
+            noise_std = 3
+            anomaly_range = (5, 15)
+        elif parameter_code == "00060": # Discharge
+            base_value = 100
+            noise_std = 20
+            anomaly_range = (50, 200)
+        elif parameter_code == "00065": # Gage height
+            base_value = 5
+            noise_std = 1
+            anomaly_range = (2, 8)
+        else: # Generic water quality parameter
+            base_value = 10
+            noise_std = 2
+            anomaly_range = (3, 10)
+
+        # Generate base synthetic data
+        synthetic_values = np.random.normal(base_value, noise_std, len(date_range))
+
+        # Add some anomalies (10% of data)
+        anomaly_count = int(len(date_range) * 0.1)
+        anomaly_indices = np.random.choice(len(date_range), size=anomaly_count, replace=False)
+        anomaly_values = np.random.uniform(anomaly_range[0], anomaly_range[1], anomaly_count)
+        synthetic_values[anomaly_indices] += anomaly_values
+
+        # Ensure positive values
+        synthetic_values = np.maximum(synthetic_values, 0.1)
+
+        synthetic_data = pd.DataFrame({
+            'datetime': date_range,
+            'value': synthetic_values
+        })
+
+        print(f"📊 Created {len(synthetic_data)} synthetic data points")
+        print("🔍 Sample synthetic data:")
+        print(synthetic_data.head())
+
+        return synthetic_data
+
+    def save_data(self, data: pd.DataFrame, filename: str, parameter_name: str = "parameter") -> str:
+        """
+        Save data to CSV file.
+
+        Args:
+            data (pd.DataFrame): Data to save
+            filename (str): Output filename
+            parameter_name (str): Name of the parameter for column naming
+
+        Returns:
+            str: Path to saved file
+        """
+        if len(data) == 0:
+            print("⚠️ No data to save")
+            return ""
+
+        # Rename value column to parameter name
+        save_data = data.copy()
+        save_data.columns = ['datetime', parameter_name]
+
+        # Add date column for convenience
+        save_data['date'] = save_data['datetime'].dt.date
+
+        # Save to CSV
+        save_data.to_csv(filename, index=False)
+        print(f"💾 Saved {len(save_data)} records to '{filename}'")
+
+        return filename
+
+    def get_data_summary(self, data: pd.DataFrame) -> Dict[str, Any]:
+        """
+        Get summary statistics of the data.
+
+        Args:
+            data (pd.DataFrame): Data to summarize
+
+        Returns:
+            dict: Summary statistics
+        """
+        if len(data) == 0:
+            return {"error": "No data available"}
+
+        summary = {
+            "record_count": len(data),
+            "date_range": {
+                "start": data['datetime'].min(),
+                "end": data['datetime'].max()
+            },
+            "value_stats": {
+                "min": data['value'].min(),
+                "max": data['value'].max(),
+                "mean": data['value'].mean(),
+                "median": data['value'].median(),
+                "std": data['value'].std()
+            },
+            "missing_data": {
+                "count": data['value'].isna().sum(),
+                "percentage": (data['value'].isna().sum() / len(data)) * 100
+            }
+        }
+
+        return summary
+
+
+# Convenience function for easy access
+def get_usgs_data(
+    site_number: str,
+    parameter_code: str,
+    start_date: str,
+    end_date: str,
+    save_to_file: Optional[str] = None,
+    parameter_name: str = "value"
+) -> pd.DataFrame:
+    """
+    Convenience function to retrieve USGS data.
+
+    Args:
+        site_number (str): USGS site number
+        parameter_code (str): USGS parameter code
+        start_date (str): Start date (YYYY-MM-DD)
+        end_date (str): End date (YYYY-MM-DD)
+        save_to_file (str, optional): Filename to save data
+        parameter_name (str): Name for the parameter column
+
+    Returns:
+        pd.DataFrame: Retrieved data
+    """
+    retriever = USGSDataRetriever()
+    data = retriever.retrieve_data(site_number, parameter_code, start_date, end_date)
+
+    if save_to_file and len(data) > 0:
+        retriever.save_data(data, save_to_file, parameter_name)
+
+    return data
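One behaviour of the new module worth calling out: `retrieve_data()` defaults to `create_synthetic=True`, so an empty or failed USGS request is silently replaced by generated data. A minimal sketch of opting out of that fallback, using the illustrative site, parameter, and dates from the README examples:

```python
import requests
from hydroanomaly.usgs_data import USGSDataRetriever

retriever = USGSDataRetriever()
try:
    # With create_synthetic=False, an empty result stays an empty DataFrame
    # and HTTP/network errors are re-raised instead of triggering synthetic data.
    data = retriever.retrieve_data(
        site_number="08158000",
        parameter_code="63680",   # turbidity
        start_date="2023-01-01",
        end_date="2023-01-31",
        create_synthetic=False,
    )
except requests.RequestException as err:
    print(f"USGS request failed: {err}")
else:
    if not data.empty:
        print(retriever.get_data_summary(data))
```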
hydroanomaly-0.2.0.dist-info/METADATA
ADDED
@@ -0,0 +1,227 @@
+Metadata-Version: 2.4
+Name: hydroanomaly
+Version: 0.2.0
+Summary: A Python package for hydro anomaly detection
+Home-page: https://github.com/yourusername/hydroanomaly
+Author: Your Name
+Author-email: Your Name <your.email@example.com>
+License-Expression: MIT
+Project-URL: Homepage, https://github.com/yourusername/hydroanomaly
+Project-URL: Bug Reports, https://github.com/yourusername/hydroanomaly/issues
+Project-URL: Source, https://github.com/yourusername/hydroanomaly
+Keywords: python,package,hydro,anomaly,detection
+Classifier: Programming Language :: Python :: 3
+Classifier: Operating System :: OS Independent
+Requires-Python: >=3.6
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: pandas>=1.3.0
+Requires-Dist: numpy>=1.20.0
+Requires-Dist: requests>=2.25.1
+Provides-Extra: dev
+Requires-Dist: pytest>=6.0; extra == "dev"
+Requires-Dist: black>=21.0; extra == "dev"
+Requires-Dist: flake8>=3.8; extra == "dev"
+Requires-Dist: mypy>=0.800; extra == "dev"
+Dynamic: author
+Dynamic: home-page
+Dynamic: license-file
+Dynamic: requires-python
+
+# HydroAnomaly
+
+A Python package for hydro anomaly detection and **USGS water data retrieval**.
+
+[](https://badge.fury.io/py/hydroanomaly)
+[](https://pepy.tech/project/hydroanomaly)
+
+## Installation
+
+```bash
+pip install hydroanomaly
+```
+
+## 🆕 New Feature: USGS Data Retrieval
+
+Easily retrieve real-time and historical water data from USGS Water Services:
+
+```python
+import hydroanomaly
+
+# ------------------------
+# User-defined settings
+# ------------------------
+site_number = "294643095035200" # USGS site number
+parameter_code = "63680" # Turbidity
+start_date = "2020-01-01"
+end_date = "2024-12-30"
+
+# ------------------------
+# Data Extraction from USGS
+# ------------------------
+data = hydroanomaly.get_usgs_data(
+    site_number=site_number,
+    parameter_code=parameter_code,
+    start_date=start_date,
+    end_date=end_date,
+    save_to_file="USGS_turbidity.csv",
+    parameter_name="Turbidity"
+)
+
+print(f"Retrieved {len(data)} data points!")
+print(data.head())
+```
+
+## Quick Start
+
+```python
+import hydroanomaly
+
+# Basic greeting functionality
+print(hydroanomaly.greet("Water Engineer"))
+# Output: Hello, Water Engineer!
+
+# Math utilities for data analysis
+result = hydroanomaly.add(25.5, 14.3)
+print(f"Sum: {result}")
+# Output: Sum: 39.8
+
+# USGS data retrieval
+data = hydroanomaly.get_usgs_data("08158000", "00060", "2023-01-01", "2023-01-31")
+print(f"Retrieved {len(data)} discharge measurements")
+```
+
+## Features
+
+- **🌊 USGS Data Retrieval**: Download real-time water data from USGS Water Services
+  - Support for any USGS site and parameter
+  - Automatic data cleaning and validation
+  - Fallback synthetic data generation
+  - CSV export functionality
+- **Greeting Module**: Simple greeting functionality for applications
+- **Math Utilities**: Basic mathematical operations for data processing
+  - Addition and multiplication functions
+  - Division with error handling
+- **Error Handling**: Robust error handling for mathematical operations
+- **Well Tested**: Comprehensive test suite with 100% pass rate
+
+## USGS Data Parameters
+
+Common USGS parameter codes you can use:
+- **00060**: Discharge (cubic feet per second)
+- **00065**: Gage height (feet)
+- **00010**: Water temperature (°C)
+- **63680**: Turbidity (NTU)
+- **00300**: Dissolved oxygen (mg/L)
+- **00095**: Specific conductance (µS/cm)
+
+Find USGS site numbers at: https://waterdata.usgs.gov/nwis
+
+## Detailed Usage
+
+### USGS Data Retrieval
+```python
+from hydroanomaly.usgs_data import USGSDataRetriever
+
+# Create retriever instance
+retriever = USGSDataRetriever()
+
+# Get data with full control
+data = retriever.retrieve_data(
+    site_number="08158000", # Colorado River at Austin, TX
+    parameter_code="00060", # Discharge
+    start_date="2023-01-01",
+    end_date="2023-01-31"
+)
+
+# Get summary statistics
+summary = retriever.get_data_summary(data)
+print(f"Retrieved {summary['record_count']} records")
+print(f"Average discharge: {summary['value_stats']['mean']:.2f} cfs")
+
+# Save data
+retriever.save_data(data, "discharge_data.csv", "Discharge_cfs")
+```
+
+### Greeting Functions
+```python
+from hydroanomaly.hello import greet, say_goodbye
+
+# Greet users
+welcome_msg = greet("Data Scientist")
+print(welcome_msg) # Hello, Data Scientist!
+
+# Say goodbye
+farewell_msg = say_goodbye("User")
+print(farewell_msg) # Goodbye, User!
+```
+
+### Mathematical Operations
+```python
+from hydroanomaly.math_utils import add, multiply, divide
+
+# Basic operations
+sum_result = add(10.5, 20.3)
+product = multiply(5, 7)
+
+# Safe division with error handling
+try:
+    result = divide(100, 5)
+    print(f"Result: {result}") # Result: 20.0
+except ValueError as e:
+    print(f"Error: {e}")
+```
+
+## Use Cases
+
+- **🌊 Real Water Data Analysis**: Retrieve and analyze actual USGS water monitoring data
+- **📊 Hydro Research**: Access historical water quality and quantity data
+- **🚰 Water Management**: Monitor discharge, water levels, and quality parameters
+- **🎓 Educational Projects**: Learn data analysis with real environmental data
+- **🔬 Environmental Studies**: Research water patterns and anomalies
+- **⚡ Quick Prototyping**: Rapidly access water data for proof-of-concepts
+
+## API Reference
+
+### hydroanomaly.greet(name="World")
+Returns a greeting message.
+
+**Parameters:**
+- `name` (str, optional): Name to greet. Defaults to "World".
+
+**Returns:**
+- str: Greeting message
+
+### hydroanomaly.add(a, b)
+Adds two numbers.
+
+**Parameters:**
+- `a` (int/float): First number
+- `b` (int/float): Second number
+
+**Returns:**
+- int/float: Sum of a and b
+
+### hydroanomaly.multiply(a, b)
+Multiplies two numbers.
+
+**Parameters:**
+- `a` (int/float): First number
+- `b` (int/float): Second number
+
+**Returns:**
+- int/float: Product of a and b
+
+## Features
+
+- Feature 1
+- Feature 2
+- Feature 3
+
+## Contributing
+
+Contributions are welcome! Please feel free to submit a Pull Request.
+
+## License
+
+This project is licensed under the MIT License - see the LICENSE file for details.
hydroanomaly-0.2.0.dist-info/RECORD
ADDED
@@ -0,0 +1,9 @@
+hydroanomaly/__init__.py,sha256=i3F0lmj7Jia0je1ArZhx4OsK-nnPCulQSz5WoCpZ3MI,413
+hydroanomaly/hello.py,sha256=AhK7UKF_3TyZcWL4IDlZq_BXdKQzUP-is-jv59fgqk4,566
+hydroanomaly/math_utils.py,sha256=CDOGWAiRlb2PK5SNFysumnzp7_LbZ9aleHLR_3lsGrs,856
+hydroanomaly/usgs_data.py,sha256=zUvfu3go-7cQuFtD8Hbm7pABpw_RPWuJxE66NhxYmIU,11631
+hydroanomaly-0.2.0.dist-info/licenses/LICENSE,sha256=OphKV48tcMv6ep-7j-8T6nycykPT0g8ZlMJ9zbGvdPs,1066
+hydroanomaly-0.2.0.dist-info/METADATA,sha256=7VORMpDk286u6ElwB890-ISI9FlynP21CXk-dSR-8sM,6140
+hydroanomaly-0.2.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+hydroanomaly-0.2.0.dist-info/top_level.txt,sha256=t-5Lc-eTLlkxIhR_N1Cpp6_YZafKS3xLLk9D2CtbE7o,13
+hydroanomaly-0.2.0.dist-info/RECORD,,
hydroanomaly-0.1.0.dist-info/METADATA
REMOVED
@@ -1,60 +0,0 @@
-Metadata-Version: 2.4
-Name: hydroanomaly
-Version: 0.1.0
-Summary: A Python package for hydro anomaly detection
-Home-page: https://github.com/yourusername/hydroanomaly
-Author: Your Name
-Author-email: Your Name <your.email@example.com>
-License: MIT
-Project-URL: Homepage, https://github.com/yourusername/hydroanomaly
-Project-URL: Bug Reports, https://github.com/yourusername/hydroanomaly/issues
-Project-URL: Source, https://github.com/yourusername/hydroanomaly
-Keywords: python,package,hydro,anomaly,detection
-Classifier: Programming Language :: Python :: 3
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Operating System :: OS Independent
-Requires-Python: >=3.6
-Description-Content-Type: text/markdown
-License-File: LICENSE
-Provides-Extra: dev
-Requires-Dist: pytest>=6.0; extra == "dev"
-Requires-Dist: black>=21.0; extra == "dev"
-Requires-Dist: flake8>=3.8; extra == "dev"
-Requires-Dist: mypy>=0.800; extra == "dev"
-Dynamic: author
-Dynamic: home-page
-Dynamic: license-file
-Dynamic: requires-python
-
-# HydroAnomaly
-
-A Python package for hydro anomaly detection.
-
-## Installation
-
-```bash
-pip install hydroanomaly
-```
-
-## Usage
-
-```python
-from hydroanomaly import hello
-
-# Example usage
-hello.greet("World")
-```
-
-## Features
-
-- Feature 1
-- Feature 2
-- Feature 3
-
-## Contributing
-
-Contributions are welcome! Please feel free to submit a Pull Request.
-
-## License
-
-This project is licensed under the MIT License - see the LICENSE file for details.
hydroanomaly-0.1.0.dist-info/RECORD
REMOVED
@@ -1,8 +0,0 @@
-hydroanomaly/__init__.py,sha256=kUDFH0OgO9N8JgiU66z4ivD_xVNxMqkU3X522ebiXas,295
-hydroanomaly/hello.py,sha256=AhK7UKF_3TyZcWL4IDlZq_BXdKQzUP-is-jv59fgqk4,566
-hydroanomaly/math_utils.py,sha256=CDOGWAiRlb2PK5SNFysumnzp7_LbZ9aleHLR_3lsGrs,856
-hydroanomaly-0.1.0.dist-info/licenses/LICENSE,sha256=OphKV48tcMv6ep-7j-8T6nycykPT0g8ZlMJ9zbGvdPs,1066
-hydroanomaly-0.1.0.dist-info/METADATA,sha256=xQoNIBUgw2D3cUJpyCvMbkUrfJSAdxqW_3AFjF-8ots,1458
-hydroanomaly-0.1.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-hydroanomaly-0.1.0.dist-info/top_level.txt,sha256=t-5Lc-eTLlkxIhR_N1Cpp6_YZafKS3xLLk9D2CtbE7o,13
-hydroanomaly-0.1.0.dist-info/RECORD,,
{hydroanomaly-0.1.0.dist-info → hydroanomaly-0.2.0.dist-info}/WHEEL
File without changes
{hydroanomaly-0.1.0.dist-info → hydroanomaly-0.2.0.dist-info}/licenses/LICENSE
File without changes
{hydroanomaly-0.1.0.dist-info → hydroanomaly-0.2.0.dist-info}/top_level.txt
File without changes