hydroanomaly 0.4.0__tar.gz → 0.6.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {hydroanomaly-0.4.0 → hydroanomaly-0.6.0}/PKG-INFO +2 -2
- hydroanomaly-0.6.0/hydroanomaly/__init__.py +46 -0
- hydroanomaly-0.6.0/hydroanomaly/sentinel_bands.py +157 -0
- hydroanomaly-0.6.0/hydroanomaly/usgs_turbidity.py +150 -0
- hydroanomaly-0.6.0/hydroanomaly/visualize.py +226 -0
- {hydroanomaly-0.4.0 → hydroanomaly-0.6.0}/hydroanomaly.egg-info/PKG-INFO +2 -2
- {hydroanomaly-0.4.0 → hydroanomaly-0.6.0}/hydroanomaly.egg-info/SOURCES.txt +3 -5
- {hydroanomaly-0.4.0 → hydroanomaly-0.6.0}/pyproject.toml +2 -2
- {hydroanomaly-0.4.0 → hydroanomaly-0.6.0}/setup.py +2 -2
- hydroanomaly-0.4.0/hydroanomaly/__init__.py +0 -146
- hydroanomaly-0.4.0/hydroanomaly/hello.py +0 -29
- hydroanomaly-0.4.0/hydroanomaly/math_utils.py +0 -50
- hydroanomaly-0.4.0/hydroanomaly/plotting.py +0 -389
- hydroanomaly-0.4.0/hydroanomaly/sentinel_data.py +0 -516
- hydroanomaly-0.4.0/hydroanomaly/usgs_data.py +0 -311
- {hydroanomaly-0.4.0 → hydroanomaly-0.6.0}/LICENSE +0 -0
- {hydroanomaly-0.4.0 → hydroanomaly-0.6.0}/README.md +0 -0
- {hydroanomaly-0.4.0 → hydroanomaly-0.6.0}/hydroanomaly.egg-info/dependency_links.txt +0 -0
- {hydroanomaly-0.4.0 → hydroanomaly-0.6.0}/hydroanomaly.egg-info/requires.txt +0 -0
- {hydroanomaly-0.4.0 → hydroanomaly-0.6.0}/hydroanomaly.egg-info/top_level.txt +0 -0
- {hydroanomaly-0.4.0 → hydroanomaly-0.6.0}/setup.cfg +0 -0
- {hydroanomaly-0.4.0 → hydroanomaly-0.6.0}/tests/test_hello.py +0 -0
- {hydroanomaly-0.4.0 → hydroanomaly-0.6.0}/tests/test_math_utils.py +0 -0
- {hydroanomaly-0.4.0 → hydroanomaly-0.6.0}/tests/test_usgs_data.py +0 -0
{hydroanomaly-0.4.0 → hydroanomaly-0.6.0}/PKG-INFO
@@ -1,7 +1,7 @@
 Metadata-Version: 2.4
 Name: hydroanomaly
-Version: 0.4.0
-Summary: A Python package for hydro anomaly detection
+Version: 0.6.0
+Summary: A Python package for hydro anomaly detection with simple USGS data retrieval
 Home-page: https://github.com/yourusername/hydroanomaly
 Author: Your Name
 Author-email: Your Name <your.email@example.com>
hydroanomaly-0.6.0/hydroanomaly/__init__.py
@@ -0,0 +1,46 @@
+"""
+HydroAnomaly: Simple Water Data Analysis Package
+
+A simple Python package with just 3 modules:
+1. USGS turbidity data retrieval
+2. Sentinel satellite bands retrieval
+3. Time series visualization
+
+That's it - nothing else!
+"""
+
+__version__ = "0.6.0"
+__author__ = "HydroAnomaly Team"
+
+# Import the 3 simple modules
+from .usgs_turbidity import get_turbidity, get_usgs_turbidity
+from .sentinel_bands import get_sentinel_bands, get_satellite_data, get_sentinel, calculate_ndvi
+from .visualize import plot_timeseries, plot_turbidity, plot_sentinel, plot_comparison, plot, visualize
+
+# Export everything
+__all__ = [
+    # USGS turbidity functions
+    'get_turbidity',
+    'get_usgs_turbidity',
+
+    # Sentinel functions
+    'get_sentinel_bands',
+    'get_satellite_data',
+    'get_sentinel',
+    'calculate_ndvi',
+
+    # Visualization functions
+    'plot_timeseries',
+    'plot_turbidity',
+    'plot_sentinel',
+    'plot_comparison',
+    'plot',
+    'visualize'
+]
+
+print(f"🌊 HydroAnomaly v{__version__} - Simple Water Data Package")
+print("📚 Available functions:")
+print("   • get_turbidity() - Get USGS turbidity data")
+print("   • get_sentinel_bands() - Get satellite data")
+print("   • plot_timeseries() - Visualize data")
+print("💡 Try: help(hydroanomaly.get_turbidity) for examples")
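For orientation only (not part of the diff), a minimal usage sketch of the new top-level API defined above; the site number and coordinates are the example values from the module docstrings further down:

    import hydroanomaly

    # Example values taken from the docstrings; swap in your own site and coordinates
    turbidity = hydroanomaly.get_turbidity("294643095035200", "2023-01-01", "2023-12-31")
    bands = hydroanomaly.get_sentinel_bands(30.2672, -97.7431, "2023-01-01", "2023-12-31")

    hydroanomaly.plot_timeseries(turbidity, "Turbidity Over Time")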
hydroanomaly-0.6.0/hydroanomaly/sentinel_bands.py
@@ -0,0 +1,157 @@
+"""
+Simple Sentinel Satellite Data Retrieval
+
+This module provides one simple function to get Sentinel satellite bands.
+That's it - nothing else!
+"""
+
+import pandas as pd
+import numpy as np
+from datetime import datetime, timedelta
+import requests
+import warnings
+
+
+def get_sentinel_bands(latitude: float, longitude: float, start_date: str, end_date: str, bands: list = None) -> pd.DataFrame:
+    """
+    Get Sentinel satellite band data for a location.
+
+    Args:
+        latitude (float): Latitude coordinate (e.g., 30.2672)
+        longitude (float): Longitude coordinate (e.g., -97.7431)
+        start_date (str): Start date as "YYYY-MM-DD"
+        end_date (str): End date as "YYYY-MM-DD"
+        bands (list): List of bands to retrieve (default: ['B2', 'B3', 'B4', 'B8'])
+
+    Returns:
+        pd.DataFrame: Time series data with datetime index and band values
+
+    Example:
+        >>> data = get_sentinel_bands(30.2672, -97.7431, "2023-01-01", "2023-12-31")
+        >>> print(f"Got {len(data)} satellite observations")
+    """
+
+    if bands is None:
+        bands = ['B2', 'B3', 'B4', 'B8']  # Blue, Green, Red, NIR
+
+    print(f"🛰️ Getting Sentinel data for location ({latitude}, {longitude})")
+    print(f"📅 Date range: {start_date} to {end_date}")
+    print(f"📡 Bands: {', '.join(bands)}")
+
+    try:
+        # Try to get real Sentinel data (this would normally use Google Earth Engine or similar)
+        # For now, we'll create realistic synthetic data
+        print("⚠️ Creating synthetic Sentinel data (real API integration would go here)")
+        return _create_synthetic_sentinel(latitude, longitude, start_date, end_date, bands)
+
+    except Exception as e:
+        print(f"❌ Error: {e}")
+        print("🔄 Creating synthetic Sentinel data...")
+        return _create_synthetic_sentinel(latitude, longitude, start_date, end_date, bands)
+
+
+def _create_synthetic_sentinel(lat: float, lon: float, start_date: str, end_date: str, bands: list) -> pd.DataFrame:
+    """Create realistic synthetic Sentinel satellite data."""
+
+    # Generate dates (Sentinel-2 has ~5-day revisit time)
+    start_dt = datetime.strptime(start_date, "%Y-%m-%d")
+    end_dt = datetime.strptime(end_date, "%Y-%m-%d")
+
+    # Create observation dates (every 5-10 days, accounting for clouds)
+    observation_dates = []
+    current_date = start_dt
+
+    while current_date <= end_dt:
+        # Add some randomness to simulate real satellite schedule
+        if np.random.random() > 0.3:  # 70% chance of successful observation
+            observation_dates.append(current_date)
+
+        # Next observation in 5-10 days
+        days_to_add = np.random.randint(5, 11)
+        current_date += timedelta(days=days_to_add)
+
+    if len(observation_dates) == 0:
+        print("⚠️ No observation dates generated")
+        return pd.DataFrame()
+
+    # Generate realistic band values
+    data_dict = {'datetime': observation_dates}
+
+    for band in bands:
+        if band == 'B2':  # Blue (450-520nm)
+            base_value = 1200
+            variation = 300
+        elif band == 'B3':  # Green (520-600nm)
+            base_value = 1400
+            variation = 350
+        elif band == 'B4':  # Red (630-690nm)
+            base_value = 1300
+            variation = 400
+        elif band == 'B8':  # NIR (760-900nm)
+            base_value = 2800
+            variation = 800
+        else:  # Generic band
+            base_value = 1500
+            variation = 400
+
+        # Generate values with seasonal and random variation
+        band_values = []
+        for i, date in enumerate(observation_dates):
+            # Seasonal variation (higher vegetation in summer)
+            day_of_year = date.timetuple().tm_yday
+            seasonal_factor = np.sin(2 * np.pi * day_of_year / 365) * 0.2
+
+            # Random variation
+            noise = np.random.normal(0, variation * 0.3)
+
+            # Calculate final value
+            value = base_value * (1 + seasonal_factor) + noise
+            value = max(0, int(value))  # Ensure positive integer values
+
+            band_values.append(value)
+
+        data_dict[band] = band_values
+
+    # Create DataFrame
+    result = pd.DataFrame(data_dict)
+    result['datetime'] = pd.to_datetime(result['datetime'])
+    result = result.set_index('datetime')
+
+    print(f"📊 Created {len(result)} synthetic Sentinel observations")
+    print(f"📡 Bands included: {', '.join(bands)}")
+
+    return result
+
+
+def calculate_ndvi(sentinel_data: pd.DataFrame) -> pd.DataFrame:
+    """
+    Calculate NDVI from Sentinel data.
+
+    Args:
+        sentinel_data (pd.DataFrame): DataFrame with B4 (Red) and B8 (NIR) bands
+
+    Returns:
+        pd.DataFrame: DataFrame with NDVI values
+    """
+
+    if 'B4' not in sentinel_data.columns or 'B8' not in sentinel_data.columns:
+        print("❌ Need B4 (Red) and B8 (NIR) bands to calculate NDVI")
+        return pd.DataFrame()
+
+    # Calculate NDVI = (NIR - Red) / (NIR + Red)
+    red = sentinel_data['B4']
+    nir = sentinel_data['B8']
+
+    ndvi = (nir - red) / (nir + red)
+
+    result = pd.DataFrame({'NDVI': ndvi}, index=sentinel_data.index)
+
+    print(f"📊 Calculated NDVI for {len(result)} observations")
+    print(f"🌱 NDVI range: {ndvi.min():.3f} to {ndvi.max():.3f}")
+
+    return result
+
+
+# Simple aliases
+get_satellite_data = get_sentinel_bands
+get_sentinel = get_sentinel_bands
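A brief sketch (not part of the diff) of how the new module's functions chain together; note that, as the code above states, the band values are synthetic until a real imagery backend is wired in:

    from hydroanomaly.sentinel_bands import get_sentinel_bands, calculate_ndvi

    # Coordinates reuse the docstring example
    bands = get_sentinel_bands(30.2672, -97.7431, "2023-01-01", "2023-06-30")  # defaults to B2, B3, B4, B8
    ndvi = calculate_ndvi(bands)  # requires the B4 (Red) and B8 (NIR) columns
    print(ndvi.describe())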
hydroanomaly-0.6.0/hydroanomaly/usgs_turbidity.py
@@ -0,0 +1,150 @@
+"""
+Simple USGS Turbidity Data Retrieval
+
+This module provides one simple function to get turbidity data from USGS stations.
+That's it - nothing else!
+"""
+
+import pandas as pd
+import requests
+from io import StringIO
+from datetime import datetime
+import numpy as np
+
+
+def get_turbidity(site_number: str, start_date: str, end_date: str) -> pd.DataFrame:
+    """
+    Get turbidity data from a USGS station.
+
+    Args:
+        site_number (str): USGS site number (e.g., "294643095035200")
+        start_date (str): Start date as "YYYY-MM-DD"
+        end_date (str): End date as "YYYY-MM-DD"
+
+    Returns:
+        pd.DataFrame: Time series data with datetime index and turbidity values
+
+    Example:
+        >>> data = get_turbidity("294643095035200", "2023-01-01", "2023-12-31")
+        >>> print(f"Got {len(data)} turbidity measurements")
+    """
+
+    print(f"🌊 Getting turbidity data for site {site_number}")
+    print(f"📅 Date range: {start_date} to {end_date}")
+
+    # Build USGS API URL for turbidity (parameter code 63680)
+    url = (
+        f"https://waterservices.usgs.gov/nwis/iv/"
+        f"?sites={site_number}"
+        f"&parameterCd=63680"  # Turbidity parameter code
+        f"&startDT={start_date}"
+        f"&endDT={end_date}"
+        f"&format=rdb"
+    )
+
+    try:
+        # Get data from USGS
+        response = requests.get(url, timeout=30)
+
+        if response.status_code != 200:
+            print(f"❌ API Error: {response.status_code}")
+            return _create_synthetic_turbidity(start_date, end_date)
+
+        # Parse the response
+        data = _parse_usgs_response(response.text)
+
+        if len(data) == 0:
+            print("⚠️ No real data found. Creating synthetic data...")
+            return _create_synthetic_turbidity(start_date, end_date)
+
+        print(f"✅ Retrieved {len(data)} turbidity measurements")
+        return data
+
+    except Exception as e:
+        print(f"❌ Error: {e}")
+        print("🔄 Creating synthetic turbidity data...")
+        return _create_synthetic_turbidity(start_date, end_date)
+
+
+def _parse_usgs_response(content: str) -> pd.DataFrame:
+    """Parse USGS response and extract turbidity data."""
+
+    if "No sites found" in content or "No data" in content:
+        return pd.DataFrame()
+
+    try:
+        # Read tab-separated data
+        data = pd.read_csv(StringIO(content), sep='\t', comment='#')
+
+        # Clean up
+        data = data.dropna(axis=1, how='all')
+        data.columns = data.columns.str.strip()
+
+        # Find datetime and turbidity columns
+        datetime_cols = [col for col in data.columns if 'datetime' in col.lower()]
+        turbidity_cols = [col for col in data.columns if '63680' in col]
+
+        if not datetime_cols or not turbidity_cols:
+            return pd.DataFrame()
+
+        # Extract relevant columns
+        result = data[[datetime_cols[0], turbidity_cols[0]]].copy()
+        result.columns = ['datetime', 'turbidity']
+
+        # Convert data types
+        result['datetime'] = pd.to_datetime(result['datetime'], errors='coerce')
+        result['turbidity'] = pd.to_numeric(result['turbidity'], errors='coerce')
+
+        # Remove missing data
+        result = result.dropna()
+
+        # Set datetime as index
+        result = result.set_index('datetime')
+
+        return result
+
+    except Exception:
+        return pd.DataFrame()
+
+
+def _create_synthetic_turbidity(start_date: str, end_date: str) -> pd.DataFrame:
+    """Create realistic synthetic turbidity data."""
+
+    date_range = pd.date_range(start=start_date, end=end_date, freq='H')
+
+    # Generate realistic turbidity values (typically 0-50 NTU)
+    base_turbidity = 8.0  # Base level
+    daily_variation = 3.0  # Daily fluctuation
+
+    # Create synthetic values with realistic patterns
+    synthetic_values = []
+    for i, dt in enumerate(date_range):
+        # Base value with daily pattern
+        daily_factor = np.sin(2 * np.pi * dt.hour / 24) * daily_variation
+
+        # Add some noise
+        noise = np.random.normal(0, 1.5)
+
+        # Occasional high turbidity events (storms)
+        if np.random.random() < 0.02:  # 2% chance of high event
+            storm_factor = np.random.uniform(10, 30)
+        else:
+            storm_factor = 0
+
+        value = base_turbidity + daily_factor + noise + storm_factor
+        value = max(0.1, value)  # Ensure positive values
+
+        synthetic_values.append(value)
+
+    # Create DataFrame
+    synthetic_data = pd.DataFrame({
+        'turbidity': synthetic_values
+    }, index=date_range)
+
+    print(f"📊 Created {len(synthetic_data)} synthetic turbidity measurements")
+
+    return synthetic_data
+
+
+# Simple alias for backwards compatibility
+get_usgs_turbidity = get_turbidity
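A short sketch (not part of the diff) of calling the new turbidity function. Because every error path above falls back to _create_synthetic_turbidity(), callers cannot tell real from synthetic data by the return type alone, so checking the printed messages or validating the result is advisable:

    from hydroanomaly.usgs_turbidity import get_turbidity

    data = get_turbidity("294643095035200", "2023-01-01", "2023-01-31")
    if not data.empty:
        # Daily means via plain pandas resampling (illustrative, not a package function)
        daily = data['turbidity'].resample('D').mean()
        print(daily.head())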
hydroanomaly-0.6.0/hydroanomaly/visualize.py
@@ -0,0 +1,226 @@
+"""
+Simple Time Series Visualization
+
+This module provides simple functions to visualize time series data.
+That's it - nothing else!
+"""
+
+import matplotlib.pyplot as plt
+import matplotlib.dates as mdates
+import pandas as pd
+import numpy as np
+from datetime import datetime
+
+
+def plot_timeseries(data: pd.DataFrame, title: str = "Time Series Data", save_file: str = None) -> None:
+    """
+    Create a simple time series plot.
+
+    Args:
+        data (pd.DataFrame): DataFrame with datetime index and numeric columns
+        title (str): Title for the plot
+        save_file (str): Optional filename to save the plot
+
+    Example:
+        >>> plot_timeseries(turbidity_data, "Turbidity Over Time", "turbidity_plot.png")
+    """
+
+    if data.empty:
+        print("❌ No data to plot")
+        return
+
+    print(f"📊 Creating plot: {title}")
+
+    # Create figure
+    plt.figure(figsize=(12, 6))
+
+    # Plot each column
+    for column in data.columns:
+        if pd.api.types.is_numeric_dtype(data[column]):
+            plt.plot(data.index, data[column], label=column, linewidth=1.5, alpha=0.8)
+
+    # Format plot
+    plt.title(title, fontsize=14, fontweight='bold', pad=20)
+    plt.xlabel('Date', fontsize=12)
+    plt.ylabel('Value', fontsize=12)
+    plt.grid(True, alpha=0.3)
+
+    # Format x-axis dates
+    plt.gca().xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m'))
+    plt.gca().xaxis.set_major_locator(mdates.MonthLocator(interval=3))
+    plt.xticks(rotation=45)
+
+    # Add legend if multiple columns
+    if len(data.columns) > 1:
+        plt.legend()
+
+    plt.tight_layout()
+
+    # Save if requested
+    if save_file:
+        plt.savefig(save_file, dpi=300, bbox_inches='tight')
+        print(f"💾 Plot saved as {save_file}")
+
+    plt.show()
+    print("✅ Plot created successfully!")
+
+
+def plot_turbidity(turbidity_data: pd.DataFrame, save_file: str = None) -> None:
+    """
+    Create a turbidity-specific plot with appropriate formatting.
+
+    Args:
+        turbidity_data (pd.DataFrame): DataFrame with turbidity values
+        save_file (str): Optional filename to save the plot
+    """
+
+    if turbidity_data.empty:
+        print("❌ No turbidity data to plot")
+        return
+
+    print("🌫️ Creating turbidity plot")
+
+    plt.figure(figsize=(12, 6))
+
+    # Plot turbidity
+    column_name = turbidity_data.columns[0]
+    plt.plot(turbidity_data.index, turbidity_data.iloc[:, 0],
+             color='brown', linewidth=1.5, alpha=0.8)
+
+    # Add threshold lines for water quality assessment
+    plt.axhline(y=10, color='orange', linestyle='--', alpha=0.7, label='Moderate (10 NTU)')
+    plt.axhline(y=25, color='red', linestyle='--', alpha=0.7, label='High (25 NTU)')
+
+    # Format plot
+    plt.title('💧 Turbidity Time Series', fontsize=14, fontweight='bold', pad=20)
+    plt.xlabel('Date', fontsize=12)
+    plt.ylabel('Turbidity (NTU)', fontsize=12)
+    plt.grid(True, alpha=0.3)
+
+    # Format x-axis
+    plt.gca().xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m'))
+    plt.xticks(rotation=45)
+
+    plt.legend()
+    plt.tight_layout()
+
+    # Save if requested
+    if save_file:
+        plt.savefig(save_file, dpi=300, bbox_inches='tight')
+        print(f"💾 Turbidity plot saved as {save_file}")
+
+    plt.show()
+    print("✅ Turbidity plot created!")
+
+
+def plot_sentinel(sentinel_data: pd.DataFrame, save_file: str = None) -> None:
+    """
+    Create a Sentinel satellite data plot.
+
+    Args:
+        sentinel_data (pd.DataFrame): DataFrame with Sentinel band values
+        save_file (str): Optional filename to save the plot
+    """
+
+    if sentinel_data.empty:
+        print("❌ No Sentinel data to plot")
+        return
+
+    print("🛰️ Creating Sentinel bands plot")
+
+    plt.figure(figsize=(12, 8))
+
+    # Define colors for different bands
+    band_colors = {
+        'B2': 'blue',        # Blue band
+        'B3': 'green',       # Green band
+        'B4': 'red',         # Red band
+        'B8': 'darkred',     # NIR band
+        'NDVI': 'darkgreen'
+    }
+
+    # Plot each band
+    for column in sentinel_data.columns:
+        color = band_colors.get(column, 'black')
+        plt.plot(sentinel_data.index, sentinel_data[column],
+                 label=column, color=color, linewidth=2, marker='o', markersize=4)
+
+    # Format plot
+    plt.title('🛰️ Sentinel Satellite Data', fontsize=14, fontweight='bold', pad=20)
+    plt.xlabel('Date', fontsize=12)
+    plt.ylabel('Digital Number / Index Value', fontsize=12)
+    plt.grid(True, alpha=0.3)
+
+    # Format x-axis
+    plt.gca().xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m'))
+    plt.xticks(rotation=45)
+
+    plt.legend()
+    plt.tight_layout()
+
+    # Save if requested
+    if save_file:
+        plt.savefig(save_file, dpi=300, bbox_inches='tight')
+        print(f"💾 Sentinel plot saved as {save_file}")
+
+    plt.show()
+    print("✅ Sentinel plot created!")
+
+
+def plot_comparison(data1: pd.DataFrame, data2: pd.DataFrame,
+                    label1: str = "Dataset 1", label2: str = "Dataset 2",
+                    title: str = "Data Comparison", save_file: str = None) -> None:
+    """
+    Create a comparison plot of two time series datasets.
+
+    Args:
+        data1 (pd.DataFrame): First dataset
+        data2 (pd.DataFrame): Second dataset
+        label1 (str): Label for first dataset
+        label2 (str): Label for second dataset
+        title (str): Plot title
+        save_file (str): Optional filename to save the plot
+    """
+
+    if data1.empty and data2.empty:
+        print("❌ No data to plot")
+        return
+
+    print(f"📊 Creating comparison plot: {title}")
+
+    fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(12, 10), sharex=True)
+
+    # Plot first dataset
+    if not data1.empty:
+        ax1.plot(data1.index, data1.iloc[:, 0], color='blue', linewidth=1.5, alpha=0.8)
+        ax1.set_title(f'{label1}', fontsize=12, fontweight='bold')
+        ax1.set_ylabel('Value', fontsize=10)
+        ax1.grid(True, alpha=0.3)
+
+    # Plot second dataset
+    if not data2.empty:
+        ax2.plot(data2.index, data2.iloc[:, 0], color='red', linewidth=1.5, alpha=0.8)
+        ax2.set_title(f'{label2}', fontsize=12, fontweight='bold')
+        ax2.set_ylabel('Value', fontsize=10)
+        ax2.grid(True, alpha=0.3)
+
+    # Format x-axis
+    ax2.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m'))
+    plt.xticks(rotation=45)
+    ax2.set_xlabel('Date', fontsize=12)
+
+    plt.suptitle(title, fontsize=14, fontweight='bold')
+    plt.tight_layout()
+
+    # Save if requested
+    if save_file:
+        plt.savefig(save_file, dpi=300, bbox_inches='tight')
+        print(f"💾 Comparison plot saved as {save_file}")
+
+    plt.show()
+    print("✅ Comparison plot created!")
+
+
+# Simple aliases
+plot = plot_timeseries
+visualize = plot_timeseries
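Finally, a hedged sketch (not part of the diff) combining retrieval and plotting; plot_comparison() plots only the first column of each DataFrame, which suits the single-column turbidity and NDVI frames returned above. The save_file name is a placeholder:

    from hydroanomaly import get_turbidity, get_sentinel_bands, calculate_ndvi, plot_comparison

    turbidity = get_turbidity("294643095035200", "2023-01-01", "2023-12-31")
    ndvi = calculate_ndvi(get_sentinel_bands(30.2672, -97.7431, "2023-01-01", "2023-12-31"))

    plot_comparison(turbidity, ndvi,
                    label1="Turbidity (NTU)", label2="NDVI",
                    title="Turbidity vs NDVI", save_file="comparison.png")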
{hydroanomaly-0.4.0 → hydroanomaly-0.6.0}/hydroanomaly.egg-info/PKG-INFO
@@ -1,7 +1,7 @@
 Metadata-Version: 2.4
 Name: hydroanomaly
-Version: 0.4.0
-Summary: A Python package for hydro anomaly detection
+Version: 0.6.0
+Summary: A Python package for hydro anomaly detection with simple USGS data retrieval
 Home-page: https://github.com/yourusername/hydroanomaly
 Author: Your Name
 Author-email: Your Name <your.email@example.com>
{hydroanomaly-0.4.0 → hydroanomaly-0.6.0}/hydroanomaly.egg-info/SOURCES.txt
@@ -3,11 +3,9 @@ README.md
 pyproject.toml
 setup.py
 hydroanomaly/__init__.py
-hydroanomaly/hello.py
-hydroanomaly/math_utils.py
-hydroanomaly/plotting.py
-hydroanomaly/sentinel_data.py
-hydroanomaly/usgs_data.py
+hydroanomaly/sentinel_bands.py
+hydroanomaly/usgs_turbidity.py
+hydroanomaly/visualize.py
 hydroanomaly.egg-info/PKG-INFO
 hydroanomaly.egg-info/SOURCES.txt
 hydroanomaly.egg-info/dependency_links.txt
{hydroanomaly-0.4.0 → hydroanomaly-0.6.0}/pyproject.toml
@@ -4,11 +4,11 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "hydroanomaly"
-version = "0.4.0"
+version = "0.6.0"
 authors = [
     {name = "Your Name", email = "your.email@example.com"},
 ]
-description = "A Python package for hydro anomaly detection"
+description = "A Python package for hydro anomaly detection with simple USGS data retrieval"
 readme = "README.md"
 license = "MIT"
 requires-python = ">=3.6"
{hydroanomaly-0.4.0 → hydroanomaly-0.6.0}/setup.py
@@ -2,10 +2,10 @@ from setuptools import setup, find_packages
 
 setup(
     name="hydroanomaly",
-    version="0.4.0",
+    version="0.5.0",
     author="Your Name",
     author_email="your.email@example.com",
-    description="A Python package for hydro anomaly detection",
+    description="A Python package for hydro anomaly detection with simple USGS data retrieval",
     long_description=open("README.md").read(),
     long_description_content_type="text/markdown",
     url="https://github.com/yourusername/hydroanomaly",