hydroanomaly 0.5.0__tar.gz → 0.7.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (24)
  1. {hydroanomaly-0.5.0 → hydroanomaly-0.7.0}/PKG-INFO +24 -8
  2. hydroanomaly-0.7.0/hydroanomaly/__init__.py +46 -0
  3. hydroanomaly-0.7.0/hydroanomaly/sentinel_bands.py +104 -0
  4. hydroanomaly-0.7.0/hydroanomaly/usgs_turbidity.py +177 -0
  5. hydroanomaly-0.7.0/hydroanomaly/visualize.py +226 -0
  6. {hydroanomaly-0.5.0 → hydroanomaly-0.7.0}/hydroanomaly.egg-info/PKG-INFO +24 -8
  7. {hydroanomaly-0.5.0 → hydroanomaly-0.7.0}/hydroanomaly.egg-info/SOURCES.txt +3 -6
  8. {hydroanomaly-0.5.0 → hydroanomaly-0.7.0}/pyproject.toml +5 -3
  9. hydroanomaly-0.5.0/hydroanomaly/__init__.py +0 -146
  10. hydroanomaly-0.5.0/hydroanomaly/hello.py +0 -29
  11. hydroanomaly-0.5.0/hydroanomaly/math_utils.py +0 -50
  12. hydroanomaly-0.5.0/hydroanomaly/plotting.py +0 -389
  13. hydroanomaly-0.5.0/hydroanomaly/sentinel_data.py +0 -516
  14. hydroanomaly-0.5.0/hydroanomaly/usgs_data.py +0 -389
  15. hydroanomaly-0.5.0/setup.py +0 -31
  16. {hydroanomaly-0.5.0 → hydroanomaly-0.7.0}/LICENSE +0 -0
  17. {hydroanomaly-0.5.0 → hydroanomaly-0.7.0}/README.md +0 -0
  18. {hydroanomaly-0.5.0 → hydroanomaly-0.7.0}/hydroanomaly.egg-info/dependency_links.txt +0 -0
  19. {hydroanomaly-0.5.0 → hydroanomaly-0.7.0}/hydroanomaly.egg-info/requires.txt +0 -0
  20. {hydroanomaly-0.5.0 → hydroanomaly-0.7.0}/hydroanomaly.egg-info/top_level.txt +0 -0
  21. {hydroanomaly-0.5.0 → hydroanomaly-0.7.0}/setup.cfg +0 -0
  22. {hydroanomaly-0.5.0 → hydroanomaly-0.7.0}/tests/test_hello.py +0 -0
  23. {hydroanomaly-0.5.0 → hydroanomaly-0.7.0}/tests/test_math_utils.py +0 -0
  24. {hydroanomaly-0.5.0 → hydroanomaly-0.7.0}/tests/test_usgs_data.py +0 -0
@@ -1,11 +1,30 @@
  Metadata-Version: 2.4
  Name: hydroanomaly
- Version: 0.5.0
+ Version: 0.7.0
  Summary: A Python package for hydro anomaly detection with simple USGS data retrieval
- Home-page: https://github.com/yourusername/hydroanomaly
- Author: Your Name
- Author-email: Your Name <your.email@example.com>
- License-Expression: MIT
+ Author-email: Ehsan Kahrizi <ehsan.kahrizi@usu.edu>
+ License: MIT License
+ 
+ Copyright (c) 2025 Your Name
+ 
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+ 
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+ 
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
+ 
  Project-URL: Homepage, https://github.com/yourusername/hydroanomaly
  Project-URL: Bug Reports, https://github.com/yourusername/hydroanomaly/issues
  Project-URL: Source, https://github.com/yourusername/hydroanomaly
@@ -30,10 +49,7 @@ Requires-Dist: pytest>=6.0; extra == "dev"
  Requires-Dist: black>=21.0; extra == "dev"
  Requires-Dist: flake8>=3.8; extra == "dev"
  Requires-Dist: mypy>=0.800; extra == "dev"
- Dynamic: author
- Dynamic: home-page
  Dynamic: license-file
- Dynamic: requires-python
 
  # HydroAnomaly
 
@@ -0,0 +1,46 @@
+ """
+ HydroAnomaly: Simple Water Data Analysis Package
+ 
+ A simple Python package with just 3 modules:
+ 1. USGS turbidity data retrieval (returns data and site coordinates)
+ 2. Sentinel satellite bands retrieval
+ 3. Time series visualization
+ 
+ That's it - nothing else!
+ """
+ 
+ __version__ = "0.7.0"
+ __author__ = "Ehsan Kahrizi (Ehsan.kahrizi@usu.edu)"
+ 
+ # Import the 3 simple modules
+ from .usgs_turbidity import get_turbidity, get_usgs_turbidity
+ from .sentinel_bands import get_sentinel_bands, get_satellite_data, get_sentinel, calculate_ndvi
+ from .visualize import plot_timeseries, plot_turbidity, plot_sentinel, plot_comparison, plot, visualize
+ 
+ # Export everything
+ __all__ = [
+     # USGS turbidity functions
+     'get_turbidity',
+     'get_usgs_turbidity',
+ 
+     # Sentinel functions
+     'get_sentinel_bands',
+     'get_satellite_data',
+     'get_sentinel',
+     'calculate_ndvi',
+ 
+     # Visualization functions
+     'plot_timeseries',
+     'plot_turbidity',
+     'plot_sentinel',
+     'plot_comparison',
+     'plot',
+     'visualize'
+ ]
+ 
+ print(f"HydroAnomaly v{__version__} - Simple Water Data Package")
+ print("Available functions:")
+ print(" • get_turbidity() - Get USGS turbidity data and site coordinates")
+ print(" • get_sentinel_bands() - Get satellite data")
+ print(" • plot_timeseries() - Visualize data")
+ print("Try: help(hydroanomaly.get_turbidity) for examples")
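
Usage note: __init__.py re-exports the full public surface above, so the typical workflow is fetch, then plot. A minimal sketch (site number and dates come from the docstrings below; assumes the package is installed and USGS is reachable):

    import hydroanomaly

    # get_turbidity() returns a time-indexed DataFrame plus the site coordinates
    turbidity, (lat, lon) = hydroanomaly.get_turbidity(
        "294643095035200", "2023-01-01", "2023-12-31")

    # Plot the retrieved series (saving to PNG is optional)
    hydroanomaly.plot_turbidity(turbidity, save_file="turbidity.png")
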
@@ -0,0 +1,104 @@
+ """
+ Sentinel-2 Satellite Data Retrieval using Google Earth Engine (GEE)
+ 
+ This module provides a function to retrieve Sentinel-2 satellite band data
+ for a specified location and time period, with masking and cloud filtering.
+ """
+ 
+ import pandas as pd
+ import numpy as np
+ from datetime import datetime, timedelta
+ import requests
+ import warnings
+ 
+ def get_sentinel_bands_gee(
+     latitude: float,
+     longitude: float,
+     start_date: str,
+     end_date: str,
+     bands: list = None,
+     buffer_meters: int = 20,
+     cloudy_pixel_percentage: int = 20,
+     masks_to_apply: list = None
+ ) -> pd.DataFrame:
+     """
+     Retrieve Sentinel-2 bands from Google Earth Engine, applying custom masking.
+ 
+     Args:
+         latitude (float): Latitude of center point.
+         longitude (float): Longitude of center point.
+         start_date (str): Start date as "YYYY-MM-DD".
+         end_date (str): End date as "YYYY-MM-DD".
+         bands (list): List of bands to retrieve (default is common Sentinel-2 bands).
+         buffer_meters (int): Buffer size around the point, in meters.
+         cloudy_pixel_percentage (int): Maximum allowed cloud percentage for each image.
+         masks_to_apply (list): Masking strategies (e.g., ["water", "no_cloud_shadow", ...]).
+ 
+     Returns:
+         pd.DataFrame: DataFrame with band reflectance values per date.
+ 
+     Example:
+         >>> import ee
+         >>> ee.Authenticate()
+         >>> ee.Initialize()
+         >>> df = get_sentinel_bands_gee(29.77, -95.06, "2021-01-01", "2021-12-31")
+         >>> print(df.head())
+     """
+     if bands is None:
+         bands = ['B2','B3','B4','B8','SCL']
+     if masks_to_apply is None:
+         masks_to_apply = ["water", "no_cloud_shadow", "no_clouds", "no_snow_ice", "no_saturated"]
+ 
+     point = ee.Geometry.Point([longitude, latitude])
+     buffered_point = point.buffer(buffer_meters)
+ 
+     s2 = (ee.ImageCollection('COPERNICUS/S2_SR_HARMONIZED')
+           .filterBounds(buffered_point)
+           .filterDate(start_date, end_date)
+           .filter(ee.Filter.lt('CLOUDY_PIXEL_PERCENTAGE', cloudy_pixel_percentage))
+           .select(bands))
+ 
+     def dynamic_scl_mask(image):
+         scl = image.select('SCL')
+         mask = ee.Image.constant(1)
+         if "water" in masks_to_apply:
+             mask = mask.And(scl.eq(6))
+         if "no_cloud_shadow" in masks_to_apply:
+             mask = mask.And(scl.neq(3))
+         if "no_clouds" in masks_to_apply:
+             cloud_mask = scl.neq(8).And(scl.neq(9)).And(scl.neq(10))
+             mask = mask.And(cloud_mask)
+         if "no_snow_ice" in masks_to_apply:
+             mask = mask.And(scl.neq(11))
+         if "no_saturated" in masks_to_apply:
+             mask = mask.And(scl.neq(1))
+         return image.updateMask(mask)
+ 
+     s2_masked = s2.map(dynamic_scl_mask)
+ 
+     def extract_features(image):
+         date = image.date().format('YYYY-MM-dd HH:mm:ss')
+         values = image.reduceRegion(
+             reducer=ee.Reducer.mean(),
+             geometry=buffered_point,
+             scale=20,
+             maxPixels=1e8
+         )
+         return ee.Feature(None, values.set('date', date))
+ 
+     features = s2_masked.map(extract_features)
+     fc = ee.FeatureCollection(features).filter(ee.Filter.notNull(['B2']))
+ 
+     data = fc.getInfo()
+     rows = [f['properties'] for f in data['features']]
+     df = pd.DataFrame(rows)
+     if not df.empty:
+         df['date'] = pd.to_datetime(df['date'])
+         df = df.sort_values('date')
+         df = df.set_index('date')
+     return df
+ 
+ # Aliases for user convenience
+ get_sentinel_bands = get_sentinel_bands_gee
+ get_satellite_data = get_sentinel_bands_gee
+ get_sentinel = get_sentinel_bands_gee
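
Usage note: the file above returns per-date band means but defines no NDVI helper, even though __init__.py imports a calculate_ndvi name from this module. A hedged sketch of the calculation presumably intended, applied to the returned DataFrame (illustration only, not code from this release; assumes B4 and B8 were requested and that Earth Engine is authenticated as in the docstring example):

    import ee
    from hydroanomaly.sentinel_bands import get_sentinel_bands

    ee.Initialize()  # assumes a prior ee.Authenticate()

    bands = get_sentinel_bands(29.77, -95.06, "2021-01-01", "2021-12-31",
                               bands=['B2', 'B3', 'B4', 'B8', 'SCL'])

    # Illustrative NDVI from the red (B4) and near-infrared (B8) means
    if {'B4', 'B8'}.issubset(bands.columns):
        bands['NDVI'] = (bands['B8'] - bands['B4']) / (bands['B8'] + bands['B4'])
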
@@ -0,0 +1,177 @@
+ """
+ Simple USGS Turbidity Data Retrieval
+ 
+ This module provides one simple function to get turbidity data from USGS stations.
+ That's it - nothing else!
+ """
+ 
+ import pandas as pd
+ import requests
+ from io import StringIO
+ from datetime import datetime
+ import numpy as np
+ 
+ # Function to retrieve data -------------------------------------------------------------------------------------
+ def get_turbidity(site_number: str, start_date: str, end_date: str) -> pd.DataFrame:
+     """
+     Get turbidity data from a USGS station.
+ 
+     Args:
+         site_number (str): USGS site number (e.g., "294643095035200")
+         start_date (str): Start date as "YYYY-MM-DD"
+         end_date (str): End date as "YYYY-MM-DD"
+ 
+     Returns:
+         tuple: (pd.DataFrame, (latitude, longitude)) or (empty DataFrame, (None, None)) if not found.
+             * Note: the DataFrame is a time series with a datetime index and turbidity values.
+ 
+     Example:
+         >>> data, (lat, lon) = get_turbidity("294643095035200", "2023-01-01", "2023-12-31")
+         >>> print(f"Got {len(data)} turbidity measurements")
+     """
+ 
+     # --- Echo the requested site and date range ---------------------------------------------------------------
+     print(f"Getting turbidity data for site {site_number}")
+     print(f"Date range: {start_date} to {end_date}")
+ 
+     # --- Retrieve site metadata (lat/lon) ----------------------------------------------------------------------
+     site_url = (
+         f"https://waterservices.usgs.gov/nwis/site/"
+         f"?sites={site_number}"
+         f"&format=rdb")
+     try:
+         site_resp = requests.get(site_url, timeout=15)
+         if site_resp.status_code != 200:
+             print(f"Could not get site metadata: {site_resp.status_code}")
+             lat, lon = None, None
+         else:
+             df_meta = pd.read_csv(StringIO(site_resp.text), sep="\t", comment="#")
+             df_meta = df_meta.dropna(axis=1, how="all")
+             lat, lon = None, None
+             if not df_meta.empty:
+                 lat = float(df_meta["dec_lat_va"].iloc[0]) if "dec_lat_va" in df_meta.columns else None
+                 lon = float(df_meta["dec_long_va"].iloc[0]) if "dec_long_va" in df_meta.columns else None
+     except Exception as e:
+         print(f"Error getting site coordinates: {e}")
+         lat, lon = None, None
+ 
+ 
+     # --- Retrieve turbidity data (build the USGS API URL for turbidity, parameter code 63680) -------------------
+     url = (
+         f"https://waterservices.usgs.gov/nwis/iv/"
+         f"?sites={site_number}"
+         f"&parameterCd=63680"  # Turbidity parameter code
+         f"&startDT={start_date}"
+         f"&endDT={end_date}"
+         f"&format=rdb")
+ 
+     try:
+         # Get data from USGS
+         response = requests.get(url, timeout=30)
+ 
+         if response.status_code != 200:
+             print(f"No data found: API returned status {response.status_code}.")
+             print("Data for the specified site or parameters does not exist.")
+             return pd.DataFrame(), (lat, lon)
+ 
+         # Parse the response
+         data = _parse_usgs_response(response.text)
+ 
+         if len(data) == 0:
+             print("No data found for the specified parameters or date range.")
+             return pd.DataFrame(), (lat, lon)
+ 
+         print(f"Retrieved {len(data)} turbidity measurements")
+         return data, (lat, lon)
+ 
+     except Exception as e:
+         print(f"Error: {e}")
+         print("Data for the specified site or parameters does not exist.")
+         return pd.DataFrame(), (lat, lon)
+ 
+ 
+ # Function to parse and clean the turbidity time series from the USGS API response as a DataFrame -----------------
+ def _parse_usgs_response(content: str) -> pd.DataFrame:
+     """Parse USGS response and extract turbidity data."""
+ 
+     if "No sites found" in content or "No data" in content:
+         return pd.DataFrame()
+ 
+     try:
+         # Read tab-separated data
+         data = pd.read_csv(StringIO(content), sep='\t', comment='#')
+ 
+         # Clean up
+         data = data.dropna(axis=1, how='all')
+         data.columns = data.columns.str.strip()
+ 
+         # Find datetime and turbidity columns
+         datetime_cols = [col for col in data.columns if 'datetime' in col.lower()]
+         turbidity_cols = [col for col in data.columns if '63680' in col]
+ 
+         if not datetime_cols or not turbidity_cols:
+             return pd.DataFrame()
+ 
+         # Extract relevant columns
+         result = data[[datetime_cols[0], turbidity_cols[0]]].copy()
+         result.columns = ['datetime', 'turbidity']
+ 
+         # Convert data types
+         result['datetime'] = pd.to_datetime(result['datetime'], errors='coerce')
+         result['turbidity'] = pd.to_numeric(result['turbidity'], errors='coerce')
+ 
+         # Remove missing data
+         result = result.dropna()
+ 
+         # Set datetime as index
+         result = result.set_index('datetime')
+ 
+         return result
+ 
+     except Exception:
+         return pd.DataFrame()
+ 
+ 
+ """
+ # Generate synthetic data if data does not exist in USGS ----------------------------------------------------------
+ def _create_synthetic_turbidity(start_date: str, end_date: str) -> pd.DataFrame:
+     '''Create realistic synthetic turbidity data.'''
+ 
+     date_range = pd.date_range(start=start_date, end=end_date, freq='H')
+ 
+     # Generate realistic turbidity values (typically 0-50 NTU)
+     base_turbidity = 8.0   # Base level
+     daily_variation = 3.0  # Daily fluctuation
+ 
+     # Create synthetic values with realistic patterns
+     synthetic_values = []
+     for i, dt in enumerate(date_range):
+         # Base value with daily pattern
+         daily_factor = np.sin(2 * np.pi * dt.hour / 24) * daily_variation
+ 
+         # Add some noise
+         noise = np.random.normal(0, 1.5)
+ 
+         # Occasional high turbidity events (storms)
+         if np.random.random() < 0.02:  # 2% chance of high event
+             storm_factor = np.random.uniform(10, 30)
+         else:
+             storm_factor = 0
+ 
+         value = base_turbidity + daily_factor + noise + storm_factor
+         value = max(0.1, value)  # Ensure positive values
+ 
+         synthetic_values.append(value)
+ 
+     # Create DataFrame
+     synthetic_data = pd.DataFrame({
+         'turbidity': synthetic_values
+     }, index=date_range)
+ 
+     print(f"Created {len(synthetic_data)} synthetic turbidity measurements")
+ 
+     return synthetic_data
+ """
+ 
+ # Simple alias for backwards compatibility
+ get_usgs_turbidity = get_turbidity
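
Usage note: get_turbidity() returns both the series and the site coordinates, so callers unpack the tuple and check for an empty frame before analysis. A short sketch (site number and dates from the docstring example; the 25 NTU threshold mirrors the "High" line drawn in visualize.py and is illustrative only):

    from hydroanomaly.usgs_turbidity import get_turbidity

    turbidity, (lat, lon) = get_turbidity("294643095035200", "2023-01-01", "2023-12-31")

    if turbidity.empty:
        print("No turbidity data returned for this site and date range")
    else:
        print(f"Site at ({lat}, {lon}): {len(turbidity)} measurements")
        # Flag measurements above the illustrative 25 NTU threshold
        high = turbidity[turbidity["turbidity"] > 25]
        print(f"{len(high)} measurements above 25 NTU")
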
@@ -0,0 +1,226 @@
+ """
+ Simple Time Series Visualization
+ 
+ This module provides simple functions to visualize time series data.
+ That's it - nothing else!
+ """
+ 
+ import matplotlib.pyplot as plt
+ import matplotlib.dates as mdates
+ import pandas as pd
+ import numpy as np
+ from datetime import datetime
+ 
+ 
+ def plot_timeseries(data: pd.DataFrame, title: str = "Time Series Data", save_file: str = None) -> None:
+     """
+     Create a simple time series plot.
+ 
+     Args:
+         data (pd.DataFrame): DataFrame with datetime index and numeric columns
+         title (str): Title for the plot
+         save_file (str): Optional filename to save the plot
+ 
+     Example:
+         >>> plot_timeseries(turbidity_data, "Turbidity Over Time", "turbidity_plot.png")
+     """
+ 
+     if data.empty:
+         print("❌ No data to plot")
+         return
+ 
+     print(f"📊 Creating plot: {title}")
+ 
+     # Create figure
+     plt.figure(figsize=(12, 6))
+ 
+     # Plot each column
+     for column in data.columns:
+         if pd.api.types.is_numeric_dtype(data[column]):
+             plt.plot(data.index, data[column], label=column, linewidth=1.5, alpha=0.8)
+ 
+     # Format plot
+     plt.title(title, fontsize=14, fontweight='bold', pad=20)
+     plt.xlabel('Date', fontsize=12)
+     plt.ylabel('Value', fontsize=12)
+     plt.grid(True, alpha=0.3)
+ 
+     # Format x-axis dates
+     plt.gca().xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m'))
+     plt.gca().xaxis.set_major_locator(mdates.MonthLocator(interval=3))
+     plt.xticks(rotation=45)
+ 
+     # Add legend if multiple columns
+     if len(data.columns) > 1:
+         plt.legend()
+ 
+     plt.tight_layout()
+ 
+     # Save if requested
+     if save_file:
+         plt.savefig(save_file, dpi=300, bbox_inches='tight')
+         print(f"💾 Plot saved as {save_file}")
+ 
+     plt.show()
+     print("✅ Plot created successfully!")
+ 
+ 
+ def plot_turbidity(turbidity_data: pd.DataFrame, save_file: str = None) -> None:
+     """
+     Create a turbidity-specific plot with appropriate formatting.
+ 
+     Args:
+         turbidity_data (pd.DataFrame): DataFrame with turbidity values
+         save_file (str): Optional filename to save the plot
+     """
+ 
+     if turbidity_data.empty:
+         print("❌ No turbidity data to plot")
+         return
+ 
+     print("🌫️ Creating turbidity plot")
+ 
+     plt.figure(figsize=(12, 6))
+ 
+     # Plot turbidity
+     column_name = turbidity_data.columns[0]
+     plt.plot(turbidity_data.index, turbidity_data.iloc[:, 0],
+              color='brown', linewidth=1.5, alpha=0.8)
+ 
+     # Add threshold lines for water quality assessment
+     plt.axhline(y=10, color='orange', linestyle='--', alpha=0.7, label='Moderate (10 NTU)')
+     plt.axhline(y=25, color='red', linestyle='--', alpha=0.7, label='High (25 NTU)')
+ 
+     # Format plot
+     plt.title('💧 Turbidity Time Series', fontsize=14, fontweight='bold', pad=20)
+     plt.xlabel('Date', fontsize=12)
+     plt.ylabel('Turbidity (NTU)', fontsize=12)
+     plt.grid(True, alpha=0.3)
+ 
+     # Format x-axis
+     plt.gca().xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m'))
+     plt.xticks(rotation=45)
+ 
+     plt.legend()
+     plt.tight_layout()
+ 
+     # Save if requested
+     if save_file:
+         plt.savefig(save_file, dpi=300, bbox_inches='tight')
+         print(f"💾 Turbidity plot saved as {save_file}")
+ 
+     plt.show()
+     print("✅ Turbidity plot created!")
+ 
+ 
+ def plot_sentinel(sentinel_data: pd.DataFrame, save_file: str = None) -> None:
+     """
+     Create a Sentinel satellite data plot.
+ 
+     Args:
+         sentinel_data (pd.DataFrame): DataFrame with Sentinel band values
+         save_file (str): Optional filename to save the plot
+     """
+ 
+     if sentinel_data.empty:
+         print("❌ No Sentinel data to plot")
+         return
+ 
+     print("🛰️ Creating Sentinel bands plot")
+ 
+     plt.figure(figsize=(12, 8))
+ 
+     # Define colors for different bands
+     band_colors = {
+         'B2': 'blue',       # Blue band
+         'B3': 'green',      # Green band
+         'B4': 'red',        # Red band
+         'B8': 'darkred',    # NIR band
+         'NDVI': 'darkgreen'
+     }
+ 
+     # Plot each band
+     for column in sentinel_data.columns:
+         color = band_colors.get(column, 'black')
+         plt.plot(sentinel_data.index, sentinel_data[column],
+                  label=column, color=color, linewidth=2, marker='o', markersize=4)
+ 
+     # Format plot
+     plt.title('🛰️ Sentinel Satellite Data', fontsize=14, fontweight='bold', pad=20)
+     plt.xlabel('Date', fontsize=12)
+     plt.ylabel('Digital Number / Index Value', fontsize=12)
+     plt.grid(True, alpha=0.3)
+ 
+     # Format x-axis
+     plt.gca().xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m'))
+     plt.xticks(rotation=45)
+ 
+     plt.legend()
+     plt.tight_layout()
+ 
+     # Save if requested
+     if save_file:
+         plt.savefig(save_file, dpi=300, bbox_inches='tight')
+         print(f"💾 Sentinel plot saved as {save_file}")
+ 
+     plt.show()
+     print("✅ Sentinel plot created!")
+ 
+ 
+ def plot_comparison(data1: pd.DataFrame, data2: pd.DataFrame,
+                     label1: str = "Dataset 1", label2: str = "Dataset 2",
+                     title: str = "Data Comparison", save_file: str = None) -> None:
+     """
+     Create a comparison plot of two time series datasets.
+ 
+     Args:
+         data1 (pd.DataFrame): First dataset
+         data2 (pd.DataFrame): Second dataset
+         label1 (str): Label for first dataset
+         label2 (str): Label for second dataset
+         title (str): Plot title
+         save_file (str): Optional filename to save the plot
+     """
+ 
+     if data1.empty and data2.empty:
+         print("❌ No data to plot")
+         return
+ 
+     print(f"📊 Creating comparison plot: {title}")
+ 
+     fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(12, 10), sharex=True)
+ 
+     # Plot first dataset
+     if not data1.empty:
+         ax1.plot(data1.index, data1.iloc[:, 0], color='blue', linewidth=1.5, alpha=0.8)
+         ax1.set_title(f'{label1}', fontsize=12, fontweight='bold')
+         ax1.set_ylabel('Value', fontsize=10)
+         ax1.grid(True, alpha=0.3)
+ 
+     # Plot second dataset
+     if not data2.empty:
+         ax2.plot(data2.index, data2.iloc[:, 0], color='red', linewidth=1.5, alpha=0.8)
+         ax2.set_title(f'{label2}', fontsize=12, fontweight='bold')
+         ax2.set_ylabel('Value', fontsize=10)
+         ax2.grid(True, alpha=0.3)
+ 
+     # Format x-axis
+     ax2.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m'))
+     plt.xticks(rotation=45)
+     ax2.set_xlabel('Date', fontsize=12)
+ 
+     plt.suptitle(title, fontsize=14, fontweight='bold')
+     plt.tight_layout()
+ 
+     # Save if requested
+     if save_file:
+         plt.savefig(save_file, dpi=300, bbox_inches='tight')
+         print(f"💾 Comparison plot saved as {save_file}")
+ 
+     plt.show()
+     print("✅ Comparison plot created!")
+ 
+ 
+ # Simple aliases
+ plot = plot_timeseries
+ visualize = plot_timeseries
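
Usage note: every plotting helper takes a time-indexed DataFrame, so the outputs of the two retrieval modules can be passed straight through; plot_comparison() stacks two such frames on a shared x-axis. A brief sketch, assuming the turbidity and bands frames from the examples above:

    from hydroanomaly.visualize import plot_timeseries, plot_comparison

    # Single-series plot with an optional PNG export
    plot_timeseries(turbidity, title="Turbidity Over Time", save_file="turbidity_plot.png")

    # Two-panel comparison of in-situ turbidity vs. a Sentinel-2 band
    plot_comparison(turbidity, bands[["B4"]],
                    label1="USGS turbidity (NTU)", label2="Sentinel-2 B4 (mean reflectance)",
                    title="Turbidity vs. red-band reflectance")
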
@@ -1,11 +1,30 @@
  Metadata-Version: 2.4
  Name: hydroanomaly
- Version: 0.5.0
+ Version: 0.7.0
  Summary: A Python package for hydro anomaly detection with simple USGS data retrieval
- Home-page: https://github.com/yourusername/hydroanomaly
- Author: Your Name
- Author-email: Your Name <your.email@example.com>
- License-Expression: MIT
+ Author-email: Ehsan Kahrizi <ehsan.kahrizi@usu.edu>
+ License: MIT License
+ 
+ Copyright (c) 2025 Your Name
+ 
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+ 
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+ 
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
+ 
  Project-URL: Homepage, https://github.com/yourusername/hydroanomaly
  Project-URL: Bug Reports, https://github.com/yourusername/hydroanomaly/issues
  Project-URL: Source, https://github.com/yourusername/hydroanomaly
@@ -30,10 +49,7 @@ Requires-Dist: pytest>=6.0; extra == "dev"
  Requires-Dist: black>=21.0; extra == "dev"
  Requires-Dist: flake8>=3.8; extra == "dev"
  Requires-Dist: mypy>=0.800; extra == "dev"
- Dynamic: author
- Dynamic: home-page
  Dynamic: license-file
- Dynamic: requires-python
 
  # HydroAnomaly
 
@@ -1,13 +1,10 @@
  LICENSE
  README.md
  pyproject.toml
- setup.py
  hydroanomaly/__init__.py
- hydroanomaly/hello.py
- hydroanomaly/math_utils.py
- hydroanomaly/plotting.py
- hydroanomaly/sentinel_data.py
- hydroanomaly/usgs_data.py
+ hydroanomaly/sentinel_bands.py
+ hydroanomaly/usgs_turbidity.py
+ hydroanomaly/visualize.py
  hydroanomaly.egg-info/PKG-INFO
  hydroanomaly.egg-info/SOURCES.txt
  hydroanomaly.egg-info/dependency_links.txt
@@ -4,13 +4,15 @@ build-backend = "setuptools.build_meta"
 
  [project]
  name = "hydroanomaly"
- version = "0.5.0"
+ version = "0.7.0"
  authors = [
-     {name = "Your Name", email = "your.email@example.com"},
+     {name = "Ehsan Kahrizi", email = "ehsan.kahrizi@usu.edu"},
  ]
+ 
+ 
  description = "A Python package for hydro anomaly detection with simple USGS data retrieval"
  readme = "README.md"
- license = "MIT"
+ license = {file = "LICENSE"}
  requires-python = ">=3.6"
  classifiers = [
      "Programming Language :: Python :: 3",