hydroanomaly 0.5.0__py3-none-any.whl → 0.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
hydroanomaly/__init__.py CHANGED
@@ -1,146 +1,46 @@
  """
- HydroAnomaly
+ HydroAnomaly: Simple Water Data Analysis Package

- A Python package for hydro anomaly detection, USGS data retrieval,
- time series visualization, and Sentinel satellite data analysis.
+ A simple Python package with just 3 modules:
+ 1. USGS turbidity data retrieval (returns data and site coordinates)
+ 2. Sentinel satellite bands retrieval
+ 3. Time series visualization
+
+ That's it - nothing else!
  """

- __version__ = "0.5.0"
- __author__ = "Your Name"
- __email__ = "your.email@example.com"
+ __version__ = "0.7.0"
+ __author__ = "Ehsan Kahrizi (Ehsan.kahrizi@usu.edu)"

- # Import main modules for easy access
- from .hello import greet
- from .math_utils import add, multiply
- from .usgs_data import get_usgs_data, get_usgs_simple, USGSDataRetriever
- from .plotting import plot_usgs_data, plot_multiple_gages, quick_plot, WaterDataPlotter
+ # Import the 3 simple modules
+ from .usgs_turbidity import get_turbidity, get_usgs_turbidity
+ from .sentinel_bands import get_sentinel_bands, get_satellite_data, get_sentinel, calculate_ndvi
+ from .visualize import plot_timeseries, plot_turbidity, plot_sentinel, plot_comparison, plot, visualize

- # Base exports
+ # Export everything
  __all__ = [
-     'greet',
-     'add', 'multiply',
-     'get_usgs_data', 'get_usgs_simple', 'USGSDataRetriever',
-     'plot_usgs_data', 'plot_multiple_gages', 'quick_plot', 'WaterDataPlotter',
-     'get_discharge', 'get_temperature', 'get_water_level'
+     # USGS turbidity functions
+     'get_turbidity',
+     'get_usgs_turbidity',
+
+     # Sentinel functions
+     'get_sentinel_bands',
+     'get_satellite_data',
+     'get_sentinel',
+     'calculate_ndvi',
+
+     # Visualization functions
+     'plot_timeseries',
+     'plot_turbidity',
+     'plot_sentinel',
+     'plot_comparison',
+     'plot',
+     'visualize'
  ]

- # Try to import Sentinel functionality (optional GEE dependency)
- try:
-     from .sentinel_data import (
-         SentinelDataRetriever,
-         SentinelConfig,
-         setup_gee_authentication,
-         initialize_gee,
-         get_water_area_time_series,
-         detect_water_changes
-     )
-     _SENTINEL_AVAILABLE = True
-
-     # Add Sentinel functions to exports
-     __all__.extend([
-         'SentinelDataRetriever',
-         'SentinelConfig',
-         'setup_gee_authentication',
-         'initialize_gee',
-         'get_water_area_time_series',
-         'detect_water_changes'
-     ])
-
- except ImportError as e:
-     print("⚠️ Sentinel data functionality not available.")
-     print("💡 To use Google Earth Engine features, install:")
-     print(" pip install earthengine-api")
-     print(" Then authenticate: earthengine authenticate")
-     _SENTINEL_AVAILABLE = False
-
-     # Create placeholder functions for better error messages
-     def setup_gee_authentication(*args, **kwargs):
-         raise ImportError("Google Earth Engine not available. Install with: pip install earthengine-api")
-
-     def initialize_gee(*args, **kwargs):
-         raise ImportError("Google Earth Engine not available. Install with: pip install earthengine-api")
-
-     def get_water_area_time_series(*args, **kwargs):
-         raise ImportError("Google Earth Engine not available. Install with: pip install earthengine-api")
-
-     def detect_water_changes(*args, **kwargs):
-         raise ImportError("Google Earth Engine not available. Install with: pip install earthengine-api")
-
- # Convenience functions for common use cases
- def get_discharge(gage_number, start_date, end_date, save_file=None):
-     """
-     Quick function to get discharge data from any USGS gage.
-
-     Args:
-         gage_number (str): USGS gage number (e.g., "08158000")
-         start_date (str): Start date in YYYY-MM-DD format
-         end_date (str): End date in YYYY-MM-DD format
-         save_file (str, optional): Filename to save data
-
-     Returns:
-         pandas.DataFrame: Discharge data
-
-     Example:
-         >>> import hydroanomaly
-         >>> data = hydroanomaly.get_discharge("08158000", "2023-01-01", "2023-01-31")
-         >>> print(f"Got {len(data)} discharge measurements")
-     """
-     return get_usgs_data(
-         site_number=gage_number,
-         parameter_code="00060",  # Discharge
-         start_date=start_date,
-         end_date=end_date,
-         save_to_file=save_file,
-         parameter_name="Discharge_cfs"
-     )
-
- def get_water_level(gage_number, start_date, end_date, save_file=None):
-     """
-     Quick function to get water level data from any USGS gage.
-
-     Args:
-         gage_number (str): USGS gage number (e.g., "08158000")
-         start_date (str): Start date in YYYY-MM-DD format
-         end_date (str): End date in YYYY-MM-DD format
-         save_file (str, optional): Filename to save data
-
-     Returns:
-         pandas.DataFrame: Water level data
-     """
-     return get_usgs_data(
-         site_number=gage_number,
-         parameter_code="00065",  # Gage height
-         start_date=start_date,
-         end_date=end_date,
-         save_to_file=save_file,
-         parameter_name="WaterLevel_ft"
-     )
-
- def get_temperature(gage_number, start_date, end_date, save_file=None):
-     """
-     Quick function to get water temperature data from any USGS gage.
-
-     Args:
-         gage_number (str): USGS gage number (e.g., "08158000")
-         start_date (str): Start date in YYYY-MM-DD format
-         end_date (str): End date in YYYY-MM-DD format
-         save_file (str, optional): Filename to save data
-
-     Returns:
-         pandas.DataFrame: Temperature data
-     """
-     return get_usgs_data(
-         site_number=gage_number,
-         parameter_code="00010",  # Temperature
-         start_date=start_date,
-         end_date=end_date,
-         save_to_file=save_file,
-         parameter_name="Temperature_C"
-     )
-
- __all__ = [
-     "greet", "add", "multiply",
-     "get_usgs_data", "USGSDataRetriever",
-     "get_discharge", "get_water_level", "get_temperature",
-     "plot_usgs_data", "plot_multiple_gages", "quick_plot", "WaterDataPlotter"
- ]
+ print(f"HydroAnomaly v{__version__} - Simple Water Data Package")
+ print("Available functions:")
+ print(" • get_turbidity() - Get USGS turbidity data and site coordinates")
+ print(" • get_sentinel_bands() - Get satellite data")
+ print(" • plot_timeseries() - Visualize data")
+ print("Try: help(hydroanomaly.get_turbidity) for examples")

hydroanomaly/sentinel_bands.py ADDED
@@ -0,0 +1,105 @@
+ """
+ Sentinel-2 Satellite Data Retrieval using Google Earth Engine (GEE)
+
+ This module provides a function to retrieve Sentinel-2 satellite band data
+ for a specified location and time period, with masking and cloud filtering.
+ """
+
+ import ee  # Google Earth Engine client; required for every ee.* call in this module
+ import pandas as pd
+ import numpy as np
+ from datetime import datetime, timedelta
+ import requests
+ import warnings
+
+ def get_sentinel_bands_gee(
+     latitude: float,
+     longitude: float,
+     start_date: str,
+     end_date: str,
+     bands: list = None,
+     buffer_meters: int = 20,
+     cloudy_pixel_percentage: int = 20,
+     masks_to_apply: list = None
+ ) -> pd.DataFrame:
+     """
+     Retrieve Sentinel-2 bands from Google Earth Engine, applying custom masking.
+
+     Args:
+         latitude (float): Latitude of center point.
+         longitude (float): Longitude of center point.
+         start_date (str): Start date as "YYYY-MM-DD".
+         end_date (str): End date as "YYYY-MM-DD".
+         bands (list): List of bands to retrieve (default is common Sentinel-2 bands).
+         buffer_meters (int): Buffer size around the point, in meters.
+         cloudy_pixel_percentage (int): Maximum allowed cloud percentage for each image.
+         masks_to_apply (list): Masking strategies (e.g., ["water", "no_cloud_shadow", ...]).
+
+     Returns:
+         pd.DataFrame: DataFrame with band reflectance values per date.
+
+     Example:
+         >>> import ee
+         >>> ee.Authenticate()
+         >>> ee.Initialize()
+         >>> df = get_sentinel_bands_gee(29.77, -95.06, "2021-01-01", "2021-12-31")
+         >>> print(df.head())
+     """
+     if bands is None:
+         bands = ['B2', 'B3', 'B4', 'B8', 'SCL']
+     if masks_to_apply is None:
+         masks_to_apply = ["water", "no_cloud_shadow", "no_clouds", "no_snow_ice", "no_saturated"]
+
+     point = ee.Geometry.Point([longitude, latitude])
+     buffered_point = point.buffer(buffer_meters)
+
+     s2 = (ee.ImageCollection('COPERNICUS/S2_SR_HARMONIZED')
+           .filterBounds(buffered_point)
+           .filterDate(start_date, end_date)
+           .filter(ee.Filter.lt('CLOUDY_PIXEL_PERCENTAGE', cloudy_pixel_percentage))
+           .select(bands))
+
+     def dynamic_scl_mask(image):
+         scl = image.select('SCL')
+         mask = ee.Image.constant(1)
+         if "water" in masks_to_apply:
+             mask = mask.And(scl.eq(6))
+         if "no_cloud_shadow" in masks_to_apply:
+             mask = mask.And(scl.neq(3))
+         if "no_clouds" in masks_to_apply:
+             cloud_mask = scl.neq(8).And(scl.neq(9)).And(scl.neq(10))
+             mask = mask.And(cloud_mask)
+         if "no_snow_ice" in masks_to_apply:
+             mask = mask.And(scl.neq(11))
+         if "no_saturated" in masks_to_apply:
+             mask = mask.And(scl.neq(1))
+         return image.updateMask(mask)
+
+     s2_masked = s2.map(dynamic_scl_mask)
+
+     def extract_features(image):
+         date = image.date().format('YYYY-MM-dd HH:mm:ss')
+         values = image.reduceRegion(
+             reducer=ee.Reducer.mean(),
+             geometry=buffered_point,
+             scale=20,
+             maxPixels=1e8
+         )
+         return ee.Feature(None, values.set('date', date))
+
+     features = s2_masked.map(extract_features)
+     fc = ee.FeatureCollection(features).filter(ee.Filter.notNull(['B2']))
+
+     data = fc.getInfo()
+     rows = [f['properties'] for f in data['features']]
+     df = pd.DataFrame(rows)
+     if not df.empty:
+         df['date'] = pd.to_datetime(df['date'])
+         df = df.sort_values('date')
+         df = df.set_index('date')
+     return df
+
+ # Aliases for user convenience
+ get_sentinel_bands = get_sentinel_bands_gee
+ get_satellite_data = get_sentinel_bands_gee
+ get_sentinel = get_sentinel_bands_gee
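
Every call in this module goes through the `ee` client, so Earth Engine must be authenticated and initialized before the function is used. A minimal sketch, assuming the earthengine-api package is installed, the account has Earth Engine access, and the package imports cleanly; the NDVI column is computed here directly from the returned B4/B8 means as an illustration, not via any package helper:

    import ee
    from hydroanomaly.sentinel_bands import get_sentinel_bands

    ee.Authenticate()   # one-time browser flow
    ee.Initialize()

    # Water-masked Sentinel-2 band means at the point used in the docstring example
    df = get_sentinel_bands(29.77, -95.06, "2021-01-01", "2021-12-31")

    if not df.empty:
        # NDVI = (NIR - Red) / (NIR + Red), using the B8 and B4 column means
        df["NDVI"] = (df["B8"] - df["B4"]) / (df["B8"] + df["B4"])
        print(df[["B4", "B8", "NDVI"]].head())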

hydroanomaly/usgs_turbidity.py ADDED
@@ -0,0 +1,177 @@
+ """
+ Simple USGS Turbidity Data Retrieval
+
+ This module provides one simple function to get turbidity data from USGS stations.
+ That's it - nothing else!
+ """
+
+ import pandas as pd
+ import requests
+ from io import StringIO
+ from datetime import datetime
+ import numpy as np
+
+ # Function to retrieve data ---------------------------------------------------
+ def get_turbidity(site_number: str, start_date: str, end_date: str) -> tuple:
+     """
+     Get turbidity data from a USGS station.
+
+     Args:
+         site_number (str): USGS site number (e.g., "294643095035200")
+         start_date (str): Start date as "YYYY-MM-DD"
+         end_date (str): End date as "YYYY-MM-DD"
+
+     Returns:
+         tuple: (pd.DataFrame, (latitude, longitude)), or (empty DataFrame, (None, None)) if nothing is found.
+             Note: the DataFrame is a time series with a datetime index and a 'turbidity' column.
+
+     Example:
+         >>> data, (lat, lon) = get_turbidity("294643095035200", "2023-01-01", "2023-12-31")
+         >>> print(f"Got {len(data)} turbidity measurements")
+     """
+
+     # --- Report the request ----------------------------------------------------
+     print(f"Getting turbidity data for site {site_number}")
+     print(f"Date range: {start_date} to {end_date}")
+
+     # --- Retrieve site metadata (lat/lon) ---------------------------------------
+     site_url = (
+         f"https://waterservices.usgs.gov/nwis/site/"
+         f"?sites={site_number}"
+         f"&format=rdb")
+     try:
+         site_resp = requests.get(site_url, timeout=15)
+         if site_resp.status_code != 200:
+             print(f"Could not get site metadata: {site_resp.status_code}")
+             lat, lon = None, None
+         else:
+             df_meta = pd.read_csv(StringIO(site_resp.text), sep="\t", comment="#")
+             df_meta = df_meta.dropna(axis=1, how="all")
+             lat, lon = None, None
+             if not df_meta.empty:
+                 lat = float(df_meta["dec_lat_va"].iloc[0]) if "dec_lat_va" in df_meta.columns else None
+                 lon = float(df_meta["dec_long_va"].iloc[0]) if "dec_long_va" in df_meta.columns else None
+     except Exception as e:
+         print(f"Error getting site coordinates: {e}")
+         lat, lon = None, None
+
+
+     # --- Retrieve turbidity data: build the USGS IV API URL (parameter code 63680) ---
+     url = (
+         f"https://waterservices.usgs.gov/nwis/iv/"
+         f"?sites={site_number}"
+         f"&parameterCd=63680"  # Turbidity parameter code
+         f"&startDT={start_date}"
+         f"&endDT={end_date}"
+         f"&format=rdb")
+
+     try:
+         # Get data from USGS
+         response = requests.get(url, timeout=30)
+
+         if response.status_code != 200:
+             print(f"No data found: API returned status {response.status_code}.")
+             print("Data for the specified site or parameters does not exist.")
+             return pd.DataFrame(), (lat, lon)
+
+         # Parse the response
+         data = _parse_usgs_response(response.text)
+
+         if len(data) == 0:
+             print("No data found for the specified parameters or date range.")
+             return pd.DataFrame(), (lat, lon)
+
+         print(f"Retrieved {len(data)} turbidity measurements")
+         return data, (lat, lon)
+
+     except Exception as e:
+         print(f"Error: {e}")
+         print("Data for the specified site or parameters does not exist.")
+         return pd.DataFrame(), (lat, lon)
+
+
+ # Parse and clean the turbidity time series from the USGS API response ---------
+ def _parse_usgs_response(content: str) -> pd.DataFrame:
+     """Parse the USGS response and extract turbidity data."""
+
+     if "No sites found" in content or "No data" in content:
+         return pd.DataFrame()
+
+     try:
+         # Read tab-separated data
+         data = pd.read_csv(StringIO(content), sep='\t', comment='#')
+
+         # Clean up
+         data = data.dropna(axis=1, how='all')
+         data.columns = data.columns.str.strip()
+
+         # Find datetime and turbidity columns
+         datetime_cols = [col for col in data.columns if 'datetime' in col.lower()]
+         turbidity_cols = [col for col in data.columns if '63680' in col]
+
+         if not datetime_cols or not turbidity_cols:
+             return pd.DataFrame()
+
+         # Extract relevant columns
+         result = data[[datetime_cols[0], turbidity_cols[0]]].copy()
+         result.columns = ['datetime', 'turbidity']
+
+         # Convert data types
+         result['datetime'] = pd.to_datetime(result['datetime'], errors='coerce')
+         result['turbidity'] = pd.to_numeric(result['turbidity'], errors='coerce')
+
+         # Remove missing data
+         result = result.dropna()
+
+         # Set datetime as index
+         result = result.set_index('datetime')
+
+         return result
+
+     except Exception:
+         return pd.DataFrame()
+
+
+ """
+ # Generate synthetic data if no data exists in USGS (kept for reference, disabled) ---
+ def _create_synthetic_turbidity(start_date: str, end_date: str) -> pd.DataFrame:
+     '''Create realistic synthetic turbidity data.'''
+
+     date_range = pd.date_range(start=start_date, end=end_date, freq='H')
+
+     # Generate realistic turbidity values (typically 0-50 NTU)
+     base_turbidity = 8.0  # Base level
+     daily_variation = 3.0  # Daily fluctuation
+
+     # Create synthetic values with realistic patterns
+     synthetic_values = []
+     for i, dt in enumerate(date_range):
+         # Base value with daily pattern
+         daily_factor = np.sin(2 * np.pi * dt.hour / 24) * daily_variation
+
+         # Add some noise
+         noise = np.random.normal(0, 1.5)
+
+         # Occasional high turbidity events (storms)
+         if np.random.random() < 0.02:  # 2% chance of high event
+             storm_factor = np.random.uniform(10, 30)
+         else:
+             storm_factor = 0
+
+         value = base_turbidity + daily_factor + noise + storm_factor
+         value = max(0.1, value)  # Ensure positive values
+
+         synthetic_values.append(value)
+
+     # Create DataFrame
+     synthetic_data = pd.DataFrame({
+         'turbidity': synthetic_values
+     }, index=date_range)
+
+     print(f"Created {len(synthetic_data)} synthetic turbidity measurements")
+
+     return synthetic_data
+ """
+
+ # Simple alias for backwards compatibility
+ get_usgs_turbidity = get_turbidity
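
The tuple return (time series plus site coordinates) is the main thing to remember when calling this module directly. A short post-processing sketch, assuming the request above succeeds; the daily-mean resample is plain pandas rather than a package feature:

    from hydroanomaly.usgs_turbidity import get_turbidity

    data, (lat, lon) = get_turbidity("294643095035200", "2023-01-01", "2023-12-31")

    if not data.empty:
        print("Site location:", lat, lon)
        # Collapse the instantaneous readings (typically 15-minute) to daily means
        daily = data["turbidity"].resample("D").mean()
        print(daily.describe())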

hydroanomaly/visualize.py ADDED
@@ -0,0 +1,226 @@
+ """
+ Simple Time Series Visualization
+
+ This module provides simple functions to visualize time series data.
+ That's it - nothing else!
+ """
+
+ import matplotlib.pyplot as plt
+ import matplotlib.dates as mdates
+ import pandas as pd
+ import numpy as np
+ from datetime import datetime
+
+
+ def plot_timeseries(data: pd.DataFrame, title: str = "Time Series Data", save_file: str = None) -> None:
+     """
+     Create a simple time series plot.
+
+     Args:
+         data (pd.DataFrame): DataFrame with datetime index and numeric columns
+         title (str): Title for the plot
+         save_file (str): Optional filename to save the plot
+
+     Example:
+         >>> plot_timeseries(turbidity_data, "Turbidity Over Time", "turbidity_plot.png")
+     """
+
+     if data.empty:
+         print("❌ No data to plot")
+         return
+
+     print(f"📊 Creating plot: {title}")
+
+     # Create figure
+     plt.figure(figsize=(12, 6))
+
+     # Plot each column
+     for column in data.columns:
+         if pd.api.types.is_numeric_dtype(data[column]):
+             plt.plot(data.index, data[column], label=column, linewidth=1.5, alpha=0.8)
+
+     # Format plot
+     plt.title(title, fontsize=14, fontweight='bold', pad=20)
+     plt.xlabel('Date', fontsize=12)
+     plt.ylabel('Value', fontsize=12)
+     plt.grid(True, alpha=0.3)
+
+     # Format x-axis dates
+     plt.gca().xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m'))
+     plt.gca().xaxis.set_major_locator(mdates.MonthLocator(interval=3))
+     plt.xticks(rotation=45)
+
+     # Add legend if multiple columns
+     if len(data.columns) > 1:
+         plt.legend()
+
+     plt.tight_layout()
+
+     # Save if requested
+     if save_file:
+         plt.savefig(save_file, dpi=300, bbox_inches='tight')
+         print(f"💾 Plot saved as {save_file}")
+
+     plt.show()
+     print("✅ Plot created successfully!")
+
+
+ def plot_turbidity(turbidity_data: pd.DataFrame, save_file: str = None) -> None:
+     """
+     Create a turbidity-specific plot with appropriate formatting.
+
+     Args:
+         turbidity_data (pd.DataFrame): DataFrame with turbidity values
+         save_file (str): Optional filename to save the plot
+     """
+
+     if turbidity_data.empty:
+         print("❌ No turbidity data to plot")
+         return
+
+     print("🌫️ Creating turbidity plot")
+
+     plt.figure(figsize=(12, 6))
+
+     # Plot turbidity
+     column_name = turbidity_data.columns[0]
+     plt.plot(turbidity_data.index, turbidity_data.iloc[:, 0],
+              color='brown', linewidth=1.5, alpha=0.8)
+
+     # Add threshold lines for water quality assessment
+     plt.axhline(y=10, color='orange', linestyle='--', alpha=0.7, label='Moderate (10 NTU)')
+     plt.axhline(y=25, color='red', linestyle='--', alpha=0.7, label='High (25 NTU)')
+
+     # Format plot
+     plt.title('💧 Turbidity Time Series', fontsize=14, fontweight='bold', pad=20)
+     plt.xlabel('Date', fontsize=12)
+     plt.ylabel('Turbidity (NTU)', fontsize=12)
+     plt.grid(True, alpha=0.3)
+
+     # Format x-axis
+     plt.gca().xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m'))
+     plt.xticks(rotation=45)
+
+     plt.legend()
+     plt.tight_layout()
+
+     # Save if requested
+     if save_file:
+         plt.savefig(save_file, dpi=300, bbox_inches='tight')
+         print(f"💾 Turbidity plot saved as {save_file}")
+
+     plt.show()
+     print("✅ Turbidity plot created!")
+
+
+ def plot_sentinel(sentinel_data: pd.DataFrame, save_file: str = None) -> None:
+     """
+     Create a Sentinel satellite data plot.
+
+     Args:
+         sentinel_data (pd.DataFrame): DataFrame with Sentinel band values
+         save_file (str): Optional filename to save the plot
+     """
+
+     if sentinel_data.empty:
+         print("❌ No Sentinel data to plot")
+         return
+
+     print("🛰️ Creating Sentinel bands plot")
+
+     plt.figure(figsize=(12, 8))
+
+     # Define colors for different bands
+     band_colors = {
+         'B2': 'blue',       # Blue band
+         'B3': 'green',      # Green band
+         'B4': 'red',        # Red band
+         'B8': 'darkred',    # NIR band
+         'NDVI': 'darkgreen'
+     }
+
+     # Plot each band
+     for column in sentinel_data.columns:
+         color = band_colors.get(column, 'black')
+         plt.plot(sentinel_data.index, sentinel_data[column],
+                  label=column, color=color, linewidth=2, marker='o', markersize=4)
+
+     # Format plot
+     plt.title('🛰️ Sentinel Satellite Data', fontsize=14, fontweight='bold', pad=20)
+     plt.xlabel('Date', fontsize=12)
+     plt.ylabel('Digital Number / Index Value', fontsize=12)
+     plt.grid(True, alpha=0.3)
+
+     # Format x-axis
+     plt.gca().xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m'))
+     plt.xticks(rotation=45)
+
+     plt.legend()
+     plt.tight_layout()
+
+     # Save if requested
+     if save_file:
+         plt.savefig(save_file, dpi=300, bbox_inches='tight')
+         print(f"💾 Sentinel plot saved as {save_file}")
+
+     plt.show()
+     print("✅ Sentinel plot created!")
+
+
+ def plot_comparison(data1: pd.DataFrame, data2: pd.DataFrame,
+                     label1: str = "Dataset 1", label2: str = "Dataset 2",
+                     title: str = "Data Comparison", save_file: str = None) -> None:
+     """
+     Create a comparison plot of two time series datasets.
+
+     Args:
+         data1 (pd.DataFrame): First dataset
+         data2 (pd.DataFrame): Second dataset
+         label1 (str): Label for first dataset
+         label2 (str): Label for second dataset
+         title (str): Plot title
+         save_file (str): Optional filename to save the plot
+     """
+
+     if data1.empty and data2.empty:
+         print("❌ No data to plot")
+         return
+
+     print(f"📊 Creating comparison plot: {title}")
+
+     fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(12, 10), sharex=True)
+
+     # Plot first dataset
+     if not data1.empty:
+         ax1.plot(data1.index, data1.iloc[:, 0], color='blue', linewidth=1.5, alpha=0.8)
+         ax1.set_title(f'{label1}', fontsize=12, fontweight='bold')
+         ax1.set_ylabel('Value', fontsize=10)
+         ax1.grid(True, alpha=0.3)
+
+     # Plot second dataset
+     if not data2.empty:
+         ax2.plot(data2.index, data2.iloc[:, 0], color='red', linewidth=1.5, alpha=0.8)
+         ax2.set_title(f'{label2}', fontsize=12, fontweight='bold')
+         ax2.set_ylabel('Value', fontsize=10)
+         ax2.grid(True, alpha=0.3)
+
+     # Format x-axis
+     ax2.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m'))
+     plt.xticks(rotation=45)
+     ax2.set_xlabel('Date', fontsize=12)
+
+     plt.suptitle(title, fontsize=14, fontweight='bold')
+     plt.tight_layout()
+
+     # Save if requested
+     if save_file:
+         plt.savefig(save_file, dpi=300, bbox_inches='tight')
+         print(f"💾 Comparison plot saved as {save_file}")
+
+     plt.show()
+     print("✅ Comparison plot created!")
+
+
+ # Simple aliases
+ plot = plot_timeseries
+ visualize = plot_timeseries
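
All of the plotting helpers accept a DataFrame with a datetime index, so output from the other two modules can be passed in directly. A closing sketch, assuming turbidity data was retrieved as in the earlier examples; the daily-mean frame is built with plain pandas simply to provide a second series to compare, and the filename is arbitrary:

    import hydroanomaly

    data, _ = hydroanomaly.get_turbidity("294643095035200", "2023-01-01", "2023-06-30")

    if not data.empty:
        daily = data.resample("D").mean()   # second series for the comparison
        hydroanomaly.plot_comparison(
            data, daily,
            label1="Raw turbidity", label2="Daily mean",
            title="Turbidity: raw vs daily mean",
            save_file="turbidity_comparison.png",
        )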