fvs-python 0.2.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fvs_python-0.2.3.dist-info/METADATA +254 -0
- fvs_python-0.2.3.dist-info/RECORD +149 -0
- fvs_python-0.2.3.dist-info/WHEEL +5 -0
- fvs_python-0.2.3.dist-info/licenses/LICENSE +21 -0
- fvs_python-0.2.3.dist-info/top_level.txt +1 -0
- pyfvs/__init__.py +107 -0
- pyfvs/bark_ratio.py +323 -0
- pyfvs/cfg/CFG_README.md +73 -0
- pyfvs/cfg/dbh_bounding_table_4_7_1_8.json +103 -0
- pyfvs/cfg/ecounit_coefficients_table_4_7_1_5.json +981 -0
- pyfvs/cfg/ecounit_coefficients_table_4_7_1_6.json +856 -0
- pyfvs/cfg/forest_type_mapping_table_4_7_1_4.json +38 -0
- pyfvs/cfg/fortype_coefficients_table_4_7_1_3.json +1183 -0
- pyfvs/cfg/functional_forms.yaml +111 -0
- pyfvs/cfg/growth_model_parameters.yaml +98 -0
- pyfvs/cfg/plant_values_table_4_7_1_7.json +15 -0
- pyfvs/cfg/site_index_transformation.yaml +113 -0
- pyfvs/cfg/sn_bark_ratio_coefficients.json +115 -0
- pyfvs/cfg/sn_crown_competition_factor.json +109 -0
- pyfvs/cfg/sn_crown_ratio_coefficients.json +956 -0
- pyfvs/cfg/sn_crown_width_coefficients.json +1664 -0
- pyfvs/cfg/sn_diameter_growth_coefficients.json +300 -0
- pyfvs/cfg/sn_height_diameter_coefficients.json +97 -0
- pyfvs/cfg/sn_large_tree_diameter_growth.json +191 -0
- pyfvs/cfg/sn_large_tree_height_growth.json +229 -0
- pyfvs/cfg/sn_large_tree_height_growth_coefficients.json +1187 -0
- pyfvs/cfg/sn_mortality_model.json +176 -0
- pyfvs/cfg/sn_regeneration_model.json +252 -0
- pyfvs/cfg/sn_relative_site_index.json +263 -0
- pyfvs/cfg/sn_small_tree_height_growth.json +879 -0
- pyfvs/cfg/sn_species_codes_table.json +728 -0
- pyfvs/cfg/sn_stand_density_index.json +398 -0
- pyfvs/cfg/species/ab_american_basswood.yaml +251 -0
- pyfvs/cfg/species/ae_american_elm.yaml +240 -0
- pyfvs/cfg/species/ah_american_hornbeam.yaml +250 -0
- pyfvs/cfg/species/ap_american_plum.yaml +251 -0
- pyfvs/cfg/species/as_american_sycamore.yaml +253 -0
- pyfvs/cfg/species/ba_black_ash.yaml +254 -0
- pyfvs/cfg/species/bb_basswood.yaml +254 -0
- pyfvs/cfg/species/bc_black_cherry.yaml +254 -0
- pyfvs/cfg/species/bd_sweet_birch.yaml +252 -0
- pyfvs/cfg/species/be_american_beech.yaml +251 -0
- pyfvs/cfg/species/bg_black_gum.yaml +252 -0
- pyfvs/cfg/species/bj_blue_jay.yaml +254 -0
- pyfvs/cfg/species/bk_sugar_maple.yaml +251 -0
- pyfvs/cfg/species/bn_butternut.yaml +252 -0
- pyfvs/cfg/species/bo_red_maple.yaml +255 -0
- pyfvs/cfg/species/bt_bigtooth_aspen.yaml +254 -0
- pyfvs/cfg/species/bu_buckeye.yaml +252 -0
- pyfvs/cfg/species/by_bald_cypress.yaml +255 -0
- pyfvs/cfg/species/ca_american_chestnut.yaml +254 -0
- pyfvs/cfg/species/cb_cucumber_tree.yaml +254 -0
- pyfvs/cfg/species/ck_virginia_pine.yaml +254 -0
- pyfvs/cfg/species/co_pond_cypress.yaml +251 -0
- pyfvs/cfg/species/ct_catalpa.yaml +251 -0
- pyfvs/cfg/species/cw_chestnut_oak.yaml +253 -0
- pyfvs/cfg/species/dw_dogwood.yaml +250 -0
- pyfvs/cfg/species/el_american_hornbeam.yaml +254 -0
- pyfvs/cfg/species/fm_flowering_dogwood.yaml +251 -0
- pyfvs/cfg/species/fr_fraser_fir.yaml +247 -0
- pyfvs/cfg/species/ga_green_ash.yaml +254 -0
- pyfvs/cfg/species/ha_hawthorn.yaml +252 -0
- pyfvs/cfg/species/hb_hornbeam.yaml +254 -0
- pyfvs/cfg/species/hh_dogwood.yaml +251 -0
- pyfvs/cfg/species/hi_hickory_species.yaml +252 -0
- pyfvs/cfg/species/hl_holly.yaml +254 -0
- pyfvs/cfg/species/hm_eastern_hemlock.yaml +246 -0
- pyfvs/cfg/species/hy_holly.yaml +252 -0
- pyfvs/cfg/species/ju_eastern_juniper.yaml +247 -0
- pyfvs/cfg/species/lb_loblolly_bay.yaml +254 -0
- pyfvs/cfg/species/lk_laurel_oak.yaml +254 -0
- pyfvs/cfg/species/ll_longleaf_pine.yaml +265 -0
- pyfvs/cfg/species/lo_silver_maple.yaml +252 -0
- pyfvs/cfg/species/lp_loblolly_pine.yaml +268 -0
- pyfvs/cfg/species/mb_mountain_birch.yaml +250 -0
- pyfvs/cfg/species/mg_magnolia.yaml +251 -0
- pyfvs/cfg/species/ml_maple_leaf.yaml +254 -0
- pyfvs/cfg/species/ms_maple_species.yaml +247 -0
- pyfvs/cfg/species/mv_magnolia_vine.yaml +254 -0
- pyfvs/cfg/species/oh_other_hardwood.yaml +231 -0
- pyfvs/cfg/species/os_other_softwood.yaml +232 -0
- pyfvs/cfg/species/ot_other_tree.yaml +210 -0
- pyfvs/cfg/species/ov_overcup_oak.yaml +254 -0
- pyfvs/cfg/species/pc_pond_cypress.yaml +254 -0
- pyfvs/cfg/species/pd_pitch_pine.yaml +245 -0
- pyfvs/cfg/species/pi_pine_species.yaml +246 -0
- pyfvs/cfg/species/po_american_beech.yaml +254 -0
- pyfvs/cfg/species/pp_pond_pine.yaml +246 -0
- pyfvs/cfg/species/ps_persimmon.yaml +251 -0
- pyfvs/cfg/species/pu_pond_pine.yaml +249 -0
- pyfvs/cfg/species/qs_flowering_dogwood.yaml +254 -0
- pyfvs/cfg/species/ra_red_ash.yaml +245 -0
- pyfvs/cfg/species/rd_redbud.yaml +251 -0
- pyfvs/cfg/species/rl_red_elm.yaml +240 -0
- pyfvs/cfg/species/rm_red_maple.yaml +256 -0
- pyfvs/cfg/species/ro_eastern_hemlock.yaml +255 -0
- pyfvs/cfg/species/sa_slash_pine.yaml +265 -0
- pyfvs/cfg/species/sb_sweet_birch.yaml +255 -0
- pyfvs/cfg/species/sd_sand_pine.yaml +251 -0
- pyfvs/cfg/species/sk_swamp_oak.yaml +253 -0
- pyfvs/cfg/species/sm_sugar_maple.yaml +252 -0
- pyfvs/cfg/species/sn_loblolly_pine.yaml +254 -0
- pyfvs/cfg/species/so_southern_oak.yaml +253 -0
- pyfvs/cfg/species/sp_shortleaf_pine.yaml +267 -0
- pyfvs/cfg/species/sr_spruce_pine.yaml +246 -0
- pyfvs/cfg/species/ss_basswood.yaml +251 -0
- pyfvs/cfg/species/su_sweetgum.yaml +255 -0
- pyfvs/cfg/species/sv_silver_maple.yaml +255 -0
- pyfvs/cfg/species/sy_sycamore.yaml +254 -0
- pyfvs/cfg/species/tm_tamarack.yaml +246 -0
- pyfvs/cfg/species/to_tulip_oak.yaml +254 -0
- pyfvs/cfg/species/ts_tulip_tree.yaml +253 -0
- pyfvs/cfg/species/vp_virginia_pine.yaml +248 -0
- pyfvs/cfg/species/wa_white_ash.yaml +254 -0
- pyfvs/cfg/species/we_white_elm.yaml +250 -0
- pyfvs/cfg/species/wi_willow.yaml +248 -0
- pyfvs/cfg/species/wk_water_oak.yaml +254 -0
- pyfvs/cfg/species/wn_walnut.yaml +254 -0
- pyfvs/cfg/species/wo_white_oak.yaml +256 -0
- pyfvs/cfg/species/wp_white_pine.yaml +250 -0
- pyfvs/cfg/species/wt_water_tupelo.yaml +254 -0
- pyfvs/cfg/species/yp_yellow_poplar.yaml +261 -0
- pyfvs/cfg/species_config.yaml +106 -0
- pyfvs/clark_profile.py +323 -0
- pyfvs/competition.py +332 -0
- pyfvs/config_loader.py +375 -0
- pyfvs/crown_competition_factor.py +464 -0
- pyfvs/crown_ratio.py +377 -0
- pyfvs/crown_width.py +512 -0
- pyfvs/data_export.py +356 -0
- pyfvs/ecological_unit.py +272 -0
- pyfvs/exceptions.py +86 -0
- pyfvs/fia_integration.py +876 -0
- pyfvs/forest_type.py +253 -0
- pyfvs/growth_plots.py +579 -0
- pyfvs/harvest.py +603 -0
- pyfvs/height_diameter.py +248 -0
- pyfvs/large_tree_height_growth.py +822 -0
- pyfvs/logging_config.py +213 -0
- pyfvs/main.py +99 -0
- pyfvs/mortality.py +431 -0
- pyfvs/parameters.py +121 -0
- pyfvs/simulation_engine.py +386 -0
- pyfvs/stand.py +1004 -0
- pyfvs/stand_metrics.py +436 -0
- pyfvs/stand_output.py +552 -0
- pyfvs/tree.py +756 -0
- pyfvs/validation.py +190 -0
- pyfvs/volume_library.py +761 -0
pyfvs/data_export.py
ADDED
|
@@ -0,0 +1,356 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Data export utilities for FVS-Python.
|
|
3
|
+
Provides various formats for exporting simulation results.
|
|
4
|
+
"""
|
|
5
|
+
import json
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import List, Dict, Any, Optional, Union
|
|
8
|
+
import pandas as pd
|
|
9
|
+
import numpy as np
|
|
10
|
+
from datetime import datetime
|
|
11
|
+
|
|
12
|
+
from .logging_config import get_logger
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class DataExporter:
    """Handles export of simulation data to various formats.

    Supports CSV, JSON and Excel output, plus higher-level helpers for
    yield tables, scenario comparisons, stand metrics and plain-text
    summary reports.  All files are written beneath ``output_dir``.
    """

    def __init__(self, output_dir: Path):
        """Initialize the data exporter.

        Args:
            output_dir: Directory for output files (created if missing).
        """
        self.output_dir = Path(output_dir)
        self.output_dir.mkdir(exist_ok=True, parents=True)
        self.logger = get_logger(__name__)

    def export_to_csv(self,
                      data: Union[pd.DataFrame, List[Dict]],
                      filename: str,
                      include_metadata: bool = True) -> Path:
        """Export data to CSV format.

        Args:
            data: Data to export (DataFrame or list of dicts)
            filename: Output filename (without extension)
            include_metadata: Whether to include a '#'-prefixed metadata header

        Returns:
            Path to exported file
        """
        # BUG FIX: the output path previously ignored ``filename`` (it used a
        # literal placeholder), so every export overwrote the same file.
        filepath = self.output_dir / f"{filename}.csv"

        df = pd.DataFrame(data) if isinstance(data, list) else data.copy()

        with open(filepath, 'w', newline='') as f:
            if include_metadata:
                # Comment-style header lines so most CSV readers can skip them.
                f.write("# FVS-Python Export\n")
                f.write(f"# Generated: {datetime.now().isoformat()}\n")
                f.write(f"# Records: {len(df)}\n")
                f.write(f"# Columns: {', '.join(df.columns)}\n")
                f.write("#\n")

            # Write data
            df.to_csv(f, index=False)

        self.logger.info(f"Exported {len(df)} records to {filepath}")
        return filepath

    def export_to_json(self,
                       data: Union[pd.DataFrame, List[Dict], Dict],
                       filename: str,
                       include_metadata: bool = True,
                       format_style: str = 'records') -> Path:
        """Export data to JSON format.

        Args:
            data: Data to export
            filename: Output filename (without extension)
            include_metadata: Whether to include metadata
            format_style: JSON format ('records', 'values', 'index', 'split');
                only applies to DataFrame input, unknown styles fall back to
                'records'

        Returns:
            Path to exported file
        """
        # BUG FIX: path previously ignored ``filename`` (placeholder literal).
        filepath = self.output_dir / f"{filename}.json"

        # Convert DataFrames via pandas' to_dict; lists and dicts pass through.
        if isinstance(data, pd.DataFrame):
            if format_style in ('records', 'index', 'split'):
                export_data = data.to_dict(format_style)
            elif format_style == 'values':
                export_data = data.values.tolist()
            else:
                export_data = data.to_dict('records')
        else:
            export_data = data

        # Create output structure
        output = {}
        if include_metadata:
            output['metadata'] = {
                'generator': 'FVS-Python',
                'generated_at': datetime.now().isoformat(),
                'format': format_style,
                'record_count': len(export_data) if isinstance(export_data, list) else 1
            }

        output['data'] = export_data

        with open(filepath, 'w') as f:
            # ``default`` handles numpy scalars/arrays and pandas Timestamps.
            json.dump(output, f, indent=2, default=self._json_serializer)

        self.logger.info(f"Exported data to {filepath}")
        return filepath

    def export_to_excel(self,
                        data: Union[pd.DataFrame, Dict[str, pd.DataFrame]],
                        filename: str) -> Path:
        """Export data to Excel format.

        Args:
            data: Data to export (single DataFrame or dict of DataFrames for
                multiple sheets)
            filename: Output filename (without extension)

        Returns:
            Path to exported file (may be a CSV if openpyxl is unavailable)
        """
        # BUG FIX: path previously ignored ``filename`` (placeholder literal).
        filepath = self.output_dir / f"{filename}.xlsx"

        try:
            with pd.ExcelWriter(filepath, engine='openpyxl') as writer:
                if isinstance(data, pd.DataFrame):
                    data.to_excel(writer, sheet_name='Data', index=False)
                else:
                    for sheet_name, df in data.items():
                        df.to_excel(writer, sheet_name=sheet_name, index=False)

            self.logger.info(f"Exported data to Excel file {filepath}")
            return filepath

        except ImportError:
            # Best-effort degradation when the optional engine is missing;
            # NOTE: for multi-sheet input only the first sheet is preserved.
            self.logger.warning("openpyxl not available, falling back to CSV export")
            if isinstance(data, pd.DataFrame):
                return self.export_to_csv(data, filename)
            else:
                first_sheet = next(iter(data.values()))
                return self.export_to_csv(first_sheet, filename)

    def export_yield_table(self,
                           yield_table: pd.DataFrame,
                           format: str = 'csv',
                           filename: Optional[str] = None) -> Path:
        """Export yield table with proper formatting.

        Args:
            yield_table: Yield table DataFrame
            format: Export format ('csv', 'json', 'excel')
            filename: Custom filename (optional; timestamped default otherwise)

        Returns:
            Path to exported file

        Raises:
            ValueError: If ``format`` is not one of the supported formats.
        """
        if filename is None:
            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
            filename = f"yield_table_{timestamp}"

        # Round numeric columns for better presentation
        display_table = yield_table.copy()
        numeric_columns = ['mean_dbh', 'mean_height', 'basal_area', 'volume']
        for col in numeric_columns:
            if col in display_table.columns:
                display_table[col] = display_table[col].round(2)

        # Sort by logical order (only when all sort keys are present)
        sort_keys = ['species', 'site_index', 'initial_tpa', 'age']
        if all(col in display_table.columns for col in sort_keys):
            display_table = display_table.sort_values(sort_keys)

        if format.lower() == 'csv':
            # No metadata for CSV to allow easy reading back
            return self.export_to_csv(display_table, filename, include_metadata=False)
        elif format.lower() == 'json':
            return self.export_to_json(display_table, filename)
        elif format.lower() == 'excel':
            return self.export_to_excel(display_table, filename)
        else:
            raise ValueError(f"Unsupported format: {format}")

    def export_scenario_comparison(self,
                                   comparison_df: pd.DataFrame,
                                   format: str = 'csv',
                                   filename: Optional[str] = None) -> Path:
        """Export scenario comparison results.

        Args:
            comparison_df: Scenario comparison DataFrame
            format: Export format ('csv', 'json', 'excel')
            filename: Custom filename (optional; timestamped default otherwise)

        Returns:
            Path to exported file

        Raises:
            ValueError: If ``format`` is not one of the supported formats.
        """
        if filename is None:
            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
            filename = f"scenario_comparison_{timestamp}"

        if format.lower() == 'excel':
            # Multi-sheet workbook: raw data plus per-scenario summary.
            sheets = {
                'Raw_Data': comparison_df,
                'Summary': self._create_scenario_summary(comparison_df)
            }
            return self.export_to_excel(sheets, filename)
        elif format.lower() == 'csv':
            # No metadata for CSV to allow easy reading back
            return self.export_to_csv(comparison_df, filename, include_metadata=False)
        elif format.lower() == 'json':
            return self.export_to_json(comparison_df, filename)
        else:
            # CONSISTENCY FIX: previously dispatched via getattr() and raised
            # AttributeError for unknown formats; now matches the ValueError
            # raised by the other export_* helpers.
            raise ValueError(f"Unsupported format: {format}")

    def export_stand_metrics(self,
                             metrics_over_time: List[Dict],
                             format: str = 'csv',
                             filename: Optional[str] = None) -> Path:
        """Export stand metrics over time.

        Args:
            metrics_over_time: List of metric dictionaries
            format: Export format ('csv', 'json', 'excel')
            filename: Custom filename (optional; timestamped default otherwise)

        Returns:
            Path to exported file

        Raises:
            ValueError: If ``format`` is not one of the supported formats.
        """
        if filename is None:
            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
            filename = f"stand_metrics_{timestamp}"

        df = pd.DataFrame(metrics_over_time)

        # Round all numeric columns for presentation
        numeric_columns = df.select_dtypes(include=[np.number]).columns
        df[numeric_columns] = df[numeric_columns].round(2)

        if format.lower() == 'csv':
            # No metadata for CSV to allow easy reading back
            return self.export_to_csv(df, filename, include_metadata=False)
        elif format.lower() == 'json':
            return self.export_to_json(df, filename)
        elif format.lower() == 'excel':
            return self.export_to_excel(df, filename)
        else:
            raise ValueError(f"Unsupported format: {format}")

    def create_summary_report(self,
                              simulation_results: Dict[str, Any],
                              filename: Optional[str] = None) -> Path:
        """Create a comprehensive summary report.

        Args:
            simulation_results: Dictionary containing all simulation results;
                recognized keys are 'parameters', 'final_metrics',
                'growth_summary' and 'output_files' (all optional)
            filename: Custom filename (optional; timestamped default otherwise)

        Returns:
            Path to summary report file
        """
        if filename is None:
            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
            filename = f"simulation_summary_{timestamp}"

        # BUG FIX: path previously ignored ``filename`` (placeholder literal).
        filepath = self.output_dir / f"{filename}.txt"

        fmt = self._format_metric  # local alias for readability below

        with open(filepath, 'w') as f:
            f.write("FVS-Python Simulation Summary Report\n")
            f.write("=" * 50 + "\n\n")

            f.write(f"Generated: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n")
            f.write("Software: FVS-Python v1.0.0\n\n")

            # Simulation parameters
            if 'parameters' in simulation_results:
                params = simulation_results['parameters']
                f.write("Simulation Parameters:\n")
                f.write("-" * 25 + "\n")
                for key, value in params.items():
                    f.write(f"{key}: {value}\n")
                f.write("\n")

            # Final metrics.  BUG FIX: applying numeric format specs directly
            # to metrics.get(key, 'N/A') raised TypeError/ValueError whenever
            # a key was missing; _format_metric now degrades to 'N/A'.
            if 'final_metrics' in simulation_results:
                metrics = simulation_results['final_metrics']
                f.write("Final Stand Metrics:\n")
                f.write("-" * 25 + "\n")
                f.write(f"Age: {metrics.get('age', 'N/A')} years\n")
                f.write(f"Trees per Acre: {fmt(metrics.get('tpa', 'N/A'), '.0f')}\n")
                f.write(f"Mean DBH: {fmt(metrics.get('mean_dbh', 'N/A'), '.1f')} inches\n")
                f.write(f"Mean Height: {fmt(metrics.get('mean_height', 'N/A'), '.1f')} feet\n")
                f.write(f"Basal Area: {fmt(metrics.get('basal_area', 'N/A'), '.1f')} sq ft/acre\n")
                f.write(f"Volume: {fmt(metrics.get('volume', 'N/A'), '.0f')} cubic feet/acre\n")
                f.write("\n")

            # Growth summary
            if 'growth_summary' in simulation_results:
                f.write("Growth Summary:\n")
                f.write("-" * 25 + "\n")
                summary = simulation_results['growth_summary']
                f.write(f"Total DBH Growth: {fmt(summary.get('total_dbh_growth', 'N/A'), '.1f')} inches\n")
                f.write(f"Total Height Growth: {fmt(summary.get('total_height_growth', 'N/A'), '.1f')} feet\n")
                f.write(f"Total Volume Growth: {fmt(summary.get('total_volume_growth', 'N/A'), '.0f')} cu ft/acre\n")
                f.write(f"Survival Rate: {fmt(summary.get('survival_rate', 'N/A'), '.1%')}\n")
                f.write("\n")

            # File references
            f.write("Associated Files:\n")
            f.write("-" * 25 + "\n")
            if 'output_files' in simulation_results:
                for file_type, filepath_ref in simulation_results['output_files'].items():
                    f.write(f"{file_type}: {filepath_ref}\n")

        self.logger.info(f"Created summary report: {filepath}")
        return filepath

    @staticmethod
    def _format_metric(value, spec: str) -> str:
        """Format a numeric metric; degrade gracefully for missing values.

        Args:
            value: Metric value (number, or a placeholder such as 'N/A'/None)
            spec: A format spec such as '.1f' or '.1%'

        Returns:
            The formatted number, or ``str(value)`` when the value cannot be
            formatted numerically (e.g. the 'N/A' placeholder).
        """
        if value is None:
            return 'N/A'
        try:
            return format(value, spec)
        except (TypeError, ValueError):
            return str(value)

    def _json_serializer(self, obj):
        """JSON serializer for numpy types and pandas Timestamps.

        Used as the ``default`` hook for json.dump; anything unrecognized is
        stringified rather than raising TypeError.
        """
        if isinstance(obj, np.integer):
            return int(obj)
        elif isinstance(obj, np.floating):
            return float(obj)
        elif isinstance(obj, np.ndarray):
            return obj.tolist()
        elif isinstance(obj, pd.Timestamp):
            return obj.isoformat()
        return str(obj)

    def _create_scenario_summary(self, comparison_df: pd.DataFrame) -> pd.DataFrame:
        """Create summary statistics for scenario comparison.

        For each scenario, reports the metrics of the row at the maximum age.
        Returns an empty DataFrame when the required columns are absent.
        """
        # ROBUSTNESS FIX: also require 'age' (previously a missing 'age'
        # column raised KeyError inside the loop).
        if 'scenario' not in comparison_df.columns or 'age' not in comparison_df.columns:
            return pd.DataFrame()

        # Get final metrics for each scenario
        final_metrics = []
        for scenario in comparison_df['scenario'].unique():
            scenario_data = comparison_df[comparison_df['scenario'] == scenario]
            final_row = scenario_data[scenario_data['age'] == scenario_data['age'].max()].iloc[0]

            summary = {
                'scenario': scenario,
                'final_age': final_row['age'],
                'final_tpa': final_row.get('tpa', 0),
                'final_volume': final_row.get('volume', 0),
                'final_mean_dbh': final_row.get('mean_dbh', 0),
                'final_mean_height': final_row.get('mean_height', 0)
            }
            final_metrics.append(summary)

        return pd.DataFrame(final_metrics)
|
pyfvs/ecological_unit.py
ADDED
|
@@ -0,0 +1,272 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Ecological unit classification system for FVS-Python.
|
|
3
|
+
|
|
4
|
+
Implements the FVS Southern variant ecological unit (ECOUNIT) classification system
|
|
5
|
+
for modifying growth predictions based on ecological subsection codes.
|
|
6
|
+
|
|
7
|
+
The system uses two coefficient tables:
|
|
8
|
+
- Table 4.7.1.5: For mountain/province-level ecounits (M221, M222, M231, 221, 222, 231T)
|
|
9
|
+
- Table 4.7.1.6: For lowland ecounits (231L, 232, 234, 255, 411)
|
|
10
|
+
"""
|
|
11
|
+
import json
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
from typing import Dict, Any, Optional, Set
|
|
14
|
+
from .config_loader import get_config_loader
|
|
15
|
+
from .exceptions import ConfigurationError
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
# Ecological unit groups, keyed by which coefficient table covers them.
MOUNTAIN_PROVINCE_ECOUNITS: Set[str] = {"M221", "M222", "M231", "221", "222", "231T"}
LOWLAND_ECOUNITS: Set[str] = {"231L", "232", "234", "255", "411"}


def select_ecounit_table(ecounit_group: str) -> str:
    """Determine which coefficient table to use for an ecological unit group.

    Table 4.7.1.5 covers the mountain/province-level ecounits
    (M221, M222, M231, 221, 222, 231T); Table 4.7.1.6 covers the
    lowland ecounits (231L, 232, 234, 255, 411).

    Args:
        ecounit_group: Ecological unit group code (e.g., "M221", "232", "231L").
            Leading/trailing whitespace and case are ignored.

    Returns:
        Table identifier string: "table_4_7_1_5" or "table_4_7_1_6".
    """
    group = ecounit_group.strip().upper()
    if group in LOWLAND_ECOUNITS:
        return "table_4_7_1_6"
    # Mountain/province codes — and any unrecognized code — resolve to
    # table 4.7.1.5, which has the broader coverage.
    return "table_4_7_1_5"
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
class EcologicalUnitClassifier:
    """Classifier for mapping ecological subsection codes to ECOUNIT groups.

    This class implements the FVS Southern variant ecological unit classification
    system, handling both Table 4.7.1.5 (mountain/province-level regions) and
    Table 4.7.1.6 (lowland regions).
    """

    # Class-level cache for coefficient tables.
    # Shared by all instances; populated at most once per process until
    # reset_cache() is called.
    _coefficients_table_5: Optional[Dict[str, Any]] = None
    _coefficients_table_6: Optional[Dict[str, Any]] = None
    _tables_loaded: bool = False

    def __init__(self):
        """Initialize the ecological unit classifier.

        Loading is lazy and idempotent: the JSON tables are only read the
        first time any instance is constructed (or after reset_cache()).
        """
        if not EcologicalUnitClassifier._tables_loaded:
            self._load_coefficient_tables()

    def _load_coefficient_tables(self) -> None:
        """Load coefficient tables from JSON files in the cfg directory.

        Missing files are replaced with empty fallback tables (lookups then
        return defaults rather than failing).  A malformed JSON file raises
        ConfigurationError; any other load failure silently installs empty
        tables and marks loading done so it is not retried on every call.

        Raises:
            ConfigurationError: If a coefficient file exists but cannot be
                parsed as JSON.
        """
        try:
            loader = get_config_loader()
            cfg_dir = loader.cfg_dir

            # Load Table 4.7.1.5 (mountain/province ecounits)
            table_5_path = cfg_dir / "ecounit_coefficients_table_4_7_1_5.json"
            if table_5_path.exists():
                with open(table_5_path, 'r', encoding='utf-8') as f:
                    EcologicalUnitClassifier._coefficients_table_5 = json.load(f)
            else:
                EcologicalUnitClassifier._coefficients_table_5 = self._get_empty_table()

            # Load Table 4.7.1.6 (lowland ecounits)
            table_6_path = cfg_dir / "ecounit_coefficients_table_4_7_1_6.json"
            if table_6_path.exists():
                with open(table_6_path, 'r', encoding='utf-8') as f:
                    EcologicalUnitClassifier._coefficients_table_6 = json.load(f)
            else:
                EcologicalUnitClassifier._coefficients_table_6 = self._get_empty_table()

            EcologicalUnitClassifier._tables_loaded = True

        except json.JSONDecodeError as e:
            # Parse errors are surfaced: a present-but-corrupt file is a
            # configuration problem the caller must fix.
            raise ConfigurationError(
                f"Failed to parse ecological unit coefficient files: {str(e)}"
            ) from e
        except Exception as e:
            # Re-raise ConfigurationError raised from nested calls unchanged.
            if isinstance(e, ConfigurationError):
                raise
            # Set empty tables to avoid repeated failures
            EcologicalUnitClassifier._coefficients_table_5 = self._get_empty_table()
            EcologicalUnitClassifier._coefficients_table_6 = self._get_empty_table()
            EcologicalUnitClassifier._tables_loaded = True

    @staticmethod
    def _get_empty_table() -> Dict[str, Any]:
        """Return an empty coefficient table structure.

        Mirrors the shape of the real JSON tables so lookup code can run
        unchanged against the fallback.
        """
        return {
            "table_description": "Empty fallback table",
            "ecological_unit_groups": {},
            "species_coefficients": {}
        }

    def get_coefficient(self, species_code: str, ecounit_group: str) -> float:
        """Get the ecological unit coefficient for a species and ecounit group.

        Args:
            species_code: FVS species code (e.g., "LP", "SP", "WO")
            ecounit_group: Ecological unit group code (e.g., "M221", "232")

        Returns:
            The coefficient value to add to the growth equation.
            Returns 0.0 if the species or ecounit is not found.
        """
        # Lookups are case/whitespace-insensitive.
        normalized_species = species_code.upper().strip()
        normalized_ecounit = ecounit_group.upper().strip()

        # Select the appropriate table
        table_name = select_ecounit_table(normalized_ecounit)

        if table_name == "table_4_7_1_5":
            coefficients = self._coefficients_table_5
        else:
            coefficients = self._coefficients_table_6

        # Table not loaded (e.g. reset_cache() without re-init): be lenient.
        if coefficients is None:
            return 0.0

        # Look up species coefficients
        species_data = coefficients.get("species_coefficients", {}).get(normalized_species)
        if species_data is None:
            return 0.0

        # Look up ecounit coefficient
        ecounit_coefficients = species_data.get("coefficients", {})
        return ecounit_coefficients.get(normalized_ecounit, 0.0)

    def get_base_ecounit(self, species_code: str, table: str = "table_4_7_1_5") -> Optional[str]:
        """Get the base ecological unit for a species.

        Args:
            species_code: FVS species code (case-insensitive).
            table: "table_4_7_1_5" (default) or anything else for table 4.7.1.6.

        Returns:
            The species' "base_ecounit" entry, or None when the table is not
            loaded or the species is absent.
        """
        normalized_species = species_code.upper().strip()

        if table == "table_4_7_1_5":
            coefficients = self._coefficients_table_5
        else:
            coefficients = self._coefficients_table_6

        if coefficients is None:
            return None

        species_data = coefficients.get("species_coefficients", {}).get(normalized_species)
        if species_data is None:
            return None

        return species_data.get("base_ecounit")

    def get_available_species(self, table: str = "table_4_7_1_5") -> list:
        """Get list of species codes available in a coefficient table.

        Args:
            table: "table_4_7_1_5" (default) or anything else for table 4.7.1.6.

        Returns:
            List of species code strings; empty if the table is not loaded.
        """
        if table == "table_4_7_1_5":
            coefficients = self._coefficients_table_5
        else:
            coefficients = self._coefficients_table_6

        if coefficients is None:
            return []

        return list(coefficients.get("species_coefficients", {}).keys())

    def get_all_coefficients_for_species(self, species_code: str) -> Dict[str, float]:
        """Get all ecological unit coefficients for a species across both tables.

        Returns:
            Mapping of ecounit group code to coefficient.  NOTE(review): if
            an ecounit code appeared in both tables, the table-4.7.1.6 value
            would win (dict.update order) — confirm codes are disjoint.
        """
        normalized_species = species_code.upper().strip()
        all_coefficients = {}

        # Get coefficients from Table 4.7.1.5
        if self._coefficients_table_5:
            species_data = self._coefficients_table_5.get(
                "species_coefficients", {}
            ).get(normalized_species, {})
            all_coefficients.update(species_data.get("coefficients", {}))

        # Get coefficients from Table 4.7.1.6
        if self._coefficients_table_6:
            species_data = self._coefficients_table_6.get(
                "species_coefficients", {}
            ).get(normalized_species, {})
            all_coefficients.update(species_data.get("coefficients", {}))

        return all_coefficients

    def is_lowland_ecounit(self, ecounit_group: str) -> bool:
        """Check if an ecological unit group is classified as lowland."""
        normalized_ecounit = ecounit_group.upper().strip()
        return normalized_ecounit in LOWLAND_ECOUNITS

    def is_mountain_province_ecounit(self, ecounit_group: str) -> bool:
        """Check if an ecological unit group is classified as mountain/province."""
        normalized_ecounit = ecounit_group.upper().strip()
        return normalized_ecounit in MOUNTAIN_PROVINCE_ECOUNITS

    @classmethod
    def reset_cache(cls) -> None:
        """Reset the cached coefficient tables.

        The next instance constructed after this call will reload the JSON
        files from disk.
        """
        cls._coefficients_table_5 = None
        cls._coefficients_table_6 = None
        cls._tables_loaded = False
|
|
213
|
+
|
|
214
|
+
|
|
215
|
+
def get_ecounit_effect(species_code: str, ecounit_group: str) -> float:
    """Get the ecological unit coefficient for a species/ecounit combination.

    Convenience wrapper around EcologicalUnitClassifier: selects the
    appropriate coefficient table (lowland vs. upland/mountain) for the
    ecounit group and returns the additive growth-equation coefficient.

    Args:
        species_code: FVS species code (e.g., "LP", "SP", "WO")
        ecounit_group: Ecological unit group code (e.g., "M221", "232")

    Returns:
        The ecological unit coefficient (effect) for the combination;
        0.0 when the species or ecounit is not present in the table.
    """
    return EcologicalUnitClassifier().get_coefficient(species_code, ecounit_group)
|
|
232
|
+
|
|
233
|
+
|
|
234
|
+
def create_classifier() -> EcologicalUnitClassifier:
    """Factory function to create an ecological unit classifier."""
    classifier = EcologicalUnitClassifier()
    return classifier
|
|
237
|
+
|
|
238
|
+
|
|
239
|
+
def get_ecounit_summary(species_code: str) -> Dict[str, Any]:
    """Get a summary of ecological unit effects for a species.

    Returns a dict with the species code, the non-zero (or base-unit)
    coefficients from each table, and the base ecounit recorded for the
    species in each table.
    """
    classifier = EcologicalUnitClassifier()
    species = species_code.upper().strip()

    def _collect_effects(groups, table_name):
        # Keep a unit when it has a non-zero effect, or when it is the
        # species' base ecounit (whose effect may legitimately be 0.0).
        effects = {}
        for unit in groups:
            value = classifier.get_coefficient(species, unit)
            if value != 0.0 or unit == classifier.get_base_ecounit(species, table_name):
                effects[unit] = value
        return effects

    return {
        "species": species,
        "mountain_province_effects": _collect_effects(
            MOUNTAIN_PROVINCE_ECOUNITS, "table_4_7_1_5"
        ),
        "lowland_effects": _collect_effects(LOWLAND_ECOUNITS, "table_4_7_1_6"),
        "base_ecounit_table_5": classifier.get_base_ecounit(species, "table_4_7_1_5"),
        "base_ecounit_table_6": classifier.get_base_ecounit(species, "table_4_7_1_6"),
    }
|