ebm 0.99.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ebm/__init__.py +0 -0
- ebm/__main__.py +152 -0
- ebm/__version__.py +1 -0
- ebm/cmd/__init__.py +0 -0
- ebm/cmd/calibrate.py +83 -0
- ebm/cmd/calibrate_excel_com_io.py +128 -0
- ebm/cmd/heating_systems_by_year.py +18 -0
- ebm/cmd/helpers.py +134 -0
- ebm/cmd/initialize.py +167 -0
- ebm/cmd/migrate.py +92 -0
- ebm/cmd/pipeline.py +227 -0
- ebm/cmd/prepare_main.py +174 -0
- ebm/cmd/result_handler.py +272 -0
- ebm/cmd/run_calculation.py +221 -0
- ebm/data/area.csv +92 -0
- ebm/data/area_new_residential_buildings.csv +3 -0
- ebm/data/area_per_person.csv +12 -0
- ebm/data/building_code_parameters.csv +9 -0
- ebm/data/energy_need_behaviour_factor.csv +6 -0
- ebm/data/energy_need_improvements.csv +7 -0
- ebm/data/energy_need_original_condition.csv +534 -0
- ebm/data/heating_system_efficiencies.csv +13 -0
- ebm/data/heating_system_forecast.csv +9 -0
- ebm/data/heating_system_initial_shares.csv +1113 -0
- ebm/data/holiday_home_energy_consumption.csv +24 -0
- ebm/data/holiday_home_stock.csv +25 -0
- ebm/data/improvement_building_upgrade.csv +9 -0
- ebm/data/new_buildings_residential.csv +32 -0
- ebm/data/population_forecast.csv +51 -0
- ebm/data/s_curve.csv +40 -0
- ebm/energy_consumption.py +307 -0
- ebm/extractors.py +115 -0
- ebm/heating_system_forecast.py +472 -0
- ebm/holiday_home_energy.py +341 -0
- ebm/migrations.py +224 -0
- ebm/model/__init__.py +0 -0
- ebm/model/area.py +403 -0
- ebm/model/bema.py +149 -0
- ebm/model/building_category.py +150 -0
- ebm/model/building_condition.py +78 -0
- ebm/model/calibrate_energy_requirements.py +84 -0
- ebm/model/calibrate_heating_systems.py +180 -0
- ebm/model/column_operations.py +157 -0
- ebm/model/construction.py +827 -0
- ebm/model/data_classes.py +223 -0
- ebm/model/database_manager.py +410 -0
- ebm/model/dataframemodels.py +115 -0
- ebm/model/defaults.py +30 -0
- ebm/model/energy_need.py +6 -0
- ebm/model/energy_need_filter.py +182 -0
- ebm/model/energy_purpose.py +115 -0
- ebm/model/energy_requirement.py +353 -0
- ebm/model/energy_use.py +202 -0
- ebm/model/enums.py +8 -0
- ebm/model/exceptions.py +4 -0
- ebm/model/file_handler.py +388 -0
- ebm/model/filter_scurve_params.py +83 -0
- ebm/model/filter_tek.py +152 -0
- ebm/model/heat_pump.py +53 -0
- ebm/model/heating_systems.py +20 -0
- ebm/model/heating_systems_parameter.py +17 -0
- ebm/model/heating_systems_projection.py +3 -0
- ebm/model/heating_systems_share.py +28 -0
- ebm/model/scurve.py +224 -0
- ebm/model/tek.py +1 -0
- ebm/s_curve.py +515 -0
- ebm/services/__init__.py +0 -0
- ebm/services/calibration_writer.py +262 -0
- ebm/services/console.py +106 -0
- ebm/services/excel_loader.py +66 -0
- ebm/services/files.py +38 -0
- ebm/services/spreadsheet.py +289 -0
- ebm/temp_calc.py +99 -0
- ebm/validators.py +565 -0
- ebm-0.99.3.dist-info/METADATA +217 -0
- ebm-0.99.3.dist-info/RECORD +80 -0
- ebm-0.99.3.dist-info/WHEEL +5 -0
- ebm-0.99.3.dist-info/entry_points.txt +3 -0
- ebm-0.99.3.dist-info/licenses/LICENSE +21 -0
- ebm-0.99.3.dist-info/top_level.txt +1 -0
ebm/__init__.py
ADDED
File without changes
ebm/__main__.py
ADDED
@@ -0,0 +1,152 @@
"""EBM entry point when running as a script or module."""
import os
import pathlib
import sys

import pandas as pd
from loguru import logger

from ebm.cmd import prepare_main
from ebm.cmd.helpers import configure_json_log, configure_loglevel, load_environment_from_dotenv
from ebm.cmd.initialize import create_output_directory, init
from ebm.cmd.migrate import migrate_directories
from ebm.cmd.pipeline import export_energy_model_reports
from ebm.cmd.result_handler import EbmDefaultHandler, append_result, transform_model_to_horizontal
from ebm.cmd.run_calculation import validate_years
from ebm.model.building_category import BuildingCategory
from ebm.model.database_manager import DatabaseManager
from ebm.model.enums import ReturnCode
from ebm.model.file_handler import FileHandler

df = None


def main() -> tuple[ReturnCode, pd.DataFrame | None]:
    """
    Execute the EBM module as a script.

    This function serves as the entry point for the script. It handles argument parsing,
    initializes necessary components, and orchestrates the main workflow of the script.

    Returns
    -------
    exit code : tuple[ReturnCode, pd.DataFrame]
        zero when the program exits gracefully

    """
    load_environment_from_dotenv()
    configure_loglevel(log_format=os.environ.get('LOG_FORMAT', '{level.icon} <level>{message}</level>'))
    configure_json_log()

    logger.debug(f'Starting {sys.executable} {__file__}')

    program_name = 'ebm'
    default_path = pathlib.Path('output/ebm_output.xlsx')

    arguments = prepare_main.make_arguments(program_name, default_path)

    # Make local variables from arguments for clarity
    building_categories = [BuildingCategory.from_string(b_c) for b_c in arguments.categories]
    if not building_categories:
        building_categories = list(BuildingCategory)

    # `;` will normally be interpreted as the end of a line when typed in a shell. If the
    # delimiter is empty, assume the user meant `;`. An empty delimiter is not valid anyway.
    csv_delimiter = arguments.csv_delimiter if arguments.csv_delimiter else ';'

    # Make sure everything is working as expected
    model_years = validate_years(start_year=arguments.start_year, end_year=arguments.end_year)

    input_directory = arguments.input
    logger.info(f'Using data from "{input_directory}"')
    database_manager = DatabaseManager(file_handler=FileHandler(directory=input_directory))

    # Create input directory if requested
    if arguments.create_input:
        if init(database_manager.file_handler):
            logger.success('Finished creating input files in {input_directory}',
                           input_directory=database_manager.file_handler.input_directory)
            return ReturnCode.OK, None
        # Exit with 0 for success. The assumption is that the user would like to review the input before proceeding.
        return ReturnCode.MISSING_INPUT_FILES, None
    if arguments.migrate:
        migrate_directories([database_manager.file_handler.input_directory])
        logger.success('Finished migration')
        return ReturnCode.OK, None

    missing_input_error = f"""
Use `<program name> --create-input --input={input_directory}` to create an input directory with the default input files
""".strip().replace('\n', ' ')

    # Make sure all required files exist
    try:
        missing_files = database_manager.file_handler.check_for_missing_files()
        if missing_files:
            print(missing_input_error, file=sys.stderr)
            return ReturnCode.MISSING_INPUT_FILES, None
    except FileNotFoundError as file_not_found:
        if str(file_not_found).startswith('Input Directory Not Found'):
            logger.error(f'Input Directory "{input_directory}" Not Found')
            print(missing_input_error, file=sys.stderr)
        return ReturnCode.FILE_NOT_ACCESSIBLE, None

    database_manager.file_handler.validate_input_files()

    output_file = arguments.output_file
    create_output_directory(filename=output_file)

    output_file_return_code = prepare_main.check_output_file_status(output_file, arguments.force, default_path,
                                                                    program_name)
    if output_file_return_code != ReturnCode.OK:
        return output_file_return_code, None

    step_choice = arguments.step

    convert_result_to_horizontal: bool = arguments.horizontal_years

    default_handler = EbmDefaultHandler()

    model = None

    files_to_open = [output_file]

    if step_choice == 'energy-use':
        output_directory = output_file if output_file.is_dir() else output_file.parent
        files_to_open = export_energy_model_reports(model_years, database_manager, output_directory)
    else:
        model = default_handler.extract_model(model_years, building_categories, database_manager, step_choice)

        if convert_result_to_horizontal and (step_choice in ['area-forecast', 'energy-requirements']) and output_file.suffix == '.xlsx':
            sheet_name_prefix = 'area' if step_choice == 'area-forecast' else 'energy'
            logger.debug(f'Transform heating {step_choice}')

            df = transform_model_to_horizontal(model.reset_index())
            append_result(output_file, df, f'{sheet_name_prefix} condition')

            model = model.reset_index()
            # Demolition should not be summed any further
            model = model[model.building_condition != 'demolition']
            model['building_condition'] = 'all'
            df = transform_model_to_horizontal(model)
            append_result(output_file, df, f'{sheet_name_prefix} TEK')

            model['building_code'] = 'all'
            df = transform_model_to_horizontal(model)
            append_result(output_file, df, f'{sheet_name_prefix} category')
            logger.success('Wrote {filename}', filename=output_file)
        else:
            default_handler.write_tqdm_result(output_file, model, csv_delimiter)

    for file_to_open in files_to_open:
        if arguments.open or os.environ.get('EBM_ALWAYS_OPEN', 'FALSE').upper() == 'TRUE':
            logger.info(f'Open {file_to_open}')
            os.startfile(file_to_open, 'open')
        else:
            logger.debug(f'Finished {file_to_open}')

    return ReturnCode.OK, model


if __name__ == '__main__':
    exit_code, result = main()
    df = result
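
Note: a minimal usage sketch for this entry point, inferred only from the flags and step names referenced above (`--create-input`, `--input`, `--debug`; steps `energy-use`, `area-forecast`, `energy-requirements`). The authoritative argument definitions live in `ebm.cmd.prepare_main.make_arguments`, which is not shown in this diff, so treat the invocations as illustrative rather than definitive:

    # Hypothetical command lines (flag names taken from the references in main()):
    #   python -m ebm --create-input --input=input    # write the default input files, then exit
    #   python -m ebm --input=input --debug           # run the calculation with DEBUG logging
    #
    # main() can also be called programmatically; it parses sys.argv itself and
    # returns a (ReturnCode, DataFrame-or-None) pair.
    import sys

    from ebm.__main__ import main
    from ebm.model.enums import ReturnCode

    return_code, model = main()
    sys.exit(0 if return_code == ReturnCode.OK else 1)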
ebm/__version__.py
ADDED
@@ -0,0 +1 @@
version = "0.99.3"
ebm/cmd/__init__.py
ADDED
File without changes
ebm/cmd/calibrate.py
ADDED
@@ -0,0 +1,83 @@
import pathlib

from loguru import logger
import pandas as pd

from dotenv import load_dotenv

from ebm.model.bema import map_sort_order

from ebm.model.calibrate_heating_systems import extract_area_forecast, extract_energy_requirements, \
    extract_heating_systems
from ebm.model.data_classes import YearRange
from ebm.services.files import make_unique_path

CALIBRATION_YEAR = 2023

model_period = YearRange(2020, 2050)
start_year = model_period.start
end_year = model_period.end


def run_calibration(database_manager,
                    calibration_year,
                    area_forecast: pd.DataFrame = None,
                    write_to_output=False):
    """

    Parameters
    ----------
    database_manager : ebm.model.database_manager.DatabaseManager

    Returns
    -------
    pandas.core.frame.DataFrame
    """
    load_dotenv(pathlib.Path('.env'))

    input_directory = database_manager.file_handler.input_directory

    logger.info(f'Using input directory "{input_directory}"')
    logger.info('Extract area forecast')
    area_forecast = extract_area_forecast(database_manager) if area_forecast is None else area_forecast
    if write_to_output:
        write_dataframe(area_forecast[area_forecast.year == calibration_year], 'area_forecast')

    logger.info('Extract energy requirements')
    energy_requirements = extract_energy_requirements(area_forecast, database_manager)
    if write_to_output:
        en_req = energy_requirements.xs(2023, level='year').reset_index().sort_values(
            by='building_category', key=lambda x: x.map(map_sort_order))
        write_dataframe(en_req, 'energy_requirements')
        grouped = en_req[['building_category', 'm2', 'kwh_m2', 'energy_requirement']].groupby(
            by=['building_category'], as_index=False).agg({'m2': 'first', 'kwh_m2': 'first', 'energy_requirement': 'sum'})
        grouped = grouped.sort_values(by='building_category', key=lambda x: x.map(map_sort_order))
        write_dataframe(grouped, 'energy_requirements_sum', sheet_name='sum')

    logger.info('Extract heating systems')
    heating_systems = extract_heating_systems(energy_requirements, database_manager)
    if write_to_output:
        write_dataframe(heating_systems.xs(2023, level='year'), 'heating_systems')

    return heating_systems


def write_dataframe(df, name='dataframe', sheet_name='Sheet1'):
    output_directory = pathlib.Path('output')
    if output_directory.is_dir():
        logger.debug(f'Writing {name} to file')
        output_file = output_directory / f'{name}.xlsx'
        output_file = make_unique_path(output_file)
        df.to_excel(output_file, merge_cells=False, sheet_name=sheet_name)
        logger.info(f'Wrote {name} to {output_file} ! {sheet_name if sheet_name != "Sheet1" else ""}')
    else:
        logger.warning(f'Cannot write to {output_directory}. Directory does not exist')


def main():
    raise NotImplementedError('Running calibrate as a script is not supported')


if __name__ == '__main__':
    main()
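
Note: a short sketch of how `run_calibration` is typically driven, mirroring the call made in `calibrate_excel_com_io.py` further down; the `kalibrert` input directory is that script's convention, not a requirement:

    from ebm.cmd.calibrate import run_calibration, write_dataframe
    from ebm.model.database_manager import DatabaseManager
    from ebm.model.file_handler import FileHandler

    # Point the model at a prepared input directory containing the calibration CSVs.
    database_manager = DatabaseManager(FileHandler(directory='kalibrert'))

    # Returns the heating-systems DataFrame; intermediate frames are only written
    # when write_to_output=True and an output/ directory already exists.
    heating_systems = run_calibration(database_manager, calibration_year=2023, write_to_output=False)
    write_dataframe(heating_systems, name='heating_systems')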
ebm/cmd/calibrate_excel_com_io.py
ADDED
@@ -0,0 +1,128 @@
import os
import pathlib
import time

import pandas as pd
from dotenv import load_dotenv
from loguru import logger

from ebm.cmd.calibrate import run_calibration, write_dataframe
from ebm.cmd.helpers import configure_loglevel
from ebm.model.file_handler import FileHandler
from ebm.model.database_manager import DatabaseManager
from ebm.model.calibrate_energy_requirements import EnergyRequirementCalibrationWriter, \
    EnergyConsumptionCalibrationWriter
from ebm.model.calibrate_heating_systems import DistributionOfHeatingSystems, group_heating_systems_by_energy_carrier
from ebm.services.calibration_writer import ComCalibrationReader, ExcelComCalibrationResultWriter

LOG_FORMAT = """
<green>{time:HH:mm:ss.SSS}</green> | <blue>{elapsed}</blue> | <level>{level: <8}</level> | <cyan>{function: <20}</cyan>:<cyan>{line: <3}</cyan> - <level>{message}</level>
""".strip()


def heatpump_filter(df):
    vannbasert = [n for n in df.index.get_level_values('heating_systems').unique() if
                  n.startswith('HP Central heating')]
    elektrisk = [n for n in df.index.get_level_values('heating_systems').unique() if
                 n.startswith('HP') and n not in vannbasert]
    el_slice = (slice(None), ['original_condition'], ['heating_rv'], ['TEK07'], slice(None), elektrisk + vannbasert)
    df = df.loc[el_slice]  # air-to-air
    return df


def main():
    start_time = time.time()
    load_dotenv(pathlib.Path('.env'))
    configure_loglevel(log_format=LOG_FORMAT)

    write_to_disk = os.environ.get('EBM_WRITE_TO_DISK', 'False').upper() == 'TRUE'
    calibration_year = int(os.environ.get('EBM_CALIBRATION_YEAR', 2023))
    calibration_spreadsheet_name = os.environ.get("EBM_CALIBRATION_OUT", "Kalibreringsark.xlsx!Ut")
    calibration_sheet = os.environ.get("EBM_CALIBRATION_SHEET", "Kalibreringsark.xlsx!Kalibreringsfaktorer")
    energy_requirements_calibration_file = os.environ.get('EBM_CALIBRATION_ENERGY_REQUIREMENT',
                                                          f'kalibrering/{FileHandler.CALIBRATE_ENERGY_REQUIREMENT}')
    energy_consumption_calibration_file = os.environ.get('EBM_CALIBRATION_ENERGY_CONSUMPTION',
                                                         f'kalibrering/{FileHandler.CALIBRATE_ENERGY_CONSUMPTION}')

    energy_source_target_cells = os.environ.get('EBM_CALIBRATION_ENERGY_SOURCE_USAGE', 'C64:E68')
    ebm_calibration_energy_heating_pump = os.environ.get('EBM_CALIBRATION_ENERGY_HEATING_PUMP', 'C72:E74')
    hs_distribution_cells = os.environ.get('EBM_CALIBRATION_ENERGY_HEATING_SYSTEMS_DISTRIBUTION', 'C32:F44')

    output_directory = pathlib.Path('output')

    logger.info(f'Loading {calibration_sheet}')
    workbook_name = calibration_sheet.split('!')[0]
    sheet_name = calibration_sheet.split('!')[1] if '!' in calibration_sheet else 'Kalibreringsfaktorer'

    com_calibration_reader = ComCalibrationReader(workbook_name, sheet_name)
    calibration = com_calibration_reader.extract()
    logger.info(f'Make {calibration_sheet} compatible with ebm')
    energy_source_by_building_group = com_calibration_reader.transform(calibration)

    logger.info('Write calibration to ebm')
    eq_calibration_writer = EnergyRequirementCalibrationWriter()
    eq_calibration_writer.load(energy_source_by_building_group, energy_requirements_calibration_file)

    ec_calibration_writer = EnergyConsumptionCalibrationWriter()
    ec_calibration = ec_calibration_writer.transform(energy_source_by_building_group)
    ec_calibration_writer.load(ec_calibration, energy_consumption_calibration_file)

    logger.info('Calculate calibrated energy use')
    area_forecast = None
    area_forecast_file = pathlib.Path('kalibrert/area_forecast.csv')
    if area_forecast_file.is_file():
        logger.info(f' Using {area_forecast_file}')
        area_forecast = pd.read_csv(area_forecast_file)

    database_manager = DatabaseManager(FileHandler(directory='kalibrert'))

    df = run_calibration(database_manager, calibration_year=2023,
                         area_forecast=area_forecast, write_to_output=write_to_disk)

    # df = heatpump_filter(df)

    logger.info('Transform heating systems')

    energy_source_by_building_group = group_heating_systems_by_energy_carrier(df)
    energy_source_by_building_group = energy_source_by_building_group.xs(2023, level='year')

    if write_to_disk:
        if not output_directory.is_dir():
            output_directory.mkdir()
        write_dataframe(energy_source_by_building_group, 'energy_source_by_building_group')

    energy_source_by_building_group = energy_source_by_building_group.fillna(0)

    logger.info(f'Writing heating systems distribution to {calibration_spreadsheet_name}')
    hs_distribution_writer = ExcelComCalibrationResultWriter(excel_filename=calibration_spreadsheet_name,
                                                             target_cells=hs_distribution_cells)

    distribution_of_heating_systems = DistributionOfHeatingSystems()
    shares_start_year = distribution_of_heating_systems.extract(database_manager)
    heating_systems_distribution = distribution_of_heating_systems.transform(shares_start_year)

    hs_distribution_writer.extract()
    hs_distribution_writer.transform(heating_systems_distribution)
    hs_distribution_writer.load()

    logger.info(f'Writing energy_source using writer to {calibration_spreadsheet_name}')
    energy_source_excel_com_writer = ExcelComCalibrationResultWriter(
        excel_filename=calibration_spreadsheet_name, target_cells=energy_source_target_cells)

    energy_source_excel_com_writer.extract()
    energy_source_excel_com_writer.transform(energy_source_by_building_group)
    energy_source_excel_com_writer.load()

    logger.info(f'Writing calculated energy pump use to {calibration_spreadsheet_name}')
    heatpump_excel_com_writer = ExcelComCalibrationResultWriter(
        excel_filename=calibration_spreadsheet_name, target_cells=ebm_calibration_energy_heating_pump)

    heatpump_excel_com_writer.extract()
    heatpump_excel_com_writer.transform(energy_source_by_building_group)
    heatpump_excel_com_writer.load()

    logger.info(f'Calibrated {calibration_spreadsheet_name} in {round(time.time() - start_time, 2)} seconds')


if __name__ == '__main__':
    main()
ebm/cmd/heating_systems_by_year.py
ADDED
@@ -0,0 +1,18 @@
import pandas as pd

from ebm.model.calibrate_heating_systems import group_heating_systems_by_energy_carrier
from ebm.model.data_classes import YearRange


def group_heating_systems_energy_source_by_year(hs: pd.DataFrame) -> pd.DataFrame:
    df = hs.set_index(['building_category', 'building_condition', 'purpose', 'building_code', 'year', 'heating_systems'])

    return group_heating_systems_by_energy_carrier(df)




def group_heating_systems_energy_source_by_year_horizontal(hs: pd.DataFrame, year_range: YearRange = None) -> pd.DataFrame:
    df = group_heating_systems_energy_source_by_year(hs)
    return df.reset_index().pivot(columns=['year'], index=['building_category', 'energy_source'], values=['energy_use'])
ebm/cmd/helpers.py
ADDED
@@ -0,0 +1,134 @@
import os
import pathlib
import sys
from datetime import datetime

from dotenv import find_dotenv, load_dotenv
from loguru import logger


def load_environment_from_dotenv() -> None:
    """
    Load environment variables from a .env file located in the current working directory.

    If a .env file is found, its contents are loaded into the environment.
    """
    env_file = pathlib.Path(find_dotenv(usecwd=True))
    if env_file.is_file():
        logger.trace('Loading environment from {env_file}', env_file=env_file)
        load_dotenv(env_file)
    else:
        logger.trace(f'.env not found in {env_file}', env_file=env_file.absolute())


def configure_json_log(log_directory: str | bool = False) -> None:
    """
    Configure JSON logging using the `loguru` logger.

    This function sets up structured JSON logging to a file, with the log file path
    determined by the `LOG_DIRECTORY` environment variable or the provided `log_directory` argument.
    If `LOG_DIRECTORY` is set to 'TRUE', the default directory 'log' is used.
    If it is set to 'FALSE', logging is skipped.

    Parameters
    ----------
    log_directory : str or bool, optional
        The directory where the log file should be saved. If set to `False`, logging is disabled
        unless overridden by the `LOG_DIRECTORY` environment variable.

    Environment Variables
    ---------------------
    LOG_DIRECTORY : str
        Overrides the `log_directory` argument when set. Special values:
        - 'TRUE': uses default directory 'log'
        - 'FALSE': disables logging

    Notes
    -----
    - The log file is named using the current timestamp in ISO format (without colons).
    - The log file is serialized in JSON format.
    - The directory is created if it does not exist.

    Examples
    --------
    >>> configure_json_log("logs")
    >>> os.environ["LOG_DIRECTORY"] = "TRUE"

    >>> configure_json_log(False)

    """
    if not log_directory:
        return

    script_name = pathlib.Path(pathlib.Path(sys.argv[0]))
    file_stem = script_name.stem if script_name.stem != '__main__' else script_name.parent.name + script_name.stem
    if 'PYTEST_CURRENT_TEST' in os.environ and os.environ.get('PYTEST_CURRENT_TEST'):
        pytest_current_test = os.environ.get('PYTEST_CURRENT_TEST').split('::')
        file_stem = pathlib.Path(pytest_current_test[0]).stem + pytest_current_test[1].replace('(call)', '').strip()

    env_log_directory = os.environ.get('LOG_DIRECTORY', log_directory)
    if isinstance(env_log_directory, bool):
        env_log_directory = pathlib.Path.cwd() / 'log'
    log_to_json = str(env_log_directory).upper().strip() != 'FALSE'
    env_log_directory = env_log_directory if log_to_json and str(env_log_directory).upper().strip() != 'TRUE' else 'log'

    if log_to_json:
        log_directory = pathlib.Path(env_log_directory if env_log_directory else log_directory)
        if log_directory.is_file():
            logger.warning(f'LOG_DIRECTORY={log_directory} is a file. Skipping json logging')
            return
        log_directory.mkdir(exist_ok=True)

        log_start = datetime.now()
        timestamp = log_start.isoformat(timespec='seconds').replace(':', '')
        log_filename = log_directory / f'{file_stem}-{timestamp}.json'
        if log_filename.is_file():
            log_start_milliseconds = log_start.isoformat(timespec='milliseconds').replace(':', '')
            log_filename = log_filename.with_stem(f'{file_stem}-{log_start_milliseconds}')

        logger.debug(f'Logging json to {log_filename}')
        logger.add(log_filename, level=os.environ.get('LOG_LEVEL_JSON', 'TRACE'), serialize=True)
        if len(sys.argv) > 1:
            logger.info(f'argv={sys.argv[1:]}')
    else:
        logger.debug('Skipping json log. LOG_DIRECTORY is undefined.')


def configure_loglevel(log_format: str | None = None, level: str = 'INFO') -> None:
    """
    Configure the loguru logger with a specified log level and format.

    By default, sets the log level to INFO unless either:
    - The '--debug' flag is present in the command-line arguments (`sys.argv`), or
    - The environment variable DEBUG is set to 'TRUE' (case-insensitive).

    If debug mode is enabled, the log level is set to DEBUG and a filter is applied
    to suppress DEBUG logs from the 'ebm.model.file_handler' logger.

    Parameters
    ----------
    log_format : str, optional
        Custom format string for log messages. If not provided, the default format is used.
    level : str, optional
        Default log level to use when debug mode is not active. Defaults to 'INFO'.

    Returns
    -------
    None

    """
    logger.remove()
    options = {'level': level}
    if log_format:
        options['format'] = log_format

    # Accessing sys.argv directly since we want to figure out the log level before loading arguments with arg_parser.
    # Debug level may also be conveyed through environment variables, so read that from environ as well.
    if '--debug' in sys.argv or os.environ.get('DEBUG', '').upper() == 'TRUE':
        options['level'] = 'DEBUG'

    logger.add(sys.stderr,
               filter=lambda f: not (f['name'] == 'ebm.model.file_handler' and f['level'].name == 'DEBUG'),
               **options)
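
Note: the three helpers above are combined the same way `ebm/__main__.py` does it; a minimal sketch using the same `LOG_FORMAT` fallback seen there:

    import os

    from ebm.cmd.helpers import configure_json_log, configure_loglevel, load_environment_from_dotenv

    load_environment_from_dotenv()   # picks up a .env file from the current working directory
    configure_loglevel(log_format=os.environ.get('LOG_FORMAT', '{level.icon} <level>{message}</level>'))
    configure_json_log('log')        # or control via LOG_DIRECTORY=TRUE / FALSE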