ahuora-builder 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ahuora_builder/__init__.py +0 -0
- ahuora_builder/arc_manager.py +33 -0
- ahuora_builder/build_state.py +57 -0
- ahuora_builder/custom/PIDController.py +494 -0
- ahuora_builder/custom/PySMOModel.py +178 -0
- ahuora_builder/custom/SimpleEffectivenessHX_DH.py +727 -0
- ahuora_builder/custom/__init__.py +0 -0
- ahuora_builder/custom/add_initial_dynamics.py +35 -0
- ahuora_builder/custom/custom_compressor.py +107 -0
- ahuora_builder/custom/custom_cooler.py +33 -0
- ahuora_builder/custom/custom_heat_exchanger.py +183 -0
- ahuora_builder/custom/custom_heat_exchanger_1d.py +258 -0
- ahuora_builder/custom/custom_heater.py +41 -0
- ahuora_builder/custom/custom_pressure_changer.py +34 -0
- ahuora_builder/custom/custom_pump.py +107 -0
- ahuora_builder/custom/custom_separator.py +371 -0
- ahuora_builder/custom/custom_tank.py +133 -0
- ahuora_builder/custom/custom_turbine.py +132 -0
- ahuora_builder/custom/custom_valve.py +300 -0
- ahuora_builder/custom/custom_variable.py +29 -0
- ahuora_builder/custom/direct_steam_injection.py +371 -0
- ahuora_builder/custom/energy/__init__.py +0 -0
- ahuora_builder/custom/energy/acBus.py +280 -0
- ahuora_builder/custom/energy/ac_property_package.py +279 -0
- ahuora_builder/custom/energy/battery.py +170 -0
- ahuora_builder/custom/energy/bus.py +182 -0
- ahuora_builder/custom/energy/energy_mixer.py +195 -0
- ahuora_builder/custom/energy/energy_splitter.py +228 -0
- ahuora_builder/custom/energy/grid.py +173 -0
- ahuora_builder/custom/energy/hydro.py +169 -0
- ahuora_builder/custom/energy/link.py +137 -0
- ahuora_builder/custom/energy/load.py +155 -0
- ahuora_builder/custom/energy/mainDistributionBoard.py +257 -0
- ahuora_builder/custom/energy/power_property_package.py +253 -0
- ahuora_builder/custom/energy/solar.py +176 -0
- ahuora_builder/custom/energy/storage.py +230 -0
- ahuora_builder/custom/energy/storage_wrapper +0 -0
- ahuora_builder/custom/energy/tests/__init__.py +0 -0
- ahuora_builder/custom/energy/tests/test_bus.py +44 -0
- ahuora_builder/custom/energy/tests/test_energy_mixer.py +46 -0
- ahuora_builder/custom/energy/tests/test_mdb.py +49 -0
- ahuora_builder/custom/energy/transformer.py +187 -0
- ahuora_builder/custom/energy/transformer_property_package.py +267 -0
- ahuora_builder/custom/energy/transmissionLine.py +228 -0
- ahuora_builder/custom/energy/wind.py +206 -0
- ahuora_builder/custom/hda_ideal_VLE.py +1341 -0
- ahuora_builder/custom/hda_reaction.py +182 -0
- ahuora_builder/custom/heat_exchanger_1d_wrapper.py +31 -0
- ahuora_builder/custom/integration_block.py +106 -0
- ahuora_builder/custom/inverted.py +81 -0
- ahuora_builder/custom/performance_curves.py +1 -0
- ahuora_builder/custom/reactions/__init__.py +0 -0
- ahuora_builder/custom/reactions/hda_stoich.py +10 -0
- ahuora_builder/custom/simple_separator.py +680 -0
- ahuora_builder/custom/tests/__init__.py +0 -0
- ahuora_builder/custom/tests/test_SimpleEffectivenessHX_DH.py +91 -0
- ahuora_builder/custom/tests/test_custom_tank.py +70 -0
- ahuora_builder/custom/tests/test_direct_steam_injection.py +41 -0
- ahuora_builder/custom/tests/test_simple_separator.py +46 -0
- ahuora_builder/custom/tests/test_waterpipe.py +46 -0
- ahuora_builder/custom/thermal_utility_systems/desuperheater.py +624 -0
- ahuora_builder/custom/thermal_utility_systems/header.py +889 -0
- ahuora_builder/custom/thermal_utility_systems/simple_heat_pump.py +567 -0
- ahuora_builder/custom/thermal_utility_systems/steam_header.py +353 -0
- ahuora_builder/custom/thermal_utility_systems/steam_user.py +944 -0
- ahuora_builder/custom/thermal_utility_systems/temp.py +349 -0
- ahuora_builder/custom/thermal_utility_systems/tests/test_desuperheater.py +142 -0
- ahuora_builder/custom/thermal_utility_systems/tests/test_header.py +998 -0
- ahuora_builder/custom/thermal_utility_systems/tests/test_ntu_hx.py +129 -0
- ahuora_builder/custom/thermal_utility_systems/tests/test_simple_heat_pump.py +120 -0
- ahuora_builder/custom/thermal_utility_systems/tests/test_steam_header.py +703 -0
- ahuora_builder/custom/thermal_utility_systems/tests/test_steam_user.py +277 -0
- ahuora_builder/custom/thermal_utility_systems/tests/test_waterpipe.py +36 -0
- ahuora_builder/custom/thermal_utility_systems/tests/test_willans_turbine.py +253 -0
- ahuora_builder/custom/thermal_utility_systems/willans_turbine.py +804 -0
- ahuora_builder/custom/translator.py +129 -0
- ahuora_builder/custom/updated_pressure_changer.py +1404 -0
- ahuora_builder/custom/valve_wrapper.py +38 -0
- ahuora_builder/custom/water_tank_with_units.py +456 -0
- ahuora_builder/diagnostics/__init__.py +0 -0
- ahuora_builder/diagnostics/infeasibilities.py +40 -0
- ahuora_builder/diagnostics/tests/__init__.py +0 -0
- ahuora_builder/diagnostics/tests/test_infeasibilities.py +28 -0
- ahuora_builder/flowsheet_manager.py +542 -0
- ahuora_builder/flowsheet_manager_type.py +20 -0
- ahuora_builder/generate_python_file.py +440 -0
- ahuora_builder/methods/BlockContext.py +84 -0
- ahuora_builder/methods/__init__.py +0 -0
- ahuora_builder/methods/adapter.py +355 -0
- ahuora_builder/methods/adapter_library.py +549 -0
- ahuora_builder/methods/adapter_methods.py +80 -0
- ahuora_builder/methods/expression_parsing.py +105 -0
- ahuora_builder/methods/load_unit_model.py +147 -0
- ahuora_builder/methods/slice_manipulation.py +7 -0
- ahuora_builder/methods/tests/__init__.py +0 -0
- ahuora_builder/methods/tests/test_expression_parsing.py +15 -0
- ahuora_builder/methods/units_handler.py +129 -0
- ahuora_builder/ml_wizard.py +101 -0
- ahuora_builder/port_manager.py +20 -0
- ahuora_builder/properties_manager.py +44 -0
- ahuora_builder/property_package_manager.py +78 -0
- ahuora_builder/solver.py +38 -0
- ahuora_builder/tear_manager.py +98 -0
- ahuora_builder/tests/__init__.py +0 -0
- ahuora_builder/tests/test_generate_python_file/__init__.py +0 -0
- ahuora_builder/tests/test_generate_python_file/configurations/compressor_generated.py +63 -0
- ahuora_builder/tests/test_generate_python_file/configurations/heat_exchanger_generated.py +70 -0
- ahuora_builder/tests/test_generate_python_file/configurations/pump_generated.py +84 -0
- ahuora_builder/tests/test_generate_python_file/configurations/recycle_generated.py +73 -0
- ahuora_builder/tests/test_generate_python_file/test_generate_python_file.py +108 -0
- ahuora_builder/tests/test_solver/__init__.py +0 -0
- ahuora_builder/tests/test_solver/configurations/BT_PR.json +59 -0
- ahuora_builder/tests/test_solver/configurations/BT_PR_solved.json +59 -0
- ahuora_builder/tests/test_solver/configurations/bus.json +99 -0
- ahuora_builder/tests/test_solver/configurations/bus_solved.json +50 -0
- ahuora_builder/tests/test_solver/configurations/compound_separator.json +377 -0
- ahuora_builder/tests/test_solver/configurations/compound_separator_solved.json +374 -0
- ahuora_builder/tests/test_solver/configurations/compressor.json +38 -0
- ahuora_builder/tests/test_solver/configurations/compressor_solved.json +68 -0
- ahuora_builder/tests/test_solver/configurations/constraints.json +44 -0
- ahuora_builder/tests/test_solver/configurations/constraints_solved.json +59 -0
- ahuora_builder/tests/test_solver/configurations/control.json +39 -0
- ahuora_builder/tests/test_solver/configurations/control_solved.json +68 -0
- ahuora_builder/tests/test_solver/configurations/dynamic_tank.json +733 -0
- ahuora_builder/tests/test_solver/configurations/dynamic_tank_solved.json +846 -0
- ahuora_builder/tests/test_solver/configurations/elimination.json +39 -0
- ahuora_builder/tests/test_solver/configurations/elimination_solved.json +68 -0
- ahuora_builder/tests/test_solver/configurations/expressions.json +68 -0
- ahuora_builder/tests/test_solver/configurations/expressions_solved.json +104 -0
- ahuora_builder/tests/test_solver/configurations/header.json +1192 -0
- ahuora_builder/tests/test_solver/configurations/header_solved.json +761 -0
- ahuora_builder/tests/test_solver/configurations/heat_exchanger.json +63 -0
- ahuora_builder/tests/test_solver/configurations/heat_exchanger_solved.json +104 -0
- ahuora_builder/tests/test_solver/configurations/heat_pump.json +137 -0
- ahuora_builder/tests/test_solver/configurations/heat_pump_solved.json +104 -0
- ahuora_builder/tests/test_solver/configurations/machine_learning.json +2156 -0
- ahuora_builder/tests/test_solver/configurations/machine_learning_solved.json +266 -0
- ahuora_builder/tests/test_solver/configurations/mass_flow_tear.json +77 -0
- ahuora_builder/tests/test_solver/configurations/mass_flow_tear_solved.json +68 -0
- ahuora_builder/tests/test_solver/configurations/milk_heater.json +521 -0
- ahuora_builder/tests/test_solver/configurations/milk_heater_solved.json +311 -0
- ahuora_builder/tests/test_solver/configurations/mixer.json +44 -0
- ahuora_builder/tests/test_solver/configurations/mixer_solved.json +86 -0
- ahuora_builder/tests/test_solver/configurations/optimization.json +62 -0
- ahuora_builder/tests/test_solver/configurations/optimization_solved.json +59 -0
- ahuora_builder/tests/test_solver/configurations/propane_heat_pump.json +167 -0
- ahuora_builder/tests/test_solver/configurations/propane_heat_pump_solved.json +158 -0
- ahuora_builder/tests/test_solver/configurations/propane_recycle.json +141 -0
- ahuora_builder/tests/test_solver/configurations/propane_recycle_solved.json +104 -0
- ahuora_builder/tests/test_solver/configurations/pump.json +64 -0
- ahuora_builder/tests/test_solver/configurations/pump_solved.json +59 -0
- ahuora_builder/tests/test_solver/configurations/pump_unit_conversions.json +63 -0
- ahuora_builder/tests/test_solver/configurations/recycle.json +49 -0
- ahuora_builder/tests/test_solver/configurations/recycle_solved.json +50 -0
- ahuora_builder/tests/test_solver/configurations/sb_vapor_frac.json +29 -0
- ahuora_builder/tests/test_solver/configurations/sb_vapor_frac_solved.json +29 -0
- ahuora_builder/tests/test_solver/configurations/solar.json +67 -0
- ahuora_builder/tests/test_solver/configurations/solar_solved.json +50 -0
- ahuora_builder/tests/test_solver/configurations/vapor_frac_target.json +67 -0
- ahuora_builder/tests/test_solver/configurations/vapor_frac_target_solved.json +68 -0
- ahuora_builder/tests/test_solver/test_solve_models.py +250 -0
- ahuora_builder/timing.py +65 -0
- ahuora_builder/types/__init__.py +1 -0
- ahuora_builder/unit_model_manager.py +48 -0
- ahuora_builder-0.1.0.dist-info/METADATA +14 -0
- ahuora_builder-0.1.0.dist-info/RECORD +167 -0
- ahuora_builder-0.1.0.dist-info/WHEEL +4 -0
|
@@ -0,0 +1,542 @@
|
|
|
1
|
+
from collections import defaultdict, deque
|
|
2
|
+
import re
|
|
3
|
+
from pyomo.network import SequentialDecomposition, Port
|
|
4
|
+
from pyomo.environ import (
|
|
5
|
+
ConcreteModel,
|
|
6
|
+
TransformationFactory,
|
|
7
|
+
SolverFactory,
|
|
8
|
+
Block,
|
|
9
|
+
Expression,
|
|
10
|
+
Constraint,
|
|
11
|
+
Objective,
|
|
12
|
+
minimize,
|
|
13
|
+
assert_optimal_termination,
|
|
14
|
+
ScalarVar,
|
|
15
|
+
units as pyunits,
|
|
16
|
+
TerminationCondition
|
|
17
|
+
)
|
|
18
|
+
from pyomo.core.base.constraint import ScalarConstraint, IndexedConstraint
|
|
19
|
+
from idaes.core import FlowsheetBlock
|
|
20
|
+
from idaes.core.util.model_statistics import report_statistics, degrees_of_freedom
|
|
21
|
+
import idaes.logger as idaeslog
|
|
22
|
+
from idaes.core.util import DiagnosticsToolbox
|
|
23
|
+
from ahuora_builder_types import FlowsheetSchema
|
|
24
|
+
from ahuora_builder_types.flowsheet_schema import SolvedFlowsheetSchema
|
|
25
|
+
from .property_package_manager import PropertyPackageManager
|
|
26
|
+
from .port_manager import PortManager
|
|
27
|
+
from .arc_manager import ArcManager
|
|
28
|
+
from .tear_manager import TearManager
|
|
29
|
+
from .unit_model_manager import UnitModelManager
|
|
30
|
+
from .methods.adapter_library import AdapterLibrary
|
|
31
|
+
from .methods.adapter import (
|
|
32
|
+
serialize_properties_map,
|
|
33
|
+
deactivate_fixed_guesses,
|
|
34
|
+
deactivate_component,
|
|
35
|
+
deactivate_components,
|
|
36
|
+
add_corresponding_constraint,
|
|
37
|
+
)
|
|
38
|
+
from .methods.expression_parsing import parse_expression, ExpressionParsingError
|
|
39
|
+
from .timing import start_timing
|
|
40
|
+
from .methods.units_handler import get_value, get_attached_unit, check_units_equivalent
|
|
41
|
+
from .properties_manager import PropertiesManager
|
|
42
|
+
from .custom.energy.power_property_package import PowerParameterBlock
|
|
43
|
+
from .custom.energy.ac_property_package import acParameterBlock
|
|
44
|
+
from .custom.energy.transformer_property_package import transformerParameterBlock
|
|
45
|
+
from pyomo.core.base.units_container import units, _PyomoUnit
|
|
46
|
+
from idaes.core.util.model_serializer import StoreSpec, from_json, to_json
|
|
47
|
+
from .diagnostics.infeasibilities import print_infeasibilities
|
|
48
|
+
|
|
49
|
+
# CSTR Imports from example flowsheet.
|
|
50
|
+
# To be used as placeholders until bespoke functions can be developed.
|
|
51
|
+
from .custom import hda_reaction as reaction_props
|
|
52
|
+
from .custom.hda_ideal_VLE import HDAParameterBlock
|
|
53
|
+
from ahuora_builder.properties_manager import PropertyComponent
|
|
54
|
+
from property_packages.build_package import build_package
|
|
55
|
+
|
|
56
|
+
# from amplpy import modules
|
|
57
|
+
# Import required to allow the library to set the PATH and allow conopt to be found.
|
|
58
|
+
|
|
59
|
+
def build_flowsheet(dynamic=False, time_set=None):
    """
    Build a ConcreteModel containing a FlowsheetBlock plus the property
    packages every flowsheet needs.

    Args:
        dynamic: whether to build a dynamic (time-discretised) flowsheet.
        time_set: list of time points for the flowsheet; defaults to [0].
            A None sentinel is used instead of a mutable default list so
            repeated calls cannot share (and mutate) the same list object.

    Returns:
        ConcreteModel with a populated ``fs`` FlowsheetBlock.
    """
    if time_set is None:
        time_set = [0]
    # create the model and the flowsheet block
    model = ConcreteModel()
    model.fs = FlowsheetBlock(dynamic=dynamic, time_set=time_set, time_units=units.s)
    model.fs.guess_vars = []
    model.fs.controlled_vars = Block()
    # Always add the power property package (as there's only one and there's no different property package types)
    # TODO: should this be on-demand?
    model.fs.power_pp = PowerParameterBlock()
    model.fs.ac_pp = acParameterBlock()
    model.fs.tr_pp = transformerParameterBlock()
    # properties map: { id: pyomo variable or expression }
    # used to map symbols for a sympy expression
    model.fs.properties_map = PropertiesManager()
    # list of component constraints to add later
    model.fs.constraint_exprs = []

    # Placeholder property packages for CSTR reactor.
    model.fs.BTHM_params = HDAParameterBlock()
    # Hard-coded peng-robinson package for reactor:
    model.fs.peng_robinson = build_package(
        "peng-robinson", ["benzene", "toluene", "hydrogen", "methane"], ["Liq", "Vap"]
    )

    # Reaction package for the HDA reaction
    model.fs.reaction_params = reaction_props.HDAReactionParameterBlock(
        property_package=model.fs.peng_robinson
    )

    return model
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
class FlowsheetManager:
|
|
93
|
+
"""
|
|
94
|
+
Manages the flowsheet, including the property packages, unit models, ports, arcs, and tears
|
|
95
|
+
Includes methods to load, initialise, and solve the flowsheet
|
|
96
|
+
"""
|
|
97
|
+
|
|
98
|
+
def __init__(self, schema: FlowsheetSchema) -> None:
    """
    Stores all relevant information about the flowsheet, without actually loading it.

    Builds an empty flowsheet model and wires up the managers for property
    packages, ports, unit models, arcs and tears. The managers are constructed
    in dependency order, but nothing is parsed/loaded until load() is called.

    Args:
        schema: the flowsheet description that will be loaded later.
    """
    self.timing = start_timing()
    self.timing.add_timing("initialise_flowsheet_manager")

    # Base model: flowsheet block plus the always-present property packages.
    self.model = build_flowsheet(dynamic=schema.dynamic, time_set=schema.time_set)

    self.schema = schema
    # Add property packages first, so that unit models can use them
    self.property_packages = PropertyPackageManager(self)
    # Add the port manager, so the unit models can register their ports
    self.ports = PortManager()
    # Add unit models
    self.unit_models = UnitModelManager(self)
    # Add arcs to connect the unit models together
    self.arcs = ArcManager(self)
    # set certain arcs as tears
    self.tears = TearManager(self)
|
|
118
|
+
|
|
119
|
+
def load(self) -> None:
    """
    Parses the schema and loads the model.

    Order matters: property packages first (unit models reference them),
    then unit models (which register their own ports), then arcs, then the
    schema's expressions/constraints. For dynamic flowsheets a backward
    finite-difference discretisation is applied last.
    """
    self.timing.step_into("load_flowsheet")
    # Load property packages first, so that unit models can use them
    self.timing.add_timing("load_property_packages")
    self.property_packages.load()
    self.timing.step_into("load_unit_models")
    self.unit_models.load()
    self.timing.step_out()
    # no need to load ports separately, they are loaded by the unit models
    # Load arcs to connect the unit models together
    self.arcs.load()
    # load any expressions
    self.load_specs()
    # if dynamics, apply finite difference transformation
    if self.schema.dynamic:
        print("performing finite difference with", len(self.model.fs.time), "time steps")
        TransformationFactory("dae.finite_difference").apply_to(
            self.model.fs,
            nfe=len(self.model.fs.time)-1, # Number of finite elements to use for discretization. We aren't adding any extra steps as our constraints dont work for that.
            wrt=self.model.fs.time,
            scheme="BACKWARD"
        )

    self.timing.step_out()
|
|
147
|
+
|
|
148
|
+
def load_specs(self) -> None:
    """
    Loads expressions and equality constraints from the schema.

    Expressions may reference each other via ``id_<n>`` tokens, so they are
    created in topological (dependency) order. Afterwards, the equality
    constraints collected in ``fs.constraint_exprs`` during unit-model
    loading are added; only scalar or time-indexed components are supported.

    Raises:
        ExpressionParsingError: if an expression or constraint cannot be
            parsed, or a constraint targets a non-time-indexed component.
        ValueError: if a constraint expression's units do not match the
            constrained component's units.
    """
    fs = self.model.fs
    specs = self.schema.expressions or []

    # Build dependency tree and sort specs before loading, so an expression
    # is always created after every expression it references.
    dependencies, result_ids = self.build_spec_dependency_tree(specs)
    sorted_result_ids = self.topological_sort(dependencies, result_ids)

    # load the specs (expressions within specifications tab)
    for result_id in sorted_result_ids:
        spec_config = next(spec for spec in specs if spec["id"] == result_id)
        expression_str = spec_config["expression"]
        # Built outside the try-block so it is always bound in the handler.
        component_name = f"{spec_config['name']}_{spec_config['id']}"
        try:
            def expression_rule(blk, time_index):
                return parse_expression(expression_str, fs, time_index)

            # Expressions are indexed over time so they evaluate per step.
            component = Expression(fs.time, rule=expression_rule)
            fs.add_component(component_name, component)
            fs.properties_map.add(
                spec_config["id"], component, component.name, unknown_units=True
            )
        except ExpressionParsingError as e:
            # Chain the original error so the root cause stays visible.
            raise ExpressionParsingError(
                f"{e} when parsing expression '{expression_str}' for {component_name}"
            ) from e

    # load constraints (expressions for specific property infos)
    # can only handle equality constraints for now
    for component, expr_str, id in fs.constraint_exprs:
        # Only scalar or time-indexed components can be constrained.
        if component.index_set().dimen != 0 and component.index_set() != fs.time:
            raise ExpressionParsingError(f"Cannot add constraint for {component}: only time-indexed components are supported.")
        try:
            def constraint_rule(blk, time_index):
                expression = parse_expression(expr_str, fs, time_index)
                # make sure the units of the expression are the same as the component
                u1, u2 = get_attached_unit(component), get_attached_unit(expression)
                if not check_units_equivalent(u1, u2):
                    raise ValueError(
                        f"Failed to add constraint for {component}: units do not match (expected {u1}, got {u2})"
                    )
                # Convert to the expression's units so both sides agree exactly.
                return pyunits.convert(component[time_index], to_units=u2) == expression

            c = Constraint(component.index_set(), rule=constraint_rule)
            name = f"equality_constraint_{id}"
            fs.add_component(name, c)
            add_corresponding_constraint(fs, c, id)
        except ExpressionParsingError as e:
            raise ExpressionParsingError(
                f"Failed to parse constraint expression '{expr_str}' for {component}: {e}"
            ) from e
|
|
199
|
+
|
|
200
|
+
|
|
201
|
+
def build_spec_dependency_tree(self, specs) -> tuple:
|
|
202
|
+
"""
|
|
203
|
+
Builds dependency tree for expressions based on the references in their respective expressions.
|
|
204
|
+
"""
|
|
205
|
+
dependencies = defaultdict(
|
|
206
|
+
set
|
|
207
|
+
) # Maps an expression's result_id to a set of result_ids it depends on
|
|
208
|
+
result_ids = set() # A set to track all result_ids
|
|
209
|
+
|
|
210
|
+
# Get a list of all result_id's.
|
|
211
|
+
for spec in specs:
|
|
212
|
+
result_id = spec["id"]
|
|
213
|
+
result_ids.add(result_id)
|
|
214
|
+
|
|
215
|
+
for spec in specs:
|
|
216
|
+
result_id = spec["id"]
|
|
217
|
+
expression = spec["expression"]
|
|
218
|
+
|
|
219
|
+
# Find the result_ids that this expression depends on
|
|
220
|
+
dependent_expressions = self.get_dependent_expressions(
|
|
221
|
+
expression, result_ids
|
|
222
|
+
)
|
|
223
|
+
|
|
224
|
+
if dependent_expressions:
|
|
225
|
+
# If the expression depends on another result_id, add dependency
|
|
226
|
+
for id in dependent_expressions:
|
|
227
|
+
dependencies[id].add(result_id)
|
|
228
|
+
|
|
229
|
+
return dependencies, result_ids
|
|
230
|
+
|
|
231
|
+
def get_dependent_expressions(self, expression: str, all_result_ids: set) -> list:
|
|
232
|
+
"""
|
|
233
|
+
Gets all result_ids referenced in the expression.
|
|
234
|
+
"""
|
|
235
|
+
# match result_ids starting with 'id_' followed by numbers
|
|
236
|
+
ids = re.findall(r"\b(id_\d+)\b", expression)
|
|
237
|
+
|
|
238
|
+
# Filter the referenced_ids to only include those that are in all_result_ids
|
|
239
|
+
valid_referenced_ids = []
|
|
240
|
+
for id in ids:
|
|
241
|
+
# get the numeric part after "id_" and check if it's in the all_result_ids
|
|
242
|
+
numeric_id = int(id[3:])
|
|
243
|
+
if numeric_id in all_result_ids:
|
|
244
|
+
valid_referenced_ids.append(numeric_id)
|
|
245
|
+
|
|
246
|
+
return valid_referenced_ids
|
|
247
|
+
|
|
248
|
+
def topological_sort(self, dependencies: dict, result_ids: set) -> list:
|
|
249
|
+
"""
|
|
250
|
+
Performs topological sorting on the specification dependency tree.
|
|
251
|
+
"""
|
|
252
|
+
# Track teh in-degree count for all expressions (edges coming into it)
|
|
253
|
+
in_degree = defaultdict(int)
|
|
254
|
+
|
|
255
|
+
# Count dependencies for each expression
|
|
256
|
+
for result_id in result_ids:
|
|
257
|
+
for dep in dependencies[result_id]:
|
|
258
|
+
in_degree[dep] += 1
|
|
259
|
+
|
|
260
|
+
# Initialise the queue with result_ids that have no dependencies (in-degree 0)
|
|
261
|
+
dequeue = deque(
|
|
262
|
+
[result_id for result_id in result_ids if in_degree[result_id] == 0]
|
|
263
|
+
)
|
|
264
|
+
|
|
265
|
+
sorted_result_ids = []
|
|
266
|
+
|
|
267
|
+
while dequeue:
|
|
268
|
+
result_id = dequeue.popleft()
|
|
269
|
+
sorted_result_ids.append(result_id)
|
|
270
|
+
|
|
271
|
+
# loop thtough and decrement each dependent expression's in_degree, and append it to the deque if it has an in-degree of 0.
|
|
272
|
+
for dependent_result_id in dependencies[result_id]:
|
|
273
|
+
in_degree[dependent_result_id] -= 1
|
|
274
|
+
if in_degree[dependent_result_id] == 0:
|
|
275
|
+
dequeue.append(dependent_result_id)
|
|
276
|
+
|
|
277
|
+
# If there are any result_ids left with non-zero in-degree, a cycle exists so error.
|
|
278
|
+
if len(sorted_result_ids) != len(result_ids):
|
|
279
|
+
raise ValueError(
|
|
280
|
+
"Cycle detected in the dependency graph. Check an expression does not reference itself!"
|
|
281
|
+
)
|
|
282
|
+
|
|
283
|
+
return sorted_result_ids
|
|
284
|
+
|
|
285
|
+
def initialise(self) -> None:
    """
    Expands the arcs and initialises the model.

    Unit models are initialised in sequential-decomposition order; any unit
    not reachable through the arc graph is initialised afterwards. If the
    schema sets ``disable_initialization``, arcs are still expanded and
    tears loaded, but the per-unit initialize() calls are skipped.
    """
    # check if initialisation is disabled for this scenario
    if getattr(self.schema, "disable_initialization", False):
        # NOTE: we still expand arcs and load tears below; only the
        # per-unit initialisation is skipped (see init_unit).
        print("Initialisation is disabled for this scenario.")

    # We need to "expand_arcs" to make them a bidirection link that actually imposes constraints on the model.
    TransformationFactory("network.expand_arcs").apply_to(self.model)

    self.timing.step_into("initialise_model")

    # load tear guesses (including var/constraint unfixing & deactivation and/or equality constraint deactivation)
    self.tears.load()

    tears = self.tears._tears

    def init_unit(unit):
        # Per-unit initialisation; no-op when disabled for this scenario.
        if getattr(self.schema, "disable_initialization", False):
            return

        print(f"Initializing unit {unit}")
        self.timing.add_timing(f"init_{unit.name}")
        unit.initialize(outlvl=idaeslog.INFO)

    self.timing.add_timing("setup_sequential_decomposition")
    # Use SequentialDecomposition to initialise the model
    seq = SequentialDecomposition(
        iterLim=1,
    )
    seq.set_tear_set(tears)
    # use create_graph to get the order of sequential decomposition, and also to
    # find any units that are not connected to the sequential decomposition
    G = seq.create_graph(self.model)
    order = seq.calculation_order(G)
    seq_blocks = []
    for o in order:
        seq_blocks.append(o[0])
    print("Order of initialisation:", [blk.name for blk in seq_blocks])
    # log the tear guesses (current value of each var on the tear's
    # destination port) before running the decomposition
    for arc in tears:
        port = arc.destination
        # (removed a dead `guesses = {}` assignment that was immediately overwritten)
        guesses = {key: get_value(var) for key, var in port.vars.items()}
        print(f"Guess for {port}: {guesses}")

    self.timing.step_into("run_sequential_decomposition")

    # sequential decomposition completes when all vars across port
    # equalities are within tol of each other
    seq.options["tol"] = 1e-2
    seq.run(self.model, init_unit)

    self.timing.step_out()
    self.timing.add_timing("initialise_disconnected_units")
    # Initialise any unit model that is not connected to the sequential decomposition
    for blk in self.model.fs.component_data_objects(
        Block, descend_into=False, active=True
    ):
        ports = list(blk.component_objects(Port, descend_into=False))
        if len(ports) == 0:
            continue  # if the block has no ports, then it is not a unit model
        if blk in seq_blocks:
            continue  # already initialised by sequential decomposition
        init_unit(blk)

    # unfix guess vars
    deactivate_fixed_guesses(self.model.fs.guess_vars)

    self.timing.step_out()
|
|
362
|
+
|
|
363
|
+
def serialise(self) -> SolvedFlowsheetSchema:
    """
    Serialise the solved flowsheet: the properties map plus a per-unit
    snapshot of variable values (usable later as initial guesses).
    """
    self.timing.add_timing("serialise_model")

    # Snapshot every unit model's current variable values, keyed by id.
    initial_values = {
        str(unit_id): to_json(unit, return_dict=True, wts=StoreSpec.value())
        for unit_id, unit in self.unit_models._unit_models.items()
    }

    return SolvedFlowsheetSchema(
        id=self.schema.id,
        properties=serialize_properties_map(self.model.fs),
        initial_values=initial_values,
    )
|
|
377
|
+
|
|
378
|
+
def report_statistics(self) -> None:
    """
    Print summary statistics (variables, constraints, DOF) for the model.
    """
    report_statistics(self.model)
    # The DiagnosticsToolbox structural checks were dropped from this
    # report because they take too long to run; diagnose_problems() still
    # runs the full toolbox when explicitly requested.
|
|
388
|
+
|
|
389
|
+
def diagnose_problems(self) -> None:
    """
    Run the IDAES DiagnosticsToolbox over the model and print a full
    structural and numerical report to stdout.
    """
    print("=== DIAGNOSTICS ===")
    report_statistics(self.model)
    toolbox = DiagnosticsToolbox(self.model)
    # Structural checks: over-/under-constrained variable sets.
    toolbox.report_structural_issues()
    toolbox.display_overconstrained_set()
    toolbox.display_underconstrained_set()
    try:
        toolbox.compute_infeasibility_explanation()
    except Exception as e:
        print(f"{e}")  # error is probably because it is feasible
    # Numerical checks: scaling, near-parallel constraints, bound violations.
    toolbox.report_numerical_issues()
    toolbox.display_near_parallel_constraints()
    toolbox.display_variables_at_or_outside_bounds()
    print("=== END DIAGNOSTICS ===")
|
|
405
|
+
|
|
406
|
+
def degrees_of_freedom(self) -> int:
    """
    Return the model's degrees of freedom as a plain int.
    """
    dof = degrees_of_freedom(self.model)
    return int(dof)
|
|
411
|
+
|
|
412
|
+
def check_model_valid(self) -> None:
|
|
413
|
+
"""
|
|
414
|
+
Checks if the model is valid by checking the
|
|
415
|
+
degrees of freedom. Will raise an exception if
|
|
416
|
+
the model is not valid.
|
|
417
|
+
"""
|
|
418
|
+
self.timing.add_timing("check_model_valid")
|
|
419
|
+
degrees_of_freedom = self.degrees_of_freedom()
|
|
420
|
+
if degrees_of_freedom != 0:
|
|
421
|
+
#self.model.display() # prints the vars/constraints for debugging
|
|
422
|
+
raise Exception(
|
|
423
|
+
f"Degrees of freedom is not 0. Degrees of freedom: {degrees_of_freedom}"
|
|
424
|
+
)
|
|
425
|
+
|
|
426
|
+
def solve(self) -> None:
    """
    Solves the model using the solver named in the schema.

    If the solver terminates non-optimally, infeasibilities are printed
    to aid debugging before assert_optimal_termination raises. A
    "No variables appear" ValueError from Pyomo (empty model) is
    swallowed deliberately.
    """
    self.timing.add_timing("solve_model")
    print("=== Starting Solve ===")

    opt = SolverFactory(self.schema.solver_option)

    # conopt manages its own iteration limit; cap the other solvers
    if self.schema.solver_option != "conopt":
        opt.options["max_iter"] = 1000
    try:
        res = opt.solve(self.model, tee=True)
        if res.solver.termination_condition != TerminationCondition.optimal:
            # surface which constraints are infeasible before failing
            print_infeasibilities(self.model.fs.properties_map)
        assert_optimal_termination(res)
    except ValueError as e:
        if str(e).startswith("No variables appear"):
            # Empty/trivial models raise this; treat as solved.
            # https://github.com/Pyomo/pyomo/pull/3445
            pass
        else:
            # bare raise preserves the original traceback (was 'raise e')
            raise
|
|
449
|
+
|
|
450
|
+
def optimize(self) -> None:
    """
    Run the configured optimizations.

    Builds a single combined objective (ipopt does not support multiple
    objectives), unfixes the requested decision variables, adds any
    upper/lower bound constraints, then re-solves the model.
    """
    if self.schema.optimizations is None or self.schema.optimizations == []:
        return

    self.timing.add_timing("optimize_model")
    print("=== Starting Optimization ===")

    # Combine all objectives into one expression: add terms to minimize,
    # subtract terms to maximize.
    objective_expr = 0
    for schema in self.schema.optimizations:
        # TODO: This is assuming optimisation is run on a steady-state
        # simulation. We need to change how this works to handle dynamics.
        objective_component = self.model.fs.properties_map.get_component(schema.objective)
        # The objective component is a scalar in non-dynamic models; in a
        # dynamic model it is indexed across all time steps. Summing
        # weights every time step equally.
        objective = sum(objective_component.values())
        if schema.sense == "minimize":
            objective_expr += objective
        else:
            objective_expr -= objective

        # Unfix the relevant vars (adds degrees of freedom).
        for dof_info in schema.unfixed_variables:
            # `prop_id` avoids shadowing the builtin `id`.
            prop_id = dof_info.id  # Id of the propertyValue for this degree of freedom
            var: PropertyComponent = self.model.fs.properties_map.get(prop_id)

            # TODO: may need to handle deactivating constraints,
            # for expressions that are constrained (instead of state vars)
            constraint = var.corresponding_constraint
            if constraint is not None:
                # TODO: better typing for constraints
                if isinstance(constraint, (ScalarConstraint, IndexedConstraint)):
                    constraint.deactivate()
                else:
                    deactivate_components(constraint)
            else:
                for i in var.component.values():
                    if isinstance(i, ScalarVar):
                        i.unfix()
                    # Otherwise it is ExpressionData — already an expression
                    # and doesn't need unfixing (any constraint for it was
                    # handled above).

            # TODO: set attributes for upper and lower bounds of property
            # infos, i.e. use propertyinfo id.
            # Apply optional bounds on the (possibly indexed) component.
            upper_bound = dof_info.upper_bound
            lower_bound = dof_info.lower_bound
            comp = var.component

            if upper_bound is not None:
                # Bind comp/ub as defaults so the rule does not capture the
                # loop variables by reference (late-binding closure pitfall).
                def upper_bound_rule(model, index, comp=comp, ub=upper_bound):
                    return comp[index] <= ub
                upper_bound_constraint = Constraint(comp.index_set(), rule=upper_bound_rule)
                setattr(self.model, "upper_bound_" + str(prop_id), upper_bound_constraint)

            if lower_bound is not None:
                def lower_bound_rule(model, index, comp=comp, lb=lower_bound):
                    return comp[index] >= lb
                lower_bound_constraint = Constraint(comp.index_set(), rule=lower_bound_rule)
                setattr(self.model, "lower_bound_" + str(prop_id), lower_bound_constraint)

    # add the combined objective to the model
    self.model.objective = Objective(expr=objective_expr, sense=minimize)

    # solve the model with the objective
    opt = SolverFactory(self.schema.solver_option)

    # conopt manages its own iteration limit; cap the other solvers.
    if self.schema.solver_option != "conopt":
        opt.options["max_iter"] = 1000

    try:
        res = opt.solve(self.model, tee=True)
        assert_optimal_termination(res)
    except ValueError as e:
        if str(e).startswith("No variables appear"):
            # https://github.com/Pyomo/pyomo/pull/3445
            pass
        else:
            # Bare raise preserves the original traceback.
            raise
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
# Many modules reference the flowsheet manager, which would create a
# circular import if each of them imported it directly. The cycle only
# matters for type hints, so we use the standard TYPE_CHECKING trick:
# the real class is imported only while a static type checker runs, and
# at runtime the name is just a placeholder string.

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from .flowsheet_manager import FlowsheetManager
else:
    FlowsheetManager = "FlowsheetManager"

# Because the real import only exists for the type checker, annotations
# must use the name as a forward reference (a quoted string):
# https://peps.python.org/pep-0484/#forward-references
#
# e.g.
#   from .flowsheet_manager_type import FlowsheetManager
#   def __init__(self, flowsheet_manager: "FlowsheetManager"):
#
# rather than
#   from .flowsheet_manager import FlowsheetManager
#   def __init__(self, flowsheet_manager: FlowsheetManager):
#
|