processforge 0.2.4__tar.gz → 0.2.7__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64):
  1. {processforge-0.2.4/src/processforge.egg-info → processforge-0.2.7}/PKG-INFO +3 -1
  2. {processforge-0.2.4 → processforge-0.2.7}/pyproject.toml +4 -1
  3. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/__init__.py +2 -0
  4. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/eo/flowsheet.py +28 -0
  5. processforge-0.2.7/src/processforge/fmu/__init__.py +3 -0
  6. processforge-0.2.7/src/processforge/fmu/_fmi_vars.py +143 -0
  7. processforge-0.2.7/src/processforge/fmu/builder.py +190 -0
  8. processforge-0.2.7/src/processforge/fmu/slave_template.py +252 -0
  9. processforge-0.2.7/src/processforge/provenance.py +123 -0
  10. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/result.py +60 -3
  11. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/simulate.py +40 -1
  12. {processforge-0.2.4 → processforge-0.2.7/src/processforge.egg-info}/PKG-INFO +3 -1
  13. {processforge-0.2.4 → processforge-0.2.7}/src/processforge.egg-info/SOURCES.txt +5 -0
  14. {processforge-0.2.4 → processforge-0.2.7}/src/processforge.egg-info/requires.txt +3 -0
  15. {processforge-0.2.4 → processforge-0.2.7}/LICENSE +0 -0
  16. {processforge-0.2.4 → processforge-0.2.7}/MANIFEST.in +0 -0
  17. {processforge-0.2.4 → processforge-0.2.7}/README.md +0 -0
  18. {processforge-0.2.4 → processforge-0.2.7}/flowsheets/archive/example_dynamic_hybrid.json +0 -0
  19. {processforge-0.2.4 → processforge-0.2.7}/flowsheets/archive/example_dynamic_tank.json +0 -0
  20. {processforge-0.2.4 → processforge-0.2.7}/flowsheets/archive/example_flash.json +0 -0
  21. {processforge-0.2.4 → processforge-0.2.7}/flowsheets/archive/hydraulic_chain.json +0 -0
  22. {processforge-0.2.4 → processforge-0.2.7}/flowsheets/closed-loop-chain.json +0 -0
  23. {processforge-0.2.4 → processforge-0.2.7}/flowsheets/hydraulic-chain.json +0 -0
  24. {processforge-0.2.4 → processforge-0.2.7}/setup.cfg +0 -0
  25. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/_schema.py +0 -0
  26. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/eo/__init__.py +0 -0
  27. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/eo/backends/__init__.py +0 -0
  28. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/eo/backends/base.py +0 -0
  29. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/eo/backends/casadi_backend.py +0 -0
  30. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/eo/backends/pyomo_backend.py +0 -0
  31. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/eo/backends/scipy_backend.py +0 -0
  32. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/eo/jacobian.py +0 -0
  33. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/eo/mixin.py +0 -0
  34. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/eo/solver.py +0 -0
  35. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/eo/stream_var.py +0 -0
  36. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/eo/units/__init__.py +0 -0
  37. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/eo/units/flash_eo.py +0 -0
  38. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/eo/units/heater_eo.py +0 -0
  39. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/eo/units/pipes_eo.py +0 -0
  40. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/eo/units/pump_eo.py +0 -0
  41. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/eo/units/strainer_eo.py +0 -0
  42. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/eo/units/valve_eo.py +0 -0
  43. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/flowsheet.py +0 -0
  44. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/schemas/__init__.py +0 -0
  45. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/schemas/flowsheet_schema.json +0 -0
  46. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/solver.py +0 -0
  47. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/thermo.py +0 -0
  48. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/units/__init__.py +0 -0
  49. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/units/flash.py +0 -0
  50. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/units/heater.py +0 -0
  51. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/units/pipes.py +0 -0
  52. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/units/pump.py +0 -0
  53. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/units/solver.py +0 -0
  54. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/units/strainer.py +0 -0
  55. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/units/tank.py +0 -0
  56. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/units/valve.py +0 -0
  57. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/utils/__init__.py +0 -0
  58. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/utils/flowsheet_diagram.py +0 -0
  59. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/utils/validate_flowsheet.py +0 -0
  60. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/utils/validation.py +0 -0
  61. {processforge-0.2.4 → processforge-0.2.7}/src/processforge/validate.py +0 -0
  62. {processforge-0.2.4 → processforge-0.2.7}/src/processforge.egg-info/dependency_links.txt +0 -0
  63. {processforge-0.2.4 → processforge-0.2.7}/src/processforge.egg-info/entry_points.txt +0 -0
  64. {processforge-0.2.4 → processforge-0.2.7}/src/processforge.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: processforge
3
- Version: 0.2.4
3
+ Version: 0.2.7
4
4
  Summary: A Python-based process simulation framework for chemical engineering applications.
5
5
  Author-email: Process Forge Team <team@processforge.dev>
6
6
  License-Expression: BSD-3-Clause
@@ -34,6 +34,8 @@ Requires-Dist: pyomo>=6.7; extra == "eo"
34
34
  Provides-Extra: eo-casadi
35
35
  Requires-Dist: pyomo>=6.7; extra == "eo-casadi"
36
36
  Requires-Dist: casadi>=3.6; extra == "eo-casadi"
37
+ Provides-Extra: fmu
38
+ Requires-Dist: pythonfmu>=0.6; extra == "fmu"
37
39
  Provides-Extra: dev
38
40
  Requires-Dist: pytest>=7.0; extra == "dev"
39
41
  Requires-Dist: black; extra == "dev"
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "processforge"
7
- version = "0.2.4"
7
+ version = "0.2.7"
8
8
  description = "A Python-based process simulation framework for chemical engineering applications."
9
9
  readme = "README.md"
10
10
  license = "BSD-3-Clause"
@@ -45,6 +45,9 @@ eo-casadi = [
45
45
  "pyomo>=6.7",
46
46
  "casadi>=3.6",
47
47
  ]
48
+ fmu = [
49
+ "pythonfmu>=0.6",
50
+ ]
48
51
  dev = [
49
52
  "pytest>=7.0",
50
53
  "black",
@@ -11,6 +11,7 @@ Provides steady-state and dynamic process simulation capabilities including:
11
11
  """
12
12
 
13
13
  from .flowsheet import Flowsheet
14
+ from .provenance import build_run_info
14
15
  from .solver import Solver
15
16
  from .thermo import get_enthalpy_molar, get_Cp_molar, get_K_values, rachford_rice
16
17
  from .validate import validate_flowsheet
@@ -45,6 +46,7 @@ __all__ = [
45
46
  "plot_results",
46
47
  "plot_timeseries",
47
48
  "save_results_zarr",
49
+ "build_run_info",
48
50
  "Pump",
49
51
  "Valve",
50
52
  "Strainer",
@@ -3,6 +3,7 @@ from __future__ import annotations
3
3
 
4
4
  from copy import deepcopy
5
5
 
6
+ import numpy as np
6
7
  from loguru import logger
7
8
 
8
9
  from .jacobian import GlobalJacobianManager
@@ -64,12 +65,19 @@ class EOFlowsheet:
64
65
  """
65
66
  Build and solve the EO system.
66
67
 
68
+ After this method returns, ``self.x0`` holds the initial-guess vector
69
+ and ``self.var_names`` holds a human-readable label for each element —
70
+ both can be passed directly to
71
+ :func:`processforge.provenance.build_run_info` for reproducibility.
72
+
67
73
  Returns:
68
74
  Stream result dict in the same format as ``Flowsheet.run()``:
69
75
  ``{stream_name: {"T": ..., "P": ..., "flowrate": ..., "z": {...}}}``
70
76
  """
71
77
  manager = self._build()
72
78
  x0 = self._warm_start(manager)
79
+ self.x0: "np.ndarray" = x0.copy()
80
+ self.var_names: list[str] = self._build_var_names(manager)
73
81
  solver = EOSolver(backend=self.backend)
74
82
  x_sol, converged, stats = solver.solve(manager, x0)
75
83
  if not converged:
@@ -172,6 +180,26 @@ class EOFlowsheet:
172
180
  comps.update(stream.get("z", {}).keys())
173
181
  return sorted(comps)
174
182
 
183
+ # ------------------------------------------------------------------
184
+ # Provenance helpers
185
+ # ------------------------------------------------------------------
186
+
187
+ def _build_var_names(self, manager: "GlobalJacobianManager") -> list[str]:
188
+ """Return a human-readable label for every scalar in the x vector.
189
+
190
+ Variable order mirrors ``GlobalJacobianManager`` layout:
191
+ ``T``, ``P``, ``flowrate``, then one entry per component ``z_<comp>``
192
+ for each registered stream in registration order.
193
+ """
194
+ suffixes = ["T", "P", "flowrate"] + [
195
+ f"z_{c}" for c in manager.components
196
+ ]
197
+ names: list[str] = []
198
+ for stream_name in manager._streams:
199
+ for suffix in suffixes:
200
+ names.append(f"{stream_name}/{suffix}")
201
+ return names
202
+
175
203
  # ------------------------------------------------------------------
176
204
  # Warm-start
177
205
  # ------------------------------------------------------------------
@@ -0,0 +1,3 @@
1
+ from .builder import build_fmu
2
+
3
+ __all__ = ["build_fmu"]
@@ -0,0 +1,143 @@
1
+ """FMI variable spec generation for ProcessForge FMU export."""
2
+ from __future__ import annotations
3
+
4
+ import re
5
+
6
+
7
+ def _sanitize_name(s: str) -> str:
8
+ """Replace characters invalid in Python identifiers with underscores."""
9
+ return re.sub(r"[^a-zA-Z0-9_]", "_", s)
10
+
11
+
12
+ def get_fmi_variable_specs(
13
+ feed_streams: list[str],
14
+ output_streams: list[str],
15
+ components: list[str],
16
+ unit_params: dict[str, dict],
17
+ config: dict,
18
+ mode: str,
19
+ ) -> list[dict]:
20
+ """Return ordered list of FMI variable spec dicts for a flowsheet.
21
+
22
+ Each dict contains:
23
+ attr_name – Python attribute name on the slave instance
24
+ initial_value – float initial value
25
+ causality – "input" | "output" | "parameter"
26
+ variability – "continuous" | "fixed"
27
+ description – human-readable string
28
+ """
29
+ specs: list[dict] = []
30
+
31
+ # --- Inputs: feed stream boundary conditions ---
32
+ for stream_name in feed_streams:
33
+ stream_cfg = config["streams"][stream_name]
34
+ safe_s = _sanitize_name(stream_name)
35
+
36
+ specs.append({
37
+ "attr_name": f"feed_{safe_s}_T",
38
+ "initial_value": float(stream_cfg.get("T", 298.15)),
39
+ "causality": "input",
40
+ "variability": "continuous",
41
+ "description": f"Temperature of feed stream '{stream_name}' [K]",
42
+ })
43
+ specs.append({
44
+ "attr_name": f"feed_{safe_s}_P",
45
+ "initial_value": float(stream_cfg.get("P", 101325.0)),
46
+ "causality": "input",
47
+ "variability": "continuous",
48
+ "description": f"Pressure of feed stream '{stream_name}' [Pa]",
49
+ })
50
+ specs.append({
51
+ "attr_name": f"feed_{safe_s}_flowrate",
52
+ "initial_value": float(stream_cfg.get("flowrate", 1.0)),
53
+ "causality": "input",
54
+ "variability": "continuous",
55
+ "description": f"Molar flowrate of feed stream '{stream_name}' [mol/s]",
56
+ })
57
+ z_cfg = stream_cfg.get("z", {})
58
+ for comp in components:
59
+ safe_c = _sanitize_name(comp)
60
+ specs.append({
61
+ "attr_name": f"feed_{safe_s}_z_{safe_c}",
62
+ "initial_value": float(z_cfg.get(comp, 0.0)),
63
+ "causality": "input",
64
+ "variability": "continuous",
65
+ "description": f"Mole fraction of {comp} in feed stream '{stream_name}'",
66
+ })
67
+
68
+ # --- Outputs: calculated stream properties ---
69
+ for stream_name in output_streams:
70
+ safe_s = _sanitize_name(stream_name)
71
+
72
+ specs.append({
73
+ "attr_name": f"out_{safe_s}_T",
74
+ "initial_value": 0.0,
75
+ "causality": "output",
76
+ "variability": "continuous",
77
+ "description": f"Temperature of stream '{stream_name}' [K]",
78
+ })
79
+ specs.append({
80
+ "attr_name": f"out_{safe_s}_P",
81
+ "initial_value": 0.0,
82
+ "causality": "output",
83
+ "variability": "continuous",
84
+ "description": f"Pressure of stream '{stream_name}' [Pa]",
85
+ })
86
+ specs.append({
87
+ "attr_name": f"out_{safe_s}_flowrate",
88
+ "initial_value": 0.0,
89
+ "causality": "output",
90
+ "variability": "continuous",
91
+ "description": f"Molar flowrate of stream '{stream_name}' [mol/s]",
92
+ })
93
+ for comp in components:
94
+ safe_c = _sanitize_name(comp)
95
+ specs.append({
96
+ "attr_name": f"out_{safe_s}_z_{safe_c}",
97
+ "initial_value": 0.0,
98
+ "causality": "output",
99
+ "variability": "continuous",
100
+ "description": f"Mole fraction of {comp} in stream '{stream_name}'",
101
+ })
102
+
103
+ # --- Parameters: unit design values (fixed for FMU lifetime) ---
104
+ for unit_name, params in unit_params.items():
105
+ safe_u = _sanitize_name(unit_name)
106
+ for key, value in params.items():
107
+ if not isinstance(value, (int, float)):
108
+ continue
109
+ safe_k = _sanitize_name(key)
110
+ specs.append({
111
+ "attr_name": f"param_{safe_u}_{safe_k}",
112
+ "initial_value": float(value),
113
+ "causality": "parameter",
114
+ "variability": "fixed",
115
+ "description": f"Parameter '{key}' of unit '{unit_name}'",
116
+ })
117
+
118
+ # --- Tank state outputs (dynamic mode only) ---
119
+ if mode == "dynamic":
120
+ for unit_name, unit_cfg in config["units"].items():
121
+ if unit_cfg.get("type") != "Tank":
122
+ continue
123
+ safe_u = _sanitize_name(unit_name)
124
+ initial_T = float(unit_cfg.get("initial_T", 298.15))
125
+ specs.append({
126
+ "attr_name": f"state_{safe_u}_T",
127
+ "initial_value": initial_T,
128
+ "causality": "output",
129
+ "variability": "continuous",
130
+ "description": f"Tank temperature in unit '{unit_name}' [K]",
131
+ })
132
+ initial_n = unit_cfg.get("initial_n", {})
133
+ for comp in components:
134
+ safe_c = _sanitize_name(comp)
135
+ specs.append({
136
+ "attr_name": f"state_{safe_u}_n_{safe_c}",
137
+ "initial_value": float(initial_n.get(comp, 0.0)),
138
+ "causality": "output",
139
+ "variability": "continuous",
140
+ "description": f"Molar holdup of {comp} in tank '{unit_name}' [mol]",
141
+ })
142
+
143
+ return specs
@@ -0,0 +1,190 @@
1
+ """Build a PythonFMU co-simulation FMU from a ProcessForge flowsheet config."""
2
+ from __future__ import annotations
3
+
4
+ import os
5
+ import re
6
+ import shutil
7
+ import subprocess
8
+ import tempfile
9
+
10
+ from ..utils.validate_flowsheet import validate_flowsheet
11
+ from ._fmi_vars import _sanitize_name
12
+ from .slave_template import render_slave_source
13
+
14
+
15
+ def build_fmu(
16
+ config_path: str,
17
+ output_dir: str = "outputs",
18
+ backend: str = "scipy",
19
+ ) -> str:
20
+ """Build an FMI 2.0 co-simulation FMU from a ProcessForge flowsheet JSON.
21
+
22
+ Args:
23
+ config_path: Path to the flowsheet JSON file.
24
+ output_dir: Directory where the ``.fmu`` file will be written.
25
+ backend: EO solver backend for steady-state mode
26
+ (``"scipy"``, ``"pyomo"``, or ``"casadi"``).
27
+
28
+ Returns:
29
+ Absolute path to the generated ``.fmu`` file.
30
+
31
+ Raises:
32
+ RuntimeError: If ``pythonfmu`` is not installed or the build fails.
33
+ """
34
+ if not shutil.which("pythonfmu"):
35
+ raise RuntimeError(
36
+ "PythonFMU is not installed or not on PATH. "
37
+ "Install it with: pip install processforge[fmu]"
38
+ )
39
+
40
+ config = validate_flowsheet(config_path)
41
+ interface = _analyze_config(config)
42
+ slave_class_name = _get_slave_class_name(config, config_path)
43
+
44
+ os.makedirs(output_dir, exist_ok=True)
45
+
46
+ with tempfile.TemporaryDirectory() as staging_dir:
47
+ # Place the config JSON in the staging directory so PythonFMU bundles
48
+ # it into the FMU's resources/ directory.
49
+ config_staging = os.path.join(staging_dir, "flowsheet_config.json")
50
+ shutil.copy(config_path, config_staging)
51
+
52
+ slave_source = render_slave_source(
53
+ slave_class_name=slave_class_name,
54
+ mode=interface["mode"],
55
+ backend=backend,
56
+ feed_streams=interface["feed_streams"],
57
+ output_streams=interface["output_streams"],
58
+ components=interface["components"],
59
+ unit_params=interface["unit_params"],
60
+ config=config,
61
+ )
62
+
63
+ slave_py = os.path.join(staging_dir, f"{slave_class_name}.py")
64
+ with open(slave_py, "w") as f:
65
+ f.write(slave_source)
66
+
67
+ try:
68
+ result = subprocess.run(
69
+ [
70
+ "pythonfmu",
71
+ "build",
72
+ "-f", slave_py,
73
+ "-d", os.path.abspath(output_dir),
74
+ config_staging,
75
+ ],
76
+ check=True,
77
+ capture_output=True,
78
+ text=True,
79
+ )
80
+ except subprocess.CalledProcessError as exc:
81
+ raise RuntimeError(
82
+ f"pythonfmu build failed:\n{exc.stderr}"
83
+ ) from exc
84
+
85
+ fmu_path = os.path.abspath(
86
+ os.path.join(output_dir, f"{slave_class_name}.fmu")
87
+ )
88
+ return fmu_path
89
+
90
+
91
+ # ---------------------------------------------------------------------------
92
+ # Config analysis
93
+ # ---------------------------------------------------------------------------
94
+
95
def _analyze_config(config: dict) -> dict:
    """Extract the FMI interface description from a validated config dict.

    Args:
        config: Validated flowsheet configuration dict.

    Returns:
        Dict with keys:
            feed_streams   -- feed stream names (from ``config["streams"]``)
            output_streams -- unit outlet streams that are not feeds,
                              deduplicated in unit-definition order
            components     -- sorted list of component names
            unit_params    -- {unit_name: {param_key: value}} numeric params
            mode           -- "steady" or "dynamic"
    """
    feed_streams: list[str] = list(config["streams"].keys())
    feed_set = set(feed_streams)

    # Output streams: every unit outlet that is not a feed, deduplicated
    # while preserving the insertion order of config["units"].
    # (Previously this was computed twice — once unordered via an
    # intermediate set, then again ordered; the first pass was dead code.)
    seen: set[str] = set()
    output_streams: list[str] = []
    for unit_cfg in config["units"].values():
        for outlet in _get_outlets(unit_cfg):
            if outlet not in feed_set and outlet not in seen:
                output_streams.append(outlet)
                seen.add(outlet)

    # Component discovery — same logic as EOFlowsheet._collect_components()
    comp_set: set[str] = set()
    for stream in config["streams"].values():
        comp_set.update(stream.get("z", {}).keys())
    components = sorted(comp_set)

    # Unit parameters — numeric config keys, excluding topology keys
    _topology_keys = {"type", "in", "out", "out_liq", "out_vap"}
    unit_params: dict[str, dict] = {}
    for unit_name, unit_cfg in config["units"].items():
        params = {
            k: v
            for k, v in unit_cfg.items()
            if k not in _topology_keys and isinstance(v, (int, float))
        }
        if params:
            unit_params[unit_name] = params

    # Simulation mode — any Tank forces "dynamic" stepping
    mode = config.get("simulation", {}).get("mode", "steady")
    if any(cfg.get("type") == "Tank" for cfg in config["units"].values()):
        mode = "dynamic"

    return {
        "feed_streams": feed_streams,
        "output_streams": output_streams,
        "components": components,
        "unit_params": unit_params,
        "mode": mode,
    }
158
+
159
+
160
+ def _get_outlets(unit_cfg: dict) -> list[str]:
161
+ """Return outlet stream name(s) — handles Flash (out_vap + out_liq)."""
162
+ if unit_cfg.get("type") == "Flash":
163
+ outlets = []
164
+ if unit_cfg.get("out_liq"):
165
+ outlets.append(unit_cfg["out_liq"])
166
+ if unit_cfg.get("out_vap"):
167
+ outlets.append(unit_cfg["out_vap"])
168
+ return outlets
169
+ out = unit_cfg.get("out")
170
+ return [out] if out else []
171
+
172
+
173
+ def _get_slave_class_name(config: dict, config_path: str) -> str:
174
+ """Derive a valid Python class name for the FMU slave."""
175
+ name = config.get("metadata", {}).get("name", "")
176
+ if not name:
177
+ name = os.path.splitext(os.path.basename(config_path))[0]
178
+
179
+ # Convert to PascalCase-safe identifier
180
+ name = re.sub(r"[^a-zA-Z0-9]", "_", name)
181
+ name = re.sub(r"_+", "_", name).strip("_")
182
+ # Ensure starts with a letter
183
+ if name and name[0].isdigit():
184
+ name = "FMU_" + name
185
+ if not name:
186
+ name = "ProcessForgeFMU"
187
+
188
+ # PascalCase: capitalise each word segment
189
+ name = "".join(part.capitalize() for part in name.split("_"))
190
+ return name or "ProcessForgeFMU"
@@ -0,0 +1,252 @@
1
+ """Generate per-flowsheet FMU slave source code for PythonFMU."""
2
+ from __future__ import annotations
3
+
4
+ import json
5
+
6
+ from ._fmi_vars import _sanitize_name, get_fmi_variable_specs
7
+
8
+
9
+ def render_slave_source(
10
+ slave_class_name: str,
11
+ mode: str,
12
+ backend: str,
13
+ feed_streams: list[str],
14
+ output_streams: list[str],
15
+ components: list[str],
16
+ unit_params: dict[str, dict],
17
+ config: dict,
18
+ ) -> str:
19
+ """Return complete Python source for the per-flowsheet FMU slave.
20
+
21
+ The generated file is stand-alone: ``_sanitize_name`` is embedded so the
22
+ slave does not depend on processforge internals for name resolution.
23
+ """
24
+ specs = get_fmi_variable_specs(
25
+ feed_streams, output_streams, components, unit_params, config, mode
26
+ )
27
+
28
+ lines: list[str] = []
29
+
30
+ # ------------------------------------------------------------------ header
31
+ lines += [
32
+ "# AUTO-GENERATED by processforge.fmu.slave_template — DO NOT EDIT",
33
+ "from __future__ import annotations",
34
+ "import json",
35
+ "import os",
36
+ "import re",
37
+ "from copy import deepcopy",
38
+ "from pythonfmu import Fmi2Slave, Fmi2Causality, Fmi2Variability, Real",
39
+ "",
40
+ "",
41
+ "def _sanitize_name(s: str) -> str:",
42
+ ' return re.sub(r"[^a-zA-Z0-9_]", "_", s)',
43
+ "",
44
+ "",
45
+ f"class {slave_class_name}(Fmi2Slave):",
46
+ "",
47
+ " def __init__(self, **kwargs):",
48
+ " super().__init__(**kwargs)",
49
+ " _cfg_path = os.path.join(self.resources, 'flowsheet_config.json')",
50
+ " with open(_cfg_path) as _f:",
51
+ " self._config = json.load(_f)",
52
+ "",
53
+ ]
54
+
55
+ # -------------------------------------------------------- instance attrs
56
+ lines.append(" # --- instance attribute initial values ---")
57
+ for spec in specs:
58
+ lines.append(f" self.{spec['attr_name']} = {spec['initial_value']!r}")
59
+
60
+ lines.append("")
61
+
62
+ # -------------------------------------------------------- register_variable
63
+ lines.append(" # --- FMI variable registration ---")
64
+ _causality_map = {
65
+ "input": "Fmi2Causality.input",
66
+ "output": "Fmi2Causality.output",
67
+ "parameter": "Fmi2Causality.parameter",
68
+ }
69
+ _variability_map = {
70
+ "continuous": "Fmi2Variability.continuous",
71
+ "fixed": "Fmi2Variability.fixed",
72
+ }
73
+ for spec in specs:
74
+ caus = _causality_map[spec["causality"]]
75
+ var = _variability_map[spec["variability"]]
76
+ desc = spec["description"].replace("'", "\\'")
77
+ lines.append(
78
+ f" self.register_variable(Real("
79
+ f"'{spec['attr_name']}', causality={caus}, "
80
+ f"variability={var}, "
81
+ f"description='{desc}'))"
82
+ )
83
+
84
+ lines.append("")
85
+
86
+ # -------------------------------------------------------- embedded metadata
87
+ components_repr = repr(components)
88
+ feed_streams_repr = repr(feed_streams)
89
+ output_streams_repr = repr(output_streams)
90
+ # unit_params keys used for writing params back into config
91
+ unit_param_keys_repr = repr({k: list(v.keys()) for k, v in unit_params.items()})
92
+ tank_units = [n for n, c in config["units"].items() if c.get("type") == "Tank"]
93
+ tank_units_repr = repr(tank_units)
94
+
95
+ lines += [
96
+ " self._components = " + components_repr,
97
+ " self._feed_stream_names = " + feed_streams_repr,
98
+ " self._output_stream_names = " + output_streams_repr,
99
+ " self._unit_param_keys = " + unit_param_keys_repr,
100
+ " self._tank_units = " + tank_units_repr,
101
+ "",
102
+ ]
103
+
104
+ if mode == "dynamic":
105
+ lines += _render_dynamic_init(tank_units, components)
106
+
107
+ # ---------------------------------------------------------- do_step
108
+ lines.append(" def do_step(self, current_time: float, step_size: float) -> bool:")
109
+
110
+ if mode == "steady":
111
+ lines += _render_steady_do_step(backend)
112
+ else:
113
+ lines += _render_dynamic_do_step()
114
+
115
+ lines.append("")
116
+
117
+ return "\n".join(lines)
118
+
119
+
120
+ # ---------------------------------------------------------------------------
121
+ # Steady-state do_step body
122
+ # ---------------------------------------------------------------------------
123
+
124
+ def _render_steady_do_step(backend: str) -> list[str]:
125
+ return [
126
+ f" from processforge.eo import EOFlowsheet",
127
+ " config = deepcopy(self._config)",
128
+ "",
129
+ " # Write FMI inputs → feed stream conditions",
130
+ " for _sn in self._feed_stream_names:",
131
+ " _ss = _sanitize_name(_sn)",
132
+ " config['streams'][_sn]['T'] = getattr(self, f'feed_{_ss}_T')",
133
+ " config['streams'][_sn]['P'] = getattr(self, f'feed_{_ss}_P')",
134
+ " config['streams'][_sn]['flowrate'] = getattr(self, f'feed_{_ss}_flowrate')",
135
+ " for _c in self._components:",
136
+ " _sc = _sanitize_name(_c)",
137
+ " config['streams'][_sn]['z'][_c] = getattr(self, f'feed_{_ss}_z_{_sc}')",
138
+ "",
139
+ " # Write FMI parameters → unit design values",
140
+ " for _un, _pkeys in self._unit_param_keys.items():",
141
+ " _su = _sanitize_name(_un)",
142
+ " for _pk in _pkeys:",
143
+ " _spk = _sanitize_name(_pk)",
144
+ " _attr = f'param_{_su}_{_spk}'",
145
+ " if hasattr(self, _attr):",
146
+ " config['units'][_un][_pk] = getattr(self, _attr)",
147
+ "",
148
+ f" _fs = EOFlowsheet(config, backend='{backend}')",
149
+ " try:",
150
+ " _results = _fs.run()",
151
+ " except Exception:",
152
+ " return False",
153
+ "",
154
+ " # Write solver results → FMI outputs",
155
+ " for _sn in self._output_stream_names:",
156
+ " _ss = _sanitize_name(_sn)",
157
+ " _stream = _results.get(_sn, {})",
158
+ " setattr(self, f'out_{_ss}_T', float(_stream.get('T', 0.0)))",
159
+ " setattr(self, f'out_{_ss}_P', float(_stream.get('P', 0.0)))",
160
+ " setattr(self, f'out_{_ss}_flowrate', float(_stream.get('flowrate', 0.0)))",
161
+ " for _c in self._components:",
162
+ " _sc = _sanitize_name(_c)",
163
+ " setattr(self, f'out_{_ss}_z_{_sc}',",
164
+ " float(_stream.get('z', {}).get(_c, 0.0)))",
165
+ " return True",
166
+ ]
167
+
168
+
169
+ # ---------------------------------------------------------------------------
170
+ # Dynamic __init__ tail and do_step body
171
+ # ---------------------------------------------------------------------------
172
+
173
+ def _render_dynamic_init(tank_units: list[str], components: list[str]) -> list[str]:
174
+ lines = [
175
+ " # --- Build Flowsheet once for dynamic stepping ---",
176
+ " from processforge.flowsheet import Flowsheet as _Flowsheet",
177
+ " self._flowsheet = _Flowsheet(self._config)",
178
+ " self._flowsheet.build_units()",
179
+ " self._processing_order = self._flowsheet._get_processing_order()",
180
+ "",
181
+ " # Initialise Tank state from config",
182
+ " self._tank_states = {}",
183
+ " for _un in self._tank_units:",
184
+ " _unit = self._flowsheet.units[_un]",
185
+ " _ucfg = self._config['units'][_un]",
186
+ " _init_n = _ucfg.get('initial_n', {})",
187
+ " self._tank_states[_un] = {",
188
+ " 'n': {_c: float(_init_n.get(_c, 0.0)) for _c in self._components},",
189
+ " 'T': float(_ucfg.get('initial_T', 298.15)),",
190
+ " }",
191
+ "",
192
+ ]
193
+ return lines
194
+
195
+
196
+ def _render_dynamic_do_step() -> list[str]:
197
+ return [
198
+ " # Build feed snapshots from FMI inputs",
199
+ " _current = {}",
200
+ " for _sn in self._feed_stream_names:",
201
+ " _ss = _sanitize_name(_sn)",
202
+ " _current[_sn] = {",
203
+ " 'T': getattr(self, f'feed_{_ss}_T'),",
204
+ " 'P': getattr(self, f'feed_{_ss}_P'),",
205
+ " 'flowrate': getattr(self, f'feed_{_ss}_flowrate'),",
206
+ " 'z': {_c: getattr(self, f'feed_{_ss}_z_{_sanitize_name(_c)}')",
207
+ " for _c in self._components},",
208
+ " }",
209
+ "",
210
+ " _components_set = set(self._components)",
211
+ "",
212
+ " # Process units in topological order",
213
+ " for _un in self._processing_order:",
214
+ " _unit = self._flowsheet.units[_un]",
215
+ " _ucfg = self._config['units'][_un]",
216
+ " _inlet = self._flowsheet._get_merged_inlet(_current, _ucfg)",
217
+ "",
218
+ " if _ucfg['type'] == 'Tank':",
219
+ " _outlet, self._tank_states[_un] = \\",
220
+ " self._flowsheet._integrate_tank_step(",
221
+ " _unit, _inlet, self._tank_states[_un],",
222
+ " step_size, _components_set",
223
+ " )",
224
+ " else:",
225
+ " _outlet = _unit.run(_inlet)",
226
+ "",
227
+ " _outlet_name = _ucfg['out']",
228
+ " _current[_outlet_name] = _outlet",
229
+ "",
230
+ " # Write stream results → FMI outputs",
231
+ " for _sn in self._output_stream_names:",
232
+ " _ss = _sanitize_name(_sn)",
233
+ " _stream = _current.get(_sn, {})",
234
+ " setattr(self, f'out_{_ss}_T', float(_stream.get('T', 0.0)))",
235
+ " setattr(self, f'out_{_ss}_P', float(_stream.get('P', 0.0)))",
236
+ " setattr(self, f'out_{_ss}_flowrate', float(_stream.get('flowrate', 0.0)))",
237
+ " for _c in self._components:",
238
+ " _sc = _sanitize_name(_c)",
239
+ " setattr(self, f'out_{_ss}_z_{_sc}',",
240
+ " float(_stream.get('z', {}).get(_c, 0.0)))",
241
+ "",
242
+ " # Write Tank state outputs",
243
+ " for _un, _state in self._tank_states.items():",
244
+ " _su = _sanitize_name(_un)",
245
+ " setattr(self, f'state_{_su}_T', float(_state['T']))",
246
+ " for _c in self._components:",
247
+ " _sc = _sanitize_name(_c)",
248
+ " setattr(self, f'state_{_su}_n_{_sc}',",
249
+ " float(_state['n'].get(_c, 0.0)))",
250
+ "",
251
+ " return True",
252
+ ]
@@ -0,0 +1,123 @@
1
+ """Provenance and metadata utilities for ProcessForge runs."""
2
+ from __future__ import annotations
3
+
4
+ import platform
5
+ import subprocess
6
+ import sys
7
+ from datetime import datetime, timezone
8
+
9
+ import numpy as np
10
+
11
# Distributions whose installed versions are captured in every run_info
# snapshot via ``_package_versions``.
# NOTE(review): "coolprop" is lowercase while the distribution is published
# as "CoolProp" — presumably importlib.metadata's case-insensitive name
# matching resolves it; confirm the lookup does not report "unknown".
_KEY_PACKAGES = [
    "numpy",
    "scipy",
    "zarr",
    "coolprop",
    "loguru",
    "pandas",
    "openpyxl",
    "matplotlib",
    "jsonschema",
    "graphviz",
]
23
+
24
+
25
+ def _git_hash() -> str:
26
+ """Return the current git commit hash, or 'unknown' if not in a git repo."""
27
+ try:
28
+ result = subprocess.run(
29
+ ["git", "rev-parse", "HEAD"],
30
+ capture_output=True,
31
+ text=True,
32
+ timeout=5,
33
+ )
34
+ if result.returncode == 0:
35
+ return result.stdout.strip()
36
+ except Exception:
37
+ pass
38
+ return "unknown"
39
+
40
+
41
+ def _package_versions(packages: list[str]) -> dict[str, str]:
42
+ """Return installed versions for each package (or 'unknown')."""
43
+ from importlib.metadata import PackageNotFoundError, version
44
+
45
+ versions: dict[str, str] = {}
46
+ for pkg in packages:
47
+ try:
48
+ versions[pkg] = version(pkg)
49
+ except PackageNotFoundError:
50
+ versions[pkg] = "unknown"
51
+ return versions
52
+
53
+
54
def build_run_info(
    config: dict,
    x0: np.ndarray | None = None,
    var_names: list[str] | None = None,
) -> dict:
    """Assemble a provenance ``run_info`` dict for a simulation run.

    Args:
        config: The validated flowsheet configuration dict.
        x0: Initial guess vector (EO mode) or flattened feed-stream
            initial conditions (dynamic mode).
        var_names: Human-readable label for each element of x0 (e.g.
            ``"feed/T"``, ``"feed/P"``, ``"product/z_H2O"``).

    Returns:
        A dict suitable for passing to ``save_results_zarr`` as *run_info*.
        Keys:
        - ``git_hash`` – current HEAD commit SHA
        - ``timestamp`` – ISO-8601 UTC timestamp
        - ``python_version`` – full ``sys.version`` string
        - ``platform`` – ``platform.platform()`` string
        - ``processforge_version`` – installed processforge version
        - ``mode`` – ``"steady"`` or ``"dynamic"``
        - ``backend`` – EO backend name (steady-state only)
        - ``pkg_versions`` – dict of {package: version}
        - ``x0`` – numpy float64 array or ``None``
        - ``var_names`` – list of str labels or ``None``
    """
    # Imported lazily to avoid a circular import at module load time.
    from . import __version__ as _pf_version

    sim_cfg = config.get("simulation", {})
    info: dict = {
        "git_hash": _git_hash(),
        "timestamp": datetime.now(timezone.utc).isoformat(),
        "python_version": sys.version,
        "platform": platform.platform(),
        "processforge_version": _pf_version,
        "mode": sim_cfg.get("mode", "steady"),
        "backend": sim_cfg.get("backend", "scipy"),
        "pkg_versions": _package_versions(_KEY_PACKAGES),
    }
    # Normalise the reproducibility vector: float64 array + plain str list.
    info["x0"] = None if x0 is None else np.asarray(x0, dtype=float)
    info["var_names"] = None if var_names is None else list(var_names)
    return info
97
+
98
+
99
def build_dynamic_x0(config: dict) -> tuple[np.ndarray, list[str]]:
    """Flatten feed-stream initial conditions into a reproducibility vector.

    Dynamic simulations have no single ``x0`` handed to a global solver;
    the starting state is fully determined by the feed streams in the
    config. Packing those values into a flat numpy array lets dynamic runs
    reuse the same provenance schema as EO runs.

    Returns:
        (x0_array, var_names) where ``x0_array[i]`` corresponds to
        ``var_names[i]`` (e.g. ``"feed/T"``, ``"feed/z_H2O"``).
    """
    entries: list[tuple[str, float]] = []

    for stream_name, stream in config.get("streams", {}).items():
        # Scalar stream properties first; absent ones default to 0.0.
        entries.extend(
            (f"{stream_name}/{prop}", float(stream.get(prop, 0.0)))
            for prop in ("T", "P", "flowrate")
        )
        # Composition fractions sorted by component for a stable ordering.
        entries.extend(
            (f"{stream_name}/z_{comp}", float(frac))
            for comp, frac in sorted(stream.get("z", {}).items())
        )

    names = [label for label, _ in entries]
    values = [value for _, value in entries]
    return np.asarray(values, dtype=float), names
@@ -39,8 +39,63 @@ def _store_stream(stream_group, stream_data):
39
39
  stream_group.create_dataset(key, data=arr, shape=arr.shape)
40
40
 
41
41
 
42
- def save_results_zarr(results, fname="results.zarr"):
43
- """Persist simulation results in a Zarr directory."""
42
+ def _store_run_info(root, run_info: dict) -> None:
43
+ """Persist provenance metadata in a ``run_info`` Zarr sub-group.
44
+
45
+ Layout::
46
+
47
+ run_info/
48
+ .attrs → git_hash, timestamp, python_version, platform,
49
+ processforge_version, mode, backend
50
+ pkg_versions/
51
+ .attrs → {package: version, ...}
52
+ initial_guess/
53
+ x0 – float64 array (length = n_vars)
54
+ .attrs → var_names (JSON list of string labels)
55
+ """
56
+ ri_group = root.create_group("run_info")
57
+
58
+ scalar_keys = [
59
+ "git_hash",
60
+ "timestamp",
61
+ "python_version",
62
+ "platform",
63
+ "processforge_version",
64
+ "mode",
65
+ "backend",
66
+ ]
67
+ ri_group.attrs.update(
68
+ {k: str(run_info[k]) for k in scalar_keys if k in run_info}
69
+ )
70
+
71
+ pkg_versions = run_info.get("pkg_versions") or {}
72
+ if pkg_versions:
73
+ pkg_group = ri_group.create_group("pkg_versions")
74
+ pkg_group.attrs.update({k: str(v) for k, v in pkg_versions.items()})
75
+
76
+ x0 = run_info.get("x0")
77
+ if x0 is not None:
78
+ x0_arr = np.asarray(x0, dtype=float)
79
+ ig_group = ri_group.create_group("initial_guess")
80
+ ig_group.create_dataset("x0", data=x0_arr, shape=x0_arr.shape)
81
+ var_names = run_info.get("var_names")
82
+ if var_names:
83
+ ig_group.attrs["var_names"] = list(var_names)
84
+
85
+
86
+ def save_results_zarr(results, fname="results.zarr", run_info=None):
87
+ """Persist simulation results in a Zarr directory.
88
+
89
+ Args:
90
+ results: Stream result dict as returned by ``Flowsheet.run()``
91
+ or ``EOFlowsheet.run()``.
92
+ fname: Output path for the Zarr store directory.
93
+ run_info: Optional provenance dict from
94
+ :func:`processforge.provenance.build_run_info`. When
95
+ supplied, a ``run_info`` sub-group is written inside the
96
+ store containing the git hash, package versions, and
97
+ initial-guess vector for full reproducibility.
98
+ """
44
99
  store_path = os.path.abspath(fname)
45
100
  if os.path.exists(store_path):
46
101
  if os.path.isdir(store_path):
@@ -55,6 +110,8 @@ def save_results_zarr(results, fname="results.zarr"):
55
110
  for stream_name, stream_data in results.items():
56
111
  stream_group = root.create_group(stream_name)
57
112
  _store_stream(stream_group, stream_data)
113
+ if run_info is not None:
114
+ _store_run_info(root, run_info)
58
115
  logger.info(f"Saved Zarr results to {store_path}")
59
116
  return store_path
60
117
 
@@ -190,7 +247,7 @@ def _build_dataframe_row(group, stream_name, idx, comp_names, include_time):
190
247
  def _load_dataframe_from_zarr(store_path):
191
248
  store = zarr.storage.LocalStore(store_path)
192
249
  root = zarr.open(store=store, mode="r")
193
- streams = sorted(root.group_keys())
250
+ streams = sorted(k for k in root.group_keys() if k != "run_info")
194
251
  rows = []
195
252
  components = set()
196
253
  mode = root.attrs.get("mode", "steady")
@@ -7,6 +7,7 @@ from loguru import logger
7
7
  from .utils.validate_flowsheet import validate_flowsheet
8
8
  from .flowsheet import Flowsheet
9
9
  from .eo import EOFlowsheet
10
+ from .provenance import build_dynamic_x0, build_run_info
10
11
  from .result import (
11
12
  generate_validation_excel,
12
13
  plot_results,
@@ -40,15 +41,20 @@ def _cmd_run(args):
40
41
  if is_dynamic:
41
42
  fs = Flowsheet(config)
42
43
  logger.info("=== Dynamic Results ===")
44
+ results = fs.run()
45
+ x0, var_names = build_dynamic_x0(config)
46
+ run_info = build_run_info(config, x0=x0, var_names=var_names)
43
47
  else:
44
48
  backend = sim_cfg.get("backend", "scipy")
45
49
  fs = EOFlowsheet(config, backend=backend)
46
50
  logger.info("=== Steady-State EO Results ===")
51
+ results = fs.run()
52
+ run_info = build_run_info(config, x0=fs.x0, var_names=fs.var_names)
47
53
 
48
- results = fs.run()
49
54
  zarr_path = save_results_zarr(
50
55
  results,
51
56
  os.path.join("outputs", f"{base_name}_results.zarr"),
57
+ run_info=run_info,
52
58
  )
53
59
  validation_path = os.path.join("outputs", f"{base_name}_validation.xlsx")
54
60
  generate_validation_excel(
@@ -77,6 +83,26 @@ def _cmd_validate(args):
77
83
  raise SystemExit(1)
78
84
 
79
85
 
86
def _cmd_export_fmu(args):
    """Export a flowsheet as an FMI 2.0 co-simulation FMU."""
    flowsheet_path = args.flowsheet
    if not os.path.exists(flowsheet_path):
        logger.error(f"Flowsheet file '{flowsheet_path}' not found.")
        raise SystemExit(1)

    # Imported lazily — the pythonfmu dependency is an optional extra.
    from .fmu import build_fmu

    out_dir = args.output_dir or "outputs"
    eo_backend = args.backend or "scipy"

    try:
        fmu_path = build_fmu(
            flowsheet_path, output_dir=out_dir, backend=eo_backend
        )
        logger.info(f"FMU written to: {fmu_path}")
    except Exception as e:
        logger.error(f"FMU export failed: {e}")
        raise SystemExit(1)
+
105
+
80
106
  def _cmd_diagram(args):
81
107
  """Generate a flowsheet diagram from a JSON file."""
82
108
  fname = args.flowsheet
@@ -126,6 +152,18 @@ def main():
126
152
  diagram_parser.add_argument("--output-dir", "-o", default=".", help="Output directory (default: current directory)")
127
153
  diagram_parser.add_argument("--format", "-f", default="png", choices=["png", "svg", "pdf"], help="Output format (default: png)")
128
154
 
155
+ # processforge export-fmu
156
+ fmu_parser = subparsers.add_parser("export-fmu", help="Export flowsheet as FMI 2.0 co-simulation FMU")
157
+ fmu_parser.add_argument("flowsheet", help="Path to the flowsheet JSON file")
158
+ fmu_parser.add_argument(
159
+ "--output-dir", "-o", default="outputs",
160
+ help="Directory for the output FMU (default: outputs/)",
161
+ )
162
+ fmu_parser.add_argument(
163
+ "--backend", choices=["scipy", "pyomo", "casadi"], default="scipy",
164
+ help="EO solver backend for steady-state mode (default: scipy)",
165
+ )
166
+
129
167
  args = parser.parse_args()
130
168
 
131
169
  if args.command is None:
@@ -136,6 +174,7 @@ def main():
136
174
  "run": _cmd_run,
137
175
  "validate": _cmd_validate,
138
176
  "diagram": _cmd_diagram,
177
+ "export-fmu": _cmd_export_fmu,
139
178
  }
140
179
  commands[args.command](args)
141
180
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: processforge
3
- Version: 0.2.4
3
+ Version: 0.2.7
4
4
  Summary: A Python-based process simulation framework for chemical engineering applications.
5
5
  Author-email: Process Forge Team <team@processforge.dev>
6
6
  License-Expression: BSD-3-Clause
@@ -34,6 +34,8 @@ Requires-Dist: pyomo>=6.7; extra == "eo"
34
34
  Provides-Extra: eo-casadi
35
35
  Requires-Dist: pyomo>=6.7; extra == "eo-casadi"
36
36
  Requires-Dist: casadi>=3.6; extra == "eo-casadi"
37
+ Provides-Extra: fmu
38
+ Requires-Dist: pythonfmu>=0.6; extra == "fmu"
37
39
  Provides-Extra: dev
38
40
  Requires-Dist: pytest>=7.0; extra == "dev"
39
41
  Requires-Dist: black; extra == "dev"
@@ -11,6 +11,7 @@ flowsheets/archive/hydraulic_chain.json
11
11
  src/processforge/__init__.py
12
12
  src/processforge/_schema.py
13
13
  src/processforge/flowsheet.py
14
+ src/processforge/provenance.py
14
15
  src/processforge/result.py
15
16
  src/processforge/simulate.py
16
17
  src/processforge/solver.py
@@ -40,6 +41,10 @@ src/processforge/eo/units/pipes_eo.py
40
41
  src/processforge/eo/units/pump_eo.py
41
42
  src/processforge/eo/units/strainer_eo.py
42
43
  src/processforge/eo/units/valve_eo.py
44
+ src/processforge/fmu/__init__.py
45
+ src/processforge/fmu/_fmi_vars.py
46
+ src/processforge/fmu/builder.py
47
+ src/processforge/fmu/slave_template.py
43
48
  src/processforge/schemas/__init__.py
44
49
  src/processforge/schemas/flowsheet_schema.json
45
50
  src/processforge/units/__init__.py
@@ -26,3 +26,6 @@ pyomo>=6.7
26
26
  [eo-casadi]
27
27
  pyomo>=6.7
28
28
  casadi>=3.6
29
+
30
+ [fmu]
31
+ pythonfmu>=0.6
File without changes
File without changes
File without changes
File without changes