mxlpy 0.19.0__py3-none-any.whl → 0.20.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
mxlpy/model.py CHANGED
@@ -17,13 +17,13 @@ import numpy as np
 import pandas as pd
 
 from mxlpy import fns
-from mxlpy.types import (
-    AbstractSurrogate,
-    Array,
-    Derived,
-    Reaction,
-    Readout,
-)
+from mxlpy.types import AbstractSurrogate, Array, Derived, Reaction, Readout
+
+if TYPE_CHECKING:
+    from collections.abc import Iterable, Mapping
+    from inspect import FullArgSpec
+
+    from mxlpy.types import Callable, Param, RateFn, RetType
 
 __all__ = [
     "ArityMismatchError",
@@ -34,12 +34,6 @@ __all__ = [
     "ModelCache",
 ]
 
-if TYPE_CHECKING:
-    from collections.abc import Iterable, Mapping
-    from inspect import FullArgSpec
-
-    from mxlpy.types import Callable, Param, RateFn, RetType
-
 
 @dataclass
 class Dependency:
@@ -694,6 +688,21 @@ class Model:
 
         return self
 
+    def get_unused_parameters(self) -> set[str]:
+        """Get parameters which aren't used in the model."""
+        args = set()
+        for variable in self._variables.values():
+            if isinstance(variable, Derived):
+                args.update(variable.args)
+        for derived in self._derived.values():
+            args.update(derived.args)
+        for reaction in self._reactions.values():
+            args.update(reaction.args)
+        for surrogate in self._surrogates.values():
+            args.update(surrogate.args)
+
+        return set(self._parameters).difference(args)
+
     ##########################################################################
     # Variables
     ##########################################################################
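
The new Model.get_unused_parameters helper flags parameters that no derived value, reaction, or surrogate references. A minimal usage sketch follows; the builder calls (add_variables, add_parameters, add_reaction) and their exact signatures are assumptions based on the rest of this diff, not verified against 0.20.0:

    # Sketch only: builder method names/signatures are assumed, not taken from this diff.
    from mxlpy import Model

    def v1(s: float, k1: float) -> float:
        return k1 * s

    m = Model()
    m.add_variables({"s": 1.0})
    m.add_parameters({"k1": 0.1, "k_orphan": 2.0})  # "k_orphan" is never referenced
    m.add_reaction("r1", v1, args=["s", "k1"], stoichiometry={"s": -1.0})  # assumed signature

    print(m.get_unused_parameters())  # expected: {"k_orphan"}
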
@@ -1115,10 +1124,29 @@ class Model:
         args = self.get_dependent(variables=variables, time=time)
 
         stoich = copy.deepcopy(cache.stoich_by_cpds[variable])
-        for rxn, derived in cache.dyn_stoich_by_cpds[variable].items():
+        for rxn, derived in cache.dyn_stoich_by_cpds.get(variable, {}).items():
             stoich[rxn] = float(derived.fn(*(args[i] for i in derived.args)))
         return stoich
 
+    def get_raw_stoichiometries_of_variable(
+        self, variable: str
+    ) -> dict[str, float | Derived]:
+        """Retrieve the raw stoichiometry of a specific variable.
+
+        Examples:
+            >>> model.get_stoichiometries_of_variable("x1")
+            {"v1": -1, "v2": Derived(...)}
+
+        Args:
+            variable: The name of the variable for which to retrieve the stoichiometry.
+
+        """
+        stoichs: dict[str, dict[str, float | Derived]] = {}
+        for rxn_name, rxn in self._reactions.items():
+            for cpd_name, factor in rxn.stoichiometry.items():
+                stoichs.setdefault(cpd_name, {})[rxn_name] = factor
+        return stoichs[variable]
+
     @_invalidate_cache
     def add_reaction(
         self,
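
Unlike get_stoichiometries_of_variable, which evaluates dynamic (Derived) factors against the current arguments, the new get_raw_stoichiometries_of_variable returns them unevaluated. Continuing the sketch above, under the same assumptions:

    raw = m.get_raw_stoichiometries_of_variable("s")
    print(raw)  # {"r1": -1.0}; a Derived stoichiometric factor would be returned as-is
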
mxlpy/nn/_keras.py CHANGED
@@ -1,12 +1,19 @@
-from typing import cast
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, cast
 
 import keras
 import pandas as pd
 from tqdm.keras import TqdmCallback
 
-from mxlpy.types import Array
+if TYPE_CHECKING:
+    from mxlpy.types import Array
 
-__all__ = ["LSTM", "MLP", "train"]
+__all__ = [
+    "LSTM",
+    "MLP",
+    "train",
+]
 
 
 def train(
mxlpy/nn/_torch.py CHANGED
@@ -24,7 +24,13 @@ if TYPE_CHECKING:
 
     from mxlpy.types import Array
 
-__all__ = ["DefaultDevice", "LSTM", "LossFn", "MLP", "train"]
+__all__ = [
+    "DefaultDevice",
+    "LSTM",
+    "LossFn",
+    "MLP",
+    "train",
+]
 
 DefaultDevice = torch.device("cpu")
 
mxlpy/parallel.py CHANGED
@@ -24,11 +24,14 @@ from typing import TYPE_CHECKING, Any, cast
 import pebble
 from tqdm import tqdm
 
-__all__ = ["Cache", "parallelise"]
-
 if TYPE_CHECKING:
     from collections.abc import Callable, Collection, Hashable
 
+__all__ = [
+    "Cache",
+    "parallelise",
+]
+
 
 def _pickle_name(k: Hashable) -> str:
     return f"{k}.p"
mxlpy/parameterise.py CHANGED
@@ -1,11 +1,19 @@
 """Module to parameterise models."""
 
-from pathlib import Path
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
 
-import pandas as pd
 from parameteriser.brenda.v0 import Brenda
 
-__all__ = ["get_km_and_kcat_from_brenda"]
+if TYPE_CHECKING:
+    from pathlib import Path
+
+    import pandas as pd
+
+__all__ = [
+    "get_km_and_kcat_from_brenda",
+]
 
 
 def get_km_and_kcat_from_brenda(
mxlpy/plot.py CHANGED
@@ -19,10 +19,41 @@ Functions:
 from __future__ import annotations
 
 import contextlib
+import itertools as it
+import math
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, Any, Literal, cast, overload
 
+import numpy as np
+import pandas as pd
+import seaborn as sns
 from cycler import cycler
+from matplotlib import pyplot as plt
+from matplotlib.axes import Axes
+from matplotlib.colors import (
+    LogNorm,
+    Normalize,
+    SymLogNorm,
+    colorConverter,  # type: ignore
+)
+from matplotlib.figure import Figure
+from mpl_toolkits.mplot3d import Axes3D
+
+from mxlpy.label_map import LabelMapper
+
+if TYPE_CHECKING:
+    from collections.abc import Generator, Iterable, Iterator
+
+    from matplotlib.collections import QuadMesh
+    from numpy.typing import NDArray
+
+    from mxlpy.linear_label_map import LinearLabelMapper
+    from mxlpy.model import Model
+    from mxlpy.types import Array, ArrayLike
+
 
 __all__ = [
+    "Axs",
     "Color",
     "FigAx",
     "FigAxs",
@@ -31,7 +62,10 @@ __all__ = [
     "RGBA",
     "add_grid",
     "bars",
+    "bars_autogrouped",
+    "bars_grouped",
     "context",
+    "grid_labels",
     "grid_layout",
     "heatmap",
     "heatmap_from_2d_idx",
@@ -53,37 +87,45 @@ __all__ = [
     "violins_from_2d_idx",
 ]
 
-import itertools as it
-import math
-from typing import TYPE_CHECKING, Any, Literal, cast
 
-import numpy as np
-import pandas as pd
-import seaborn as sns
-from matplotlib import pyplot as plt
-from matplotlib.axes import Axes
-from matplotlib.colors import (
-    LogNorm,
-    Normalize,
-    SymLogNorm,
-    colorConverter,  # type: ignore
-)
-from matplotlib.figure import Figure
-from mpl_toolkits.mplot3d import Axes3D
+@dataclass
+class Axs:
+    """Convenience container axes."""
 
-from mxlpy.label_map import LabelMapper
+    axs: NDArray[np.object_]
 
-if TYPE_CHECKING:
-    from collections.abc import Generator, Iterable
+    def __iter__(self) -> Iterator[Axes]:
+        """Get flat axes."""
+        yield from cast(list[Axes], self.axs.flatten())
 
-    from matplotlib.collections import QuadMesh
+    def __len__(self) -> int:
+        """Length of axes."""
+        return len(self.axs.flatten())
+
+    @overload
+    def __getitem__(self, row_col: int) -> Axes: ...
+
+    @overload
+    def __getitem__(self, row_col: slice) -> NDArray[np.object_]: ...
+
+    @overload
+    def __getitem__(self, row_col: tuple[int, int]) -> Axes: ...
+
+    @overload
+    def __getitem__(self, row_col: tuple[slice, int]) -> NDArray[np.object_]: ...
+
+    @overload
+    def __getitem__(self, row_col: tuple[int, slice]) -> NDArray[np.object_]: ...
+
+    def __getitem__(
+        self, row_col: int | slice | tuple[int | slice, int | slice]
+    ) -> Axes | NDArray[np.object_]:
+        """Get Axes or Array of Axes."""
+        return cast(Axes, self.axs[row_col])
 
-    from mxlpy.linear_label_map import LinearLabelMapper
-    from mxlpy.model import Model
-    from mxlpy.types import Array, ArrayLike
 
 type FigAx = tuple[Figure, Axes]
-type FigAxs = tuple[Figure, list[Axes]]
+type FigAxs = tuple[Figure, Axs]
 
 type Linestyle = Literal[
     "solid",
@@ -97,6 +139,7 @@ type RGB = tuple[float, float, float]
 type RGBA = tuple[float, float, float, float]
 type Color = str | RGB | RGBA
 
+
 ##########################################################################
 # Helpers
 ##########################################################################
@@ -158,7 +201,12 @@ def _partition_by_order_of_magnitude(s: pd.Series) -> list[list[str]]:
     """Partition a series into groups based on the order of magnitude of the values."""
     return [
         i.to_list()
-        for i in np.floor(np.log10(s)).to_frame(name=0).groupby(0)[0].groups.values()  # type: ignore
+        for i in s.abs()
+        .apply(np.log10)
+        .apply(np.floor)
+        .to_frame(name=0)
+        .groupby(0)[0]
+        .groups.values()  # type: ignore
     ]
 
 
@@ -258,6 +306,18 @@ def add_grid(ax: Axes) -> Axes:
     return ax
 
 
+def grid_labels(
+    axs: Axs,
+    xlabel: str | None = None,
+    ylabel: str | None = None,
+) -> None:
+    """Apply labels to left and bottom axes."""
+    for ax in axs[-1, :]:
+        ax.set_xlabel(xlabel)
+    for ax in axs[:, 0]:
+        ax.set_ylabel(ylabel)
+
+
 def rotate_xlabels(
     ax: Axes,
     rotation: float = 45,
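
The new grid_labels helper labels only the bottom row and left column of a grid of axes, which pairs naturally with sharex/sharey. A short sketch using grid_layout as shown elsewhere in this diff:

    from mxlpy import plot

    fig, axs = plot.grid_layout(4, n_cols=2, sharex=True)
    plot.grid_labels(axs, xlabel="time / h", ylabel="concentration / mM")
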
@@ -367,7 +427,6 @@ def _default_fig_ax(
 
 
 def _default_fig_axs(
-    axs: list[Axes] | None,
     *,
     ncols: int,
     nrows: int,
@@ -391,19 +450,16 @@ def _default_fig_axs(
         Figure and Axes objects for the plot.
 
     """
-    if axs is None or len(axs) == 0:
-        fig, axs_array = plt.subplots(
-            nrows=nrows,
-            ncols=ncols,
-            sharex=sharex,
-            sharey=sharey,
-            figsize=figsize,
-            squeeze=False,
-            layout="constrained",
-        )
-        axs = list(axs_array.flatten())
-    else:
-        fig = cast(Figure, axs[0].get_figure())
+    fig, axs_array = plt.subplots(
+        nrows=nrows,
+        ncols=ncols,
+        sharex=sharex,
+        sharey=sharey,
+        figsize=figsize,
+        squeeze=False,
+        layout="constrained",
+    )
+    axs = Axs(axs_array)
 
     if grid:
         for ax in axs:
@@ -433,7 +489,6 @@ def two_axes(
 ) -> FigAxs:
     """Create a figure with two axes."""
     return _default_fig_axs(
-        None,
         ncols=2,
         nrows=1,
         figsize=figsize,
@@ -448,18 +503,17 @@ def grid_layout(
     *,
     n_cols: int = 2,
     col_width: float = 3,
-    row_height: float = 4,
+    row_height: float = 2.5,
     sharex: bool = True,
     sharey: bool = False,
     grid: bool = True,
-) -> tuple[Figure, list[Axes]]:
+) -> FigAxs:
     """Create a grid layout for the given number of groups."""
     n_cols = min(n_groups, n_cols)
     n_rows = math.ceil(n_groups / n_cols)
     figsize = (n_cols * col_width, n_rows * row_height)
 
     return _default_fig_axs(
-        None,
         ncols=n_cols,
         nrows=n_rows,
         figsize=figsize,
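
FigAxs is now a (Figure, Axs) pair rather than (Figure, list[Axes]); Axs wraps the 2-D array returned by plt.subplots and supports iteration, len(), and (row, col) or slice indexing. A short usage sketch, assuming the module is used as mxlpy.plot:

    from mxlpy import plot

    fig, axs = plot.grid_layout(4, n_cols=2)  # axs wraps a (2, 2) array of Axes

    axs[0, 0].set_title("top left")  # (row, col) indexing returns a single Axes
    for ax in axs:                   # iteration yields the flattened Axes
        ax.tick_params(labelsize=8)
    bottom_row = axs[-1, :]          # slices return arrays of Axes
    print(len(axs))                  # 4
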
@@ -475,19 +529,103 @@ def grid_layout(
 
 
 def bars(
-    x: pd.DataFrame,
+    x: pd.Series | pd.DataFrame,
     *,
     ax: Axes | None = None,
     grid: bool = True,
+    xlabel: str | None = None,
+    ylabel: str | None = None,
 ) -> FigAx:
     """Plot multiple lines on the same axis."""
     fig, ax = _default_fig_ax(ax=ax, grid=grid)
-    sns.barplot(data=x, ax=ax)
-    _default_labels(ax, xlabel=x.index.name, ylabel=None)
-    ax.legend(x.columns)
+    sns.barplot(data=cast(pd.DataFrame, x), ax=ax)
+
+    if xlabel is None:
+        xlabel = x.index.name if x.index.name is not None else ""
+    _default_labels(ax, xlabel=xlabel, ylabel=ylabel)
+    if isinstance(x, pd.DataFrame):
+        ax.legend(x.columns)
     return fig, ax
 
 
+def bars_grouped(
+    groups: list[pd.DataFrame] | list[pd.Series],
+    *,
+    n_cols: int = 2,
+    col_width: float = 3,
+    row_height: float = 4,
+    sharey: bool = False,
+    grid: bool = True,
+    xlabel: str | None = None,
+    ylabel: str | None = None,
+) -> FigAxs:
+    """Plot multiple groups of lines on separate axes."""
+    fig, axs = grid_layout(
+        len(groups),
+        n_cols=n_cols,
+        col_width=col_width,
+        row_height=row_height,
+        sharex=False,
+        sharey=sharey,
+        grid=grid,
+    )
+
+    for group, ax in zip(
+        groups,
+        axs,
+        strict=False,
+    ):
+        bars(
+            group,
+            ax=ax,
+            grid=grid,
+            xlabel=xlabel,
+            ylabel=ylabel,
+        )
+
+    axsl = list(axs)
+    for i in range(len(groups), len(axs)):
+        axsl[i].set_visible(False)
+
+    return fig, axs
+
+
+def bars_autogrouped(
+    s: pd.Series | pd.DataFrame,
+    *,
+    n_cols: int = 2,
+    col_width: float = 4,
+    row_height: float = 3,
+    max_group_size: int = 6,
+    grid: bool = True,
+    xlabel: str | None = None,
+    ylabel: str | None = None,
+) -> FigAxs:
+    """Plot a series or dataframe with lines grouped by order of magnitude."""
+    group_names = _split_large_groups(
+        _partition_by_order_of_magnitude(s)
+        if isinstance(s, pd.Series)
+        else _partition_by_order_of_magnitude(s.max()),
+        max_size=max_group_size,
+    )
+
+    groups: list[pd.Series] | list[pd.DataFrame] = (
+        [s.loc[group] for group in group_names]
+        if isinstance(s, pd.Series)
+        else [s.loc[:, group] for group in group_names]
+    )
+
+    return bars_grouped(
+        groups,
+        n_cols=n_cols,
+        col_width=col_width,
+        row_height=row_height,
+        grid=grid,
+        xlabel=xlabel,
+        ylabel=ylabel,
+    )
+
+
 def lines(
     x: pd.DataFrame | pd.Series,
     *,
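
bars now accepts a Series as well as a DataFrame, and the new bars_autogrouped splits a Series (or the column maxima of a DataFrame) into order-of-magnitude groups, drawing one bar panel per group; bars_grouped takes groups that were split by hand. A sketch with invented data:

    import pandas as pd
    from mxlpy import plot

    values = pd.Series({"k_cat": 250.0, "e_total": 1.5, "k_m": 0.05, "k_i": 0.02})

    fig, axs = plot.bars_autogrouped(values, n_cols=2, ylabel="value")

    # or split the groups manually
    fig, axs = plot.bars_grouped(
        [values[["k_cat", "e_total"]], values[["k_m", "k_i"]]],
        ylabel="value",
    )
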
@@ -498,6 +636,8 @@ def lines(
     legend: bool = True,
     linewidth: float | None = None,
     linestyle: Linestyle | None = None,
+    xlabel: str | None = None,
+    ylabel: str | None = None,
 ) -> FigAx:
     """Plot multiple lines on the same axis."""
     fig, ax = _default_fig_ax(ax=ax, grid=grid)
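
lines, lines_grouped, and line_autogrouped gain xlabel/ylabel overrides that replace the previous index-name-only labelling. For example (data invented):

    import pandas as pd
    from mxlpy import plot

    df = pd.DataFrame(
        {"x1": [1.0, 0.8, 0.6], "x2": [0.1, 0.3, 0.5]},
        index=pd.Index([0.0, 1.0, 2.0], name="time"),
    )
    fig, ax = plot.lines(df, xlabel="time / h", ylabel="concentration / mM")
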
@@ -509,7 +649,11 @@ def lines(
         linestyle=linestyle,
         color=color,
     )
-    _default_labels(ax, xlabel=x.index.name, ylabel=None)
+    _default_labels(
+        ax,
+        xlabel=x.index.name if xlabel is None else xlabel,
+        ylabel=ylabel,
+    )
     if legend:
         names = x.columns if isinstance(x, pd.DataFrame) else [str(x.name)]
         for line, name in zip(_lines, names, strict=True):
@@ -533,6 +677,8 @@ def lines_grouped(
     sharex: bool = True,
     sharey: bool = False,
     grid: bool = True,
+    xlabel: str | None = None,
+    ylabel: str | None = None,
     color: Color | list[list[Color]] | None = None,
     linewidth: float | None = None,
     linestyle: Linestyle | None = None,
@@ -561,10 +707,13 @@ def lines_grouped(
             color=color_,
             linewidth=linewidth,
             linestyle=linestyle,
+            xlabel=xlabel,
+            ylabel=ylabel,
         )
 
+    axsl = list(axs)
     for i in range(len(groups), len(axs)):
-        axs[i].set_visible(False)
+        axsl[i].set_visible(False)
 
     return fig, axs
 
@@ -577,6 +726,8 @@ def line_autogrouped(
     row_height: float = 3,
     max_group_size: int = 6,
     grid: bool = True,
+    xlabel: str | None = None,
+    ylabel: str | None = None,
     color: Color | list[list[Color]] | None = None,
     linewidth: float | None = None,
     linestyle: Linestyle | None = None,
@@ -604,6 +755,8 @@ def line_autogrouped(
         color=color,
         linestyle=linestyle,
        linewidth=linewidth,
+        xlabel=xlabel,
+        ylabel=ylabel,
     )
 
 
@@ -886,7 +1039,7 @@ def violins_from_2d_idx(
         grid=grid,
     )
 
-    for ax, col in zip(axs[: len(df.columns)], df.columns, strict=True):
+    for ax, col in zip(axs[: len(df.columns)].flatten(), df.columns, strict=True):
         ax.set_title(col)
         violins(df[col].unstack(), ax=ax)
 
mxlpy/report.py CHANGED
@@ -1,5 +1,7 @@
 """Generate a report comparing two models."""
 
+from __future__ import annotations
+
 from collections.abc import Callable
 from datetime import UTC, datetime
 from pathlib import Path
@@ -10,7 +12,10 @@ import sympy
 from mxlpy.meta.source_tools import fn_to_sympy
 from mxlpy.model import Model
 
-__all__ = ["AnalysisFn", "markdown"]
+__all__ = [
+    "AnalysisFn",
+    "markdown",
+]
 
 type AnalysisFn = Callable[[Model, Model, Path], tuple[str, Path]]
 
@@ -84,9 +89,29 @@ def markdown(
 
     """
    content: list[str] = [
-        f"# Report: {datetime.now(UTC).strftime('%Y-%m-%d')}",
+        f"# Report: {datetime.now(UTC).strftime('%Y-%m-%d')}\n",
     ]
 
+    # Unused
+    if unused := m2.get_unused_parameters():
+        content.append("## <span style='color: red'>Unused parameters</span>\n")
+        names = "\n".join(f"<li>{i}</li>\n" for i in sorted(unused))
+        content.append(f"<ul>\n{names}\n</ul>\n")
+
+    # Model stats
+    content.extend(
+        [
+            "| Model component | Old | New |",
+            "| --- | --- | --- |",
+            f"| variables | {len(m1.variables)} | {len(m2.variables)}|",
+            f"| parameters | {len(m1.parameters)} | {len(m2.parameters)}|",
+            f"| derived parameters | {len(m1.derived_parameters)} | {len(m2.derived_parameters)}|",
+            f"| derived variables | {len(m1.derived_variables)} | {len(m2.derived_variables)}|",
+            f"| reactions | {len(m1.reactions)} | {len(m2.reactions)}|",
+            f"| surrogates | {len(m1._surrogates)} | {len(m2._surrogates)}|",  # noqa: SLF001
+        ]
+    )
+
     # Variables
     new_variables, removed_variables, changed_variables = _new_removed_changed(
         m1.variables, m2.variables
@@ -106,7 +131,7 @@
     if len(variables) >= 1:
         content.extend(
             (
-                "## Variables\n",
+                "## Variables\n\n",
                 "| Name | Old Value | New Value |",
                 "| ---- | --------- | --------- |",
             )
@@ -132,7 +157,7 @@
     if len(pars) >= 1:
         content.extend(
             (
-                "## Parameters\n",
+                "## Parameters\n\n",
                 "| Name | Old Value | New Value |",
                 "| ---- | --------- | --------- |",
             )
@@ -159,7 +184,7 @@
     if len(derived) >= 1:
         content.extend(
             (
-                "## Derived\n",
+                "## Derived\n\n",
                 "| Name | Old Value | New Value |",
                 "| ---- | --------- | --------- |",
             )
@@ -187,7 +212,7 @@
     if len(reactions) >= 1:
         content.extend(
             (
-                "## Reactions\n",
+                "## Reactions\n\n",
                 "| Name | Old Value | New Value |",
                 "| ---- | --------- | --------- |",
             )
@@ -207,7 +232,7 @@
     if len(dependent) >= 1:
         content.extend(
             (
-                "## Numerical differences of dependent values\n",
+                "## Numerical differences of dependent values\n\n",
                 "| Name | Old Value | New Value | Relative Change | ",
                 "| ---- | --------- | --------- | --------------- | ",
             )
@@ -226,7 +251,7 @@
     if len(rhs) >= 1:
         content.extend(
             (
-                "## Numerical differences of right hand side values\n",
+                "## Numerical differences of right hand side values\n\n",
                 "| Name | Old Value | New Value | Relative Change | ",
                 "| ---- | --------- | --------- | --------------- | ",
             )
mxlpy/sbml/__init__.py CHANGED
@@ -5,10 +5,10 @@ Allows importing and exporting metabolic models in SBML format.
 
 from __future__ import annotations
 
+from ._export import write
+from ._import import read
+
 __all__ = [
     "read",
     "write",
 ]
-
-from ._export import write
-from ._import import read