modelbase2 0.6.0__py3-none-any.whl → 0.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
modelbase2/model.py CHANGED
@@ -984,7 +984,7 @@ class Model:
         return copy.deepcopy(self._reactions)
 
     def get_stoichiometries(
-        self, concs: dict[str, float] | None = None, time: float = 0.0
+        self, variables: dict[str, float] | None = None, time: float = 0.0
     ) -> pd.DataFrame:
         """Retrieve the stoichiometries of the model.
 
@@ -1000,7 +1000,7 @@ class Model:
         """
         if (cache := self._cache) is None:
             cache = self._create_cache()
-        args = self.get_dependent(concs=concs, time=time)
+        args = self.get_dependent(variables=variables, time=time)
 
         stoich_by_cpds = copy.deepcopy(cache.stoich_by_cpds)
         for cpd, stoich in cache.dyn_stoich_by_cpds.items():
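The hunks above rename the `concs` keyword argument to `variables` across the public API. A minimal migration sketch, assuming an existing `modelbase2.Model` instance named `model` whose variables are `x1` and `x2` (names taken from the docstring examples; building the model is not shown here):

    # 0.6.0 keyword (no longer accepted in 0.7.0)
    # stoich = model.get_stoichiometries(concs={"x1": 1.0, "x2": 2.0}, time=0.0)

    # 0.7.0 keyword
    stoich = model.get_stoichiometries(variables={"x1": 1.0, "x2": 2.0}, time=0.0)
    dependent = model.get_dependent(variables={"x1": 1.0, "x2": 2.0}, time=0.0)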
@@ -1291,6 +1291,13 @@ class Model:
         self._surrogates.pop(name)
         return self
 
+    def get_surrogate_reaction_names(self) -> list[str]:
+        """Return reaction names by surrogates."""
+        names = []
+        for i in self._surrogates.values():
+            names.extend(i.stoichiometries)
+        return names
+
     ##########################################################################
     # Get dependent values. This includes
     # - derived parameters
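The new `get_surrogate_reaction_names` helper collects the reaction names that attached surrogates contribute through their stoichiometries. A usage sketch, assuming the same hypothetical `model` with at least one surrogate registered:

    # Reaction names introduced by surrogates; an empty list if none are attached
    surrogate_reactions = model.get_surrogate_reaction_names()
    print(surrogate_reactions)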
@@ -1301,7 +1308,7 @@ class Model:
 
     def _get_dependent(
         self,
-        concs: dict[str, float],
+        variables: dict[str, float],
         time: float = 0.0,
         *,
         cache: ModelCache,
@@ -1313,7 +1320,7 @@ class Model:
             {"x1": 1.0, "x2": 2.0, "k1": 0.1, "time": 0.0}
 
         Args:
-            concs: A dictionary of concentrations with keys as the names of the substances
+            variables: A dictionary of concentrations with keys as the names of the substances
                 and values as their respective concentrations.
             time: The time point for the calculation
             cache: A ModelCache object containing precomputed values and dependencies.
@@ -1325,7 +1332,7 @@ class Model:
             with their respective names as keys and their calculated values as values.
 
         """
-        args: dict[str, float] = cache.all_parameter_values | concs
+        args: dict[str, float] = cache.all_parameter_values | variables
         args["time"] = time
 
         containers = self._derived | self._reactions | self._surrogates
@@ -1336,7 +1343,7 @@ class Model:
 
     def get_dependent(
         self,
-        concs: dict[str, float] | None = None,
+        variables: dict[str, float] | None = None,
         time: float = 0.0,
         *,
         include_readouts: bool = False,
@@ -1346,18 +1353,18 @@ class Model:
         Examples:
             # Using initial conditions
             >>> model.get_args()
-            {"x1": 1.0, "x2": 2.0, "k1": 0.1, "time": 0.0}
+            {"x1": 1.get_dependent, "x2": 2.0, "k1": 0.1, "time": 0.0}
 
             # With custom concentrations
-            >>> model.get_args({"x1": 1.0, "x2": 2.0})
+            >>> model.get_dependent({"x1": 1.0, "x2": 2.0})
             {"x1": 1.0, "x2": 2.0, "k1": 0.1, "time": 0.0}
 
             # With custom concentrations and time
-            >>> model.get_args({"x1": 1.0, "x2": 2.0}, time=1.0)
+            >>> model.get_dependent({"x1": 1.0, "x2": 2.0}, time=1.0)
             {"x1": 1.0, "x2": 2.0, "k1": 0.1, "time": 1.0}
 
         Args:
-            concs: A dictionary where keys are the names of the concentrations and values are their respective float values.
+            variables: A dictionary where keys are the names of the concentrations and values are their respective float values.
             time: The time point at which the arguments are generated (default is 0.0).
             include_readouts: Whether to include readouts in the arguments (default is False).
 
@@ -1369,7 +1376,7 @@ class Model:
             cache = self._create_cache()
 
         args = self._get_dependent(
-            concs=self.get_initial_conditions() if concs is None else concs,
+            variables=self.get_initial_conditions() if variables is None else variables,
             time=time,
             cache=cache,
         )
@@ -1382,14 +1389,14 @@ class Model:
 
     def get_dependent_time_course(
         self,
-        concs: pd.DataFrame,
+        variables: pd.DataFrame,
         *,
         include_readouts: bool = False,
     ) -> pd.DataFrame:
         """Generate a DataFrame containing time course arguments for model evaluation.
 
         Examples:
-            >>> model.get_args_time_course(
+            >>> model.get_dependent_time_course(
             ...     pd.DataFrame({"x1": [1.0, 2.0], "x2": [2.0, 3.0]}
             ... )
             pd.DataFrame({
@@ -1400,7 +1407,7 @@ class Model:
             )
 
         Args:
-            concs: A DataFrame containing concentration data with time as the index.
+            variables: A DataFrame containing concentration data with time as the index.
             include_readouts: If True, include readout variables in the resulting DataFrame.
 
         Returns:
@@ -1413,14 +1420,14 @@ class Model:
 
         pars_df = pd.DataFrame(
             np.full(
-                (len(concs), len(cache.all_parameter_values)),
+                (len(variables), len(cache.all_parameter_values)),
                 np.fromiter(cache.all_parameter_values.values(), dtype=float),
             ),
-            index=concs.index,
+            index=variables.index,
             columns=list(cache.all_parameter_values),
         )
 
-        args = pd.concat((concs, pars_df), axis=1)
+        args = pd.concat((variables, pars_df), axis=1)
         args["time"] = args.index
 
         containers = self._derived | self._reactions | self._surrogates
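`get_dependent_time_course` now also takes its input under the name `variables`: a DataFrame of variable values indexed by time, from which the parameter columns and the `time` column are filled in. A sketch using the values from the docstring example, again assuming the hypothetical `model` from above:

    import pandas as pd

    tc = pd.DataFrame(
        {"x1": [1.0, 2.0], "x2": [2.0, 3.0]},
        index=[0.0, 1.0],  # the index is interpreted as time
    )
    dependent_tc = model.get_dependent_time_course(variables=tc)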
@@ -1438,7 +1445,7 @@ class Model:
 
     def get_args(
         self,
-        concs: dict[str, float] | None = None,
+        variables: dict[str, float] | None = None,
         time: float = 0.0,
         *,
         include_derived: bool = True,
@@ -1460,7 +1467,7 @@ class Model:
             {"x1": 1.0, "x2": 2.0, "k1": 0.1, "time": 1.0}
 
         Args:
-            concs: A dictionary where keys are the names of the concentrations and values are their respective float values.
+            variables: A dictionary where keys are the names of the concentrations and values are their respective float values.
             time: The time point at which the arguments are generated.
             include_derived: Whether to include derived variables in the arguments.
             include_readouts: Whether to include readouts in the arguments.
@@ -1476,13 +1483,13 @@ class Model:
             names.extend(self._readouts)
 
         args = self.get_dependent(
-            concs=concs, time=time, include_readouts=include_readouts
+            variables=variables, time=time, include_readouts=include_readouts
         )
         return args.loc[names]
 
     def get_args_time_course(
         self,
-        concs: pd.DataFrame,
+        variables: pd.DataFrame,
         *,
         include_derived: bool = True,
         include_readouts: bool = False,
@@ -1501,7 +1508,7 @@ class Model:
             )
 
         Args:
-            concs: A DataFrame containing concentration data with time as the index.
+            variables: A DataFrame containing concentration data with time as the index.
             include_derived: Whether to include derived variables in the arguments.
             include_readouts: If True, include readout variables in the resulting DataFrame.
 
@@ -1515,7 +1522,7 @@ class Model:
             names.extend(self.get_derived_variable_names())
 
         args = self.get_dependent_time_course(
-            concs=concs, include_readouts=include_readouts
+            variables=variables, include_readouts=include_readouts
         )
         return args.loc[:, names]
 
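`get_args` and `get_args_time_course` forward to `get_dependent` and `get_dependent_time_course` and then select the requested columns, so they pick up the same keyword rename. A sketch, reusing `model` and the time-indexed DataFrame `tc` from the sketches above:

    args = model.get_args(
        variables={"x1": 1.0, "x2": 2.0},
        time=0.0,
        include_derived=True,
        include_readouts=False,
    )
    args_tc = model.get_args_time_course(variables=tc, include_derived=True)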
@@ -1547,7 +1554,7 @@ class Model:
 
     def get_fluxes(
         self,
-        concs: dict[str, float] | None = None,
+        variables: dict[str, float] | None = None,
         time: float = 0.0,
     ) -> pd.Series:
         """Calculate the fluxes for the given concentrations and time.
@@ -1566,7 +1573,7 @@ class Model:
             pd.Series({"r1": 0.1, "r2": 0.2})
 
         Args:
-            concs: A dictionary where keys are species names and values are their concentrations.
+            variables: A dictionary where keys are species names and values are their concentrations.
             time: The time at which to calculate the fluxes. Defaults to 0.0.
 
         Returns:
@@ -1578,13 +1585,13 @@ class Model:
             names.extend(surrogate.stoichiometries)
 
         args = self.get_dependent(
-            concs=concs,
+            variables=variables,
             time=time,
             include_readouts=False,
         )
         return args.loc[names]
 
-    def get_fluxes_time_course(self, args: pd.DataFrame) -> pd.DataFrame:
+    def get_fluxes_time_course(self, variables: pd.DataFrame) -> pd.DataFrame:
         """Generate a time course of fluxes for the given reactions and surrogates.
 
         Examples:
@@ -1596,9 +1603,9 @@ class Model:
             time course of fluxes.
 
         Args:
-            args (pd.DataFrame): A DataFrame containing the input arguments for the reactions
-                and surrogates. Each column corresponds to a specific input
-                variable, and each row represents a different time point.
+            variables: A DataFrame containing the input arguments for the reactions
+                and surrogates. Each column corresponds to a specific input
+                variable, and each row represents a different time point.
 
         Returns:
             pd.DataFrame: A DataFrame containing the calculated fluxes for each reaction and
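`get_fluxes` follows the same rename, and the sole argument of `get_fluxes_time_course` is now called `variables` as well (its body change continues in the next hunk). A sketch, reusing `model` and `tc` from above:

    fluxes = model.get_fluxes(variables={"x1": 1.0, "x2": 2.0}, time=0.0)

    # Flux time course from a time-indexed DataFrame of variables;
    # passing the frame positionally works in both 0.6.0 and 0.7.0
    fluxes_tc = model.get_fluxes_time_course(tc)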
@@ -1610,17 +1617,17 @@ class Model:
         for surrogate in self._surrogates.values():
             names.extend(surrogate.stoichiometries)
 
-        args = self.get_dependent_time_course(
-            concs=args,
+        variables = self.get_dependent_time_course(
+            variables=variables,
             include_readouts=False,
         )
-        return args.loc[:, names]
+        return variables.loc[:, names]
 
     ##########################################################################
     # Get rhs
     ##########################################################################
 
-    def __call__(self, /, time: float, concs: Array) -> Array:
+    def __call__(self, /, time: float, variables: Array) -> Array:
         """Simulation version of get_right_hand_side.
 
         Examples:
@@ -1632,7 +1639,7 @@ class Model:
 
         Args:
             time: The current time point.
-            concs: Array of concentrations
+            variables: Array of concentrations
 
 
         Returns:
@@ -1641,15 +1648,15 @@ class Model:
         """
         if (cache := self._cache) is None:
             cache = self._create_cache()
-        concsd: dict[str, float] = dict(
+        vars_d: dict[str, float] = dict(
             zip(
                 cache.var_names,
-                concs,
+                variables,
                 strict=True,
             )
         )
         dependent: dict[str, float] = self._get_dependent(
-            concs=concsd,
+            variables=vars_d,
             time=time,
             cache=cache,
         )
@@ -1667,7 +1674,7 @@ class Model:
 
     def get_right_hand_side(
         self,
-        concs: dict[str, float] | None = None,
+        variables: dict[str, float] | None = None,
         time: float = 0.0,
     ) -> pd.Series:
         """Calculate the right-hand side of the differential equations for the model.
@@ -1686,7 +1693,7 @@ class Model:
             pd.Series({"x1": 0.1, "x2": 0.2})
 
         Args:
-            concs: A dictionary mapping compound names to their concentrations.
+            variables: A dictionary mapping compound names to their concentrations.
             time: The current time point. Defaults to 0.0.
 
         Returns:
@@ -1697,7 +1704,7 @@ class Model:
             cache = self._create_cache()
         var_names = self.get_variable_names()
         dependent = self._get_dependent(
-            concs=self.get_initial_conditions() if concs is None else concs,
+            variables=self.get_initial_conditions() if variables is None else variables,
             time=time,
             cache=cache,
         )
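`__call__` keeps the `f(t, y)` shape that ODE integrators expect, now with the array argument named `variables`, and `get_right_hand_side` uses the same keyword as the rest of the API. A sketch of both, assuming the hypothetical `model` from above; modelbase2 ships its own `Simulator`, so the direct `solve_ivp` call is only meant to illustrate the signature, and the assumption that `get_initial_conditions()` matches the cached variable ordering is not confirmed by this diff:

    import numpy as np
    from scipy.integrate import solve_ivp

    # Named evaluation of the right-hand side at a single state
    rhs = model.get_right_hand_side(variables={"x1": 1.0, "x2": 2.0}, time=0.0)

    # The model itself is callable as f(t, y)
    y0 = np.fromiter(model.get_initial_conditions().values(), dtype=float)
    result = solve_ivp(model, t_span=(0.0, 10.0), y0=y0)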
@@ -33,17 +33,18 @@ class MLP(nn.Module):
     def __init__(
         self,
         n_inputs: int,
-        layers: list[int],
-        activation: Callable | None = nn.ReLU(),
+        neurons_per_layer: list[int],
+        activation: Callable | None = None,
         output_activation: Callable | None = None,
     ) -> None:
         """Initializes the MLP with the given number of inputs and list of (hidden) layers.
 
         Args:
-            n_inputs (int): The number of input features.
-            n_outputs list(int): A list containing the number of neurons in hidden and output layer.
-            activation Callable | None (default nn.ReLU()): The activation function to be applied after each hidden layer
-            activation Callable | None (default None): The activation function to be applied after the final (output) layer
+            n_inputs: The number of input features.
+            neurons_per_layer: Number of neurons per layer
+            n_outputs: A list containing the number of neurons in hidden and output layer.
+            activation: The activation function to be applied after each hidden layer (default nn.ReLU)
+            output_activation: The activation function to be applied after the final (output) layer
 
         For instance, MLP(10, layers = [50, 50, 10]) initializes a neural network with the following architecture:
         - Linear layer with `n_inputs` inputs and 50 outputs
@@ -57,8 +58,8 @@ class MLP(nn.Module):
 
         """
         super().__init__()
-        self.layers = layers
-        self.activation = activation
+        self.layers = neurons_per_layer
+        self.activation = nn.ReLU() if activation is None else activation
         self.output_activation = output_activation
 
         levels = []
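Two changes to `MLP.__init__`: the `layers` argument is renamed to `neurons_per_layer`, and the `nn.ReLU()` default is no longer created in the function signature (where a single module instance would be shared by every `MLP` built with the default) but inside the constructor. A construction sketch, assuming `MLP` is imported from its defining module, which this diff does not name:

    import torch.nn as nn

    # 0.7.0: renamed keyword; the default activation is instantiated per object
    mlp = MLP(n_inputs=10, neurons_per_layer=[50, 50, 10])

    # An explicit activation is passed through unchanged
    mlp_tanh = MLP(n_inputs=10, neurons_per_layer=[50, 50, 10], activation=nn.Tanh())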
modelbase2/npe.py CHANGED
@@ -174,7 +174,8 @@ def train_torch_ss_estimator(
     n_hidden = max(2 * len(features.columns) * len(targets.columns), 10)
     n_outputs = len(targets.columns)
     approximator = MLP(
-        n_inputs=len(features.columns), layers=[n_hidden, n_hidden, n_outputs]
+        n_inputs=len(features.columns),
+        neurons_per_layer=[n_hidden, n_hidden, n_outputs],
     ).to(device)
 
     features_ = torch.Tensor(features.to_numpy(), device=device)
@@ -17,8 +17,7 @@ def get_km_and_kcat_from_brenda(
     You can obtain the database from https://www.brenda-enzymes.org/download.php
     """
     brenda = Brenda()
-    if brenda_path is not None:
-        brenda.read_database(brenda_path)
+    brenda.read_database(brenda_path)
 
     kms, kcats = brenda.get_kms_and_kcats(
         ec=ec,
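With the `None` guard around `read_database` removed, `brenda_path` is effectively required: passing `None` now reaches `read_database` directly instead of being silently skipped. A call sketch with placeholder values; the EC number, file name, return shape, and any further parameters of the function are assumptions, not taken from this diff:

    from pathlib import Path

    result = get_km_and_kcat_from_brenda(
        ec="1.1.1.1",                              # hypothetical EC number
        brenda_path=Path("brenda_download.json"),  # path to the downloaded BRENDA database
    )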
@@ -1,16 +1,14 @@
 from __future__ import annotations
 
 import ast
-import inspect
 import re
-import textwrap
 from datetime import UTC, datetime
 from typing import TYPE_CHECKING, Any, cast
 
-import dill
 import libsbml
 import numpy as np
 
+from modelbase2.experimental.source_tools import get_fn_ast
 from modelbase2.sbml._data import AtomicUnit, Compartment
 from modelbase2.types import Derived
 
@@ -322,17 +320,7 @@ def _tree_to_sbml(
 
 
 def _sbmlify_fn(fn: Callable, user_args: list[str]) -> libsbml.ASTNode:
-    try:
-        source = inspect.getsource(fn)
-    except OSError:  # could not get source code
-        source = dill.source.getsource(fn)
-
-    tree = ast.parse(textwrap.dedent(source))
-    if not isinstance(fn_def := tree.body[0], ast.FunctionDef):
-        msg = "Not a function"
-        raise TypeError(msg)
-
-    return _tree_to_sbml(fn_def, args=user_args)
+    return _tree_to_sbml(get_fn_ast(fn), args=user_args)
 
 
 ##########################################################################
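The inline source retrieval and AST parsing in `_sbmlify_fn` has been factored out into `get_fn_ast` from `modelbase2.experimental.source_tools`. An illustrative re-implementation of what such a helper has to do, based on the removed lines (the actual helper may differ, e.g. it may keep the `dill` fallback):

    import ast
    import inspect
    import textwrap
    from collections.abc import Callable

    def get_fn_ast_sketch(fn: Callable) -> ast.FunctionDef:
        """Parse a function's source code into its ast.FunctionDef node."""
        source = textwrap.dedent(inspect.getsource(fn))
        fn_def = ast.parse(source).body[0]
        if not isinstance(fn_def, ast.FunctionDef):
            msg = "Not a function"
            raise TypeError(msg)
        return fn_def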
modelbase2/scan.py CHANGED
@@ -35,7 +35,7 @@ import pandas as pd
 
 from modelbase2.parallel import Cache, parallelise
 from modelbase2.simulator import Simulator
-from modelbase2.types import ProtocolByPars, SteadyStates, TimeCourseByPars
+from modelbase2.types import ProtocolByPars, SteadyStates, TimeCourseByPars, unwrap
 
 if TYPE_CHECKING:
     from collections.abc import Callable
@@ -325,10 +325,10 @@ def _steady_state_worker(
 
     """
     try:
-        c, v = (
+        c, v = unwrap(
             Simulator(model, y0=y0)
             .simulate_to_steady_state(rel_norm=rel_norm)
-            .get_full_concs_and_fluxes()
+            .get_result()
         )
     except ZeroDivisionError:
         c = None
@@ -353,10 +353,10 @@ def _time_course_worker(
 
     """
    try:
-        c, v = (
+        c, v = unwrap(
             Simulator(model, y0=y0)
             .simulate_time_course(time_points=time_points)
-            .get_full_concs_and_fluxes()
+            .get_result()
         )
     except ZeroDivisionError:
         c = None
@@ -382,13 +382,13 @@ def _protocol_worker(
         TimeCourse: Object containing protocol series concentrations and fluxes.
 
     """
-    c, v = (
+    c, v = unwrap(
         Simulator(model, y0=y0)
         .simulate_over_protocol(
             protocol=protocol,
             time_points_per_step=time_points_per_step,
         )
-        .get_full_concs_and_fluxes()
+        .get_result()
     )
     time_points = np.linspace(
         0,
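The scan workers now read simulation output through `get_result()` and pass it through `unwrap` from `modelbase2.types` before unpacking, instead of destructuring `get_full_concs_and_fluxes()` directly. The exact behaviour of `unwrap` is not shown in this diff; a minimal sketch of the pattern it usually stands for:

    from typing import TypeVar

    T = TypeVar("T")

    def unwrap_sketch(value: T | None) -> T:
        """Illustrative stand-in: fail loudly instead of unpacking None."""
        if value is None:
            msg = "Expected a simulation result, got None"
            raise ValueError(msg)
        return value

    # With such a helper, `c, v = unwrap(sim.get_result())` only unpacks when the
    # simulator actually produced a (concentrations, fluxes) pair.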