mxlpy 0.15.0__py3-none-any.whl → 0.16.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mxlpy/integrators/int_assimulo.py +2 -1
- mxlpy/mc.py +84 -70
- mxlpy/mca.py +97 -98
- mxlpy/scan.py +40 -38
- mxlpy/types.py +1 -1
- {mxlpy-0.15.0.dist-info → mxlpy-0.16.0.dist-info}/METADATA +5 -3
- {mxlpy-0.15.0.dist-info → mxlpy-0.16.0.dist-info}/RECORD +9 -9
- {mxlpy-0.15.0.dist-info → mxlpy-0.16.0.dist-info}/WHEEL +0 -0
- {mxlpy-0.15.0.dist-info → mxlpy-0.16.0.dist-info}/licenses/LICENSE +0 -0
mxlpy/integrators/int_assimulo.py
CHANGED
@@ -116,7 +116,8 @@ class Assimulo:
 
         """
         try:
-
+            t, y = self.integrator.simulate(time_points[-1], 0, time_points)  # type: ignore
+            return np.array(t, dtype=float), np.array(y, dtype=float)
         except CVodeError:
             return None, None
 
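The hunk above keeps both the Assimulo simulate call and the conversion to NumPy arrays inside the try block, so a successful solve returns arrays while a CVodeError still yields (None, None). A minimal, hypothetical caller-side sketch of that contract (the integrate callable and its shape are assumptions for illustration, not taken from this diff):

    import numpy as np

    def integrate_or_raise(integrate, time_points: np.ndarray) -> tuple[np.ndarray, np.ndarray]:
        # integrate is assumed to follow the wrapper's contract:
        # arrays on success, (None, None) when CVode fails.
        t, y = integrate(time_points)
        if t is None or y is None:
            raise RuntimeError("integration failed (CVodeError)")
        return t, y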
mxlpy/mc.py
CHANGED
@@ -31,7 +31,7 @@ from mxlpy.scan import (
     _protocol_worker,
     _steady_state_worker,
     _time_course_worker,
-
+    _update_parameters_and_initial_conditions,
 )
 from mxlpy.types import (
     IntegratorType,
@@ -74,7 +74,6 @@ class ParameterScanWorker(Protocol):
     def __call__(
         self,
         model: Model,
-        y0: dict[str, float] | None,
         *,
         parameters: pd.DataFrame,
         rel_norm: bool,
@@ -86,7 +85,6 @@ class ParameterScanWorker(Protocol):
 
 def _parameter_scan_worker(
     model: Model,
-    y0: dict[str, float] | None,
     *,
     parameters: pd.DataFrame,
     rel_norm: bool,
@@ -117,8 +115,7 @@ def _parameter_scan_worker(
     """
     return scan.steady_state(
         model,
-
-        y0=y0,
+        to_scan=parameters,
         parallel=False,
         rel_norm=rel_norm,
         integrator=integrator,
@@ -127,8 +124,8 @@ def _parameter_scan_worker(
 
 def steady_state(
     model: Model,
-    mc_parameters: pd.DataFrame,
     *,
+    mc_to_scan: pd.DataFrame,
     y0: dict[str, float] | None = None,
     max_workers: int | None = None,
     cache: Cache | None = None,
@@ -139,7 +136,7 @@ def steady_state(
     """Monte-carlo scan of steady states.
 
     Examples:
-        >>> steady_state(model,
+        >>> steady_state(model, mc_to_scan)
            p    t     x      y
        0   0.0  0.1   0.00
            1.0  0.2   0.01
@@ -155,32 +152,35 @@ def steady_state(
         SteadyStates: Object containing the steady state solutions for the given parameter
 
     """
+    if y0 is not None:
+        model.update_variables(y0)
+
     res = parallelise(
         partial(
-
+            _update_parameters_and_initial_conditions,
             fn=partial(
                 worker,
-                y0=y0,
                 rel_norm=rel_norm,
                 integrator=integrator,
             ),
             model=model,
         ),
-        inputs=list(
+        inputs=list(mc_to_scan.iterrows()),
         max_workers=max_workers,
         cache=cache,
     )
     return SteadyStates(
         variables=pd.concat({k: v.variables for k, v in res.items()}, axis=1).T,
         fluxes=pd.concat({k: v.fluxes for k, v in res.items()}, axis=1).T,
-        parameters=
+        parameters=mc_to_scan,
     )
 
 
 def time_course(
     model: Model,
+    *,
     time_points: Array,
-
+    mc_to_scan: pd.DataFrame,
     y0: dict[str, float] | None = None,
     max_workers: int | None = None,
     cache: Cache | None = None,
@@ -190,7 +190,7 @@ def time_course(
     """MC time course.
 
     Examples:
-        >>> time_course(model, time_points,
+        >>> time_course(model, time_points, mc_to_scan)
            p    t     x      y
        0   0.0  0.1   0.00
            1.0  0.2   0.01
@@ -203,27 +203,29 @@ def time_course(
            3.0  0.4   0.03
     Returns:
         tuple[concentrations, fluxes] using pandas multiindex
-        Both dataframes are of shape (#time_points * #
+        Both dataframes are of shape (#time_points * #mc_to_scan, #variables)
 
     """
+    if y0 is not None:
+        model.update_variables(y0)
+
     res = parallelise(
         partial(
-
+            _update_parameters_and_initial_conditions,
             fn=partial(
                 worker,
                 time_points=time_points,
-                y0=y0,
                 integrator=integrator,
             ),
             model=model,
         ),
-        inputs=list(
+        inputs=list(mc_to_scan.iterrows()),
         max_workers=max_workers,
         cache=cache,
     )
 
     return TimeCourseByPars(
-        parameters=
+        parameters=mc_to_scan,
         variables=pd.concat({k: v.variables.T for k, v in res.items()}, axis=1).T,
         fluxes=pd.concat({k: v.fluxes.T for k, v in res.items()}, axis=1).T,
     )
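Read together, the mc.py hunks above make the Monte-Carlo parameter sets a keyword-only mc_to_scan DataFrame and apply y0 once via model.update_variables instead of threading it through each worker. A hedged sketch of the new call shape (the model object and the parameter name k1 are placeholders, not taken from this diff):

    import numpy as np
    import pandas as pd
    from mxlpy import mc  # module shown as mxlpy/mc.py above

    # Hypothetical Monte-Carlo draws; column names are assumed to match model parameters.
    mc_to_scan = pd.DataFrame({"k1": np.random.lognormal(mean=0.0, sigma=0.2, size=10)})

    ss = mc.steady_state(model, mc_to_scan=mc_to_scan)  # 0.15.x took mc_parameters positionally
    tc = mc.time_course(model, time_points=np.linspace(0, 10, 11), mc_to_scan=mc_to_scan)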
@@ -231,8 +233,9 @@ def time_course(
 
 def time_course_over_protocol(
     model: Model,
+    *,
     protocol: pd.DataFrame,
-
+    mc_to_scan: pd.DataFrame,
     y0: dict[str, float] | None = None,
     time_points_per_step: int = 10,
     max_workers: int | None = None,
@@ -243,7 +246,7 @@ def time_course_over_protocol(
     """MC time course.
 
     Examples:
-        >>> time_course_over_protocol(model, protocol,
+        >>> time_course_over_protocol(model, protocol, mc_to_scan)
            p    t     x      y
        0   0.0  0.1   0.00
            1.0  0.2   0.01
@@ -257,22 +260,24 @@ def time_course_over_protocol(
 
     Returns:
         tuple[concentrations, fluxes] using pandas multiindex
-        Both dataframes are of shape (#time_points * #
+        Both dataframes are of shape (#time_points * #mc_to_scan, #variables)
 
     """
+    if y0 is not None:
+        model.update_variables(y0)
+
     res = parallelise(
         partial(
-
+            _update_parameters_and_initial_conditions,
             fn=partial(
                 worker,
                 protocol=protocol,
-                y0=y0,
                 integrator=integrator,
                 time_points_per_step=time_points_per_step,
             ),
             model=model,
         ),
-        inputs=list(
+        inputs=list(mc_to_scan.iterrows()),
         max_workers=max_workers,
         cache=cache,
     )
@@ -281,16 +286,16 @@ def time_course_over_protocol(
     return ProtocolByPars(
         variables=pd.concat(concs, axis=1).T,
         fluxes=pd.concat(fluxes, axis=1).T,
-        parameters=
+        parameters=mc_to_scan,
         protocol=protocol,
     )
 
 
 def scan_steady_state(
     model: Model,
-    parameters: pd.DataFrame,
-    mc_parameters: pd.DataFrame,
     *,
+    to_scan: pd.DataFrame,
+    mc_to_scan: pd.DataFrame,
     y0: dict[str, float] | None = None,
     max_workers: int | None = None,
     cache: Cache | None = None,
@@ -304,7 +309,7 @@ def scan_steady_state(
        >>> scan_steady_state(
        ...     model,
        ...     parameters=pd.DataFrame({"k1": np.linspace(0, 1, 3)}),
-       ...
+       ...     mc_to_scan=mc_to_scan,
        ... ).variables
              x     y
        k1
@@ -319,8 +324,8 @@ def scan_steady_state(
 
     Args:
         model: The model to analyze
-
-
+        to_scan: DataFrame containing parameter and initial values to scan over
+        mc_to_scan: DataFrame containing Monte Carlo parameter sets
         y0: Initial conditions for the solver
         max_workers: Maximum number of workers for parallel processing
         cache: Cache object for storing results
@@ -332,19 +337,21 @@ def scan_steady_state(
        McSteadyStates: Object containing the steady state solutions for the given parameter
 
    """
+    if y0 is not None:
+        model.update_variables(y0)
+
    res = parallelise(
        partial(
-
+            _update_parameters_and_initial_conditions,
            fn=partial(
                worker,
-                parameters=
-                y0=y0,
+                parameters=to_scan,
                rel_norm=rel_norm,
                integrator=integrator,
            ),
            model=model,
        ),
-        inputs=list(
+        inputs=list(mc_to_scan.iterrows()),
        cache=cache,
        max_workers=max_workers,
    )
@@ -353,17 +360,22 @@ def scan_steady_state(
    return McSteadyStates(
        variables=pd.concat(concs, axis=1).T,
        fluxes=pd.concat(fluxes, axis=1).T,
-        parameters=
-
+        parameters=to_scan,
+        mc_to_scan=mc_to_scan,
    )
 
 
+###############################################################################
+# MCA
+###############################################################################
+
+
 def variable_elasticities(
     model: Model,
-    variables: list[str],
-    concs: dict[str, float],
-    mc_parameters: pd.DataFrame,
     *,
+    mc_to_scan: pd.DataFrame,
+    to_scan: list[str] | None = None,
+    variables: dict[str, float] | None = None,
     time: float = 0,
     cache: Cache | None = None,
     max_workers: int | None = None,
@@ -377,7 +389,7 @@ def variable_elasticities(
        ...     model,
        ...     variables=["x1", "x2"],
        ...     concs={"x1": 1, "x2": 2},
-       ...
+       ...     mc_to_scan=mc_to_scan
        ... )
              x1    x2
        0 v1  0.0   0.0
@@ -389,9 +401,9 @@ def variable_elasticities(
 
     Args:
         model: The model to analyze
-
-
-
+        to_scan: List of variables for which to calculate elasticities
+        variables: Custom variable values. Defaults to initial conditions.
+        mc_to_scan: DataFrame containing Monte Carlo parameter sets
         time: Time point for the analysis
         cache: Cache object for storing results
         max_workers: Maximum number of workers for parallel processing
@@ -404,18 +416,18 @@ def variable_elasticities(
     """
     res = parallelise(
         partial(
-
+            _update_parameters_and_initial_conditions,
             fn=partial(
                 mca.variable_elasticities,
                 variables=variables,
-
+                to_scan=to_scan,
                 time=time,
                 displacement=displacement,
                 normalized=normalized,
             ),
             model=model,
         ),
-        inputs=list(
+        inputs=list(mc_to_scan.iterrows()),
         cache=cache,
         max_workers=max_workers,
     )
@@ -424,10 +436,10 @@ def variable_elasticities(
 
 def parameter_elasticities(
     model: Model,
-    parameters: list[str],
-    concs: dict[str, float],
-    mc_parameters: pd.DataFrame,
     *,
+    mc_to_scan: pd.DataFrame,
+    to_scan: list[str],
+    variables: dict[str, float],
     time: float = 0,
     cache: Cache | None = None,
     max_workers: int | None = None,
@@ -439,9 +451,9 @@ def parameter_elasticities(
    Examples:
        >>> parameter_elasticities(
        ...     model,
-       ...
+       ...     parameters=["p1", "p2"],
        ...     concs={"x1": 1, "x2": 2},
-       ...
+       ...     mc_to_scan=mc_to_scan
        ... )
              p1    p2
        0 v1  0.0   0.0
@@ -453,9 +465,9 @@ def parameter_elasticities(
 
     Args:
         model: The model to analyze
-
-
-
+        to_scan: List of parameters for which to calculate elasticities
+        variables: Custom variable values. Defaults to initial conditions.
+        mc_to_scan: DataFrame containing Monte Carlo parameter sets
         time: Time point for the analysis
         cache: Cache object for storing results
         max_workers: Maximum number of workers for parallel processing
@@ -468,18 +480,18 @@ def parameter_elasticities(
     """
     res = parallelise(
         partial(
-
+            _update_parameters_and_initial_conditions,
             fn=partial(
                 mca.parameter_elasticities,
-
-
+                to_scan=to_scan,
+                variables=variables,
                 time=time,
                 displacement=displacement,
                 normalized=normalized,
             ),
             model=model,
         ),
-        inputs=list(
+        inputs=list(mc_to_scan.iterrows()),
         cache=cache,
         max_workers=max_workers,
     )
@@ -488,10 +500,10 @@ def parameter_elasticities(
 
 def response_coefficients(
     model: Model,
-    parameters: list[str],
-    mc_parameters: pd.DataFrame,
     *,
-
+    mc_to_scan: pd.DataFrame,
+    to_scan: list[str],
+    variables: dict[str, float] | None = None,
     cache: Cache | None = None,
     normalized: bool = True,
     displacement: float = 1e-4,
@@ -506,7 +518,7 @@ def response_coefficients(
        >>> response_coefficients(
        ...     model,
        ...     parameters=["vmax1", "vmax2"],
-       ...
+       ...     mc_to_scan=mc_to_scan,
        ... ).variables
                   x1    x2
        0 vmax_1  0.01  0.01
@@ -516,9 +528,9 @@ def response_coefficients(
 
     Args:
         model: The model to analyze
-
-
-
+        mc_to_scan: DataFrame containing Monte Carlo parameter sets
+        to_scan: List of parameters for which to calculate elasticities
+        variables: Custom variable values. Defaults to initial conditions.
         cache: Cache object for storing results
         normalized: Whether to use normalized elasticities
         displacement: Displacement for finite difference calculations
@@ -531,13 +543,15 @@ def response_coefficients(
        ResponseCoefficientsByPars: Object containing the response coefficients for the given parameters
 
    """
+    if variables is not None:
+        model.update_variables(variables)
+
    res = parallelise(
        fn=partial(
-
+            _update_parameters_and_initial_conditions,
            fn=partial(
                mca.response_coefficients,
-
-                y0=y0,
+                to_scan=to_scan,
                normalized=normalized,
                displacement=displacement,
                rel_norm=rel_norm,
@@ -547,7 +561,7 @@ def response_coefficients(
            ),
            model=model,
        ),
-        inputs=list(
+        inputs=list(mc_to_scan.iterrows()),
        cache=cache,
        max_workers=max_workers,
    )
@@ -557,5 +571,5 @@ def response_coefficients(
            pd.DataFrame, pd.concat({k: v.variables for k, v in res.items()})
        ),
        fluxes=cast(pd.DataFrame, pd.concat({k: v.fluxes for k, v in res.items()})),
-        parameters=
+        parameters=mc_to_scan,
    )
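The elasticity and response-coefficient scans in mc.py follow the same pattern: mc_to_scan carries the Monte-Carlo parameter sets, to_scan names what to perturb, and variables optionally overrides the state at which fluxes are evaluated. A hedged sketch of the call shape (the model object and the names x1, x2, k1 are placeholders):

    import pandas as pd
    from mxlpy import mc

    mc_to_scan = pd.DataFrame({"k1": [0.8, 1.0, 1.2]})  # placeholder Monte-Carlo draws

    eps = mc.variable_elasticities(
        model,
        mc_to_scan=mc_to_scan,
        to_scan=["x1", "x2"],
        variables={"x1": 1.0, "x2": 2.0},
    )
    rc = mc.response_coefficients(model, mc_to_scan=mc_to_scan, to_scan=["k1"])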
mxlpy/mca.py
CHANGED
@@ -38,6 +38,73 @@ if TYPE_CHECKING:
     from mxlpy.types import IntegratorType
 
 
+def _response_coefficient_worker(
+    parameter: str,
+    *,
+    model: Model,
+    y0: dict[str, float] | None,
+    normalized: bool,
+    rel_norm: bool,
+    displacement: float = 1e-4,
+    integrator: IntegratorType,
+) -> tuple[pd.Series, pd.Series]:
+    """Calculate response coefficients for a single parameter.
+
+    Internal helper function that computes concentration and flux response
+    coefficients using finite differences. The function:
+    1. Perturbs the parameter up and down by a small displacement
+    2. Calculates steady states for each perturbation
+    3. Computes response coefficients from the differences
+    4. Optionally normalizes the results
+
+    Args:
+        parameter: Name of the parameter to analyze
+        model: Metabolic model instance
+        y0: Initial conditions as a dictionary {species: value}
+        normalized: Whether to normalize the coefficients
+        rel_norm: Whether to use relative normalization
+        displacement: Relative perturbation size (default: 1e-4)
+        integrator: Integrator function to use for steady state calculation
+
+    Returns:
+        tuple[pd.Series, pd.Series]: Tuple containing:
+            - Series of concentration response coefficients
+            - Series of flux response coefficients
+
+    """
+    old = model.parameters[parameter]
+    if y0 is not None:
+        model.update_variables(y0)
+
+    model.update_parameters({parameter: old * (1 + displacement)})
+    upper = _steady_state_worker(
+        model,
+        rel_norm=rel_norm,
+        integrator=integrator,
+    )
+
+    model.update_parameters({parameter: old * (1 - displacement)})
+    lower = _steady_state_worker(
+        model,
+        rel_norm=rel_norm,
+        integrator=integrator,
+    )
+
+    conc_resp = (upper.variables - lower.variables) / (2 * displacement * old)
+    flux_resp = (upper.fluxes - lower.fluxes) / (2 * displacement * old)
+    # Reset
+    model.update_parameters({parameter: old})
+    if normalized:
+        norm = _steady_state_worker(
+            model,
+            rel_norm=rel_norm,
+            integrator=integrator,
+        )
+        conc_resp *= old / norm.variables
+        flux_resp *= old / norm.fluxes
+    return conc_resp, flux_resp
+
+
 ###############################################################################
 # Non-steady state
 ###############################################################################
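The worker added above is a central finite difference: the parameter is nudged to p(1 + δ) and p(1 − δ), a steady state is solved for each, and the response is (S₊ − S₋) / (2δp), rescaled by p/S when normalized. A standalone sketch of the same numerical idea, independent of the mxlpy classes (the scalar steady_state callable is a stand-in for the package's steady-state solve):

    from typing import Callable

    def central_response(
        steady_state: Callable[[float], float],  # stand-in: parameter value -> steady-state observable
        p: float,
        displacement: float = 1e-4,
        normalized: bool = True,
    ) -> float:
        """Central-difference response coefficient, mirroring _response_coefficient_worker above."""
        upper = steady_state(p * (1 + displacement))
        lower = steady_state(p * (1 - displacement))
        resp = (upper - lower) / (2 * displacement * p)
        if normalized:
            resp *= p / steady_state(p)  # dimensionless (scaled) coefficient
        return resp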
@@ -46,8 +113,8 @@ if TYPE_CHECKING:
 def variable_elasticities(
     model: Model,
     *,
-
-    variables:
+    to_scan: list[str] | None = None,
+    variables: dict[str, float] | None = None,
     time: float = 0,
     normalized: bool = True,
     displacement: float = 1e-4,
@@ -67,8 +134,8 @@ def variable_elasticities(
 
     Args:
         model: Metabolic model instance
-
-        variables:
+        to_scan: List of variables to analyze. Uses all if None
+        variables: Custom variable values. Defaults to initial conditions.
         time: Time point for evaluation
         normalized: Whether to normalize coefficients
         displacement: Relative perturbation size
@@ -77,23 +144,23 @@ def variable_elasticities(
        DataFrame with elasticity coefficients (reactions x metabolites)
 
    """
-
-
+    variables = model.get_initial_conditions() if variables is None else variables
+    to_scan = model.get_variable_names() if to_scan is None else to_scan
    elasticities = {}
 
-    for var in
-        old =
+    for var in to_scan:
+        old = variables[var]
 
        upper = model.get_fluxes(
-            variables=
+            variables=variables | {var: old * (1 + displacement)}, time=time
        )
        lower = model.get_fluxes(
-            variables=
+            variables=variables | {var: old * (1 - displacement)}, time=time
        )
 
        elasticity_coef = (upper - lower) / (2 * displacement * old)
        if normalized:
-            elasticity_coef *= old / model.get_fluxes(variables=
+            elasticity_coef *= old / model.get_fluxes(variables=variables, time=time)
        elasticities[var] = elasticity_coef
 
    return pd.DataFrame(data=elasticities)
@@ -101,10 +168,10 @@ def variable_elasticities(
 
 def parameter_elasticities(
     model: Model,
-    parameters: list[str] | None = None,
-    concs: dict[str, float] | None = None,
-    time: float = 0,
     *,
+    to_scan: list[str] | None = None,
+    variables: dict[str, float] | None = None,
+    time: float = 0,
     normalized: bool = True,
     displacement: float = 1e-4,
 ) -> pd.DataFrame:
@@ -119,8 +186,8 @@ def parameter_elasticities(
 
     Args:
         model: Metabolic model instance
-
-
+        to_scan: List of parameters to analyze. Uses all if None
+        variables: Custom variable values. Defaults to initial conditions.
         time: Time point for evaluation
         normalized: Whether to normalize coefficients
         displacement: Relative perturbation size
@@ -129,26 +196,26 @@ def parameter_elasticities(
        DataFrame with parameter elasticities (reactions x parameters)
 
    """
-
-
+    variables = model.get_initial_conditions() if variables is None else variables
+    to_scan = model.get_parameter_names() if to_scan is None else to_scan
 
    elasticities = {}
 
-
-    for par in
+    variables = model.get_initial_conditions() if variables is None else variables
+    for par in to_scan:
        old = model.parameters[par]
 
        model.update_parameters({par: old * (1 + displacement)})
-        upper = model.get_fluxes(variables=
+        upper = model.get_fluxes(variables=variables, time=time)
 
        model.update_parameters({par: old * (1 - displacement)})
-        lower = model.get_fluxes(variables=
+        lower = model.get_fluxes(variables=variables, time=time)
 
        # Reset
        model.update_parameters({par: old})
        elasticity_coef = (upper - lower) / (2 * displacement * old)
        if normalized:
-            elasticity_coef *= old / model.get_fluxes(variables=
+            elasticity_coef *= old / model.get_fluxes(variables=variables, time=time)
        elasticities[par] = elasticity_coef
 
    return pd.DataFrame(data=elasticities)
@@ -159,79 +226,11 @@ def parameter_elasticities(
 # ###############################################################################
 
 
-def _response_coefficient_worker(
-    parameter: str,
-    *,
-    model: Model,
-    y0: dict[str, float] | None,
-    normalized: bool,
-    rel_norm: bool,
-    displacement: float = 1e-4,
-    integrator: IntegratorType,
-) -> tuple[pd.Series, pd.Series]:
-    """Calculate response coefficients for a single parameter.
-
-    Internal helper function that computes concentration and flux response
-    coefficients using finite differences. The function:
-    1. Perturbs the parameter up and down by a small displacement
-    2. Calculates steady states for each perturbation
-    3. Computes response coefficients from the differences
-    4. Optionally normalizes the results
-
-    Args:
-        parameter: Name of the parameter to analyze
-        model: Metabolic model instance
-        y0: Initial conditions as a dictionary {species: value}
-        normalized: Whether to normalize the coefficients
-        rel_norm: Whether to use relative normalization
-        displacement: Relative perturbation size (default: 1e-4)
-        integrator: Integrator function to use for steady state calculation
-
-    Returns:
-        tuple[pd.Series, pd.Series]: Tuple containing:
-            - Series of concentration response coefficients
-            - Series of flux response coefficients
-
-    """
-    old = model.parameters[parameter]
-
-    model.update_parameters({parameter: old * (1 + displacement)})
-    upper = _steady_state_worker(
-        model,
-        y0=y0,
-        rel_norm=rel_norm,
-        integrator=integrator,
-    )
-
-    model.update_parameters({parameter: old * (1 - displacement)})
-    lower = _steady_state_worker(
-        model,
-        y0=y0,
-        rel_norm=rel_norm,
-        integrator=integrator,
-    )
-
-    conc_resp = (upper.variables - lower.variables) / (2 * displacement * old)
-    flux_resp = (upper.fluxes - lower.fluxes) / (2 * displacement * old)
-    # Reset
-    model.update_parameters({parameter: old})
-    if normalized:
-        norm = _steady_state_worker(
-            model,
-            y0=y0,
-            rel_norm=rel_norm,
-            integrator=integrator,
-        )
-        conc_resp *= old / norm.variables
-        flux_resp *= old / norm.fluxes
-    return conc_resp, flux_resp
-
-
 def response_coefficients(
     model: Model,
-    parameters: list[str] | None = None,
     *,
-
+    to_scan: list[str] | None = None,
+    variables: dict[str, float] | None = None,
     normalized: bool = True,
     displacement: float = 1e-4,
     disable_tqdm: bool = False,
@@ -250,8 +249,8 @@ def response_coefficients(
 
     Args:
         model: Metabolic model instance
-
-
+        to_scan: Parameters to analyze. Uses all if None
+        variables: Custom variable values. Defaults to initial conditions.
         normalized: Whether to normalize coefficients
         displacement: Relative perturbation size
         disable_tqdm: Disable progress bar
@@ -266,19 +265,19 @@ def response_coefficients(
            - Concentration response coefficients
 
    """
-
+    to_scan = model.get_parameter_names() if to_scan is None else to_scan
 
    res = parallelise(
        partial(
            _response_coefficient_worker,
            model=model,
-            y0=
+            y0=variables,
            normalized=normalized,
            displacement=displacement,
            rel_norm=rel_norm,
            integrator=integrator,
        ),
-        inputs=list(zip(
+        inputs=list(zip(to_scan, to_scan, strict=True)),
        cache=None,
        disable_tqdm=disable_tqdm,
        parallel=parallel,
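At the mca.py level the same renaming applies to the single-model functions: to_scan selects what to perturb (defaulting to all variables or parameters when None), while variables supplies the state at which fluxes are evaluated, defaulting to the model's initial conditions. A hedged sketch of the apparent call shape (the model object and the names x1, x2, k1 are placeholders):

    from mxlpy import mca

    eps = mca.variable_elasticities(model, to_scan=["x1"], variables={"x1": 1.0, "x2": 2.0})
    pi = mca.parameter_elasticities(model)            # to_scan=None -> all parameters
    rc = mca.response_coefficients(model, to_scan=["k1"])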
mxlpy/scan.py
CHANGED
@@ -51,7 +51,7 @@ if TYPE_CHECKING:
     from mxlpy.types import Array
 
 
-def
+def _update_parameters_and_initial_conditions[T](
     pars: pd.Series,
     fn: Callable[[Model], T],
     model: Model,
@@ -67,7 +67,9 @@ def _update_parameters_and[T](
         Result of the function execution.
 
     """
-
+    pd = pars.to_dict()
+    model.update_variables({k: v for k, v in pd.items() if k in model._variables})  # noqa: SLF001
+    model.update_parameters({k: v for k, v in pd.items() if k in model._parameters})  # noqa: SLF001
     return fn(model)
 
 
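The helper above is what lets a single to_scan DataFrame mix parameter and initial-condition columns: each row is converted to a dict and split by name against model._variables and model._parameters before the worker runs. A hedged illustration of building such a frame (k1 and x1 are placeholder names for a model's parameter and variable):

    import pandas as pd

    # One scan point per row; columns matching parameter names are routed to
    # update_parameters, columns matching variable names to update_variables.
    to_scan = pd.DataFrame(
        {
            "k1": [0.1, 0.2, 0.3],  # hypothetical parameter column
            "x1": [1.0, 1.0, 2.0],  # hypothetical initial-condition column
        }
    )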
@@ -282,7 +284,6 @@ class SteadyStateWorker(Protocol):
     def __call__(
         self,
         model: Model,
-        y0: dict[str, float] | None,
         *,
         rel_norm: bool,
         integrator: IntegratorType,
@@ -297,7 +298,6 @@ class TimeCourseWorker(Protocol):
     def __call__(
         self,
         model: Model,
-        y0: dict[str, float] | None,
         time_points: Array,
         *,
         integrator: IntegratorType,
@@ -312,7 +312,6 @@ class ProtocolWorker(Protocol):
     def __call__(
         self,
         model: Model,
-        y0: dict[str, float] | None,
         protocol: pd.DataFrame,
         *,
         integrator: IntegratorType,
@@ -324,7 +323,6 @@ class ProtocolWorker(Protocol):
 
 def _steady_state_worker(
     model: Model,
-    y0: dict[str, float] | None,
     *,
     rel_norm: bool,
     integrator: IntegratorType,
@@ -343,7 +341,7 @@ def _steady_state_worker(
     """
     try:
         res = (
-            Simulator(model,
+            Simulator(model, integrator=integrator)
             .simulate_to_steady_state(rel_norm=rel_norm)
             .get_result()
         )
@@ -354,7 +352,6 @@ def _steady_state_worker(
 
 def _time_course_worker(
     model: Model,
-    y0: dict[str, float] | None,
     time_points: Array,
     integrator: IntegratorType,
 ) -> TimeCourse:
@@ -372,7 +369,7 @@ def _time_course_worker(
     """
     try:
         res = (
-            Simulator(model,
+            Simulator(model, integrator=integrator)
             .simulate_time_course(time_points=time_points)
             .get_result()
         )
@@ -387,7 +384,6 @@ def _time_course_worker(
 
 def _protocol_worker(
     model: Model,
-    y0: dict[str, float] | None,
     protocol: pd.DataFrame,
     *,
     integrator: IntegratorType = DefaultIntegrator,
@@ -408,7 +404,7 @@ def _protocol_worker(
     """
     try:
         res = (
-            Simulator(model,
+            Simulator(model, integrator=integrator)
             .simulate_over_protocol(
                 protocol=protocol,
                 time_points_per_step=time_points_per_step,
@@ -432,20 +428,20 @@ def _protocol_worker(
 
 def steady_state(
     model: Model,
-    parameters: pd.DataFrame,
-    y0: dict[str, float] | None = None,
     *,
+    to_scan: pd.DataFrame,
+    y0: dict[str, float] | None = None,
     parallel: bool = True,
     rel_norm: bool = False,
     cache: Cache | None = None,
     worker: SteadyStateWorker = _steady_state_worker,
     integrator: IntegratorType = DefaultIntegrator,
 ) -> SteadyStates:
-    """Get steady-state results over supplied
+    """Get steady-state results over supplied values.
 
     Args:
         model: Model instance to simulate.
-
+        to_scan: DataFrame containing parameter or initial values to scan.
         y0: Initial conditions as a dictionary {variable: value}.
         parallel: Whether to execute in parallel (default: True).
         rel_norm: Whether to use relative normalization (default: False).
@@ -478,39 +474,41 @@ def steady_state(
        | (2, 4) | 0.5 | 2 |
 
    """
+    if y0 is not None:
+        model.update_variables(y0)
+
    res = parallelise(
        partial(
-
+            _update_parameters_and_initial_conditions,
            fn=partial(
                worker,
-                y0=y0,
                rel_norm=rel_norm,
                integrator=integrator,
            ),
            model=model,
        ),
-        inputs=list(
+        inputs=list(to_scan.iterrows()),
        cache=cache,
        parallel=parallel,
    )
    concs = pd.DataFrame({k: v.variables.T for k, v in res.items()}).T
    fluxes = pd.DataFrame({k: v.fluxes.T for k, v in res.items()}).T
    idx = (
-        pd.Index(
-        if
-        else pd.MultiIndex.from_frame(
+        pd.Index(to_scan.iloc[:, 0])
+        if to_scan.shape[1] == 1
+        else pd.MultiIndex.from_frame(to_scan)
    )
    concs.index = idx
    fluxes.index = idx
-    return SteadyStates(variables=concs, fluxes=fluxes, parameters=
+    return SteadyStates(variables=concs, fluxes=fluxes, parameters=to_scan)
 
 
 def time_course(
     model: Model,
-
+    *,
+    to_scan: pd.DataFrame,
     time_points: Array,
     y0: dict[str, float] | None = None,
-    *,
     parallel: bool = True,
     cache: Cache | None = None,
     worker: TimeCourseWorker = _time_course_worker,
@@ -521,7 +519,7 @@ def time_course(
    Examples:
        >>> time_course(
        >>>     model,
-        >>>
+        >>>     to_scan=pd.DataFrame({"k1": [1, 1.5, 2]}),
        >>>     time_points=np.linspace(0, 1, 3)
        >>> ).variables
 
@@ -539,7 +537,7 @@ def time_course(
 
        >>> time_course(
        >>>     model,
-        >>>
+        >>>     to_scan=cartesian_product({"k1": [1, 2], "k2": [3, 4]}),
        >>>     time_points=[0.0, 0.5, 1.0],
        >>> ).variables
 
@@ -553,7 +551,7 @@ def time_course(
 
     Args:
         model: Model instance to simulate.
-
+        to_scan: DataFrame containing parameter or initial values to scan.
         time_points: Array of time points for the simulation.
         y0: Initial conditions as a dictionary {variable: value}.
         cache: Optional cache to store and retrieve results.
@@ -566,25 +564,27 @@ def time_course(
 
 
    """
+    if y0 is not None:
+        model.update_variables(y0)
+
    res = parallelise(
        partial(
-
+            _update_parameters_and_initial_conditions,
            fn=partial(
                worker,
                time_points=time_points,
-                y0=y0,
                integrator=integrator,
            ),
            model=model,
        ),
-        inputs=list(
+        inputs=list(to_scan.iterrows()),
        cache=cache,
        parallel=parallel,
    )
    concs = cast(dict, {k: v.variables for k, v in res.items()})
    fluxes = cast(dict, {k: v.fluxes for k, v in res.items()})
    return TimeCourseByPars(
-        parameters=
+        parameters=to_scan,
        variables=pd.concat(concs, names=["n", "time"]),
        fluxes=pd.concat(fluxes, names=["n", "time"]),
    )
@@ -592,11 +592,11 @@ def time_course(
 
 def time_course_over_protocol(
     model: Model,
-
+    *,
+    to_scan: pd.DataFrame,
     protocol: pd.DataFrame,
     time_points_per_step: int = 10,
     y0: dict[str, float] | None = None,
-    *,
     parallel: bool = True,
     cache: Cache | None = None,
     worker: ProtocolWorker = _protocol_worker,
@@ -618,7 +618,7 @@ def time_course_over_protocol(
 
     Args:
         model: Model instance to simulate.
-
+        to_scan: DataFrame containing parameter or initial values to scan.
         protocol: Protocol to follow for the simulation.
         time_points_per_step: Number of time points per protocol step (default: 10).
         y0: Initial conditions as a dictionary {variable: value}.
@@ -631,26 +631,28 @@ def time_course_over_protocol(
        TimeCourseByPars: Protocol series results for each parameter set.
 
    """
+    if y0 is not None:
+        model.update_variables(y0)
+
    res = parallelise(
        partial(
-
+            _update_parameters_and_initial_conditions,
            fn=partial(
                worker,
                protocol=protocol,
-                y0=y0,
                time_points_per_step=time_points_per_step,
                integrator=integrator,
            ),
            model=model,
        ),
-        inputs=list(
+        inputs=list(to_scan.iterrows()),
        cache=cache,
        parallel=parallel,
    )
    concs = cast(dict, {k: v.variables for k, v in res.items()})
    fluxes = cast(dict, {k: v.fluxes for k, v in res.items()})
    return ProtocolByPars(
-        parameters=
+        parameters=to_scan,
        protocol=protocol,
        variables=pd.concat(concs, names=["n", "time"]),
        fluxes=pd.concat(fluxes, names=["n", "time"]),
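As the scan.py hunks show, steady_state and the time-course variants now take the scan values as a keyword-only to_scan DataFrame, apply y0 via model.update_variables, and index results with a plain Index for a single scanned column or a MultiIndex for several. A hedged usage sketch (the model object and the names k1, k2 are placeholders):

    import numpy as np
    import pandas as pd
    from mxlpy import scan

    single = scan.steady_state(model, to_scan=pd.DataFrame({"k1": np.linspace(0, 1, 3)}))
    # single.variables is indexed by the k1 values (pd.Index)

    grid = scan.steady_state(
        model,
        to_scan=pd.DataFrame({"k1": [1, 1, 2, 2], "k2": [3, 4, 3, 4]}),
    )
    # grid.variables is indexed by a (k1, k2) MultiIndex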
mxlpy/types.py
CHANGED
@@ -354,7 +354,7 @@ class McSteadyStates:
     variables: pd.DataFrame
     fluxes: pd.DataFrame
     parameters: pd.DataFrame
-
+    mc_to_scan: pd.DataFrame
 
     def __iter__(self) -> Iterator[pd.DataFrame]:
         """Iterate over the concentration and flux steady states."""
{mxlpy-0.15.0.dist-info → mxlpy-0.16.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: mxlpy
-Version: 0.15.0
+Version: 0.16.0
 Summary: A package to build metabolic models
 Author-email: Marvin van Aalst <marvin.vanaalst@gmail.com>
 Maintainer-email: Marvin van Aalst <marvin.vanaalst@gmail.com>
@@ -60,7 +60,9 @@ Provides-Extra: torch
 Requires-Dist: torch>=2.5.1; extra == 'torch'
 Description-Content-Type: text/markdown
 
-<
+<p align="center">
+    <img src="docs/assets/logo-diagram.png" width="600px" alt='mxlpy-logo'>
+</p>
 
 # mxlpy
 
@@ -70,7 +72,7 @@ Description-Content-Type: text/markdown
 
 [](https://github.com/astral-sh/ruff)
 [](https://github.com/PyCQA/bandit)
-[](https://pepy.tech/
+[](https://pepy.tech/projects/mxlpy)
 
 [docs-badge]: https://img.shields.io/badge/docs-main-green.svg?style=flat-square
 [docs]: https://computational-biology-aachen.github.io/mxlpy/
{mxlpy-0.15.0.dist-info → mxlpy-0.16.0.dist-info}/RECORD
CHANGED
@@ -5,8 +5,8 @@ mxlpy/fns.py,sha256=ct_RFj9koW8vXHyr27GnbZUHUS_zfs4rDysybuFiOaU,4599
 mxlpy/identify.py,sha256=af52SCG4nlY9sSw22goaIheuvXR09QYK4ksCT24QHWI,1946
 mxlpy/label_map.py,sha256=urv-QTb0MUEKjwWvKtJSB8H2kvhLn1EKfRIH7awQQ8Y,17769
 mxlpy/linear_label_map.py,sha256=DqzN_akacPccZwzYAR3ANIdzAU_GU6Xe6gWV9DHAAWU,10282
-mxlpy/mc.py,sha256=
-mxlpy/mca.py,sha256=
+mxlpy/mc.py,sha256=oYd8a3ycyZLyh-ZxTYUjDRNfsCcwSQaLWssxv0yC5Cc,17399
+mxlpy/mca.py,sha256=1_qBX9lHI6svXSebtwvMldAMwPlLqMylAPmxMbMQdWw,9359
 mxlpy/model.py,sha256=qzol8nDSbM3HdESh50c4UFjn6Pw5JwcvhQ5AyKnbyvc,57576
 mxlpy/npe.py,sha256=oiRLA43-qf-AcS2KpQfJIOt7-Ev9Aj5sF6TMq9bJn84,8747
 mxlpy/parallel.py,sha256=kX4Td5YoovDwZp6kX_3cfO6QtHSS9ieJ0bMZiKs3Xv8,5002
@@ -15,13 +15,13 @@ mxlpy/paths.py,sha256=TK2wO4N9lG-UV1JGfeB64q48JVDbwqIUj63rl55MKuQ,1022
 mxlpy/plot.py,sha256=4uu-6d8LH-GWX-sG_TlSpkSsnikv1DLTtnjJzA7nuRA,24670
 mxlpy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 mxlpy/report.py,sha256=h7dhcBzPFydLPxdsEXokzDf7Ce4PirXMsvLqlDZLSWM,7181
-mxlpy/scan.py,sha256
+mxlpy/scan.py,sha256=FBPpjv66v4IWZ5OwG_EWUdrucLWR9gq_XEsLFC-otaw,18969
 mxlpy/simulator.py,sha256=9Ne4P5Jrwgx4oAlljPvCqSCCy98_5Lv1B87y1AkbI4c,21041
-mxlpy/types.py,sha256=
+mxlpy/types.py,sha256=GbdyzEDTN8QfUH6-XXdNgf_TzqIXaYvcZGxaXc5kVio,14509
 mxlpy/experimental/__init__.py,sha256=kZTE-92OErpHzNRqmgSQYH4CGXrogGJ5EL35XGZQ81M,206
 mxlpy/experimental/diff.py,sha256=4bztagJzFMsQJM7dlun_kv-WrWssM8CIw7gcL63hFf8,8952
 mxlpy/integrators/__init__.py,sha256=kqmV6a0TRyLGR_XqbyAI652AfptYnXAUpqbSFg0CpP8,450
-mxlpy/integrators/int_assimulo.py,sha256=
+mxlpy/integrators/int_assimulo.py,sha256=TCBWQd558ZeRdBba1jCNsFyLBOssKvm8dXK36Aqg4_k,4817
 mxlpy/integrators/int_scipy.py,sha256=dFHlYTeb2zX97f3VuNdMJdI7WEYshF4JAIgprKKk2z4,4581
 mxlpy/meta/__init__.py,sha256=Jyy4063fZy6iT4LSwjPyEAVr4N_3xxcLc8wDBoDPyKc,278
 mxlpy/meta/codegen_latex.py,sha256=R0OJqzE7PnOCWLk52C3XWuRb-zI2eYTvV2oJZJvPsOE,13414
@@ -44,7 +44,7 @@ mxlpy/surrogates/_torch.py,sha256=E_1eDUlPSVFwROkdMDCqYwwHE-61pjNMJWotnhjzge0,58
 mxlpy/symbolic/__init__.py,sha256=3hQjCMw8-6iOxeUdfnCg8449fF_BRF2u6lCM1GPpkRY,222
 mxlpy/symbolic/strikepy.py,sha256=r6nRtckV1nxKq3i1bYYWZOkzwZ5XeKQuZM5ck44vUo0,20010
 mxlpy/symbolic/symbolic_model.py,sha256=YL9noEeP3_0DoKXwMPELtfmPuP6mgNcLIJgDRCkyB7A,2434
-mxlpy-0.
-mxlpy-0.
-mxlpy-0.
-mxlpy-0.
+mxlpy-0.16.0.dist-info/METADATA,sha256=ySMK4udu6wgaUpG7Wn0sa-XYUkaVz4u19C5PjMEM5p0,4551
+mxlpy-0.16.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+mxlpy-0.16.0.dist-info/licenses/LICENSE,sha256=bEzjyjy1stQhfRDVaVHa3xV1x-V8emwdlbMvYO8Zo84,35073
+mxlpy-0.16.0.dist-info/RECORD,,
{mxlpy-0.15.0.dist-info → mxlpy-0.16.0.dist-info}/WHEEL
File without changes
{mxlpy-0.15.0.dist-info → mxlpy-0.16.0.dist-info}/licenses/LICENSE
File without changes