guts-base 1.0.5-py3-none-any.whl → 1.0.7-py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.

Note: this version of guts-base has been flagged as potentially problematic.

guts_base/__init__.py CHANGED
@@ -4,7 +4,7 @@ from . import data
 from . import prob
 from . import plot
 
-__version__ = "1.0.5"
+__version__ = "1.0.7"
 
 from .sim import (
     GutsBase,
guts_base/data/openguts.py CHANGED
@@ -251,6 +251,8 @@ def import_data_to_database(path, database, preprocessing: Optional[Callable] =
 
     experiment.to_database(database=database)
 
+    print("Import to database successful.")
+
 
 def create_database_and_import_data_main(datasets_path, database_path, preprocessing=None, preprocessing_out=None):
     print("\n")
guts_base/data/time_of_death.py CHANGED
@@ -225,7 +225,7 @@ def make_openguts_observation_table(
     experiment_start = get_unique_value(df["time_start_experiment"])
     experiment_end = get_unique_value(df["time_end_experiment"])
 
-    time = pd.date_range(experiment_start, experiment_end, freq=observation_schedule)
+    times_nominal = pd.date_range(experiment_start, experiment_end, freq=observation_schedule)
     timecol_name = f"time [{observation_schedule.lower()}]"
 
 
@@ -234,13 +234,17 @@ def make_openguts_observation_table(
     # calculate survival time
     df[timecol_name] = df["time_death"] - df["time_start_experiment"]
 
-    time_remainder = df[timecol_name] % pd.Timedelta(1, observation_schedule)
-    if (time_remainder > pd.Timedelta(0)).any():
-        raise ValueError(
-            "Observations should be entered at the same time as the experiment start "+
-            "df['time_death] - df['time_experiment_start'] should be a multiple of "+
-            f"the time resolution of the observation schedule. Here: 1{observation_schedule}"
-        )
+    # this seems to have been necessary, because reindexing removed times smaller than
+    # the observation_schedule interval. This is now resolved by concatenating true
+    # times and nominal times
+    # TODO: remove this commented block when there appear no more errors
+    # time_remainder = df[timecol_name] % pd.Timedelta(1, observation_schedule)
+    # if (time_remainder > pd.Timedelta(0)).any():
+    #     raise ValueError(
+    #         "Observations should be entered at the same time as the experiment start "+
+    #         "df['time_death] - df['time_experiment_start'] should be a multiple of "+
+    #         f"the time resolution of the observation schedule. Here: 1{observation_schedule}"
+    #     )
 
     if observation == "censored":
         # sum IDs that were marked as censored at time t
@@ -258,8 +262,12 @@ def make_openguts_observation_table(
     # df to wide frame
     df_wide = long_to_wide(df_long.reset_index(), id_columns, timecol_name, observation)
 
+    # get a time vector that contains all nominal observation times and also actually
+    # occurred days
+    observation_times = np.unique(np.concatenate([df_wide.index, times_nominal-experiment_start]))
+
     # reindex wide dataframe on time
-    df_wide = df_wide.reindex(index=time-experiment_start, method=None)
+    df_wide = df_wide.reindex(index=observation_times, method=None)
    df_wide.index = df_wide.index.set_names(timecol_name)
    df_wide = df_wide.fillna(0)
 
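The effect of this change is easiest to see on a toy frame (made-up survival counts, not package data): reindexing on the nominal schedule alone discards rows recorded between schedule ticks, while reindexing on the union of observed and nominal times keeps them.

import numpy as np
import pandas as pd

# survival counts recorded at 0d, 12h and 2d; "12h" is off the daily schedule
df_wide = pd.DataFrame(
    {"survival": [10, 9, 8]},
    index=pd.to_timedelta(["0d", "12h", "2d"]),
)
times_nominal = pd.timedelta_range("0d", "2d", freq="d")

# pre-1.0.7 behaviour: reindexing on the nominal schedule drops the 12h row
old = df_wide.reindex(index=times_nominal, method=None)

# 1.0.7 behaviour: the union of observed and nominal times keeps it
observation_times = np.unique(np.concatenate([df_wide.index, times_nominal]))
new = df_wide.reindex(index=observation_times, method=None).fillna(0)

print(len(old), len(new))  # 3 vs. 4 rows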
@@ -299,8 +307,10 @@ class TimeOfDeathIO:
         self.data = self.from_file()
 
 def main(file: str, sheet: str, out:str, intervention_columns: List[str],
+         extra_observation_columns: List[str] = [],
         observation_schedule="d", rect_interpolate=False):
     intervention_columns = clean_column_names(list(intervention_columns))
+    extra_observation_columns = clean_column_names(list(extra_observation_columns))
     processed_file = f"{out}/openguts_{os.path.basename(file)}"
 
     print("\n")
@@ -454,6 +464,17 @@ def main(file: str, sheet: str, out:str, intervention_columns: List[str],
         observation_schedule=observation_schedule,
     )
 
+    _extra_observations = []
+    for eob in extra_observation_columns:
+        ob_wide = make_openguts_observation_table(
+            data,
+            observation=eob,
+            observation_schedule=observation_schedule
+        )
+        _extra_observations.append(ob_wide)
+        excel_writer(ob_wide, file=processed_file, sheet=eob)
+
+
     deaths = lethality - censored
 
     # excel export
@@ -537,6 +558,7 @@ def main(file: str, sheet: str, out:str, intervention_columns: List[str],
     else:
         warnings.warn("No metadata found in sheets 'meta' or 'Info'.")
 
+    return processed_file
 
 @click.command()
 @click.option("--file", "-f", help="Path to the xlsx file")
@@ -544,12 +566,14 @@ def main(file: str, sheet: str, out:str, intervention_columns: List[str],
 @click.option("--out", "-o", help="Output directory", default="processed_data")
 @click.option("--observation_schedule", help="Schedule of the observations: d - daily, h - hourly", default="d")
 @click.option("--intervention_columns", "-c", multiple=True, type=str, help="Names of the columns that carry the exposure information")
-def time_of_death_to_openguts(file, sheet, out, observation_schedule, intervention_columns):
-    main(
+@click.option("--extra_observation_columns", "-e", multiple=True, type=str, default=[], help="Names of the columns that carry additional observations beside time-of-death and censoring")
+def time_of_death_to_openguts(file, sheet, out, observation_schedule, intervention_columns, extra_observation_columns):
+    _ = main(
         file=file,
         sheet=sheet,
         out=out,
         intervention_columns=intervention_columns,
+        extra_observation_columns=extra_observation_columns,
         observation_schedule=observation_schedule
     )
 
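The extended command line can be exercised without a shell via click's test runner; a hypothetical sketch, assuming the command is importable from guts_base.data.time_of_death and using invented file and column names:

from click.testing import CliRunner

# hypothetical import path for the click command shown above
from guts_base.data.time_of_death import time_of_death_to_openguts

runner = CliRunner()
result = runner.invoke(time_of_death_to_openguts, [
    "--file", "tod_study.xlsx",                    # invented input file
    "--sheet", "Sheet1",
    "--out", "processed_data",
    "--intervention_columns", "concentration",
    "--extra_observation_columns", "body_length",  # new -e option in 1.0.7
])
print(result.exit_code, result.output)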
guts_base/sim/base.py CHANGED
@@ -49,6 +49,10 @@ class GutsBase(SimulationBase):
     results_interpolation: Tuple[float,float,int] = (np.nan, np.nan, 100)
     _skip_data_processing: bool = False
     ecx_mode: Literal["mean", "draws"] = "mean"
+    exposure_scenarios = {
+        "acute_1day": {"start": 0.0, "end": 1.0},
+        "chronic": {"start": 0.0, "end": None},
+    }
 
     def initialize(self, input: Optional[Dict] = None):
         """Initiaization goes through a couple of steps:
@@ -101,7 +105,12 @@ class GutsBase(SimulationBase):
         self.unit_time = self.config.simulation.unit_time # type: ignore
 
         if hasattr(self.config.simulation, "skip_data_processing"):
-            self._skip_data_processing = bool(self.config.simulation.skip_data_processing) # type: ignore
+            self._skip_data_processing = not (
+                self.config.simulation.skip_data_processing == "False" or
+                self.config.simulation.skip_data_processing == "false" or # type: ignore
+                self.config.simulation.skip_data_processing == "" or # type: ignore
+                self.config.simulation.skip_data_processing == 0 # type: ignore
+            )
 
         if hasattr(self.config.simulation, "results_interpolation"):
            results_interpolation_string = string_to_list(self.config.simulation.results_interpolation)
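The rationale for the expanded check: skip_data_processing may arrive from a config file as a string, and bool() of any non-empty string is True, so bool("False") silently enables the flag. A standalone sketch of this string-aware coercion (illustrative only, not the package's API):

def parse_flag(value) -> bool:
    """Treat "False", "false", "" and 0 as falsy; everything else as truthy."""
    return not (value == "False" or value == "false" or value == "" or value == 0)

assert bool("False") is True          # the pitfall with plain bool()
assert parse_flag("False") is False   # string-aware parsing
assert parse_flag("true") is True
assert parse_flag(0) is False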
@@ -249,8 +258,9 @@ class GutsBase(SimulationBase):
             observations=self.observations
         )
 
-        if "exposure" in self.observations:
-            self.config.data_structure.exposure.observed=False
+        if "exposure" not in self.observations:
+            self.observations["exposure"] = self.observations[self.config.simulation.substance]
+            self.config.data_structure.exposure.observed=False
 
     def _convert_exposure_units(self):
         """
@@ -418,8 +428,6 @@ class GutsBase(SimulationBase):
 
         self.dispatch_constructor()
         _ = self._prob.posterior_predictions(self, self.inferer.idata) # type: ignore
-        self.inferer.store_results(output=f"{self.output_path}/numpyro_posterior_interp.nc") # type: ignore
-        self.logger.info("Recomputed posterior and storing in `numpyro_posterior_interp.nc`")
 
 
     def prior_predictive_checks(self, **plot_kwargs):
@@ -430,9 +438,12 @@ class GutsBase(SimulationBase):
     def posterior_predictive_checks(self, **plot_kwargs):
         super().posterior_predictive_checks(**plot_kwargs)
 
-        self.recompute_posterior()
+        sim_copy = self.copy()
+        sim_copy.recompute_posterior()
         # TODO: Include posterior_predictive group once the survival predictions are correctly working
-        self._plot.plot_posterior_predictions(self, data_vars=["survival"], groups=["posterior_model_fits"])
+        sim_copy._plot.plot_posterior_predictions(
+            sim_copy, data_vars=["survival"], groups=["posterior_model_fits"]
+        )
 
 
     def plot(self, results):
@@ -479,21 +490,26 @@ class GutsBase(SimulationBase):
         for coord in exposure_coordinates:
             concentrations = np.where(coord == exposure_coordinates, 1.0, 0.0)
 
-            exposure_dict = {
-                coord: ExposureDataDict(start=0, end=1, concentration=conc)
-                for coord, conc in zip(exposure_coordinates, concentrations)
-            }
-
-            scenario = design_exposure_scenario(
-                exposures=exposure_dict,
-                t_max=time_max,
-                dt=1/24,
-                exposure_dimension=exposure_dimension
-            )
+            for _name, _expo_scenario in self.exposure_scenarios.items():
+                exposure_dict = {
+                    coord: ExposureDataDict(
+                        start=_expo_scenario["start"],
+                        end=_expo_scenario["end"],
+                        concentration=conc
+                    )
+                    for coord, conc in zip(exposure_coordinates, concentrations)
+                }
+
+                scenario = design_exposure_scenario(
+                    exposures=exposure_dict,
+                    t_max=time_max,
+                    dt=1/24,
+                    exposure_dimension=exposure_dimension
+                )
 
-            scenarios.update({
-                f"1day_exposure_{coord}": scenario
-            })
+                scenarios.update({
+                    f"1day_exposure_{coord}": scenario
+                })
 
         return scenarios
 
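Because exposure_scenarios is a plain class attribute, the exposure windows iterated above can presumably be customized by subclassing; a sketch with an invented scenario name (the start/end semantics are taken from the defaults in the diff):

from guts_base import GutsBase  # assumed public import path

class MyGuts(GutsBase):
    # start/end in simulation time units; end=None appears to denote an
    # open-ended (chronic) exposure window
    exposure_scenarios = {
        "acute_1day": {"start": 0.0, "end": 1.0},
        "pulse_day3": {"start": 2.0, "end": 3.0},  # hypothetical extra scenario
        "chronic": {"start": 0.0, "end": None},
    }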
@@ -802,27 +818,19 @@ class GutsBase(SimulationBase):
 
         self.dispatch_constructor()
 
-    def export(self, directory: Optional[str] = None):
-        self.config.simulation.skip_data_processing = False
-        super().export(directory=directory)
-
-    def copy(self):
-        """Creates a copy of a SimulationBase object by deepcopying all loose references
-        TODO: If this works out well integrate into pymob. I have the feeling there will
-        still be some problems down the line.
-        """
-        with warnings.catch_warnings(action="ignore"):
-            # create the tempdir in the output path, because on the remote cluster
-            # the default temporary directory may not have enough space. Using the output
-            # path here resolves any path issues.
-            tmp_basedir = os.path.join(self.output_path, "tmp")
-            os.makedirs(tmp_basedir, exist_ok=True)
-            with tempfile.TemporaryDirectory(dir=tmp_basedir) as name:
-                self.export(directory=name)
-                print(f"Exported files ({name}):", os.listdir(name))
-                sim_copy = type(self).from_directory(name)
-
-        return sim_copy
+    def export(self, directory: Optional[str] = None, mode="export", skip_data_processing=True):
+        self.config.simulation.skip_data_processing = skip_data_processing
+        super().export(directory=directory, mode=mode)
+
+    def export_to_scenario(self, scenario, force=False):
+        """Exports a case study as a new scenario for running inference"""
+        self.config.case_study.scenario = scenario
+        self.config.case_study.data = None
+        self.config.case_study.output = None
+        self.config.case_study.scenario_path_override = None
+        self.config.simulation.skip_data_processing = True
+        self.save_observations(filename=f"observations_{scenario}.nc", force=force)
+        self.config.save(force=force)
 
     @staticmethod
     def _condition_posterior(
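A hedged usage sketch for the new export_to_scenario helper; the import path, wrapper function, and scenario name are invented for illustration, and only the method call itself comes from the diff:

from guts_base import GutsBase  # assumed public import path

def publish_scenario(sim: GutsBase, name: str = "ringtest_a") -> None:
    # Per the method body above: resets the data/output/scenario-path overrides,
    # flags the scenario to skip raw data processing, and writes
    # observations_<name>.nc together with the updated config.
    sim.export_to_scenario(scenario=name, force=True)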
guts_base/sim/report.py CHANGED
@@ -1,12 +1,15 @@
+from functools import partial
 import os
 import itertools as it
-from typing import List, Dict
+from typing import List, Dict, Literal, Optional, Union
 import numpy as np
 import pandas as pd
 import xarray as xr
+import arviz as az
 
 from pymob import SimulationBase
 from pymob.sim.report import Report, reporting
+from pymob.inference.analysis import round_to_sigfig, format_parameter
 
 from guts_base.plot import plot_survival_multipanel, plot_exposure_multipanel
 from guts_base.sim.ecx import ECxEstimator
@@ -17,6 +20,12 @@ class GutsReport(Report):
     ecx_draws: int = 250
     ecx_force_draws: bool = False
     set_background_mortality_to_zero = True
+    table_parameter_stat_focus = "mean"
+    units = xr.Dataset({
+        "metric": ["unit"],
+        "k_d": ("metric", ["1/t"])
+    })
+
 
     def additional_reports(self, sim: "SimulationBase"):
         super().additional_reports(sim=sim)
@@ -113,6 +122,164 @@ class GutsReport(Report):
         return out
 
 
+    @reporting
+    def table_parameter_estimates(self, posterior, indices):
+
+        if self.rc.table_parameter_estimates_with_batch_dim_vars:
+            var_names = {
+                k: k for k, v in self.config.model_parameters.free.items()
+            }
+        else:
+            var_names = {
+                k: k for k, v in self.config.model_parameters.free.items()
+                if self.config.simulation.batch_dimension not in v.dims
+            }
+
+        var_names.update(self.rc.table_parameter_estimates_override_names)
+
+        if len(self.rc.table_parameter_estimates_exclude_vars) > 0:
+            self._write(f"Excluding parameters: {self.rc.table_parameter_estimates_exclude_vars} for meaningful visualization")
+
+            var_names = {
+                k: k for k, v in var_names.items()
+                if k not in self.rc.table_parameter_estimates_exclude_vars
+            }
+
+        tab_report = create_table(
+            posterior=posterior,
+            vars=var_names,
+            error_metric=self.rc.table_parameter_estimates_error_metric,
+            units=self.units,
+            significant_figures=self.rc.table_parameter_estimates_significant_figures,
+            nesting_dimension=indices.keys(),
+            parameters_as_rows=self.rc.table_parameter_estimates_parameters_as_rows,
+        )
+
+        # rewrite table in the desired output format
+        tab = create_table(
+            posterior=posterior,
+            vars=var_names,
+            error_metric=self.rc.table_parameter_estimates_error_metric,
+            units=self.units,
+            significant_figures=self.rc.table_parameter_estimates_significant_figures,
+            fmt=self.rc.table_parameter_estimates_format,
+            nesting_dimension=indices.keys(),
+            parameters_as_rows=self.rc.table_parameter_estimates_parameters_as_rows,
+        )
+
+        self._write_table(tab=tab, tab_report=tab_report, label_insert="Parameter estimates")
+
+
+def create_table(
+    posterior,
+    error_metric: Literal["hdi","sd"] = "hdi",
+    vars: Dict = {},
+    nesting_dimension: Optional[Union[List,str]] = None,
+    units: xr.Dataset = xr.Dataset(),
+    fmt: Literal["csv", "tsv", "latex"] = "csv",
+    significant_figures: int = 3,
+    parameters_as_rows: bool = True,
+) -> pd.DataFrame:
+    """The function is not ready to deal with any nesting dimensionality
+    and currently expects the 2-D case
+    """
+    tab = az.summary(
+        posterior, var_names=list(vars.keys()),
+        fmt="xarray", kind="stats", stat_focus="mean",
+        hdi_prob=0.94
+    )
+
+    tab = tab.rename(vars)
+
+    _units = flatten_coords(
+        dataset=create_units(dataset=tab, defined_units=units),
+        keep_dims=["metric"]
+    )
+    tab = flatten_coords(dataset=tab, keep_dims=["metric"])
+
+    tab = tab.apply(np.vectorize(
+        partial(round_to_sigfig, sig_fig=significant_figures)
+    ))
+
+
+    if error_metric == "sd":
+        arrays = []
+        for _, data_var in tab.data_vars.items():
+            par_formatted = data_var.sel(metric=["mean", "sd"])\
+                .astype(str).str\
+                .join("metric", sep=" ± ")
+            arrays.append(par_formatted)
+
+
+        table = xr.combine_by_coords(arrays)
+        table = table.assign_coords(metric="mean ± std").expand_dims("metric")
+        table = table.to_dataframe().T
+
+    elif error_metric == "hdi":
+        stacked_tab = tab.sel(metric=["mean", "hdi_3%", "hdi_97%"])\
+            .assign_coords(metric=["mean", "hdi 3%", "hdi 97%"])
+        table = stacked_tab.to_dataframe().T
+
+    else:
+        raise NotImplementedError("Must use one of 'sd' or 'hdi'")
+
+
+    if fmt == "latex":
+        table.columns.names = [c.replace('_',' ') for c in table.columns.names]
+        table.index = [format_parameter(i) for i in list(table.index)]
+        table = table.rename(
+            columns={"hdi 3%": "hdi 3\\%", "hdi 97%": "hdi 97\\%"}
+        )
+    else:
+        pass
+
+    table["unit"] = _units.to_pandas().T
+
+
+    if parameters_as_rows:
+        return table
+    else:
+        return table.T
+
+def flatten_coords(dataset: xr.Dataset, keep_dims):
+    """flattens extra coordinates beside the keep_dim dimension for all data variables
+    producing a array with harmonized dimensions
+    """
+    ds = dataset.copy()
+    for var_name, data_var in ds.data_vars.items():
+        extra_coords = [k for k in list(data_var.coords.keys()) if k not in keep_dims]
+        if len(extra_coords) == 0:
+            continue
+
+        data_var_ = data_var.stack(index=extra_coords)
+
+        # otherwise
+        for idx in data_var_["index"].values:
+            new_var_name = f"{var_name}[{','.join([str(e) for e in idx])}]"
+            # reset coordinates to move non-dim index coords from coordinates to the
+            # data variables and then select only the var_name from the data vars
+            new_data_var = data_var_.sel({"index": idx}).reset_coords()[var_name]
+            ds[new_var_name] = new_data_var
+
+        ds = ds.drop(var_name)
+
+    # drop any coordinates that should not be in the dataset at this stage
+    extra_coords = [k for k in list(ds.coords.keys()) if k not in keep_dims]
+    ds = ds.drop(extra_coords)
+
+    return ds
+
+def create_units(dataset: xr.Dataset, defined_units: xr.Dataset):
+    units = dataset.sel(metric=["mean"]).astype(str)
+    units = units.assign_coords({"metric": ("metric", ["unit"])})
+    for k, u in units.data_vars.items():
+        if k in defined_units:
+            units = units.assign({k: defined_units[k].astype(units[k].dtype)})
+        else:
+            units[k].values = np.full_like(u.values, "")
+
+    return units
+
 class ParameterConverter:
     def __init__(
         self,
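For orientation on the az.summary(..., fmt="xarray") call that create_table builds on: it returns an xarray.Dataset whose statistics are indexed by a "metric" dimension, which is what the flatten_coords helper and the metric=["mean", "hdi_3%", "hdi_97%"] selections above rely on. A toy example with simulated draws (invented parameter values, not package data):

import numpy as np
import arviz as az

rng = np.random.default_rng(1)
# fake posterior: 2 chains x 500 draws for a single parameter
idata = az.from_dict(posterior={"k_d": rng.normal(0.1, 0.01, size=(2, 500))})

summary = az.summary(
    idata, fmt="xarray", kind="stats", stat_focus="mean", hdi_prob=0.94
)
# statistics live along the "metric" dimension, e.g. mean, sd, hdi_3%, hdi_97%
print(summary["k_d"].sel(metric=["mean", "hdi_3%", "hdi_97%"]).values)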
guts_base-1.0.7.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: guts_base
-Version: 1.0.5
+Version: 1.0.7
 Summary: Basic GUTS model implementation in pymob
 Author-email: Florian Schunck <fluncki@protonmail.com>
 License: GNU GENERAL PUBLIC LICENSE
guts_base-1.0.7.dist-info/RECORD CHANGED
@@ -1,25 +1,25 @@
-guts_base/__init__.py,sha256=h51lqbwq425YF40jI1Cw6dT0CXtcO_WSrvxbmb1KA3U,227
+guts_base/__init__.py,sha256=W9LH6glLwockPYaR5rX90_p39kfNP3de363E83g-5es,227
 guts_base/mod.py,sha256=AzOCg1A8FP5EtVfp-66HT7G7h_wnHkenieaxTc9qCyk,5796
 guts_base/plot.py,sha256=Sr_d0sXHNajPLPelcGl72yCOEEqB7NGNWhViKYAiTng,6692
 guts_base/prob.py,sha256=ITwo5dAGMHr5xTldilHMbKU6AFsWo4_ZwbfaXh97Gew,5443
 guts_base/data/__init__.py,sha256=JBgft1DTledwvB5hRZnyGiKWv-RXo1OIpb5kJXloOmo,826
 guts_base/data/expydb.py,sha256=Kcc6CeZMl3oEelk5UBN9VEfwgNF3CzTh13ooVkufAjE,8218
 guts_base/data/generator.py,sha256=rGOZU3B0Ho8V6KtfjcAmely8lnlqNFV8cRyGboayTRc,2910
-guts_base/data/openguts.py,sha256=WvhYl_AOdvNgzrcVS2f_PYbXNH_wSAz2uIBSR6BMSh0,11078
+guts_base/data/openguts.py,sha256=7C2wknBQBVxOYVgBL9UzuC9PEoqI2xaVp2pOyZwaIqY,11123
 guts_base/data/preprocessing.py,sha256=qggYkx2x62ingU1BNhJFyL1eQdFQsDJR2lefVfVWW2U,1732
 guts_base/data/survival.py,sha256=U-Ehloo8vnD81VeIglXLEUHX9lt7SjtEs2YEB0D9FHE,5096
-guts_base/data/time_of_death.py,sha256=hwngUwfRP3u8WmD3dHyXrphuu5d8ZJTKyBovGRwAHNQ,21014
+guts_base/data/time_of_death.py,sha256=dHG5jhK--WkemTxJtHVgfdo-FWlFHslBQ0PcYape9fI,22296
 guts_base/data/utils.py,sha256=u3gGDJK15MfRUP4iIxsS-I1oqxD2qH_ugsT7o_Eac18,236
 guts_base/sim/__init__.py,sha256=sbHmT1p2saN0MJ-iYnCDOHjkHtTcKgm7X-dHX5o0tYA,435
-guts_base/sim/base.py,sha256=mYHRDS6KKOrTzgsTAy5IKcVn6zIhqJH9EY9dFrsY0So,37739
+guts_base/sim/base.py,sha256=PJhBFzbs7TB2ewjE2myhEQNvcDBG4DXDols4qamhoYI,38040
 guts_base/sim/constructors.py,sha256=Kz9FHIH3EHsSIKd9sQgHa3eveniFifFlk1Hf-QR69Pg,875
 guts_base/sim/ecx.py,sha256=PeX8UVF1HMMNHaIU-jL6dml4JGezhWwiGSqPFJbOFM4,20808
 guts_base/sim/mempy.py,sha256=IHd87UrmdXpC7y7q0IjYQJH075frjbp2a-dMVBeqZ0U,10164
-guts_base/sim/report.py,sha256=Omm7czcnl3K-eWva_080uQi5RWzUrDxi7gYPjkNO_QM,6787
+guts_base/sim/report.py,sha256=o19MBhKcwty2auPjYWoz4QY91jjJFkA80UTzUuZo1oE,12720
 guts_base/sim/utils.py,sha256=Qj_FPH6kywVxOwgCerS7w5CyuYR9HKmvBWFpmxwDFgk,256
-guts_base-1.0.5.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
-guts_base-1.0.5.dist-info/METADATA,sha256=TKQepymlj9INzwGyhPh3jZ37Zcm7BpULGR9L7FvhNR8,45426
-guts_base-1.0.5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-guts_base-1.0.5.dist-info/entry_points.txt,sha256=icsHzG2jQ90ZS7XvLsI5Qj0-qGuPv2T0RBVN5daGCPU,183
-guts_base-1.0.5.dist-info/top_level.txt,sha256=PxhBgUd4r39W_VI4FyJjARwKbV5_glgCVnd6v_zAGdE,10
-guts_base-1.0.5.dist-info/RECORD,,
+guts_base-1.0.7.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+guts_base-1.0.7.dist-info/METADATA,sha256=JlsmLRB8h8yzgCUIVNxSRQulFm2bM8cNCNytGziFKjg,45426
+guts_base-1.0.7.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+guts_base-1.0.7.dist-info/entry_points.txt,sha256=icsHzG2jQ90ZS7XvLsI5Qj0-qGuPv2T0RBVN5daGCPU,183
+guts_base-1.0.7.dist-info/top_level.txt,sha256=PxhBgUd4r39W_VI4FyJjARwKbV5_glgCVnd6v_zAGdE,10
+guts_base-1.0.7.dist-info/RECORD,,