pastastore 1.3.0__py3-none-any.whl → 1.5.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
pastastore/util.py CHANGED
@@ -1,4 +1,7 @@
+ """Useful utilities for pastastore."""
+
  import os
+ import warnings
  from typing import Dict, List, Optional, Union

  import numpy as np
@@ -16,9 +19,12 @@ def _custom_warning(message, category=UserWarning, filename="", lineno=-1, *args


  class ItemInLibraryException(Exception):
+ """Exception when item is already in library."""
+
  pass


+ # TODO: remove in future version
  def delete_pystore_connector(
  conn=None,
  path: Optional[str] = None,
@@ -39,6 +45,12 @@ def delete_pystore_connector(
  list of library names to delete, by default None which deletes
  all libraries
  """
+ warnings.warn(
+ "This function is deprecated. We recommend to migrate to a different "
+ "Connector, e.g. `pst.PasConnector`.",
+ DeprecationWarning,
+ stacklevel=1,
+ )
  import pystore

  if conn is not None:
@@ -60,6 +72,7 @@ def delete_pystore_connector(
  print(f" - deleted: {lib}")


+ # TODO: remove in future version
  def delete_arctic_connector(
  conn=None,
  connstr: Optional[str] = None,
@@ -80,6 +93,12 @@ def delete_arctic_connector(
  list of library names to delete, by default None which deletes
  all libraries
  """
+ warnings.warn(
+ "This function is deprecated. We recommend to migrate to a different "
+ "Connector, e.g. `pst.ArcticDBConnector`.",
+ DeprecationWarning,
+ stacklevel=1,
+ )
  import arctic

  if conn is not None:
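The two deprecations above point users toward the maintained connectors. A minimal migration sketch follows; the database name, path and URI are illustrative, and the exact constructor signatures should be checked against the pastastore documentation:

    import pastastore as pst

    # PasConnector stores everything as Pastas .pas files on disk and needs no
    # extra dependencies; ArcticDBConnector is the ArcticDB-backed alternative.
    conn = pst.PasConnector("my_db", "./pastastore_db")  # hypothetical name/path
    # conn = pst.ArcticDBConnector("my_db", "lmdb://./arcticdb")  # assumed URI form

    pstore = pst.PastaStore(conn)
    print(pstore)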
@@ -172,6 +191,7 @@ def delete_arcticdb_connector(


  def delete_dict_connector(conn, libraries: Optional[List[str]] = None) -> None:
+ """Delete DictConnector object."""
  print(f"Deleting DictConnector: '{conn.name}' ... ", end="")
  if libraries is None:
  del conn
@@ -185,6 +205,7 @@ def delete_dict_connector(conn, libraries: Optional[List[str]] = None) -> None:


  def delete_pas_connector(conn, libraries: Optional[List[str]] = None) -> None:
+ """Delete PasConnector object."""
  import shutil

  print(f"Deleting PasConnector database: '{conn.name}' ... ", end="")
@@ -222,10 +243,12 @@ def delete_pastastore(pstore, libraries: Optional[List[str]] = None) -> None:
  TypeError
  when Connector type is not recognized
  """
+ # TODO: remove in future version
  if pstore.conn.conn_type == "pystore":
  delete_pystore_connector(conn=pstore.conn, libraries=libraries)
  elif pstore.conn.conn_type == "dict":
  delete_dict_connector(pstore)
+ # TODO: remove in future version
  elif pstore.conn.conn_type == "arctic":
  delete_arctic_connector(conn=pstore.conn, libraries=libraries)
  elif pstore.conn.conn_type == "arcticdb":
@@ -303,7 +326,6 @@ def compare_models(ml1, ml2, stats=None, detailed_comparison=False):
  returns True if models are equivalent when detailed_comparison=True
  else returns DataFrame containing comparison details.
  """
-
  df = pd.DataFrame(columns=["model 0", "model 1"])
  so1 = [] # for storing series_original
  sv1 = [] # for storing series_validated
@@ -335,9 +357,11 @@ def compare_models(ml1, ml2, stats=None, detailed_comparison=False):
  try:
  assert_series_equal(
  oso,
- ml.oseries.series_original
- if PASTAS_LEQ_022
- else ml.oseries._series_original,
+ (
+ ml.oseries.series_original
+ if PASTAS_LEQ_022
+ else ml.oseries._series_original
+ ),
  )
  compare_oso = True
  except (ValueError, AssertionError):
@@ -378,9 +402,9 @@ def compare_models(ml1, ml2, stats=None, detailed_comparison=False):
  for ts in stresses:
  df.loc[f"- time series: '{ts.name}'"] = ts.name
  for tsk in ts.settings.keys():
- df.loc[
- f" - {ts.name} settings: {tsk}", f"model {i}"
- ] = ts.settings[tsk]
+ df.loc[f" - {ts.name} settings: {tsk}", f"model {i}"] = (
+ ts.settings[tsk]
+ )

  if i == 0:
  if PASTAS_LEQ_022:
@@ -403,9 +427,11 @@ def compare_models(ml1, ml2, stats=None, detailed_comparison=False):
  try:
  assert_series_equal(
  so1[counter],
- ts.series_original
- if PASTAS_LEQ_022
- else ts._series_original,
+ (
+ ts.series_original
+ if PASTAS_LEQ_022
+ else ts._series_original
+ ),
  )
  compare_so1 = True
  except (ValueError, AssertionError):
@@ -548,6 +574,7 @@ def frontiers_checks(
  check4_gain: bool = True,
  check5_parambounds: bool = False,
  csv_dir: Optional[str] = None,
+ progressbar: bool = False,
  ) -> pd.DataFrame: # pragma: no cover
  """Check models in a PastaStore to see if they pass reliability criteria.

@@ -593,6 +620,8 @@ def frontiers_checks(
  csv_dir : string, optional
  directory to store CSV file with overview of checks for every
  model, by default None which will not store results
+ progressbar : bool, optional
+ show progressbar, by default False

  Returns
  -------
@@ -608,7 +637,6 @@ def frontiers_checks(
  Application of Time Series Analysis to Estimate Drawdown From Multiple Well
  Fields. Front. Earth Sci., 14 June 2022 doi:10.3389/feart.2022.907609
  """
-
  df = pd.DataFrame(columns=["all_checks_passed"])

  if modelnames is not None:
@@ -625,7 +653,9 @@ def frontiers_checks(
  else:
  models = pstore.model_names

- for mlnam in tqdm(models, desc="Running model diagnostics"):
+ for mlnam in (
+ tqdm(models, desc="Running model diagnostics") if progressbar else models
+ ):
  ml = pstore.get_models(mlnam)

  if ml.parameters["optimal"].hasnans:
@@ -703,7 +733,7 @@ def frontiers_checks(
  check_tmem_passed,
  )
  else:
- tmem = ml.get_response_tmax(sm_name)
+ tmem = ml.get_response_tmax(sm_name, cutoff=check3_cutoff)
  if tmem is None: # no rfunc in stressmodel
  tmem = 0
  check_tmem_passed = tmem < len_oseries_calib / 2
@@ -730,7 +760,10 @@ def frontiers_checks(
  else:
  check_gain_passed = np.abs(gain) > 2 * gain_std
  checks.loc[
- f"gain > 2*std: {sm_name}-{iw:02g} ({sm.distances.index[iw]})",
+ (
+ f"gain > 2*std: {sm_name}-{iw:02g}"
+ f" ({sm.distances.index[iw]})"
+ ),
  :,
  ] = (
  gain,
@@ -825,7 +858,6 @@ def frontiers_aic_select(
  Multiple Well Fields. Front. Earth Sci., 14 June 2022
  doi:10.3389/feart.2022.907609
  """
-
  if modelnames is None and oseries is None:
  modelnames = pstore.model_names
  elif modelnames is None and oseries is not None:
@@ -849,7 +881,7 @@ def frontiers_aic_select(
  # with lowest AIC per location
  collect = []
  gr = df.join(aic).groupby("oseries")
- for o, idf in gr:
+ for _, idf in gr:
  idf.index.name = "modelname"
  idf = (
  idf.sort_values("aic").reset_index().set_index(["oseries", "modelname"])
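A short usage sketch of the new `progressbar` keyword added to `frontiers_checks` above, assuming an existing PastaStore `pstore` passed as the first argument (as the function body suggests):

    from pastastore.util import frontiers_checks

    # Run the reliability checks from Brakenhoff et al. (2022) on all stored
    # models; progressbar=True wraps the model loop in a tqdm progress bar.
    checks = frontiers_checks(pstore, progressbar=True)
    print(checks["all_checks_passed"].value_counts())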
pastastore/version.py CHANGED
@@ -1,7 +1,38 @@
+ # ruff: noqa: D100
+ from importlib import import_module, metadata
+ from platform import python_version
+
  import pastas as ps
  from packaging.version import parse as parse_version

  PASTAS_VERSION = parse_version(ps.__version__)
  PASTAS_LEQ_022 = PASTAS_VERSION <= parse_version("0.22.0")
+ PASTAS_GEQ_150 = PASTAS_VERSION >= parse_version("1.5.0")
+
+ __version__ = "1.5.0"
+
+
+ def show_versions(optional=False) -> None:
+ """Print the version of dependencies.
+
+ Parameters
+ ----------
+ optional : bool, optional
+ Print the version of optional dependencies, by default False
+ """
+ msg = (
+ f"Python version : {python_version()}\n"
+ f"Pandas version : {metadata.version('pandas')}\n"
+ f"Matplotlib version : {metadata.version('matplotlib')}\n"
+ f"Pastas version : {metadata.version('pastas')}\n"
+ f"PyYAML version : {metadata.version('pyyaml')}\n"
+ )
+ if optional:
+ msg += "\nArcticDB version : "
+ try:
+ import_module("arcticdb")
+ msg += f"{metadata.version('arctidb')}"
+ except ImportError:
+ msg += "Not Installed"

- __version__ = "1.3.0"
+ print(msg)
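A quick sketch of the new `show_versions` helper and `PASTAS_GEQ_150` flag introduced in pastastore/version.py (printed values are illustrative):

    from pastastore.version import PASTAS_GEQ_150, show_versions

    # Prints the Python, pandas, matplotlib, pastas and PyYAML versions;
    # optional=True also reports ArcticDB when it is installed.
    show_versions(optional=True)
    print("pastas >= 1.5.0:", PASTAS_GEQ_150)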
pastastore/yaml_interface.py CHANGED
@@ -1,3 +1,5 @@
+ """Module containing YAML interface for Pastas models using PastaStore."""
+
  import datetime
  import logging
  import os
@@ -11,14 +13,11 @@ import yaml

  from pastastore.version import PASTAS_LEQ_022

- ps.logger.setLevel("ERROR")
-
- logging.basicConfig(level="INFO")
  logger = logging.getLogger(__name__)


  def _convert_dict_dtypes_for_yaml(d: Dict[str, Any]):
- """Internal method to convert dictionary values for storing in YAML format.
+ """Convert dictionary values for storing in YAML format (internal function).

  Parameters
  ----------
@@ -95,7 +94,6 @@ def reduce_to_minimal_dict(d, keys=None):
  ["name", "oseries", "settings", "tmin", "tmax", "noise",
  "stressmodels", "rfunc", "stress", "prec", "evap", "stressmodel"]
  """
-
  if keys is None:
  keys = [
  "name",
@@ -170,7 +168,7 @@ class PastastoreYAML:
  """

  def __init__(self, pstore):
- """Constructor for PastasstoreYAML class.
+ """Create for PastasstoreYAML class.

  Parameters
  ----------
@@ -181,7 +179,7 @@ class PastastoreYAML:
  self.pstore = pstore

  def _parse_rechargemodel_dict(self, d: Dict, onam: Optional[str] = None) -> Dict:
- """Internal method to parse RechargeModel dictionary.
+ """Parse RechargeModel dictionary (internal method).

  Note: supports 'nearest' as input to 'prec' and 'evap',
  which will automatically select nearest stress with kind="prec" or
@@ -208,7 +206,7 @@ class PastastoreYAML:
  if isinstance(prec_val, dict):
  pnam = prec_val["name"]
  p = self.pstore.get_stresses(pnam)
- prec_val["series"] = p
+ prec_val["series"] = p.squeeze()
  prec = prec_val
  elif prec_val.startswith("nearest"):
  if onam is None:
@@ -224,7 +222,7 @@ class PastastoreYAML:
  "name": pnam,
  "settings": "prec",
  "metadata": pmeta,
- "series": p,
+ "series": p.squeeze(),
  }
  elif isinstance(prec_val, str):
  pnam = d["prec"]
@@ -233,7 +231,7 @@ class PastastoreYAML:
  "name": pnam,
  "settings": "prec",
  "metadata": pmeta,
- "series": p,
+ "series": p.squeeze(),
  }
  else:
  raise NotImplementedError(f"Could not parse prec value: '{prec_val}'")
@@ -244,7 +242,7 @@ class PastastoreYAML:
  if isinstance(evap_val, dict):
  enam = evap_val["name"]
  e = self.pstore.get_stresses(enam)
- evap_val["series"] = e
+ evap_val["series"] = e.squeeze()
  evap = evap_val
  elif evap_val.startswith("nearest"):
  if onam is None:
@@ -260,7 +258,7 @@ class PastastoreYAML:
  "name": enam,
  "settings": "evap",
  "metadata": emeta,
- "series": e,
+ "series": e.squeeze(),
  }
  elif isinstance(evap_val, str):
  enam = d["evap"]
@@ -269,7 +267,7 @@ class PastastoreYAML:
  "name": enam,
  "settings": "evap",
  "metadata": emeta,
- "series": e,
+ "series": e.squeeze(),
  }
  else:
  raise NotImplementedError(f"Could not parse evap value: '{evap_val}'")
@@ -310,12 +308,12 @@ class PastastoreYAML:
  onam = d["oseries"]
  if isinstance(onam, str):
  o = self.pstore.get_oseries(onam)
- d["oseries"] = o
+ d["oseries"] = o.squeeze()

  return d

  def _parse_stressmodel_dict(self, d: Dict, onam: Optional[str] = None) -> Dict:
- """Internal method to parse StressModel dictionary.
+ """Parse StressModel dictionary (internal method).

  Note: supports 'nearest' or 'nearest <kind>' as input to 'stress',
  which will automatically select nearest stress with kind=<kind>.
@@ -337,7 +335,6 @@ class PastastoreYAML:
  containing stresses obtained from PastaStore, and setting
  defaults if they were not already provided.
  """
-
  # get stress
  snam = d.pop("stress")

@@ -361,7 +358,7 @@ class PastastoreYAML:
  "name": snam,
  "settings": d.pop("settings", None),
  "metadata": smeta,
- "series": s,
+ "series": s.squeeze(),
  }
  d["stress"] = [s] if PASTAS_LEQ_022 else s

@@ -380,7 +377,7 @@ class PastastoreYAML:
  return d

  def _parse_wellmodel_dict(self, d: Dict, onam: Optional[str] = None) -> Dict:
- """Internal method to parse WellModel dictionary.
+ """Parse WellModel dictionary (internal method).

  Note: supports 'nearest' or 'nearest <number> <kind>' as input to
  'stress', which will automatically select nearest or <number> of
@@ -402,7 +399,6 @@ class PastastoreYAML:
  containing stresses obtained from PastaStore, and setting
  defaults if they were not already provided.
  """
-
  # parse stress
  snames = d.pop("stress")

@@ -415,12 +411,12 @@ class PastastoreYAML:
  elif len(snames.split()) == 2:
  try:
  n = int(snames.split()[1])
- except ValueError:
+ except ValueError as e:
  raise ValueError(
  f"Could not parse: '{snames}'! "
  "When using option 'nearest' for WellModel, "
  "use 'nearest <n>' or 'nearest <n> <kind>'!"
- )
+ ) from e
  kind = "well"
  elif len(snames.split()) == 1:
  n = 1
@@ -444,7 +440,7 @@ class PastastoreYAML:
  "name": snam,
  "settings": "well",
  "metadata": smeta,
- "series": s,
+ "series": s.squeeze(),
  }
  slist.append(sdict)
  d["stress"] = slist
@@ -479,6 +475,20 @@ class PastastoreYAML:
  return d

  def construct_mldict(self, mlyml: dict, mlnam: str) -> dict:
+ """Create Pastas.Model dictionary from YAML dictionary.
+
+ Parameters
+ ----------
+ mlyml : dict
+ YAML dictionary
+ mlnam : str
+ model name
+
+ Returns
+ -------
+ dict
+ dictionary of pastas.Model that can be read by Pastas
+ """
  # get oseries + metadata
  if isinstance(mlyml["oseries"], dict):
  onam = str(mlyml["oseries"]["name"])
@@ -490,7 +500,7 @@ class PastastoreYAML:
  o, ometa = self.pstore.get_oseries(onam, return_metadata=True)

  # create model to obtain default model settings
- ml = ps.Model(o, name=mlnam, metadata=ometa)
+ ml = ps.Model(o.squeeze(), name=mlnam, metadata=ometa)
  mldict = ml.to_dict(series=True)

  # update with stored model settings
@@ -608,7 +618,6 @@ class PastastoreYAML:
  NotImplementedError
  if unsupported stressmodel is encountered
  """
-
  with open(fyaml, "r") as f:
  yml = yaml.load(f, Loader=yaml.CFullLoader)

@@ -654,7 +663,6 @@ class PastastoreYAML:
  the time series are actually the nearest ones! Only used
  when minimal_yaml=True. Default is False.
  """
-
  onames = self.pstore.conn._parse_names(oseries, "oseries")

  for onam in onames:
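The repeated `.squeeze()` calls above make sure a single-column DataFrame returned by the connector is handed to pastas as a Series. A small pandas illustration with made-up data:

    import pandas as pd

    # A connector may return a one-column DataFrame for a stress or oseries.
    df = pd.DataFrame(
        {"prec": [1.2, 0.0, 3.4]},
        index=pd.date_range("2024-01-01", periods=3, freq="D"),
    )

    series = df.squeeze()  # one-column DataFrame -> Series; wider frames pass through unchanged
    print(type(series))    # <class 'pandas.core.series.Series'>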
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: pastastore
- Version: 1.3.0
+ Version: 1.5.0
  Summary: Tools for managing Pastas time series models.
  Author: D.A. Brakenhoff
  Maintainer-email: "D.A. Brakenhoff" <d.brakenhoff@artesia-water.nl>, "R. Calje" <r.calje@artesia-water.nl>, "M.A. Vonk" <m.vonk@artesia-water.nl>
@@ -40,11 +40,10 @@ Classifier: Operating System :: Unix
  Classifier: Operating System :: MacOS
  Classifier: Programming Language :: Python
  Classifier: Programming Language :: Python :: 3
- Classifier: Programming Language :: Python :: 3.7
- Classifier: Programming Language :: Python :: 3.8
  Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
  Classifier: Programming Language :: Python :: 3 :: Only
  Classifier: Topic :: Scientific/Engineering :: Hydrology
  Requires-Python: >=3.7
@@ -53,8 +52,6 @@ License-File: LICENSE
  Requires-Dist: pastas >=0.13
  Requires-Dist: tqdm >=4.36
  Requires-Dist: pyyaml
- Provides-Extra: arctic
- Requires-Dist: arctic ; extra == 'arctic'
  Provides-Extra: arcticdb
  Requires-Dist: arcticdb ; extra == 'arcticdb'
  Provides-Extra: docs
@@ -74,10 +71,6 @@ Provides-Extra: optional
  Requires-Dist: contextily ; extra == 'optional'
  Requires-Dist: pyproj ; extra == 'optional'
  Requires-Dist: adjustText ; extra == 'optional'
- Provides-Extra: pystore
- Requires-Dist: fsspec >=0.3.3 ; extra == 'pystore'
- Requires-Dist: python-snappy ; extra == 'pystore'
- Requires-Dist: dask[dataframe] ; extra == 'pystore'
  Provides-Extra: test
  Requires-Dist: pastastore[arcticdb,lint,optional] ; extra == 'test'
  Requires-Dist: hydropandas[full] ; extra == 'test'
@@ -88,7 +81,16 @@ Requires-Dist: pytest-cov ; extra == 'test'
  Requires-Dist: pytest-dependency ; extra == 'test'
  Requires-Dist: pytest-benchmark ; extra == 'test'
  Requires-Dist: codacy-coverage ; extra == 'test'
- Requires-Dist: lxml ; extra == 'test'
+ Provides-Extra: test_py312
+ Requires-Dist: pastastore[lint,optional] ; extra == 'test_py312'
+ Requires-Dist: hydropandas[full] ; extra == 'test_py312'
+ Requires-Dist: coverage ; extra == 'test_py312'
+ Requires-Dist: codecov ; extra == 'test_py312'
+ Requires-Dist: pytest ; extra == 'test_py312'
+ Requires-Dist: pytest-cov ; extra == 'test_py312'
+ Requires-Dist: pytest-dependency ; extra == 'test_py312'
+ Requires-Dist: pytest-benchmark ; extra == 'test_py312'
+ Requires-Dist: codacy-coverage ; extra == 'test_py312'

  ![pastastore](https://github.com/pastas/pastastore/workflows/pastastore/badge.svg)
  [![Documentation Status](https://readthedocs.org/projects/pastastore/badge/?version=latest)](https://pastastore.readthedocs.io/en/latest/?badge=latest)
@@ -0,0 +1,15 @@
+ pastastore/__init__.py,sha256=l6zRpDO0j6MIrfdljCTbkF70bt-GFlPseBd4IlmaC-o,269
+ pastastore/base.py,sha256=tUWIenFjiIeQHkA2CYBxmwMy7KXXq5JZ5jZCphRjuj8,67145
+ pastastore/connectors.py,sha256=-Rsw8Uf7wcos9XsQMDPgysU2acT-kqt25WFeNnpf5K0,29588
+ pastastore/datasets.py,sha256=VTjlekM3UryGpslSdMLAaT-QIAVJfaZql0dka2AbMyc,6665
+ pastastore/plotting.py,sha256=t6gEeHVGzrwvM6q1l8V3OkklpU75O2Y4h6nKEHRWdjo,46416
+ pastastore/store.py,sha256=1HEkr84vmCiB07Yj8iOMybFRY_l93C4J1e2suGd92SU,44171
+ pastastore/styling.py,sha256=ioaH10ELV8CFvJA-xAKFbnBklTd6FB1TZV8sqvZrEcw,1518
+ pastastore/util.py,sha256=4P1cQ_euEo3-YopKGReQ18EznTxBVfDpXFiEczpn_Bw,31811
+ pastastore/version.py,sha256=xF0vR3dPK9WQScTFt_osbszvZt9GKJQTBvrT8nRgswc,1155
+ pastastore/yaml_interface.py,sha256=MddELxWe8_aqJRMUydOCbjoU1-ZodzxFKYnAaqJ5SqA,29947
+ pastastore-1.5.0.dist-info/LICENSE,sha256=DtHftfUEm99KzgwLr3rQUTg8H3kAS0Z-p5WWJgLf_OY,1082
+ pastastore-1.5.0.dist-info/METADATA,sha256=_JrozH-4rPhQdxsVuzzHhmW3rWd_T_9oC8H-b68B_7s,8023
+ pastastore-1.5.0.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
+ pastastore-1.5.0.dist-info/top_level.txt,sha256=QKfonr1KJZN46MFsj8eGRBw9Mg-jO-HFvgE2orVX7Sk,11
+ pastastore-1.5.0.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: bdist_wheel (0.42.0)
+ Generator: setuptools (72.1.0)
  Root-Is-Purelib: true
  Tag: py3-none-any

@@ -1,14 +0,0 @@
- pastastore/__init__.py,sha256=GZvAtp_p3y2bOlcuLG2-jSHrYd72bIMDdBA5Wcy-diU,283
- pastastore/base.py,sha256=nPIIiDHJMDUlvPaE2dftjPRxAM2kFtC8kHXvmB9Ayw0,62631
- pastastore/connectors.py,sha256=QH6jvnhIIivn1Is9ThBeMAEcJBVnfcK46pJClGMdiBA,28747
- pastastore/datasets.py,sha256=f-92WOh2ROCxOVvKGHXjnzd66Q7hO_-BtXRfMKwg1KU,6640
- pastastore/plotting.py,sha256=3WTnnOfFZlt4PEsqDzNLFzFOJBtDb0vmwx4ZzdZI_xo,42120
- pastastore/store.py,sha256=vIPnq-XYU-HstweWa4RSuNHotYLTBNVQjljt6SH9fWY,39529
- pastastore/util.py,sha256=aVIrAtfJPatH7BfASChzwHsoug1KDthcgHlEbxl1-uk,30678
- pastastore/version.py,sha256=JXttE_aGZnNgRq0bbXfNL4jCLuHlFt9gD0xf0G99owU,203
- pastastore/yaml_interface.py,sha256=yNpyEHtIapx44Thv2PCm7Sc_W-reXDHjT3uRC2gDAzw,29515
- pastastore-1.3.0.dist-info/LICENSE,sha256=DtHftfUEm99KzgwLr3rQUTg8H3kAS0Z-p5WWJgLf_OY,1082
- pastastore-1.3.0.dist-info/METADATA,sha256=gXxIFp2Ge_pHkDPc0qfuI2wiNQGUZUSOSMWavKWQ8Fo,7844
- pastastore-1.3.0.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
- pastastore-1.3.0.dist-info/top_level.txt,sha256=QKfonr1KJZN46MFsj8eGRBw9Mg-jO-HFvgE2orVX7Sk,11
- pastastore-1.3.0.dist-info/RECORD,,