pastastore 1.3.0.tar.gz → 1.5.0.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (28)
  1. {pastastore-1.3.0 → pastastore-1.5.0}/PKG-INFO +12 -10
  2. pastastore-1.5.0/pastastore/__init__.py +9 -0
  3. {pastastore-1.3.0 → pastastore-1.5.0}/pastastore/base.py +204 -58
  4. {pastastore-1.3.0 → pastastore-1.5.0}/pastastore/connectors.py +51 -30
  5. {pastastore-1.3.0 → pastastore-1.5.0}/pastastore/datasets.py +3 -2
  6. {pastastore-1.3.0 → pastastore-1.5.0}/pastastore/plotting.py +136 -30
  7. {pastastore-1.3.0 → pastastore-1.5.0}/pastastore/store.py +201 -35
  8. pastastore-1.5.0/pastastore/styling.py +67 -0
  9. {pastastore-1.3.0 → pastastore-1.5.0}/pastastore/util.py +48 -16
  10. pastastore-1.5.0/pastastore/version.py +38 -0
  11. {pastastore-1.3.0 → pastastore-1.5.0}/pastastore/yaml_interface.py +33 -25
  12. {pastastore-1.3.0 → pastastore-1.5.0}/pastastore.egg-info/PKG-INFO +12 -10
  13. {pastastore-1.3.0 → pastastore-1.5.0}/pastastore.egg-info/SOURCES.txt +1 -0
  14. {pastastore-1.3.0 → pastastore-1.5.0}/pastastore.egg-info/requires.txt +11 -9
  15. {pastastore-1.3.0 → pastastore-1.5.0}/pyproject.toml +55 -11
  16. {pastastore-1.3.0 → pastastore-1.5.0}/tests/test_001_import.py +1 -0
  17. {pastastore-1.3.0 → pastastore-1.5.0}/tests/test_002_connectors.py +10 -13
  18. {pastastore-1.3.0 → pastastore-1.5.0}/tests/test_003_pastastore.py +14 -8
  19. {pastastore-1.3.0 → pastastore-1.5.0}/tests/test_004_yaml.py +4 -3
  20. {pastastore-1.3.0 → pastastore-1.5.0}/tests/test_005_maps_plots.py +2 -1
  21. {pastastore-1.3.0 → pastastore-1.5.0}/tests/test_006_benchmark.py +4 -88
  22. pastastore-1.3.0/pastastore/__init__.py +0 -11
  23. pastastore-1.3.0/pastastore/version.py +0 -7
  24. {pastastore-1.3.0 → pastastore-1.5.0}/LICENSE +0 -0
  25. {pastastore-1.3.0 → pastastore-1.5.0}/pastastore.egg-info/dependency_links.txt +0 -0
  26. {pastastore-1.3.0 → pastastore-1.5.0}/pastastore.egg-info/top_level.txt +0 -0
  27. {pastastore-1.3.0 → pastastore-1.5.0}/readme.md +0 -0
  28. {pastastore-1.3.0 → pastastore-1.5.0}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: pastastore
- Version: 1.3.0
+ Version: 1.5.0
  Summary: Tools for managing Pastas time series models.
  Author: D.A. Brakenhoff
  Maintainer-email: "D.A. Brakenhoff" <d.brakenhoff@artesia-water.nl>, "R. Calje" <r.calje@artesia-water.nl>, "M.A. Vonk" <m.vonk@artesia-water.nl>
@@ -40,11 +40,10 @@ Classifier: Operating System :: Unix
  Classifier: Operating System :: MacOS
  Classifier: Programming Language :: Python
  Classifier: Programming Language :: Python :: 3
- Classifier: Programming Language :: Python :: 3.7
- Classifier: Programming Language :: Python :: 3.8
  Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
  Classifier: Programming Language :: Python :: 3 :: Only
  Classifier: Topic :: Scientific/Engineering :: Hydrology
  Requires-Python: >=3.7
@@ -73,13 +72,16 @@ Requires-Dist: pytest-cov; extra == "test"
  Requires-Dist: pytest-dependency; extra == "test"
  Requires-Dist: pytest-benchmark; extra == "test"
  Requires-Dist: codacy-coverage; extra == "test"
- Requires-Dist: lxml; extra == "test"
- Provides-Extra: pystore
- Requires-Dist: fsspec>=0.3.3; extra == "pystore"
- Requires-Dist: python-snappy; extra == "pystore"
- Requires-Dist: dask[dataframe]; extra == "pystore"
- Provides-Extra: arctic
- Requires-Dist: arctic; extra == "arctic"
+ Provides-Extra: test-py312
+ Requires-Dist: pastastore[lint,optional]; extra == "test-py312"
+ Requires-Dist: hydropandas[full]; extra == "test-py312"
+ Requires-Dist: coverage; extra == "test-py312"
+ Requires-Dist: codecov; extra == "test-py312"
+ Requires-Dist: pytest; extra == "test-py312"
+ Requires-Dist: pytest-cov; extra == "test-py312"
+ Requires-Dist: pytest-dependency; extra == "test-py312"
+ Requires-Dist: pytest-benchmark; extra == "test-py312"
+ Requires-Dist: codacy-coverage; extra == "test-py312"
  Provides-Extra: arcticdb
  Requires-Dist: arcticdb; extra == "arcticdb"
  Provides-Extra: docs
@@ -0,0 +1,9 @@
+ # ruff: noqa: F401 D104
+ from pastastore import connectors, styling, util
+ from pastastore.connectors import (
+ ArcticDBConnector,
+ DictConnector,
+ PasConnector,
+ )
+ from pastastore.store import PastaStore
+ from pastastore.version import __version__, show_versions
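The new top-level `__init__.py` re-exports the connectors, `PastaStore`, `__version__` and `show_versions`. A minimal usage sketch; the `PastaStore(conn)` constructor call and the `"example"` name are assumptions, not taken from this diff:

```python
# Minimal sketch of the public API exposed by the new __init__.py.
# Names come straight from the imports above; PastaStore(conn) is assumed
# to match recent versions of the package.
import pastastore as pst

print(pst.__version__)        # version string re-exported at package level
pst.show_versions()           # print versions of pastastore and its dependencies

conn = pst.DictConnector("example")   # in-memory connector, convenient for testing
pstore = pst.PastaStore(conn)         # wrap the connector in a PastaStore
```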
@@ -1,7 +1,12 @@
+ # ruff: noqa: B019
+ """Base classes for PastaStore Connectors."""
+
  import functools
  import json
  import warnings
- from abc import ABC, abstractmethod, abstractproperty
+
+ # import weakref
+ from abc import ABC, abstractmethod
  from collections.abc import Iterable
  from itertools import chain
  from typing import Dict, List, Optional, Tuple, Union
@@ -14,7 +19,7 @@ from pastas.io.pas import PastasEncoder
  from tqdm.auto import tqdm

  from pastastore.util import ItemInLibraryException, _custom_warning, validate_names
- from pastastore.version import PASTAS_LEQ_022
+ from pastastore.version import PASTAS_GEQ_150, PASTAS_LEQ_022

  FrameorSeriesUnion = Union[pd.DataFrame, pd.Series]
  warnings.showwarning = _custom_warning
@@ -23,10 +28,9 @@ warnings.showwarning = _custom_warning
  class BaseConnector(ABC):
  """Base Connector class.

- Class holds base logic for dealing with time series and Pastas
- Models. Create your own Connector to a data source by writing a a
- class that inherits from this BaseConnector. Your class has to
- override each abstractmethod and abstractproperty.
+ Class holds base logic for dealing with time series and Pastas Models. Create your
+ own Connector to a data source by writing a a class that inherits from this
+ BaseConnector. Your class has to override each abstractmethod and abstractproperty.
  """

  _default_library_names = [
@@ -78,7 +82,7 @@ class BaseConnector(ABC):
  metadata: Optional[Dict] = None,
  overwrite: bool = False,
  ) -> None:
- """Internal method to add item for both time series and pastas.Models.
+ """Add item for both time series and pastas.Models (internal method).

  Must be overriden by subclass.

@@ -96,7 +100,7 @@ class BaseConnector(ABC):

  @abstractmethod
  def _get_item(self, libname: str, name: str) -> Union[FrameorSeriesUnion, Dict]:
- """Internal method to get item (series or pastas.Models).
+ """Get item (series or pastas.Models) (internal method).

  Must be overriden by subclass.

@@ -115,7 +119,7 @@ class BaseConnector(ABC):

  @abstractmethod
  def _del_item(self, libname: str, name: str) -> None:
- """Internal method to delete items (series or models).
+ """Delete items (series or models) (internal method).

  Must be overriden by subclass.

@@ -129,7 +133,7 @@ class BaseConnector(ABC):

  @abstractmethod
  def _get_metadata(self, libname: str, name: str) -> Dict:
- """Internal method to get metadata.
+ """Get metadata (internal method).

  Must be overriden by subclass.

@@ -146,21 +150,24 @@ class BaseConnector(ABC):
  dictionary containing metadata
  """

- @abstractproperty
+ @property
+ @abstractmethod
  def oseries_names(self):
  """List of oseries names.

  Property must be overriden by subclass.
  """

- @abstractproperty
+ @property
+ @abstractmethod
  def stresses_names(self):
  """List of stresses names.

  Property must be overriden by subclass.
  """

- @abstractproperty
+ @property
+ @abstractmethod
  def model_names(self):
  """List of model names.

@@ -238,7 +245,7 @@ class BaseConnector(ABC):
  validate: Optional[bool] = None,
  overwrite: bool = False,
  ) -> None:
- """Internal method to add series to database.
+ """Add series to database (internal method).

  Parameters
  ----------
@@ -268,9 +275,21 @@ class BaseConnector(ABC):
  series = self._set_series_name(series, name)
  if self._pastas_validate(validate):
  if libname == "oseries":
- ps.validate_oseries(series)
+ if PASTAS_GEQ_150 and not ps.validate_oseries(series):
+ raise ValueError(
+ "oseries does not meet pastas criteria,"
+ " see `ps.validate_oseries()`!"
+ )
+ else:
+ ps.validate_oseries(series)
  else:
- ps.validate_stress(series)
+ if PASTAS_GEQ_150 and not ps.validate_stress(series):
+ raise ValueError(
+ "stress does not meet pastas criteria,"
+ " see `ps.validate_stress()`!"
+ )
+ else:
+ ps.validate_stress(series)
  in_store = getattr(self, f"{libname}_names")
  if name not in in_store or overwrite:
  self._add_item(
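With pastas >= 1.5.0, `validate_oseries()` and `validate_stress()` return a boolean instead of raising, so the connector now raises `ValueError` itself when validation fails. A hedged sketch of what that looks like from the caller's side; the `add_oseries` call and the example data are assumptions, not taken from this diff:

```python
# Hedged sketch: adding an oseries that fails pastas validation now raises
# ValueError when pastas >= 1.5.0 is installed. "well_1" and the duplicated
# timestamp are illustrative assumptions.
import pandas as pd
import pastastore as pst

conn = pst.DictConnector("example")
oseries = pd.Series(
    [1.0, 1.2, 1.1],
    index=pd.to_datetime(["2020-01-01", "2020-01-02", "2020-01-02"]),  # duplicate index
    name="well_1",
)

try:
    conn.add_oseries(oseries, "well_1", validate=True)
except ValueError as e:
    print(f"series rejected: {e}")
```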
@@ -290,7 +309,7 @@ class BaseConnector(ABC):
  metadata: Optional[dict] = None,
  validate: Optional[bool] = None,
  ) -> None:
- """Internal method to update time series.
+ """Update time series (internal method).

  Parameters
  ----------
@@ -312,6 +331,9 @@ class BaseConnector(ABC):
  self._validate_input_series(series)
  series = self._set_series_name(series, name)
  stored = self._get_series(libname, name, progressbar=False)
+ if self.conn_type == "pas" and not isinstance(series, type(stored)):
+ if isinstance(series, pd.DataFrame):
+ stored = stored.to_frame()
  # get union of index
  idx_union = stored.index.union(series.index)
  # update series with new values
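The added branch handles the `pas`-connector case where the stored item is a `Series` while the update arrives as a `DataFrame`: the stored object is promoted with `to_frame()` before the index-union update. A minimal pandas sketch of that idea (not the package's exact code; example data is illustrative):

```python
# Minimal pandas sketch: promote a stored Series to a DataFrame when the
# incoming data is a DataFrame, then update on the union of both indices.
import pandas as pd

stored = pd.Series(
    [1.0, 2.0], index=pd.to_datetime(["2020-01-01", "2020-01-02"]), name="obs"
)
incoming = pd.DataFrame(
    {"obs": [2.5, 3.0]}, index=pd.to_datetime(["2020-01-02", "2020-01-03"])
)

if isinstance(incoming, pd.DataFrame) and not isinstance(stored, pd.DataFrame):
    stored = stored.to_frame()                   # same promotion as in the diff

idx_union = stored.index.union(incoming.index)   # union of old and new index
updated = stored.reindex(idx_union)              # expand to the union
updated.update(incoming)                         # overwrite with new values
```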
@@ -381,7 +403,6 @@ class BaseConnector(ABC):
  metadata dictionary that will be used to update the stored
  metadata
  """
-
  if libname not in ["oseries", "stresses"]:
  raise ValueError("Library must be 'oseries' or 'stresses'!")
  update_meta = self._get_metadata(libname, name)
@@ -530,7 +551,7 @@ class BaseConnector(ABC):
  series: FrameorSeriesUnion,
  metadata: Optional[Dict] = None,
  ) -> Tuple[FrameorSeriesUnion, Optional[Dict]]:
- """Internal method to parse series input.
+ """Parse series input (internal method).

  Parameters
  ----------
@@ -659,6 +680,18 @@ class BaseConnector(ABC):
  self._del_oseries_model_link(oname, n)
  self._clear_cache("_modelnames_cache")

+ def del_model(self, names: Union[list, str]) -> None:
+ """Delete model(s) from the database.
+
+ Alias for del_models().
+
+ Parameters
+ ----------
+ names : str or list of str
+ name(s) of the model to delete
+ """
+ self.del_models(names=names)
+
  def del_oseries(self, names: Union[list, str], remove_models: bool = False):
  """Delete oseries from the database.

@@ -699,7 +732,7 @@ class BaseConnector(ABC):
  progressbar: bool = True,
  squeeze: bool = True,
  ) -> FrameorSeriesUnion:
- """Internal method to get time series.
+ """Get time series (internal method).

  Parameters
  ----------
@@ -762,8 +795,6 @@ class BaseConnector(ABC):
  imeta = self._get_metadata(libname, n)
  if imeta is None:
  imeta = {}
- if "name" not in imeta.keys():
- imeta["name"] = n
  metalist.append(imeta)
  if as_frame:
  meta = self._meta_list_to_frame(metalist, names=names)
@@ -864,6 +895,45 @@ class BaseConnector(ABC):
  else:
  return stresses

+ def get_stress(
+ self,
+ names: Union[list, str],
+ return_metadata: bool = False,
+ progressbar: bool = False,
+ squeeze: bool = True,
+ ) -> Union[Union[FrameorSeriesUnion, Dict], Optional[Union[Dict, List]]]:
+ """Get stresses from database.
+
+ Alias for `get_stresses()`
+
+ Parameters
+ ----------
+ names : str or list of str
+ names of the stresses to load
+ return_metadata : bool, optional
+ return metadata as dictionary or list of dictionaries,
+ default is False
+ progressbar : bool, optional
+ show progressbar, by default False
+ squeeze : bool, optional
+ if True return DataFrame or Series instead of dictionary
+ for single entry
+
+ Returns
+ -------
+ stresses : pandas.DataFrame or dict of DataFrames
+ returns time series as DataFrame or dictionary of DataFrames if
+ multiple names were passed
+ metadata : dict or list of dict
+ metadata for each stress, only returned if return_metadata=True
+ """
+ return self.get_stresses(
+ names,
+ return_metadata=return_metadata,
+ progressbar=progressbar,
+ squeeze=squeeze,
+ )
+
  def get_models(
  self,
  names: Union[list, str],
@@ -911,6 +981,48 @@ class BaseConnector(ABC):
  else:
  return models

+ def get_model(
+ self,
+ names: Union[list, str],
+ return_dict: bool = False,
+ progressbar: bool = False,
+ squeeze: bool = True,
+ update_ts_settings: bool = False,
+ ) -> Union[ps.Model, list]:
+ """Load models from database.
+
+ Alias for get_models().
+
+ Parameters
+ ----------
+ names : str or list of str
+ names of the models to load
+ return_dict : bool, optional
+ return model dictionary instead of pastas.Model (much
+ faster for obtaining parameters, for example)
+ progressbar : bool, optional
+ show progressbar, by default False
+ squeeze : bool, optional
+ if True return Model instead of list of Models
+ for single entry
+ update_ts_settings : bool, optional
+ update time series settings based on time series in store.
+ overwrites stored tmin/tmax in model.
+
+ Returns
+ -------
+ pastas.Model or list of pastas.Model
+ return pastas model, or list of models if multiple names were
+ passed
+ """
+ return self.get_models(
+ names,
+ return_dict=return_dict,
+ progressbar=progressbar,
+ squeeze=squeeze,
+ update_ts_settings=update_ts_settings,
+ )
+
  def empty_library(
  self, libname: str, prompt: bool = True, progressbar: bool = True
  ):
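Version 1.5.0 adds singular aliases (`del_model`, `get_stress`, `get_model`) that simply forward to their plural counterparts. A short hedged usage sketch, assuming `conn` is a connector that already holds a model `"well_1"` and a stress `"prec"` (hypothetical names):

```python
# Usage sketch for the new singular aliases on BaseConnector.
# "well_1" and "prec" are hypothetical item names.
ml = conn.get_model("well_1")                               # alias for get_models()
prec, meta = conn.get_stress("prec", return_metadata=True)  # alias for get_stresses()
conn.del_model("well_1")                                    # alias for del_models()
```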
@@ -954,7 +1066,7 @@ class BaseConnector(ABC):
  print(f"Emptied library {libname} in {self.name}: " f"{self.__class__}")

  def _iter_series(self, libname: str, names: Optional[List[str]] = None):
- """Internal method iterate over time series in library.
+ """Iterate over time series in library (internal method).

  Parameters
  ----------
@@ -966,7 +1078,7 @@ class BaseConnector(ABC):


  Yields
- -------
+ ------
  pandas.Series or pandas.DataFrame
  time series contained in library
  """
@@ -985,7 +1097,7 @@ class BaseConnector(ABC):


  Yields
- -------
+ ------
  pandas.Series or pandas.DataFrame
  oseries contained in library
  """
@@ -1002,7 +1114,7 @@ class BaseConnector(ABC):


  Yields
- -------
+ ------
  pandas.Series or pandas.DataFrame
  stresses contained in library
  """
@@ -1023,11 +1135,10 @@ class BaseConnector(ABC):
  which returns a pastas.Model.

  Yields
- -------
+ ------
  pastas.Model or dict
  time series model
  """
-
  modelnames = self._parse_names(modelnames, "models")
  for mlnam in modelnames:
  yield self.get_models(mlnam, return_dict=return_dict, progressbar=False)
@@ -1081,11 +1192,10 @@ class BaseConnector(ABC):
  def _update_all_oseries_model_links(self):
  """Add all model names to oseries metadata dictionaries.

- Used for old PastaStore versions, where relationship between
- oseries and models was not stored. If there are any models in
- the database and if the oseries_models library is empty, loops
- through all models to determine which oseries each model belongs
- to.
+ Used for old PastaStore versions, where relationship between oseries and models
+ was not stored. If there are any models in the database and if the
+ oseries_models library is empty, loops through all models to determine which
+ oseries each model belongs to.
  """
  # get oseries_models library if there are any contents, if empty
  # add all model links.
@@ -1149,14 +1259,38 @@ class BaseConnector(ABC):

  @property
  def n_oseries(self):
+ """
+ Returns the number of oseries.
+
+ Returns
+ -------
+ int
+ The number of oseries names.
+ """
  return len(self.oseries_names)

  @property
  def n_stresses(self):
+ """
+ Returns the number of stresses.
+
+ Returns
+ -------
+ int
+ The number of stresses.
+ """
  return len(self.stresses_names)

  @property
  def n_models(self):
+ """
+ Returns the number of models in the store.
+
+ Returns
+ -------
+ int
+ The number of models in the store.
+ """
  return len(self.model_names)

  @property # type: ignore
@@ -1179,8 +1313,8 @@ class BaseConnector(ABC):
  class ConnectorUtil:
  """Mix-in class for general Connector helper functions.

- Only for internal methods, and not methods that are related to CRUD
- operations on database.
+ Only for internal methods, and not methods that are related to CRUD operations on
+ database.
  """

  def _parse_names(
@@ -1188,7 +1322,7 @@ class ConnectorUtil:
  names: Optional[Union[list, str]] = None,
  libname: Optional[str] = "oseries",
  ) -> list:
- """Internal method to parse names kwarg, returns iterable with name(s).
+ """Parse names kwarg, returns iterable with name(s) (internal method).

  Parameters
  ----------
@@ -1209,13 +1343,13 @@ class ConnectorUtil:
  return [names]
  elif names is None or names == "all":
  if libname == "oseries":
- return getattr(self, "oseries_names")
+ return self.oseries_names
  elif libname == "stresses":
- return getattr(self, "stresses_names")
+ return self.stresses_names
  elif libname == "models":
- return getattr(self, "model_names")
+ return self.model_names
  elif libname == "oseries_models":
- return getattr(self, "oseries_with_models")
+ return self.oseries_with_models
  else:
  raise ValueError(f"No library '{libname}'!")
  else:
@@ -1247,11 +1381,13 @@ class ConnectorUtil:
  meta = pd.DataFrame(metalist)
  elif len(metalist) == 0:
  meta = pd.DataFrame()
+
  meta.index = names
+ meta.index.name = "name"
  return meta

  def _parse_model_dict(self, mdict: dict, update_ts_settings: bool = False):
- """Internal method to parse dictionary describing pastas models.
+ """Parse dictionary describing pastas models (internal method).

  Parameters
  ----------
@@ -1276,7 +1412,7 @@ class ConnectorUtil:
  if name not in self.oseries.index:
  msg = "oseries '{}' not present in library".format(name)
  raise LookupError(msg)
- mdict["oseries"]["series"] = self.get_oseries(name)
+ mdict["oseries"]["series"] = self.get_oseries(name).squeeze()
  # update tmin/tmax from time series
  if update_ts_settings:
  mdict["oseries"]["settings"]["tmin"] = mdict["oseries"]["series"].index[
@@ -1296,7 +1432,7 @@ class ConnectorUtil:
  if "series" not in stress:
  name = str(stress["name"])
  if name in self.stresses.index:
- stress["series"] = self.get_stresses(name)
+ stress["series"] = self.get_stresses(name).squeeze()
  # update tmin/tmax from time series
  if update_ts_settings:
  stress["settings"]["tmin"] = stress["series"].index[
@@ -1311,7 +1447,7 @@ class ConnectorUtil:
  if "series" not in stress:
  name = str(stress["name"])
  if name in self.stresses.index:
- stress["series"] = self.get_stresses(name)
+ stress["series"] = self.get_stresses(name).squeeze()
  # update tmin/tmax from time series
  if update_ts_settings:
  stress["settings"]["tmin"] = stress["series"].index[
@@ -1327,7 +1463,7 @@ class ConnectorUtil:
  if "series" not in stress:
  name = str(stress["name"])
  if name in self.stresses.index:
- stress["series"] = self.get_stresses(name)
+ stress["series"] = self.get_stresses(name).squeeze()
  # update tmin/tmax from time series
  if update_ts_settings:
  stress["settings"]["tmin"] = stress["series"].index[0]
@@ -1366,7 +1502,7 @@ class ConnectorUtil:

  @staticmethod
  def _validate_input_series(series):
- """check if series is pandas.DataFrame or pandas.Series.
+ """Check if series is pandas.DataFrame or pandas.Series.

  Parameters
  ----------
@@ -1497,7 +1633,7 @@ class ConnectorUtil:
  raise ValueError(msg)

  def _check_oseries_in_store(self, ml: Union[ps.Model, dict]):
- """Internal method, check if Model oseries are contained in PastaStore.
+ """Check if Model oseries are contained in PastaStore (internal method).

  Parameters
  ----------
@@ -1530,8 +1666,7 @@ class ConnectorUtil:
  )

  def _check_stresses_in_store(self, ml: Union[ps.Model, dict]):
- """Internal method, check if stresses time series are contained in
- PastaStore.
+ """Check if stresses time series are contained in PastaStore (internal method).

  Parameters
  ----------
@@ -1672,7 +1807,7 @@ class ConnectorUtil:
  names: Optional[Union[list, str]] = None,
  progressbar: bool = True,
  ):
- """Internal method for writing DataFrame or Series to zipfile.
+ """Write DataFrame or Series to zipfile (internal method).

  Parameters
  ----------
@@ -1698,7 +1833,7 @@ class ConnectorUtil:
  archive.writestr(f"{libname}/{n}_meta.json", meta_json)

  def _models_to_archive(self, archive, names=None, progressbar=True):
- """Internal method for writing pastas.Model to zipfile.
+ """Write pastas.Model to zipfile (internal method).

  Parameters
  ----------
@@ -1717,23 +1852,27 @@ class ConnectorUtil:
  archive.writestr(f"models/{n}.pas", jsondict)

  @staticmethod
- def _series_from_json(fjson: str):
+ def _series_from_json(fjson: str, squeeze: bool = True):
  """Load time series from JSON.

  Parameters
  ----------
  fjson : str
  path to file
+ squeeze : bool, optional
+ squeeze time series object to obtain pandas Series

  Returns
  -------
  s : pd.DataFrame
  DataFrame containing time series
  """
- s = pd.read_json(fjson, orient="columns", precise_float=True)
+ s = pd.read_json(fjson, orient="columns", precise_float=True, dtype=False)
  if not isinstance(s.index, pd.DatetimeIndex):
  s.index = pd.to_datetime(s.index, unit="ms")
  s = s.sort_index() # needed for some reason ...
+ if squeeze:
+ return s.squeeze()
  return s

  @staticmethod
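The `squeeze=True` default means a single-column DataFrame loaded from JSON is now returned as a `pandas.Series`. A small pandas illustration of what `squeeze()` does (example data is illustrative):

```python
# Illustration of the squeeze behaviour: a single-column DataFrame collapses
# to a Series; a multi-column DataFrame would be returned unchanged.
import pandas as pd

df = pd.DataFrame(
    {"well_1": [1.0, 1.1]}, index=pd.to_datetime(["2020-01-01", "2020-01-02"])
)
s = df.squeeze()                              # -> pandas.Series named "well_1"
print(type(df).__name__, type(s).__name__)    # DataFrame Series
```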
@@ -1778,10 +1917,10 @@ class ConnectorUtil:
  class ModelAccessor:
  """Object for managing access to stored models.

- Provides dict-like access to models (i.e. PastaStore.models["model1"]),
- or allows adding models to the PastaStore using dict-like assignment
- (i.e. PastaStore.models["model1"] = ml), and it can serve as an iterator
- (i.e. [ml for ml in pstore.models]).
+ Provides dict-like access to models (i.e. PastaStore.models["model1"]), or allows
+ adding models to the PastaStore using dict-like assignment (i.e.
+ PastaStore.models["model1"] = ml), and it can serve as an iterator (i.e. [ml for ml
+ in pstore.models]).
  """

  def __init__(self, conn):
@@ -1825,7 +1964,7 @@ class ModelAccessor:
  """Iterate over models.

  Yields
- -------
+ ------
  ml : pastas.Model
  model
  """
@@ -1839,6 +1978,13 @@ class ModelAccessor:
  return self.conn.n_models

  def random(self):
+ """Return a random model.
+
+ Returns
+ -------
+ pastas.Model
+ A random model object from the connection.
+ """
  from random import choice

  return self.conn.get_models(choice(self.conn._modelnames_cache))
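Putting the `ModelAccessor` behaviour described in its docstring together, a hedged usage sketch (assumes an existing `pstore` that already holds models; `"model1"` is a hypothetical name):

```python
# Sketch of the dict-like model access described in the ModelAccessor docstring.
ml = pstore.models["model1"]        # fetch a stored model by name
pstore.models["model1"] = ml        # add a model via dict-like assignment
print(len(pstore.models))           # number of stored models (n_models)
for ml in pstore.models:            # iterate over all stored models
    print(ml.name)
ml_any = pstore.models.random()     # the new random() helper returns one model
```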