hydroserverpy 1.5.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69) hide show
  1. hydroserverpy/__init__.py +7 -0
  2. hydroserverpy/api/__init__.py +0 -0
  3. hydroserverpy/api/client.py +203 -0
  4. hydroserverpy/api/models/__init__.py +22 -0
  5. hydroserverpy/api/models/base.py +207 -0
  6. hydroserverpy/api/models/etl/__init__.py +26 -0
  7. hydroserverpy/api/models/etl/data_archive.py +77 -0
  8. hydroserverpy/api/models/etl/data_source.py +146 -0
  9. hydroserverpy/api/models/etl/etl_configuration.py +224 -0
  10. hydroserverpy/api/models/etl/extractors/__init__.py +6 -0
  11. hydroserverpy/api/models/etl/extractors/base.py +52 -0
  12. hydroserverpy/api/models/etl/extractors/ftp_extractor.py +50 -0
  13. hydroserverpy/api/models/etl/extractors/http_extractor.py +28 -0
  14. hydroserverpy/api/models/etl/extractors/local_file_extractor.py +20 -0
  15. hydroserverpy/api/models/etl/factories.py +23 -0
  16. hydroserverpy/api/models/etl/loaders/__init__.py +4 -0
  17. hydroserverpy/api/models/etl/loaders/base.py +11 -0
  18. hydroserverpy/api/models/etl/loaders/hydroserver_loader.py +98 -0
  19. hydroserverpy/api/models/etl/orchestration_configuration.py +35 -0
  20. hydroserverpy/api/models/etl/orchestration_system.py +63 -0
  21. hydroserverpy/api/models/etl/schedule.py +16 -0
  22. hydroserverpy/api/models/etl/status.py +14 -0
  23. hydroserverpy/api/models/etl/timestamp_parser.py +112 -0
  24. hydroserverpy/api/models/etl/transformers/__init__.py +5 -0
  25. hydroserverpy/api/models/etl/transformers/base.py +135 -0
  26. hydroserverpy/api/models/etl/transformers/csv_transformer.py +88 -0
  27. hydroserverpy/api/models/etl/transformers/json_transformer.py +48 -0
  28. hydroserverpy/api/models/etl/types.py +7 -0
  29. hydroserverpy/api/models/iam/__init__.py +0 -0
  30. hydroserverpy/api/models/iam/account.py +12 -0
  31. hydroserverpy/api/models/iam/apikey.py +96 -0
  32. hydroserverpy/api/models/iam/collaborator.py +70 -0
  33. hydroserverpy/api/models/iam/role.py +38 -0
  34. hydroserverpy/api/models/iam/workspace.py +297 -0
  35. hydroserverpy/api/models/sta/__init__.py +0 -0
  36. hydroserverpy/api/models/sta/datastream.py +254 -0
  37. hydroserverpy/api/models/sta/observation.py +103 -0
  38. hydroserverpy/api/models/sta/observed_property.py +37 -0
  39. hydroserverpy/api/models/sta/processing_level.py +35 -0
  40. hydroserverpy/api/models/sta/result_qualifier.py +34 -0
  41. hydroserverpy/api/models/sta/sensor.py +44 -0
  42. hydroserverpy/api/models/sta/thing.py +113 -0
  43. hydroserverpy/api/models/sta/unit.py +36 -0
  44. hydroserverpy/api/services/__init__.py +12 -0
  45. hydroserverpy/api/services/base.py +118 -0
  46. hydroserverpy/api/services/etl/__init__.py +0 -0
  47. hydroserverpy/api/services/etl/data_archive.py +166 -0
  48. hydroserverpy/api/services/etl/data_source.py +163 -0
  49. hydroserverpy/api/services/etl/orchestration_system.py +66 -0
  50. hydroserverpy/api/services/iam/__init__.py +0 -0
  51. hydroserverpy/api/services/iam/role.py +38 -0
  52. hydroserverpy/api/services/iam/workspace.py +232 -0
  53. hydroserverpy/api/services/sta/__init__.py +0 -0
  54. hydroserverpy/api/services/sta/datastream.py +296 -0
  55. hydroserverpy/api/services/sta/observed_property.py +82 -0
  56. hydroserverpy/api/services/sta/processing_level.py +72 -0
  57. hydroserverpy/api/services/sta/result_qualifier.py +64 -0
  58. hydroserverpy/api/services/sta/sensor.py +102 -0
  59. hydroserverpy/api/services/sta/thing.py +195 -0
  60. hydroserverpy/api/services/sta/unit.py +78 -0
  61. hydroserverpy/api/utils.py +22 -0
  62. hydroserverpy/quality/__init__.py +1 -0
  63. hydroserverpy/quality/service.py +405 -0
  64. hydroserverpy-1.5.1.dist-info/METADATA +66 -0
  65. hydroserverpy-1.5.1.dist-info/RECORD +69 -0
  66. hydroserverpy-1.5.1.dist-info/WHEEL +5 -0
  67. hydroserverpy-1.5.1.dist-info/licenses/LICENSE +28 -0
  68. hydroserverpy-1.5.1.dist-info/top_level.txt +1 -0
  69. hydroserverpy-1.5.1.dist-info/zip-safe +1 -0
@@ -0,0 +1,296 @@
1
+ import json
2
+ import pandas as pd
3
+ from typing import Union, Optional, Literal, List, TYPE_CHECKING
4
+ from uuid import UUID
5
+ from datetime import datetime
6
+ from pydantic.alias_generators import to_camel
7
+ from hydroserverpy.api.models import Datastream, ObservationCollection
8
+ from hydroserverpy.api.utils import normalize_uuid
9
+ from ..base import HydroServerBaseService
10
+
11
+ if TYPE_CHECKING:
12
+ from hydroserverpy import HydroServer
13
+ from hydroserverpy.api.models import (
14
+ Workspace,
15
+ Thing,
16
+ Unit,
17
+ Sensor,
18
+ ObservedProperty,
19
+ ProcessingLevel,
20
+ DataSource,
21
+ DataArchive
22
+ )
23
+
24
+
25
class DatastreamService(HydroServerBaseService):
    """Service for managing HydroServer datastreams and their observations."""

    def __init__(self, client: "HydroServer"):
        # Model must be assigned before the base-service initialization runs.
        self.model = Datastream
        super().__init__(client)

    def list(
        self,
        page: int = ...,
        page_size: int = ...,
        order_by: List[str] = ...,
        workspace: Union["Workspace", UUID, str] = ...,
        thing: Union["Thing", UUID, str] = ...,
        sensor: Union["Sensor", UUID, str] = ...,
        observed_property: Union["ObservedProperty", UUID, str] = ...,
        processing_level: Union["ProcessingLevel", UUID, str] = ...,
        unit: Union["Unit", UUID, str] = ...,
        data_source: Optional[Union["DataSource", UUID, str]] = ...,
        data_archive: Optional[Union["DataArchive", UUID, str]] = ...,
        observation_type: str = ...,
        sampled_medium: str = ...,
        status: Optional[str] = ...,
        result_type: str = ...,
        is_private: bool = ...,
        value_count_max: int = ...,
        value_count_min: int = ...,
        phenomenon_begin_time_max: datetime = ...,
        phenomenon_begin_time_min: datetime = ...,
        phenomenon_end_time_max: datetime = ...,
        phenomenon_end_time_min: datetime = ...,
        result_begin_time_max: datetime = ...,
        result_begin_time_min: datetime = ...,
        result_end_time_max: datetime = ...,
        result_end_time_min: datetime = ...,
        fetch_all: bool = False,
    ) -> List["Datastream"]:
        """Fetch a collection of HydroServer datastreams.

        Parameters left as ``...`` are treated as "not provided" and are
        omitted from the query by the base service. Related entities may be
        passed as model instances, UUIDs, or UUID strings; they are reduced
        to id values via ``normalize_uuid``.
        """
        # NOTE: a previous revision annotated the return as List["Workspace"]
        # and documented "workspaces"; this service lists datastreams.
        return super().list(
            page=page,
            page_size=page_size,
            order_by=order_by,
            workspace_id=normalize_uuid(workspace),
            thing_id=normalize_uuid(thing),
            sensor_id=normalize_uuid(sensor),
            observed_property_id=normalize_uuid(observed_property),
            processing_level_id=normalize_uuid(processing_level),
            unit_id=normalize_uuid(unit),
            data_source_id=normalize_uuid(data_source),
            data_archive_id=normalize_uuid(data_archive),
            observation_type=observation_type,
            sampled_medium=sampled_medium,
            status=status,
            result_type=result_type,
            is_private=is_private,
            value_count_max=value_count_max,
            value_count_min=value_count_min,
            phenomenon_begin_time_max=phenomenon_begin_time_max,
            phenomenon_begin_time_min=phenomenon_begin_time_min,
            phenomenon_end_time_max=phenomenon_end_time_max,
            phenomenon_end_time_min=phenomenon_end_time_min,
            result_begin_time_max=result_begin_time_max,
            result_begin_time_min=result_begin_time_min,
            result_end_time_max=result_end_time_max,
            result_end_time_min=result_end_time_min,
            fetch_all=fetch_all,
        )

    def create(
        self,
        name: str,
        description: str,
        thing: Union["Thing", UUID, str],
        sensor: Union["Sensor", UUID, str],
        observed_property: Union["ObservedProperty", UUID, str],
        processing_level: Union["ProcessingLevel", UUID, str],
        unit: Union["Unit", UUID, str],
        observation_type: str,
        result_type: str,
        sampled_medium: str,
        no_data_value: float,
        aggregation_statistic: str,
        time_aggregation_interval: float,
        time_aggregation_interval_unit: Literal["seconds", "minutes", "hours", "days"],
        intended_time_spacing: Optional[float] = None,
        intended_time_spacing_unit: Optional[
            Literal["seconds", "minutes", "hours", "days"]
        ] = None,
        status: Optional[str] = None,
        value_count: Optional[int] = None,
        phenomenon_begin_time: Optional[datetime] = None,
        phenomenon_end_time: Optional[datetime] = None,
        result_begin_time: Optional[datetime] = None,
        result_end_time: Optional[datetime] = None,
        is_private: bool = False,
        is_visible: bool = True,
    ) -> "Datastream":
        """Create a new datastream.

        Related entities (thing, sensor, observed property, processing level,
        unit) may be given as model instances, UUIDs, or UUID strings.
        Returns the created Datastream model.
        """
        # Keys are camelCase to match the HydroServer API request schema.
        body = {
            "name": name,
            "description": description,
            "thingId": normalize_uuid(thing),
            "sensorId": normalize_uuid(sensor),
            "observedPropertyId": normalize_uuid(observed_property),
            "processingLevelId": normalize_uuid(processing_level),
            "unitId": normalize_uuid(unit),
            "observationType": observation_type,
            "resultType": result_type,
            "sampledMedium": sampled_medium,
            "noDataValue": no_data_value,
            "aggregationStatistic": aggregation_statistic,
            "timeAggregationInterval": time_aggregation_interval,
            "timeAggregationIntervalUnit": time_aggregation_interval_unit,
            "intendedTimeSpacing": intended_time_spacing,
            "intendedTimeSpacingUnit": intended_time_spacing_unit,
            "status": status,
            "valueCount": value_count,
            "phenomenonBeginTime": phenomenon_begin_time,
            "phenomenonEndTime": phenomenon_end_time,
            "resultBeginTime": result_begin_time,
            "resultEndTime": result_end_time,
            "isPrivate": is_private,
            "isVisible": is_visible,
        }

        return super().create(**body)

    def update(
        self,
        uid: Union[UUID, str],
        name: str = ...,
        description: str = ...,
        thing: Union["Thing", UUID, str] = ...,
        sensor: Union["Sensor", UUID, str] = ...,
        observed_property: Union["ObservedProperty", UUID, str] = ...,
        processing_level: Union["ProcessingLevel", UUID, str] = ...,
        unit: Union["Unit", UUID, str] = ...,
        observation_type: str = ...,
        result_type: str = ...,
        sampled_medium: str = ...,
        no_data_value: float = ...,
        aggregation_statistic: str = ...,
        time_aggregation_interval: float = ...,
        time_aggregation_interval_unit: Literal[
            "seconds", "minutes", "hours", "days"
        ] = ...,
        intended_time_spacing: Optional[float] = ...,
        intended_time_spacing_unit: Optional[
            Literal["seconds", "minutes", "hours", "days"]
        ] = ...,
        status: Optional[str] = ...,
        value_count: Optional[int] = ...,
        phenomenon_begin_time: Optional[datetime] = ...,
        phenomenon_end_time: Optional[datetime] = ...,
        result_begin_time: Optional[datetime] = ...,
        result_end_time: Optional[datetime] = ...,
        is_private: bool = ...,
        is_visible: bool = ...,
    ) -> "Datastream":
        """Update a datastream.

        Only fields explicitly provided (anything other than the ``...``
        sentinel) are sent; the base service strips sentinel values.
        """
        body = {
            "name": name,
            "description": description,
            "thingId": normalize_uuid(thing),
            "sensorId": normalize_uuid(sensor),
            "observedPropertyId": normalize_uuid(observed_property),
            "processingLevelId": normalize_uuid(processing_level),
            "unitId": normalize_uuid(unit),
            "observationType": observation_type,
            "resultType": result_type,
            "sampledMedium": sampled_medium,
            "noDataValue": no_data_value,
            "aggregationStatistic": aggregation_statistic,
            "timeAggregationInterval": time_aggregation_interval,
            "timeAggregationIntervalUnit": time_aggregation_interval_unit,
            "intendedTimeSpacing": intended_time_spacing,
            "intendedTimeSpacingUnit": intended_time_spacing_unit,
            "status": status,
            "valueCount": value_count,
            "phenomenonBeginTime": phenomenon_begin_time,
            "phenomenonEndTime": phenomenon_end_time,
            "resultBeginTime": result_begin_time,
            "resultEndTime": result_end_time,
            "isPrivate": is_private,
            "isVisible": is_visible,
        }

        return super().update(uid=str(uid), **body)

    def get_observations(
        self,
        uid: Union[UUID, str],
        page: int = ...,
        page_size: int = 100000,
        order_by: List[str] = ...,
        phenomenon_time_max: datetime = ...,
        phenomenon_time_min: datetime = ...,
        result_qualifier_code: str = ...,
        fetch_all: bool = False,
    ) -> ObservationCollection:
        """Retrieve observations of a datastream.

        Returns an ObservationCollection; when ``fetch_all`` is True, all
        remaining pages are fetched eagerly before returning.
        """
        params = {
            "page": page,
            "page_size": page_size,
            "order_by": ",".join(order_by) if order_by is not ... else order_by,
            "phenomenon_time_max": phenomenon_time_max,
            "phenomenon_time_min": phenomenon_time_min,
            "result_qualifier_code": result_qualifier_code,
            "format": "column"
        }
        # Drop unset (Ellipsis) params; encode explicit None as the literal
        # string "null" expected by the API query layer.
        params = {
            k: ("null" if v is None else v)
            for k, v in params.items()
            if v is not ...
        }

        path = f"/{self.client.base_route}/{self.model.get_route()}/{str(uid)}/observations"
        response = self.client.request("get", path, params=params)
        datastream = self.get(uid=uid)
        collection = ObservationCollection(
            datastream=datastream,
            response=response,
            order_by=params.get("order_by"),
            # Pagination/format keys are not data filters; keep only filters.
            filters={k: v for k, v in params.items() if k not in ["page", "page_size", "order_by", "format"]},
        )
        if fetch_all is True:
            collection = collection.fetch_all()

        return collection

    def load_observations(
        self,
        uid: Union[UUID, str],
        observations: pd.DataFrame,
        mode: str = "insert"
    ) -> None:
        """Load observations to a datastream.

        The DataFrame's column names are camelCased to match the API schema;
        rows are sent as positional value lists.
        """
        path = f"/{self.client.base_route}/{self.model.get_route()}/{str(uid)}/observations/bulk-create"
        headers = {"Content-type": "application/json"}
        params = {"mode": mode}
        body = {
            "fields": [to_camel(col) for col in observations.columns.tolist()],
            "data": observations.values.tolist()
        }

        self.client.request(
            "post", path, headers=headers, params=params, data=json.dumps(body, default=self.default_serializer)
        )

    def delete_observations(
        self,
        uid: Union[UUID, str],
        phenomenon_time_start: Optional[datetime] = None,
        phenomenon_time_end: Optional[datetime] = None,
    ) -> None:
        """Delete observations from a datastream.

        With no time bounds the request deletes all observations of the
        datastream; either bound may be given independently.
        """
        path = f"/{self.client.base_route}/{self.model.get_route()}/{str(uid)}/observations/bulk-delete"
        headers = {"Content-type": "application/json"}
        body = {}

        if phenomenon_time_start is not None:
            body["phenomenonTimeStart"] = phenomenon_time_start
        if phenomenon_time_end is not None:
            body["phenomenonTimeEnd"] = phenomenon_time_end

        self.client.request(
            "post", path, headers=headers, data=json.dumps(body, default=self.default_serializer)
        )
@@ -0,0 +1,82 @@
1
+ from typing import Optional, Union, List, TYPE_CHECKING
2
+ from uuid import UUID
3
+ from hydroserverpy.api.models import ObservedProperty
4
+ from hydroserverpy.api.utils import normalize_uuid
5
+ from ..base import HydroServerBaseService
6
+
7
+ if TYPE_CHECKING:
8
+ from hydroserverpy import HydroServer
9
+ from hydroserverpy.api.models import Workspace, Thing, Datastream
10
+
11
+
12
class ObservedPropertyService(HydroServerBaseService):
    """Service for managing HydroServer observed properties."""

    def __init__(self, client: "HydroServer"):
        # Model must be assigned before the base-service initialization runs.
        self.model = ObservedProperty
        super().__init__(client)

    def list(
        self,
        page: int = ...,
        page_size: int = ...,
        order_by: List[str] = ...,
        workspace: Optional[Union["Workspace", UUID, str]] = ...,
        thing: Optional[Union["Thing", UUID, str]] = ...,
        datastream: Optional[Union["Datastream", UUID, str]] = ...,
        observed_property_type: str = ...,
        fetch_all: bool = False,
    ) -> List["ObservedProperty"]:
        """Fetch a collection of observed properties.

        Parameters left as ``...`` are treated as "not provided" and are
        omitted from the query by the base service.
        """
        return super().list(
            page=page,
            page_size=page_size,
            order_by=order_by,
            workspace_id=normalize_uuid(workspace),
            thing_id=normalize_uuid(thing),
            datastream_id=normalize_uuid(datastream),
            type=observed_property_type,
            fetch_all=fetch_all,
        )

    def create(
        self,
        name: str,
        definition: str,
        description: str,
        observed_property_type: str,
        code: str,
        workspace: Optional[Union["Workspace", UUID, str]] = None,
    ) -> "ObservedProperty":
        """Create a new observed property.

        Omitting ``workspace`` creates the observed property without a
        workspace association.
        """
        body = {
            "name": name,
            "definition": definition,
            "description": description,
            "type": observed_property_type,
            "code": code,
            "workspaceId": normalize_uuid(workspace),
        }

        return super().create(**body)

    def update(
        self,
        uid: Union[UUID, str],
        name: str = ...,
        definition: str = ...,
        description: str = ...,
        observed_property_type: str = ...,
        code: str = ...,
    ) -> "ObservedProperty":
        """Update an observed property.

        Only fields explicitly provided (anything other than the ``...``
        sentinel) are sent; the base service strips sentinel values.
        """
        body = {
            "name": name,
            "definition": definition,
            "description": description,
            # Fixed: previously sent as "observedPropertyType", which is
            # inconsistent with create()/list() — the API field is "type".
            "type": observed_property_type,
            "code": code,
        }

        return super().update(uid=str(uid), **body)
@@ -0,0 +1,72 @@
1
+ from typing import Optional, Union, List, TYPE_CHECKING
2
+ from uuid import UUID
3
+ from hydroserverpy.api.models import ProcessingLevel
4
+ from hydroserverpy.api.utils import normalize_uuid
5
+ from ..base import HydroServerBaseService
6
+
7
+ if TYPE_CHECKING:
8
+ from hydroserverpy import HydroServer
9
+ from hydroserverpy.api.models import Workspace, Thing, Datastream
10
+
11
+
12
class ProcessingLevelService(HydroServerBaseService):
    """Service for managing HydroServer processing levels."""

    def __init__(self, client: "HydroServer"):
        # The base service expects ``model`` to be set before it initializes.
        self.model = ProcessingLevel
        super().__init__(client)

    def list(
        self,
        page: int = ...,
        page_size: int = ...,
        order_by: List[str] = ...,
        workspace: Optional[Union["Workspace", UUID, str]] = ...,
        thing: Optional[Union["Thing", UUID, str]] = ...,
        datastream: Optional[Union["Datastream", UUID, str]] = ...,
        fetch_all: bool = False,
    ) -> List["ProcessingLevel"]:
        """Fetch a collection of processing levels."""
        # Related entities may be models, UUIDs, or strings; reduce to ids.
        relation_filters = {
            "workspace_id": normalize_uuid(workspace),
            "thing_id": normalize_uuid(thing),
            "datastream_id": normalize_uuid(datastream),
        }
        return super().list(
            page=page,
            page_size=page_size,
            order_by=order_by,
            fetch_all=fetch_all,
            **relation_filters,
        )

    def create(
        self,
        code: str,
        definition: Optional[str] = None,
        explanation: Optional[str] = None,
        workspace: Optional[Union["Workspace", UUID, str]] = None,
    ) -> "ProcessingLevel":
        """Create a new processing level."""
        # Field names are camelCase to match the HydroServer API schema.
        return super().create(
            code=code,
            definition=definition,
            explanation=explanation,
            workspaceId=normalize_uuid(workspace),
        )

    def update(
        self,
        uid: Union[UUID, str],
        code: str = ...,
        definition: str = ...,
        explanation: str = ...,
    ) -> "ProcessingLevel":
        """Update a processing level."""
        # Unset fields remain the ``...`` sentinel; the base service drops them.
        return super().update(
            uid=str(uid),
            code=code,
            definition=definition,
            explanation=explanation,
        )
@@ -0,0 +1,64 @@
1
+ from typing import Optional, Union, List, TYPE_CHECKING
2
+ from uuid import UUID
3
+ from hydroserverpy.api.models import ResultQualifier
4
+ from hydroserverpy.api.utils import normalize_uuid
5
+ from ..base import HydroServerBaseService
6
+
7
+ if TYPE_CHECKING:
8
+ from hydroserverpy import HydroServer
9
+ from hydroserverpy.api.models import Workspace
10
+
11
+
12
class ResultQualifierService(HydroServerBaseService):
    """Service for managing HydroServer result qualifiers."""

    def __init__(self, client: "HydroServer"):
        # The base service expects ``model`` to be set before it initializes.
        self.model = ResultQualifier
        super().__init__(client)

    def list(
        self,
        page: int = ...,
        page_size: int = ...,
        order_by: List[str] = ...,
        workspace: Optional[Union["Workspace", UUID, str]] = ...,
        fetch_all: bool = False,
    ) -> List["ResultQualifier"]:
        """Fetch a collection of result qualifiers."""
        return super().list(
            page=page,
            page_size=page_size,
            order_by=order_by,
            fetch_all=fetch_all,
            # Accepts a Workspace model, UUID, or string; reduced to an id.
            workspace_id=normalize_uuid(workspace),
        )

    def create(
        self,
        code: str,
        description: Optional[str] = None,
        workspace: Optional[Union["Workspace", UUID, str]] = None,
    ) -> "ResultQualifier":
        """Create a new result qualifier."""
        # Field names are camelCase to match the HydroServer API schema.
        return super().create(
            code=code,
            description=description,
            workspaceId=normalize_uuid(workspace),
        )

    def update(
        self,
        uid: Union[UUID, str],
        code: str = ...,
        description: str = ...,
    ) -> "ResultQualifier":
        """Update a result qualifier."""
        # Unset fields remain the ``...`` sentinel; the base service drops them.
        return super().update(uid=str(uid), code=code, description=description)
@@ -0,0 +1,102 @@
1
+ from typing import Optional, Union, List, TYPE_CHECKING
2
+ from uuid import UUID
3
+ from hydroserverpy.api.models import Sensor
4
+ from hydroserverpy.api.utils import normalize_uuid
5
+ from ..base import HydroServerBaseService
6
+
7
+ if TYPE_CHECKING:
8
+ from hydroserverpy import HydroServer
9
+ from hydroserverpy.api.models import Workspace, Thing, Datastream
10
+
11
+
12
class SensorService(HydroServerBaseService):
    """Service for managing HydroServer sensors."""

    def __init__(self, client: "HydroServer"):
        # The base service expects ``model`` to be set before it initializes.
        self.model = Sensor
        super().__init__(client)

    def list(
        self,
        page: int = ...,
        page_size: int = ...,
        order_by: List[str] = ...,
        workspace: Optional[Union["Workspace", UUID, str]] = ...,
        thing: Optional[Union["Thing", UUID, str]] = ...,
        datastream: Optional[Union["Datastream", UUID, str]] = ...,
        encoding_type: str = ...,
        manufacturer: Optional[str] = ...,
        method_type: str = ...,
        fetch_all: bool = False,
    ) -> List["Sensor"]:
        """Fetch a collection of sensors."""
        # Related entities may be models, UUIDs, or strings; reduce to ids.
        relation_filters = {
            "workspace_id": normalize_uuid(workspace),
            "thing_id": normalize_uuid(thing),
            "datastream_id": normalize_uuid(datastream),
        }
        return super().list(
            page=page,
            page_size=page_size,
            order_by=order_by,
            encoding_type=encoding_type,
            manufacturer=manufacturer,
            method_type=method_type,
            fetch_all=fetch_all,
            **relation_filters,
        )

    def create(
        self,
        name: str,
        description: str,
        encoding_type: str,
        method_type: str,
        manufacturer: Optional[str] = None,
        sensor_model: Optional[str] = None,
        sensor_model_link: Optional[str] = None,
        method_link: Optional[str] = None,
        method_code: Optional[str] = None,
        workspace: Optional[Union["Workspace", UUID, str]] = None,
    ) -> "Sensor":
        """Create a new sensor."""
        # Field names are camelCase to match the HydroServer API schema; the
        # local ``sensor_model*`` parameters map onto "model"/"modelLink".
        return super().create(
            name=name,
            description=description,
            encodingType=encoding_type,
            methodType=method_type,
            manufacturer=manufacturer,
            model=sensor_model,
            modelLink=sensor_model_link,
            methodLink=method_link,
            methodCode=method_code,
            workspaceId=normalize_uuid(workspace),
        )

    def update(
        self,
        uid: Union[UUID, str],
        name: str = ...,
        description: str = ...,
        encoding_type: str = ...,
        method_type: str = ...,
        manufacturer: Optional[str] = ...,
        sensor_model: Optional[str] = ...,
        sensor_model_link: Optional[str] = ...,
        method_link: Optional[str] = ...,
        method_code: Optional[str] = ...,
    ) -> "Sensor":
        """Update a sensor."""
        # Unset fields remain the ``...`` sentinel; the base service drops them.
        return super().update(
            uid=str(uid),
            name=name,
            description=description,
            encodingType=encoding_type,
            methodType=method_type,
            manufacturer=manufacturer,
            model=sensor_model,
            modelLink=sensor_model_link,
            methodLink=method_link,
            methodCode=method_code,
        )