hydroserverpy 1.2.1__py3-none-any.whl → 1.3.0b1__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release: this version of hydroserverpy has been flagged as possibly problematic.

Files changed (47)
  1. hydroserverpy/__init__.py +1 -1
  2. hydroserverpy/api/{main.py → client.py} +52 -22
  3. hydroserverpy/api/models/__init__.py +1 -2
  4. hydroserverpy/api/models/base.py +180 -47
  5. hydroserverpy/api/models/etl/data_archive.py +31 -59
  6. hydroserverpy/api/models/etl/data_source.py +34 -76
  7. hydroserverpy/api/models/etl/orchestration_system.py +21 -36
  8. hydroserverpy/api/models/iam/apikey.py +57 -38
  9. hydroserverpy/api/models/iam/collaborator.py +55 -19
  10. hydroserverpy/api/models/iam/role.py +32 -4
  11. hydroserverpy/api/models/iam/workspace.py +58 -86
  12. hydroserverpy/api/models/sta/datastream.py +122 -214
  13. hydroserverpy/api/models/sta/observation.py +101 -0
  14. hydroserverpy/api/models/sta/observed_property.py +18 -53
  15. hydroserverpy/api/models/sta/processing_level.py +16 -31
  16. hydroserverpy/api/models/sta/result_qualifier.py +16 -31
  17. hydroserverpy/api/models/sta/sensor.py +27 -88
  18. hydroserverpy/api/models/sta/thing.py +48 -152
  19. hydroserverpy/api/models/sta/unit.py +16 -29
  20. hydroserverpy/api/services/__init__.py +1 -0
  21. hydroserverpy/api/services/base.py +92 -76
  22. hydroserverpy/api/services/etl/data_archive.py +42 -72
  23. hydroserverpy/api/services/etl/data_source.py +42 -72
  24. hydroserverpy/api/services/etl/orchestration_system.py +25 -33
  25. hydroserverpy/api/services/iam/role.py +38 -0
  26. hydroserverpy/api/services/iam/workspace.py +96 -99
  27. hydroserverpy/api/services/sta/datastream.py +151 -210
  28. hydroserverpy/api/services/sta/observed_property.py +31 -49
  29. hydroserverpy/api/services/sta/processing_level.py +30 -36
  30. hydroserverpy/api/services/sta/result_qualifier.py +24 -34
  31. hydroserverpy/api/services/sta/sensor.py +34 -48
  32. hydroserverpy/api/services/sta/thing.py +96 -89
  33. hydroserverpy/api/services/sta/unit.py +30 -34
  34. hydroserverpy/api/utils.py +22 -0
  35. hydroserverpy/etl/extractors/base.py +2 -4
  36. hydroserverpy/etl/loaders/hydroserver_loader.py +1 -0
  37. hydroserverpy/etl/timestamp_parser.py +82 -48
  38. hydroserverpy/etl/transformers/base.py +5 -9
  39. hydroserverpy/etl_csv/hydroserver_etl_csv.py +1 -1
  40. {hydroserverpy-1.2.1.dist-info → hydroserverpy-1.3.0b1.dist-info}/METADATA +1 -1
  41. hydroserverpy-1.3.0b1.dist-info/RECORD +70 -0
  42. hydroserverpy/api/http.py +0 -22
  43. hydroserverpy-1.2.1.dist-info/RECORD +0 -68
  44. {hydroserverpy-1.2.1.dist-info → hydroserverpy-1.3.0b1.dist-info}/WHEEL +0 -0
  45. {hydroserverpy-1.2.1.dist-info → hydroserverpy-1.3.0b1.dist-info}/licenses/LICENSE +0 -0
  46. {hydroserverpy-1.2.1.dist-info → hydroserverpy-1.3.0b1.dist-info}/top_level.txt +0 -0
  47. {hydroserverpy-1.2.1.dist-info → hydroserverpy-1.3.0b1.dist-info}/zip-safe +0 -0
hydroserverpy/api/services/sta/datastream.py

@@ -3,9 +3,10 @@ import pandas as pd
 from typing import Union, Optional, Literal, List, TYPE_CHECKING
 from uuid import UUID
 from datetime import datetime
-from hydroserverpy.api.models import Datastream
-from ..base import SensorThingsService
-
+from pydantic.alias_generators import to_camel
+from hydroserverpy.api.models import Datastream, ObservationCollection
+from hydroserverpy.api.utils import normalize_uuid
+from ..base import HydroServerBaseService
 
 if TYPE_CHECKING:
     from hydroserverpy import HydroServer
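
The new normalize_uuid helper comes from hydroserverpy/api/utils.py (+22 lines), which is not expanded on this page. Judging from the call sites below, where it receives model instances, UUIDs, plain strings, and the `...` "unset" sentinel alike, a minimal sketch might look like this (the actual implementation may differ):

    from typing import Any

    def normalize_uuid(value: Any) -> Any:
        # Sketch only: collapse a model instance, UUID, or string to a string ID,
        # passing None and the ... (unset) sentinel through unchanged.
        if value is None or value is ...:
            return value
        # Models were previously unwrapped with getattr(obj, "uid", obj), so the
        # same attribute is assumed here.
        return str(getattr(value, "uid", value))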
@@ -16,46 +17,77 @@ if TYPE_CHECKING:
         Sensor,
         ObservedProperty,
         ProcessingLevel,
+        DataSource,
+        DataArchive
     )
 
 
-class DatastreamService(SensorThingsService):
-    def __init__(self, connection: "HydroServer"):
-        self._model = Datastream
-        self._api_route = "api/data"
-        self._endpoint_route = "datastreams"
-        self._sta_route = "api/sensorthings/v1.1/Datastreams"
-
-        super().__init__(connection)
+class DatastreamService(HydroServerBaseService):
+    def __init__(self, client: "HydroServer"):
+        self.model = Datastream
+        super().__init__(client)
 
     def list(
         self,
-        workspace: Optional[Union["Workspace", UUID, str]] = None,
-        thing: Optional[Union["Thing", UUID, str]] = None,
-        page: int = 1,
-        page_size: int = 100,
-    ) -> List["Datastream"]:
-        """Fetch a collection of datastreams."""
-
-        params = {"$top": page_size, "$skip": page_size * (page - 1)}
-
-        filters = []
-        if workspace:
-            filters.append(
-                f"properties/workspace/id eq '{str(getattr(workspace, 'uid', workspace))}'"
-            )
-        if thing:
-            filters.append(f"Thing/id eq '{str(getattr(thing, 'uid', thing))}'")
-
-        if filters:
-            params["$filter"] = " and ".join(filters)
-
-        return super()._list(params=params)
-
-    def get(self, uid: Union[UUID, str]) -> "Datastream":
-        """Get a datastream by ID."""
+        page: int = ...,
+        page_size: int = ...,
+        order_by: List[str] = ...,
+        workspace: Union["Workspace", UUID, str] = ...,
+        thing: Union["Thing", UUID, str] = ...,
+        sensor: Union["Sensor", UUID, str] = ...,
+        observed_property: Union["ObservedProperty", UUID, str] = ...,
+        processing_level: Union["ProcessingLevel", UUID, str] = ...,
+        unit: Union["Unit", UUID, str] = ...,
+        data_source: Optional[Union["DataSource", UUID, str]] = ...,
+        data_archive: Optional[Union["DataArchive", UUID, str]] = ...,
+        observation_type: str = ...,
+        sampled_medium: str = ...,
+        status: Optional[str] = ...,
+        result_type: str = ...,
+        is_private: bool = ...,
+        value_count_max: int = ...,
+        value_count_min: int = ...,
+        phenomenon_begin_time_max: datetime = ...,
+        phenomenon_begin_time_min: datetime = ...,
+        phenomenon_end_time_max: datetime = ...,
+        phenomenon_end_time_min: datetime = ...,
+        result_begin_time_max: datetime = ...,
+        result_begin_time_min: datetime = ...,
+        result_end_time_max: datetime = ...,
+        result_end_time_min: datetime = ...,
+        fetch_all: bool = False,
+    ) -> List["Workspace"]:
+        """Fetch a collection of HydroServer workspaces."""
 
-        return super()._get(uid=str(uid))
+        return super().list(
+            page=page,
+            page_size=page_size,
+            order_by=order_by,
+            workspace_id=normalize_uuid(workspace),
+            thing_id=normalize_uuid(thing),
+            sensor_id=normalize_uuid(sensor),
+            observed_property_id=normalize_uuid(observed_property),
+            processing_level_id=normalize_uuid(processing_level),
+            unit_id=normalize_uuid(unit),
+            data_source_id=normalize_uuid(data_source),
+            data_archive_id=normalize_uuid(data_archive),
+            observation_type=observation_type,
+            sampled_medium=sampled_medium,
+            status=status,
+            result_type=result_type,
+            is_private=is_private,
+            value_count_max=value_count_max,
+            value_count_min=value_count_min,
+            phenomenon_begin_time_max=phenomenon_begin_time_max,
+            phenomenon_begin_time_min=phenomenon_begin_time_min,
+            phenomenon_end_time_max=phenomenon_end_time_max,
+            phenomenon_end_time_min=phenomenon_end_time_min,
+            result_begin_time_max=result_begin_time_max,
+            result_begin_time_min=result_begin_time_min,
+            result_end_time_max=result_end_time_max,
+            result_end_time_min=result_end_time_min,
+            fetch_all=fetch_all,
+        )
 
     def create(
         self,
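
The rewritten list() drops the hand-built SensorThings $filter/$top/$skip parameters in favor of plain keyword filters forwarded to the shared base service, with `...` (Ellipsis) meaning "not provided" and fetch_all paging through every result. Note that the new return annotation and docstring still read "workspaces" even though the method returns datastreams. A hedged usage sketch, assuming the client exposes this service as client.datastreams (the accessor name is not shown in this diff):

    from datetime import datetime, timezone

    # `client` is a connected HydroServer instance; `thing` may be a Thing
    # model, a UUID, or an ID string.
    datastreams = client.datastreams.list(
        thing=thing,
        is_private=False,
        phenomenon_end_time_min=datetime(2024, 1, 1, tzinfo=timezone.utc),
        order_by=["name"],
        fetch_all=True,
    )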
@@ -88,18 +120,14 @@ class DatastreamService(SensorThingsService):
     ) -> "Datastream":
         """Create a new datastream."""
 
-        kwargs = {
+        body = {
             "name": name,
             "description": description,
-            "thingId": str(getattr(thing, "uid", thing)),
-            "sensorId": str(getattr(sensor, "uid", sensor)),
-            "observedPropertyId": str(
-                getattr(observed_property, "uid", observed_property)
-            ),
-            "processingLevelId": str(
-                getattr(processing_level, "uid", processing_level)
-            ),
-            "unitId": str(getattr(unit, "uid", unit)),
+            "thingId": normalize_uuid(thing),
+            "sensorId": normalize_uuid(sensor),
+            "observedPropertyId": normalize_uuid(observed_property),
+            "processingLevelId": normalize_uuid(processing_level),
+            "unitId": normalize_uuid(unit),
             "observationType": observation_type,
             "resultType": result_type,
             "sampledMedium": sampled_medium,
@@ -111,21 +139,15 @@ class DatastreamService(SensorThingsService):
             "intendedTimeSpacingUnit": intended_time_spacing_unit,
             "status": status,
             "valueCount": value_count,
-            "phenomenonBeginTime": (
-                phenomenon_begin_time.isoformat() if phenomenon_begin_time else None
-            ),
-            "phenomenonEndTime": (
-                phenomenon_end_time.isoformat() if phenomenon_end_time else None
-            ),
-            "resultBeginTime": (
-                result_begin_time.isoformat() if result_begin_time else None
-            ),
-            "resultEndTime": result_end_time.isoformat() if result_end_time else None,
+            "phenomenonBeginTime": phenomenon_begin_time,
+            "phenomenonEndTime": phenomenon_end_time,
+            "resultBeginTime": result_begin_time,
+            "resultEndTime": result_end_time,
             "isPrivate": is_private,
             "isVisible": is_visible,
         }
 
-        return super()._create(**kwargs)
+        return super().create(**body)
 
     def update(
         self,
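
create() now reduces related records to IDs with normalize_uuid and leaves datetimes unserialized; JSON encoding is presumably deferred to the base service (its json.dumps(..., default=self.default_serializer) call appears in the observation methods further down). A small illustration of the before/after handling, with placeholder values:

    from datetime import datetime, timezone

    begin = datetime(2024, 1, 1, tzinfo=timezone.utc)

    # 1.2.1: each service method pre-serialized datetimes itself.
    old_style = begin.isoformat() if begin else None  # "2024-01-01T00:00:00+00:00"

    # 1.3.0b1: the datetime object goes into the request body as-is and is
    # assumed to be serialized later by the shared default_serializer.
    new_style = {"phenomenonBeginTime": begin}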
@@ -161,22 +183,14 @@ class DatastreamService(SensorThingsService):
     ) -> "Datastream":
         """Update a datastream."""
 
-        kwargs = {
+        body = {
             "name": name,
             "description": description,
-            "thingId": ... if thing is ... else str(getattr(thing, "uid", thing)),
-            "sensorId": ... if sensor is ... else str(getattr(sensor, "uid", sensor)),
-            "observedPropertyId": (
-                ...
-                if observed_property is ...
-                else str(getattr(observed_property, "uid", observed_property))
-            ),
-            "processingLevelId": (
-                ...
-                if processing_level is ...
-                else str(getattr(processing_level, "uid", processing_level))
-            ),
-            "unitId": ... if unit is ... else str(getattr(unit, "uid", unit)),
+            "thingId": normalize_uuid(thing),
+            "sensorId": normalize_uuid(sensor),
+            "observedPropertyId": normalize_uuid(observed_property),
+            "processingLevelId": normalize_uuid(processing_level),
+            "unitId": normalize_uuid(unit),
             "observationType": observation_type,
             "resultType": result_type,
             "sampledMedium": sampled_medium,
@@ -188,167 +202,94 @@ class DatastreamService(SensorThingsService):
             "intendedTimeSpacingUnit": intended_time_spacing_unit,
             "status": status,
             "valueCount": value_count,
-            "phenomenonBeginTime": (
-                phenomenon_begin_time.isoformat()
-                if phenomenon_begin_time
-                not in (
-                    None,
-                    ...,
-                )
-                else phenomenon_begin_time
-            ),
-            "phenomenonEndTime": (
-                phenomenon_end_time.isoformat()
-                if phenomenon_end_time
-                not in (
-                    None,
-                    ...,
-                )
-                else phenomenon_end_time
-            ),
-            "resultBeginTime": (
-                result_begin_time.isoformat()
-                if result_begin_time
-                not in (
-                    None,
-                    ...,
-                )
-                else result_begin_time
-            ),
-            "resultEndTime": (
-                result_end_time.isoformat()
-                if result_end_time
-                not in (
-                    None,
-                    ...,
-                )
-                else result_end_time
-            ),
+            "phenomenonBeginTime": phenomenon_begin_time,
+            "phenomenonEndTime": phenomenon_end_time,
+            "resultBeginTime": result_begin_time,
+            "resultEndTime": result_end_time,
             "isPrivate": is_private,
             "isVisible": is_visible,
         }
 
-        return super()._update(
-            uid=str(uid), **{k: v for k, v in kwargs.items() if v is not ...}
-        )
-
-    def delete(self, uid: Union[UUID, str]) -> None:
-        """Delete a datastream."""
-
-        super()._delete(uid=str(uid))
+        return super().update(uid=str(uid), **body)
 
     def get_observations(
         self,
         uid: Union[UUID, str],
-        start_time: datetime = None,
-        end_time: datetime = None,
-        page: int = 1,
+        page: int = ...,
         page_size: int = 100000,
-        include_quality: bool = False,
+        order_by: List[str] = ...,
+        phenomenon_time_max: datetime = ...,
+        phenomenon_time_min: datetime = ...,
         fetch_all: bool = False,
-    ) -> pd.DataFrame:
+    ) -> ObservationCollection:
         """Retrieve observations of a datastream."""
 
-        filters = []
-        if start_time:
-            filters.append(
-                f'phenomenonTime ge {start_time.strftime("%Y-%m-%dT%H:%M:%S%z")}'
-            )
-        if end_time:
-            filters.append(
-                f'phenomenonTime le {end_time.strftime("%Y-%m-%dT%H:%M:%S%z")}'
-            )
+        params = {
+            "page": page,
+            "page_size": page_size,
+            "order_by": ",".join(order_by) if order_by is not ... else order_by,
+            "phenomenon_time_max": phenomenon_time_max,
+            "phenomenon_time_min": phenomenon_time_min,
+            "format": "column"
+        }
+        params = {
+            k: ("null" if v is None else v)
+            for k, v in params.items()
+            if v is not ...
+        }
 
-        if fetch_all:
-            page = 1
+        path = f"/{self.client.base_route}/{self.model.get_route()}/{str(uid)}/observations"
+        response = self.client.request("get", path, params=params)
+        datastream = self.get(uid=uid)
+        collection = ObservationCollection(
+            datastream=datastream,
+            response=response,
+            order_by=params.get("order_by"),
+            filters={k: v for k, v in params.items() if k not in ["page", "page_size", "order_by", "format"]},
+        )
+        if fetch_all is True:
+            collection = collection.fetch_all()
 
-        observations = []
+        return collection
 
-        while True:
-            response = self._connection.request(
-                "get",
-                f"api/sensorthings/v1.1/Datastreams('{str(uid)}')/Observations",
-                params={
-                    "$resultFormat": "dataArray",
-                    "$select": f'phenomenonTime,result{",resultQuality" if include_quality else ""}',
-                    "$count": True,
-                    "$top": page_size,
-                    "$skip": (page - 1) * page_size,
-                    "$filter": " and ".join(filters) if filters else None,
-                },
-            )
-            response_content = json.loads(response.content)
-            data_array = (
-                response_content["value"][0]["dataArray"]
-                if response_content["value"]
-                else []
-            )
-            observations.extend(
-                [
-                    (
-                        [
-                            obs[0],
-                            obs[1],
-                            obs[2]["qualityCode"] if obs[2]["qualityCode"] else None,
-                            (
-                                obs[2]["resultQualifiers"]
-                                if obs[2]["resultQualifiers"]
-                                else None
-                            ),
-                        ]
-                        if include_quality
-                        else [obs[0], obs[1]]
-                    )
-                    for obs in data_array
-                ]
-            )
-            if not fetch_all or len(data_array) < page_size:
-                break
-            page += 1
+    def load_observations(
+        self,
+        uid: Union[UUID, str],
+        observations: pd.DataFrame,
+    ) -> None:
+        """Load observations to a datastream."""
 
-        columns = ["timestamp", "value"]
-        if include_quality:
-            columns.extend(["quality_code", "result_quality"])
+        path = f"/{self.client.base_route}/{self.model.get_route()}/{str(uid)}/observations/bulk-create"
+        headers = {"Content-type": "application/json"}
+        params = {"mode": "insert"}
+        body = {
+            "fields": [to_camel(col) for col in observations.columns.tolist()],
+            "data": observations.values.tolist()
+        }
 
-        data_frame = pd.DataFrame(observations, columns=columns)
-        data_frame["timestamp"] = pd.to_datetime(data_frame["timestamp"])
+        print(body)
 
-        return data_frame
+        self.client.request(
+            "post", path, headers=headers, params=params, data=json.dumps(body, default=self.default_serializer)
        )
 
-    def load_observations(
+    def delete_observations(
         self,
         uid: Union[UUID, str],
-        observations: pd.DataFrame,
+        phenomenon_time_start: Optional[datetime] = None,
+        phenomenon_time_end: Optional[datetime] = None,
     ) -> None:
-        """Load observations to a datastream."""
+        """Delete observations from a datastream."""
+
+        path = f"/{self.client.base_route}/{self.model.get_route()}/{str(uid)}/observations/bulk-delete"
+        headers = {"Content-type": "application/json"}
+        body = {}
 
-        data_array = [
-            [
-                row["timestamp"].strftime("%Y-%m-%dT%H:%M:%S%z"),
-                row["value"],
-                (
-                    {
-                        "qualityCode": row.get("quality_code", None),
-                        "resultQualifiers": row.get("result_qualifiers", []),
-                    }
-                    if "quality_code" in row or "result_qualifiers" in row
-                    else {}
-                ),
-            ]
-            for _, row in observations.iterrows()
-        ]
+        if phenomenon_time_start is not None:
+            body["phenomenonTimeStart"] = phenomenon_time_start
+        if phenomenon_time_end is not None:
+            body["phenomenonTimeEnd"] = phenomenon_time_end
 
-        self._connection.request(
-            "post",
-            f"api/sensorthings/v1.1/CreateObservations",
-            headers={"Content-type": "application/json"},
-            data=json.dumps(
-                [
-                    {
-                        "Datastream": {"@iot.id": str(uid)},
-                        "components": ["phenomenonTime", "result", "resultQuality"],
-                        "dataArray": data_array,
-                    }
-                ]
-            ),
+        self.client.request(
+            "post", path, headers=headers, data=json.dumps(body, default=self.default_serializer)
         )
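
get_observations() now returns an ObservationCollection built from the new /observations endpoint (requested in "column" format) rather than a pandas DataFrame, and load_observations() posts a fields/data payload to observations/bulk-create, converting DataFrame column names to camelCase with pydantic's to_camel. Note also the leftover print(body) in load_observations, which echoes the full observation payload on every upload. A usage sketch under assumed names (a client.datastreams accessor, a uid attribute on the datastream model, and illustrative column names):

    import pandas as pd
    from datetime import datetime, timezone

    # `client` and `datastream` are assumed to already exist; to_camel would
    # send these columns as "phenomenonTime" and "result".
    df = pd.DataFrame(
        {
            "phenomenon_time": [datetime(2024, 1, 1, tzinfo=timezone.utc)],
            "result": [42.0],
        }
    )
    client.datastreams.load_observations(uid=datastream.uid, observations=df)

    obs = client.datastreams.get_observations(
        uid=datastream.uid,
        phenomenon_time_min=datetime(2024, 1, 1, tzinfo=timezone.utc),
        fetch_all=True,
    )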
hydroserverpy/api/services/sta/observed_property.py

@@ -1,52 +1,41 @@
 from typing import Optional, Union, List, TYPE_CHECKING
 from uuid import UUID
-from ..base import SensorThingsService
 from hydroserverpy.api.models import ObservedProperty
-
+from hydroserverpy.api.utils import normalize_uuid
+from ..base import HydroServerBaseService
 
 if TYPE_CHECKING:
     from hydroserverpy import HydroServer
-    from hydroserverpy.api.models import Workspace
-
+    from hydroserverpy.api.models import Workspace, Thing, Datastream
 
-class ObservedPropertyService(SensorThingsService):
-    def __init__(self, connection: "HydroServer"):
-        self._model = ObservedProperty
-        self._api_route = "api/data"
-        self._endpoint_route = "observed-properties"
-        self._sta_route = "api/sensorthings/v1.1/ObservedProperties"
 
-        super().__init__(connection)
+class ObservedPropertyService(HydroServerBaseService):
+    def __init__(self, client: "HydroServer"):
+        self.model = ObservedProperty
+        super().__init__(client)
 
     def list(
         self,
-        workspace: Optional[Union["Workspace", UUID, str]] = None,
-        page: int = 1,
-        page_size: int = 100,
+        page: int = ...,
+        page_size: int = ...,
+        order_by: List[str] = ...,
+        workspace: Optional[Union["Workspace", UUID, str]] = ...,
+        thing: Optional[Union["Thing", UUID, str]] = ...,
+        datastream: Optional[Union["Datastream", UUID, str]] = ...,
+        observed_property_type: str = ...,
+        fetch_all: bool = False,
     ) -> List["ObservedProperty"]:
         """Fetch a collection of observed properties."""
 
-        params = {"$top": page_size, "$skip": page_size * (page - 1)}
-
-        if workspace:
-            params["$filter"] = (
-                f"properties/workspace/id eq '{str(getattr(workspace, 'uid', workspace))}'"
-            )
-
-        return super()._list(params=params)
-
-    def get(
-        self, uid: Union[UUID, str], fetch_by_datastream_uid: bool = False
-    ) -> "ObservedProperty":
-        """Get an observed property by ID."""
-
-        return self._get(
-            uid=str(uid),
-            path=(
-                f"api/sensorthings/v1.1/Datastreams('{str(uid)}')/ObservedProperty"
-                if fetch_by_datastream_uid
-                else None
-            ),
+        return super().list(
+            page=page,
+            page_size=page_size,
+            order_by=order_by,
+            workspace_id=normalize_uuid(workspace),
+            thing_id=normalize_uuid(thing),
+            datastream_id=normalize_uuid(datastream),
+            type=observed_property_type,
+            fetch_all=fetch_all,
         )
 
     def create(
@@ -56,20 +45,20 @@ class ObservedPropertyService(SensorThingsService):
         description: str,
         observed_property_type: str,
         code: str,
-        workspace: Union["Workspace", UUID, str],
+        workspace: Optional[Union["Workspace", UUID, str]] = None,
     ) -> "ObservedProperty":
         """Create a new observed property."""
 
-        kwargs = {
+        body = {
             "name": name,
             "definition": definition,
             "description": description,
             "type": observed_property_type,
             "code": code,
-            "workspaceId": str(getattr(workspace, "uid", workspace)),
+            "workspaceId": normalize_uuid(workspace),
         }
 
-        return super()._create(**kwargs)
+        return super().create(**body)
 
     def update(
         self,
@@ -82,19 +71,12 @@ class ObservedPropertyService(SensorThingsService):
     ) -> "ObservedProperty":
         """Update an observed property."""
 
-        kwargs = {
+        body = {
             "name": name,
             "definition": definition,
             "description": description,
-            "type": observed_property_type,
+            "observedPropertyType": observed_property_type,
             "code": code,
         }
 
-        return super()._update(
-            uid=str(uid), **{k: v for k, v in kwargs.items() if v is not ...}
-        )
-
-    def delete(self, uid: Union[UUID, str]) -> None:
-        """Delete an observed property."""
-
-        super()._delete(uid=str(uid))
+        return super().update(uid=str(uid), **body)
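
Both here and in the datastream update() above, the per-service `... if x is ... else ...` guards are gone and the `...` sentinels are forwarded as-is, so stripping unset fields before the request is presumably handled once in HydroServerBaseService (base.py changes are not expanded on this page). The filtering the old code performed inline was equivalent to:

    # Equivalent of the removed {k: v for k, v in kwargs.items() if v is not ...}
    # step, shown here as a standalone sketch.
    def drop_unset(body: dict) -> dict:
        return {key: value for key, value in body.items() if value is not ...}

    body = {"name": "Discharge", "status": ..., "valueCount": ...}
    print(drop_unset(body))  # {'name': 'Discharge'}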
hydroserverpy/api/services/sta/processing_level.py

@@ -1,78 +1,72 @@
 from typing import Optional, Union, List, TYPE_CHECKING
 from uuid import UUID
-from ..base import EndpointService
 from hydroserverpy.api.models import ProcessingLevel
-
+from hydroserverpy.api.utils import normalize_uuid
+from ..base import HydroServerBaseService
 
 if TYPE_CHECKING:
     from hydroserverpy import HydroServer
-    from hydroserverpy.api.models import Workspace
-
+    from hydroserverpy.api.models import Workspace, Thing, Datastream
 
-class ProcessingLevelService(EndpointService):
-    def __init__(self, connection: "HydroServer"):
-        self._model = ProcessingLevel
-        self._api_route = "api/data"
-        self._endpoint_route = "processing-levels"
 
-        super().__init__(connection)
+class ProcessingLevelService(HydroServerBaseService):
+    def __init__(self, client: "HydroServer"):
+        self.model = ProcessingLevel
+        super().__init__(client)
 
     def list(
         self,
-        workspace: Optional[Union["Workspace", UUID, str]] = None,
+        page: int = ...,
+        page_size: int = ...,
+        order_by: List[str] = ...,
+        workspace: Optional[Union["Workspace", UUID, str]] = ...,
+        thing: Optional[Union["Thing", UUID, str]] = ...,
+        datastream: Optional[Union["Datastream", UUID, str]] = ...,
+        fetch_all: bool = False,
    ) -> List["ProcessingLevel"]:
         """Fetch a collection of processing levels."""
 
-        workspace_id = getattr(workspace, "uid", workspace)
-        workspace_id = str(workspace_id) if workspace_id else None
-
-        return super()._list(
-            params={"workspace_id": workspace_id} if workspace_id else {},
+        return super().list(
+            page=page,
+            page_size=page_size,
+            order_by=order_by,
+            workspace_id=normalize_uuid(workspace),
+            thing_id=normalize_uuid(thing),
+            datastream_id=normalize_uuid(datastream),
+            fetch_all=fetch_all,
         )
 
-    def get(self, uid: Union[UUID, str]) -> "ProcessingLevel":
-        """Get a processing level by ID."""
-
-        return super()._get(uid=str(uid))
-
     def create(
         self,
-        workspace: Union["Workspace", UUID, str],
         code: str,
         definition: Optional[str] = None,
         explanation: Optional[str] = None,
+        workspace: Optional[Union["Workspace", UUID, str]] = None,
     ) -> "ProcessingLevel":
         """Create a new processing level."""
 
-        kwargs = {
+        body = {
             "code": code,
             "definition": definition,
             "explanation": explanation,
-            "workspaceId": str(getattr(workspace, "uid", workspace)),
+            "workspaceId": normalize_uuid(workspace),
         }
 
-        return super()._create(**kwargs)
+        return super().create(**body)
 
     def update(
         self,
         uid: Union[UUID, str],
         code: str = ...,
-        definition: Optional[str] = ...,
-        explanation: Optional[str] = ...,
+        definition: str = ...,
+        explanation: str = ...,
     ) -> "ProcessingLevel":
         """Update a processing level."""
 
-        kwargs = {
+        body = {
             "code": code,
             "definition": definition,
             "explanation": explanation,
         }
 
-        return super()._update(
-            uid=str(uid), **{k: v for k, v in kwargs.items() if v is not ...}
-        )
-
-    def delete(self, uid: Union[UUID, str]) -> None:
-        """Delete a processing level."""
-
-        super()._delete(uid=str(uid))
+        return super().update(uid=str(uid), **body)
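
As with the observed property service, create() moves workspace from the first required positional argument to an optional trailing keyword defaulting to None. A hedged example, assuming a client.processinglevels accessor (the attribute name is not shown in this diff):

    # `client` and `workspace` are assumed to exist; a Workspace model, UUID,
    # or ID string is accepted, and the argument may now be omitted entirely.
    pl = client.processinglevels.create(
        code="1",
        definition="Quality controlled data",
        workspace=workspace,
    )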