hydroserverpy 0.4.0__py3-none-any.whl → 0.5.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of hydroserverpy might be problematic. Click here for more details.
- hydroserverpy/__init__.py +2 -3
- hydroserverpy/api/http.py +24 -0
- hydroserverpy/api/main.py +152 -0
- hydroserverpy/api/models/__init__.py +18 -0
- hydroserverpy/api/models/base.py +74 -0
- hydroserverpy/api/models/etl/__init__.py +0 -0
- hydroserverpy/api/models/iam/__init__.py +0 -0
- hydroserverpy/api/models/iam/account.py +12 -0
- hydroserverpy/api/models/iam/collaborator.py +34 -0
- hydroserverpy/api/models/iam/role.py +10 -0
- hydroserverpy/api/models/iam/workspace.py +203 -0
- hydroserverpy/api/models/sta/__init__.py +0 -0
- hydroserverpy/api/models/sta/datastream.py +336 -0
- hydroserverpy/api/models/sta/observed_property.py +72 -0
- hydroserverpy/api/models/sta/processing_level.py +50 -0
- hydroserverpy/api/models/sta/result_qualifier.py +49 -0
- hydroserverpy/api/models/sta/sensor.py +105 -0
- hydroserverpy/api/models/sta/thing.py +217 -0
- hydroserverpy/api/models/sta/unit.py +49 -0
- hydroserverpy/api/services/__init__.py +8 -0
- hydroserverpy/api/services/base.py +92 -0
- hydroserverpy/api/services/etl/__init__.py +0 -0
- hydroserverpy/api/services/iam/__init__.py +0 -0
- hydroserverpy/api/services/iam/workspace.py +126 -0
- hydroserverpy/api/services/sta/__init__.py +0 -0
- hydroserverpy/api/services/sta/datastream.py +354 -0
- hydroserverpy/api/services/sta/observed_property.py +98 -0
- hydroserverpy/api/services/sta/processing_level.py +78 -0
- hydroserverpy/api/services/sta/result_qualifier.py +74 -0
- hydroserverpy/api/services/sta/sensor.py +116 -0
- hydroserverpy/api/services/sta/thing.py +188 -0
- hydroserverpy/api/services/sta/unit.py +82 -0
- hydroserverpy/etl/loaders/hydroserver_loader.py +1 -1
- hydroserverpy/etl_csv/hydroserver_etl_csv.py +1 -1
- {hydroserverpy-0.4.0.dist-info → hydroserverpy-0.5.0b1.dist-info}/METADATA +4 -3
- hydroserverpy-0.5.0b1.dist-info/RECORD +59 -0
- {hydroserverpy-0.4.0.dist-info → hydroserverpy-0.5.0b1.dist-info}/WHEEL +1 -1
- hydroserverpy/core/endpoints/__init__.py +0 -9
- hydroserverpy/core/endpoints/base.py +0 -146
- hydroserverpy/core/endpoints/data_loaders.py +0 -93
- hydroserverpy/core/endpoints/data_sources.py +0 -93
- hydroserverpy/core/endpoints/datastreams.py +0 -225
- hydroserverpy/core/endpoints/observed_properties.py +0 -111
- hydroserverpy/core/endpoints/processing_levels.py +0 -111
- hydroserverpy/core/endpoints/result_qualifiers.py +0 -111
- hydroserverpy/core/endpoints/sensors.py +0 -111
- hydroserverpy/core/endpoints/things.py +0 -261
- hydroserverpy/core/endpoints/units.py +0 -111
- hydroserverpy/core/schemas/__init__.py +0 -9
- hydroserverpy/core/schemas/base.py +0 -124
- hydroserverpy/core/schemas/data_loaders.py +0 -73
- hydroserverpy/core/schemas/data_sources.py +0 -223
- hydroserverpy/core/schemas/datastreams.py +0 -330
- hydroserverpy/core/schemas/observed_properties.py +0 -43
- hydroserverpy/core/schemas/processing_levels.py +0 -31
- hydroserverpy/core/schemas/result_qualifiers.py +0 -26
- hydroserverpy/core/schemas/sensors.py +0 -68
- hydroserverpy/core/schemas/things.py +0 -346
- hydroserverpy/core/schemas/units.py +0 -29
- hydroserverpy/core/service.py +0 -200
- hydroserverpy-0.4.0.dist-info/RECORD +0 -51
- /hydroserverpy/{core → api}/__init__.py +0 -0
- {hydroserverpy-0.4.0.dist-info → hydroserverpy-0.5.0b1.dist-info/licenses}/LICENSE +0 -0
- {hydroserverpy-0.4.0.dist-info → hydroserverpy-0.5.0b1.dist-info}/top_level.txt +0 -0
- {hydroserverpy-0.4.0.dist-info → hydroserverpy-0.5.0b1.dist-info}/zip-safe +0 -0
|
@@ -0,0 +1,354 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import pandas as pd
|
|
3
|
+
from typing import Union, Optional, Literal, List, TYPE_CHECKING
|
|
4
|
+
from uuid import UUID
|
|
5
|
+
from datetime import datetime
|
|
6
|
+
from hydroserverpy.api.models import Datastream
|
|
7
|
+
from ..base import SensorThingsService
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
if TYPE_CHECKING:
|
|
11
|
+
from hydroserverpy import HydroServer
|
|
12
|
+
from hydroserverpy.api.models import (
|
|
13
|
+
Workspace,
|
|
14
|
+
Thing,
|
|
15
|
+
Unit,
|
|
16
|
+
Sensor,
|
|
17
|
+
ObservedProperty,
|
|
18
|
+
ProcessingLevel,
|
|
19
|
+
)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class DatastreamService(SensorThingsService):
    """CRUD operations and bulk observation transfer for HydroServer datastreams.

    Metadata operations go through the Data Management API (``api/data``); the
    observation methods talk directly to the SensorThings v1.1 endpoints.
    """

    def __init__(self, connection: "HydroServer"):
        self._model = Datastream
        self._api_route = "api/data"
        self._endpoint_route = "datastreams"
        self._sta_route = "api/sensorthings/v1.1/Datastreams"

        super().__init__(connection)

    @staticmethod
    def _ref_id(value) -> str:
        """Return the string UUID of a model instance (via its ``uid``) or of a raw UUID/str."""
        return str(getattr(value, "uid", value))

    def list(
        self,
        workspace: Optional[Union["Workspace", UUID, str]] = None,
        thing: Optional[Union["Thing", UUID, str]] = None,
        page: int = 1,
        page_size: int = 100,
    ) -> List["Datastream"]:
        """Fetch a collection of datastreams.

        :param workspace: Restrict results to this workspace (instance or ID).
        :param thing: Restrict results to this thing (instance or ID).
        :param page: 1-based page number.
        :param page_size: Number of records per page.
        """

        params = {"$top": page_size, "$skip": page_size * (page - 1)}

        filters = []
        if workspace:
            filters.append(
                f"properties/workspace/id eq '{self._ref_id(workspace)}'"
            )
        if thing:
            filters.append(f"Thing/id eq '{self._ref_id(thing)}'")

        if filters:
            params["$filter"] = " and ".join(filters)

        return super()._list(params=params)

    def get(self, uid: Union[UUID, str]) -> "Datastream":
        """Get a datastream by ID."""

        return super()._get(uid=str(uid))

    def create(
        self,
        name: str,
        description: str,
        thing: Union["Thing", UUID, str],
        sensor: Union["Sensor", UUID, str],
        observed_property: Union["ObservedProperty", UUID, str],
        processing_level: Union["ProcessingLevel", UUID, str],
        unit: Union["Unit", UUID, str],
        observation_type: str,
        result_type: str,
        sampled_medium: str,
        no_data_value: float,
        aggregation_statistic: str,
        time_aggregation_interval: float,
        time_aggregation_interval_unit: Literal["seconds", "minutes", "hours", "days"],
        intended_time_spacing: Optional[float] = None,
        intended_time_spacing_unit: Optional[
            Literal["seconds", "minutes", "hours", "days"]
        ] = None,
        status: Optional[str] = None,
        value_count: Optional[int] = None,
        phenomenon_begin_time: Optional[datetime] = None,
        phenomenon_end_time: Optional[datetime] = None,
        result_begin_time: Optional[datetime] = None,
        result_end_time: Optional[datetime] = None,
        is_private: bool = False,
        is_visible: bool = True,
    ) -> "Datastream":
        """Create a new datastream.

        Related entities (thing, sensor, observed property, processing level, unit)
        may be passed as model instances or raw UUIDs/strings; datetimes are
        serialized to ISO 8601 for the request body.
        """

        kwargs = {
            "name": name,
            "description": description,
            "thingId": self._ref_id(thing),
            "sensorId": self._ref_id(sensor),
            "observedPropertyId": self._ref_id(observed_property),
            "processingLevelId": self._ref_id(processing_level),
            "unitId": self._ref_id(unit),
            "observationType": observation_type,
            "resultType": result_type,
            "sampledMedium": sampled_medium,
            "noDataValue": no_data_value,
            "aggregationStatistic": aggregation_statistic,
            "timeAggregationInterval": time_aggregation_interval,
            "timeAggregationIntervalUnit": time_aggregation_interval_unit,
            "intendedTimeSpacing": intended_time_spacing,
            "intendedTimeSpacingUnit": intended_time_spacing_unit,
            "status": status,
            "valueCount": value_count,
            "phenomenonBeginTime": (
                phenomenon_begin_time.isoformat() if phenomenon_begin_time else None
            ),
            "phenomenonEndTime": (
                phenomenon_end_time.isoformat() if phenomenon_end_time else None
            ),
            "resultBeginTime": (
                result_begin_time.isoformat() if result_begin_time else None
            ),
            "resultEndTime": result_end_time.isoformat() if result_end_time else None,
            "isPrivate": is_private,
            "isVisible": is_visible,
        }

        return super()._create(**kwargs)

    def update(
        self,
        uid: Union[UUID, str],
        name: str = ...,
        description: str = ...,
        thing: Union["Thing", UUID, str] = ...,
        sensor: Union["Sensor", UUID, str] = ...,
        observed_property: Union["ObservedProperty", UUID, str] = ...,
        processing_level: Union["ProcessingLevel", UUID, str] = ...,
        unit: Union["Unit", UUID, str] = ...,
        observation_type: str = ...,
        result_type: str = ...,
        sampled_medium: str = ...,
        no_data_value: float = ...,
        aggregation_statistic: str = ...,
        time_aggregation_interval: float = ...,
        time_aggregation_interval_unit: Literal[
            "seconds", "minutes", "hours", "days"
        ] = ...,
        intended_time_spacing: Optional[float] = ...,
        intended_time_spacing_unit: Optional[
            Literal["seconds", "minutes", "hours", "days"]
        ] = ...,
        status: Optional[str] = ...,
        value_count: Optional[int] = ...,
        phenomenon_begin_time: Optional[datetime] = ...,
        phenomenon_end_time: Optional[datetime] = ...,
        result_begin_time: Optional[datetime] = ...,
        result_end_time: Optional[datetime] = ...,
        is_private: bool = ...,
        is_visible: bool = ...,
    ) -> "Datastream":
        """Update a datastream.

        Parameters left at the ``...`` sentinel are omitted from the request, so
        only explicitly passed fields are modified on the server.
        """

        def _iso(value):
            # Preserve the "not provided" sentinel so the field is dropped by the
            # filter below. BUGFIX: the previous implementation mapped an
            # unprovided datetime (...) to None, which then survived the
            # `is not ...` filter and was sent to the server, clearing the field
            # even though the caller never mentioned it.
            if value is ...:
                return ...
            return value.isoformat() if value else None

        def _ref(value):
            # Same sentinel-preserving treatment for related-entity IDs.
            return ... if value is ... else self._ref_id(value)

        kwargs = {
            "name": name,
            "description": description,
            "thingId": _ref(thing),
            "sensorId": _ref(sensor),
            "observedPropertyId": _ref(observed_property),
            "processingLevelId": _ref(processing_level),
            "unitId": _ref(unit),
            "observationType": observation_type,
            "resultType": result_type,
            "sampledMedium": sampled_medium,
            "noDataValue": no_data_value,
            "aggregationStatistic": aggregation_statistic,
            "timeAggregationInterval": time_aggregation_interval,
            "timeAggregationIntervalUnit": time_aggregation_interval_unit,
            "intendedTimeSpacing": intended_time_spacing,
            "intendedTimeSpacingUnit": intended_time_spacing_unit,
            "status": status,
            "valueCount": value_count,
            "phenomenonBeginTime": _iso(phenomenon_begin_time),
            "phenomenonEndTime": _iso(phenomenon_end_time),
            "resultBeginTime": _iso(result_begin_time),
            "resultEndTime": _iso(result_end_time),
            "isPrivate": is_private,
            "isVisible": is_visible,
        }

        return super()._update(
            uid=str(uid), **{k: v for k, v in kwargs.items() if v is not ...}
        )

    def delete(self, uid: Union[UUID, str]) -> None:
        """Delete a datastream."""

        super()._delete(uid=str(uid))

    def get_observations(
        self,
        uid: Union[UUID, str],
        start_time: Optional[datetime] = None,
        end_time: Optional[datetime] = None,
        page: int = 1,
        page_size: int = 100000,
        include_quality: bool = False,
        fetch_all: bool = False,
    ) -> pd.DataFrame:
        """Retrieve observations of a datastream as a pandas DataFrame.

        :param start_time: Inclusive lower bound on phenomenonTime.
        :param end_time: Inclusive upper bound on phenomenonTime.
        :param page: 1-based page number (ignored when ``fetch_all`` is True).
        :param page_size: Records per request.
        :param include_quality: Also return quality_code / result_quality columns.
        :param fetch_all: Keep paging until the server returns a short page.
        """

        filters = []
        if start_time:
            filters.append(
                f'phenomenonTime ge {start_time.strftime("%Y-%m-%dT%H:%M:%S%z")}'
            )
        if end_time:
            filters.append(
                f'phenomenonTime le {end_time.strftime("%Y-%m-%dT%H:%M:%S%z")}'
            )

        if fetch_all:
            page = 1

        observations = []

        while True:
            response = self._connection.request(
                "get",
                f"api/sensorthings/v1.1/Datastreams('{str(uid)}')/Observations",
                params={
                    "$resultFormat": "dataArray",
                    "$select": f'phenomenonTime,result{",resultQuality" if include_quality else ""}',
                    "$count": True,
                    "$top": page_size,
                    "$skip": (page - 1) * page_size,
                    "$filter": " and ".join(filters) if filters else None,
                },
            )
            response_content = json.loads(response.content)
            data_array = (
                response_content["value"][0]["dataArray"]
                if response_content["value"]
                else []
            )
            for obs in data_array:
                if include_quality:
                    # BUGFIX: use dict.get() and tolerate a missing third element
                    # so a record without quality metadata no longer raises
                    # KeyError/IndexError; falsy values still collapse to None
                    # as before.
                    quality = obs[2] if len(obs) > 2 else {}
                    observations.append(
                        [
                            obs[0],
                            obs[1],
                            quality.get("qualityCode") or None,
                            quality.get("resultQualifiers") or None,
                        ]
                    )
                else:
                    observations.append([obs[0], obs[1]])
            if not fetch_all or len(data_array) < page_size:
                break
            page += 1

        columns = ["timestamp", "value"]
        if include_quality:
            columns.extend(["quality_code", "result_quality"])

        data_frame = pd.DataFrame(observations, columns=columns)
        data_frame["timestamp"] = pd.to_datetime(data_frame["timestamp"])

        return data_frame

    def load_observations(
        self,
        uid: Union[UUID, str],
        observations: pd.DataFrame,
    ) -> None:
        """Load observations to a datastream.

        Expects a DataFrame with ``timestamp`` and ``value`` columns and,
        optionally, ``quality_code`` / ``result_qualifiers`` columns; rows are
        posted via the SensorThings dataArray CreateObservations extension.
        """

        data_array = [
            [
                row["timestamp"].strftime("%Y-%m-%dT%H:%M:%S%z"),
                row["value"],
                (
                    {
                        "qualityCode": row.get("quality_code", None),
                        "resultQualifiers": row.get("result_qualifiers", []),
                    }
                    if "quality_code" in row or "result_qualifiers" in row
                    else {}
                ),
            ]
            for _, row in observations.iterrows()
        ]

        self._connection.request(
            "post",
            "api/sensorthings/v1.1/CreateObservations",  # plain str; was an f-string with no placeholders
            headers={"Content-type": "application/json"},
            data=json.dumps(
                [
                    {
                        "Datastream": {"@iot.id": str(uid)},
                        "components": ["phenomenonTime", "result", "resultQuality"],
                        "dataArray": data_array,
                    }
                ]
            ),
        )
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
from typing import Optional, Union, List, TYPE_CHECKING
|
|
2
|
+
from uuid import UUID
|
|
3
|
+
from ..base import SensorThingsService
|
|
4
|
+
from hydroserverpy.api.models import ObservedProperty
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
if TYPE_CHECKING:
|
|
8
|
+
from hydroserverpy import HydroServer
|
|
9
|
+
from hydroserverpy.api.models import Workspace
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class ObservedPropertyService(SensorThingsService):
    """CRUD endpoints for HydroServer observed properties."""

    def __init__(self, connection: "HydroServer"):
        self._model = ObservedProperty
        self._api_route = "api/data"
        self._endpoint_route = "observed-properties"
        self._sta_route = "api/sensorthings/v1.1/ObservedProperties"

        super().__init__(connection)

    def list(
        self,
        workspace: Optional[Union["Workspace", UUID, str]] = None,
        page: int = 1,
        page_size: int = 100,
    ) -> List["ObservedProperty"]:
        """Fetch a collection of observed properties, optionally scoped to a workspace."""

        query = {"$top": page_size, "$skip": page_size * (page - 1)}

        if workspace:
            workspace_ref = str(getattr(workspace, "uid", workspace))
            query["$filter"] = f"properties/workspace/id eq '{workspace_ref}'"

        return super()._list(params=query)

    def get(
        self, uid: Union[UUID, str], fetch_by_datastream_uid: bool = False
    ) -> "ObservedProperty":
        """Get an observed property by ID.

        When ``fetch_by_datastream_uid`` is True, ``uid`` is treated as a
        datastream ID and the property is resolved through that datastream's
        SensorThings navigation link.
        """

        path = None
        if fetch_by_datastream_uid:
            path = f"api/sensorthings/v1.1/Datastreams('{str(uid)}')/ObservedProperty"

        return self._get(uid=str(uid), path=path)

    def create(
        self,
        name: str,
        definition: str,
        description: str,
        observed_property_type: str,
        code: str,
        workspace: Union["Workspace", UUID, str],
    ) -> "ObservedProperty":
        """Create a new observed property."""

        workspace_id = str(getattr(workspace, "uid", workspace))
        payload = {
            "name": name,
            "definition": definition,
            "description": description,
            "type": observed_property_type,
            "code": code,
            "workspaceId": workspace_id,
        }

        return super()._create(**payload)

    def update(
        self,
        uid: Union[UUID, str],
        name: str = ...,
        definition: str = ...,
        description: str = ...,
        observed_property_type: str = ...,
        code: str = ...,
    ) -> "ObservedProperty":
        """Update an observed property; only explicitly passed fields are sent."""

        candidates = {
            "name": name,
            "definition": definition,
            "description": description,
            "type": observed_property_type,
            "code": code,
        }
        changes = {
            field: value for field, value in candidates.items() if value is not ...
        }

        return super()._update(uid=str(uid), **changes)

    def delete(self, uid: Union[UUID, str]) -> None:
        """Delete an observed property."""

        super()._delete(uid=str(uid))
|
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
from typing import Optional, Union, List, TYPE_CHECKING
|
|
2
|
+
from uuid import UUID
|
|
3
|
+
from ..base import EndpointService
|
|
4
|
+
from hydroserverpy.api.models import ProcessingLevel
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
if TYPE_CHECKING:
|
|
8
|
+
from hydroserverpy import HydroServer
|
|
9
|
+
from hydroserverpy.api.models import Workspace
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class ProcessingLevelService(EndpointService):
    """CRUD endpoints for HydroServer processing levels."""

    def __init__(self, connection: "HydroServer"):
        self._model = ProcessingLevel
        self._api_route = "api/data"
        self._endpoint_route = "processing-levels"

        super().__init__(connection)

    def list(
        self,
        workspace: Optional[Union["Workspace", UUID, str]] = None,
    ) -> List["ProcessingLevel"]:
        """Fetch a collection of processing levels, optionally scoped to a workspace."""

        raw_id = getattr(workspace, "uid", workspace)
        query = {}
        if raw_id:
            query["workspace_id"] = str(raw_id)

        return super()._list(params=query)

    def get(self, uid: Union[UUID, str]) -> "ProcessingLevel":
        """Get a processing level by ID."""

        return super()._get(uid=str(uid))

    def create(
        self,
        workspace: Union["Workspace", UUID, str],
        code: str,
        definition: Optional[str] = None,
        explanation: Optional[str] = None,
    ) -> "ProcessingLevel":
        """Create a new processing level."""

        workspace_id = str(getattr(workspace, "uid", workspace))

        return super()._create(
            code=code,
            definition=definition,
            explanation=explanation,
            workspaceId=workspace_id,
        )

    def update(
        self,
        uid: Union[UUID, str],
        code: str = ...,
        definition: Optional[str] = ...,
        explanation: Optional[str] = ...,
    ) -> "ProcessingLevel":
        """Update a processing level; only explicitly passed fields are sent."""

        candidates = {
            "code": code,
            "definition": definition,
            "explanation": explanation,
        }
        changes = {
            field: value for field, value in candidates.items() if value is not ...
        }

        return super()._update(uid=str(uid), **changes)

    def delete(self, uid: Union[UUID, str]) -> None:
        """Delete a processing level."""

        super()._delete(uid=str(uid))
|
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
from typing import Optional, Union, List, TYPE_CHECKING
|
|
2
|
+
from uuid import UUID
|
|
3
|
+
from ..base import EndpointService
|
|
4
|
+
from hydroserverpy.api.models import ResultQualifier
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
if TYPE_CHECKING:
|
|
8
|
+
from hydroserverpy import HydroServer
|
|
9
|
+
from hydroserverpy.api.models import Workspace
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class ResultQualifierService(EndpointService):
    """CRUD endpoints for HydroServer result qualifiers."""

    def __init__(self, connection: "HydroServer"):
        self._model = ResultQualifier
        self._api_route = "api/data"
        self._endpoint_route = "result-qualifiers"

        super().__init__(connection)

    def list(
        self,
        workspace: Optional[Union["Workspace", UUID, str]] = None,
    ) -> List["ResultQualifier"]:
        """Fetch a collection of result qualifiers, optionally scoped to a workspace."""

        raw_id = getattr(workspace, "uid", workspace)
        query = {}
        if raw_id:
            query["workspace_id"] = str(raw_id)

        return super()._list(params=query)

    def get(self, uid: Union[UUID, str]) -> "ResultQualifier":
        """Get a result qualifier by ID."""

        return super()._get(uid=str(uid))

    def create(
        self,
        workspace: Union["Workspace", UUID, str],
        code: str,
        description: str,
    ) -> "ResultQualifier":
        """Create a new result qualifier."""

        workspace_id = str(getattr(workspace, "uid", workspace))

        return super()._create(
            code=code,
            description=description,
            workspaceId=workspace_id,
        )

    def update(
        self,
        uid: Union[UUID, str],
        code: str = ...,
        description: str = ...,
    ) -> "ResultQualifier":
        """Update a result qualifier; only explicitly passed fields are sent."""

        candidates = {"code": code, "description": description}
        changes = {
            field: value for field, value in candidates.items() if value is not ...
        }

        return super()._update(uid=str(uid), **changes)

    def delete(self, uid: Union[UUID, str]) -> None:
        """Delete a result qualifier."""

        super()._delete(uid=str(uid))
|