hydroserverpy 1.5.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hydroserverpy/__init__.py +7 -0
- hydroserverpy/api/__init__.py +0 -0
- hydroserverpy/api/client.py +203 -0
- hydroserverpy/api/models/__init__.py +22 -0
- hydroserverpy/api/models/base.py +207 -0
- hydroserverpy/api/models/etl/__init__.py +26 -0
- hydroserverpy/api/models/etl/data_archive.py +77 -0
- hydroserverpy/api/models/etl/data_source.py +146 -0
- hydroserverpy/api/models/etl/etl_configuration.py +224 -0
- hydroserverpy/api/models/etl/extractors/__init__.py +6 -0
- hydroserverpy/api/models/etl/extractors/base.py +52 -0
- hydroserverpy/api/models/etl/extractors/ftp_extractor.py +50 -0
- hydroserverpy/api/models/etl/extractors/http_extractor.py +28 -0
- hydroserverpy/api/models/etl/extractors/local_file_extractor.py +20 -0
- hydroserverpy/api/models/etl/factories.py +23 -0
- hydroserverpy/api/models/etl/loaders/__init__.py +4 -0
- hydroserverpy/api/models/etl/loaders/base.py +11 -0
- hydroserverpy/api/models/etl/loaders/hydroserver_loader.py +98 -0
- hydroserverpy/api/models/etl/orchestration_configuration.py +35 -0
- hydroserverpy/api/models/etl/orchestration_system.py +63 -0
- hydroserverpy/api/models/etl/schedule.py +16 -0
- hydroserverpy/api/models/etl/status.py +14 -0
- hydroserverpy/api/models/etl/timestamp_parser.py +112 -0
- hydroserverpy/api/models/etl/transformers/__init__.py +5 -0
- hydroserverpy/api/models/etl/transformers/base.py +135 -0
- hydroserverpy/api/models/etl/transformers/csv_transformer.py +88 -0
- hydroserverpy/api/models/etl/transformers/json_transformer.py +48 -0
- hydroserverpy/api/models/etl/types.py +7 -0
- hydroserverpy/api/models/iam/__init__.py +0 -0
- hydroserverpy/api/models/iam/account.py +12 -0
- hydroserverpy/api/models/iam/apikey.py +96 -0
- hydroserverpy/api/models/iam/collaborator.py +70 -0
- hydroserverpy/api/models/iam/role.py +38 -0
- hydroserverpy/api/models/iam/workspace.py +297 -0
- hydroserverpy/api/models/sta/__init__.py +0 -0
- hydroserverpy/api/models/sta/datastream.py +254 -0
- hydroserverpy/api/models/sta/observation.py +103 -0
- hydroserverpy/api/models/sta/observed_property.py +37 -0
- hydroserverpy/api/models/sta/processing_level.py +35 -0
- hydroserverpy/api/models/sta/result_qualifier.py +34 -0
- hydroserverpy/api/models/sta/sensor.py +44 -0
- hydroserverpy/api/models/sta/thing.py +113 -0
- hydroserverpy/api/models/sta/unit.py +36 -0
- hydroserverpy/api/services/__init__.py +12 -0
- hydroserverpy/api/services/base.py +118 -0
- hydroserverpy/api/services/etl/__init__.py +0 -0
- hydroserverpy/api/services/etl/data_archive.py +166 -0
- hydroserverpy/api/services/etl/data_source.py +163 -0
- hydroserverpy/api/services/etl/orchestration_system.py +66 -0
- hydroserverpy/api/services/iam/__init__.py +0 -0
- hydroserverpy/api/services/iam/role.py +38 -0
- hydroserverpy/api/services/iam/workspace.py +232 -0
- hydroserverpy/api/services/sta/__init__.py +0 -0
- hydroserverpy/api/services/sta/datastream.py +296 -0
- hydroserverpy/api/services/sta/observed_property.py +82 -0
- hydroserverpy/api/services/sta/processing_level.py +72 -0
- hydroserverpy/api/services/sta/result_qualifier.py +64 -0
- hydroserverpy/api/services/sta/sensor.py +102 -0
- hydroserverpy/api/services/sta/thing.py +195 -0
- hydroserverpy/api/services/sta/unit.py +78 -0
- hydroserverpy/api/utils.py +22 -0
- hydroserverpy/quality/__init__.py +1 -0
- hydroserverpy/quality/service.py +405 -0
- hydroserverpy-1.5.1.dist-info/METADATA +66 -0
- hydroserverpy-1.5.1.dist-info/RECORD +69 -0
- hydroserverpy-1.5.1.dist-info/WHEEL +5 -0
- hydroserverpy-1.5.1.dist-info/licenses/LICENSE +28 -0
- hydroserverpy-1.5.1.dist-info/top_level.txt +1 -0
- hydroserverpy-1.5.1.dist-info/zip-safe +1 -0
|
@@ -0,0 +1,297 @@
|
|
|
1
|
+
from typing import List, Union, Optional, ClassVar, TYPE_CHECKING
|
|
2
|
+
from uuid import UUID
|
|
3
|
+
from datetime import datetime
|
|
4
|
+
from pydantic import Field, EmailStr, AliasPath
|
|
5
|
+
from ..base import HydroServerBaseModel
|
|
6
|
+
|
|
7
|
+
if TYPE_CHECKING:
|
|
8
|
+
from hydroserverpy import HydroServer
|
|
9
|
+
from hydroserverpy.api.models import (
|
|
10
|
+
Role,
|
|
11
|
+
Collaborator,
|
|
12
|
+
APIKey,
|
|
13
|
+
Account,
|
|
14
|
+
Thing,
|
|
15
|
+
ObservedProperty,
|
|
16
|
+
Sensor,
|
|
17
|
+
Unit,
|
|
18
|
+
ProcessingLevel,
|
|
19
|
+
ResultQualifier,
|
|
20
|
+
Datastream,
|
|
21
|
+
OrchestrationSystem,
|
|
22
|
+
DataSource,
|
|
23
|
+
DataArchive,
|
|
24
|
+
)
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class Workspace(HydroServerBaseModel):
    """A HydroServer workspace: a shared container for monitoring resources.

    Related collections (roles, collaborators, API keys, things, datastreams,
    etc.) are fetched lazily on first property access and cached on the
    instance. Mutating helpers invalidate the relevant cache so the next
    property access refetches fresh data from the server.
    """

    name: str = Field(..., max_length=255)
    is_private: bool
    owner: "Account"
    # Flattened from the nested "collaboratorRole" object in API responses.
    collaborator_role_id: Optional[Union[UUID, str]] = Field(
        None, validation_alias=AliasPath("collaboratorRole", "id")
    )
    pending_transfer_to: Optional["Account"] = None

    # Only these fields may be modified through save()/update on the base model.
    _editable_fields: ClassVar[set[str]] = {"name", "is_private"}

    def __init__(self, client: "HydroServer", **data):
        """Initialize the workspace model and its lazy relation caches.

        :param client: Connected HydroServer client used for all API calls.
        :param data: Field values for this workspace (validated by pydantic).
        """
        super().__init__(client=client, service=client.workspaces, **data)

        # Lazy caches for related collections; None means "not fetched yet".
        self._roles = None
        self._collaborators = None
        self._collaborator_role = None
        self._apikeys = None
        self._things = None
        self._observedproperties = None
        self._processinglevels = None
        self._resultqualifiers = None
        self._units = None
        self._sensors = None
        self._datastreams = None
        self._orchestrationsystems = None
        self._datasources = None
        self._dataarchives = None

    @classmethod
    def get_route(cls):
        """Return the API route segment for workspace resources."""
        return "workspaces"

    @property
    def roles(self) -> List["Role"]:
        """The roles that can be assigned for this workspace."""

        if self._roles is None:
            self._roles = self.client.roles.list(workspace=self.uid, fetch_all=True).items

        return self._roles

    @property
    def collaborators(self) -> List["Collaborator"]:
        """The collaborators associated with this workspace."""

        if self._collaborators is None:
            self._collaborators = self.client.workspaces.list_collaborators(uid=self.uid)

        return self._collaborators

    @property
    def collaborator_role(self) -> Optional["Role"]:
        """The user's collaborator role on this workspace, if any."""

        if self._collaborator_role is None and self.collaborator_role_id is not None:
            self._collaborator_role = self.client.roles.get(uid=self.collaborator_role_id)

        return self._collaborator_role

    @property
    def apikeys(self) -> List["APIKey"]:
        """The API keys associated with this workspace."""

        if self._apikeys is None:
            self._apikeys = self.client.workspaces.list_api_keys(uid=self.uid)

        return self._apikeys

    @property
    def things(self) -> List["Thing"]:
        """The things associated with this workspace."""

        if self._things is None:
            self._things = self.client.things.list(workspace=self.uid, fetch_all=True).items

        return self._things

    @property
    def observedproperties(self) -> List["ObservedProperty"]:
        """The observed properties associated with this workspace."""

        if self._observedproperties is None:
            self._observedproperties = self.client.observedproperties.list(workspace=self.uid, fetch_all=True).items

        return self._observedproperties

    @property
    def processinglevels(self) -> List["ProcessingLevel"]:
        """The processing levels associated with this workspace."""

        if self._processinglevels is None:
            self._processinglevels = self.client.processinglevels.list(workspace=self.uid, fetch_all=True).items

        return self._processinglevels

    @property
    def resultqualifiers(self) -> List["ResultQualifier"]:
        """The result qualifiers associated with this workspace."""

        if self._resultqualifiers is None:
            self._resultqualifiers = self.client.resultqualifiers.list(workspace=self.uid, fetch_all=True).items

        return self._resultqualifiers

    @property
    def units(self) -> List["Unit"]:
        """The units associated with this workspace."""

        if self._units is None:
            self._units = self.client.units.list(workspace=self.uid, fetch_all=True).items

        return self._units

    @property
    def sensors(self) -> List["Sensor"]:
        """The sensors associated with this workspace."""

        if self._sensors is None:
            self._sensors = self.client.sensors.list(workspace=self.uid, fetch_all=True).items

        return self._sensors

    @property
    def datastreams(self) -> List["Datastream"]:
        """The datastreams associated with this workspace."""

        if self._datastreams is None:
            self._datastreams = self.client.datastreams.list(workspace=self.uid, fetch_all=True).items

        return self._datastreams

    @property
    def orchestrationsystems(self) -> List["OrchestrationSystem"]:
        """The orchestration systems associated with this workspace."""

        if self._orchestrationsystems is None:
            self._orchestrationsystems = self.client.orchestrationsystems.list(workspace=self.uid, fetch_all=True).items

        return self._orchestrationsystems

    @property
    def datasources(self) -> List["DataSource"]:
        """The data sources associated with this workspace."""

        if self._datasources is None:
            self._datasources = self.client.datasources.list(workspace=self.uid, fetch_all=True).items

        return self._datasources

    @property
    def dataarchives(self) -> List["DataArchive"]:
        """The data archives associated with this workspace."""

        if self._dataarchives is None:
            self._dataarchives = self.client.dataarchives.list(workspace=self.uid, fetch_all=True).items

        return self._dataarchives

    def create_api_key(
        self,
        role: Union["Role", UUID, str],
        name: str,
        description: Optional[str] = None,
        is_active: bool = True,
        expires_at: Optional[datetime] = None
    ):
        """Create an API key associated with this workspace.

        :param role: The role (object or id) granted to the new key.
        :param name: A display name for the key.
        :param description: Optional description.
        :param is_active: Whether the key is active on creation.
        :param expires_at: Optional expiration timestamp.
        :returns: A (key-record, secret-key) pair as returned by the service.
        """

        response, key = self.client.workspaces.create_api_key(
            uid=self.uid,
            role=role,
            name=name,
            description=description,
            is_active=is_active,
            expires_at=expires_at
        )
        # Invalidate the cached key list so the next access refetches it.
        self._apikeys = None

        return response, key

    def update_api_key(
        self,
        api_key_id: Union[UUID, str],
        role: Union["Role", UUID, str] = ...,
        name: str = ...,
        description: Optional[str] = ...,
        is_active: bool = ...,
        expires_at: Optional[datetime] = ...
    ):
        """Update an API key associated with this workspace.

        Parameters left at the Ellipsis sentinel are not modified.

        :param api_key_id: The id of the key to update.
        :returns: The updated API key record.
        """

        response = self.client.workspaces.update_api_key(
            uid=self.uid,
            api_key_id=api_key_id,
            role=role,
            name=name,
            description=description,
            is_active=is_active,
            expires_at=expires_at
        )
        # Invalidate the cached key list so the next access refetches it.
        self._apikeys = None

        return response

    def delete_api_key(self, api_key_id: Union[UUID, str]):
        """Delete an API key associated with this workspace."""

        self.client.workspaces.delete_api_key(
            uid=self.uid,
            api_key_id=api_key_id
        )
        self._apikeys = None

    def regenerate_api_key(self, api_key_id: Union[UUID, str]):
        """Regenerate an API key associated with this workspace.

        :returns: A (key-record, new-secret-key) pair.
        """

        api_key, key = self.client.workspaces.regenerate_api_key(
            uid=self.uid,
            api_key_id=api_key_id
        )
        # Keep cache handling consistent with create/update/delete: the
        # regenerated key record is stale in the cached list, so drop it.
        self._apikeys = None

        return api_key, key

    def add_collaborator(
        self, email: EmailStr, role: Union["Role", UUID, str]
    ) -> "Collaborator":
        """Add a new collaborator to the workspace."""

        response = self.client.workspaces.add_collaborator(
            uid=self.uid, email=email, role=role
        )
        self._collaborators = None

        return response

    def edit_collaborator_role(
        self, email: EmailStr, role: Union["Role", UUID, str]
    ) -> "Collaborator":
        """Edit a collaborator's role in this workspace."""

        response = self.client.workspaces.edit_collaborator_role(
            uid=self.uid, email=email, role=role
        )
        self._collaborators = None

        return response

    def remove_collaborator(self, email: EmailStr) -> None:
        """Remove a collaborator from the workspace."""

        self.client.workspaces.remove_collaborator(uid=self.uid, email=email)
        self._collaborators = None

    def transfer_ownership(self, email: EmailStr) -> None:
        """Transfer ownership of this workspace to another HydroServer user."""

        self.client.workspaces.transfer_ownership(uid=self.uid, email=email)
        # Refresh so owner/pending_transfer_to reflect the server state.
        self.refresh()

    def accept_ownership_transfer(self) -> None:
        """Accept ownership transfer of this workspace."""

        self.client.workspaces.accept_ownership_transfer(uid=self.uid)
        self.refresh()

    def cancel_ownership_transfer(self) -> None:
        """Cancel ownership transfer of this workspace."""

        self.client.workspaces.cancel_ownership_transfer(uid=self.uid)
        self.refresh()
|
File without changes
|
|
@@ -0,0 +1,254 @@
|
|
|
1
|
+
import uuid
|
|
2
|
+
import pandas as pd
|
|
3
|
+
from typing import List, Union, Optional, Literal, ClassVar, TYPE_CHECKING
|
|
4
|
+
from pydantic import Field
|
|
5
|
+
from uuid import UUID
|
|
6
|
+
from datetime import datetime
|
|
7
|
+
from hydroserverpy.api.utils import normalize_uuid
|
|
8
|
+
from ..base import HydroServerBaseModel
|
|
9
|
+
|
|
10
|
+
if TYPE_CHECKING:
|
|
11
|
+
from hydroserverpy import HydroServer
|
|
12
|
+
from hydroserverpy.api.models import (
|
|
13
|
+
Workspace,
|
|
14
|
+
Thing,
|
|
15
|
+
Sensor,
|
|
16
|
+
ObservedProperty,
|
|
17
|
+
Unit,
|
|
18
|
+
ProcessingLevel,
|
|
19
|
+
DataSource,
|
|
20
|
+
DataArchive
|
|
21
|
+
)
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class Datastream(HydroServerBaseModel):
    """A HydroServer datastream: a time series of observations produced by a
    sensor measuring an observed property at a thing.

    Related entities (workspace, thing, sensor, unit, etc.) are fetched lazily
    by id on first property access and cached on the instance. Assigning to a
    relation setter updates the corresponding ``*_id`` field and clears the
    cached object so it is refetched on next access.
    """

    name: str = Field(..., max_length=255)
    description: str
    observation_type: str = Field(..., max_length=255)
    sampled_medium: str = Field(..., max_length=255)
    # Sentinel value used in place of missing results.
    no_data_value: float
    aggregation_statistic: str = Field(..., max_length=255)
    time_aggregation_interval: float
    status: Optional[str] = Field(None, max_length=255)
    result_type: str = Field(..., max_length=255)
    value_count: Optional[int] = Field(None, ge=0)
    phenomenon_begin_time: Optional[datetime] = None
    phenomenon_end_time: Optional[datetime] = None
    result_begin_time: Optional[datetime] = None
    result_end_time: Optional[datetime] = None
    is_private: bool = False
    is_visible: bool = True
    time_aggregation_interval_unit: Literal["seconds", "minutes", "hours", "days"]
    intended_time_spacing: Optional[float] = None
    intended_time_spacing_unit: Optional[
        Literal["seconds", "minutes", "hours", "days"]
    ] = None
    # Foreign keys to related resources; data_source is optional.
    data_source_id: Optional[uuid.UUID] = None
    thing_id: uuid.UUID
    workspace_id: uuid.UUID
    sensor_id: uuid.UUID
    observed_property_id: uuid.UUID
    unit_id: uuid.UUID
    processing_level_id: uuid.UUID

    # Fields that may be modified through save()/update on the base model.
    # Note: workspace_id and data_source_id are intentionally not editable here.
    _editable_fields: ClassVar[set[str]] = {
        "name", "description", "observation_type", "sampled_medium", "no_data_value", "aggregation_statistic",
        "time_aggregation_interval", "status", "result_type", "value_count", "phenomenon_begin_time",
        "phenomenon_end_time", "result_begin_time", "result_end_time", "is_private", "is_visible",
        "time_aggregation_interval_unit", "intended_time_spacing", "intended_time_spacing_unit", "thing_id",
        "sensor_id", "observed_property_id", "processing_level_id", "unit_id"
    }

    def __init__(self, client: "HydroServer", **data):
        """Initialize the datastream model and its lazy relation caches.

        :param client: Connected HydroServer client used for all API calls.
        :param data: Field values for this datastream (validated by pydantic).
        """
        super().__init__(client=client, service=client.datastreams, **data)

        # Lazy caches for related entities; None means "not fetched yet".
        self._workspace = None
        self._thing = None
        self._observed_property = None
        self._unit = None
        self._processing_level = None
        self._sensor = None
        self._data_source = None
        self._data_archives = None

    @classmethod
    def get_route(cls):
        """Return the API route segment for datastream resources."""
        return "datastreams"

    @property
    def workspace(self) -> "Workspace":
        """The workspace this datastream belongs to."""

        if self._workspace is None:
            self._workspace = self.client.workspaces.get(uid=self.workspace_id)

        return self._workspace

    @property
    def thing(self) -> "Thing":
        """The thing this datastream belongs to."""

        if self._thing is None:
            self._thing = self.client.things.get(uid=self.thing_id)

        return self._thing

    @thing.setter
    def thing(self, thing: Union["Thing", UUID, str] = ...):
        # Accepts a Thing object or a UUID/string id; None is rejected.
        if not thing:
            raise ValueError("Thing of datastream cannot be None.")
        # Only update and invalidate the cache when the id actually changes.
        if normalize_uuid(thing) != str(self.thing_id):
            self.thing_id = normalize_uuid(thing)
            self._thing = None

    @property
    def sensor(self) -> "Sensor":
        """The sensor of this datastream."""

        if self._sensor is None:
            self._sensor = self.client.sensors.get(uid=self.sensor_id)

        return self._sensor

    @sensor.setter
    def sensor(self, sensor: Union["Sensor", UUID, str] = ...):
        if not sensor:
            raise ValueError("Sensor of datastream cannot be None.")
        if normalize_uuid(sensor) != str(self.sensor_id):
            self.sensor_id = normalize_uuid(sensor)
            self._sensor = None

    @property
    def observed_property(self) -> "ObservedProperty":
        """The observed property of this datastream."""

        if self._observed_property is None:
            self._observed_property = self.client.observedproperties.get(uid=self.observed_property_id)

        return self._observed_property

    @observed_property.setter
    def observed_property(self, observed_property: Union["ObservedProperty", UUID, str] = ...):
        if not observed_property:
            raise ValueError("Observed property of datastream cannot be None.")
        if normalize_uuid(observed_property) != str(self.observed_property_id):
            self.observed_property_id = normalize_uuid(observed_property)
            self._observed_property = None

    @property
    def unit(self) -> "Unit":
        """The unit of this datastream."""

        if self._unit is None:
            self._unit = self.client.units.get(uid=self.unit_id)

        return self._unit

    @unit.setter
    def unit(self, unit: Union["Unit", UUID, str] = ...):
        if not unit:
            raise ValueError("Unit of datastream cannot be None.")
        if normalize_uuid(unit) != str(self.unit_id):
            self.unit_id = normalize_uuid(unit)
            self._unit = None

    @property
    def processing_level(self) -> "ProcessingLevel":
        """The processing level of this datastream."""

        if self._processing_level is None:
            self._processing_level = self.client.processinglevels.get(uid=self.processing_level_id)

        return self._processing_level

    @processing_level.setter
    def processing_level(self, processing_level: Union["ProcessingLevel", UUID, str] = ...):
        if not processing_level:
            raise ValueError("Processing level of datastream cannot be None.")
        if normalize_uuid(processing_level) != str(self.processing_level_id):
            self.processing_level_id = normalize_uuid(processing_level)
            self._processing_level = None

    @property
    def data_source(self) -> Optional["DataSource"]:
        """The data source of this datastream, or None if not linked."""

        if self._data_source is None and self.data_source_id is not None:
            self._data_source = self.client.datasources.get(uid=self.data_source_id)

        return self._data_source

    @property
    def data_archives(self) -> List["DataArchive"]:
        """The data archives of this datastream."""

        if self._data_archives is None:
            self._data_archives = self.client.dataarchives.list(datastream=self.uid, fetch_all=True).items

        return self._data_archives

    def get_observations(
        self,
        page: int = ...,
        page_size: int = 100000,
        order_by: List[str] = ...,
        phenomenon_time_max: datetime = ...,
        phenomenon_time_min: datetime = ...,
        result_qualifier_code: str = ...,
        fetch_all: bool = False,
    ) -> pd.DataFrame:
        """Retrieve the observations for this datastream.

        Parameters left at the Ellipsis sentinel are omitted from the query.

        :param page: 1-based page number to fetch.
        :param page_size: Number of observations per page (default 100000).
        :param order_by: Field names to order results by.
        :param phenomenon_time_max: Upper bound on the phenomenon time.
        :param phenomenon_time_min: Lower bound on the phenomenon time.
        :param result_qualifier_code: Filter by result qualifier code.
        :param fetch_all: If True, fetch all pages instead of one.
        """

        return self.client.datastreams.get_observations(
            uid=self.uid,
            page=page,
            page_size=page_size,
            order_by=order_by,
            phenomenon_time_max=phenomenon_time_max,
            phenomenon_time_min=phenomenon_time_min,
            result_qualifier_code=result_qualifier_code,
            fetch_all=fetch_all
        )

    def load_observations(
        self,
        observations: pd.DataFrame,
        mode: str = "insert"
    ) -> None:
        """Load a DataFrame of observations to the datastream.

        :param observations: Observations to upload.
        :param mode: Load mode passed through to the service (default "insert").
        """

        return self.client.datastreams.load_observations(
            uid=self.uid,
            observations=observations,
            mode=mode
        )

    def delete_observations(
        self,
        phenomenon_time_start: Optional[datetime] = None,
        phenomenon_time_end: Optional[datetime] = None,
    ):
        """Delete the observations for this datastream.

        :param phenomenon_time_start: Optional lower bound of the deletion range.
        :param phenomenon_time_end: Optional upper bound of the deletion range.
        """

        return self.client.datastreams.delete_observations(
            uid=self.uid,
            phenomenon_time_start=phenomenon_time_start,
            phenomenon_time_end=phenomenon_time_end,
        )

    # TODO: Find a better long-term solution for this issue.
    def sync_phenomenon_end_time(self):
        """Ensures the phenomenon_end_time field matches the actual end time of the observations."""

        # Fetch only the most recent observation (sorted descending by
        # phenomenonTime) directly via the raw observations endpoint.
        path = f"/{self.client.base_route}/{self.get_route()}/{str(self.uid)}/observations"
        response = self.client.request(
            "get", path, params={"page_size": 1, "order_by": "-phenomenonTime"}

        ).json()

        # NOTE(review): assumes the endpoint returns a JSON list of objects
        # with an ISO-8601 "phenomenonTime" field — confirm against the API.
        if len(response) > 0:
            self.phenomenon_end_time = datetime.fromisoformat(response[0]["phenomenonTime"])
        else:
            self.phenomenon_end_time = None

        # Persist the corrected end time back to the server.
        self.save()
|
@@ -0,0 +1,103 @@
|
|
|
1
|
+
import pandas as pd
|
|
2
|
+
from typing import Optional, Any, List, TYPE_CHECKING
|
|
3
|
+
from dataclasses import dataclass
|
|
4
|
+
from requests import Response
|
|
5
|
+
from pydantic.alias_generators import to_snake
|
|
6
|
+
|
|
7
|
+
if TYPE_CHECKING:
|
|
8
|
+
from hydroserverpy.api.models import Datastream
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
@dataclass
class ObservationCollection:
    """A single page (or merged set) of observations for a datastream,
    carrying the observation DataFrame plus pagination metadata.

    Note: the hand-written ``__init__`` below overrides the dataclass-generated
    one; the field declarations serve as documentation/typing only.
    """

    dataframe: pd.DataFrame
    filters: Optional[dict[str, Any]] = None
    order_by: Optional[List[str]] = None
    page: Optional[int] = None
    page_size: Optional[int] = None
    total_pages: Optional[int] = None
    total_count: Optional[int] = None

    def __init__(
        self,
        datastream: "Datastream",
        response: Optional[Response] = None,
        **data
    ):
        """Build a collection from either an HTTP response or explicit data.

        :param datastream: The datastream the observations belong to.
        :param response: Optional raw HTTP response to parse observations and
            pagination headers from.
        :param data: Explicit overrides (dataframe, filters, page, etc.).
        """
        self.filters = data.get("filters")
        self.order_by = data.get("order_by")
        # Pagination metadata: explicit kwargs take precedence over the
        # response's X-* headers; None if neither is available.
        self.page = data.get("page") or (int(response.headers.get("X-Page")) if response else None)
        self.page_size = data.get("page_size") or (int(response.headers.get("X-Page-Size")) if response else None)
        self.total_pages = data.get("total_pages") or (int(response.headers.get("X-Total-Pages")) if response else None)
        self.total_count = data.get("total_count") or (int(response.headers.get("X-Total-Count")) if response else None)
        self.datastream = datastream

        if "dataframe" in data:
            self.dataframe = data["dataframe"]
        elif response is not None:
            # NOTE(review): assumes the response body is a column-oriented JSON
            # object (camelCase column name -> list of values) — confirm.
            data = response.json()
            self.dataframe = pd.DataFrame({to_snake(k): v for k, v in data.items()})
            if "phenomenon_time" in self.dataframe.columns:
                # Parse timestamps as timezone-aware UTC datetimes.
                self.dataframe["phenomenon_time"] = pd.to_datetime(
                    self.dataframe["phenomenon_time"], utc=True, format="ISO8601"
                )
        else:
            self.dataframe = pd.DataFrame()

    def next_page(self):
        """Fetches the next page of data from HydroServer."""

        return self.datastream.get_observations(
            **(self.filters or {}),
            page=(self.page or 0) + 1,
            page_size=self.page_size or 100000,
            # Ellipsis is the "not provided" sentinel for get_observations.
            order_by=self.order_by or ...,
        )

    def previous_page(self):
        """Fetches the previous page of data from HydroServer.

        Returns None when already at (or before) the first page.
        """

        if not self.page or self.page <= 1:
            return None

        return self.datastream.get_observations(
            **(self.filters or {}),
            page=self.page - 1,
            page_size=self.page_size or 100000,
            order_by=self.order_by or ...,
        )

    def fetch_all(self) -> "ObservationCollection":
        """Fetches all pages of data from HydroServer for this collection."""

        all_dataframes = []
        page_num = 1

        # When total_pages is unknown, keep fetching until an empty page.
        while self.total_pages is None or page_num <= self.total_pages:
            if page_num == self.page:
                # Reuse this collection's already-fetched page.
                all_dataframes.append(self.dataframe)
            else:
                observations = self.datastream.get_observations(
                    **(self.filters or {}),
                    page=page_num,
                    page_size=self.page_size or 100000,
                    order_by=self.order_by or ...,
                )
                if observations.dataframe.empty:
                    break
                all_dataframes.append(observations.dataframe)

            page_num += 1

        merged_dataframe = pd.concat(all_dataframes, ignore_index=True)

        # Return a fresh collection representing all data as one "page".
        return self.__class__(
            dataframe=merged_dataframe,
            datastream=self.datastream,
            filters=self.filters,
            order_by=self.order_by or ...,
            page=1,
            page_size=len(merged_dataframe),
            total_pages=1,
            total_count=len(merged_dataframe)
        )
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
import uuid
|
|
2
|
+
from typing import Optional, ClassVar, TYPE_CHECKING
|
|
3
|
+
from pydantic import Field
|
|
4
|
+
from ..base import HydroServerBaseModel
|
|
5
|
+
|
|
6
|
+
if TYPE_CHECKING:
|
|
7
|
+
from hydroserverpy import HydroServer
|
|
8
|
+
from hydroserverpy.api.models import Workspace
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class ObservedProperty(HydroServerBaseModel):
    """A HydroServer observed property: the phenomenon a datastream measures.

    The owning workspace is resolved lazily from ``workspace_id`` and cached
    on first access; a None ``workspace_id`` denotes a system-level property.
    """

    name: str = Field(..., max_length=255)
    definition: str
    description: str
    observed_property_type: str = Field(..., max_length=255, alias="type")
    code: str = Field(..., max_length=255)
    workspace_id: Optional[uuid.UUID] = None

    # Fields that may be modified through save()/update on the base model.
    _editable_fields: ClassVar[set[str]] = {"name", "definition", "description", "observed_property_type", "code"}

    def __init__(self, client: "HydroServer", **data):
        """Initialize the model, binding it to the observed-properties service.

        :param client: Connected HydroServer client used for all API calls.
        :param data: Field values for this observed property.
        """
        super().__init__(client=client, service=client.observedproperties, **data)
        self._workspace = None  # lazy cache; None until first access

    @classmethod
    def get_route(cls):
        """Return the API route segment for observed-property resources."""
        return "observed-properties"

    @property
    def workspace(self) -> Optional["Workspace"]:
        """The workspace this observed property belongs to, or None."""

        if self.workspace_id and self._workspace is None:
            self._workspace = self.client.workspaces.get(uid=self.workspace_id)
        return self._workspace