hydroserverpy 0.5.0b1__py3-none-any.whl → 0.5.0b2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hydroserverpy/api/http.py +0 -2
- hydroserverpy/api/main.py +21 -0
- hydroserverpy/api/models/__init__.py +3 -0
- hydroserverpy/api/models/etl/data_archive.py +105 -0
- hydroserverpy/api/models/etl/data_source.py +150 -0
- hydroserverpy/api/models/etl/orchestration_configuration.py +35 -0
- hydroserverpy/api/models/etl/orchestration_system.py +78 -0
- hydroserverpy/api/models/iam/workspace.py +35 -0
- hydroserverpy/api/models/sta/datastream.py +3 -1
- hydroserverpy/api/services/__init__.py +3 -0
- hydroserverpy/api/services/base.py +12 -2
- hydroserverpy/api/services/etl/data_archive.py +196 -0
- hydroserverpy/api/services/etl/data_source.py +196 -0
- hydroserverpy/api/services/etl/orchestration_system.py +74 -0
- hydroserverpy/api/services/sta/observed_property.py +3 -1
- hydroserverpy/etl_csv/hydroserver_etl_csv.py +49 -34
- {hydroserverpy-0.5.0b1.dist-info → hydroserverpy-0.5.0b2.dist-info}/METADATA +1 -1
- {hydroserverpy-0.5.0b1.dist-info → hydroserverpy-0.5.0b2.dist-info}/RECORD +22 -15
- {hydroserverpy-0.5.0b1.dist-info → hydroserverpy-0.5.0b2.dist-info}/WHEEL +1 -1
- {hydroserverpy-0.5.0b1.dist-info → hydroserverpy-0.5.0b2.dist-info}/licenses/LICENSE +0 -0
- {hydroserverpy-0.5.0b1.dist-info → hydroserverpy-0.5.0b2.dist-info}/top_level.txt +0 -0
- {hydroserverpy-0.5.0b1.dist-info → hydroserverpy-0.5.0b2.dist-info}/zip-safe +0 -0
hydroserverpy/api/http.py
CHANGED
hydroserverpy/api/main.py
CHANGED
@@ -10,6 +10,9 @@ from hydroserverpy.api.services import (
     ResultQualifierService,
     SensorService,
     DatastreamService,
+    OrchestrationSystemService,
+    DataSourceService,
+    DataArchiveService,
 )
 
 
@@ -150,3 +153,21 @@ class HydroServer:
         """Utilities for managing HydroServer datastreams."""
 
         return DatastreamService(self)
+
+    @property
+    def orchestrationsystems(self):
+        """Utilities for managing HydroServer orchestration systems."""
+
+        return OrchestrationSystemService(self)
+
+    @property
+    def datasources(self):
+        """Utilities for managing HydroServer data sources."""
+
+        return DataSourceService(self)
+
+    @property
+    def dataarchives(self):
+        """Utilities for managing HydroServer data archives."""
+
+        return DataArchiveService(self)
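The three new accessors mirror the existing service properties, so the ETL resources hang off the client the same way datastreams already do. A minimal usage sketch (assuming hs is an authenticated HydroServer client created through the package's existing connection workflow):

    orchestration_systems = hs.orchestrationsystems.list()
    data_sources = hs.datasources.list()
    data_archives = hs.dataarchives.list()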
hydroserverpy/api/models/__init__.py
CHANGED
@@ -10,6 +10,9 @@ from .sta.result_qualifier import ResultQualifier
 from .sta.sensor import Sensor
 from .sta.thing import Thing
 from .sta.unit import Unit
+from .etl.orchestration_system import OrchestrationSystem
+from .etl.data_source import DataSource
+from .etl.data_archive import DataArchive
 
 Workspace.model_rebuild()
 Role.model_rebuild()
hydroserverpy/api/models/etl/data_archive.py
ADDED
@@ -0,0 +1,105 @@
+from typing import Union, Optional, TYPE_CHECKING, List
+from uuid import UUID
+from pydantic import BaseModel, Field
+from .orchestration_system import OrchestrationSystem
+from .orchestration_configuration import OrchestrationConfigurationFields
+from ..sta.datastream import Datastream
+from ..base import HydroServerModel
+
+if TYPE_CHECKING:
+    from hydroserverpy import HydroServer
+    from hydroserverpy.api.models import Workspace
+
+
+class DataArchiveFields(BaseModel):
+    name: str = Field(..., max_length=255)
+    settings: Optional[dict] = None
+
+
+class DataArchive(
+    HydroServerModel, DataArchiveFields, OrchestrationConfigurationFields
+):
+    def __init__(self, _connection: "HydroServer", _uid: Union[UUID, str], **data):
+        super().__init__(
+            _connection=_connection, _model_ref="dataarchives", _uid=_uid, **data
+        )
+
+        self._workspace_id = str(data.get("workspace_id") or data["workspaceId"])
+        self._orchestration_system_id = str(
+            data.get("orchestration_system_id") or data["orchestrationSystem"]["id"]
+        )
+
+        self._workspace = None
+
+        if data.get("orchestrationSystem"):
+            self._orchestration_system = OrchestrationSystem(
+                _connection=_connection,
+                _uid=self._orchestration_system_id,
+                **data["orchestrationSystem"]
+            )
+        else:
+            self._orchestration_system = None
+
+        if data.get("datastreams"):
+            self._datastreams = [
+                Datastream(_connection=_connection, _uid=datastream["id"], **datastream)
+                for datastream in data["datastreams"]
+            ]
+        else:
+            self._datastreams = []
+
+    @property
+    def workspace(self) -> "Workspace":
+        """The workspace this data archive belongs to."""
+
+        if self._workspace is None and self._workspace_id:
+            self._workspace = self._connection.workspaces.get(uid=self._workspace_id)
+
+        return self._workspace
+
+    @property
+    def orchestration_system(self) -> "OrchestrationSystem":
+        """The orchestration system that manages this data archive."""
+
+        if self._orchestration_system is None and self._orchestration_system_id:
+            self._orchestration_system = self._connection.orchestration_systems.get(
+                uid=self._orchestration_system_id
+            )
+
+        return self._orchestration_system
+
+    @property
+    def datastreams(self) -> List["Datastream"]:
+        """The datastreams this data archive provides data for."""
+
+        return self._datastreams
+
+    def refresh(self):
+        """Refresh this data archive from HydroServer."""
+
+        super()._refresh()
+        self._workspace = None
+
+    def save(self):
+        """Save changes to this data archive to HydroServer."""
+
+        super()._save()
+
+    def delete(self):
+        """Delete this data archive from HydroServer."""
+
+        super()._delete()
+
+    def add_datastream(self, datastream: Union["Datastream", UUID, str]):
+        """Add a datastream to this data archive."""
+
+        self._connection.dataarchives.add_datastream(
+            uid=self.uid, datastream=datastream
+        )
+
+    def remove_datastream(self, datastream: Union["Datastream", UUID, str]):
+        """Remove a datastream from this data archive."""
+
+        self._connection.dataarchives.remove_datastream(
+            uid=self.uid, datastream=datastream
+        )
hydroserverpy/api/models/etl/data_source.py
ADDED
@@ -0,0 +1,150 @@
+import tempfile
+from typing import Union, List, Optional, TYPE_CHECKING
+from uuid import UUID
+from pydantic import BaseModel, Field
+from urllib.request import urlopen
+from hydroserverpy.etl_csv.hydroserver_etl_csv import HydroServerETLCSV
+from .orchestration_system import OrchestrationSystem
+from .orchestration_configuration import OrchestrationConfigurationFields
+from ..sta.datastream import Datastream
+from ..base import HydroServerModel
+
+if TYPE_CHECKING:
+    from hydroserverpy import HydroServer
+    from hydroserverpy.api.models import Workspace
+
+
+class DataSourceFields(BaseModel):
+    name: str = Field(..., max_length=255)
+    settings: Optional[dict] = None
+
+
+class DataSource(HydroServerModel, DataSourceFields, OrchestrationConfigurationFields):
+    def __init__(self, _connection: "HydroServer", _uid: Union[UUID, str], **data):
+        super().__init__(
+            _connection=_connection, _model_ref="datasources", _uid=_uid, **data
+        )
+
+        self._workspace_id = str(data.get("workspace_id") or data["workspaceId"])
+        self._orchestration_system_id = str(
+            data.get("orchestration_system_id") or data["orchestrationSystem"]["id"]
+        )
+
+        self._workspace = None
+
+        if data.get("orchestrationSystem"):
+            self._orchestration_system = OrchestrationSystem(
+                _connection=_connection,
+                _uid=self._orchestration_system_id,
+                **data["orchestrationSystem"]
+            )
+        else:
+            self._orchestration_system = None
+
+        if data.get("datastreams"):
+            self._datastreams = [
+                Datastream(_connection=_connection, _uid=datastream["id"], **datastream)
+                for datastream in data["datastreams"]
+            ]
+        else:
+            self._datastreams = []
+
+    @property
+    def workspace(self) -> "Workspace":
+        """The workspace this data source belongs to."""
+
+        if self._workspace is None and self._workspace_id:
+            self._workspace = self._connection.workspaces.get(uid=self._workspace_id)
+
+        return self._workspace
+
+    @property
+    def orchestration_system(self) -> "OrchestrationSystem":
+        """The orchestration system that manages this data source."""
+
+        if self._orchestration_system is None and self._orchestration_system_id:
+            self._orchestration_system = self._connection.orchestration_systems.get(
+                uid=self._orchestration_system_id
+            )
+
+        return self._orchestration_system
+
+    @orchestration_system.setter
+    def orchestration_system(
+        self, orchestration_system: Union["OrchestrationSystem", UUID, str]
+    ):
+        if not orchestration_system:
+            raise ValueError("Orchestration system of data source cannot be None.")
+        if str(getattr(orchestration_system, "uid", orchestration_system)) != str(
+            self.orchestration_system.uid
+        ):
+            self._orchestration_system = self._connection.orchestrationsystems.get(
+                uid=str(getattr(orchestration_system, "uid", orchestration_system))
+            )
+
+    @property
+    def datastreams(self) -> List["Datastream"]:
+        """The datastreams this data source provides data for."""
+
+        if self._datastreams is None:
+            data_source = self._connection.datasources.get(uid=self.uid)
+            self._datastreams = data_source.datastreams
+
+        return self._datastreams
+
+    def refresh(self):
+        """Refresh this data source from HydroServer."""
+
+        super()._refresh()
+        self._workspace = None
+        self._datastreams = None
+
+    def save(self):
+        """Save changes to this data source to HydroServer."""
+
+        super()._save()
+
+    def delete(self):
+        """Delete this data source from HydroServer."""
+
+        super()._delete()
+
+    def add_datastream(self, datastream: Union["Datastream", UUID, str]):
+        """Add a datastream to this data source."""
+
+        self._connection.datasources.add_datastream(uid=self.uid, datastream=datastream)
+
+    def remove_datastream(self, datastream: Union["Datastream", UUID, str]):
+        """Remove a datastream from this data source."""
+
+        self._connection.datasources.remove_datastream(
+            uid=self.uid, datastream=datastream
+        )
+
+    # TODO: Replace with ETL module.
+    def load_data(self):
+        """Load data for this data source."""
+
+        if self.paused is True:
+            return
+
+        if self.settings["extractor"]["type"] == "local":
+            with open(self.settings["extractor"]["path"]) as data_file:
+                loader = HydroServerETLCSV(
+                    self._connection, data_file=data_file, data_source=self
+                )
+                loader.run()
+        elif self.settings["extractor"]["type"] == "HTTP":
+            with tempfile.NamedTemporaryFile(mode="w") as temp_file:
+                with urlopen(self.settings["extractor"]["urlTemplate"]) as response:
+                    chunk_size = 1024 * 1024 * 10  # Use a 10mb chunk size.
+                    while True:
+                        chunk = response.read(chunk_size)
+                        if not chunk:
+                            break
+                        temp_file.write(chunk)
+                temp_file.seek(0)
+                loader = HydroServerETLCSV(
+                    self._connection, data_file=temp_file, data_source=self
+                )
+                loader.run()
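load_data reads its extractor configuration from the model's settings dict. Based only on the keys referenced above, a local-file data source would carry settings along these lines (illustrative values; the full schema is defined by the HydroServer API):

    settings = {
        "extractor": {
            "type": "local",  # or "HTTP"
            "path": "/data/site01.csv",  # read when type == "local"
            "urlTemplate": "https://example.com/site01.csv",  # fetched when type == "HTTP"
        },
    }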
hydroserverpy/api/models/etl/orchestration_configuration.py
ADDED
@@ -0,0 +1,35 @@
+from pydantic import AliasPath
+from typing import Optional, Literal
+from datetime import datetime
+from pydantic import BaseModel, Field
+
+
+class OrchestrationConfigurationFields(BaseModel):
+    interval: Optional[int] = Field(
+        None, gt=0, validation_alias=AliasPath("schedule", "interval")
+    )
+    interval_units: Optional[Literal["minutes", "hours", "days"]] = Field(
+        None, validation_alias=AliasPath("schedule", "intervalUnits")
+    )
+    crontab: Optional[str] = Field(
+        None, max_length=255, validation_alias=AliasPath("schedule", "crontab")
+    )
+    start_time: Optional[datetime] = Field(
+        None, validation_alias=AliasPath("schedule", "startTime")
+    )
+    end_time: Optional[datetime] = Field(
+        None, validation_alias=AliasPath("schedule", "endTime")
+    )
+    last_run_successful: Optional[bool] = Field(
+        None, validation_alias=AliasPath("status", "lastRunSuccessful")
+    )
+    last_run_message: Optional[str] = Field(
+        None, max_length=255, validation_alias=AliasPath("status", "lastRunMessage")
+    )
+    last_run: Optional[datetime] = Field(
+        None, validation_alias=AliasPath("status", "lastRun")
+    )
+    next_run: Optional[datetime] = Field(
+        None, validation_alias=AliasPath("status", "nextRun")
+    )
+    paused: bool = Field(False, validation_alias=AliasPath("status", "paused"))
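Each field above uses a pydantic v2 AliasPath so the flat Python attributes are populated from the nested "schedule" and "status" objects returned by the API. A quick sketch using the class as defined:

    from hydroserverpy.api.models.etl.orchestration_configuration import (
        OrchestrationConfigurationFields,
    )

    fields = OrchestrationConfigurationFields.model_validate(
        {"schedule": {"interval": 15, "intervalUnits": "minutes"}, "status": {"paused": False}}
    )
    assert fields.interval == 15 and fields.interval_units == "minutes"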
hydroserverpy/api/models/etl/orchestration_system.py
ADDED
@@ -0,0 +1,78 @@
+from typing import Union, List, TYPE_CHECKING
+from uuid import UUID
+from pydantic import BaseModel, Field
+from ..base import HydroServerModel
+
+if TYPE_CHECKING:
+    from hydroserverpy import HydroServer
+    from hydroserverpy.api.models import Workspace, DataSource, DataArchive
+
+
+class OrchestrationSystemFields(BaseModel):
+    name: str = Field(..., max_length=255)
+    orchestration_system_type: str = Field(..., max_length=255, alias="type")
+
+
+class OrchestrationSystem(HydroServerModel, OrchestrationSystemFields):
+    def __init__(self, _connection: "HydroServer", _uid: Union[UUID, str], **data):
+        super().__init__(
+            _connection=_connection,
+            _model_ref="orchestrationsystems",
+            _uid=_uid,
+            **data
+        )
+
+        self._workspace_id = str(data.get("workspace_id") or data["workspaceId"])
+
+        self._workspace = None
+        self._datasources = None
+        self._dataarchives = None
+
+    @property
+    def workspace(self) -> "Workspace":
+        """The workspace this orchestration system belongs to."""
+
+        if self._workspace is None and self._workspace_id:
+            self._workspace = self._connection.workspaces.get(uid=self._workspace_id)
+
+        return self._workspace
+
+    @property
+    def datasources(self) -> List["DataSource"]:
+        """The data sources associated with this workspace."""
+
+        if self._datasources is None:
+            self._datasources = self._connection.datasources.list(
+                orchestration_system=self.uid
+            )
+
+        return self._datasources
+
+    @property
+    def dataarchives(self) -> List["DataArchive"]:
+        """The data archives associated with this workspace."""
+
+        if self._dataarchives is None:
+            self._dataarchives = self._connection.dataarchives.list(
+                orchestration_system=self.uid
+            )
+
+        return self._dataarchives
+
+    def refresh(self):
+        """Refresh this orchestration system from HydroServer."""
+
+        super()._refresh()
+        self._workspace = None
+        self._datasources = None
+        self._dataarchives = None
+
+    def save(self):
+        """Save changes to this orchestration system to HydroServer."""
+
+        super()._save()
+
+    def delete(self):
+        """Delete this orchestration system from HydroServer."""
+
+        super()._delete()
hydroserverpy/api/models/iam/workspace.py
CHANGED
@@ -16,6 +16,9 @@ if TYPE_CHECKING:
     ProcessingLevel,
     ResultQualifier,
     Datastream,
+    OrchestrationSystem,
+    DataSource,
+    DataArchive,
 )
 
 
@@ -41,6 +44,9 @@ class Workspace(HydroServerModel, WorkspaceFields):
         self._units = None
         self._sensors = None
         self._datastreams = None
+        self._orchestrationsystems = None
+        self._datasources = None
+        self._dataarchives = None
 
     @property
     def roles(self) -> List["Role"]:
@@ -131,6 +137,35 @@ class Workspace(HydroServerModel, WorkspaceFields):
 
         return self._datastreams
 
+    @property
+    def orchestrationsystems(self) -> List["OrchestrationSystem"]:
+        """The orchestration systems associated with this workspace."""
+
+        if self._orchestrationsystems is None:
+            self._orchestrationsystems = self._connection.orchestrationsystems.list(
+                workspace=self.uid
+            )
+
+        return self._orchestrationsystems
+
+    @property
+    def datasources(self) -> List["DataSource"]:
+        """The data sources associated with this workspace."""
+
+        if self._datasources is None:
+            self._datasources = self._connection.datasources.list(workspace=self.uid)
+
+        return self._datasources
+
+    @property
+    def dataarchives(self) -> List["DataArchive"]:
+        """The data archives associated with this workspace."""
+
+        if self._dataarchives is None:
+            self._dataarchives = self._connection.dataarchives.list(workspace=self.uid)
+
+        return self._dataarchives
+
     def refresh(self) -> None:
         """Refresh the workspace details from HydroServer."""
 
hydroserverpy/api/models/sta/datastream.py
CHANGED
@@ -263,7 +263,9 @@ class Datastream(HydroServerModel, DatastreamFields):
         """The processing level of this datastream."""
 
         if self._processing_level is None:
-            self._processing_level = self._connection.processinglevels.get(
+            self._processing_level = self._connection.processinglevels.get(
+                uid=self._processing_level_id
+            )
             self._original_data["processing_level"] = self._processing_level
 
         return self._processing_level
hydroserverpy/api/services/__init__.py
CHANGED
@@ -6,3 +6,6 @@ from .sta.processing_level import ProcessingLevelService
 from .sta.result_qualifier import ResultQualifierService
 from .sta.sensor import SensorService
 from .sta.datastream import DatastreamService
+from .etl.orchestration_system import OrchestrationSystemService
+from .etl.data_source import DataSourceService
+from .etl.data_archive import DataArchiveService
hydroserverpy/api/services/base.py
CHANGED
@@ -1,4 +1,5 @@
 from typing import TYPE_CHECKING, Type, Union, Optional
+from datetime import datetime
 from uuid import UUID
 
 if TYPE_CHECKING:
@@ -38,7 +39,7 @@ class EndpointService:
         path = f"/{self._api_route}/{self._endpoint_route}"
         headers = {"Content-type": "application/json"}
         response = self._connection.request(
-            "post", path, headers=headers, json=kwargs
+            "post", path, headers=headers, json=self._to_iso_time(kwargs)
         ).json()
 
         return self._model(
@@ -49,7 +50,7 @@ class EndpointService:
         path = f"/{self._api_route}/{self._endpoint_route}/{str(uid)}"
         headers = {"Content-type": "application/json"}
         response = self._connection.request(
-            "patch", path, headers=headers, json=kwargs
+            "patch", path, headers=headers, json=self._to_iso_time(kwargs)
         ).json()
 
         return self._model(
@@ -62,6 +63,15 @@ class EndpointService:
 
         return response
 
+    def _to_iso_time(self, obj):
+        if isinstance(obj, dict):
+            return {k: self._to_iso_time(v) for k, v in obj.items()}
+        elif isinstance(obj, list):
+            return [self._to_iso_time(i) for i in obj]
+        elif isinstance(obj, datetime):
+            return obj.isoformat()
+        return obj
+
 
 class SensorThingsService(EndpointService):
     _sta_route: str
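The new _to_iso_time helper recursively walks dicts and lists so datetime values anywhere in a create or update payload are serialized to ISO 8601 strings before the JSON request is sent. Conceptually:

    from datetime import datetime

    payload = {"schedule": {"startTime": datetime(2025, 1, 1)}}
    # after _to_iso_time: {"schedule": {"startTime": "2025-01-01T00:00:00"}}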
hydroserverpy/api/services/etl/data_archive.py
ADDED
@@ -0,0 +1,196 @@
+from datetime import datetime
+from typing import Optional, Literal, Union, List, TYPE_CHECKING
+from uuid import UUID
+from ..base import EndpointService
+from hydroserverpy.api.models import DataArchive, Datastream
+
+
+if TYPE_CHECKING:
+    from hydroserverpy import HydroServer
+    from hydroserverpy.api.models import Workspace, OrchestrationSystem
+
+
+class DataArchiveService(EndpointService):
+    def __init__(self, connection: "HydroServer"):
+        self._model = DataArchive
+        self._api_route = "api/data"
+        self._endpoint_route = "data-archives"
+
+        super().__init__(connection)
+
+    def list(
+        self,
+        workspace: Optional[Union["Workspace", UUID, str]] = None,
+        orchestration_system: Optional[Union["OrchestrationSystem", UUID, str]] = None,
+    ) -> List["DataArchive"]:
+        """Fetch a collection of data archives."""
+
+        params = {}
+
+        workspace_id = getattr(workspace, "uid", workspace)
+        workspace_id = str(workspace_id) if workspace_id else None
+
+        orchestration_system_id = getattr(
+            orchestration_system, "uid", orchestration_system
+        )
+        orchestration_system_id = (
+            str(orchestration_system_id) if orchestration_system_id else None
+        )
+
+        if workspace_id:
+            params["workspace_id"] = workspace_id
+
+        if orchestration_system_id:
+            params["orchestration_system_id"] = orchestration_system_id
+
+        return super()._list(
+            params=params,
+        )
+
+    def get(self, uid: Union[UUID, str]) -> "DataArchive":
+        """Get a data archive by ID."""
+
+        return super()._get(uid=str(uid))
+
+    def create(
+        self,
+        name: str,
+        workspace: Union["Workspace", UUID, str],
+        orchestration_system: Union["OrchestrationSystem", UUID, str],
+        settings: Optional[dict] = None,
+        interval: Optional[int] = None,
+        interval_units: Optional[Literal["minutes", "hours", "days"]] = None,
+        crontab: Optional[str] = None,
+        start_time: Optional[datetime] = None,
+        end_time: Optional[datetime] = None,
+        last_run_successful: Optional[bool] = None,
+        last_run_message: Optional[str] = None,
+        last_run: Optional[datetime] = None,
+        next_run: Optional[datetime] = None,
+        paused: bool = False,
+        datastreams: Optional[List[Union["Datastream", UUID, str]]] = None,
+    ) -> "DataArchive":
+        """Create a new data archive."""
+
+        kwargs = {
+            "name": name,
+            "workspaceId": str(getattr(workspace, "uid", workspace)),
+            "orchestrationSystemId": getattr(
+                orchestration_system, "uid", orchestration_system
+            ),
+            "settings": settings,
+            "schedule": {
+                "interval": interval,
+                "intervalUnits": interval_units,
+                "crontab": crontab,
+                "startTime": start_time,
+                "endTime": end_time,
+            },
+            "status": {
+                "lastRunSuccessful": last_run_successful,
+                "lastRunMessage": last_run_message,
+                "lastRun": last_run,
+                "nextRun": next_run,
+                "paused": paused,
+            },
+            "datastreamIds": (
+                [
+                    str(getattr(datastream, "uid", datastream))
+                    for datastream in datastreams
+                ]
+                if datastreams
+                else []
+            ),
+        }
+
+        return super()._create(**kwargs)
+
+    def update(
+        self,
+        uid: Union[UUID, str],
+        name: str = ...,
+        orchestration_system: Union["OrchestrationSystem", UUID, str] = ...,
+        settings: Optional[dict] = ...,
+        interval: Optional[int] = ...,
+        interval_units: Optional[Literal["minutes", "hours", "days"]] = ...,
+        crontab: Optional[str] = ...,
+        start_time: Optional[datetime] = ...,
+        end_time: Optional[datetime] = ...,
+        last_run_successful: Optional[bool] = ...,
+        last_run_message: Optional[str] = ...,
+        last_run: Optional[datetime] = ...,
+        next_run: Optional[datetime] = ...,
+        paused: bool = ...,
+    ) -> "DataArchive":
+        """Update a data archive."""
+
+        status_kwargs = {
+            k: v
+            for k, v in {
+                "lastRunSuccessful": last_run_successful,
+                "lastRunMessage": last_run_message,
+                "lastRun": last_run,
+                "nextRun": next_run,
+                "paused": paused,
+            }.items()
+            if v is not ...
+        }
+        status_kwargs = status_kwargs if status_kwargs else ...
+
+        schedule_kwargs = {
+            k: v
+            for k, v in {
+                "interval": interval,
+                "intervalUnits": interval_units,
+                "crontab": crontab,
+                "startTime": start_time,
+                "endTime": end_time,
+            }.items()
+            if v is not ...
+        }
+        schedule_kwargs = schedule_kwargs if schedule_kwargs else ...
+
+        kwargs = {
+            k: v
+            for k, v in {
+                "name": name,
+                "orchestrationSystemId": getattr(
+                    orchestration_system, "uid", orchestration_system
+                ),
+                "settings": settings,
+                "schedule": schedule_kwargs,
+                "status": status_kwargs,
+            }.items()
+            if v is not ...
+        }
+
+        return super()._update(uid=str(uid), **kwargs)
+
+    def delete(self, uid: Union[UUID, str]) -> None:
+        """Delete a data archive."""
+
+        super()._delete(uid=str(uid))
+
+    def add_datastream(
+        self, uid: Union[UUID, str], datastream: Union["Datastream", UUID, str]
+    ) -> None:
+        """Add a datastream to this data archive."""
+
+        datastream_id = str(getattr(datastream, "uid", datastream))
+
+        self._connection.request(
+            "post",
+            f"{self._api_route}/{self._endpoint_route}/{str(uid)}/datastreams/{datastream_id}",
+        )
+
+    def remove_datastream(
+        self, uid: Union[UUID, str], datastream: Union["Datastream", UUID, str]
+    ) -> None:
+        """Remove a datastream from this data archive."""
+
+        datastream_id = str(getattr(datastream, "uid", datastream))
+
+        self._connection.request(
+            "delete",
+            f"{self._api_route}/{self._endpoint_route}/{str(uid)}/datastreams/{datastream_id}",
+        )
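Because create builds the nested schedule and status payload itself, callers pass only flat keyword arguments. An illustrative sketch (hs, workspace, orch_system, and datastream are assumed to exist already):

    archive = hs.dataarchives.create(
        name="Nightly archive",
        workspace=workspace,
        orchestration_system=orch_system,
        interval=1,
        interval_units="days",
    )
    archive.add_datastream(datastream)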
hydroserverpy/api/services/etl/data_source.py
ADDED
@@ -0,0 +1,196 @@
+from datetime import datetime
+from typing import Optional, Literal, Union, List, TYPE_CHECKING
+from uuid import UUID
+from ..base import EndpointService
+from hydroserverpy.api.models import DataSource, Datastream
+
+
+if TYPE_CHECKING:
+    from hydroserverpy import HydroServer
+    from hydroserverpy.api.models import Workspace, OrchestrationSystem
+
+
+class DataSourceService(EndpointService):
+    def __init__(self, connection: "HydroServer"):
+        self._model = DataSource
+        self._api_route = "api/data"
+        self._endpoint_route = "data-sources"
+
+        super().__init__(connection)
+
+    def list(
+        self,
+        workspace: Optional[Union["Workspace", UUID, str]] = None,
+        orchestration_system: Optional[Union["OrchestrationSystem", UUID, str]] = None,
+    ) -> List["DataSource"]:
+        """Fetch a collection of data sources."""
+
+        params = {}
+
+        workspace_id = getattr(workspace, "uid", workspace)
+        workspace_id = str(workspace_id) if workspace_id else None
+
+        orchestration_system_id = getattr(
+            orchestration_system, "uid", orchestration_system
+        )
+        orchestration_system_id = (
+            str(orchestration_system_id) if orchestration_system_id else None
+        )
+
+        if workspace_id:
+            params["workspace_id"] = workspace_id
+
+        if orchestration_system_id:
+            params["orchestration_system_id"] = orchestration_system_id
+
+        return super()._list(
+            params=params,
+        )
+
+    def get(self, uid: Union[UUID, str]) -> "DataSource":
+        """Get a data source by ID."""
+
+        return super()._get(uid=str(uid))
+
+    def create(
+        self,
+        name: str,
+        workspace: Union["Workspace", UUID, str],
+        orchestration_system: Union["OrchestrationSystem", UUID, str],
+        settings: Optional[dict] = None,
+        interval: Optional[int] = None,
+        interval_units: Optional[Literal["minutes", "hours", "days"]] = None,
+        crontab: Optional[str] = None,
+        start_time: Optional[datetime] = None,
+        end_time: Optional[datetime] = None,
+        last_run_successful: Optional[bool] = None,
+        last_run_message: Optional[str] = None,
+        last_run: Optional[datetime] = None,
+        next_run: Optional[datetime] = None,
+        paused: bool = False,
+        datastreams: Optional[List[Union["Datastream", UUID, str]]] = None,
+    ) -> "DataSource":
+        """Create a new data source."""
+
+        kwargs = {
+            "name": name,
+            "workspaceId": str(getattr(workspace, "uid", workspace)),
+            "orchestrationSystemId": getattr(
+                orchestration_system, "uid", orchestration_system
+            ),
+            "settings": settings,
+            "schedule": {
+                "interval": interval,
+                "intervalUnits": interval_units,
+                "crontab": crontab,
+                "startTime": start_time,
+                "endTime": end_time,
+            },
+            "status": {
+                "lastRunSuccessful": last_run_successful,
+                "lastRunMessage": last_run_message,
+                "lastRun": last_run,
+                "nextRun": next_run,
+                "paused": paused,
+            },
+            "datastreamIds": (
+                [
+                    str(getattr(datastream, "uid", datastream))
+                    for datastream in datastreams
+                ]
+                if datastreams
+                else []
+            ),
+        }
+
+        return super()._create(**kwargs)
+
+    def update(
+        self,
+        uid: Union[UUID, str],
+        name: str = ...,
+        orchestration_system: Union["OrchestrationSystem", UUID, str] = ...,
+        settings: Optional[dict] = ...,
+        interval: Optional[int] = ...,
+        interval_units: Optional[Literal["minutes", "hours", "days"]] = ...,
+        crontab: Optional[str] = ...,
+        start_time: Optional[datetime] = ...,
+        end_time: Optional[datetime] = ...,
+        last_run_successful: Optional[bool] = ...,
+        last_run_message: Optional[str] = ...,
+        last_run: Optional[datetime] = ...,
+        next_run: Optional[datetime] = ...,
+        paused: bool = ...,
+    ) -> "DataSource":
+        """Update a data source."""
+
+        status_kwargs = {
+            k: v
+            for k, v in {
+                "lastRunSuccessful": last_run_successful,
+                "lastRunMessage": last_run_message,
+                "lastRun": last_run,
+                "nextRun": next_run,
+                "paused": paused,
+            }.items()
+            if v is not ...
+        }
+        status_kwargs = status_kwargs if status_kwargs else ...
+
+        schedule_kwargs = {
+            k: v
+            for k, v in {
+                "interval": interval,
+                "intervalUnits": interval_units,
+                "crontab": crontab,
+                "startTime": start_time,
+                "endTime": end_time,
+            }.items()
+            if v is not ...
+        }
+        schedule_kwargs = schedule_kwargs if schedule_kwargs else ...
+
+        kwargs = {
+            k: v
+            for k, v in {
+                "name": name,
+                "orchestrationSystemId": getattr(
+                    orchestration_system, "uid", orchestration_system
+                ),
+                "settings": settings,
+                "schedule": schedule_kwargs,
+                "status": status_kwargs,
+            }.items()
+            if v is not ...
+        }
+
+        return super()._update(uid=str(uid), **kwargs)
+
+    def delete(self, uid: Union[UUID, str]) -> None:
+        """Delete a data source."""
+
+        super()._delete(uid=str(uid))
+
+    def add_datastream(
+        self, uid: Union[UUID, str], datastream: Union["Datastream", UUID, str]
+    ) -> None:
+        """Add a datastream to this data source."""
+
+        datastream_id = str(getattr(datastream, "uid", datastream))
+
+        self._connection.request(
+            "post",
+            f"{self._api_route}/{self._endpoint_route}/{str(uid)}/datastreams/{datastream_id}",
+        )
+
+    def remove_datastream(
+        self, uid: Union[UUID, str], datastream: Union["Datastream", UUID, str]
+    ) -> None:
+        """Remove a datastream from this data source."""
+
+        datastream_id = str(getattr(datastream, "uid", datastream))
+
+        self._connection.request(
+            "delete",
+            f"{self._api_route}/{self._endpoint_route}/{str(uid)}/datastreams/{datastream_id}",
+        )
hydroserverpy/api/services/etl/orchestration_system.py
ADDED
@@ -0,0 +1,74 @@
+from typing import Optional, Union, List, TYPE_CHECKING
+from uuid import UUID
+from ..base import EndpointService
+from hydroserverpy.api.models import OrchestrationSystem
+
+
+if TYPE_CHECKING:
+    from hydroserverpy import HydroServer
+    from hydroserverpy.api.models import Workspace
+
+
+class OrchestrationSystemService(EndpointService):
+    def __init__(self, connection: "HydroServer"):
+        self._model = OrchestrationSystem
+        self._api_route = "api/data"
+        self._endpoint_route = "orchestration-systems"
+
+        super().__init__(connection)
+
+    def list(
+        self,
+        workspace: Optional[Union["Workspace", UUID, str]] = None,
+    ) -> List["OrchestrationSystem"]:
+        """Fetch a collection of orchestration systems."""
+
+        workspace_id = getattr(workspace, "uid", workspace)
+        workspace_id = str(workspace_id) if workspace_id else None
+
+        return super()._list(
+            params={"workspace_id": workspace_id} if workspace_id else {},
+        )
+
+    def get(self, uid: Union[UUID, str]) -> "OrchestrationSystem":
+        """Get an orchestration system by ID."""
+
+        return super()._get(uid=str(uid))
+
+    def create(
+        self,
+        workspace: Union["Workspace", UUID, str],
+        name: str,
+        orchestration_system_type: str,
+    ) -> "OrchestrationSystem":
+        """Create a new orchestration system."""
+
+        kwargs = {
+            "name": name,
+            "type": orchestration_system_type,
+            "workspaceId": str(getattr(workspace, "uid", workspace)),
+        }
+
+        return super()._create(**kwargs)
+
+    def update(
+        self,
+        uid: Union[UUID, str],
+        name: str = ...,
+        orchestration_system_type: str = ...,
+    ) -> "OrchestrationSystem":
+        """Update an orchestration system."""
+
+        kwargs = {
+            "name": name,
+            "type": orchestration_system_type,
+        }
+
+        return super()._update(
+            uid=str(uid), **{k: v for k, v in kwargs.items() if v is not ...}
+        )
+
+    def delete(self, uid: Union[UUID, str]) -> None:
+        """Delete an orchestration system."""
+
+        super()._delete(uid=str(uid))
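Note that update uses the Ellipsis literal (...) as its "not provided" sentinel, so only explicitly passed fields end up in the PATCH body. For example (assuming hs and an existing system's uid):

    hs.orchestrationsystems.update(uid=uid, name="Airflow Scheduler")
    # sends {"name": "Airflow Scheduler"} and leaves "type" unchanged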
hydroserverpy/api/services/sta/observed_property.py
CHANGED
@@ -35,7 +35,9 @@ class ObservedPropertyService(SensorThingsService):
 
         return super()._list(params=params)
 
-    def get(
+    def get(
+        self, uid: Union[UUID, str], fetch_by_datastream_uid: bool = False
+    ) -> "ObservedProperty":
         """Get an observed property by ID."""
 
         return self._get(
hydroserverpy/etl_csv/hydroserver_etl_csv.py
CHANGED
@@ -10,7 +10,7 @@ from .exceptions import HeaderParsingError, TimestampParsingError
 import warnings
 
 if TYPE_CHECKING:
-    from
+    from hydroserverpy.api.models import DataSource
 
 logger = logging.getLogger("hydroserver_etl")
 logger.addHandler(logging.NullHandler())
@@ -36,10 +36,15 @@ class HydroServerETLCSV:
             datastream.uid: datastream for datastream in data_source.datastreams
         }
 
+        self._datastream_mapping = {
+            mapping["targetIdentifier"]: mapping["sourceIdentifier"]
+            for payload in self._data_source.settings["payloads"]
+            for mapping in payload.get("mappings", [])
+        }
+
         self._timestamp_column_index = None
         self._datastream_column_indexes = None
         self._datastream_start_row_indexes = {}
-        self._last_loaded_timestamp = self._data_source.data_source_thru
 
         self._message = None
         self._failed_datastreams = []
@@ -59,7 +64,10 @@ class HydroServerETLCSV:
         :return: None
         """
 
-        data_reader = csv.reader(
+        data_reader = csv.reader(
+            self._data_file,
+            delimiter=self._data_source.settings["transformer"]["delimiter"],
+        )
 
         try:
             for i, row in enumerate(data_reader):
@@ -104,13 +112,13 @@ class HydroServerETLCSV:
         :return: A list of datetime and value pairs for each datastream
         """
 
-        if index == self._data_source.
-            index == self._data_source.
+        if index == self._data_source.settings["transformer"]["headerRow"] or (
+            index == self._data_source.settings["transformer"]["dataStartRow"]
            and self._timestamp_column_index is None
         ):
             self._parse_file_header(row)
 
-        if index < self._data_source.
+        if index < self._data_source.settings["transformer"]["dataStartRow"]:
             return
 
         timestamp = self._parse_row_timestamp(row)
@@ -135,7 +143,7 @@ class HydroServerETLCSV:
                         "phenomenon_time": timestamp,
                         "result": row[
                             self._datastream_column_indexes[
-                                datastream.
+                                self._datastream_mapping[str(datastream.uid)]
                             ]
                         ],
                     }
@@ -155,17 +163,19 @@ class HydroServerETLCSV:
 
         try:
             self._timestamp_column_index = (
-                row.index(self._data_source.
-                if isinstance(
-
+                row.index(self._data_source.settings["transformer"]["timestampKey"])
+                if isinstance(
+                    self._data_source.settings["transformer"]["timestampKey"], str
+                )
+                else int(self._data_source.settings["transformer"]["timestampKey"]) - 1
             )
             if self._timestamp_column_index > len(row):
                 raise ValueError
             self._datastream_column_indexes = {
-                datastream.
-                row.index(datastream.
-                if not datastream.
-                else int(datastream.
+                self._datastream_mapping[str(datastream.uid)]: (
+                    row.index(self._datastream_mapping[str(datastream.uid)])
+                    if not self._datastream_mapping[str(datastream.uid)].isdigit()
+                    else int(self._datastream_mapping[str(datastream.uid)]) - 1
                 )
                 for datastream in self._datastreams.values()
             }
@@ -190,28 +200,40 @@ class HydroServerETLCSV:
 
         try:
             if (
-                self._data_source.
-
+                self._data_source.settings["transformer"].get("timestampFormat")
+                == "ISO8601"
+                or self._data_source.settings["transformer"].get("timestampFormat")
+                is None
             ):
                 timestamp = isoparse(row[self._timestamp_column_index])
             else:
                 timestamp = datetime.strptime(
                     row[self._timestamp_column_index],
-                    self._data_source.
+                    self._data_source.settings["transformer"].get("timestampFormat"),
                 )
         except ValueError as e:
             raise TimestampParsingError(str(e)) from e
 
         if timestamp.tzinfo is None:
-            if not self._data_source.
+            if not self._data_source.settings["transformer"].get(
+                "timestampOffset"
+            ) or self._data_source.settings["transformer"].get(
+                "timestampOffset"
+            ).endswith(
+                "0000"
+            ):
                 timestamp = timestamp.replace(tzinfo=timezone.utc)
             else:
                 try:
                     timestamp = timestamp.replace(
                         tzinfo=datetime.strptime(
-                            self._data_source.
+                            self._data_source.settings["transformer"].get(
+                                "timestampFormat"
+                            )[:-2]
                             + ":"
-                            + self._data_source.
+                            + self._data_source.settings["transformer"].get(
+                                "timestampFormat"
+                            )[3:],
                             "%z",
                         ).tzinfo
                     )
@@ -264,12 +286,6 @@ class HydroServerETLCSV:
             except HTTPError:
                 failed_datastreams.append(datastream_id)
 
-            if not self._last_loaded_timestamp or (
-                observations[-1]["phenomenon_time"]
-                and observations[-1]["phenomenon_time"]
-                > self._last_loaded_timestamp
-            ):
-                self._last_loaded_timestamp = observations[-1]["phenomenon_time"]
         elif datastream_id in self._failed_datastreams:
             logger.info(
                 f"Skipping observations POST request from "
@@ -292,29 +308,28 @@ class HydroServerETLCSV:
         """
 
         if self._data_source.crontab is not None:
-
+            next_run = croniter.croniter(
                 self._data_source.crontab, datetime.now()
             ).get_next(datetime)
         elif (
             self._data_source.interval is not None
             and self._data_source.interval_units is not None
         ):
-
+            next_run = datetime.now() + timedelta(
                 **{self._data_source.interval_units: self._data_source.interval}
            )
         else:
-
+            next_run = None
 
-        self._data_source.
-        self._data_source.last_sync_successful = (
+        self._data_source.last_run_successful = (
            True
            if not self._file_timestamp_error
            and not self._file_header_error
            and len(self._failed_datastreams) == 0
            else False
        )
-        self._data_source.
-        self._data_source.
-        self._data_source.
+        self._data_source.last_run_message = self._message
+        self._data_source.last_run = datetime.now(timezone.utc)
+        self._data_source.next_run = next_run
 
         self._data_source.save()
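The reworked loader now pulls every CSV parsing option from settings["transformer"] and resolves datastream columns through settings["payloads"]. Pieced together from the keys referenced above, a configuration might look like this (illustrative values; the exact schema is defined by the HydroServer API):

    settings = {
        "transformer": {
            "delimiter": ",",
            "headerRow": 1,
            "dataStartRow": 2,
            "timestampKey": "timestamp",  # a column name, or a 1-based column number
            "timestampFormat": "ISO8601",
        },
        "payloads": [
            {
                "mappings": [
                    # sourceIdentifier: CSV column; targetIdentifier: datastream ID
                    {"sourceIdentifier": "water_temp", "targetIdentifier": "<datastream-uuid>"},
                ]
            }
        ],
    }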
{hydroserverpy-0.5.0b1.dist-info → hydroserverpy-0.5.0b2.dist-info}/RECORD
CHANGED
@@ -1,31 +1,38 @@
 hydroserverpy/__init__.py,sha256=FgaGFyhCjwmpJYEKNzOZxvfRx2neWMaOybj1z02_VSE,218
 hydroserverpy/api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-hydroserverpy/api/http.py,sha256=
-hydroserverpy/api/main.py,sha256=
-hydroserverpy/api/models/__init__.py,sha256=
+hydroserverpy/api/http.py,sha256=C5DgvEeiu54RaL-9oouFPSKosC8Uy5Qdwm5hYh2Ps-s,620
+hydroserverpy/api/main.py,sha256=OWJpCIuheBgWQA7R33BiG89-upMS-a3K_AScbVeVjxQ,4760
+hydroserverpy/api/models/__init__.py,sha256=ELrf3b7Aix7YcVF__Q_8e_G_FF8GYlX0J5l7fkGcHnY,690
 hydroserverpy/api/models/base.py,sha256=dc2tfMSgizymxAAOVURfy7Jzeh6xIiiq7hfWZI7l1_Q,2280
 hydroserverpy/api/models/etl/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+hydroserverpy/api/models/etl/data_archive.py,sha256=u-gpvUsaWaw0kyF3bPMm2e55Jx2yhvSV9ufXXaNtrTc,3429
+hydroserverpy/api/models/etl/data_source.py,sha256=ca-9KKVhkLNaUn3vOIk-JgdWk58fTRme8YKIesk8WIw,5455
+hydroserverpy/api/models/etl/orchestration_configuration.py,sha256=ElSrgi7ioFZJFJg6aGogW5ZZk7fA17y4p--yWwiOhZ0,1367
+hydroserverpy/api/models/etl/orchestration_system.py,sha256=25En2G0z1gQzN-RW3UlrEGgkC952QDW21oYnawCX8hY,2357
 hydroserverpy/api/models/iam/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hydroserverpy/api/models/iam/account.py,sha256=7COk_CPYFlthg1uFWTBlJESfnuqMW90TSjZoIcBb-_8,439
 hydroserverpy/api/models/iam/collaborator.py,sha256=jp661DKDCwk8c8HFPAV-YVhEc80F5eGDKaSHmH62Q8Q,1007
 hydroserverpy/api/models/iam/role.py,sha256=8FVTj_1QwtPF9tk7baliMVg000kjc5N8oP6eYo8vTDY,275
-hydroserverpy/api/models/iam/workspace.py,sha256=
+hydroserverpy/api/models/iam/workspace.py,sha256=s9u1oZyOdxM7txjJARFcIBrWMHQSDxODdreiatFsXJs,7331
 hydroserverpy/api/models/sta/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-hydroserverpy/api/models/sta/datastream.py,sha256=
+hydroserverpy/api/models/sta/datastream.py,sha256=sB-KifvegbyDUnyPE_NCHFrab1ZSVVb6g-Gs7kUgMiE,10774
 hydroserverpy/api/models/sta/observed_property.py,sha256=ThTg8aPMHPxbk9Hzpxw3AwM16gE1xvYpRK8UkiOdGeA,2180
 hydroserverpy/api/models/sta/processing_level.py,sha256=y5_0wX7QGXgswvukXJtbpOiTCZ9pI8E08DXaTSUHakg,1470
 hydroserverpy/api/models/sta/result_qualifier.py,sha256=IJcY04KjP9e2D-jPzUJjH2PC-JvDNCjbi5LKkTVSwgw,1416
 hydroserverpy/api/models/sta/sensor.py,sha256=TD9R1Uwcu1t9tRQBfk0crsSJmV5UN_9kH9Ye9b7lDJc,3055
 hydroserverpy/api/models/sta/thing.py,sha256=o4Xn_Luy2IEOCBjXTbek7GvPoXZyKA0dhfzoFM6nfTs,6357
 hydroserverpy/api/models/sta/unit.py,sha256=Pbxxp9hZErsrYImIb8-1HVnZAsJopE3US_AplSQWOJQ,1398
-hydroserverpy/api/services/__init__.py,sha256=
-hydroserverpy/api/services/base.py,sha256=
+hydroserverpy/api/services/__init__.py,sha256=B0kGyn8_HlIBf9deU6qLocQTbxe59qestUZtLU9CeXk,532
+hydroserverpy/api/services/base.py,sha256=zLhRDlf4h4zBXHpicKMI7xgQrXN_wHvvthvQzYNyj2E,3415
 hydroserverpy/api/services/etl/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+hydroserverpy/api/services/etl/data_archive.py,sha256=hlNJOHJSZ1kV2n2xivWIBtT1Eovj65PDbwpAyXnlZsM,6506
+hydroserverpy/api/services/etl/data_source.py,sha256=DCgTyh8lF2iwh4uszePFg9UupXxJCN7Ww9Ut1MQKHis,6491
+hydroserverpy/api/services/etl/orchestration_system.py,sha256=JFuSJJUq4JJUt8KlZ-Ga0ktyQIe2U0Sa7ogd4oLjex4,2166
 hydroserverpy/api/services/iam/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hydroserverpy/api/services/iam/workspace.py,sha256=jJiqkMxFEp9REjR4LXyVp2o45CGBrrEadGelPPCuRJs,4547
 hydroserverpy/api/services/sta/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hydroserverpy/api/services/sta/datastream.py,sha256=_m-xFom3z8wo5-1_q8NjWUpcw36wYv1brIG7xeGGadk,12402
-hydroserverpy/api/services/sta/observed_property.py,sha256=
+hydroserverpy/api/services/sta/observed_property.py,sha256=nRlqBldJpXlj8VOZ4EwNOs4ZgmBw5w-EqAChfM3Z0Z0,2908
 hydroserverpy/api/services/sta/processing_level.py,sha256=Oupfeww2XgT83AwR5Spt91VjZK6MG0XIl11Et9fRjA0,2255
 hydroserverpy/api/services/sta/result_qualifier.py,sha256=XG5Ng3xdFT-l3Ktkuq23Cty1RfmepBO7EQ9gPzidZuA,2069
 hydroserverpy/api/services/sta/sensor.py,sha256=SbDhLjlOaM2ypLDfXmQVinj7eHHJ_fHxjTD68dM2pQI,3473
@@ -48,12 +55,12 @@ hydroserverpy/etl/transformers/csv_transformer.py,sha256=9DKSO4NfUUDlr_c6UnH4AU3
 hydroserverpy/etl/transformers/json_transformer.py,sha256=ity0MXcYjEnlun4Y6cVSrnjrglKrK4JOXXHxWHIHN2A,2323
 hydroserverpy/etl_csv/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hydroserverpy/etl_csv/exceptions.py,sha256=0UY8YUlNepG0y6FfH36hJyR1bOhwYHSZIdUSSMTg7GA,314
-hydroserverpy/etl_csv/hydroserver_etl_csv.py,sha256=
+hydroserverpy/etl_csv/hydroserver_etl_csv.py,sha256=1RRqZWXb8pMhkeg6Tn1IbQ8SdKNgOl78eqeBguEXnog,13849
 hydroserverpy/quality/__init__.py,sha256=GGBMkFSXciJLYrbV-NraFrj_mXWCy_GTcy9KKrKXU4c,84
 hydroserverpy/quality/service.py,sha256=U02UfLKVmFvr5ySiH0n0JYzUIabq5uprrHIiwcqBlqY,13879
-hydroserverpy-0.5.
-hydroserverpy-0.5.
-hydroserverpy-0.5.
-hydroserverpy-0.5.
-hydroserverpy-0.5.
-hydroserverpy-0.5.
+hydroserverpy-0.5.0b2.dist-info/licenses/LICENSE,sha256=xVqFxDw3QOEJukakL7gQCqIMTQ1dlSCTo6Oc1otNW80,1508
+hydroserverpy-0.5.0b2.dist-info/METADATA,sha256=IBrwK9SbCsLvo0JQyIEQmBCxSMpdkZ4i0uNTTVU96cY,532
+hydroserverpy-0.5.0b2.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+hydroserverpy-0.5.0b2.dist-info/top_level.txt,sha256=Zf37hrncXLOYvXhgCrf5mZdeq81G9fShdE2LfYbtb7w,14
+hydroserverpy-0.5.0b2.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+hydroserverpy-0.5.0b2.dist-info/RECORD,,
{hydroserverpy-0.5.0b1.dist-info → hydroserverpy-0.5.0b2.dist-info}/licenses/LICENSE
File without changes
{hydroserverpy-0.5.0b1.dist-info → hydroserverpy-0.5.0b2.dist-info}/top_level.txt
File without changes
{hydroserverpy-0.5.0b1.dist-info → hydroserverpy-0.5.0b2.dist-info}/zip-safe
File without changes