hydroserverpy 1.5.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hydroserverpy/__init__.py +7 -0
- hydroserverpy/api/__init__.py +0 -0
- hydroserverpy/api/client.py +203 -0
- hydroserverpy/api/models/__init__.py +22 -0
- hydroserverpy/api/models/base.py +207 -0
- hydroserverpy/api/models/etl/__init__.py +26 -0
- hydroserverpy/api/models/etl/data_archive.py +77 -0
- hydroserverpy/api/models/etl/data_source.py +146 -0
- hydroserverpy/api/models/etl/etl_configuration.py +224 -0
- hydroserverpy/api/models/etl/extractors/__init__.py +6 -0
- hydroserverpy/api/models/etl/extractors/base.py +52 -0
- hydroserverpy/api/models/etl/extractors/ftp_extractor.py +50 -0
- hydroserverpy/api/models/etl/extractors/http_extractor.py +28 -0
- hydroserverpy/api/models/etl/extractors/local_file_extractor.py +20 -0
- hydroserverpy/api/models/etl/factories.py +23 -0
- hydroserverpy/api/models/etl/loaders/__init__.py +4 -0
- hydroserverpy/api/models/etl/loaders/base.py +11 -0
- hydroserverpy/api/models/etl/loaders/hydroserver_loader.py +98 -0
- hydroserverpy/api/models/etl/orchestration_configuration.py +35 -0
- hydroserverpy/api/models/etl/orchestration_system.py +63 -0
- hydroserverpy/api/models/etl/schedule.py +16 -0
- hydroserverpy/api/models/etl/status.py +14 -0
- hydroserverpy/api/models/etl/timestamp_parser.py +112 -0
- hydroserverpy/api/models/etl/transformers/__init__.py +5 -0
- hydroserverpy/api/models/etl/transformers/base.py +135 -0
- hydroserverpy/api/models/etl/transformers/csv_transformer.py +88 -0
- hydroserverpy/api/models/etl/transformers/json_transformer.py +48 -0
- hydroserverpy/api/models/etl/types.py +7 -0
- hydroserverpy/api/models/iam/__init__.py +0 -0
- hydroserverpy/api/models/iam/account.py +12 -0
- hydroserverpy/api/models/iam/apikey.py +96 -0
- hydroserverpy/api/models/iam/collaborator.py +70 -0
- hydroserverpy/api/models/iam/role.py +38 -0
- hydroserverpy/api/models/iam/workspace.py +297 -0
- hydroserverpy/api/models/sta/__init__.py +0 -0
- hydroserverpy/api/models/sta/datastream.py +254 -0
- hydroserverpy/api/models/sta/observation.py +103 -0
- hydroserverpy/api/models/sta/observed_property.py +37 -0
- hydroserverpy/api/models/sta/processing_level.py +35 -0
- hydroserverpy/api/models/sta/result_qualifier.py +34 -0
- hydroserverpy/api/models/sta/sensor.py +44 -0
- hydroserverpy/api/models/sta/thing.py +113 -0
- hydroserverpy/api/models/sta/unit.py +36 -0
- hydroserverpy/api/services/__init__.py +12 -0
- hydroserverpy/api/services/base.py +118 -0
- hydroserverpy/api/services/etl/__init__.py +0 -0
- hydroserverpy/api/services/etl/data_archive.py +166 -0
- hydroserverpy/api/services/etl/data_source.py +163 -0
- hydroserverpy/api/services/etl/orchestration_system.py +66 -0
- hydroserverpy/api/services/iam/__init__.py +0 -0
- hydroserverpy/api/services/iam/role.py +38 -0
- hydroserverpy/api/services/iam/workspace.py +232 -0
- hydroserverpy/api/services/sta/__init__.py +0 -0
- hydroserverpy/api/services/sta/datastream.py +296 -0
- hydroserverpy/api/services/sta/observed_property.py +82 -0
- hydroserverpy/api/services/sta/processing_level.py +72 -0
- hydroserverpy/api/services/sta/result_qualifier.py +64 -0
- hydroserverpy/api/services/sta/sensor.py +102 -0
- hydroserverpy/api/services/sta/thing.py +195 -0
- hydroserverpy/api/services/sta/unit.py +78 -0
- hydroserverpy/api/utils.py +22 -0
- hydroserverpy/quality/__init__.py +1 -0
- hydroserverpy/quality/service.py +405 -0
- hydroserverpy-1.5.1.dist-info/METADATA +66 -0
- hydroserverpy-1.5.1.dist-info/RECORD +69 -0
- hydroserverpy-1.5.1.dist-info/WHEEL +5 -0
- hydroserverpy-1.5.1.dist-info/licenses/LICENSE +28 -0
- hydroserverpy-1.5.1.dist-info/top_level.txt +1 -0
- hydroserverpy-1.5.1.dist-info/zip-safe +1 -0
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import uuid
|
|
2
|
+
from typing import Optional, ClassVar, TYPE_CHECKING
|
|
3
|
+
from pydantic import Field
|
|
4
|
+
from ..base import HydroServerBaseModel
|
|
5
|
+
|
|
6
|
+
if TYPE_CHECKING:
|
|
7
|
+
from hydroserverpy import HydroServer
|
|
8
|
+
from hydroserverpy.api.models import Workspace
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class ProcessingLevel(HydroServerBaseModel):
    """A HydroServer processing level, describing the QA/QC state of data."""

    code: str = Field(..., max_length=255)
    definition: Optional[str] = None
    explanation: Optional[str] = None
    workspace_id: Optional[uuid.UUID] = None

    _editable_fields: ClassVar[set[str]] = {"code", "definition", "explanation"}

    def __init__(self, client: "HydroServer", **data):
        super().__init__(client=client, service=client.processinglevels, **data)

        # Resolved lazily the first time the ``workspace`` property is read.
        self._workspace = None

    @classmethod
    def get_route(cls):
        return "processing-levels"

    @property
    def workspace(self) -> Optional["Workspace"]:
        """The workspace this processing level belongs to."""

        if self._workspace is not None:
            return self._workspace
        if self.workspace_id:
            self._workspace = self.client.workspaces.get(uid=self.workspace_id)
        return self._workspace
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import uuid
|
|
2
|
+
from typing import Optional, ClassVar, TYPE_CHECKING
|
|
3
|
+
from pydantic import Field
|
|
4
|
+
from ..base import HydroServerBaseModel
|
|
5
|
+
|
|
6
|
+
if TYPE_CHECKING:
|
|
7
|
+
from hydroserverpy import HydroServer
|
|
8
|
+
from hydroserverpy.api.models import Workspace
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class ResultQualifier(HydroServerBaseModel):
    """A HydroServer result qualifier, annotating individual observations."""

    code: str = Field(..., max_length=255)
    description: str
    workspace_id: Optional[uuid.UUID] = None

    _editable_fields: ClassVar[set[str]] = {"code", "description"}

    def __init__(self, client: "HydroServer", **data):
        super().__init__(client=client, service=client.resultqualifiers, **data)

        # Resolved lazily the first time the ``workspace`` property is read.
        self._workspace = None

    @classmethod
    def get_route(cls):
        return "result-qualifiers"

    @property
    def workspace(self) -> Optional["Workspace"]:
        """The workspace this result qualifier belongs to."""

        if self._workspace is not None:
            return self._workspace
        if self.workspace_id:
            self._workspace = self.client.workspaces.get(uid=self.workspace_id)
        return self._workspace
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
import uuid
|
|
2
|
+
from typing import Optional, ClassVar, TYPE_CHECKING
|
|
3
|
+
from pydantic import Field
|
|
4
|
+
from ..base import HydroServerBaseModel
|
|
5
|
+
|
|
6
|
+
if TYPE_CHECKING:
|
|
7
|
+
from hydroserverpy import HydroServer
|
|
8
|
+
from hydroserverpy.api.models import Workspace
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class Sensor(HydroServerBaseModel):
    """A HydroServer sensor, describing how observations are produced."""

    name: str = Field(..., max_length=255)
    description: str
    encoding_type: str = Field(..., max_length=255)
    manufacturer: Optional[str] = Field(None, max_length=255)
    sensor_model: Optional[str] = Field(None, max_length=255, alias="model")
    sensor_model_link: Optional[str] = Field(None, max_length=500, alias="modelLink")
    method_type: str = Field(..., max_length=100)
    method_link: Optional[str] = Field(None, max_length=500)
    method_code: Optional[str] = Field(None, max_length=50)
    workspace_id: Optional[uuid.UUID] = None

    _editable_fields: ClassVar[set[str]] = {
        "name",
        "description",
        "encoding_type",
        "manufacturer",
        "sensor_model",
        "sensor_model_link",
        "method_type",
        "method_link",
        "method_code",
    }

    def __init__(self, client: "HydroServer", **data):
        super().__init__(client=client, service=client.sensors, **data)

        # Resolved lazily the first time the ``workspace`` property is read.
        self._workspace = None

    @classmethod
    def get_route(cls):
        return "sensors"

    @property
    def workspace(self) -> Optional["Workspace"]:
        """The workspace this sensor belongs to."""

        if self._workspace is not None:
            return self._workspace
        if self.workspace_id:
            self._workspace = self.client.workspaces.get(uid=self.workspace_id)
        return self._workspace
|
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
import uuid
|
|
2
|
+
from typing import Optional, ClassVar, List, Dict, IO, TYPE_CHECKING
|
|
3
|
+
from pydantic import (
|
|
4
|
+
Field,
|
|
5
|
+
AliasPath,
|
|
6
|
+
AliasChoices,
|
|
7
|
+
AnyHttpUrl,
|
|
8
|
+
field_validator
|
|
9
|
+
)
|
|
10
|
+
from ..base import HydroServerBaseModel
|
|
11
|
+
|
|
12
|
+
if TYPE_CHECKING:
|
|
13
|
+
from hydroserverpy import HydroServer
|
|
14
|
+
from hydroserverpy.api.models import Workspace, Datastream
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class Thing(HydroServerBaseModel):
    """A HydroServer thing (monitoring site) with its location, tags, and photos."""

    name: str = Field(..., max_length=200)
    description: str
    sampling_feature_type: str = Field(..., max_length=200)
    sampling_feature_code: str = Field(..., max_length=200)
    site_type: str = Field(..., max_length=200)
    data_disclaimer: Optional[str] = None
    is_private: bool
    latitude: float = Field(..., ge=-90, le=90, validation_alias=AliasPath("location", "latitude"))
    longitude: float = Field(..., ge=-180, le=180, validation_alias=AliasPath("location", "longitude"))
    elevation_m: Optional[float] = Field(
        None, ge=-99999, le=99999, alias="elevation_m", validation_alias=AliasPath("location", "elevation_m")
    )
    elevation_datum: Optional[str] = Field(
        None, max_length=255, validation_alias=AliasChoices("elevationDatum", AliasPath("location", "elevationDatum"))
    )
    state: Optional[str] = Field(None, max_length=200, validation_alias=AliasPath("location", "state"))
    county: Optional[str] = Field(None, max_length=200, validation_alias=AliasPath("location", "county"))
    country: Optional[str] = Field(None, max_length=2, validation_alias=AliasPath("location", "country"))
    tags: Dict[str, str]
    photos: Dict[str, AnyHttpUrl]
    workspace_id: uuid.UUID

    _editable_fields: ClassVar[set[str]] = {
        "name", "description", "sampling_feature_type", "sampling_feature_code", "site_type", "data_disclaimer",
        "is_private", "latitude", "longitude", "elevation_m", "elevation_datum", "state", "county", "country"
    }

    def __init__(self, client: "HydroServer", **data):
        super().__init__(client=client, service=client.things, **data)

        # Both resolved lazily on first property access.
        self._workspace = None
        self._datastreams = None

    @classmethod
    def get_route(cls):
        return "things"

    @property
    def workspace(self) -> "Workspace":
        """The workspace this thing belongs to."""

        if self._workspace is None:
            self._workspace = self.client.workspaces.get(uid=self.workspace_id)
        return self._workspace

    @property
    def datastreams(self) -> List["Datastream"]:
        """The datastreams collected at this thing."""

        if self._datastreams is None:
            self._datastreams = self.client.datastreams.list(thing=self.uid, fetch_all=True).items
        return self._datastreams

    @field_validator("tags", mode="before")
    def transform_tags(cls, v):
        # Server responses deliver tags as a list of {key, value} records;
        # collapse them into a flat mapping. Anything else passes through.
        if not isinstance(v, list):
            return v
        return {
            entry["key"]: entry["value"]
            for entry in v
            if "key" in entry and "value" in entry
        }

    @field_validator("photos", mode="before")
    def transform_photos(cls, v):
        # Server responses deliver photos as a list of {name, link} records;
        # collapse them into a flat mapping. Anything else passes through.
        if not isinstance(v, list):
            return v
        return {
            entry["name"]: entry["link"]
            for entry in v
            if "name" in entry and "link" in entry
        }

    def add_tag(self, key: str, value: str):
        """Add a tag to this thing."""

        self.client.things.add_tag(uid=self.uid, key=key, value=value)
        self.tags[key] = value

    def update_tag(self, key: str, value: str):
        """Edit a tag of this thing."""

        self.client.things.update_tag(uid=self.uid, key=key, value=value)
        self.tags[key] = value

    def delete_tag(self, key: str):
        """Delete a tag of this thing."""

        # Raises KeyError if the tag is not present locally.
        self.client.things.delete_tag(uid=self.uid, key=key, value=self.tags[key])
        del self.tags[key]

    def add_photo(self, file: IO[bytes]):
        """Add a photo of this thing."""

        uploaded = self.client.things.add_photo(uid=self.uid, file=file)
        self.photos[uploaded["name"]] = uploaded["link"]

    def delete_photo(self, name: str):
        """Delete a photo of this thing."""

        self.client.things.delete_photo(uid=self.uid, name=name)
        del self.photos[name]
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import uuid
|
|
2
|
+
from typing import Optional, ClassVar, TYPE_CHECKING
|
|
3
|
+
from pydantic import Field
|
|
4
|
+
from ..base import HydroServerBaseModel
|
|
5
|
+
|
|
6
|
+
if TYPE_CHECKING:
|
|
7
|
+
from hydroserverpy import HydroServer
|
|
8
|
+
from hydroserverpy.api.models import Workspace
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class Unit(HydroServerBaseModel):
    """A HydroServer measurement unit."""

    name: str = Field(..., max_length=255)
    symbol: str = Field(..., max_length=255)
    definition: str
    unit_type: str = Field(..., max_length=255, alias="type")
    workspace_id: Optional[uuid.UUID] = None

    _editable_fields: ClassVar[set[str]] = {"name", "symbol", "definition", "unit_type"}

    def __init__(self, client: "HydroServer", **data):
        super().__init__(client=client, service=client.units, **data)

        # Resolved lazily the first time the ``workspace`` property is read.
        self._workspace = None

    @classmethod
    def get_route(cls):
        return "units"

    @property
    def workspace(self) -> Optional["Workspace"]:
        """The workspace this unit belongs to."""

        if self._workspace is not None:
            return self._workspace
        if self.workspace_id:
            self._workspace = self.client.workspaces.get(uid=self.workspace_id)
        return self._workspace
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
from .iam.workspace import WorkspaceService
|
|
2
|
+
from .iam.role import RoleService
|
|
3
|
+
from .sta.thing import ThingService
|
|
4
|
+
from .sta.observed_property import ObservedPropertyService
|
|
5
|
+
from .sta.unit import UnitService
|
|
6
|
+
from .sta.processing_level import ProcessingLevelService
|
|
7
|
+
from .sta.result_qualifier import ResultQualifierService
|
|
8
|
+
from .sta.sensor import SensorService
|
|
9
|
+
from .sta.datastream import DatastreamService
|
|
10
|
+
from .etl.orchestration_system import OrchestrationSystemService
|
|
11
|
+
from .etl.data_source import DataSourceService
|
|
12
|
+
from .etl.data_archive import DataArchiveService
|
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import uuid
|
|
3
|
+
from typing import TYPE_CHECKING, Type, List, Union
|
|
4
|
+
from datetime import datetime
|
|
5
|
+
from hydroserverpy.api.models.base import HydroServerBaseModel, HydroServerCollection
|
|
6
|
+
from hydroserverpy.api.utils import order_by_to_camel
|
|
7
|
+
|
|
8
|
+
if TYPE_CHECKING:
|
|
9
|
+
from hydroserverpy import HydroServer
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class HydroServerBaseService:
    """Shared CRUD plumbing for HydroServer API resource services.

    Subclasses assign ``model`` to the resource model class; list/get/create/
    update/delete then operate against the route reported by
    ``model.get_route()``. ``...`` (Ellipsis) is used throughout as the
    "parameter not provided" sentinel so that ``None`` remains a meaningful
    value.
    """

    # Resource model class managed by this service (set by subclasses).
    # Annotation is quoted so the class can be imported without the model
    # types resolved at module level.
    model: "Type[HydroServerBaseModel]"

    def __init__(self, client: "HydroServer") -> None:
        self.client = client

    def _route_path(self, uid: Union[uuid.UUID, str, None] = None) -> str:
        """API path for this service's route, optionally addressing one resource."""
        path = f"/{self.client.base_route}/{self.model.get_route()}"
        return path if uid is None else f"{path}/{str(uid)}"

    def _to_model(self, response: dict):
        """Build a model instance from a decoded JSON response body."""
        return self.model(
            client=self.client, uid=uuid.UUID(str(response.pop("id"))), **response
        )

    def list(
        self,
        page: int = ...,
        page_size: int = ...,
        order_by: List[str] = ...,
        fetch_all: bool = False,
        **kwargs
    ):
        """Fetch a page (or, with ``fetch_all``, every page) of resources.

        Unset (``...``) parameters are omitted from the query string; ``None``
        values are sent as the literal string "null" so the server can filter
        on null fields.
        """
        kwargs = {
            k: v for k, v in kwargs.items() if v is not ...
        }
        params = kwargs.copy()
        params.update({
            "page": page,
            "page_size": page_size,
            "order_by": [order_by_to_camel(order) for order in order_by] if order_by is not ... else order_by
        })
        params = {
            k: ("null" if v is None else v)
            for k, v in params.items()
            if v is not ...
        }

        response = self.client.request("get", self._route_path(), params=params)
        collection = HydroServerCollection(
            model=self.model,
            client=self.client,
            service=self,
            response=response,
            order_by=params.get("order_by"),
            filters={
                # Expose filters under their model attribute names
                # (strip a trailing "_id" suffix).
                (k[:-3] if k.endswith("_id") else k): v
                for k, v in kwargs.items()
            }
        )
        if fetch_all is True:
            collection = collection.fetch_all()

        return collection

    def get(
        self,
        uid: Union[uuid.UUID, str]
    ):
        """Fetch a single resource by its unique ID."""
        response = self.client.request("get", self._route_path(uid)).json()

        return self._to_model(response)

    def create(self, **kwargs):
        """Create a new resource from the given field values."""
        headers = {"Content-type": "application/json"}
        response = self.client.request(
            "post", self._route_path(), headers=headers,
            data=json.dumps(kwargs, default=self.default_serializer)
        ).json()

        return self._to_model(response)

    def update(
        self,
        uid: Union[uuid.UUID, str],
        **kwargs
    ):
        """Patch a resource; unset (``...``) values and empty sub-dicts are pruned."""
        headers = {"Content-type": "application/json"}
        body = self.prune_unset(kwargs) or {}
        response = self.client.request(
            "patch", self._route_path(uid), headers=headers,
            data=json.dumps(body, default=self.default_serializer)
        ).json()

        return self._to_model(response)

    def delete(
        self,
        uid: Union[uuid.UUID, str]
    ) -> None:
        """Delete a resource by its unique ID."""
        self.client.request("delete", self._route_path(uid))

    @staticmethod
    def default_serializer(obj):
        """JSON fallback serializer: datetimes become ISO-8601 strings.

        Raises TypeError for any other non-serializable type.
        """
        if isinstance(obj, datetime):
            return obj.isoformat()
        raise TypeError(f"Type {type(obj)} not serializable")

    def prune_unset(self, obj):
        """Recursively drop unset (``...``) keys and empty sub-dicts.

        Returns ``None`` for a dict that prunes to empty so the parent level
        drops it as well; non-dict values pass through unchanged. (Note that
        explicit ``None`` values are also dropped.)
        """
        if not isinstance(obj, dict):
            return obj
        cleaned = {}
        for k, v in obj.items():
            if v is ...:
                continue
            # Prune each child exactly once (previously evaluated twice:
            # once in the filter condition and again for the stored value).
            pruned = self.prune_unset(v)
            if pruned is not None:
                cleaned[k] = pruned
        return cleaned if cleaned else None
|
|
File without changes
|
|
@@ -0,0 +1,166 @@
|
|
|
1
|
+
from typing import Optional, Union, List, Literal, TYPE_CHECKING
|
|
2
|
+
from uuid import UUID
|
|
3
|
+
from datetime import datetime
|
|
4
|
+
from hydroserverpy.api.models import DataArchive
|
|
5
|
+
from hydroserverpy.api.utils import normalize_uuid
|
|
6
|
+
from ..base import HydroServerBaseService
|
|
7
|
+
|
|
8
|
+
if TYPE_CHECKING:
|
|
9
|
+
from hydroserverpy import HydroServer
|
|
10
|
+
from hydroserverpy.api.models import Workspace, OrchestrationSystem, Datastream
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class DataArchiveService(HydroServerBaseService):
    """CRUD operations and datastream linking for data archives."""

    def __init__(self, client: "HydroServer"):
        self.model = DataArchive
        super().__init__(client)

    def list(
        self,
        page: int = ...,
        page_size: int = ...,
        order_by: List[str] = ...,
        workspace: Optional[Union["Workspace", UUID, str]] = ...,
        datastream: Optional[Union["Datastream", UUID, str]] = ...,
        orchestration_system: Optional[Union["OrchestrationSystem", UUID, str]] = ...,
        fetch_all: bool = False,
    ) -> List["DataArchive"]:
        """Fetch a collection of data archives, optionally filtered by
        workspace, datastream, and/or orchestration system."""

        return super().list(
            page=page,
            page_size=page_size,
            order_by=order_by,
            workspace_id=normalize_uuid(workspace),
            datastream_id=normalize_uuid(datastream),
            orchestration_system_id=normalize_uuid(orchestration_system),
            fetch_all=fetch_all,
        )

    def create(
        self,
        name: str,
        workspace: Union["Workspace", UUID, str],
        orchestration_system: Union["OrchestrationSystem", UUID, str],
        settings: Optional[dict] = None,
        interval: Optional[int] = None,
        interval_units: Optional[Literal["minutes", "hours", "days"]] = None,
        crontab: Optional[str] = None,
        start_time: Optional[datetime] = None,
        end_time: Optional[datetime] = None,
        last_run_successful: Optional[bool] = None,
        last_run_message: Optional[str] = None,
        last_run: Optional[datetime] = None,
        next_run: Optional[datetime] = None,
        paused: bool = False,
        datastreams: Optional[List[Union["Datastream", UUID, str]]] = None,
    ) -> "DataArchive":
        """Create a new data archive with its schedule, status, and linked
        datastreams."""

        body = {
            "name": name,
            "workspaceId": normalize_uuid(workspace),
            "orchestrationSystemId": normalize_uuid(orchestration_system),
            "settings": settings,
            "schedule": {
                "interval": interval,
                "intervalUnits": interval_units,
                "crontab": crontab,
                "startTime": start_time,
                "endTime": end_time,
            },
            "status": {
                "lastRunSuccessful": last_run_successful,
                "lastRunMessage": last_run_message,
                "lastRun": last_run,
                "nextRun": next_run,
                "paused": paused,
            },
            "datastreamIds": (
                [
                    normalize_uuid(datastream)
                    for datastream in datastreams
                ]
                if datastreams
                else []
            ),
        }

        return super().create(**body)

    def update(
        self,
        uid: Union[UUID, str],
        name: str = ...,
        orchestration_system: Union["OrchestrationSystem", UUID, str] = ...,
        settings: Optional[dict] = ...,
        interval: Optional[int] = ...,
        interval_units: Optional[Literal["minutes", "hours", "days"]] = ...,
        crontab: Optional[str] = ...,
        start_time: Optional[datetime] = ...,
        end_time: Optional[datetime] = ...,
        last_run_successful: Optional[bool] = ...,
        last_run_message: Optional[str] = ...,
        last_run: Optional[datetime] = ...,
        next_run: Optional[datetime] = ...,
        paused: bool = ...,
    ) -> "DataArchive":
        """Update a data archive. Unset (``...``) parameters are left
        unchanged on the server."""

        # Build the nested "status" patch from only the provided fields;
        # mark the whole group unset if the caller touched none of them.
        status_body = {
            k: v
            for k, v in {
                "lastRunSuccessful": last_run_successful,
                "lastRunMessage": last_run_message,
                "lastRun": last_run,
                "nextRun": next_run,
                "paused": paused,
            }.items()
            if v is not ...
        }
        status_body = status_body if status_body else ...

        # Same treatment for the nested "schedule" patch.
        schedule_body = {
            k: v
            for k, v in {
                "interval": interval,
                "intervalUnits": interval_units,
                "crontab": crontab,
                "startTime": start_time,
                "endTime": end_time,
            }.items()
            if v is not ...
        }
        schedule_body = schedule_body if schedule_body else ...

        body = {
            k: v
            for k, v in {
                "name": name,
                # Use normalize_uuid for consistency with create()/list();
                # the previous getattr() passed a raw UUID through, which
                # default_serializer (datetime-only) could not serialize.
                "orchestrationSystemId": normalize_uuid(orchestration_system),
                "settings": settings,
                "schedule": schedule_body,
                "status": status_body,
            }.items()
            if v is not ...
        }

        return super().update(uid=str(uid), **body)

    def add_datastream(
        self, uid: Union[UUID, str], datastream: Union["Datastream", UUID, str]
    ) -> None:
        """Add a datastream to this data archive."""

        path = f"/{self.client.base_route}/{self.model.get_route()}/{str(uid)}/datastreams/{normalize_uuid(datastream)}"
        self.client.request("put", path)

    def remove_datastream(
        self, uid: Union[UUID, str], datastream: Union["Datastream", UUID, str]
    ) -> None:
        """Remove a datastream from this data archive."""

        path = f"/{self.client.base_route}/{self.model.get_route()}/{str(uid)}/datastreams/{normalize_uuid(datastream)}"
        self.client.request("delete", path)