hydroserverpy 0.2.3__py3-none-any.whl → 0.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of hydroserverpy might be problematic.
- hydroserverpy/__init__.py +6 -15
- hydroserverpy/core/endpoints/__init__.py +9 -0
- hydroserverpy/core/endpoints/base.py +133 -0
- hydroserverpy/core/endpoints/data_loaders.py +92 -0
- hydroserverpy/core/endpoints/data_sources.py +92 -0
- hydroserverpy/core/endpoints/datastreams.py +188 -0
- hydroserverpy/core/endpoints/observed_properties.py +93 -0
- hydroserverpy/core/endpoints/processing_levels.py +93 -0
- hydroserverpy/core/endpoints/result_qualifiers.py +93 -0
- hydroserverpy/core/endpoints/sensors.py +93 -0
- hydroserverpy/core/endpoints/things.py +240 -0
- hydroserverpy/core/endpoints/units.py +93 -0
- hydroserverpy/{components → core/schemas}/__init__.py +1 -2
- hydroserverpy/core/schemas/base.py +117 -0
- hydroserverpy/core/schemas/data_loaders.py +71 -0
- hydroserverpy/core/schemas/data_sources.py +206 -0
- hydroserverpy/core/schemas/datastreams.py +299 -0
- hydroserverpy/core/schemas/observed_properties.py +35 -0
- hydroserverpy/core/schemas/processing_levels.py +27 -0
- hydroserverpy/core/schemas/result_qualifiers.py +23 -0
- hydroserverpy/core/schemas/sensors.py +53 -0
- hydroserverpy/core/schemas/things.py +309 -0
- hydroserverpy/core/schemas/units.py +30 -0
- hydroserverpy/core/service.py +186 -0
- hydroserverpy/etl/__init__.py +0 -0
- hydroserverpy/{etl.py → etl/service.py} +32 -47
- hydroserverpy/quality/__init__.py +1 -0
- hydroserverpy/quality/service.py +391 -0
- {hydroserverpy-0.2.3.dist-info → hydroserverpy-0.3.0.dist-info}/METADATA +6 -3
- hydroserverpy-0.3.0.dist-info/RECORD +36 -0
- {hydroserverpy-0.2.3.dist-info → hydroserverpy-0.3.0.dist-info}/WHEEL +1 -1
- hydroserverpy/components/data_loaders.py +0 -67
- hydroserverpy/components/data_sources.py +0 -98
- hydroserverpy/components/datastreams.py +0 -47
- hydroserverpy/components/observed_properties.py +0 -48
- hydroserverpy/components/processing_levels.py +0 -48
- hydroserverpy/components/result_qualifiers.py +0 -48
- hydroserverpy/components/sensors.py +0 -48
- hydroserverpy/components/things.py +0 -48
- hydroserverpy/components/units.py +0 -48
- hydroserverpy/components/users.py +0 -28
- hydroserverpy/main.py +0 -62
- hydroserverpy/models.py +0 -218
- hydroserverpy/schemas/data_loaders.py +0 -27
- hydroserverpy/schemas/data_sources.py +0 -58
- hydroserverpy/schemas/datastreams.py +0 -56
- hydroserverpy/schemas/observed_properties.py +0 -33
- hydroserverpy/schemas/processing_levels.py +0 -33
- hydroserverpy/schemas/result_qualifiers.py +0 -32
- hydroserverpy/schemas/sensors.py +0 -39
- hydroserverpy/schemas/things.py +0 -108
- hydroserverpy/schemas/units.py +0 -32
- hydroserverpy/schemas/users.py +0 -28
- hydroserverpy/service.py +0 -170
- hydroserverpy/utils.py +0 -37
- hydroserverpy-0.2.3.dist-info/RECORD +0 -35
- /hydroserverpy/{schemas → core}/__init__.py +0 -0
- /hydroserverpy/{exceptions.py → etl/exceptions.py} +0 -0
- {hydroserverpy-0.2.3.dist-info → hydroserverpy-0.3.0.dist-info}/LICENSE +0 -0
- {hydroserverpy-0.2.3.dist-info → hydroserverpy-0.3.0.dist-info}/top_level.txt +0 -0
- {hydroserverpy-0.2.3.dist-info → hydroserverpy-0.3.0.dist-info}/zip-safe +0 -0
hydroserverpy/models.py
DELETED
@@ -1,218 +0,0 @@
-import yaml
-import re
-import simplejson as json
-from uuid import UUID
-from datetime import datetime
-from crontab import CronTab
-from pydantic import BaseModel, validator, root_validator, conint, AnyHttpUrl, Field
-from typing import List, Optional, Literal, Union
-
-
-class HydroLoaderDatastream(BaseModel):
-    id: UUID
-    value_count: Optional[int] = Field(None, alias='valueCount')
-    result_time: Optional[datetime] = Field(None, alias='resultTime')
-    phenomenon_time: Optional[datetime] = Field(None, alias='phenomenonTime')
-    file_row_start_index: Optional[int]
-    file_result_end_time: Optional[datetime]
-    chunk_result_start_time: Optional[datetime]
-    chunk_result_end_time: Optional[datetime]
-
-
-class HydroLoaderObservationsResponse(BaseModel):
-    datastream_id: str
-    request_url: str
-    status_code: int
-    reason: str
-    chunk_start_time: str
-    chunk_end_time: str
-
-
-class HydroLoaderConfSchedule(BaseModel):
-    crontab: Optional[str]
-    interval_units: Optional[Literal['minutes', 'hours', 'days', 'weeks', 'months']]
-    interval: Optional[conint(gt=0)]
-    start_time: Optional[datetime]
-    end_time: Optional[datetime]
-    paused: Optional[bool]
-
-    @root_validator(pre=True)
-    def check_crontab_or_interval(cls, values):
-        """
-        The check_crontab_or_interval function is a validator that ensures that the HydroLoaderConfSchedule model
-        does not include both a crontab and an interval. It also ensures that if an interval is
-        included, it includes both an interval and its units.
-
-        :param cls: Pass the class of the model to be created
-        :param values: Pass the values of the fields in a form to
-        :return: The values dictionary
-        """
-
-        if values.get('crontab') and (values.get('interval_units') or values.get('interval')):
-            raise ValueError('Schedule can include either a crontab or an interval, not both.')
-
-        if (
-            values.get('interval_units') and not values.get('interval')
-        ) or (
-            not values.get('interval_units') and values.get('interval')
-        ):
-            raise ValueError('Interval must include both an interval and interval_units.')
-
-        return values
-
-    @validator('crontab')
-    def check_valid_crontab(cls, v):
-        """
-        The check_valid_crontab function is a validator that uses the CronTab library to check if the inputted
-        crontab string is valid. If it's not, an exception will be raised.
-
-        :param cls: Pass the class to the function
-        :param v: Pass the value of the field to be validated
-        :return: The crontab string
-        """
-
-        if v is not None:
-            CronTab(v)
-
-        return v
-
-
-class HydroLoaderConfFileAccess(BaseModel):
-    path: Optional[str]
-    url: Optional[AnyHttpUrl]
-    header_row: Optional[conint(gt=0)] = None
-    data_start_row: Optional[conint(gt=0)] = 1
-    delimiter: Optional[str] = ','
-    quote_char: Optional[str] = '"'
-
-    @root_validator(pre=True)
-    def check_path_or_url(cls, values):
-        """
-        The check_path_or_url function is a validator that takes in the values of the HydroLoaderConfFileAccess model
-        and checks to see if there is either a path or url. If there isn't, it raises an error.
-
-        :param cls: Pass the class of the object being created
-        :param values: Pass in the values of the path and url parameters
-        :return: The values dictionary
-        """
-
-        if bool(values.get('path')) == bool(values.get('url')):
-            raise ValueError('File access must include either a path or a URL.')
-
-        return values
-
-    @root_validator(pre=True)
-    def check_header_and_data_rows(cls, values):
-        """
-        The check_header_and_data_rows function is a class method that takes in the values of the header_row and
-        data_start_row and ensures the header row is not greater than the data start row. If it is, it raises an error.
-
-        :param cls: Refer to the class that is being created
-        :param values: Get the values of the header_row and data_start_row
-        :return: The values dictionary
-        """
-
-        if values.get('header_row') is not None and values.get('header_row') >= values.get('data_start_row'):
-            raise ValueError('Header row cannot occur after data start row.')
-
-        return values
-
-    @validator('delimiter')
-    def convert_delimiters(cls, v):
-        """
-        The convert_delimiters function is a validator that takes in a string and replaces all instances of '\\t' with
-        '\t'. This function is used to convert the delimiters from the input file into tab-delimited format.
-
-        :param cls: Pass the class object to the function
-        :param v: Pass the value of the escaped delimiter into the function
-        :return: An unescaped delimiter string
-        """
-
-        return v.replace('\\t', '\t')
-
-
-class HydroLoaderConfFileTimestamp(BaseModel):
-    column: Union[conint(gt=0), str]
-    format: Optional[str] = '%Y-%m-%dT%H:%M:%S%Z'
-    offset: Optional[str] = '+0000'
-
-    @validator('format')
-    def check_valid_strftime(cls, v):
-        """
-        The check_valid_strftime function is a validator that takes in a datetime strf string and checks to see if it
-        is valid. It uses the strftime function from the datetime module, which returns an error if the string passed
-        to it is not valid.
-
-        :param cls: Pass the class to which the validator is attached
-        :param v: Pass the value of the argument to be checked
-        :return: The value of the string
-        """
-
-        datetime.now().strftime(v)
-
-        return v
-
-    @validator('offset', always=True)
-    def parse_tzinfo(cls, v):
-        """
-        The parse_tzinfo function is a validator for the offset field. It takes in a string and returns an instance
-        of datetime.tzinfo, which is used by Python's datetime module to represent timezone information. The function
-        first checks that the input string is a valid UTC offset formatted like "+0000". If it is not valid, then it
-        raises an exception.
-
-        :param cls: Pass in the class of the object being created
-        :param v: Pass the timezone offset
-        :return: A tzinfo object
-        """
-
-        tzinfo_pattern = r'^[+-](0[0-9]|1[0-4])[0-5][0-9]$'
-        if v is not None and re.match(tzinfo_pattern, v) is None:
-            raise ValueError('The offset must be a valid UTC timezone offset formatted such as "+0000".')
-
-        return v  # datetime.strptime(v, '%z').tzinfo if v is not None else None
-
-
-class HydroLoaderConfFileDatastream(BaseModel):
-    column: Union[conint(gt=0), str]
-    id: UUID
-
-
-class HydroLoaderConf(BaseModel):
-    schedule: Optional[HydroLoaderConfSchedule]
-    file_access: HydroLoaderConfFileAccess
-    file_timestamp: HydroLoaderConfFileTimestamp
-    datastreams: List[HydroLoaderConfFileDatastream]
-
-    @root_validator()
-    def check_header_and_fields(cls, values):
-        """"""
-
-        if not values.get('file_access') or not values['file_access'].header_row:
-            if values.get('file_timestamp') and not isinstance(values['file_timestamp'].column, int):
-                raise ValueError('If no header row is defined, all column identifiers must be integers.')
-            if values.get('datastreams'):
-                for datastream in values['datastreams']:
-                    if not isinstance(datastream.column, int):
-                        raise ValueError('If no header row is defined, all column identifiers must be integers.')
-
-        return values
-
-    def to_yaml(
-        self,
-        file_path: str
-    ):
-        """
-        The to_yaml function takes a file path and writes the configuration to that file in YAML format.
-
-        :param self: Refer to the current instance of the class
-        :param file_path: str: Specify the file path to save the configuration
-        :return: A yaml file
-        """
-
-        with open(file_path, 'w') as conf_file:
-            yaml.dump(
                json.loads(self.json()),
-                conf_file,
-                sort_keys=False,
-                default_flow_style=False
-            )
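For orientation only (this example is not part of the diff): the removed HydroLoaderConf model above could be built from plain dictionaries and written back out with its to_yaml helper. The sketch below assumes pydantic v1 plus the PyYAML, simplejson, and crontab dependencies the module imports; the file path, column names, and UUID are hypothetical.

    # Illustrative sketch of the removed 0.2.3 HydroLoaderConf models (pydantic v1 assumed).
    from hydroserverpy.models import HydroLoaderConf  # import path as it existed in 0.2.3

    conf = HydroLoaderConf(
        schedule={'interval': 15, 'interval_units': 'minutes'},  # interval or crontab, never both
        file_access={'path': 'data/site_a.csv', 'header_row': 1, 'data_start_row': 2},
        file_timestamp={'column': 'timestamp', 'format': '%Y-%m-%dT%H:%M:%S%z'},
        datastreams=[{'column': 'water_temperature', 'id': '2f16c5c6-9e2a-4c1e-8a6d-3f1b2b4a8c9d'}],  # hypothetical UUID
    )

    # String column names validate here because header_row is set (see check_header_and_fields above).
    conf.to_yaml('loader_conf.yaml')  # serializes the validated config to YAML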
hydroserverpy/schemas/data_loaders.py
DELETED
@@ -1,27 +0,0 @@
-from pydantic import BaseModel
-from uuid import UUID
-from hydroserverpy.utils import allow_partial
-
-
-class DataLoaderID(BaseModel):
-    id: UUID
-
-
-class DataLoaderFields(BaseModel):
-    name: str
-
-
-class DataLoaderGetResponse(DataLoaderFields, DataLoaderID):
-    pass
-
-    class Config:
-        allow_population_by_field_name = True
-
-
-class DataLoaderPostBody(DataLoaderFields):
-    pass
-
-
-@allow_partial
-class DataLoaderPatchBody(DataLoaderFields):
-    pass
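The @allow_partial decorator on DataLoaderPatchBody above (and on the other *PatchBody schemas in the deleted files that follow) came from the removed hydroserverpy/utils.py, whose body is not shown in this diff. As an assumption about its behavior rather than the actual utils.py code, a pydantic v1 decorator with that effect could look like the sketch below: it marks every field as not required so a PATCH body may carry any subset of fields.

    # Hypothetical allow_partial-style decorator (pydantic v1); the real one lived in the
    # removed hydroserverpy/utils.py, so treat this as an assumption about its behavior.
    from pydantic import BaseModel

    def allow_partial(model_cls):
        for field in model_cls.__fields__.values():
            field.required = False  # omitted fields fall back to their default (None)
        return model_cls

    class DataLoaderFields(BaseModel):
        name: str

    @allow_partial
    class DataLoaderPatchBody(DataLoaderFields):
        pass

    DataLoaderPatchBody()            # valid: empty partial update, name defaults to None
    DataLoaderPatchBody(name='ETL')  # valid: partial update carrying only a name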
hydroserverpy/schemas/data_sources.py
DELETED
@@ -1,58 +0,0 @@
-from pydantic import BaseModel, Field
-from typing import Optional, Literal, Union
-from pydantic import AnyHttpUrl, conint
-from datetime import datetime
-from uuid import UUID
-from hydroserverpy.utils import allow_partial
-
-
-class DataSourceID(BaseModel):
-    id: UUID
-
-
-class DataSourceFields(BaseModel):
-    name: str
-    path: Optional[str]
-    url: Optional[AnyHttpUrl]
-    header_row: Optional[conint(gt=0)] = Field(None, alias='headerRow')
-    data_start_row: Optional[conint(gt=0)] = Field(1, alias='dataStartRow')
-    delimiter: Optional[str] = ','
-    quote_char: Optional[str] = Field('"', alias='quoteChar')
-    interval: Optional[conint(gt=0)]
-    interval_units: Optional[Literal['minutes', 'hours', 'days', 'weeks', 'months']] = \
-        Field(None, alias='intervalUnits')
-    crontab: Optional[str]
-    start_time: Optional[datetime] = Field(None, alias='startTime')
-    end_time: Optional[datetime] = Field(None, alias='endTime')
-    paused: Optional[bool]
-    timestamp_column: Union[conint(gt=0), str] = Field(..., alias='timestampColumn')
-    timestamp_format: Optional[str] = Field('%Y-%m-%dT%H:%M:%S%Z', alias='timestampFormat')
-    timestamp_offset: Optional[str] = Field('+0000', alias='timestampOffset')
-    data_loader_id: UUID = Field(..., alias='dataLoaderId')
-    data_source_thru: Optional[datetime] = Field(None, alias='dataSourceThru')
-    last_sync_successful: Optional[bool] = Field(None, alias='lastSyncSuccessful')
-    last_sync_message: Optional[str] = Field(None, alias='lastSyncMessage')
-    last_synced: Optional[datetime] = Field(None, alias='lastSynced')
-    next_sync: Optional[datetime] = Field(None, alias='nextSync')
-
-
-class DataSourceGetResponse(DataSourceFields, DataSourceID):
-    pass
-
-    class Config:
-        allow_population_by_field_name = True
-
-
-class DataSourcePostBody(DataSourceFields):
-    pass
-
-    class Config:
-        allow_population_by_field_name = True
-
-
-@allow_partial
-class DataSourcePatchBody(DataSourceFields):
-    pass
-
-    class Config:
-        allow_population_by_field_name = True
hydroserverpy/schemas/datastreams.py
DELETED
@@ -1,56 +0,0 @@
-from pydantic import BaseModel, Field
-from typing import Union, Literal, Optional
-from uuid import UUID
-from datetime import datetime
-from hydroserverpy.utils import allow_partial
-
-
-class DatastreamID(BaseModel):
-    id: UUID
-
-
-class DatastreamFields(BaseModel):
-    name: Union[UUID, str]
-    description: str
-    observation_type: str = Field(..., alias='observationType')
-    sampled_medium: str = Field(..., alias='sampledMedium')
-    no_data_value: float = Field(..., alias='noDataValue')
-    aggregation_statistic: str = Field(..., alias='aggregationStatistic')
-    time_aggregation_interval: float = Field(..., alias='timeAggregationInterval')
-    status: str = None
-    result_type: str = Field(..., alias='resultType')
-    value_count: int = Field(None, alias='valueCount')
-    intended_time_spacing: float = Field(None, alias='intendedTimeSpacing')
-    phenomenon_begin_time: datetime = Field(None, alias='phenomenonBeginTime')
-    phenomenon_end_time: datetime = Field(None, alias='phenomenonEndTime')
-    result_begin_time: datetime = Field(None, alias='resultBeginTime')
-    result_end_time: datetime = Field(None, alias='resultEndTime')
-    data_source_id: UUID = Field(None, alias='dataSourceId')
-    data_source_column: str = Field(None, alias='dataSourceColumn')
-    is_visible: bool = Field(True, alias='isVisible')
-    thing_id: UUID = Field(..., alias='thingId')
-    sensor_id: UUID = Field(..., alias='sensorId')
-    observed_property_id: UUID = Field(..., alias='observedPropertyId')
-    processing_level_id: UUID = Field(..., alias='processingLevelId')
-    unit_id: UUID = Field(..., alias='unitId')
-    time_aggregation_interval_units: Literal['seconds', 'minutes', 'hours', 'days'] = \
-        Field(..., alias='timeAggregationIntervalUnits')
-    intended_time_spacing_units: Optional[Literal['seconds', 'minutes', 'hours', 'days']] = \
-        Field(None, alias='intendedTimeSpacingUnits')
-
-
-class DatastreamGetResponse(DatastreamFields, DatastreamID):
-
-    class Config:
-        allow_population_by_field_name = True
-
-
-class DatastreamPostBody(DatastreamFields):
-
-    class Config:
-        allow_population_by_field_name = True
-
-
-@allow_partial
-class DatastreamPatchBody(DatastreamFields):
-    thing_id: UUID = Field(..., alias='thingId')
hydroserverpy/schemas/observed_properties.py
DELETED
@@ -1,33 +0,0 @@
-from pydantic import BaseModel
-from uuid import UUID
-from typing import Optional
-from hydroserverpy.utils import allow_partial
-from hydroserverpy.schemas.users import UserFields
-
-
-class ObservedPropertyID(BaseModel):
-    id: UUID
-
-
-class ObservedPropertyFields(BaseModel):
-    name: str
-    definition: str
-    description: str = None
-    type: str = None
-    code: str = None
-
-
-class ObservedPropertyGetResponse(ObservedPropertyFields, ObservedPropertyID):
-    owner: Optional[str]
-
-    class Config:
-        allow_population_by_field_name = True
-
-
-class ObservedPropertyPostBody(ObservedPropertyFields):
-    pass
-
-
-@allow_partial
-class ObservedPropertyPatchBody(ObservedPropertyFields):
-    pass
hydroserverpy/schemas/processing_levels.py
DELETED
@@ -1,33 +0,0 @@
-from pydantic import BaseModel
-from uuid import UUID
-from typing import Optional
-from ..utils import allow_partial
-from ..schemas.users import UserFields
-
-
-class ProcessingLevelID(BaseModel):
-    id: UUID
-
-
-class ProcessingLevelFields(BaseModel):
-    code: str
-    definition: str = None
-    explanation: str = None
-
-
-class ProcessingLevelGetResponse(ProcessingLevelFields, ProcessingLevelID):
-    owner: Optional[str]
-
-    class Config:
-        allow_population_by_field_name = True
-
-
-class ProcessingLevelPostBody(ProcessingLevelFields):
-
-    class Config:
-        allow_population_by_field_name = True
-
-
-@allow_partial
-class ProcessingLevelPatchBody(ProcessingLevelFields):
-    pass
hydroserverpy/schemas/result_qualifiers.py
DELETED
@@ -1,32 +0,0 @@
-from pydantic import BaseModel
-from uuid import UUID
-from typing import Optional
-from ..utils import allow_partial
-from ..schemas.users import UserFields
-
-
-class ResultQualifierID(BaseModel):
-    id: UUID
-
-
-class ResultQualifierFields(BaseModel):
-    code: str
-    description: str
-
-
-class ResultQualifierGetResponse(ResultQualifierFields, ResultQualifierID):
-    owner: Optional[str]
-
-    class Config:
-        allow_population_by_field_name = True
-
-
-class ResultQualifierPostBody(ResultQualifierFields):
-
-    class Config:
-        allow_population_by_field_name = True
-
-
-@allow_partial
-class ResultQualifierPatchBody(ResultQualifierFields):
-    pass
hydroserverpy/schemas/sensors.py
DELETED
@@ -1,39 +0,0 @@
-from pydantic import BaseModel, Field
-from uuid import UUID
-from typing import Optional
-from hydroserverpy.utils import allow_partial
-from hydroserverpy.schemas.users import UserFields
-
-
-class SensorID(BaseModel):
-    id: UUID
-
-
-class SensorFields(BaseModel):
-    name: str
-    description: str
-    encoding_type: str = Field(alias="encodingType")
-    manufacturer: str = None
-    model: str = None
-    model_link: str = Field(None, alias='modelLink')
-    method_type: str = Field(alias='methodType')
-    method_link: str = Field(None, alias='methodLink')
-    method_code: str = Field(None, alias='methodCode')
-
-
-class SensorGetResponse(SensorFields, SensorID):
-    owner: Optional[str]
-
-    class Config:
-        allow_population_by_field_name = True
-
-
-class SensorPostBody(SensorFields):
-
-    class Config:
-        allow_population_by_field_name = True
-
-
-@allow_partial
-class SensorPatchBody(SensorFields):
-    pass
hydroserverpy/schemas/things.py
DELETED
@@ -1,108 +0,0 @@
-from pydantic import BaseModel, Field, root_validator
-from typing import List, Optional
-from uuid import UUID
-from hydroserverpy.utils import allow_partial
-from hydroserverpy.schemas.observed_properties import ObservedPropertyGetResponse
-from hydroserverpy.schemas.processing_levels import ProcessingLevelGetResponse
-from hydroserverpy.schemas.units import UnitGetResponse
-from hydroserverpy.schemas.sensors import SensorGetResponse
-
-
-class ThingID(BaseModel):
-    id: UUID
-
-
-class ThingFields(BaseModel):
-    name: str
-    description: str
-    sampling_feature_type: str = Field(alias='samplingFeatureType')
-    sampling_feature_code: str = Field(alias='samplingFeatureCode')
-    site_type: str = Field(alias='siteType')
-    data_disclaimer: str = Field(None, alias='dataDisclaimer')
-
-
-class LocationFields(BaseModel):
-    latitude: float
-    longitude: float
-    elevation_m: float = None
-    elevation_datum: str = Field(None, alias='elevationDatum')
-    state: str = None
-    county: str = None
-
-
-class OrganizationFields(BaseModel):
-    organization_name: Optional[str] = Field(None, alias='organizationName')
-
-
-class AssociationFields(BaseModel):
-    is_primary_owner: bool = Field(..., alias='isPrimaryOwner')
-
-
-class PersonFields(BaseModel):
-    first_name: str = Field(..., alias='firstName')
-    last_name: str = Field(..., alias='lastName')
-    email: str
-
-    class Config:
-        allow_population_by_field_name = True
-
-
-class OwnerFields(AssociationFields, OrganizationFields, PersonFields):
-    pass
-
-
-class ThingGetResponse(LocationFields, ThingFields, ThingID):
-    is_private: bool = Field(..., alias='isPrivate')
-    is_primary_owner: bool = Field(..., alias='isPrimaryOwner')
-    owns_thing: bool = Field(..., alias='ownsThing')
-    follows_thing: bool = Field(..., alias='followsThing')
-    owners: List[OwnerFields]
-
-    class Config:
-        allow_population_by_field_name = True
-
-
-class ThingPostBody(ThingFields, LocationFields):
-
-    class Config:
-        allow_population_by_field_name = True
-
-
-@allow_partial
-class ThingPatchBody(ThingFields, LocationFields):
-
-    class Config:
-        allow_population_by_field_name = True
-
-
-class ThingOwnershipPatchBody(BaseModel):
-    email: str
-    make_owner: Optional[bool] = Field(False, alias='makeOwner')
-    remove_owner: Optional[bool] = Field(False, alias='removeOwner')
-    transfer_primary: Optional[bool] = Field(False, alias='transferPrimary')
-
-    @root_validator()
-    def validate_only_one_method_allowed(cls, field_values):
-
-        assert [
-            field_values.get('make_owner', False),
-            field_values.get('remove_owner', False),
-            field_values.get('transfer_primary', False)
-        ].count(True) == 1, \
-            'You must perform one and only one action from among "makeOwner", "removeOwner", and "transferPrimary".'
-
-        return field_values
-
-
-class ThingPrivacyPatchBody(BaseModel):
-    is_private: bool = Field(..., alias="isPrivate")
-
-
-class ThingMetadataGetResponse(BaseModel):
-    units: List[UnitGetResponse]
-    sensors: List[SensorGetResponse]
-    processing_levels: List[ProcessingLevelGetResponse] = Field(..., alias='processingLevels')
-    observed_properties: List[ObservedPropertyGetResponse] = Field(..., alias='observedProperties')
-
-    class Config:
-        allow_population_by_field_name = True
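The removed ThingOwnershipPatchBody above enforced that each ownership PATCH requests exactly one action. A hedged usage example (pydantic v1, 0.2.3 import path; the email address is made up):

    # Illustrative only: the "one and only one action" rule from validate_only_one_method_allowed.
    from pydantic import ValidationError
    from hydroserverpy.schemas.things import ThingOwnershipPatchBody  # 0.2.3 path

    # Valid: exactly one of makeOwner / removeOwner / transferPrimary is set.
    ThingOwnershipPatchBody(email='owner@example.com', makeOwner=True)

    # Invalid: requesting two actions at once raises a ValidationError.
    try:
        ThingOwnershipPatchBody(email='owner@example.com', makeOwner=True, removeOwner=True)
    except ValidationError as error:
        print(error)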
hydroserverpy/schemas/units.py
DELETED
@@ -1,32 +0,0 @@
-from pydantic import BaseModel
-from uuid import UUID
-from typing import Optional
-from hydroserverpy.utils import allow_partial
-from hydroserverpy.schemas.users import UserFields
-
-
-class UnitID(BaseModel):
-    id: UUID
-
-
-class UnitFields(BaseModel):
-    name: str
-    symbol: str
-    definition: str
-    type: str
-
-
-class UnitGetResponse(UnitFields, UnitID):
-    owner: Optional[str]
-
-    class Config:
-        allow_population_by_field_name = True
-
-
-class UnitPostBody(UnitFields):
-    pass
-
-
-@allow_partial
-class UnitPatchBody(UnitFields):
-    pass
hydroserverpy/schemas/users.py
DELETED
@@ -1,28 +0,0 @@
-from pydantic import BaseModel, Field
-
-
-class OrganizationFields(BaseModel):
-    code: str
-    name: str
-    description: str = None
-    type: str
-    link: str = None
-
-    @classmethod
-    def is_empty(cls, obj):
-        return not (obj.name and obj.code and obj.type)
-
-
-class UserFields(BaseModel):
-    first_name: str = Field(alias="firstName")
-    last_name: str = Field(alias="lastName")
-    email: str = None
-    middle_name: str = Field(default=None, alias="middleName")
-    phone: str = None
-    address: str = None
-    type: str = None
-    link: str = None
-    organization: OrganizationFields = None
-
-    class Config:
-        allow_population_by_field_name = True