hydroserverpy 0.3.0__py3-none-any.whl → 0.5.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of hydroserverpy might be problematic. Click here for more details.

Files changed (83) hide show
  1. hydroserverpy/__init__.py +3 -4
  2. hydroserverpy/api/http.py +24 -0
  3. hydroserverpy/api/main.py +152 -0
  4. hydroserverpy/api/models/__init__.py +18 -0
  5. hydroserverpy/api/models/base.py +74 -0
  6. hydroserverpy/api/models/etl/__init__.py +0 -0
  7. hydroserverpy/api/models/iam/__init__.py +0 -0
  8. hydroserverpy/api/models/iam/account.py +12 -0
  9. hydroserverpy/api/models/iam/collaborator.py +34 -0
  10. hydroserverpy/api/models/iam/role.py +10 -0
  11. hydroserverpy/api/models/iam/workspace.py +203 -0
  12. hydroserverpy/api/models/sta/__init__.py +0 -0
  13. hydroserverpy/api/models/sta/datastream.py +336 -0
  14. hydroserverpy/api/models/sta/observed_property.py +72 -0
  15. hydroserverpy/api/models/sta/processing_level.py +50 -0
  16. hydroserverpy/api/models/sta/result_qualifier.py +49 -0
  17. hydroserverpy/api/models/sta/sensor.py +105 -0
  18. hydroserverpy/api/models/sta/thing.py +217 -0
  19. hydroserverpy/api/models/sta/unit.py +49 -0
  20. hydroserverpy/api/services/__init__.py +8 -0
  21. hydroserverpy/api/services/base.py +92 -0
  22. hydroserverpy/api/services/etl/__init__.py +0 -0
  23. hydroserverpy/api/services/iam/__init__.py +0 -0
  24. hydroserverpy/api/services/iam/workspace.py +126 -0
  25. hydroserverpy/api/services/sta/__init__.py +0 -0
  26. hydroserverpy/api/services/sta/datastream.py +354 -0
  27. hydroserverpy/api/services/sta/observed_property.py +98 -0
  28. hydroserverpy/api/services/sta/processing_level.py +78 -0
  29. hydroserverpy/api/services/sta/result_qualifier.py +74 -0
  30. hydroserverpy/api/services/sta/sensor.py +116 -0
  31. hydroserverpy/api/services/sta/thing.py +188 -0
  32. hydroserverpy/api/services/sta/unit.py +82 -0
  33. hydroserverpy/etl/__init__.py +21 -0
  34. hydroserverpy/etl/extractors/__init__.py +0 -0
  35. hydroserverpy/etl/extractors/base.py +13 -0
  36. hydroserverpy/etl/extractors/ftp_extractor.py +50 -0
  37. hydroserverpy/etl/extractors/http_extractor.py +84 -0
  38. hydroserverpy/etl/extractors/local_file_extractor.py +25 -0
  39. hydroserverpy/etl/hydroserver_etl.py +40 -0
  40. hydroserverpy/etl/loaders/__init__.py +0 -0
  41. hydroserverpy/etl/loaders/base.py +13 -0
  42. hydroserverpy/etl/loaders/hydroserver_loader.py +68 -0
  43. hydroserverpy/etl/transformers/__init__.py +0 -0
  44. hydroserverpy/etl/transformers/base.py +52 -0
  45. hydroserverpy/etl/transformers/csv_transformer.py +88 -0
  46. hydroserverpy/etl/transformers/json_transformer.py +62 -0
  47. hydroserverpy/etl/types.py +7 -0
  48. hydroserverpy/etl_csv/__init__.py +0 -0
  49. hydroserverpy/{etl/service.py → etl_csv/hydroserver_etl_csv.py} +93 -55
  50. hydroserverpy/quality/service.py +84 -70
  51. hydroserverpy-0.5.0b1.dist-info/METADATA +19 -0
  52. hydroserverpy-0.5.0b1.dist-info/RECORD +59 -0
  53. {hydroserverpy-0.3.0.dist-info → hydroserverpy-0.5.0b1.dist-info}/WHEEL +1 -1
  54. hydroserverpy/core/endpoints/__init__.py +0 -9
  55. hydroserverpy/core/endpoints/base.py +0 -133
  56. hydroserverpy/core/endpoints/data_loaders.py +0 -92
  57. hydroserverpy/core/endpoints/data_sources.py +0 -92
  58. hydroserverpy/core/endpoints/datastreams.py +0 -188
  59. hydroserverpy/core/endpoints/observed_properties.py +0 -93
  60. hydroserverpy/core/endpoints/processing_levels.py +0 -93
  61. hydroserverpy/core/endpoints/result_qualifiers.py +0 -93
  62. hydroserverpy/core/endpoints/sensors.py +0 -93
  63. hydroserverpy/core/endpoints/things.py +0 -240
  64. hydroserverpy/core/endpoints/units.py +0 -93
  65. hydroserverpy/core/schemas/__init__.py +0 -9
  66. hydroserverpy/core/schemas/base.py +0 -117
  67. hydroserverpy/core/schemas/data_loaders.py +0 -71
  68. hydroserverpy/core/schemas/data_sources.py +0 -206
  69. hydroserverpy/core/schemas/datastreams.py +0 -299
  70. hydroserverpy/core/schemas/observed_properties.py +0 -35
  71. hydroserverpy/core/schemas/processing_levels.py +0 -27
  72. hydroserverpy/core/schemas/result_qualifiers.py +0 -23
  73. hydroserverpy/core/schemas/sensors.py +0 -53
  74. hydroserverpy/core/schemas/things.py +0 -309
  75. hydroserverpy/core/schemas/units.py +0 -30
  76. hydroserverpy/core/service.py +0 -186
  77. hydroserverpy-0.3.0.dist-info/METADATA +0 -18
  78. hydroserverpy-0.3.0.dist-info/RECORD +0 -36
  79. /hydroserverpy/{core → api}/__init__.py +0 -0
  80. /hydroserverpy/{etl → etl_csv}/exceptions.py +0 -0
  81. {hydroserverpy-0.3.0.dist-info → hydroserverpy-0.5.0b1.dist-info/licenses}/LICENSE +0 -0
  82. {hydroserverpy-0.3.0.dist-info → hydroserverpy-0.5.0b1.dist-info}/top_level.txt +0 -0
  83. {hydroserverpy-0.3.0.dist-info → hydroserverpy-0.5.0b1.dist-info}/zip-safe +0 -0
@@ -1,206 +0,0 @@
1
- import tempfile
2
- import io
3
- from pydantic import BaseModel, Field
4
- from typing import Optional, Literal, Union, List, TYPE_CHECKING
5
- from datetime import datetime
6
- from uuid import UUID
7
- from urllib.request import urlopen
8
- from hydroserverpy.core.schemas.base import HydroServerCoreModel
9
- from hydroserverpy.etl.service import HydroServerETL
10
-
11
- if TYPE_CHECKING:
12
- from hydroserverpy.core.schemas.data_loaders import DataLoader
13
- from hydroserverpy.core.schemas.datastreams import Datastream
14
-
15
-
16
- class DataSourceFields(BaseModel):
17
- name: str = Field(
18
- ..., strip_whitespace=True, max_length=255,
19
- description='The name of the data source.'
20
- )
21
- path: Optional[str] = Field(
22
- None, strip_whitespace=True, max_length=255,
23
- description='The path to a local data source file.'
24
- )
25
- link: Optional[str] = Field(
26
- None, strip_whitespace=True, max_length=255,
27
- description='The link to a remote data source file.'
28
- )
29
- header_row: Optional[int] = Field(
30
- None, gt=0, lt=9999,
31
- description='The row number containing the column headers.'
32
- )
33
- data_start_row: Optional[int] = Field(
34
- None, gt=0, lt=9999,
35
- description='The row number where the data begins.'
36
- )
37
- delimiter: Optional[str] = Field(
38
- ',', strip_whitespace=True, max_length=1,
39
- description='The delimiter used by the data source file.'
40
- )
41
- quote_char: Optional[str] = Field(
42
- '"', strip_whitespace=True, max_length=1,
43
- description='The quote delimiter character used by the data source file.'
44
- )
45
- interval: Optional[int] = Field(
46
- None, gt=0, lt=9999,
47
- description='The time interval at which the data source should be loaded.'
48
- )
49
- interval_units: Optional[Literal['minutes', 'hours', 'days', 'weeks', 'months']] = Field(
50
- None,
51
- description='The interval units used by the data source file.'
52
- )
53
- crontab: Optional[str] = Field(
54
- None, strip_whitespace=True, max_length=255,
55
- description='The crontab used to schedule when the data source should be loaded.'
56
- )
57
- start_time: Optional[datetime] = Field(
58
- None,
59
- description='When the data source should begin being loaded.'
60
- )
61
- end_time: Optional[datetime] = Field(
62
- None,
63
- description='When the data source should stop being loaded.'
64
- )
65
- paused: Optional[bool] = Field(
66
- False,
67
- description='Whether loading the data source should be paused or not.'
68
- )
69
- timestamp_column: Union[int, str] = Field(
70
- ..., strip_whitespace=True, max_length=255,
71
- description='The column of the data source file containing the timestamps.'
72
- )
73
- timestamp_format: Optional[str] = Field(
74
- '%Y-%m-%dT%H:%M:%S%Z', strip_whitespace=True, max_length=255,
75
- description='The format of the timestamps, using Python\'s datetime strftime codes.'
76
- )
77
- timestamp_offset: Optional[str] = Field(
78
- '+0000', strip_whitespace=True, max_length=255,
79
- description='An ISO 8601 time zone offset designator code to be applied to timestamps in the data source file.'
80
- )
81
- data_loader_id: UUID = Field(
82
- ...,
83
- description='The ID of the data loader responsible for loading this data source.'
84
- )
85
- data_source_thru: Optional[datetime] = Field(
86
- None,
87
- description='The timestamp through which the data source contains data.'
88
- )
89
- last_sync_successful: Optional[bool] = Field(
90
- None,
91
- description='Whether the last data loading attempt was successful or not.'
92
- )
93
- last_sync_message: Optional[str] = Field(
94
- None, strip_whitespace=True,
95
- description='A message generated by the data loader when it attempted to load data from this data source.'
96
- )
97
- last_synced: Optional[datetime] = Field(
98
- None,
99
- description='The last time the data loader attempted to load data from this data source.'
100
- )
101
- next_sync: Optional[datetime] = Field(
102
- None,
103
- description="The next time the data loader will attempt to load data from this data source."
104
- )
105
-
106
-
107
- class DataSource(HydroServerCoreModel, DataSourceFields):
108
- """
109
- A model representing a data source, extending the core functionality of HydroServerCoreModel with additional
110
- properties and methods.
111
-
112
- :ivar _datastreams: A private attribute to cache the list of datastreams associated with the data source.
113
- :ivar _data_loader: A private attribute to cache the data loader associated with the data source.
114
- """
115
-
116
- def __init__(self, _endpoint, _uid: Optional[UUID] = None, **data):
117
- """
118
- Initialize a DataSource instance.
119
-
120
- :param _endpoint: The endpoint associated with the DataSource.
121
- :param _uid: The unique identifier for the DataSource.
122
- :type _uid: Optional[UUID]
123
- :param data: Additional attributes for the DataSource.
124
- """
125
-
126
- super().__init__(_endpoint=_endpoint, _uid=_uid, **data)
127
- self._datastreams = None
128
- self._data_loader = None
129
-
130
- @property
131
- def datastreams(self) -> List['Datastream']:
132
- """
133
- Retrieve the datastreams associated with the DataSource. If not already cached, fetch the datastreams from the
134
- server.
135
-
136
- :return: A list of datastreams associated with the data source.
137
- :rtype: List[Datastream]
138
- """
139
-
140
- if self._datastreams is None:
141
- self._datastreams = self._endpoint.list_datastreams(uid=self.uid)
142
-
143
- return self._datastreams
144
-
145
- @property
146
- def data_loader(self) -> 'DataLoader':
147
- """
148
- Retrieve the data loader associated with the data source. If not already cached, fetch the data loader from the
149
- server.
150
-
151
- :return: The data loader associated with the data source.
152
- :rtype: DataLoader
153
- """
154
-
155
- if self._data_loader is None:
156
- self._data_loader = self._endpoint._service.dataloaders.get(uid=self.data_loader_id) # noqa
157
-
158
- return self._data_loader
159
-
160
- def refresh(self) -> None:
161
- """
162
- Refresh the data source with the latest data from the server and update cached datastreams and data loader if
163
- they were previously loaded.
164
- """
165
-
166
- entity = self._endpoint.get(uid=self.uid).model_dump(exclude=['uid'])
167
- self._original_data = entity
168
- self.__dict__.update(entity)
169
- if self._datastreams is not None:
170
- self._datastreams = self._endpoint.list_datastreams(uid=self.uid)
171
- if self._data_loader is not None:
172
- self._data_loader = self._endpoint._service.dataloaders.get(uid=self.data_loader_id) # noqa
173
-
174
- def load_observations(self) -> None:
175
- """
176
- Load observations data from a local file or a remote URL into HydroServer using this data source configuration.
177
- """
178
-
179
- if self.path:
180
- with open(self.path, 'rb') as f:
181
- with io.TextIOWrapper(f, encoding='utf-8') as data_file:
182
- hs_etl = HydroServerETL(
183
- service=getattr(self._endpoint, '_service'),
184
- data_file=data_file,
185
- data_source=self,
186
- )
187
- hs_etl.run()
188
- elif self.link:
189
- with tempfile.NamedTemporaryFile(mode='w+b') as temp_file:
190
- with urlopen(self.link) as response:
191
- chunk_size = 1024 * 1024 * 10 # Use a 10mb chunk size.
192
- while True:
193
- chunk = response.read(chunk_size)
194
- if not chunk:
195
- break
196
- temp_file.write(chunk)
197
- temp_file.seek(0)
198
- with io.TextIOWrapper(temp_file, encoding='utf-8') as data_file:
199
- hs_etl = HydroServerETL(
200
- service=getattr(self._endpoint, '_service'),
201
- data_file=data_file,
202
- data_source=self,
203
- )
204
- hs_etl.run()
205
- else:
206
- return None
@@ -1,299 +0,0 @@
1
- from pydantic import BaseModel, Field
2
- from pandas import DataFrame
3
- from typing import Optional, Literal, TYPE_CHECKING
4
- from uuid import UUID
5
- from datetime import datetime
6
- from hydroserverpy.core.schemas.base import HydroServerCoreModel
7
-
8
- if TYPE_CHECKING:
9
- from hydroserverpy.core.schemas.things import Thing
10
- from hydroserverpy.core.schemas.data_sources import DataSource
11
- from hydroserverpy.core.schemas.sensors import Sensor
12
- from hydroserverpy.core.schemas.units import Unit
13
- from hydroserverpy.core.schemas.processing_levels import ProcessingLevel
14
- from hydroserverpy.core.schemas.observed_properties import ObservedProperty
15
-
16
-
17
- class DatastreamFields(BaseModel):
18
- name: str = Field(
19
- ..., strip_whitespace=True, max_length=255,
20
- description='The name of the datastream.'
21
- )
22
- description: str = Field(
23
- ..., strip_whitespace=True,
24
- description='A description of the datastream.'
25
- )
26
- observation_type: str = Field(
27
- ..., strip_whitespace=True, max_length=255,
28
- description='The type of observation recorded in this datastream'
29
- )
30
- sampled_medium: str = Field(
31
- ..., strip_whitespace=True, max_length=255,
32
- description='The physical medium in which the observations were sampled.'
33
- )
34
- no_data_value: float = Field(
35
- ...,
36
- description='A numerical value representing no data at a given timestamp.',
37
- )
38
- aggregation_statistic: str = Field(
39
- ..., strip_whitespace=True, max_length=255,
40
- description='The statistic calculated over the time aggregation interval of observations in this datastream.'
41
- )
42
- time_aggregation_interval: float = Field(
43
- ...,
44
- description='The time interval over which the aggregation statistic is applied to observations.',
45
- )
46
- status: Optional[str] = Field(
47
- None, strip_whitespace=True, max_length=255,
48
- description='The current status of this datastream.'
49
- )
50
- result_type: str = Field(
51
- ..., strip_whitespace=True, max_length=255,
52
- description='The type of result recorded in this datastream.'
53
- )
54
- value_count: Optional[int] = Field(
55
- None, ge=0,
56
- description='The total number of observations in this datastream.'
57
- )
58
- phenomenon_begin_time: Optional[datetime] = Field(
59
- None,
60
- description='The timestamp representing when the first phenomenon recorded in this datastream occurred.'
61
- )
62
- phenomenon_end_time: Optional[datetime] = Field(
63
- None,
64
- description='The timestamp representing when the last phenomenon recorded in this datastream occurred.'
65
- )
66
- result_begin_time: Optional[datetime] = Field(
67
- None,
68
- description='The timestamp representing when the first observation of this datastream was recorded.'
69
- )
70
- result_end_time: Optional[datetime] = Field(
71
- None,
72
- description='The timestamp representing when the last observation of this datastream was recorded.'
73
- )
74
- data_source_id: Optional[UUID] = Field(
75
- None,
76
- description='The data source for observations of this datastream.'
77
- )
78
- data_source_column: Optional[str] = Field(
79
- None, strip_whitespace=True, max_length=255,
80
- description='The name of the column containing this datastream\'s observations in the data source file.'
81
- )
82
- is_visible: bool = Field(
83
- True,
84
- description='Whether this datastream is publicly visible.'
85
- )
86
- is_data_visible: bool = Field(
87
- True,
88
- description='Whether this observations associated with this datastream are publicly visible.'
89
- )
90
- thing_id: UUID = Field(
91
- ...,
92
- description='The site/thing from which observations of this datastream were recorded.'
93
- )
94
- sensor_id: UUID = Field(
95
- ...,
96
- description='The sensor used to record observations of this datastream.'
97
- )
98
- observed_property_id: UUID = Field(
99
- ...,
100
- description='The physical property being observed for this datastream.'
101
- )
102
- processing_level_id: UUID = Field(
103
- ...,
104
- description='The processing level applied to this datastream.'
105
- )
106
- unit_id: UUID = Field(
107
- ...,
108
- description='The unit used to record observations for this datastream.'
109
- )
110
- time_aggregation_interval_units: Literal['seconds', 'minutes', 'hours', 'days'] = Field(
111
- ...,
112
- description='The time unit for this datastream\'s time aggregation interval'
113
- )
114
- intended_time_spacing: Optional[float] = Field(
115
- None,
116
- description='The time interval at which observations should be made for this datastream.'
117
- )
118
- intended_time_spacing_units: Optional[Literal['seconds', 'minutes', 'hours', 'days']] = Field(
119
- None,
120
- description='The time unit for this datastream\'s intended time spacing interval'
121
- )
122
-
123
-
124
- class Datastream(HydroServerCoreModel, DatastreamFields):
125
- """
126
- A model representing a datastream, extending the core functionality of HydroServerCoreModel with additional
127
- properties and methods.
128
-
129
- :ivar _thing: A private attribute to cache the associated thing entity.
130
- :ivar _data_source: A private attribute to cache the associated data source entity.
131
- :ivar _observed_property: A private attribute to cache the associated observed property entity.
132
- :ivar _processing_level: A private attribute to cache the associated processing level entity.
133
- :ivar _unit: A private attribute to cache the associated unit entity.
134
- :ivar _sensor: A private attribute to cache the associated sensor entity.
135
- """
136
-
137
- def __init__(self, _endpoint, _uid: Optional[UUID] = None, **data):
138
- """
139
- Initialize a Datastream instance.
140
-
141
- :param _endpoint: The endpoint associated with the Datastream.
142
- :param _uid: The unique identifier for the Datastream.
143
- :type _uid: Optional[UUID]
144
- :param data: Additional attributes for the Datastream.
145
- """
146
-
147
- super().__init__(_endpoint=_endpoint, _uid=_uid, **data)
148
- self._thing = None
149
- self._data_source = None
150
- self._observed_property = None
151
- self._processing_level = None
152
- self._unit = None
153
- self._sensor = None
154
-
155
- @property
156
- def thing(self) -> 'Thing':
157
- """
158
- The thing entity associated with the datastream. If not already cached, fetch it from the server.
159
-
160
- :return: The thing entity associated with the datastream.
161
- :rtype: Thing
162
- """
163
-
164
- if self._thing is None:
165
- self._thing = self._endpoint._service.things.get(uid=self.thing_id) # noqa
166
-
167
- return self._thing
168
-
169
- @property
170
- def data_source(self) -> 'DataSource':
171
- """
172
- The data source entity associated with the datastream. If not already cached, fetch it from the server.
173
-
174
- :return: The data source entity associated with the datastream.
175
- :rtype: DataSource
176
- """
177
-
178
- if self._data_source is None:
179
- self._data_source = self._endpoint._service.datasources.get(uid=self.data_source_id) # noqa
180
-
181
- return self._data_source
182
-
183
- @property
184
- def observed_property(self) -> 'ObservedProperty':
185
- """
186
- Retrieve the observed property entity associated with the datastream. If not already cached, fetch it from the
187
- server.
188
-
189
- :return: The observed property entity associated with the datastream.
190
- :rtype: ObservedProperty
191
- """
192
-
193
- if self._observed_property is None:
194
- self._observed_property = self._endpoint._service.observedproperties.get(uid=self.observed_property_id) # noqa
195
-
196
- return self._observed_property
197
-
198
- @property
199
- def processing_level(self) -> 'ProcessingLevel':
200
- """
201
- Retrieve the processing level entity associated with the datastream. If not already cached, fetch it from the
202
- server.
203
-
204
- :return: The processing level entity associated with the datastream.
205
- :rtype: ProcessingLevel
206
- """
207
-
208
- if self._processing_level is None:
209
- self._processing_level = self._endpoint._service.processinglevels.get(uid=self.processing_level_id) # noqa
210
-
211
- return self._processing_level
212
-
213
- @property
214
- def unit(self) -> 'Unit':
215
- """
216
- Retrieve the unit entity associated with the datastream. If not already cached, fetch it from the server.
217
-
218
- :return: The unit entity associated with the datastream.
219
- :rtype: Unit
220
- """
221
-
222
- if self._unit is None:
223
- self._unit = self._endpoint._service.units.get(uid=self.unit_id) # noqa
224
-
225
- return self._unit
226
-
227
- @property
228
- def sensor(self) -> 'Sensor':
229
- """
230
- Retrieve the sensor entity associated with the datastream. If not already cached, fetch it from the server.
231
-
232
- :return: The sensor entity associated with the datastream.
233
- :rtype: Sensor
234
- """
235
-
236
- if self._sensor is None:
237
- self._sensor = self._endpoint._service.sensors.get(uid=self.sensor_id) # noqa
238
-
239
- return self._sensor
240
-
241
- def refresh(self) -> None:
242
- """
243
- Refresh the datastream with the latest data from the server and update cached entities if they were previously
244
- loaded.
245
- """
246
-
247
- entity = self._endpoint.get(uid=self.uid).model_dump(exclude=['uid'])
248
- self._original_data = entity
249
- self.__dict__.update(entity)
250
- if self._thing is not None:
251
- self._thing = self._endpoint._service.things.get(uid=self.thing_id) # noqa
252
- if self._data_source is not None:
253
- self._data_source = self._endpoint._service.datasources.get(uid=self.data_source_id) # noqa
254
- if self._observed_property is not None:
255
- self._observed_property = self._endpoint._service.observedproperties.get(uid=self.observed_property_id) # noqa
256
- if self._processing_level is not None:
257
- self._processing_level = self._endpoint._service.processinglevels.get(uid=self.processing_level_id) # noqa
258
- if self._unit is not None:
259
- self._unit = self._endpoint._service.units.get(uid=self.unit_id) # noqa
260
- if self._sensor is not None:
261
- self._sensor = self._endpoint._service.sensors.get(uid=self.sensor_id) # noqa
262
-
263
- def get_observations(
264
- self,
265
- start_time: datetime = None,
266
- end_time: datetime = None,
267
- page: int = 1,
268
- page_size: int = 100000,
269
- include_quality: bool = False,
270
- fetch_all: bool = False
271
- ) -> DataFrame:
272
- """
273
- Retrieve the observations for this datastream.
274
-
275
- :return: A DataFrame containing the observations associated with the datastream.
276
- :rtype: DataFrame
277
- """
278
-
279
- return self._endpoint.get_observations(
280
- uid=self.uid, start_time=start_time, end_time=end_time, page=page, page_size=page_size,
281
- include_quality=include_quality, fetch_all=fetch_all
282
- )
283
-
284
- def load_observations(
285
- self,
286
- observations: DataFrame,
287
- ) -> None:
288
- """
289
- Load a DataFrame of observations to the datastream.
290
-
291
- :param observations: A pandas DataFrame containing the observations to be uploaded.
292
- :type observations: DataFrame
293
- :return: None
294
- """
295
-
296
- return self._endpoint.load_observations(
297
- uid=self.uid,
298
- observations=observations,
299
- )
@@ -1,35 +0,0 @@
1
- from pydantic import BaseModel, Field
2
- from typing import Optional
3
- from hydroserverpy.core.schemas.base import HydroServerCoreModel
4
-
5
-
6
- class ObservedPropertyFields(BaseModel):
7
- name: str = Field(
8
- ..., strip_whitespace=True, max_length=255,
9
- description='The name of the observed property.'
10
- )
11
- definition: str = Field(
12
- ..., strip_whitespace=True,
13
- description='The definition of the observed property.'
14
- )
15
- description: Optional[str] = Field(
16
- None, strip_whitespace=True,
17
- description='A description of the observed property.'
18
- )
19
- type: Optional[str] = Field(
20
- None, strip_whitespace=True, max_length=255,
21
- description='The type of the observed property.'
22
- )
23
- code: Optional[str] = Field(
24
- None, strip_whitespace=True, max_length=255,
25
- description='A code representing the observed property.'
26
- )
27
-
28
-
29
- class ObservedProperty(HydroServerCoreModel, ObservedPropertyFields):
30
- """
31
- A model representing an observed property, extending the core functionality of HydroServerCoreModel with additional
32
- fields defined in ObservedPropertyFields.
33
- """
34
-
35
- pass
@@ -1,27 +0,0 @@
1
- from pydantic import BaseModel, Field
2
- from typing import Optional
3
- from hydroserverpy.core.schemas.base import HydroServerCoreModel
4
-
5
-
6
- class ProcessingLevelFields(BaseModel):
7
- code: str = Field(
8
- ..., strip_whitespace=True, max_length=255,
9
- description='A code representing the processing level.'
10
- )
11
- definition: Optional[str] = Field(
12
- None, strip_whitespace=True,
13
- description='The definition of the processing level.'
14
- )
15
- explanation: Optional[str] = Field(
16
- None, strip_whitespace=True,
17
- description='The explanation of the processing level.'
18
- )
19
-
20
-
21
- class ProcessingLevel(HydroServerCoreModel, ProcessingLevelFields):
22
- """
23
- A model representing a processing level, extending the core functionality of HydroServerCoreModel with additional
24
- fields defined in ProcessingLevelFields.
25
- """
26
-
27
- pass
@@ -1,23 +0,0 @@
1
- from pydantic import BaseModel, Field
2
- from typing import Optional
3
- from hydroserverpy.core.schemas.base import HydroServerCoreModel
4
-
5
-
6
- class ResultQualifierFields(BaseModel):
7
- code: str = Field(
8
- ..., strip_whitespace=True, max_length=255,
9
- description='A code representing the result qualifier.'
10
- )
11
- description: Optional[str] = Field(
12
- None, strip_whitespace=True,
13
- description='A description of the result qualifier.'
14
- )
15
-
16
-
17
- class ResultQualifier(HydroServerCoreModel, ResultQualifierFields):
18
- """
19
- A model representing a result qualifier, extending the core functionality of HydroServerCoreModel with additional
20
- fields defined in ResultQualifierFields.
21
- """
22
-
23
- pass
@@ -1,53 +0,0 @@
1
- from pydantic import BaseModel, Field, ConfigDict
2
- from typing import Optional
3
- from hydroserverpy.core.schemas.base import HydroServerCoreModel
4
-
5
-
6
- class SensorFields(BaseModel):
7
- name: str = Field(
8
- ..., strip_whitespace=True, max_length=255,
9
- description='The name of the sensor.'
10
- )
11
- description: str = Field(
12
- strip_whitespace=True,
13
- description='A description of the sensor.'
14
- )
15
- encoding_type: str = Field(
16
- ..., strip_whitespace=True, max_length=255,
17
- description='The encoding type of the sensor.'
18
- )
19
- manufacturer: Optional[str] = Field(
20
- None, strip_whitespace=True, max_length=255,
21
- description='The manufacturer of the sensor.'
22
- )
23
- model: Optional[str] = Field(
24
- None, strip_whitespace=True, max_length=255,
25
- description='The model of the sensor.'
26
- )
27
- model_link: Optional[str] = Field(
28
- None, strip_whitespace=True, max_length=500,
29
- description='A link to a website or file that describes the sensor model.'
30
- )
31
- method_type: str = Field(
32
- ..., strip_whitespace=True, max_length=100,
33
- description='The type of method used by this sensor to collect observations.'
34
- )
35
- method_link: Optional[str] = Field(
36
- None, strip_whitespace=True, max_length=500,
37
- description='A link to a website or file that describes the sensor method.'
38
- )
39
- method_code: Optional[str] = Field(
40
- None, strip_whitespace=True, max_length=50,
41
- description='A code representing the sensor method.'
42
- )
43
-
44
- model_config = ConfigDict(protected_namespaces=())
45
-
46
-
47
- class Sensor(HydroServerCoreModel, SensorFields):
48
- """
49
- A model representing a sensor, extending the core functionality of HydroServerCoreModel with additional
50
- fields defined in SensorFields.
51
- """
52
-
53
- pass