earthscope-sdk 1.1.0__py3-none-any.whl → 1.2.0b0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- earthscope_sdk/__init__.py +1 -1
- earthscope_sdk/client/_client.py +47 -0
- earthscope_sdk/client/data_access/__init__.py +0 -0
- earthscope_sdk/client/data_access/_arrow/__init__.py +0 -0
- earthscope_sdk/client/data_access/_arrow/_common.py +94 -0
- earthscope_sdk/client/data_access/_arrow/_gnss.py +116 -0
- earthscope_sdk/client/data_access/_base.py +85 -0
- earthscope_sdk/client/data_access/_query_plan/__init__.py +0 -0
- earthscope_sdk/client/data_access/_query_plan/_gnss_observations.py +295 -0
- earthscope_sdk/client/data_access/_query_plan/_query_plan.py +259 -0
- earthscope_sdk/client/data_access/_query_plan/_request_set.py +133 -0
- earthscope_sdk/client/data_access/_service.py +114 -0
- earthscope_sdk/client/discovery/__init__.py +0 -0
- earthscope_sdk/client/discovery/_base.py +303 -0
- earthscope_sdk/client/discovery/_service.py +209 -0
- earthscope_sdk/client/discovery/models.py +144 -0
- earthscope_sdk/common/context.py +71 -1
- earthscope_sdk/common/service.py +10 -8
- earthscope_sdk/config/models.py +14 -1
- earthscope_sdk/util/__init__.py +0 -0
- earthscope_sdk/util/_concurrency.py +64 -0
- earthscope_sdk/util/_itertools.py +57 -0
- earthscope_sdk/util/_time.py +57 -0
- earthscope_sdk/util/_types.py +5 -0
- {earthscope_sdk-1.1.0.dist-info → earthscope_sdk-1.2.0b0.dist-info}/METADATA +11 -1
- earthscope_sdk-1.2.0b0.dist-info/RECORD +49 -0
- earthscope_sdk-1.1.0.dist-info/RECORD +0 -30
- {earthscope_sdk-1.1.0.dist-info → earthscope_sdk-1.2.0b0.dist-info}/WHEEL +0 -0
- {earthscope_sdk-1.1.0.dist-info → earthscope_sdk-1.2.0b0.dist-info}/licenses/LICENSE +0 -0
- {earthscope_sdk-1.1.0.dist-info → earthscope_sdk-1.2.0b0.dist-info}/top_level.txt +0 -0
earthscope_sdk/client/discovery/_base.py
@@ -0,0 +1,303 @@
+import datetime as dt
+from typing import Optional, Union
+
+from earthscope_sdk.client.discovery.models import (
+    ListNetworkDatasourcesResult,
+    ListSessionDatasourcesResult,
+    ListStationDatasourcesResult,
+    ListStreamDatasourcesResult,
+    NetworkDatasource,
+    Page,
+    SessionDatasource,
+    StationDatasource,
+    StreamDatasource,
+    StreamType,
+)
+from earthscope_sdk.common.service import SdkService
+from earthscope_sdk.util._itertools import to_list
+from earthscope_sdk.util._types import ListOrItem
+
+
+class DiscoveryBaseService(SdkService):
+    """
+    Discovery service functionality
+    """
+
+    async def _list_network_datasources(
+        self,
+        *,
+        network_name: Optional[ListOrItem[str]] = None,
+        network_edid: Optional[ListOrItem[str]] = None,
+        with_edid_only=False,
+        limit=50,
+        offset=0,
+    ) -> Union[Page[NetworkDatasource], Page[str]]:
+        """
+        List network datasources
+
+        Args:
+            network_name: The name(s) of the network to list datasources for.
+            network_edid: The EDID(s) of the network to list datasources for.
+            with_edid_only: Whether to return only the EDIDs of the datasources.
+            limit: The maximum number of datasources to return.
+            offset: The offset to start the list from.
+
+        Returns:
+            A list of network datasources.
+        """
+        params = {
+            "with_edid_only": with_edid_only,
+            "limit": limit,
+            "offset": offset,
+        }
+
+        if network_name:
+            params["network_name"] = to_list(network_name)
+
+        if network_edid:
+            params["network_edid"] = to_list(network_edid)
+
+        req = self.ctx.httpx_client.build_request(
+            method="GET",
+            url=f"{self.resources.api_url}beta/discover/datasource/network",
+            params=params,
+        )
+
+        resp = await self._send_with_retries(req)
+
+        return ListNetworkDatasourcesResult.validate_json(resp.content)
+
+    async def _list_station_datasources(
+        self,
+        *,
+        network_name: Optional[ListOrItem[str]] = None,
+        network_edid: Optional[ListOrItem[str]] = None,
+        station_name: Optional[ListOrItem[str]] = None,
+        station_edid: Optional[ListOrItem[str]] = None,
+        with_edid_only=False,
+        with_parent_edids=False,
+        limit=50,
+        offset=0,
+    ) -> Union[Page[StationDatasource], Page[str]]:
+        """
+        List station datasources
+
+        Args:
+            network_name: The name(s) of the network to list datasources for.
+            network_edid: The EDID(s) of the network to list datasources for.
+            station_name: The name(s) of the station to list datasources for.
+            station_edid: The EDID(s) of the station to list datasources for.
+            with_edid_only: Whether to return only the EDIDs of the datasources.
+            with_parent_edids: Whether to return the parent EDIDs of the datasources.
+            limit: The maximum number of datasources to return.
+            offset: The offset to start the list from.
+
+        Returns:
+            A list of station datasources.
+        """
+        params = {
+            "with_edid_only": with_edid_only,
+            "with_parent_edids": with_parent_edids,
+            "limit": limit,
+            "offset": offset,
+        }
+
+        if network_name:
+            params["network_name"] = to_list(network_name)
+
+        if network_edid:
+            params["network_edid"] = to_list(network_edid)
+
+        if station_name:
+            params["station_name"] = to_list(station_name)
+
+        if station_edid:
+            params["station_edid"] = to_list(station_edid)
+
+        req = self.ctx.httpx_client.build_request(
+            method="GET",
+            url=f"{self.resources.api_url}beta/discover/datasource/station",
+            params=params,
+        )
+
+        resp = await self._send_with_retries(req)
+
+        return ListStationDatasourcesResult.validate_json(resp.content)
+
+    async def _list_session_datasources(
+        self,
+        *,
+        network_name: Optional[ListOrItem[str]] = None,
+        network_edid: Optional[ListOrItem[str]] = None,
+        station_name: Optional[ListOrItem[str]] = None,
+        station_edid: Optional[ListOrItem[str]] = None,
+        session_name: Optional[ListOrItem[str]] = None,
+        session_edid: Optional[ListOrItem[str]] = None,
+        sample_interval: Optional[dt.timedelta] = None,
+        roll: Optional[dt.timedelta] = None,
+        with_edid_only=False,
+        with_parents=False,
+        with_parent_edids=False,
+        limit=50,
+        offset=0,
+    ) -> Union[Page[SessionDatasource], Page[str]]:
+        """
+        List session datasources
+
+        Args:
+            network_name: The name(s) of the network to list datasources for.
+            network_edid: The EDID(s) of the network to list datasources for.
+            station_name: The name(s) of the station to list datasources for.
+            station_edid: The EDID(s) of the station to list datasources for.
+            session_name: The name(s) of the session to list datasources for.
+            session_edid: The EDID(s) of the session to list datasources for.
+            sample_interval: The sample interval to list datasources for.
+            roll: The roll to list datasources for.
+            with_edid_only: Whether to return only the EDIDs of the datasources.
+            with_parents: Whether to return the parent datasources.
+            with_parent_edids: Whether to return the parent EDIDs of the datasources.
+            limit: The maximum number of datasources to return.
+            offset: The offset to start the list from.
+
+        Returns:
+            A list of session datasources.
+        """
+        params = {
+            "with_edid_only": with_edid_only,
+            "with_parents": with_parents,
+            "with_parent_edids": with_parent_edids,
+            "limit": limit,
+            "offset": offset,
+        }
+
+        if network_name:
+            params["network_name"] = to_list(network_name)
+
+        if network_edid:
+            params["network_edid"] = to_list(network_edid)
+
+        if station_name:
+            params["station_name"] = to_list(station_name)
+
+        if station_edid:
+            params["station_edid"] = to_list(station_edid)
+
+        if session_name:
+            params["session_name"] = to_list(session_name)
+
+        if session_edid:
+            params["session_edid"] = to_list(session_edid)
+
+        if sample_interval:
+            # Convert to milliseconds
+            params["sample_interval"] = 1000 * sample_interval.total_seconds()
+
+        if roll:
+            # Convert to seconds
+            params["roll"] = roll.total_seconds()
+
+        req = self.ctx.httpx_client.build_request(
+            method="GET",
+            url=f"{self.resources.api_url}beta/discover/datasource/session",
+            params=params,
+        )
+
+        resp = await self._send_with_retries(req)
+
+        return ListSessionDatasourcesResult.validate_json(resp.content)
+
+    async def _list_stream_datasources(
+        self,
+        *,
+        network_name: Optional[ListOrItem[str]] = None,
+        network_edid: Optional[ListOrItem[str]] = None,
+        station_name: Optional[ListOrItem[str]] = None,
+        station_edid: Optional[ListOrItem[str]] = None,
+        stream_name: Optional[ListOrItem[str]] = None,
+        stream_edid: Optional[ListOrItem[str]] = None,
+        stream_type: Optional[StreamType] = None,
+        facility: Optional[str] = None,
+        software: Optional[str] = None,
+        label: Optional[str] = None,
+        sample_interval: Optional[dt.timedelta] = None,
+        with_edid_only=False,
+        with_parents=False,
+        with_parent_edids=False,
+        limit=50,
+        offset=0,
+    ) -> Union[Page[StreamDatasource], Page[str]]:
+        """
+        List stream datasources
+
+        Args:
+            network_name: The name(s) of the network to list datasources for.
+            network_edid: The EDID(s) of the network to list datasources for.
+            station_name: The name(s) of the station to list datasources for.
+            station_edid: The EDID(s) of the station to list datasources for.
+            stream_name: The name(s) of the stream to list datasources for.
+            stream_edid: The EDID(s) of the stream to list datasources for.
+            stream_type: The type of stream to list datasources for.
+            facility: The facility to list datasources for.
+            software: The software to list datasources for.
+            label: The label to list datasources for.
+            sample_interval: The sample interval to list datasources for.
+            with_edid_only: Whether to return only the EDIDs of the datasources.
+            with_parents: Whether to return the parent datasources.
+            with_parent_edids: Whether to return the parent EDIDs of the datasources.
+            limit: The maximum number of datasources to return.
+            offset: The offset to start the list from.
+
+        Returns:
+            A list of stream datasources.
+        """
+        params = {
+            "with_edid_only": with_edid_only,
+            "with_parents": with_parents,
+            "with_parent_edids": with_parent_edids,
+            "limit": limit,
+            "offset": offset,
+        }
+
+        if network_name:
+            params["network_name"] = to_list(network_name)
+
+        if network_edid:
+            params["network_edid"] = to_list(network_edid)
+
+        if station_name:
+            params["station_name"] = to_list(station_name)
+
+        if station_edid:
+            params["station_edid"] = to_list(station_edid)
+
+        if stream_name:
+            params["stream_name"] = to_list(stream_name)
+
+        if stream_edid:
+            params["stream_edid"] = to_list(stream_edid)
+
+        if stream_type is not None:
+            params["stream_type"] = stream_type.value
+
+        if facility is not None:
+            params["facility"] = facility
+
+        if software is not None:
+            params["software"] = software
+
+        if label is not None:
+            params["label"] = label
+
+        if sample_interval is not None:
+            # Convert to milliseconds
+            params["sample_interval"] = 1000 * sample_interval.total_seconds()
+
+        req = self.ctx.httpx_client.build_request(
+            method="GET",
+            url=f"{self.resources.api_url}beta/discover/datasource/stream",
+            params=params,
+        )
+
+        resp = await self._send_with_retries(req)
+
+        return ListStreamDatasourcesResult.validate_json(resp.content)
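Note on the return types above: each list method yields either a page of full datasource models or, when the server honors `with_edid_only=True`, a page of bare EDID strings, and the `TypeAdapter` unions defined in `models.py` resolve the correct shape at parse time. A minimal standalone sketch of that pattern (models trimmed to the fields shown in this diff; the JSON payloads are invented sample data):

from typing import Generic, Optional, TypeVar, Union

from pydantic import BaseModel, TypeAdapter

P = TypeVar("P")


class Page(BaseModel, Generic[P]):
    has_next: bool
    offset: int
    limit: int
    items: list[P]
    total: Optional[int] = None


class NetworkDatasource(BaseModel):
    edid: str
    names: dict[str, str]
    description: Optional[str] = None


# Same union shape the SDK uses for the /datasource/network responses
ListNetworkDatasourcesResult = TypeAdapter(Union[Page[str], Page[NetworkDatasource]])

# with_edid_only=True -> items arrive as plain EDID strings
edid_page = ListNetworkDatasourcesResult.validate_json(
    '{"has_next": false, "offset": 0, "limit": 50, "items": ["net-edid-1"]}'
)
assert isinstance(edid_page.items[0], str)

# with_edid_only=False -> items arrive as full datasource objects
full_page = ListNetworkDatasourcesResult.validate_json(
    '{"has_next": false, "offset": 0, "limit": 50,'
    ' "items": [{"edid": "net-edid-1", "names": {"FDSN": "IU"}}]}'
)
assert isinstance(full_page.items[0], NetworkDatasource)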
earthscope_sdk/client/discovery/_service.py
@@ -0,0 +1,209 @@
+import datetime as dt
+from functools import partial
+from typing import Any, AsyncIterator, Coroutine, Optional, TypeVar, Union
+
+from earthscope_sdk.client.discovery._base import DiscoveryBaseService
+from earthscope_sdk.client.discovery.models import (
+    NetworkDatasource,
+    Page,
+    SessionDatasource,
+    StationDatasource,
+    StreamDatasource,
+    StreamType,
+)
+from earthscope_sdk.common.context import SdkContext
+from earthscope_sdk.util._types import ListOrItem
+
+P = TypeVar("P")
+
+
+class _DiscoveryService(DiscoveryBaseService):
+    """
+    L2 discovery service functionality
+    """
+
+    async def _iter_pages(
+        self,
+        fn: Coroutine[Any, Any, Page[P]],
+        limit: int,
+    ) -> AsyncIterator[Page[P]]:
+        """
+        Iterate over pages of results from a function.
+
+        Args:
+            fn: The function to call to get a page of results.
+            limit: The maximum number of results to return.
+
+        Returns:
+            An iterator over the pages of results.
+        """
+        offset = 0
+        page_size = 100
+        has_next = True
+        result_ct = 0
+
+        while has_next and result_ct < limit:
+            page_size = min(page_size, limit - result_ct)
+            page: Page[P] = await fn(offset=offset, limit=page_size)
+            yield page
+            has_next = page.has_next
+            offset += page_size
+            result_ct += len(page.items)
+
+    async def _load_all_pages(
+        self,
+        fn: Coroutine[Any, Any, Page[P]],
+        limit: int,
+    ) -> list[P]:
+        """
+        Load all pages of results from a function.
+        """
+        results: list[P] = []
+        async for p in self._iter_pages(fn=fn, limit=limit):
+            results.extend(p.items)
+
+        return results
+
+    async def _list_network_datasources(
+        self,
+        *,
+        network_name: Optional[ListOrItem[str]] = None,
+        network_edid: Optional[ListOrItem[str]] = None,
+        with_edid_only=False,
+        limit=1000,
+    ) -> Union[list[NetworkDatasource], list[str]]:
+        return await self._load_all_pages(
+            fn=partial(
+                super()._list_network_datasources,
+                network_name=network_name,
+                network_edid=network_edid,
+                with_edid_only=with_edid_only,
+            ),
+            limit=limit,
+        )
+
+    async def _list_station_datasources(
+        self,
+        *,
+        network_name: Optional[ListOrItem[str]] = None,
+        network_edid: Optional[ListOrItem[str]] = None,
+        station_name: Optional[ListOrItem[str]] = None,
+        station_edid: Optional[ListOrItem[str]] = None,
+        with_edid_only=False,
+        with_parent_edids=False,
+        limit=1000,
+    ) -> Union[list[StationDatasource], list[str]]:
+        return await self._load_all_pages(
+            fn=partial(
+                super()._list_station_datasources,
+                network_name=network_name,
+                network_edid=network_edid,
+                station_name=station_name,
+                station_edid=station_edid,
+                with_edid_only=with_edid_only,
+                with_parent_edids=with_parent_edids,
+            ),
+            limit=limit,
+        )
+
+    async def _list_session_datasources(
+        self,
+        *,
+        network_name: Optional[ListOrItem[str]] = None,
+        network_edid: Optional[ListOrItem[str]] = None,
+        station_name: Optional[ListOrItem[str]] = None,
+        station_edid: Optional[ListOrItem[str]] = None,
+        session_name: Optional[ListOrItem[str]] = None,
+        session_edid: Optional[ListOrItem[str]] = None,
+        sample_interval: Optional[dt.timedelta] = None,
+        roll: Optional[dt.timedelta] = None,
+        with_edid_only=False,
+        with_parents=False,
+        with_parent_edids=False,
+        limit=1000,
+    ) -> Union[list[SessionDatasource], list[str]]:
+        return await self._load_all_pages(
+            fn=partial(
+                super()._list_session_datasources,
+                network_name=network_name,
+                network_edid=network_edid,
+                station_name=station_name,
+                station_edid=station_edid,
+                session_name=session_name,
+                session_edid=session_edid,
+                sample_interval=sample_interval,
+                roll=roll,
+                with_edid_only=with_edid_only,
+                with_parents=with_parents,
+                with_parent_edids=with_parent_edids,
+            ),
+            limit=limit,
+        )
+
+    async def _list_stream_datasources(
+        self,
+        *,
+        network_name: Optional[ListOrItem[str]] = None,
+        network_edid: Optional[ListOrItem[str]] = None,
+        station_name: Optional[ListOrItem[str]] = None,
+        station_edid: Optional[ListOrItem[str]] = None,
+        stream_name: Optional[ListOrItem[str]] = None,
+        stream_edid: Optional[ListOrItem[str]] = None,
+        stream_type: Optional[StreamType] = None,
+        facility: Optional[str] = None,
+        software: Optional[str] = None,
+        label: Optional[str] = None,
+        sample_interval: Optional[dt.timedelta] = None,
+        with_edid_only=False,
+        with_parents=False,
+        with_parent_edids=False,
+        limit=1000,
+    ) -> Union[list[StreamDatasource], list[str]]:
+        return await self._load_all_pages(
+            fn=partial(
+                super()._list_stream_datasources,
+                network_name=network_name,
+                network_edid=network_edid,
+                station_name=station_name,
+                station_edid=station_edid,
+                stream_name=stream_name,
+                stream_edid=stream_edid,
+                stream_type=stream_type,
+                facility=facility,
+                software=software,
+                label=label,
+                sample_interval=sample_interval,
+                with_edid_only=with_edid_only,
+                with_parents=with_parents,
+                with_parent_edids=with_parent_edids,
+            ),
+            limit=limit,
+        )
+
+
+class AsyncDiscoveryService(_DiscoveryService):
+    """
+    Discovery service functionality
+    """
+
+    def __init__(self, ctx: SdkContext):
+        super().__init__(ctx)
+
+        self.list_network_datasources = self._list_network_datasources
+        self.list_station_datasources = self._list_station_datasources
+        self.list_session_datasources = self._list_session_datasources
+        self.list_stream_datasources = self._list_stream_datasources
+
+
+class DiscoveryService(_DiscoveryService):
+    """
+    Discovery service functionality
+    """
+
+    def __init__(self, ctx: SdkContext):
+        super().__init__(ctx)
+
+        self.list_network_datasources = ctx.syncify(self._list_network_datasources)
+        self.list_station_datasources = ctx.syncify(self._list_station_datasources)
+        self.list_session_datasources = ctx.syncify(self._list_session_datasources)
+        self.list_stream_datasources = ctx.syncify(self._list_stream_datasources)
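The L2 service layers a generic pager over the base methods: `functools.partial` pins the filter keyword arguments, and `_iter_pages` keeps awaiting the bound coroutine with an advancing `offset` until the server reports `has_next=False` or the caller's `limit` is reached, while `_load_all_pages` flattens the pages into one list. A self-contained sketch of that loop against an invented in-memory endpoint (not the SDK's own classes):

import asyncio
from functools import partial
from typing import AsyncIterator, Awaitable, Callable, Optional

from pydantic import BaseModel


class Page(BaseModel):
    has_next: bool
    offset: int
    limit: int
    items: list[str]
    total: Optional[int] = None


# Stand-in for one page of a server-side listing endpoint (230 fake items)
async def fake_list_endpoint(*, prefix: str, offset: int, limit: int) -> Page:
    total = 230
    items = [f"{prefix}-{i}" for i in range(offset, min(offset + total, total))][:limit]
    return Page(has_next=offset + limit < total, offset=offset, limit=limit, items=items)


async def iter_pages(fn: Callable[..., Awaitable[Page]], limit: int) -> AsyncIterator[Page]:
    # Same loop shape as _DiscoveryService._iter_pages: fetch fixed-size pages
    # until the server reports no more results or the caller's limit is hit.
    offset = 0
    page_size = 100
    has_next = True
    result_ct = 0
    while has_next and result_ct < limit:
        page_size = min(page_size, limit - result_ct)
        page = await fn(offset=offset, limit=page_size)
        yield page
        has_next = page.has_next
        offset += page_size
        result_ct += len(page.items)


async def load_all(fn: Callable[..., Awaitable[Page]], limit: int) -> list[str]:
    results: list[str] = []
    async for page in iter_pages(fn, limit):
        results.extend(page.items)
    return results


# partial() pins the filter kwargs, leaving offset/limit for the pager to supply
items = asyncio.run(load_all(partial(fake_list_endpoint, prefix="edid"), limit=150))
assert len(items) == 150

The same shape explains the `limit=1000` defaults above: the public methods cap the total number of items gathered across pages, not the size of any single request.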
earthscope_sdk/client/discovery/models.py
@@ -0,0 +1,144 @@
+from datetime import timedelta
+from enum import Enum
+from typing import Annotated, Any, Generic, Iterable, Optional, TypeVar, Union
+
+from pydantic import BaseModel, BeforeValidator, TypeAdapter
+
+from earthscope_sdk.util._itertools import to_set
+
+
+def _coerce_timedelta_ms(v: Union[int, float, timedelta, str]) -> timedelta:
+    if isinstance(v, (int, float)):
+        return timedelta(milliseconds=v)
+
+    # fallback to Pydantic's timedelta parser
+    return v
+
+
+P = TypeVar("P")
+
+
+class Page(BaseModel, Generic[P]):
+    has_next: bool
+    offset: int
+    limit: int
+    items: list[P]
+    total: Optional[int] = None
+
+
+class DatasourceBaseModel(BaseModel):
+    edid: str
+    names: dict[str, str]
+    description: Optional[str] = None
+
+    def to_arrow_columns(
+        self,
+        *,
+        fields: Union[list[str], str] = ["edid", "names"],
+        namespaces: Union[list[str], str] = [],
+    ) -> dict[str, Any]:
+        """
+        Convert the datasource model to a dictionary suitable for use in an Arrow table.
+        """
+        result = {}
+        namespaces = to_set(namespaces)
+        fields = to_set(fields)
+
+        # Add names to fields if namespaces are requested
+        if namespaces:
+            fields.add("names")
+
+        for field in fields:
+            if field != "names":
+                result[field] = getattr(self, field)
+                continue
+
+            # Explode names to own columns
+            if not namespaces:
+                names = {k.lower(): v for k, v in self.names.items()}
+            else:
+                names = {
+                    k_lower: v
+                    for k, v in self.names.items()
+                    if (k_lower := k.lower()) in namespaces
+                }
+
+            result.update(names)
+
+        return result
+
+
+class NetworkDatasource(DatasourceBaseModel): ...
+
+
+ListNetworkDatasourcesResult = TypeAdapter(Union[Page[str], Page[NetworkDatasource]])
+
+
+class StationDatasource(DatasourceBaseModel):
+    network_edids: Optional[list[str]] = None
+    networks: Optional[list[NetworkDatasource]] = None
+
+
+ListStationDatasourcesResult = TypeAdapter(Union[Page[str], Page[StationDatasource]])
+
+
+class _StationDatasourceMember(DatasourceBaseModel):
+    station_edid: Optional[str] = None
+    station: Optional[StationDatasource] = None
+
+    def to_arrow_columns(
+        self,
+        *,
+        fields: list[str] = ["edid", "names"],
+        namespaces: Optional[list[str]] = None,
+    ) -> dict[str, Any]:
+        result = super().to_arrow_columns(fields=fields, namespaces=namespaces)
+        if self.station:
+            parent_columns = self.station.to_arrow_columns(
+                fields=["names"],
+                namespaces=namespaces,
+            )
+            result.update(parent_columns)
+
+        return result
+
+
+class SessionDatasource(_StationDatasourceMember):
+    sample_interval: Annotated[timedelta, BeforeValidator(_coerce_timedelta_ms)]
+    """
+    Session sample interval.
+    """
+
+    roll: timedelta  # already in seconds
+    """
+    Session file roll cadence.
+    """
+
+
+ListSessionDatasourcesResult = TypeAdapter(Union[Page[str], Page[SessionDatasource]])
+
+
+class StreamType(Enum):
+    GNSS_RAW = "gnss_raw"
+    GNSS_PPP = "gnss_ppp"
+
+
+class StreamDatasource(_StationDatasourceMember):
+    stream_type: StreamType
+    facility: str
+    software: str
+    label: str
+    sample_interval: Annotated[timedelta, BeforeValidator(_coerce_timedelta_ms)]
+    """
+    Stream sample interval.
+    """
+
+    def to_arrow_columns(
+        self,
+        *,
+        fields: Iterable[str] = ["edid", "names", "facility", "software", "label"],
+    ) -> dict[str, Any]:
+        return super().to_arrow_columns(fields=fields)
+
+
+ListStreamDatasourcesResult = TypeAdapter(Union[Page[str], Page[StreamDatasource]])
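The `BeforeValidator` on `sample_interval` exists because the discovery API reports sample intervals as millisecond counts, whereas `roll` arrives in seconds and can rely on pydantic's default numeric-to-timedelta handling. A small standalone sketch of that coercion (the `Session` model below is illustrative, not the SDK class):

from datetime import timedelta
from typing import Annotated, Union

from pydantic import BaseModel, BeforeValidator


def _coerce_timedelta_ms(v: Union[int, float, timedelta, str]) -> timedelta:
    # Numbers are interpreted as milliseconds; anything else falls through
    # to pydantic's own timedelta parsing (e.g. ISO 8601 duration strings).
    if isinstance(v, (int, float)):
        return timedelta(milliseconds=v)
    return v


class Session(BaseModel):
    sample_interval: Annotated[timedelta, BeforeValidator(_coerce_timedelta_ms)]
    roll: timedelta  # server already sends seconds


s = Session.model_validate({"sample_interval": 30_000, "roll": 3600})
assert s.sample_interval == timedelta(seconds=30)
assert s.roll == timedelta(hours=1)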