tilebox-datasets 0.35.0__py3-none-any.whl → 0.36.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tilebox/datasets/aio/dataset.py +21 -31
- tilebox/datasets/aio/pagination.py +1 -31
- tilebox/datasets/data/datapoint.py +44 -1
- tilebox/datasets/datasetsv1/core_pb2.py +8 -8
- tilebox/datasets/datasetsv1/core_pb2.pyi +4 -2
- tilebox/datasets/sync/dataset.py +21 -28
- tilebox/datasets/sync/pagination.py +1 -31
- {tilebox_datasets-0.35.0.dist-info → tilebox_datasets-0.36.0.dist-info}/METADATA +1 -1
- {tilebox_datasets-0.35.0.dist-info → tilebox_datasets-0.36.0.dist-info}/RECORD +10 -10
- {tilebox_datasets-0.35.0.dist-info → tilebox_datasets-0.36.0.dist-info}/WHEEL +0 -0
tilebox/datasets/aio/dataset.py
CHANGED

@@ -7,17 +7,18 @@ from warnings import warn
 import xarray as xr
 from tqdm.auto import tqdm

+from _tilebox.grpc.aio.pagination import Pagination as PaginationProtocol
+from _tilebox.grpc.aio.pagination import paginated_request
 from _tilebox.grpc.aio.producer_consumer import async_producer_consumer
 from _tilebox.grpc.error import ArgumentError, NotFoundError
 from tilebox.datasets.aio.pagination import (
-    paginated_request,
     with_progressbar,
     with_time_progress_callback,
     with_time_progressbar,
 )
 from tilebox.datasets.data.collection import CollectionInfo
 from tilebox.datasets.data.data_access import QueryFilters, SpatialFilter, SpatialFilterLike
-from tilebox.datasets.data.datapoint import DatapointInterval, DatapointPage, QueryResultPage
+from tilebox.datasets.data.datapoint import DatapointInterval, DatapointIntervalLike, DatapointPage, QueryResultPage
 from tilebox.datasets.data.datasets import Dataset
 from tilebox.datasets.data.pagination import Pagination
 from tilebox.datasets.data.time_interval import TimeInterval, TimeIntervalLike

@@ -242,7 +243,7 @@ class CollectionClient:

     async def _find_interval(
         self,
-        datapoint_id_interval:
+        datapoint_id_interval: DatapointIntervalLike,
         end_inclusive: bool = True,
         *,
         skip_data: bool = False,

@@ -266,18 +267,13 @@ class CollectionClient:
             datapoint_id_interval, end_inclusive, skip_data=skip_data, show_progress=show_progress
         )

-        start_id, end_id = datapoint_id_interval
-
         filters = QueryFilters(
-            temporal_extent=DatapointInterval(
-                start_id=as_uuid(start_id),
-                end_id=as_uuid(end_id),
-                start_exclusive=False,
-                end_inclusive=end_inclusive,
-            )
+            temporal_extent=DatapointInterval.parse(datapoint_id_interval, end_inclusive=end_inclusive)
         )

-
+        async def request(page: PaginationProtocol) -> QueryResultPage:
+            query_page = Pagination(page.limit, page.starting_after)
+            return await self._dataset._service.query([self._collection.id], filters, skip_data, query_page)

         initial_page = Pagination()
         pages = paginated_request(request, initial_page)

@@ -288,27 +284,19 @@ class CollectionClient:

     async def _find_interval_legacy(
         self,
-        datapoint_id_interval:
+        datapoint_id_interval: DatapointIntervalLike,
         end_inclusive: bool = True,
         *,
         skip_data: bool = False,
         show_progress: bool = False,
     ) -> xr.Dataset:
-
+        datapoint_interval = DatapointInterval.parse(datapoint_id_interval, end_inclusive=end_inclusive)

-        datapoint_interval = DatapointInterval(
-            start_id=as_uuid(start_id),
-            end_id=as_uuid(end_id),
-            start_exclusive=False,
-            end_inclusive=end_inclusive,
-        )
-        request = partial(
-            self._dataset._service.get_dataset_for_datapoint_interval,
-            str(self._collection.id),
-            datapoint_interval,
-            skip_data,
-            False,
-        )
+        async def request(page: PaginationProtocol) -> DatapointPage:
+            query_page = Pagination(page.limit, page.starting_after)
+            return await self._dataset._service.get_dataset_for_datapoint_interval(
+                str(self._collection.id), datapoint_interval, skip_data, False, query_page
+            )

         initial_page = Pagination()
         pages = paginated_request(request, initial_page)

@@ -427,9 +415,10 @@ class CollectionClient:
         yield page

     async def _load_page(
-        self, filters: QueryFilters, skip_data: bool, page:
+        self, filters: QueryFilters, skip_data: bool, page: PaginationProtocol | None = None
     ) -> QueryResultPage:
-
+        query_page = Pagination(page.limit, page.starting_after) if page else Pagination()
+        return await self._dataset._service.query([self._collection.id], filters, skip_data, query_page)

     async def _load_legacy(
         self,

@@ -472,10 +461,11 @@ class CollectionClient:
         yield page

     async def _load_page_legacy(
-        self, time_interval: TimeInterval, skip_data: bool, skip_meta: bool, page:
+        self, time_interval: TimeInterval, skip_data: bool, skip_meta: bool, page: PaginationProtocol | None = None
     ) -> DatapointPage:
+        query_page = Pagination(page.limit, page.starting_after) if page else Pagination()
         return await self._dataset._service.get_dataset_for_time_interval(
-            str(self._collection.id), time_interval, skip_data, skip_meta,
+            str(self._collection.id), time_interval, skip_data, skip_meta, query_page
         )

     async def ingest(
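A recurring pattern in the changes above: each call site now defines a local request(page) closure that converts the transport-level page (PaginationProtocol) into the concrete Pagination type before calling the service, and hands that closure to the shared paginated_request helper. A minimal, runnable sketch of that pattern follows; Pagination, Page, fake_query and the helper body here are stand-ins invented for illustration, not the tilebox implementations.

import asyncio
from collections.abc import AsyncIterator, Awaitable, Callable
from dataclasses import dataclass, field
from typing import Protocol


class PaginationProtocol(Protocol):
    """Structural stand-in for the generic page type passed to request callables."""

    limit: int | None
    starting_after: str | None


@dataclass
class Pagination:
    """Stand-in for the concrete request page type."""

    limit: int | None = None
    starting_after: str | None = None


@dataclass
class Page:
    """Stand-in for a single page of query results."""

    items: list[int] = field(default_factory=list)
    next_page: Pagination = field(default_factory=Pagination)


async def paginated_request(
    request: Callable[[Pagination], Awaitable[Page]],
    initial_page: Pagination,
) -> AsyncIterator[Page]:
    # Follow next_page tokens until the service stops returning one.
    response = await request(initial_page)
    yield response
    while response.next_page.starting_after is not None:
        response = await request(response.next_page)
        yield response


async def fake_query(page: Pagination) -> Page:
    # Pretend the service returns two pages of results.
    if page.starting_after is None:
        return Page([1, 2, 3], Pagination(starting_after="page-2-token"))
    return Page([4, 5], Pagination())


async def main() -> None:
    async def request(page: PaginationProtocol) -> Page:
        # Bridge the generic page to the concrete Pagination type, as _find_interval does.
        return await fake_query(Pagination(page.limit, page.starting_after))

    async for page in paginated_request(request, Pagination()):
        print(page.items)


asyncio.run(main())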
tilebox/datasets/aio/pagination.py
CHANGED

@@ -1,5 +1,5 @@
 import time
-from collections.abc import AsyncIterator, Awaitable, Callable
+from collections.abc import AsyncIterator
 from datetime import datetime, timezone
 from typing import TypeVar

@@ -9,41 +9,11 @@ from tilebox.datasets.data import (
     TimeInterval,
 )
 from tilebox.datasets.data.datapoint import DatapointPage, QueryResultPage
-from tilebox.datasets.data.pagination import Pagination
 from tilebox.datasets.progress import ProgressCallback, TimeIntervalProgressBar

 ResultPage = TypeVar("ResultPage", bound=DatapointPage | QueryResultPage)


-async def paginated_request(
-    paging_request: Callable[[Pagination], Awaitable[ResultPage]],
-    initial_page: Pagination | None = None,
-) -> AsyncIterator[ResultPage]:
-    """Make a paginated request to a gRPC service endpoint.
-
-    The endpoint is expected to return a next_page field, which is used for subsequent requests. Once no such
-    next_page field is returned, the request is completed.
-
-    Args:
-        paging_request: A function that takes a page as input and returns a Datapoints object
-            Often this will be a functools.partial object that wraps a gRPC service endpoint
-            and only leaves the page argument remaining
-        initial_page: The initial page to request
-
-    Yields:
-        Datapoints: The individual pages of the response
-    """
-    if initial_page is None:
-        initial_page = Pagination()
-
-    response = await paging_request(initial_page)
-    yield response
-
-    while response.next_page.starting_after is not None:
-        response = await paging_request(response.next_page)
-        yield response
-
-
 async def with_progressbar(
     paginated_request: AsyncIterator[ResultPage],
     progress_description: str,
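After this change the module keeps only the progress-reporting wrappers; the pagination loop itself now lives in _tilebox.grpc.aio.pagination. The sketch below shows how such a wrapper can re-yield pages while driving a progress bar. The two-argument signature is a simplified assumption (the real with_progressbar continues with further parameters beyond progress_description); only the tqdm dependency is taken from the package itself.

import asyncio
from collections.abc import AsyncIterator

from tqdm.auto import tqdm  # tqdm is already a dependency of this package


async def with_progressbar(
    pages: AsyncIterator[list[int]], progress_description: str
) -> AsyncIterator[list[int]]:
    # Re-yield every page unchanged while ticking a progress bar once per page.
    with tqdm(desc=progress_description, unit=" pages") as progress:
        async for page in pages:
            progress.update(1)
            yield page


async def fake_pages() -> AsyncIterator[list[int]]:
    for items in ([1, 2], [3, 4], [5]):
        yield items


async def main() -> None:
    async for page in with_progressbar(fake_pages(), "loading datapoints"):
        print(page)


asyncio.run(main())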
tilebox/datasets/data/datapoint.py
CHANGED

@@ -1,6 +1,6 @@
 from dataclasses import dataclass, field
 from datetime import datetime
-from typing import Any
+from typing import Any, TypeAlias
 from uuid import UUID

 from tilebox.datasets.data.pagination import Pagination

@@ -9,6 +9,8 @@ from tilebox.datasets.data.uuid import uuid_message_to_uuid, uuid_to_uuid_message
 from tilebox.datasets.datasetsv1 import core_pb2, data_access_pb2, data_ingestion_pb2
 from tilebox.datasets.message_pool import get_message_type

+DatapointIntervalLike: TypeAlias = "tuple[str, str] | tuple[UUID, UUID] | DatapointInterval"
+

 @dataclass(frozen=True)
 class DatapointInterval:

@@ -34,6 +36,47 @@ class DatapointInterval:
             end_inclusive=self.end_inclusive,
         )

+    @classmethod
+    def parse(
+        cls, arg: DatapointIntervalLike, start_exclusive: bool = False, end_inclusive: bool = True
+    ) -> "DatapointInterval":
+        """
+        Convert a variety of input types to a DatapointInterval.
+
+        Supported input types:
+        - DatapointInterval: Return the input as is
+        - tuple of two UUIDs: Return a DatapointInterval with start and end id set to the given values
+        - tuple of two strings: Return a DatapointInterval with start and end id set to the UUIDs parsed from the given strings
+
+        Args:
+            arg: The input to convert
+            start_exclusive: Whether the start id is exclusive
+            end_inclusive: Whether the end id is inclusive
+
+        Returns:
+            DatapointInterval: The parsed ID interval
+        """
+        match arg:
+            case DatapointInterval(_, _, _, _):
+                return arg
+            case (UUID(), UUID()):
+                start, end = arg
+                return DatapointInterval(
+                    start_id=start,
+                    end_id=end,
+                    start_exclusive=start_exclusive,
+                    end_inclusive=end_inclusive,
+                )
+            case (str(), str()):
+                start, end = arg
+                return DatapointInterval(
+                    start_id=UUID(start),
+                    end_id=UUID(end),
+                    start_exclusive=start_exclusive,
+                    end_inclusive=end_inclusive,
+                )
+

 @dataclass(frozen=True)
 class AnyMessage:
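The new parse classmethod normalizes all three accepted input shapes into the same interval. A short usage sketch against the API added above (the UUID strings are made-up example values):

from uuid import UUID

from tilebox.datasets.data.datapoint import DatapointInterval

start = "018e1f45-0000-7000-8000-000000000000"  # example datapoint ids
end = "018e1f45-ffff-7000-8000-000000000000"

# strings, UUIDs, or an existing interval all parse to the same result
a = DatapointInterval.parse((start, end))
b = DatapointInterval.parse((UUID(start), UUID(end)))
c = DatapointInterval.parse(a)

assert a == b == c
assert a.start_exclusive is False and a.end_inclusive is True  # the defaults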
tilebox/datasets/datasetsv1/core_pb2.py
CHANGED

@@ -26,7 +26,7 @@ from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
 from tilebox.datasets.datasetsv1 import dataset_type_pb2 as datasets_dot_v1_dot_dataset__type__pb2


-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x16\x64\x61tasets/v1/core.proto\x12\x0b\x64\x61tasets.v1\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1e\x64\x61tasets/v1/dataset_type.proto\"\x18\n\x02ID\x12\x12\n\x04uuid\x18\x01 \x01(\x0cR\x04uuid\"\xce\x01\n\x0cTimeInterval\x12\x39\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tstartTime\x12\x35\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x07\x65ndTime\x12\'\n\x0fstart_exclusive\x18\x03 \x01(\x08R\x0estartExclusive\x12#\n\rend_inclusive\x18\x04 \x01(\x08R\x0c\x65ndInclusive\"\xb5\x01\n\x11\x44\x61tapointInterval\x12*\n\x08start_id\x18\x01 \x01(\x0b\x32\x0f.datasets.v1.IDR\x07startId\x12&\n\x06\x65nd_id\x18\x02 \x01(\x0b\x32\x0f.datasets.v1.IDR\x05\x65ndId\x12\'\n\x0fstart_exclusive\x18\x03 \x01(\x08R\x0estartExclusive\x12#\n\rend_inclusive\x18\x04 \x01(\x08R\x0c\x65ndInclusive\"v\n\x10LegacyPagination\x12\x19\n\x05limit\x18\x01 \x01(\x03H\x00R\x05limit\x88\x01\x01\x12*\n\x0estarting_after\x18\x02 \x01(\tH\x01R\rstartingAfter\x88\x01\x01\x42\x08\n\x06_limitB\x11\n\x0f_starting_after\"\x81\x01\n\nPagination\x12\x19\n\x05limit\x18\x01 \x01(\x03H\x00R\x05limit\x88\x01\x01\x12;\n\x0estarting_after\x18\x02 \x01(\x0b\x32\x0f.datasets.v1.IDH\x01R\rstartingAfter\x88\x01\x01\x42\x08\n\x06_limitB\x11\n\x0f_starting_after\"6\n\x03\x41ny\x12\x19\n\x08type_url\x18\x01 \x01(\tR\x07typeUrl\x12\x14\n\x05value\x18\x02 \x01(\x0cR\x05value\">\n\x0bRepeatedAny\x12\x19\n\x08type_url\x18\x01 \x01(\tR\x07typeUrl\x12\x14\n\x05value\x18\x02 \x03(\x0cR\x05value\"\xad\x01\n\x11\x44\x61tapointMetadata\x12\x39\n\nevent_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\teventTime\x12\x41\n\x0eingestion_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\ringestionTime\x12\x13\n\x02id\x18\x03 \x01(\tH\x00R\x02id\x88\x01\x01\x42\x05\n\x03_id\"n\n\nDatapoints\x12\x32\n\x04meta\x18\x01 \x03(\x0b\x32\x1e.datasets.v1.DatapointMetadataR\x04meta\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.datasets.v1.RepeatedAnyR\x04\x64\x61ta\"\xc0\x01\n\rDatapointPage\x12\x32\n\x04meta\x18\x01 \x03(\x0b\x32\x1e.datasets.v1.DatapointMetadataR\x04meta\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.datasets.v1.RepeatedAnyR\x04\x64\x61ta\x12?\n\tnext_page\x18\x03 \x01(\x0b\x32\x1d.datasets.v1.LegacyPaginationH\x00R\x08nextPage\x88\x01\x01\x42\x0c\n\n_next_page\"e\n\tDatapoint\x12\x32\n\x04meta\x18\x01 \x01(\x0b\x32\x1e.datasets.v1.DatapointMetadataR\x04meta\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x10.datasets.v1.AnyR\x04\x64\x61ta\"^\n\nCollection\x12\x1b\n\tlegacy_id\x18\x01 \x01(\tR\x08legacyId\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12\x1f\n\x02id\x18\x03 \x01(\x0b\x32\x0f.datasets.v1.IDR\x02id\"\xc3\x01\n\x0e\x43ollectionInfo\x12\x37\n\ncollection\x18\x01 \x01(\x0b\x32\x17.datasets.v1.CollectionR\ncollection\x12\x42\n\x0c\x61vailability\x18\x02 \x01(\x0b\x32\x19.datasets.v1.TimeIntervalH\x00R\x0c\x61vailability\x88\x01\x01\x12\x19\n\x05\x63ount\x18\x03 \x01(\x04H\x01R\x05\x63ount\x88\x01\x01\x42\x0f\n\r_availabilityB\x08\n\x06_count\"B\n\x0f\x43ollectionInfos\x12/\n\x04\x64\x61ta\x18\x01 \x03(\x0b\x32\x1b.datasets.v1.CollectionInfoR\x04\x64\x61ta\"\
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x16\x64\x61tasets/v1/core.proto\x12\x0b\x64\x61tasets.v1\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1e\x64\x61tasets/v1/dataset_type.proto\"\x18\n\x02ID\x12\x12\n\x04uuid\x18\x01 \x01(\x0cR\x04uuid\"\xce\x01\n\x0cTimeInterval\x12\x39\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tstartTime\x12\x35\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x07\x65ndTime\x12\'\n\x0fstart_exclusive\x18\x03 \x01(\x08R\x0estartExclusive\x12#\n\rend_inclusive\x18\x04 \x01(\x08R\x0c\x65ndInclusive\"\xb5\x01\n\x11\x44\x61tapointInterval\x12*\n\x08start_id\x18\x01 \x01(\x0b\x32\x0f.datasets.v1.IDR\x07startId\x12&\n\x06\x65nd_id\x18\x02 \x01(\x0b\x32\x0f.datasets.v1.IDR\x05\x65ndId\x12\'\n\x0fstart_exclusive\x18\x03 \x01(\x08R\x0estartExclusive\x12#\n\rend_inclusive\x18\x04 \x01(\x08R\x0c\x65ndInclusive\"v\n\x10LegacyPagination\x12\x19\n\x05limit\x18\x01 \x01(\x03H\x00R\x05limit\x88\x01\x01\x12*\n\x0estarting_after\x18\x02 \x01(\tH\x01R\rstartingAfter\x88\x01\x01\x42\x08\n\x06_limitB\x11\n\x0f_starting_after\"\x81\x01\n\nPagination\x12\x19\n\x05limit\x18\x01 \x01(\x03H\x00R\x05limit\x88\x01\x01\x12;\n\x0estarting_after\x18\x02 \x01(\x0b\x32\x0f.datasets.v1.IDH\x01R\rstartingAfter\x88\x01\x01\x42\x08\n\x06_limitB\x11\n\x0f_starting_after\"6\n\x03\x41ny\x12\x19\n\x08type_url\x18\x01 \x01(\tR\x07typeUrl\x12\x14\n\x05value\x18\x02 \x01(\x0cR\x05value\">\n\x0bRepeatedAny\x12\x19\n\x08type_url\x18\x01 \x01(\tR\x07typeUrl\x12\x14\n\x05value\x18\x02 \x03(\x0cR\x05value\"\xad\x01\n\x11\x44\x61tapointMetadata\x12\x39\n\nevent_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\teventTime\x12\x41\n\x0eingestion_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\ringestionTime\x12\x13\n\x02id\x18\x03 \x01(\tH\x00R\x02id\x88\x01\x01\x42\x05\n\x03_id\"n\n\nDatapoints\x12\x32\n\x04meta\x18\x01 \x03(\x0b\x32\x1e.datasets.v1.DatapointMetadataR\x04meta\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.datasets.v1.RepeatedAnyR\x04\x64\x61ta\"\xc0\x01\n\rDatapointPage\x12\x32\n\x04meta\x18\x01 \x03(\x0b\x32\x1e.datasets.v1.DatapointMetadataR\x04meta\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.datasets.v1.RepeatedAnyR\x04\x64\x61ta\x12?\n\tnext_page\x18\x03 \x01(\x0b\x32\x1d.datasets.v1.LegacyPaginationH\x00R\x08nextPage\x88\x01\x01\x42\x0c\n\n_next_page\"e\n\tDatapoint\x12\x32\n\x04meta\x18\x01 \x01(\x0b\x32\x1e.datasets.v1.DatapointMetadataR\x04meta\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x10.datasets.v1.AnyR\x04\x64\x61ta\"^\n\nCollection\x12\x1b\n\tlegacy_id\x18\x01 \x01(\tR\x08legacyId\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12\x1f\n\x02id\x18\x03 \x01(\x0b\x32\x0f.datasets.v1.IDR\x02id\"\xc3\x01\n\x0e\x43ollectionInfo\x12\x37\n\ncollection\x18\x01 \x01(\x0b\x32\x17.datasets.v1.CollectionR\ncollection\x12\x42\n\x0c\x61vailability\x18\x02 \x01(\x0b\x32\x19.datasets.v1.TimeIntervalH\x00R\x0c\x61vailability\x88\x01\x01\x12\x19\n\x05\x63ount\x18\x03 \x01(\x04H\x01R\x05\x63ount\x88\x01\x01\x42\x0f\n\r_availabilityB\x08\n\x06_count\"B\n\x0f\x43ollectionInfos\x12/\n\x04\x64\x61ta\x18\x01 \x03(\x0b\x32\x1b.datasets.v1.CollectionInfoR\x04\x64\x61ta\"\x96\x03\n\x07\x44\x61taset\x12\x1f\n\x02id\x18\x01 \x01(\x0b\x32\x0f.datasets.v1.IDR\x02id\x12*\n\x08group_id\x18\x02 \x01(\x0b\x32\x0f.datasets.v1.IDR\x07groupId\x12.\n\x04type\x18\x03 \x01(\x0b\x32\x1a.datasets.v1.AnnotatedTypeR\x04type\x12\x1b\n\tcode_name\x18\x04 \x01(\tR\x08\x63odeName\x12\x12\n\x04name\x18\x05 \x01(\tR\x04name\x12\x18\n\x07summary\x18\x06 \x01(\tR\x07summary\x12\x12\n\x04icon\x18\x07 \x01(\tR\x04icon\x12 \n\x0b\x64\x65scription\x18\x08 \x01(\tR\x0b\x64\x65scription\x12@\n\x0bpermissions\x18\n \x03(\x0e\x32\x1e.datasets.v1.DatasetPermissionR\x0bpermissions\x12\x37\n\nvisibility\x18\x0b \x01(\x0e\x32\x17.datasets.v1.VisibilityR\nvisibility\x12\x12\n\x04slug\x18\x0c \x01(\tR\x04slug\"\xa2\x01\n\x0c\x44\x61tasetGroup\x12\x1f\n\x02id\x18\x01 \x01(\x0b\x32\x0f.datasets.v1.IDR\x02id\x12,\n\tparent_id\x18\x02 \x01(\x0b\x32\x0f.datasets.v1.IDR\x08parentId\x12\x1b\n\tcode_name\x18\x03 \x01(\tR\x08\x63odeName\x12\x12\n\x04name\x18\x04 \x01(\tR\x04name\x12\x12\n\x04icon\x18\x05 \x01(\tR\x04icon*\x9b\x01\n\x11\x44\x61tasetPermission\x12\"\n\x1e\x44\x41TASET_PERMISSION_UNSPECIFIED\x10\x00\x12\"\n\x1e\x44\x41TASET_PERMISSION_ACCESS_DATA\x10\x01\x12!\n\x1d\x44\x41TASET_PERMISSION_WRITE_DATA\x10\x02\x12\x1b\n\x17\x44\x41TASET_PERMISSION_EDIT\x10\x03*v\n\nVisibility\x12\x1a\n\x16VISIBILITY_UNSPECIFIED\x10\x00\x12\x16\n\x12VISIBILITY_PRIVATE\x10\x01\x12\x1d\n\x19VISIBILITY_SHARED_WITH_ME\x10\x02\x12\x15\n\x11VISIBILITY_PUBLIC\x10\x03\x42\xab\x01\n\x0f\x63om.datasets.v1B\tCoreProtoP\x01Z@github.com/tilebox/tilebox-go/protogen/go/datasets/v1;datasetsv1\xa2\x02\x03\x44XX\xaa\x02\x0b\x44\x61tasets.V1\xca\x02\x0b\x44\x61tasets\\V1\xe2\x02\x17\x44\x61tasets\\V1\\GPBMetadata\xea\x02\x0c\x44\x61tasets::V1b\x06proto3')

 _globals = globals()
 _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)

@@ -34,10 +34,10 @@ _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'datasets.v1.core_pb2', _globals)
 if not _descriptor._USE_C_DESCRIPTORS:
   _globals['DESCRIPTOR']._loaded_options = None
   _globals['DESCRIPTOR']._serialized_options = b'\n\017com.datasets.v1B\tCoreProtoP\001Z@github.com/tilebox/tilebox-go/protogen/go/datasets/v1;datasetsv1\242\002\003DXX\252\002\013Datasets.V1\312\002\013Datasets\\V1\342\002\027Datasets\\V1\\GPBMetadata\352\002\014Datasets::V1'
-  _globals['_DATASETPERMISSION']._serialized_start=
-  _globals['_DATASETPERMISSION']._serialized_end=
-  _globals['_VISIBILITY']._serialized_start=
-  _globals['_VISIBILITY']._serialized_end=
+  _globals['_DATASETPERMISSION']._serialized_start=2418
+  _globals['_DATASETPERMISSION']._serialized_end=2573
+  _globals['_VISIBILITY']._serialized_start=2575
+  _globals['_VISIBILITY']._serialized_end=2693
   _globals['_ID']._serialized_start=104
   _globals['_ID']._serialized_end=128
   _globals['_TIMEINTERVAL']._serialized_start=131

@@ -67,7 +67,7 @@ if not _descriptor._USE_C_DESCRIPTORS:
   _globals['_COLLECTIONINFOS']._serialized_start=1775
   _globals['_COLLECTIONINFOS']._serialized_end=1841
   _globals['_DATASET']._serialized_start=1844
-  _globals['_DATASET']._serialized_end=
-  _globals['_DATASETGROUP']._serialized_start=
-  _globals['_DATASETGROUP']._serialized_end=
+  _globals['_DATASET']._serialized_end=2250
+  _globals['_DATASETGROUP']._serialized_start=2253
+  _globals['_DATASETGROUP']._serialized_end=2415
 # @@protoc_insertion_point(module_scope)
tilebox/datasets/datasetsv1/core_pb2.pyi
CHANGED

@@ -155,7 +155,7 @@ class CollectionInfos(_message.Message):
     def __init__(self, data: _Optional[_Iterable[_Union[CollectionInfo, _Mapping]]] = ...) -> None: ...

 class Dataset(_message.Message):
-    __slots__ = ("id", "group_id", "type", "code_name", "name", "summary", "icon", "description", "permissions", "visibility")
+    __slots__ = ("id", "group_id", "type", "code_name", "name", "summary", "icon", "description", "permissions", "visibility", "slug")
     ID_FIELD_NUMBER: _ClassVar[int]
     GROUP_ID_FIELD_NUMBER: _ClassVar[int]
     TYPE_FIELD_NUMBER: _ClassVar[int]

@@ -166,6 +166,7 @@ class Dataset(_message.Message):
     DESCRIPTION_FIELD_NUMBER: _ClassVar[int]
     PERMISSIONS_FIELD_NUMBER: _ClassVar[int]
     VISIBILITY_FIELD_NUMBER: _ClassVar[int]
+    SLUG_FIELD_NUMBER: _ClassVar[int]
     id: ID
     group_id: ID
     type: _dataset_type_pb2.AnnotatedType

@@ -176,7 +177,8 @@ class Dataset(_message.Message):
     description: str
     permissions: _containers.RepeatedScalarFieldContainer[DatasetPermission]
     visibility: Visibility
-    def __init__(self, id: _Optional[_Union[ID, _Mapping]] = ..., group_id: _Optional[_Union[ID, _Mapping]] = ..., type: _Optional[_Union[_dataset_type_pb2.AnnotatedType, _Mapping]] = ..., code_name: _Optional[str] = ..., name: _Optional[str] = ..., summary: _Optional[str] = ..., icon: _Optional[str] = ..., description: _Optional[str] = ..., permissions: _Optional[_Iterable[_Union[DatasetPermission, str]]] = ..., visibility: _Optional[_Union[Visibility, str]] = ...) -> None: ...
+    slug: str
+    def __init__(self, id: _Optional[_Union[ID, _Mapping]] = ..., group_id: _Optional[_Union[ID, _Mapping]] = ..., type: _Optional[_Union[_dataset_type_pb2.AnnotatedType, _Mapping]] = ..., code_name: _Optional[str] = ..., name: _Optional[str] = ..., summary: _Optional[str] = ..., icon: _Optional[str] = ..., description: _Optional[str] = ..., permissions: _Optional[_Iterable[_Union[DatasetPermission, str]]] = ..., visibility: _Optional[_Union[Visibility, str]] = ..., slug: _Optional[str] = ...) -> None: ...

 class DatasetGroup(_message.Message):
     __slots__ = ("id", "parent_id", "code_name", "name", "icon")
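The regenerated stub adds a slug field to the Dataset message, matching the descriptor change in core_pb2.py. Constructing the message with the new keyword, as permitted by the __init__ signature above (the field values here are made up):

from tilebox.datasets.datasetsv1 import core_pb2

# all fields are optional keywords on proto3 message constructors
dataset = core_pb2.Dataset(name="My Dataset", code_name="my_dataset", slug="my-org/my-dataset")
print(dataset.slug)  # "my-org/my-dataset"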
tilebox/datasets/sync/dataset.py
CHANGED

@@ -7,10 +7,12 @@ import xarray as xr
 from tqdm.auto import tqdm

 from _tilebox.grpc.error import ArgumentError, NotFoundError
+from _tilebox.grpc.pagination import Pagination as PaginationProtocol
+from _tilebox.grpc.pagination import paginated_request
 from _tilebox.grpc.producer_consumer import concurrent_producer_consumer
 from tilebox.datasets.data.collection import CollectionInfo
 from tilebox.datasets.data.data_access import QueryFilters, SpatialFilter, SpatialFilterLike
-from tilebox.datasets.data.datapoint import DatapointInterval, DatapointPage, QueryResultPage
+from tilebox.datasets.data.datapoint import DatapointInterval, DatapointIntervalLike, DatapointPage, QueryResultPage
 from tilebox.datasets.data.datasets import Dataset
 from tilebox.datasets.data.pagination import Pagination
 from tilebox.datasets.data.time_interval import TimeInterval, TimeIntervalLike

@@ -27,7 +29,6 @@ from tilebox.datasets.protobuf_conversion.to_protobuf import (
 )
 from tilebox.datasets.service import TileboxDatasetService
 from tilebox.datasets.sync.pagination import (
-    paginated_request,
     with_progressbar,
     with_time_progress_callback,
     with_time_progressbar,

@@ -232,7 +233,7 @@ class CollectionClient:

     def _find_interval(
         self,
-        datapoint_id_interval:
+        datapoint_id_interval: DatapointIntervalLike,
         end_inclusive: bool = True,
         *,
         skip_data: bool = False,

@@ -256,19 +257,13 @@ class CollectionClient:
             datapoint_id_interval, end_inclusive, skip_data=skip_data, show_progress=show_progress
         )

-        start_id, end_id = datapoint_id_interval
-
         filters = QueryFilters(
-            temporal_extent=DatapointInterval(
-                start_id=as_uuid(start_id),
-                end_id=as_uuid(end_id),
-                start_exclusive=False,
-                end_inclusive=end_inclusive,
-            )
+            temporal_extent=DatapointInterval.parse(datapoint_id_interval, end_inclusive=end_inclusive)
         )

-        def request(page:
-
+        def request(page: PaginationProtocol) -> QueryResultPage:
+            query_page = Pagination(page.limit, page.starting_after)
+            return self._dataset._service.query([self._collection.id], filters, skip_data, query_page).get()

         initial_page = Pagination()
         pages = paginated_request(request, initial_page)

@@ -279,7 +274,7 @@ class CollectionClient:

     def _find_interval_legacy(
         self,
-        datapoint_id_interval:
+        datapoint_id_interval: DatapointIntervalLike,
         end_inclusive: bool = True,
         *,
         skip_data: bool = False,

@@ -298,18 +293,12 @@ class CollectionClient:
         Returns:
             The datapoints in the given interval as an xarray dataset
         """
-
-
-        datapoint_interval = DatapointInterval(
-            start_id=as_uuid(start_id),
-            end_id=as_uuid(end_id),
-            start_exclusive=False,
-            end_inclusive=end_inclusive,
-        )
+        datapoint_interval = DatapointInterval.parse(datapoint_id_interval, end_inclusive=end_inclusive)

-        def request(page:
+        def request(page: PaginationProtocol) -> DatapointPage:
+            query_page = Pagination(page.limit, page.starting_after)
             return self._dataset._service.get_dataset_for_datapoint_interval(
-            str(self._collection.id), datapoint_interval, skip_data, False,
+                str(self._collection.id), datapoint_interval, skip_data, False, query_page
             ).get()

         initial_page = Pagination()

@@ -427,8 +416,11 @@ class CollectionClient:

         yield from pages

-    def _load_page(
-
+    def _load_page(
+        self, filters: QueryFilters, skip_data: bool, page: PaginationProtocol | None = None
+    ) -> QueryResultPage:
+        query_page = Pagination(page.limit, page.starting_after) if page else Pagination()
+        return self._dataset._service.query([self._collection.id], filters, skip_data, query_page).get()

     def _load_legacy(
         self,

@@ -470,10 +462,11 @@ class CollectionClient:
         yield from pages

     def _load_page_legacy(
-        self, time_interval: TimeInterval, skip_data: bool, skip_meta: bool, page:
+        self, time_interval: TimeInterval, skip_data: bool, skip_meta: bool, page: PaginationProtocol | None = None
     ) -> DatapointPage:
+        query_page = Pagination(page.limit, page.starting_after) if page else Pagination()
         return self._dataset._service.get_dataset_for_time_interval(
-            str(self._collection.id), time_interval, skip_data, skip_meta,
+            str(self._collection.id), time_interval, skip_data, skip_meta, query_page
         ).get()

     def ingest(
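Both _load_page variants above now accept an optional page and fall back to a default Pagination when none is supplied. That fallback expression can be isolated in a self-contained sketch; PaginationProtocol and Pagination here are stand-ins rather than the tilebox classes, and to_query_page is a hypothetical helper name used only for illustration.

from dataclasses import dataclass
from typing import Protocol


class PaginationProtocol(Protocol):
    limit: int | None
    starting_after: str | None


@dataclass
class Pagination:
    limit: int | None = None
    starting_after: str | None = None


def to_query_page(page: PaginationProtocol | None = None) -> Pagination:
    # An explicit page is converted to the concrete request type; no page
    # means an unbounded first page, as in _load_page above.
    return Pagination(page.limit, page.starting_after) if page else Pagination()


print(to_query_page())                       # Pagination(limit=None, starting_after=None)
print(to_query_page(Pagination(limit=100)))  # Pagination(limit=100, starting_after=None)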
tilebox/datasets/sync/pagination.py
CHANGED

@@ -1,5 +1,5 @@
 import time
-from collections.abc import Callable, Iterator
+from collections.abc import Iterator
 from datetime import datetime, timezone
 from typing import TypeVar

@@ -9,41 +9,11 @@ from tilebox.datasets.data import (
     TimeInterval,
 )
 from tilebox.datasets.data.datapoint import DatapointPage, QueryResultPage
-from tilebox.datasets.data.pagination import Pagination
 from tilebox.datasets.progress import ProgressCallback, TimeIntervalProgressBar

 ResultPage = TypeVar("ResultPage", bound=DatapointPage | QueryResultPage)


-def paginated_request(
-    paging_request: Callable[[Pagination], ResultPage],
-    initial_page: Pagination | None = None,
-) -> Iterator[ResultPage]:
-    """Make a paginated request to a gRPC service endpoint.
-
-    The endpoint is expected to return a next_page field, which is used for subsequent requests. Once no such
-    next_page field is returned, the request is completed.
-
-    Args:
-        paging_request: A function that takes a page as input and returns a Datapoints object
-            Often this will be a functools.partial object that wraps a gRPC service endpoint
-            and only leaves the page argument remaining
-        initial_page: The initial page to request
-
-    Yields:
-        Datapoints: The individual pages of the response
-    """
-    if initial_page is None:
-        initial_page = Pagination()
-
-    response = paging_request(initial_page)
-    yield response
-
-    while response.next_page.starting_after is not None:
-        response = paging_request(response.next_page)
-        yield response
-
-
 def with_progressbar(
     paginated_request: Iterator[ResultPage],
     progress_description: str,
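The synchronous paginated_request removed here keeps the same contract in its new home in _tilebox.grpc.pagination: request the initial page, then follow next_page.starting_after tokens until none remains. A runnable sketch of driving that loop against a fake service (all types below are stand-ins invented for illustration):

from collections.abc import Callable, Iterator
from dataclasses import dataclass, field


@dataclass
class Pagination:
    limit: int | None = None
    starting_after: str | None = None


@dataclass
class Page:
    items: list[int] = field(default_factory=list)
    next_page: Pagination = field(default_factory=Pagination)


def paginated_request(request: Callable[[Pagination], Page], initial_page: Pagination) -> Iterator[Page]:
    # Mirror of the removed helper: yield pages until no continuation token remains.
    response = request(initial_page)
    yield response
    while response.next_page.starting_after is not None:
        response = request(response.next_page)
        yield response


def fake_service(page: Pagination) -> Page:
    # Two pages of fake results, linked by a continuation token.
    if page.starting_after is None:
        return Page([1, 2], Pagination(starting_after="page-2-token"))
    return Page([3], Pagination())


for page in paginated_request(fake_service, Pagination()):
    print(page.items)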
{tilebox_datasets-0.35.0.dist-info → tilebox_datasets-0.36.0.dist-info}/RECORD
CHANGED

@@ -6,13 +6,13 @@ tilebox/datasets/progress.py,sha256=NC3YFnK4EqspCH0QPKsAHmkzoAl5KHedPWYvdS37Tp8,
 tilebox/datasets/service.py,sha256=ooF8QIUFaTBxmyMEPoHk2J7lwj5AzvGP_2dTWQkyi3g,10230
 tilebox/datasets/aio/__init__.py,sha256=0x_gddLgDsUCdl8MMZj4MPH1lp4HuOrExMHTjIFmM6s,405
 tilebox/datasets/aio/client.py,sha256=NLA59PvFVRJtAhfYbRNYOvzJ9KnL6Uptji7pBYDNRcc,2126
-tilebox/datasets/aio/dataset.py,sha256=
-tilebox/datasets/aio/pagination.py,sha256=
+tilebox/datasets/aio/dataset.py,sha256=5WW8UhI3JpDlCtVzvxOnSxF7htEDH2LkQEfDe65XuKk,27189
+tilebox/datasets/aio/pagination.py,sha256=C9MNcxnD6Hycsx0mLYXrMC76qehic_ca0R6Q_VpiCS8,6148
 tilebox/datasets/aio/timeseries.py,sha256=iQqIyh9TPL_gJz18GCxmtFJEwObR9S2rPsUohFYM8wQ,301
 tilebox/datasets/data/__init__.py,sha256=vgHSaWXMuC5NLlMwC_VhwKd4nvpOn1NIL4YzaD58goQ,401
 tilebox/datasets/data/collection.py,sha256=u9O8vjLYLoe888sZq4yPs3NFlGem5QfdE-wZQW4OK2w,2670
 tilebox/datasets/data/data_access.py,sha256=Z0PuUem9I2TOdAiHiaLrZ-pC1UiNhr9dES0PP3K85aE,5622
-tilebox/datasets/data/datapoint.py,sha256=
+tilebox/datasets/data/datapoint.py,sha256=h89tQ2Vs08dj7TlF_GSf9Od0ekOHbIOJY7px_rxiNgk,8631
 tilebox/datasets/data/datasets.py,sha256=AXFrTvDgytox3lGoO-fUG1Ob7eYFtig1p2DAQ4bWxts,5246
 tilebox/datasets/data/pagination.py,sha256=ouFO2pWf1BFsDVVY37QDxPN1TbYRSWXfQF1p6sHeZY0,1332
 tilebox/datasets/data/time_interval.py,sha256=CADKc0xxf5PYwF2VsQ2GVi4JapwBQ6-JXrVV8VHv-so,9737

@@ -21,8 +21,8 @@ tilebox/datasets/data/uuid.py,sha256=uc9P1wCowkdsxnJMhcDGmjWT2Py8Tiqh3LSsAMJngdo
 tilebox/datasets/datasetsv1/collections_pb2.py,sha256=8Ax9D85AnfchVjmuTlvE6WATy0DNr_37uDAwh8i3ZB0,3415
 tilebox/datasets/datasetsv1/collections_pb2.pyi,sha256=CcjNu9HSi2zyKcC3GWYPRBFeDTjfkd36SEO-WYIiBQQ,1749
 tilebox/datasets/datasetsv1/collections_pb2_grpc.py,sha256=oFR5lUwWEdxVA33cQIsjve92NUAU7VS9QmsbT5flMoo,7040
-tilebox/datasets/datasetsv1/core_pb2.py,sha256=
-tilebox/datasets/datasetsv1/core_pb2.pyi,sha256=
+tilebox/datasets/datasetsv1/core_pb2.py,sha256=k88K48SIWV4DtDNLGUd2q7qzQqXNbtSE1yc8tL3FbFA,8202
+tilebox/datasets/datasetsv1/core_pb2.pyi,sha256=n6gV3BiHrcCgmRHvrMmjxRyhulcVFdsGJsMcXSSJUJk,9150
 tilebox/datasets/datasetsv1/core_pb2_grpc.py,sha256=xYOs94SXiNYAlFodACnsXW5QovLsHY5tCk3p76RH5Zc,158
 tilebox/datasets/datasetsv1/data_access_pb2.py,sha256=fOgy5ZQb9ojGUbFr6_NFDowYQfegXUidi9ljdJeIyKY,6182
 tilebox/datasets/datasetsv1/data_access_pb2.pyi,sha256=maP2FKtta1nmQRyNLKvTPOlqA-LDEV-x8N6ZgMJQTak,5743

@@ -49,9 +49,9 @@ tilebox/datasets/protobuf_conversion/protobuf_xarray.py,sha256=xTHN69r6GrXtDlOPu
 tilebox/datasets/protobuf_conversion/to_protobuf.py,sha256=Q-RaVsLrRgSzQYI4nHjB3ppliyHG0jR9EB2sfT1riYg,7640
 tilebox/datasets/sync/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tilebox/datasets/sync/client.py,sha256=d6d2ABi9Lo9oIgpnq_1O0v4JqJrppaP4hCT5TWWXHyE,2101
-tilebox/datasets/sync/dataset.py,sha256=
-tilebox/datasets/sync/pagination.py,sha256=
+tilebox/datasets/sync/dataset.py,sha256=88FhFRP4tUq0cxeaWGouz8v3fbeKLuOdhx6NyWvYzbQ,27399
+tilebox/datasets/sync/pagination.py,sha256=aWE9iBfPd6Gj-wT2Pfg2Z-owRe53fFpuG5HN-pporeI,6062
 tilebox/datasets/sync/timeseries.py,sha256=4nTP8_tmv6V7PXTUNzzlbzlxv0OXo_IqVLtSdJpUOW0,303
-tilebox_datasets-0.
-tilebox_datasets-0.
-tilebox_datasets-0.
+tilebox_datasets-0.36.0.dist-info/METADATA,sha256=LSIcvqFmQGwUAURNAZIAkRBJxOKJnz34kMbSTBkn8JM,4048
+tilebox_datasets-0.36.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+tilebox_datasets-0.36.0.dist-info/RECORD,,

{tilebox_datasets-0.35.0.dist-info → tilebox_datasets-0.36.0.dist-info}/WHEEL
File without changes