stac-fastapi-opensearch 5.0.0a1__py3-none-any.whl → 6.0.0__py3-none-any.whl

This diff shows the content of publicly available package versions as released to a supported public registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the registry.
stac_fastapi/opensearch/app.py
@@ -31,6 +31,7 @@ from stac_fastapi.extensions.core import (
     TokenPaginationExtension,
     TransactionExtension,
 )
+from stac_fastapi.extensions.core.filter import FilterConformanceClasses
 from stac_fastapi.extensions.third_party import BulkTransactionExtension
 from stac_fastapi.opensearch.config import OpensearchSettings
 from stac_fastapi.opensearch.database_logic import (
@@ -56,7 +57,7 @@ filter_extension = FilterExtension(
     client=EsAsyncBaseFiltersClient(database=database_logic)
 )
 filter_extension.conformance_classes.append(
-    "http://www.opengis.net/spec/cql2/1.0/conf/advanced-comparison-operators"
+    FilterConformanceClasses.ADVANCED_COMPARISON_OPERATORS
 )

 aggregation_extension = AggregationExtension(
@@ -104,22 +105,24 @@ database_logic.extensions = [type(ext).__name__ for ext in extensions]

 post_request_model = create_post_request_model(search_extensions)

-api = StacApi(
-    title=os.getenv("STAC_FASTAPI_TITLE", "stac-fastapi-opensearch"),
-    description=os.getenv("STAC_FASTAPI_DESCRIPTION", "stac-fastapi-opensearch"),
-    api_version=os.getenv("STAC_FASTAPI_VERSION", "5.0.0a1"),
-    settings=settings,
-    extensions=extensions,
-    client=CoreClient(
+app_config = {
+    "title": os.getenv("STAC_FASTAPI_TITLE", "stac-fastapi-opensearch"),
+    "description": os.getenv("STAC_FASTAPI_DESCRIPTION", "stac-fastapi-opensearch"),
+    "api_version": os.getenv("STAC_FASTAPI_VERSION", "6.0.0"),
+    "settings": settings,
+    "extensions": extensions,
+    "client": CoreClient(
         database=database_logic,
         session=session,
         post_request_model=post_request_model,
         landing_page_id=os.getenv("STAC_FASTAPI_LANDING_PAGE_ID", "stac-fastapi"),
     ),
-    search_get_request_model=create_get_request_model(search_extensions),
-    search_post_request_model=post_request_model,
-    route_dependencies=get_route_dependencies(),
-)
+    "search_get_request_model": create_get_request_model(search_extensions),
+    "search_post_request_model": post_request_model,
+    "route_dependencies": get_route_dependencies(),
+}
+
+api = StacApi(**app_config)


 @asynccontextmanager
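
The app.py change above replaces direct keyword arguments with an app_config dict that is then unpacked into StacApi. One practical effect is that the configuration can be inspected or adjusted before the API object is built; a minimal sketch of that pattern (the title override is hypothetical, not part of the package):

```python
# Hypothetical tweak applied before constructing the API object;
# app_config is the dict assembled in app.py above.
app_config["title"] = "my-custom-stac-api"

api = StacApi(**app_config)  # the dict is unpacked as keyword arguments
```
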
stac_fastapi/opensearch/database_logic.py
@@ -1,13 +1,14 @@
 """Database logic."""

 import asyncio
-import json
 import logging
 from base64 import urlsafe_b64decode, urlsafe_b64encode
 from copy import deepcopy
 from typing import Any, Dict, Iterable, List, Optional, Tuple, Type, Union

 import attr
+import orjson
+from fastapi import HTTPException
 from opensearchpy import exceptions, helpers
 from opensearchpy.helpers.query import Q
 from opensearchpy.helpers.search import Search
@@ -16,6 +17,11 @@ from starlette.requests import Request
 from stac_fastapi.core.base_database_logic import BaseDatabaseLogic
 from stac_fastapi.core.serializers import CollectionSerializer, ItemSerializer
 from stac_fastapi.core.utilities import MAX_LIMIT, bbox2polygon
+from stac_fastapi.extensions.core.transaction.request import (
+    PartialCollection,
+    PartialItem,
+    PatchOperation,
+)
 from stac_fastapi.opensearch.config import (
     AsyncOpensearchSettings as AsyncSearchSettings,
 )
@@ -36,6 +42,10 @@ from stac_fastapi.sfeos_helpers.database import (
     return_date,
     validate_refresh,
 )
+from stac_fastapi.sfeos_helpers.database.utils import (
+    merge_to_operations,
+    operations_to_script,
+)
 from stac_fastapi.sfeos_helpers.mappings import (
     AGGREGATION_MAPPING,
     COLLECTIONS_INDEX,
@@ -48,6 +58,7 @@ from stac_fastapi.sfeos_helpers.mappings import (
     Geometry,
 )
 from stac_fastapi.types.errors import ConflictError, NotFoundError
+from stac_fastapi.types.links import resolve_links
 from stac_fastapi.types.rfc3339 import DateTimeType
 from stac_fastapi.types.stac import Collection, Item

@@ -282,121 +293,97 @@ class DatabaseLogic(BaseDatabaseLogic):
     @staticmethod
     def apply_datetime_filter(
         search: Search, interval: Optional[Union[DateTimeType, str]]
-    ):
-        """Apply a filter to search based on datetime field, start_datetime, and end_datetime fields.
+    ) -> Search:
+        """Apply a filter to search on datetime, start_datetime, and end_datetime fields.

         Args:
-            search (Search): The search object to filter.
-            interval: Optional[Union[DateTimeType, str]]
+            search: The search object to filter.
+            interval: Optional datetime interval to filter by. Can be:
+                - A single datetime string (e.g., "2023-01-01T12:00:00")
+                - A datetime range string (e.g., "2023-01-01/2023-12-31")
+                - A datetime object
+                - A tuple of (start_datetime, end_datetime)

         Returns:
-            Search: The filtered search object.
+            The filtered search object.
         """
+        if not interval:
+            return search
+
         should = []
-        datetime_search = return_date(interval)
+        try:
+            datetime_search = return_date(interval)
+        except (ValueError, TypeError) as e:
+            # Handle invalid interval formats if return_date fails
+            logger.error(f"Invalid interval format: {interval}, error: {e}")
+            return search

-        # If the request is a single datetime return
-        # items with datetimes equal to the requested datetime OR
-        # the requested datetime is between their start and end datetimes
         if "eq" in datetime_search:
-            should.extend(
-                [
-                    Q(
-                        "bool",
-                        filter=[
-                            Q(
-                                "term",
-                                properties__datetime=datetime_search["eq"],
-                            ),
-                        ],
-                    ),
-                    Q(
-                        "bool",
-                        filter=[
-                            Q(
-                                "range",
-                                properties__start_datetime={
-                                    "lte": datetime_search["eq"],
-                                },
-                            ),
-                            Q(
-                                "range",
-                                properties__end_datetime={
-                                    "gte": datetime_search["eq"],
-                                },
-                            ),
-                        ],
-                    ),
-                ]
-            )
-
-        # If the request is a date range return
-        # items with datetimes within the requested date range OR
-        # their startdatetime ithin the requested date range OR
-        # their enddatetime ithin the requested date range OR
-        # the requested daterange within their start and end datetimes
+            # For exact matches, include:
+            # 1. Items with matching exact datetime
+            # 2. Items with datetime:null where the time falls within their range
+            should = [
+                Q(
+                    "bool",
+                    filter=[
+                        Q("exists", field="properties.datetime"),
+                        Q("term", **{"properties__datetime": datetime_search["eq"]}),
+                    ],
+                ),
+                Q(
+                    "bool",
+                    must_not=[Q("exists", field="properties.datetime")],
+                    filter=[
+                        Q("exists", field="properties.start_datetime"),
+                        Q("exists", field="properties.end_datetime"),
+                        Q(
+                            "range",
+                            properties__start_datetime={"lte": datetime_search["eq"]},
+                        ),
+                        Q(
+                            "range",
+                            properties__end_datetime={"gte": datetime_search["eq"]},
+                        ),
+                    ],
+                ),
+            ]
         else:
-            should.extend(
-                [
-                    Q(
-                        "bool",
-                        filter=[
-                            Q(
-                                "range",
-                                properties__datetime={
-                                    "gte": datetime_search["gte"],
-                                    "lte": datetime_search["lte"],
-                                },
-                            ),
-                        ],
-                    ),
-                    Q(
-                        "bool",
-                        filter=[
-                            Q(
-                                "range",
-                                properties__start_datetime={
-                                    "gte": datetime_search["gte"],
-                                    "lte": datetime_search["lte"],
-                                },
-                            ),
-                        ],
-                    ),
-                    Q(
-                        "bool",
-                        filter=[
-                            Q(
-                                "range",
-                                properties__end_datetime={
-                                    "gte": datetime_search["gte"],
-                                    "lte": datetime_search["lte"],
-                                },
-                            ),
-                        ],
-                    ),
-                    Q(
-                        "bool",
-                        filter=[
-                            Q(
-                                "range",
-                                properties__start_datetime={
-                                    "lte": datetime_search["gte"]
-                                },
-                            ),
-                            Q(
-                                "range",
-                                properties__end_datetime={
-                                    "gte": datetime_search["lte"]
-                                },
-                            ),
-                        ],
-                    ),
-                ]
-            )
-
-        search = search.query(Q("bool", filter=[Q("bool", should=should)]))
-
-        return search
+            # For date ranges, include:
+            # 1. Items with datetime in the range
+            # 2. Items with datetime:null that overlap the search range
+            should = [
+                Q(
+                    "bool",
+                    filter=[
+                        Q("exists", field="properties.datetime"),
+                        Q(
+                            "range",
+                            properties__datetime={
+                                "gte": datetime_search["gte"],
+                                "lte": datetime_search["lte"],
+                            },
+                        ),
+                    ],
+                ),
+                Q(
+                    "bool",
+                    must_not=[Q("exists", field="properties.datetime")],
+                    filter=[
+                        Q("exists", field="properties.start_datetime"),
+                        Q("exists", field="properties.end_datetime"),
+                        Q(
+                            "range",
+                            properties__start_datetime={"lte": datetime_search["lte"]},
+                        ),
+                        Q(
+                            "range",
+                            properties__end_datetime={"gte": datetime_search["gte"]},
+                        ),
+                    ],
+                ),
+            ]
+
+        return search.query(Q("bool", should=should, minimum_should_match=1))

     @staticmethod
     def apply_bbox_filter(search: Search, bbox: List):
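
The rewritten apply_datetime_filter returns early on an empty interval, logs and ignores unparseable intervals, and matches either items with a concrete properties.datetime or items with a null datetime whose start_datetime/end_datetime range covers the query. A usage sketch under those assumptions (interval values are illustrative):

```python
from opensearchpy.helpers.search import Search

from stac_fastapi.opensearch.database_logic import DatabaseLogic

# Range query: items whose datetime falls inside 2023, plus items with a
# null datetime whose [start_datetime, end_datetime] overlaps the range.
search = DatabaseLogic.apply_datetime_filter(
    Search(), "2023-01-01T00:00:00Z/2023-12-31T23:59:59Z"
)

# Single instant: exact datetime match, or a null datetime whose
# start/end range contains the instant.
search = DatabaseLogic.apply_datetime_filter(Search(), "2023-06-01T12:00:00Z")
```
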
@@ -551,7 +538,7 @@ class DatabaseLogic(BaseDatabaseLogic):
         search_after = None

         if token:
-            search_after = json.loads(urlsafe_b64decode(token).decode())
+            search_after = orjson.loads(urlsafe_b64decode(token))
         if search_after:
             search_body["search_after"] = search_after

@@ -591,7 +578,7 @@ class DatabaseLogic(BaseDatabaseLogic):
         next_token = None
         if len(hits) > limit and limit < max_result_window:
             if hits and (sort_array := hits[limit - 1].get("sort")):
-                next_token = urlsafe_b64encode(json.dumps(sort_array).encode()).decode()
+                next_token = urlsafe_b64encode(orjson.dumps(sort_array)).decode()

         matched = (
             es_response["hits"]["total"]["value"]
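
The pagination-token handling above swaps json for orjson while keeping the urlsafe-base64 token format: orjson.dumps already returns bytes (so no .encode() is needed) and orjson.loads accepts the decoded bytes directly. A round-trip sketch with an illustrative sort array:

```python
from base64 import urlsafe_b64decode, urlsafe_b64encode

import orjson

sort_array = [1672531200000, "collection-a_item-123"]  # illustrative search_after values

# Encoding, as in the next_token construction above.
token = urlsafe_b64encode(orjson.dumps(sort_array)).decode()

# Decoding, as in the token handling shown above.
assert orjson.loads(urlsafe_b64decode(token)) == sort_array
```
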
@@ -852,6 +839,135 @@ class DatabaseLogic(BaseDatabaseLogic):
             refresh=refresh,
         )

+    async def merge_patch_item(
+        self,
+        collection_id: str,
+        item_id: str,
+        item: PartialItem,
+        base_url: str,
+        refresh: bool = True,
+    ) -> Item:
+        """Database logic for merge patching an item following RF7396.
+
+        Args:
+            collection_id(str): Collection that item belongs to.
+            item_id(str): Id of item to be patched.
+            item (PartialItem): The partial item to be updated.
+            base_url: (str): The base URL used for constructing URLs for the item.
+            refresh (bool, optional): Refresh the index after performing the operation. Defaults to True.
+
+        Returns:
+            patched item.
+        """
+        operations = merge_to_operations(item.model_dump())
+
+        return await self.json_patch_item(
+            collection_id=collection_id,
+            item_id=item_id,
+            operations=operations,
+            base_url=base_url,
+            refresh=refresh,
+        )
+
+    async def json_patch_item(
+        self,
+        collection_id: str,
+        item_id: str,
+        operations: List[PatchOperation],
+        base_url: str,
+        refresh: bool = True,
+    ) -> Item:
+        """Database logic for json patching an item following RF6902.
+
+        Args:
+            collection_id(str): Collection that item belongs to.
+            item_id(str): Id of item to be patched.
+            operations (list): List of operations to run.
+            base_url (str): The base URL used for constructing URLs for the item.
+            refresh (bool, optional): Refresh the index after performing the operation. Defaults to True.
+
+        Returns:
+            patched item.
+        """
+        new_item_id = None
+        new_collection_id = None
+        script_operations = []
+
+        for operation in operations:
+            if operation.path in ["collection", "id"] and operation.op in [
+                "add",
+                "replace",
+            ]:
+
+                if operation.path == "collection" and collection_id != operation.value:
+                    await self.check_collection_exists(collection_id=operation.value)
+                    new_collection_id = operation.value
+
+                if operation.path == "id" and item_id != operation.value:
+                    new_item_id = operation.value
+
+            else:
+                script_operations.append(operation)
+
+        script = operations_to_script(script_operations)
+
+        try:
+            await self.client.update(
+                index=index_alias_by_collection_id(collection_id),
+                id=mk_item_id(item_id, collection_id),
+                body={"script": script},
+                refresh=True,
+            )
+
+        except exceptions.RequestError as exc:
+            raise HTTPException(
+                status_code=400, detail=exc.info["error"]["caused_by"]
+            ) from exc
+
+        item = await self.get_one_item(collection_id, item_id)
+
+        if new_collection_id:
+            await self.client.reindex(
+                body={
+                    "dest": {"index": f"{ITEMS_INDEX_PREFIX}{new_collection_id}"},
+                    "source": {
+                        "index": f"{ITEMS_INDEX_PREFIX}{collection_id}",
+                        "query": {"term": {"id": {"value": item_id}}},
+                    },
+                    "script": {
+                        "lang": "painless",
+                        "source": (
+                            f"""ctx._id = ctx._id.replace('{collection_id}', '{new_collection_id}');"""
+                            f"""ctx._source.collection = '{new_collection_id}';"""
+                        ),
+                    },
+                },
+                wait_for_completion=True,
+                refresh=True,
+            )
+
+            await self.delete_item(
+                item_id=item_id,
+                collection_id=collection_id,
+                refresh=refresh,
+            )
+
+            item["collection"] = new_collection_id
+            collection_id = new_collection_id
+
+        if new_item_id:
+            item["id"] = new_item_id
+            item = await self.async_prep_create_item(item=item, base_url=base_url)
+            await self.create_item(item=item, refresh=True)
+
+            await self.delete_item(
+                item_id=item_id,
+                collection_id=collection_id,
+                refresh=refresh,
+            )
+
+        return item
+
     async def delete_item(self, item_id: str, collection_id: str, **kwargs: Any):
         """Delete a single item from the database.

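
merge_patch_item is a thin wrapper over json_patch_item: the RFC 7396 merge body is converted into a list of patch operations, and json_patch_item then compiles the non-id/collection operations into a painless script for client.update (id/collection changes instead go through the reindex and delete path shown above). A minimal sketch of that conversion step, using an illustrative partial body:

```python
from stac_fastapi.sfeos_helpers.database.utils import (
    merge_to_operations,
    operations_to_script,
)

# Illustrative RFC 7396 merge-patch body: only properties.gsd is changed.
partial_item = {"properties": {"gsd": 10.0}}

# Step 1 (merge_patch_item): turn the merge body into patch operations.
operations = merge_to_operations(partial_item)

# Step 2 (json_patch_item): compile the operations into the painless script
# passed to self.client.update(..., body={"script": script}).
script = operations_to_script(operations)
```
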
@@ -904,6 +1020,37 @@ class DatabaseLogic(BaseDatabaseLogic):
         except exceptions.NotFoundError:
             raise NotFoundError(f"Mapping for index {index_name} not found")

+    async def get_items_unique_values(
+        self, collection_id: str, field_names: Iterable[str], *, limit: int = 100
+    ) -> Dict[str, List[str]]:
+        """Get the unique values for the given fields in the collection."""
+        limit_plus_one = limit + 1
+        index_name = index_alias_by_collection_id(collection_id)
+
+        query = await self.client.search(
+            index=index_name,
+            body={
+                "size": 0,
+                "aggs": {
+                    field: {"terms": {"field": field, "size": limit_plus_one}}
+                    for field in field_names
+                },
+            },
+        )
+
+        result: Dict[str, List[str]] = {}
+        for field, agg in query["aggregations"].items():
+            if len(agg["buckets"]) > limit:
+                logger.warning(
+                    "Skipping enum field %s: exceeds limit of %d unique values. "
+                    "Consider excluding this field from enumeration or increase the limit.",
+                    field,
+                    limit,
+                )
+                continue
+            result[field] = [bucket["key"] for bucket in agg["buckets"]]
+        return result
+
     async def create_collection(self, collection: Collection, **kwargs: Any):
         """Create a single collection in the database.

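
get_items_unique_values gathers the distinct values for several fields in a single size-0 search, asking each terms aggregation for limit + 1 buckets so that fields with more than limit distinct values can be detected and skipped with a warning. For illustration, the request body it would build for two assumed field names with the default limit of 100:

```python
# Aggregation body for field_names = ["collection", "properties.platform"]
# and limit = 100; "size": 101 lets the overflow check trigger.
body = {
    "size": 0,
    "aggs": {
        "collection": {"terms": {"field": "collection", "size": 101}},
        "properties.platform": {
            "terms": {"field": "properties.platform", "size": 101}
        },
    },
}
```
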
@@ -1028,6 +1175,95 @@ class DatabaseLogic(BaseDatabaseLogic):
             refresh=refresh,
         )

+    async def merge_patch_collection(
+        self,
+        collection_id: str,
+        collection: PartialCollection,
+        base_url: str,
+        refresh: bool = True,
+    ) -> Collection:
+        """Database logic for merge patching a collection following RF7396.
+
+        Args:
+            collection_id(str): Id of collection to be patched.
+            collection (PartialCollection): The partial collection to be updated.
+            base_url: (str): The base URL used for constructing links.
+            refresh (bool, optional): Refresh the index after performing the operation. Defaults to True.
+
+
+        Returns:
+            patched collection.
+        """
+        operations = merge_to_operations(collection.model_dump())
+
+        return await self.json_patch_collection(
+            collection_id=collection_id,
+            operations=operations,
+            base_url=base_url,
+            refresh=refresh,
+        )
+
+    async def json_patch_collection(
+        self,
+        collection_id: str,
+        operations: List[PatchOperation],
+        base_url: str,
+        refresh: bool = True,
+    ) -> Collection:
+        """Database logic for json patching a collection following RF6902.
+
+        Args:
+            collection_id(str): Id of collection to be patched.
+            operations (list): List of operations to run.
+            base_url (str): The base URL used for constructing links.
+            refresh (bool, optional): Refresh the index after performing the operation. Defaults to True.
+
+        Returns:
+            patched collection.
+        """
+        new_collection_id = None
+        script_operations = []
+
+        for operation in operations:
+            if (
+                operation.op in ["add", "replace"]
+                and operation.path == "collection"
+                and collection_id != operation.value
+            ):
+                new_collection_id = operation.value
+
+            else:
+                script_operations.append(operation)
+
+        script = operations_to_script(script_operations)
+
+        try:
+            await self.client.update(
+                index=COLLECTIONS_INDEX,
+                id=collection_id,
+                body={"script": script},
+                refresh=True,
+            )
+
+        except exceptions.RequestError as exc:
+            raise HTTPException(
+                status_code=400, detail=exc.info["error"]["caused_by"]
+            ) from exc
+
+        collection = await self.find_collection(collection_id)
+
+        if new_collection_id:
+            collection["id"] = new_collection_id
+            collection["links"] = resolve_links([], base_url)
+
+            await self.update_collection(
+                collection_id=collection_id,
+                collection=collection,
+                refresh=refresh,
+            )
+
+        return collection
+
     async def delete_collection(self, collection_id: str, **kwargs: Any):
         """Delete a collection from the database.

stac_fastapi/opensearch/version.py
@@ -1,2 +1,2 @@
 """library version."""
-__version__ = "5.0.0a1"
+__version__ = "6.0.0"
stac_fastapi_opensearch-6.0.0.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: stac-fastapi-opensearch
-Version: 5.0.0a1
+Version: 6.0.0
 Summary: Opensearch stac-fastapi backend.
 Home-page: https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch
 License: MIT
@@ -15,8 +15,8 @@ Classifier: Programming Language :: Python :: 3.13
 Classifier: License :: OSI Approved :: MIT License
 Requires-Python: >=3.9
 Description-Content-Type: text/markdown
-Requires-Dist: stac-fastapi-core==5.0.0a1
-Requires-Dist: sfeos-helpers==5.0.0a1
+Requires-Dist: stac-fastapi-core==6.0.0
+Requires-Dist: sfeos-helpers==6.0.0
 Requires-Dist: opensearch-py~=2.8.0
 Requires-Dist: opensearch-py[async]~=2.8.0
 Requires-Dist: uvicorn~=0.23.0
@@ -26,7 +26,6 @@ Requires-Dist: pytest~=7.0.0; extra == "dev"
 Requires-Dist: pytest-cov~=4.0.0; extra == "dev"
 Requires-Dist: pytest-asyncio~=0.21.0; extra == "dev"
 Requires-Dist: pre-commit~=3.0.0; extra == "dev"
-Requires-Dist: requests<3.0.0,>=2.32.0; extra == "dev"
 Requires-Dist: ciso8601~=2.3.0; extra == "dev"
 Requires-Dist: httpx<0.28.0,>=0.24.0; extra == "dev"
 Provides-Extra: docs
stac_fastapi_opensearch-6.0.0.dist-info/RECORD (added)
@@ -0,0 +1,10 @@
+stac_fastapi/opensearch/__init__.py,sha256=iJWMUgn7mUvmuPQSO_FlyhJ5eDdbbfmGv1qnFOX5-qk,28
+stac_fastapi/opensearch/app.py,sha256=IX_SVF8S4HxiFYG3LL4Z_Ub37lKtbuoyAiWEZCuopSw,5168
+stac_fastapi/opensearch/config.py,sha256=HfaUvcQM2kGNjypdUYFUcrMmBUPu3pG31mvNRESeR_A,5061
+stac_fastapi/opensearch/database_logic.py,sha256=YSAiTv1t4Pm_RRAkc74J-_-FvPQCXmkgJ17VYQ4JpB8,54392
+stac_fastapi/opensearch/version.py,sha256=Fo5UFEQVxJZ3nywa3IY-enu5UQBE0X45nrQaRBe8c9o,45
+stac_fastapi_opensearch-6.0.0.dist-info/METADATA,sha256=zArSUfZvRiCTWQNTmcu6fZqRho0yYvlsvUzOENeuZ4Y,31895
+stac_fastapi_opensearch-6.0.0.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+stac_fastapi_opensearch-6.0.0.dist-info/entry_points.txt,sha256=zjZ0Xsr9BUNJqMkdPpl6zEIUykv1uFdJtNELFRChp0w,76
+stac_fastapi_opensearch-6.0.0.dist-info/top_level.txt,sha256=vqn-D9-HsRPTTxy0Vk_KkDmTiMES4owwBQ3ydSZYb2s,13
+stac_fastapi_opensearch-6.0.0.dist-info/RECORD,,
@@ -1,10 +0,0 @@
1
- stac_fastapi/opensearch/__init__.py,sha256=iJWMUgn7mUvmuPQSO_FlyhJ5eDdbbfmGv1qnFOX5-qk,28
2
- stac_fastapi/opensearch/app.py,sha256=LVXmhC2AZhiEhlGYWG29QBGfFP4y7PTYG0Pn0vKiISg,5060
3
- stac_fastapi/opensearch/config.py,sha256=HfaUvcQM2kGNjypdUYFUcrMmBUPu3pG31mvNRESeR_A,5061
4
- stac_fastapi/opensearch/database_logic.py,sha256=hhUMCzhpUMwH8HIIQIXFQsAZmbpt3_VDFWVBrigIXTM,46304
5
- stac_fastapi/opensearch/version.py,sha256=MQ4i7PSJnhrL3abG-m_vOwI3ksvRoHcs0eiBtxywE3E,47
6
- stac_fastapi_opensearch-5.0.0a1.dist-info/METADATA,sha256=rWA3nfQEeXCFy6h7AB6tM_wetrvVU_U5jf5d4HT-74g,31956
7
- stac_fastapi_opensearch-5.0.0a1.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
8
- stac_fastapi_opensearch-5.0.0a1.dist-info/entry_points.txt,sha256=zjZ0Xsr9BUNJqMkdPpl6zEIUykv1uFdJtNELFRChp0w,76
9
- stac_fastapi_opensearch-5.0.0a1.dist-info/top_level.txt,sha256=vqn-D9-HsRPTTxy0Vk_KkDmTiMES4owwBQ3ydSZYb2s,13
10
- stac_fastapi_opensearch-5.0.0a1.dist-info/RECORD,,