stac_fastapi_elasticsearch-5.0.0a1-py3-none-any.whl → stac_fastapi_elasticsearch-6.0.0-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to their respective public registries. It is provided for informational purposes only.
--- a/stac_fastapi/elasticsearch/app.py
+++ b/stac_fastapi/elasticsearch/app.py
@@ -37,6 +37,7 @@ from stac_fastapi.extensions.core import (
     TokenPaginationExtension,
     TransactionExtension,
 )
+from stac_fastapi.extensions.core.filter import FilterConformanceClasses
 from stac_fastapi.extensions.third_party import BulkTransactionExtension
 from stac_fastapi.sfeos_helpers.aggregation import EsAsyncBaseAggregationClient
 from stac_fastapi.sfeos_helpers.filter import EsAsyncBaseFiltersClient
@@ -56,7 +57,7 @@ filter_extension = FilterExtension(
     client=EsAsyncBaseFiltersClient(database=database_logic)
 )
 filter_extension.conformance_classes.append(
-    "http://www.opengis.net/spec/cql2/1.0/conf/advanced-comparison-operators"
+    FilterConformanceClasses.ADVANCED_COMPARISON_OPERATORS
 )
 
 aggregation_extension = AggregationExtension(
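
The hard-coded CQL2 URI gives way to the FilterConformanceClasses enum imported above. A minimal sketch of why the two spellings are interchangeable, assuming the enum member's value is that same conformance URI (an assumption; check the stac-fastapi source):

    from stac_fastapi.extensions.core.filter import FilterConformanceClasses

    # Assumed: the enum member wraps the CQL2 advanced-comparison-operators URI.
    assert (
        FilterConformanceClasses.ADVANCED_COMPARISON_OPERATORS.value
        == "http://www.opengis.net/spec/cql2/1.0/conf/advanced-comparison-operators"
    )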
@@ -103,22 +104,24 @@ database_logic.extensions = [type(ext).__name__ for ext in extensions]
 
 post_request_model = create_post_request_model(search_extensions)
 
-api = StacApi(
-    title=os.getenv("STAC_FASTAPI_TITLE", "stac-fastapi-elasticsearch"),
-    description=os.getenv("STAC_FASTAPI_DESCRIPTION", "stac-fastapi-elasticsearch"),
-    api_version=os.getenv("STAC_FASTAPI_VERSION", "5.0.0a1"),
-    settings=settings,
-    extensions=extensions,
-    client=CoreClient(
+app_config = {
+    "title": os.getenv("STAC_FASTAPI_TITLE", "stac-fastapi-elasticsearch"),
+    "description": os.getenv("STAC_FASTAPI_DESCRIPTION", "stac-fastapi-elasticsearch"),
+    "api_version": os.getenv("STAC_FASTAPI_VERSION", "6.0.0"),
+    "settings": settings,
+    "extensions": extensions,
+    "client": CoreClient(
         database=database_logic,
         session=session,
         post_request_model=post_request_model,
         landing_page_id=os.getenv("STAC_FASTAPI_LANDING_PAGE_ID", "stac-fastapi"),
     ),
-    search_get_request_model=create_get_request_model(search_extensions),
-    search_post_request_model=post_request_model,
-    route_dependencies=get_route_dependencies(),
-)
+    "search_get_request_model": create_get_request_model(search_extensions),
+    "search_post_request_model": post_request_model,
+    "route_dependencies": get_route_dependencies(),
+}
+
+api = StacApi(**app_config)
 
 
 @asynccontextmanager
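
Gathering the StacApi keyword arguments into app_config makes the construction step overridable: a deployment can mutate one dict instead of re-declaring every kwarg. A minimal sketch, where MyExtension is a hypothetical placeholder:

    # Hypothetical downstream customization before the single StacApi call:
    app_config["title"] = "my-org-stac-api"
    app_config["extensions"] = [*app_config["extensions"], MyExtension()]
    api = StacApi(**app_config)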
--- a/stac_fastapi/elasticsearch/database_logic.py
+++ b/stac_fastapi/elasticsearch/database_logic.py
@@ -1,7 +1,6 @@
 """Database logic."""
 
 import asyncio
-import json
 import logging
 from base64 import urlsafe_b64decode, urlsafe_b64encode
 from copy import deepcopy
@@ -9,8 +8,11 @@ from typing import Any, Dict, Iterable, List, Optional, Tuple, Type, Union
 
 import attr
 import elasticsearch.helpers as helpers
+import orjson
 from elasticsearch.dsl import Q, Search
+from elasticsearch.exceptions import BadRequestError
 from elasticsearch.exceptions import NotFoundError as ESNotFoundError
+from fastapi import HTTPException
 from starlette.requests import Request
 
 from stac_fastapi.core.base_database_logic import BaseDatabaseLogic
@@ -20,6 +22,11 @@ from stac_fastapi.elasticsearch.config import AsyncElasticsearchSettings
 from stac_fastapi.elasticsearch.config import (
     ElasticsearchSettings as SyncElasticsearchSettings,
 )
+from stac_fastapi.extensions.core.transaction.request import (
+    PartialCollection,
+    PartialItem,
+    PatchOperation,
+)
 from stac_fastapi.sfeos_helpers import filter
 from stac_fastapi.sfeos_helpers.database import (
     apply_free_text_filter_shared,
@@ -36,6 +43,10 @@ from stac_fastapi.sfeos_helpers.database import (
     return_date,
     validate_refresh,
 )
+from stac_fastapi.sfeos_helpers.database.utils import (
+    merge_to_operations,
+    operations_to_script,
+)
 from stac_fastapi.sfeos_helpers.mappings import (
     AGGREGATION_MAPPING,
     COLLECTIONS_INDEX,
@@ -45,6 +56,7 @@ from stac_fastapi.sfeos_helpers.mappings import (
     Geometry,
 )
 from stac_fastapi.types.errors import ConflictError, NotFoundError
+from stac_fastapi.types.links import resolve_links
 from stac_fastapi.types.rfc3339 import DateTimeType
 from stac_fastapi.types.stac import Collection, Item
 
@@ -245,121 +257,97 @@ class DatabaseLogic(BaseDatabaseLogic):
     @staticmethod
     def apply_datetime_filter(
         search: Search, interval: Optional[Union[DateTimeType, str]]
-    ):
+    ) -> Search:
         """Apply a filter to search on datetime, start_datetime, and end_datetime fields.
 
         Args:
-            search (Search): The search object to filter.
-            interval: Optional[Union[DateTimeType, str]]
+            search: The search object to filter.
+            interval: Optional datetime interval to filter by. Can be:
+                - A single datetime string (e.g., "2023-01-01T12:00:00")
+                - A datetime range string (e.g., "2023-01-01/2023-12-31")
+                - A datetime object
+                - A tuple of (start_datetime, end_datetime)
 
         Returns:
-            Search: The filtered search object.
+            The filtered search object.
         """
+        if not interval:
+            return search
+
         should = []
-        datetime_search = return_date(interval)
+        try:
+            datetime_search = return_date(interval)
+        except (ValueError, TypeError) as e:
+            # Handle invalid interval formats if return_date fails
+            logger.error(f"Invalid interval format: {interval}, error: {e}")
+            return search
 
-        # If the request is a single datetime return
-        # items with datetimes equal to the requested datetime OR
-        # the requested datetime is between their start and end datetimes
         if "eq" in datetime_search:
-            should.extend(
-                [
-                    Q(
-                        "bool",
-                        filter=[
-                            Q(
-                                "term",
-                                properties__datetime=datetime_search["eq"],
-                            ),
-                        ],
-                    ),
-                    Q(
-                        "bool",
-                        filter=[
-                            Q(
-                                "range",
-                                properties__start_datetime={
-                                    "lte": datetime_search["eq"],
-                                },
-                            ),
-                            Q(
-                                "range",
-                                properties__end_datetime={
-                                    "gte": datetime_search["eq"],
-                                },
-                            ),
-                        ],
-                    ),
-                ]
-            )
-
-        # If the request is a date range return
-        # items with datetimes within the requested date range OR
-        # their startdatetime ithin the requested date range OR
-        # their enddatetime ithin the requested date range OR
-        # the requested daterange within their start and end datetimes
+            # For exact matches, include:
+            # 1. Items with matching exact datetime
+            # 2. Items with datetime:null where the time falls within their range
+            should = [
+                Q(
+                    "bool",
+                    filter=[
+                        Q("exists", field="properties.datetime"),
+                        Q("term", **{"properties__datetime": datetime_search["eq"]}),
+                    ],
+                ),
+                Q(
+                    "bool",
+                    must_not=[Q("exists", field="properties.datetime")],
+                    filter=[
+                        Q("exists", field="properties.start_datetime"),
+                        Q("exists", field="properties.end_datetime"),
+                        Q(
+                            "range",
+                            properties__start_datetime={"lte": datetime_search["eq"]},
+                        ),
+                        Q(
+                            "range",
+                            properties__end_datetime={"gte": datetime_search["eq"]},
+                        ),
+                    ],
+                ),
+            ]
         else:
-            should.extend(
-                [
-                    Q(
-                        "bool",
-                        filter=[
-                            Q(
-                                "range",
-                                properties__datetime={
-                                    "gte": datetime_search["gte"],
-                                    "lte": datetime_search["lte"],
-                                },
-                            ),
-                        ],
-                    ),
-                    Q(
-                        "bool",
-                        filter=[
-                            Q(
-                                "range",
-                                properties__start_datetime={
-                                    "gte": datetime_search["gte"],
-                                    "lte": datetime_search["lte"],
-                                },
-                            ),
-                        ],
-                    ),
-                    Q(
-                        "bool",
-                        filter=[
-                            Q(
-                                "range",
-                                properties__end_datetime={
-                                    "gte": datetime_search["gte"],
-                                    "lte": datetime_search["lte"],
-                                },
-                            ),
-                        ],
-                    ),
-                    Q(
-                        "bool",
-                        filter=[
-                            Q(
-                                "range",
-                                properties__start_datetime={
-                                    "lte": datetime_search["gte"]
-                                },
-                            ),
-                            Q(
-                                "range",
-                                properties__end_datetime={
-                                    "gte": datetime_search["lte"]
-                                },
-                            ),
-                        ],
-                    ),
-                ]
-            )
-
-        search = search.query(Q("bool", filter=[Q("bool", should=should)]))
-
-        return search
+            # For date ranges, include:
+            # 1. Items with datetime in the range
+            # 2. Items with datetime:null that overlap the search range
+            should = [
+                Q(
+                    "bool",
+                    filter=[
+                        Q("exists", field="properties.datetime"),
+                        Q(
+                            "range",
+                            properties__datetime={
+                                "gte": datetime_search["gte"],
+                                "lte": datetime_search["lte"],
+                            },
+                        ),
+                    ],
+                ),
+                Q(
+                    "bool",
+                    must_not=[Q("exists", field="properties.datetime")],
+                    filter=[
+                        Q("exists", field="properties.start_datetime"),
+                        Q("exists", field="properties.end_datetime"),
+                        Q(
+                            "range",
+                            properties__start_datetime={"lte": datetime_search["lte"]},
+                        ),
+                        Q(
+                            "range",
+                            properties__end_datetime={"gte": datetime_search["gte"]},
+                        ),
+                    ],
+                ),
+            ]
+
+        return search.query(Q("bool", should=should, minimum_should_match=1))
 
     @staticmethod
     def apply_bbox_filter(search: Search, bbox: List):
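
The rewrite collapses the old four-clause should list into two clauses and makes null-datetime handling explicit: an item matches either on its populated properties.datetime or, when that field is null, on an overlapping [start_datetime, end_datetime] range; minimum_should_match=1 keeps the OR semantics. A usage sketch of the static method, assuming return_date yields {"eq": ...} for an instant and {"gte": ..., "lte": ...} for an interval:

    from elasticsearch.dsl import Search

    # Range search: matches items whose datetime lies in the interval, plus
    # datetime:null items whose start/end range overlaps it
    # (start_datetime <= lte and end_datetime >= gte).
    s = DatabaseLogic.apply_datetime_filter(
        Search(), "2023-01-01T00:00:00Z/2023-12-31T23:59:59Z"
    )

    # An empty interval is now a no-op, and a malformed one is logged and
    # ignored instead of propagating an exception.
    s = DatabaseLogic.apply_datetime_filter(Search(), None)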
@@ -527,7 +515,7 @@ class DatabaseLogic(BaseDatabaseLogic):
         search_after = None
 
         if token:
-            search_after = json.loads(urlsafe_b64decode(token).decode())
+            search_after = orjson.loads(urlsafe_b64decode(token))
 
         query = search.query.to_dict() if search.query else None
 
@@ -567,7 +555,7 @@ class DatabaseLogic(BaseDatabaseLogic):
         next_token = None
         if len(hits) > limit and limit < max_result_window:
             if hits and (sort_array := hits[limit - 1].get("sort")):
-                next_token = urlsafe_b64encode(json.dumps(sort_array).encode()).decode()
+                next_token = urlsafe_b64encode(orjson.dumps(sort_array)).decode()
 
         matched = (
             es_response["hits"]["total"]["value"]
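
Both pagination-token paths now use orjson, which parses bytes directly and serializes to bytes, so the intermediate .decode()/.encode() steps disappear. A round-trip sketch of the token encoding with a hypothetical sort array:

    import orjson
    from base64 import urlsafe_b64decode, urlsafe_b64encode

    sort_array = [1672531200000, "collection_item-001"]  # hypothetical search_after values
    token = urlsafe_b64encode(orjson.dumps(sort_array)).decode()
    assert orjson.loads(urlsafe_b64decode(token)) == sort_array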
@@ -836,6 +824,135 @@ class DatabaseLogic(BaseDatabaseLogic):
             refresh=refresh,
         )
 
+    async def merge_patch_item(
+        self,
+        collection_id: str,
+        item_id: str,
+        item: PartialItem,
+        base_url: str,
+        refresh: bool = True,
+    ) -> Item:
+        """Database logic for merge patching an item following RFC 7396.
+
+        Args:
+            collection_id (str): Collection that item belongs to.
+            item_id (str): Id of item to be patched.
+            item (PartialItem): The partial item to be updated.
+            base_url (str): The base URL used for constructing URLs for the item.
+            refresh (bool, optional): Refresh the index after performing the operation. Defaults to True.
+
+        Returns:
+            patched item.
+        """
+        operations = merge_to_operations(item.model_dump())
+
+        return await self.json_patch_item(
+            collection_id=collection_id,
+            item_id=item_id,
+            operations=operations,
+            base_url=base_url,
+            refresh=refresh,
+        )
+
+    async def json_patch_item(
+        self,
+        collection_id: str,
+        item_id: str,
+        operations: List[PatchOperation],
+        base_url: str,
+        refresh: bool = True,
+    ) -> Item:
+        """Database logic for json patching an item following RFC 6902.
+
+        Args:
+            collection_id (str): Collection that item belongs to.
+            item_id (str): Id of item to be patched.
+            operations (list): List of operations to run.
+            base_url (str): The base URL used for constructing URLs for the item.
+            refresh (bool, optional): Refresh the index after performing the operation. Defaults to True.
+
+        Returns:
+            patched item.
+        """
+        new_item_id = None
+        new_collection_id = None
+        script_operations = []
+
+        for operation in operations:
+            if operation.path in ["collection", "id"] and operation.op in [
+                "add",
+                "replace",
+            ]:
+
+                if operation.path == "collection" and collection_id != operation.value:
+                    await self.check_collection_exists(collection_id=operation.value)
+                    new_collection_id = operation.value
+
+                if operation.path == "id" and item_id != operation.value:
+                    new_item_id = operation.value
+
+            else:
+                script_operations.append(operation)
+
+        script = operations_to_script(script_operations)
+
+        try:
+            await self.client.update(
+                index=index_alias_by_collection_id(collection_id),
+                id=mk_item_id(item_id, collection_id),
+                script=script,
+                refresh=True,
+            )
+
+        except BadRequestError as exc:
+            raise HTTPException(
+                status_code=400, detail=exc.info["error"]["caused_by"]
+            ) from exc
+
+        item = await self.get_one_item(collection_id, item_id)
+
+        if new_collection_id:
+            await self.client.reindex(
+                body={
+                    "dest": {"index": f"{ITEMS_INDEX_PREFIX}{new_collection_id}"},
+                    "source": {
+                        "index": f"{ITEMS_INDEX_PREFIX}{collection_id}",
+                        "query": {"term": {"id": {"value": item_id}}},
+                    },
+                    "script": {
+                        "lang": "painless",
+                        "source": (
+                            f"""ctx._id = ctx._id.replace('{collection_id}', '{new_collection_id}');"""
+                            f"""ctx._source.collection = '{new_collection_id}';"""
+                        ),
+                    },
+                },
+                wait_for_completion=True,
+                refresh=True,
+            )
+
+            await self.delete_item(
+                item_id=item_id,
+                collection_id=collection_id,
+                refresh=refresh,
+            )
+
+            item["collection"] = new_collection_id
+            collection_id = new_collection_id
+
+        if new_item_id:
+            item["id"] = new_item_id
+            item = await self.async_prep_create_item(item=item, base_url=base_url)
+            await self.create_item(item=item, refresh=True)
+
+            await self.delete_item(
+                item_id=item_id,
+                collection_id=collection_id,
+                refresh=refresh,
+            )
+
+        return item
+
     async def delete_item(self, item_id: str, collection_id: str, **kwargs: Any):
         """Delete a single item from the database.
 
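json_patch_item splits the RFC 6902 operations into two groups: add/replace on id or collection (handled out-of-band with a reindex and delete) and everything else (compiled by operations_to_script into a single painless update). A hedged call sketch, assuming db is a DatabaseLogic instance; the slash-less path spelling for nested properties mirrors the bare "collection"/"id" paths the method checks for, but is an assumption:

    # Hypothetical patch: move an item to another collection and update a property.
    item = await db.json_patch_item(
        collection_id="landsat-8",
        item_id="scene-001",
        operations=[
            PatchOperation(op="replace", path="collection", value="landsat-9"),
            PatchOperation(op="replace", path="properties/gsd", value=30),  # path format assumed
        ],
        base_url="https://stac.example.com/",
    )
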
@@ -895,6 +1012,37 @@ class DatabaseLogic(BaseDatabaseLogic):
         except ESNotFoundError:
             raise NotFoundError(f"Mapping for index {index_name} not found")
 
+    async def get_items_unique_values(
+        self, collection_id: str, field_names: Iterable[str], *, limit: int = 100
+    ) -> Dict[str, List[str]]:
+        """Get the unique values for the given fields in the collection."""
+        limit_plus_one = limit + 1
+        index_name = index_alias_by_collection_id(collection_id)
+
+        query = await self.client.search(
+            index=index_name,
+            body={
+                "size": 0,
+                "aggs": {
+                    field: {"terms": {"field": field, "size": limit_plus_one}}
+                    for field in field_names
+                },
+            },
+        )
+
+        result: Dict[str, List[str]] = {}
+        for field, agg in query["aggregations"].items():
+            if len(agg["buckets"]) > limit:
+                logger.warning(
+                    "Skipping enum field %s: exceeds limit of %d unique values. "
+                    "Consider excluding this field from enumeration or increase the limit.",
+                    field,
+                    limit,
+                )
+                continue
+            result[field] = [bucket["key"] for bucket in agg["buckets"]]
+        return result
+
     async def create_collection(self, collection: Collection, **kwargs: Any):
         """Create a single collection in the database.
 
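get_items_unique_values fetches one terms aggregation per field in a single search, asking for limit + 1 buckets so an over-limit field can be detected and skipped rather than silently truncated. A usage sketch with hypothetical collection and field names, assuming db is a DatabaseLogic instance:

    unique = await db.get_items_unique_values(
        "sentinel-2-l2a",
        ["properties.platform", "properties.constellation"],
        limit=100,
    )
    # e.g. {"properties.platform": ["sentinel-2a", "sentinel-2b"], ...};
    # fields with more than 100 distinct values are omitted with a warning.
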
@@ -1038,6 +1186,95 @@ class DatabaseLogic(BaseDatabaseLogic):
             refresh=refresh,
         )
 
+    async def merge_patch_collection(
+        self,
+        collection_id: str,
+        collection: PartialCollection,
+        base_url: str,
+        refresh: bool = True,
+    ) -> Collection:
+        """Database logic for merge patching a collection following RFC 7396.
+
+        Args:
+            collection_id (str): Id of collection to be patched.
+            collection (PartialCollection): The partial collection to be updated.
+            base_url (str): The base URL used for constructing links.
+            refresh (bool, optional): Refresh the index after performing the operation. Defaults to True.
+
+
+        Returns:
+            patched collection.
+        """
+        operations = merge_to_operations(collection.model_dump())
+
+        return await self.json_patch_collection(
+            collection_id=collection_id,
+            operations=operations,
+            base_url=base_url,
+            refresh=refresh,
+        )
+
+    async def json_patch_collection(
+        self,
+        collection_id: str,
+        operations: List[PatchOperation],
+        base_url: str,
+        refresh: bool = True,
+    ) -> Collection:
+        """Database logic for json patching a collection following RFC 6902.
+
+        Args:
+            collection_id (str): Id of collection to be patched.
+            operations (list): List of operations to run.
+            base_url (str): The base URL used for constructing links.
+            refresh (bool, optional): Refresh the index after performing the operation. Defaults to True.
+
+        Returns:
+            patched collection.
+        """
+        new_collection_id = None
+        script_operations = []
+
+        for operation in operations:
+            if (
+                operation.op in ["add", "replace"]
+                and operation.path == "collection"
+                and collection_id != operation.value
+            ):
+                new_collection_id = operation.value
+
+            else:
+                script_operations.append(operation)
+
+        script = operations_to_script(script_operations)
+
+        try:
+            await self.client.update(
+                index=COLLECTIONS_INDEX,
+                id=collection_id,
+                script=script,
+                refresh=True,
+            )
+
+        except BadRequestError as exc:
+            raise HTTPException(
+                status_code=400, detail=exc.info["error"]["caused_by"]
+            ) from exc
+
+        collection = await self.find_collection(collection_id)
+
+        if new_collection_id:
+            collection["id"] = new_collection_id
+            collection["links"] = resolve_links([], base_url)
+
+            await self.update_collection(
+                collection_id=collection_id,
+                collection=collection,
+                refresh=refresh,
+            )
+
+        return collection
+
     async def delete_collection(self, collection_id: str, **kwargs: Any):
         """Delete a collection from the database.
 
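merge_patch_collection layers RFC 7396 on top of the RFC 6902 machinery: merge_to_operations turns the partial body into patch operations (under merge-patch semantics a null value removes a key, any other value adds or replaces it), which then flow through json_patch_collection. A hedged call sketch, again assuming db is a DatabaseLogic instance:

    # Hypothetical merge patch: retitle a collection.
    collection = await db.merge_patch_collection(
        collection_id="sentinel-2-l2a",
        collection=PartialCollection(title="Sentinel-2 L2A (reprocessed)"),
        base_url="https://stac.example.com/",
    )
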
@@ -1061,28 +1298,15 @@
         # Ensure kwargs is a dictionary
         kwargs = kwargs or {}
 
-        # Verify that the collection exists
-        await self.find_collection(collection_id=collection_id)
-
-        # Resolve the `refresh` parameter
         refresh = kwargs.get("refresh", self.async_settings.database_refresh)
         refresh = validate_refresh(refresh)
 
-        # Log the deletion attempt
-        logger.info(f"Deleting collection {collection_id} with refresh={refresh}")
-
-        # Delete the collection from the database
+        # Verify that the collection exists
+        await self.find_collection(collection_id=collection_id)
         await self.client.delete(
             index=COLLECTIONS_INDEX, id=collection_id, refresh=refresh
         )
-
-        # Delete the item index for the collection
-        try:
-            await delete_item_index(collection_id)
-        except Exception as e:
-            logger.error(
-                f"Failed to delete item index for collection {collection_id}: {e}"
-            )
+        await delete_item_index(collection_id)
 
     async def bulk_async(
         self,
--- a/stac_fastapi/elasticsearch/version.py
+++ b/stac_fastapi/elasticsearch/version.py
@@ -1,2 +1,2 @@
 """library version."""
-__version__ = "5.0.0a1"
+__version__ = "6.0.0"
--- a/stac_fastapi_elasticsearch-5.0.0a1.dist-info/METADATA
+++ b/stac_fastapi_elasticsearch-6.0.0.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: stac-fastapi-elasticsearch
-Version: 5.0.0a1
+Version: 6.0.0
 Summary: An implementation of STAC API based on the FastAPI framework with both Elasticsearch and Opensearch.
 Home-page: https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch
 License: MIT
@@ -15,8 +15,8 @@ Classifier: Programming Language :: Python :: 3.13
 Classifier: License :: OSI Approved :: MIT License
 Requires-Python: >=3.9
 Description-Content-Type: text/markdown
-Requires-Dist: stac-fastapi-core==5.0.0a1
-Requires-Dist: sfeos-helpers==5.0.0a1
+Requires-Dist: stac-fastapi-core==6.0.0
+Requires-Dist: sfeos-helpers==6.0.0
 Requires-Dist: elasticsearch[async]~=8.18.0
 Requires-Dist: uvicorn~=0.23.0
 Requires-Dist: starlette<0.36.0,>=0.35.0
@@ -25,7 +25,6 @@ Requires-Dist: pytest~=7.0.0; extra == "dev"
 Requires-Dist: pytest-cov~=4.0.0; extra == "dev"
 Requires-Dist: pytest-asyncio~=0.21.0; extra == "dev"
 Requires-Dist: pre-commit~=3.0.0; extra == "dev"
-Requires-Dist: requests<3.0.0,>=2.32.0; extra == "dev"
 Requires-Dist: ciso8601~=2.3.0; extra == "dev"
 Requires-Dist: httpx<0.28.0,>=0.24.0; extra == "dev"
 Provides-Extra: docs
--- /dev/null
+++ b/stac_fastapi_elasticsearch-6.0.0.dist-info/RECORD
@@ -0,0 +1,10 @@
+stac_fastapi/elasticsearch/__init__.py,sha256=w_MZutYLreNV372sCuO46bPb0TngmPs4u8737ueS0wE,31
+stac_fastapi/elasticsearch/app.py,sha256=CPa_JaIA39dJ1qTYRZsCsmcmpcnjAZ2T4S0wyeUzqEg,5188
+stac_fastapi/elasticsearch/config.py,sha256=PKSowbXmSryMj0Oq15XJduyPL2c_NlDkewXnR1DFP2o,5181
+stac_fastapi/elasticsearch/database_logic.py,sha256=dvhYUe2baZhA4-Saww4tV89scLMHrDgK3lwas-m3U70,55930
+stac_fastapi/elasticsearch/version.py,sha256=Fo5UFEQVxJZ3nywa3IY-enu5UQBE0X45nrQaRBe8c9o,45
+stac_fastapi_elasticsearch-6.0.0.dist-info/METADATA,sha256=pHi-zn9w3UJAYmrCLN_mU7MCKMFGtvo8BMa4JKrhFQs,31931
+stac_fastapi_elasticsearch-6.0.0.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+stac_fastapi_elasticsearch-6.0.0.dist-info/entry_points.txt,sha256=aCKixki0LpUl64UPsPMtiNvfdyq-QsTCxVjJ54VF6Jk,82
+stac_fastapi_elasticsearch-6.0.0.dist-info/top_level.txt,sha256=vqn-D9-HsRPTTxy0Vk_KkDmTiMES4owwBQ3ydSZYb2s,13
+stac_fastapi_elasticsearch-6.0.0.dist-info/RECORD,,
--- a/stac_fastapi_elasticsearch-5.0.0a1.dist-info/RECORD
+++ /dev/null
@@ -1,10 +0,0 @@
-stac_fastapi/elasticsearch/__init__.py,sha256=w_MZutYLreNV372sCuO46bPb0TngmPs4u8737ueS0wE,31
-stac_fastapi/elasticsearch/app.py,sha256=uuBk3Vg8vdl15yIQCFCxJ8UfFtaKjMvmXXULMd8Lw0g,5080
-stac_fastapi/elasticsearch/config.py,sha256=PKSowbXmSryMj0Oq15XJduyPL2c_NlDkewXnR1DFP2o,5181
-stac_fastapi/elasticsearch/database_logic.py,sha256=-qvrOTyoVWe4Ua4lu5Fjq1NmrY_fj_3Emt1HV8i5Dsw,48248
-stac_fastapi/elasticsearch/version.py,sha256=MQ4i7PSJnhrL3abG-m_vOwI3ksvRoHcs0eiBtxywE3E,47
-stac_fastapi_elasticsearch-5.0.0a1.dist-info/METADATA,sha256=9sB79eLh3KasJb7ykxdvhHvB2w-peD1_N6Z4wltzJmA,31992
-stac_fastapi_elasticsearch-5.0.0a1.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
-stac_fastapi_elasticsearch-5.0.0a1.dist-info/entry_points.txt,sha256=aCKixki0LpUl64UPsPMtiNvfdyq-QsTCxVjJ54VF6Jk,82
-stac_fastapi_elasticsearch-5.0.0a1.dist-info/top_level.txt,sha256=vqn-D9-HsRPTTxy0Vk_KkDmTiMES4owwBQ3ydSZYb2s,13
-stac_fastapi_elasticsearch-5.0.0a1.dist-info/RECORD,,