stac-fastapi-elasticsearch 5.0.0a1__py3-none-any.whl → 6.1.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
--- a/stac_fastapi/elasticsearch/app.py
+++ b/stac_fastapi/elasticsearch/app.py
@@ -31,12 +31,14 @@ from stac_fastapi.elasticsearch.database_logic import (
 )
 from stac_fastapi.extensions.core import (
     AggregationExtension,
+    CollectionSearchExtension,
     FilterExtension,
     FreeTextExtension,
     SortExtension,
     TokenPaginationExtension,
     TransactionExtension,
 )
+from stac_fastapi.extensions.core.filter import FilterConformanceClasses
 from stac_fastapi.extensions.third_party import BulkTransactionExtension
 from stac_fastapi.sfeos_helpers.aggregation import EsAsyncBaseAggregationClient
 from stac_fastapi.sfeos_helpers.filter import EsAsyncBaseFiltersClient
@@ -56,7 +58,15 @@ filter_extension = FilterExtension(
     client=EsAsyncBaseFiltersClient(database=database_logic)
 )
 filter_extension.conformance_classes.append(
-    "http://www.opengis.net/spec/cql2/1.0/conf/advanced-comparison-operators"
+    FilterConformanceClasses.ADVANCED_COMPARISON_OPERATORS
+)
+
+# Adding collection search extension for compatibility with stac-auth-proxy
+# (https://github.com/developmentseed/stac-auth-proxy)
+# The extension is not fully implemented yet but is required for collection filtering support
+collection_search_extension = CollectionSearchExtension()
+collection_search_extension.conformance_classes.append(
+    "https://api.stacspec.org/v1.0.0-rc.1/collection-search#filter"
 )

 aggregation_extension = AggregationExtension(
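
The hunk above registers the CollectionSearchExtension and advertises the collection-search filter conformance class next to the CQL2 advanced-comparison-operators class. A minimal sketch for checking that a running instance actually advertises them, assuming a local deployment at http://localhost:8080 and using httpx (already a dev dependency):

```python
# Minimal sketch: read /conformance from a running instance and check the
# advertised classes. The base URL is an assumption; adjust to your deployment.
import httpx

BASE_URL = "http://localhost:8080"  # assumption: local stac-fastapi-elasticsearch instance

response = httpx.get(f"{BASE_URL}/conformance")
response.raise_for_status()
conforms_to = response.json()["conformsTo"]

for uri in (
    "http://www.opengis.net/spec/cql2/1.0/conf/advanced-comparison-operators",
    "https://api.stacspec.org/v1.0.0-rc.1/collection-search#filter",
):
    print(uri, "->", "advertised" if uri in conforms_to else "missing")
```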
@@ -74,6 +84,7 @@ search_extensions = [
     TokenPaginationExtension(),
     filter_extension,
     FreeTextExtension(),
+    collection_search_extension,
 ]

 if TRANSACTIONS_EXTENSIONS:
@@ -103,22 +114,24 @@ database_logic.extensions = [type(ext).__name__ for ext in extensions]

 post_request_model = create_post_request_model(search_extensions)

-api = StacApi(
-    title=os.getenv("STAC_FASTAPI_TITLE", "stac-fastapi-elasticsearch"),
-    description=os.getenv("STAC_FASTAPI_DESCRIPTION", "stac-fastapi-elasticsearch"),
-    api_version=os.getenv("STAC_FASTAPI_VERSION", "5.0.0a1"),
-    settings=settings,
-    extensions=extensions,
-    client=CoreClient(
+app_config = {
+    "title": os.getenv("STAC_FASTAPI_TITLE", "stac-fastapi-elasticsearch"),
+    "description": os.getenv("STAC_FASTAPI_DESCRIPTION", "stac-fastapi-elasticsearch"),
+    "api_version": os.getenv("STAC_FASTAPI_VERSION", "6.1.0"),
+    "settings": settings,
+    "extensions": extensions,
+    "client": CoreClient(
         database=database_logic,
         session=session,
         post_request_model=post_request_model,
         landing_page_id=os.getenv("STAC_FASTAPI_LANDING_PAGE_ID", "stac-fastapi"),
     ),
-    search_get_request_model=create_get_request_model(search_extensions),
-    search_post_request_model=post_request_model,
-    route_dependencies=get_route_dependencies(),
-)
+    "search_get_request_model": create_get_request_model(search_extensions),
+    "search_post_request_model": post_request_model,
+    "route_dependencies": get_route_dependencies(),
+}
+
+api = StacApi(**app_config)


 @asynccontextmanager
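
Collecting the StacApi keyword arguments in `app_config` before the call makes the construction easier to adjust from the environment. The sketch below assumes the module still exposes the built FastAPI instance as `stac_fastapi.elasticsearch.app:app` (as in earlier releases) and that the environment is set before the module is imported; the override values are illustrative only.

```python
# Minimal sketch, not the packaged startup script: app_config reads os.getenv at
# import time, so overrides must be in place before the module is loaded.
# The module path "stac_fastapi.elasticsearch.app:app" is assumed from earlier releases.
import os

import uvicorn

os.environ["STAC_FASTAPI_TITLE"] = "my-stac-api"           # illustrative override
os.environ["STAC_FASTAPI_LANDING_PAGE_ID"] = "my-catalog"  # illustrative override

if __name__ == "__main__":
    uvicorn.run("stac_fastapi.elasticsearch.app:app", host="0.0.0.0", port=8080)
```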
--- a/stac_fastapi/elasticsearch/config.py
+++ b/stac_fastapi/elasticsearch/config.py
@@ -56,6 +56,10 @@ def _es_config() -> Dict[str, Any]:
     if (u := os.getenv("ES_USER")) and (p := os.getenv("ES_PASS")):
         config["http_auth"] = (u, p)

+    # Include timeout setting if set
+    if request_timeout := os.getenv("ES_TIMEOUT"):
+        config["request_timeout"] = request_timeout
+
     # Explicitly exclude SSL settings when not using SSL
     if not use_ssl:
         return config
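
Note that `os.getenv` returns the raw string, so `ES_TIMEOUT` is copied into the client config as-is. A standalone sketch of the same pattern, with an explicit numeric cast shown only for comparison (the cast is not part of the packaged code):

```python
# Minimal sketch of the env-driven timeout pattern used in _es_config above.
# ES_TIMEOUT is read as a string by os.getenv; the float() variant is shown
# only to illustrate an explicit cast and is not what the packaged code does.
import os
from typing import Any, Dict

config: Dict[str, Any] = {}

if request_timeout := os.getenv("ES_TIMEOUT"):
    config["request_timeout"] = request_timeout            # as in config.py: string passed through
    # config["request_timeout"] = float(request_timeout)   # explicit numeric cast, for comparison

print(config)
```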
--- a/stac_fastapi/elasticsearch/database_logic.py
+++ b/stac_fastapi/elasticsearch/database_logic.py
@@ -1,7 +1,6 @@
 """Database logic."""

 import asyncio
-import json
 import logging
 from base64 import urlsafe_b64decode, urlsafe_b64encode
 from copy import deepcopy
@@ -9,8 +8,11 @@ from typing import Any, Dict, Iterable, List, Optional, Tuple, Type, Union

 import attr
 import elasticsearch.helpers as helpers
+import orjson
 from elasticsearch.dsl import Q, Search
+from elasticsearch.exceptions import BadRequestError
 from elasticsearch.exceptions import NotFoundError as ESNotFoundError
+from fastapi import HTTPException
 from starlette.requests import Request

 from stac_fastapi.core.base_database_logic import BaseDatabaseLogic
@@ -20,6 +22,11 @@ from stac_fastapi.elasticsearch.config import AsyncElasticsearchSettings
 from stac_fastapi.elasticsearch.config import (
     ElasticsearchSettings as SyncElasticsearchSettings,
 )
+from stac_fastapi.extensions.core.transaction.request import (
+    PartialCollection,
+    PartialItem,
+    PatchOperation,
+)
 from stac_fastapi.sfeos_helpers import filter
 from stac_fastapi.sfeos_helpers.database import (
     apply_free_text_filter_shared,
@@ -36,6 +43,14 @@ from stac_fastapi.sfeos_helpers.database import (
     return_date,
     validate_refresh,
 )
+from stac_fastapi.sfeos_helpers.database.query import (
+    ES_MAX_URL_LENGTH,
+    add_collections_to_body,
+)
+from stac_fastapi.sfeos_helpers.database.utils import (
+    merge_to_operations,
+    operations_to_script,
+)
 from stac_fastapi.sfeos_helpers.mappings import (
     AGGREGATION_MAPPING,
     COLLECTIONS_INDEX,
@@ -45,6 +60,7 @@ from stac_fastapi.sfeos_helpers.mappings import (
     Geometry,
 )
 from stac_fastapi.types.errors import ConflictError, NotFoundError
+from stac_fastapi.types.links import resolve_links
 from stac_fastapi.types.rfc3339 import DateTimeType
 from stac_fastapi.types.stac import Collection, Item

@@ -245,121 +261,97 @@ class DatabaseLogic(BaseDatabaseLogic):
     @staticmethod
     def apply_datetime_filter(
         search: Search, interval: Optional[Union[DateTimeType, str]]
-    ):
+    ) -> Search:
         """Apply a filter to search on datetime, start_datetime, and end_datetime fields.

         Args:
-            search (Search): The search object to filter.
-            interval: Optional[Union[DateTimeType, str]]
+            search: The search object to filter.
+            interval: Optional datetime interval to filter by. Can be:
+                - A single datetime string (e.g., "2023-01-01T12:00:00")
+                - A datetime range string (e.g., "2023-01-01/2023-12-31")
+                - A datetime object
+                - A tuple of (start_datetime, end_datetime)

         Returns:
-            Search: The filtered search object.
+            The filtered search object.
         """
+        if not interval:
+            return search
+
         should = []
-        datetime_search = return_date(interval)
+        try:
+            datetime_search = return_date(interval)
+        except (ValueError, TypeError) as e:
+            # Handle invalid interval formats if return_date fails
+            logger.error(f"Invalid interval format: {interval}, error: {e}")
+            return search

-        # If the request is a single datetime return
-        # items with datetimes equal to the requested datetime OR
-        # the requested datetime is between their start and end datetimes
         if "eq" in datetime_search:
-            should.extend(
-                [
-                    Q(
-                        "bool",
-                        filter=[
-                            Q(
-                                "term",
-                                properties__datetime=datetime_search["eq"],
-                            ),
-                        ],
-                    ),
-                    Q(
-                        "bool",
-                        filter=[
-                            Q(
-                                "range",
-                                properties__start_datetime={
-                                    "lte": datetime_search["eq"],
-                                },
-                            ),
-                            Q(
-                                "range",
-                                properties__end_datetime={
-                                    "gte": datetime_search["eq"],
-                                },
-                            ),
-                        ],
-                    ),
-                ]
-            )
-
-        # If the request is a date range return
-        # items with datetimes within the requested date range OR
-        # their startdatetime ithin the requested date range OR
-        # their enddatetime ithin the requested date range OR
-        # the requested daterange within their start and end datetimes
+            # For exact matches, include:
+            # 1. Items with matching exact datetime
+            # 2. Items with datetime:null where the time falls within their range
+            should = [
+                Q(
+                    "bool",
+                    filter=[
+                        Q("exists", field="properties.datetime"),
+                        Q("term", **{"properties__datetime": datetime_search["eq"]}),
+                    ],
+                ),
+                Q(
+                    "bool",
+                    must_not=[Q("exists", field="properties.datetime")],
+                    filter=[
+                        Q("exists", field="properties.start_datetime"),
+                        Q("exists", field="properties.end_datetime"),
+                        Q(
+                            "range",
+                            properties__start_datetime={"lte": datetime_search["eq"]},
+                        ),
+                        Q(
+                            "range",
+                            properties__end_datetime={"gte": datetime_search["eq"]},
+                        ),
+                    ],
+                ),
+            ]
         else:
-            should.extend(
-                [
-                    Q(
-                        "bool",
-                        filter=[
-                            Q(
-                                "range",
-                                properties__datetime={
-                                    "gte": datetime_search["gte"],
-                                    "lte": datetime_search["lte"],
-                                },
-                            ),
-                        ],
-                    ),
-                    Q(
-                        "bool",
-                        filter=[
-                            Q(
-                                "range",
-                                properties__start_datetime={
-                                    "gte": datetime_search["gte"],
-                                    "lte": datetime_search["lte"],
-                                },
-                            ),
-                        ],
-                    ),
-                    Q(
-                        "bool",
-                        filter=[
-                            Q(
-                                "range",
-                                properties__end_datetime={
-                                    "gte": datetime_search["gte"],
-                                    "lte": datetime_search["lte"],
-                                },
-                            ),
-                        ],
-                    ),
-                    Q(
-                        "bool",
-                        filter=[
-                            Q(
-                                "range",
-                                properties__start_datetime={
-                                    "lte": datetime_search["gte"]
-                                },
-                            ),
-                            Q(
-                                "range",
-                                properties__end_datetime={
-                                    "gte": datetime_search["lte"]
-                                },
-                            ),
-                        ],
-                    ),
-                ]
-            )
-
-        search = search.query(Q("bool", filter=[Q("bool", should=should)]))
-
-        return search
+            # For date ranges, include:
+            # 1. Items with datetime in the range
+            # 2. Items with datetime:null that overlap the search range
+            should = [
+                Q(
+                    "bool",
+                    filter=[
+                        Q("exists", field="properties.datetime"),
+                        Q(
+                            "range",
+                            properties__datetime={
+                                "gte": datetime_search["gte"],
+                                "lte": datetime_search["lte"],
+                            },
+                        ),
+                    ],
+                ),
+                Q(
+                    "bool",
+                    must_not=[Q("exists", field="properties.datetime")],
+                    filter=[
+                        Q("exists", field="properties.start_datetime"),
+                        Q("exists", field="properties.end_datetime"),
+                        Q(
+                            "range",
+                            properties__start_datetime={"lte": datetime_search["lte"]},
+                        ),
+                        Q(
+                            "range",
+                            properties__end_datetime={"gte": datetime_search["gte"]},
+                        ),
+                    ],
+                ),
+            ]
+
+        return search.query(Q("bool", should=should, minimum_should_match=1))

     @staticmethod
     def apply_bbox_filter(search: Search, bbox: List):
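
The rewritten filter builds one `should` clause for items that carry `properties.datetime` and one for items with `datetime: null` that only carry a start/end range, and requires at least one clause to match. For a range search, the generated query body has roughly the following shape (hand-translated from the `Q(...)` calls above; the dates are example values, not taken from a live request):

```python
# Approximate query body produced by apply_datetime_filter for a range search,
# reconstructed by hand from the Q(...) calls above.
range_query = {
    "bool": {
        "should": [
            {   # items with a concrete datetime inside the range
                "bool": {
                    "filter": [
                        {"exists": {"field": "properties.datetime"}},
                        {"range": {"properties.datetime": {"gte": "2023-01-01T00:00:00Z",
                                                           "lte": "2023-12-31T23:59:59Z"}}},
                    ]
                }
            },
            {   # datetime:null items whose start/end interval overlaps the range
                "bool": {
                    "must_not": [{"exists": {"field": "properties.datetime"}}],
                    "filter": [
                        {"exists": {"field": "properties.start_datetime"}},
                        {"exists": {"field": "properties.end_datetime"}},
                        {"range": {"properties.start_datetime": {"lte": "2023-12-31T23:59:59Z"}}},
                        {"range": {"properties.end_datetime": {"gte": "2023-01-01T00:00:00Z"}}},
                    ]
                }
            },
        ],
        "minimum_should_match": 1,
    }
}
```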
@@ -527,11 +519,14 @@ class DatabaseLogic(BaseDatabaseLogic):
         search_after = None

         if token:
-            search_after = json.loads(urlsafe_b64decode(token).decode())
+            search_after = orjson.loads(urlsafe_b64decode(token))

         query = search.query.to_dict() if search.query else None

         index_param = indices(collection_ids)
+        if len(index_param) > ES_MAX_URL_LENGTH - 300:
+            index_param = ITEM_INDICES
+            query = add_collections_to_body(collection_ids, query)

         max_result_window = MAX_LIMIT

@@ -567,7 +562,7 @@ class DatabaseLogic(BaseDatabaseLogic):
         next_token = None
         if len(hits) > limit and limit < max_result_window:
             if hits and (sort_array := hits[limit - 1].get("sort")):
-                next_token = urlsafe_b64encode(json.dumps(sort_array).encode()).decode()
+                next_token = urlsafe_b64encode(orjson.dumps(sort_array)).decode()

         matched = (
             es_response["hits"]["total"]["value"]
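
Pagination tokens remain a URL-safe base64 encoding of the last hit's `sort` array; the change swaps `json` for `orjson`, whose `dumps` returns bytes, so the extra `.encode()`/`.decode()` steps disappear. The same hunk also falls back to `ITEM_INDICES` plus a collection filter in the request body when the comma-joined index list would exceed `ES_MAX_URL_LENGTH`. A round-trip sketch of the token encoding (the sort values are illustrative):

```python
# Minimal sketch of the pagination-token round trip used above.
# orjson.dumps returns bytes, so it can be base64-encoded directly;
# orjson.loads accepts bytes, so the token does not need .decode() first.
from base64 import urlsafe_b64decode, urlsafe_b64encode

import orjson

sort_array = [1672531200000, "landsat:LC08_example"]  # example "sort" values from the last hit

token = urlsafe_b64encode(orjson.dumps(sort_array)).decode()  # what goes into the "next" link
search_after = orjson.loads(urlsafe_b64decode(token))         # what the next request passes to ES

assert search_after == sort_array
print(token, search_after)
```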
@@ -836,6 +831,135 @@ class DatabaseLogic(BaseDatabaseLogic):
             refresh=refresh,
         )

+    async def merge_patch_item(
+        self,
+        collection_id: str,
+        item_id: str,
+        item: PartialItem,
+        base_url: str,
+        refresh: bool = True,
+    ) -> Item:
+        """Database logic for merge patching an item following RF7396.
+
+        Args:
+            collection_id(str): Collection that item belongs to.
+            item_id(str): Id of item to be patched.
+            item (PartialItem): The partial item to be updated.
+            base_url: (str): The base URL used for constructing URLs for the item.
+            refresh (bool, optional): Refresh the index after performing the operation. Defaults to True.
+
+        Returns:
+            patched item.
+        """
+        operations = merge_to_operations(item.model_dump())
+
+        return await self.json_patch_item(
+            collection_id=collection_id,
+            item_id=item_id,
+            operations=operations,
+            base_url=base_url,
+            refresh=refresh,
+        )
+
+    async def json_patch_item(
+        self,
+        collection_id: str,
+        item_id: str,
+        operations: List[PatchOperation],
+        base_url: str,
+        refresh: bool = True,
+    ) -> Item:
+        """Database logic for json patching an item following RF6902.
+
+        Args:
+            collection_id(str): Collection that item belongs to.
+            item_id(str): Id of item to be patched.
+            operations (list): List of operations to run.
+            base_url (str): The base URL used for constructing URLs for the item.
+            refresh (bool, optional): Refresh the index after performing the operation. Defaults to True.
+
+        Returns:
+            patched item.
+        """
+        new_item_id = None
+        new_collection_id = None
+        script_operations = []
+
+        for operation in operations:
+            if operation.path in ["collection", "id"] and operation.op in [
+                "add",
+                "replace",
+            ]:
+
+                if operation.path == "collection" and collection_id != operation.value:
+                    await self.check_collection_exists(collection_id=operation.value)
+                    new_collection_id = operation.value
+
+                if operation.path == "id" and item_id != operation.value:
+                    new_item_id = operation.value
+
+            else:
+                script_operations.append(operation)
+
+        script = operations_to_script(script_operations)
+
+        try:
+            await self.client.update(
+                index=index_alias_by_collection_id(collection_id),
+                id=mk_item_id(item_id, collection_id),
+                script=script,
+                refresh=True,
+            )
+
+        except BadRequestError as exc:
+            raise HTTPException(
+                status_code=400, detail=exc.info["error"]["caused_by"]
+            ) from exc
+
+        item = await self.get_one_item(collection_id, item_id)
+
+        if new_collection_id:
+            await self.client.reindex(
+                body={
+                    "dest": {"index": f"{ITEMS_INDEX_PREFIX}{new_collection_id}"},
+                    "source": {
+                        "index": f"{ITEMS_INDEX_PREFIX}{collection_id}",
+                        "query": {"term": {"id": {"value": item_id}}},
+                    },
+                    "script": {
+                        "lang": "painless",
+                        "source": (
+                            f"""ctx._id = ctx._id.replace('{collection_id}', '{new_collection_id}');"""
+                            f"""ctx._source.collection = '{new_collection_id}';"""
+                        ),
+                    },
+                },
+                wait_for_completion=True,
+                refresh=True,
+            )
+
+            await self.delete_item(
+                item_id=item_id,
+                collection_id=collection_id,
+                refresh=refresh,
+            )
+
+            item["collection"] = new_collection_id
+            collection_id = new_collection_id
+
+        if new_item_id:
+            item["id"] = new_item_id
+            item = await self.async_prep_create_item(item=item, base_url=base_url)
+            await self.create_item(item=item, refresh=True)
+
+            await self.delete_item(
+                item_id=item_id,
+                collection_id=collection_id,
+                refresh=refresh,
+            )
+
+        return item
+
     async def delete_item(self, item_id: str, collection_id: str, **kwargs: Any):
         """Delete a single item from the database.

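`merge_patch_item` turns an RFC 7396 merge-patch body into RFC 6902 operations via `merge_to_operations` and delegates to `json_patch_item`, which applies ordinary operations as an update script and handles `id`/`collection` changes by reindexing and deleting the original document. The sketch below only illustrates the RFC-level correspondence with plain dicts; it is not the helper's actual output format, which this diff does not show:

```python
# Illustration of the RFC 7396 -> RFC 6902 correspondence that merge_to_operations
# relies on, using plain dicts. This is not the helper's actual output format.

# RFC 7396 merge patch: set gsd, delete eo:cloud_cover (null means "remove").
merge_patch = {"properties": {"gsd": 10.0, "eo:cloud_cover": None}}

# Equivalent RFC 6902 operations:
json_patch_ops = [
    {"op": "add", "path": "/properties/gsd", "value": 10.0},
    {"op": "remove", "path": "/properties/eo:cloud_cover"},
]

print(json_patch_ops)
```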
@@ -895,6 +1019,37 @@ class DatabaseLogic(BaseDatabaseLogic):
         except ESNotFoundError:
             raise NotFoundError(f"Mapping for index {index_name} not found")

+    async def get_items_unique_values(
+        self, collection_id: str, field_names: Iterable[str], *, limit: int = 100
+    ) -> Dict[str, List[str]]:
+        """Get the unique values for the given fields in the collection."""
+        limit_plus_one = limit + 1
+        index_name = index_alias_by_collection_id(collection_id)
+
+        query = await self.client.search(
+            index=index_name,
+            body={
+                "size": 0,
+                "aggs": {
+                    field: {"terms": {"field": field, "size": limit_plus_one}}
+                    for field in field_names
+                },
+            },
+        )
+
+        result: Dict[str, List[str]] = {}
+        for field, agg in query["aggregations"].items():
+            if len(agg["buckets"]) > limit:
+                logger.warning(
+                    "Skipping enum field %s: exceeds limit of %d unique values. "
+                    "Consider excluding this field from enumeration or increase the limit.",
+                    field,
+                    limit,
+                )
+                continue
+            result[field] = [bucket["key"] for bucket in agg["buckets"]]
+        return result
+
     async def create_collection(self, collection: Collection, **kwargs: Any):
         """Create a single collection in the database.

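`get_items_unique_values` issues a single `size: 0` search with one `terms` aggregation per field and asks for `limit + 1` buckets so it can detect and skip fields with too many distinct values. A hand-written sketch of the request body it builds and of how bucket keys are collected (field names and the response are illustrative):

```python
# Minimal sketch of the terms-aggregation request/response shape that
# get_items_unique_values builds and parses; names and values are illustrative.
field_names = ["properties.platform", "collection"]
limit = 100

body = {
    "size": 0,  # no hits, aggregations only
    "aggs": {
        field: {"terms": {"field": field, "size": limit + 1}}  # limit + 1 to detect overflow
        for field in field_names
    },
}

# A response carries one bucket list per aggregation, e.g.:
response = {"aggregations": {"properties.platform": {"buckets": [{"key": "landsat-8", "doc_count": 42}]}}}

unique = {
    field: [bucket["key"] for bucket in agg["buckets"]]
    for field, agg in response["aggregations"].items()
    if len(agg["buckets"]) <= limit
}
print(unique)  # {'properties.platform': ['landsat-8']}
```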
@@ -1038,6 +1193,95 @@ class DatabaseLogic(BaseDatabaseLogic):
             refresh=refresh,
         )

+    async def merge_patch_collection(
+        self,
+        collection_id: str,
+        collection: PartialCollection,
+        base_url: str,
+        refresh: bool = True,
+    ) -> Collection:
+        """Database logic for merge patching a collection following RF7396.
+
+        Args:
+            collection_id(str): Id of collection to be patched.
+            collection (PartialCollection): The partial collection to be updated.
+            base_url: (str): The base URL used for constructing links.
+            refresh (bool, optional): Refresh the index after performing the operation. Defaults to True.
+
+
+        Returns:
+            patched collection.
+        """
+        operations = merge_to_operations(collection.model_dump())
+
+        return await self.json_patch_collection(
+            collection_id=collection_id,
+            operations=operations,
+            base_url=base_url,
+            refresh=refresh,
+        )
+
+    async def json_patch_collection(
+        self,
+        collection_id: str,
+        operations: List[PatchOperation],
+        base_url: str,
+        refresh: bool = True,
+    ) -> Collection:
+        """Database logic for json patching a collection following RF6902.
+
+        Args:
+            collection_id(str): Id of collection to be patched.
+            operations (list): List of operations to run.
+            base_url (str): The base URL used for constructing links.
+            refresh (bool, optional): Refresh the index after performing the operation. Defaults to True.
+
+        Returns:
+            patched collection.
+        """
+        new_collection_id = None
+        script_operations = []
+
+        for operation in operations:
+            if (
+                operation.op in ["add", "replace"]
+                and operation.path == "collection"
+                and collection_id != operation.value
+            ):
+                new_collection_id = operation.value
+
+            else:
+                script_operations.append(operation)
+
+        script = operations_to_script(script_operations)
+
+        try:
+            await self.client.update(
+                index=COLLECTIONS_INDEX,
+                id=collection_id,
+                script=script,
+                refresh=True,
+            )
+
+        except BadRequestError as exc:
+            raise HTTPException(
+                status_code=400, detail=exc.info["error"]["caused_by"]
+            ) from exc
+
+        collection = await self.find_collection(collection_id)
+
+        if new_collection_id:
+            collection["id"] = new_collection_id
+            collection["links"] = resolve_links([], base_url)
+
+            await self.update_collection(
+                collection_id=collection_id,
+                collection=collection,
+                refresh=refresh,
+            )
+
+        return collection
+
     async def delete_collection(self, collection_id: str, **kwargs: Any):
         """Delete a collection from the database.

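`json_patch_collection` mirrors the item variant: operations other than a collection rename are reduced to a single update script by `operations_to_script` and applied with `client.update`, with `BadRequestError` surfaced as an HTTP 400. For orientation, here is a hand-written example of that kind of scripted partial update; the painless source is illustrative and not what `operations_to_script` generates:

```python
# Hand-written example of an Elasticsearch scripted partial update, the kind of
# call json_patch_collection issues via self.client.update. The painless source
# below is illustrative only; operations_to_script generates its own script.
from elasticsearch import AsyncElasticsearch


async def rename_collection_title(client: AsyncElasticsearch) -> None:
    await client.update(
        index="collections",          # COLLECTIONS_INDEX in this codebase
        id="sentinel-2-l2a",          # example collection id
        script={
            "lang": "painless",
            "source": "ctx._source.title = params.title",
            "params": {"title": "Sentinel-2 Level-2A"},
        },
        refresh=True,
    )
```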
@@ -1061,28 +1305,15 @@ class DatabaseLogic(BaseDatabaseLogic):
         # Ensure kwargs is a dictionary
         kwargs = kwargs or {}

-        # Verify that the collection exists
-        await self.find_collection(collection_id=collection_id)
-
-        # Resolve the `refresh` parameter
         refresh = kwargs.get("refresh", self.async_settings.database_refresh)
         refresh = validate_refresh(refresh)

-        # Log the deletion attempt
-        logger.info(f"Deleting collection {collection_id} with refresh={refresh}")
-
-        # Delete the collection from the database
+        # Verify that the collection exists
+        await self.find_collection(collection_id=collection_id)
         await self.client.delete(
             index=COLLECTIONS_INDEX, id=collection_id, refresh=refresh
         )
-
-        # Delete the item index for the collection
-        try:
-            await delete_item_index(collection_id)
-        except Exception as e:
-            logger.error(
-                f"Failed to delete item index for collection {collection_id}: {e}"
-            )
+        await delete_item_index(collection_id)

     async def bulk_async(
         self,
--- a/stac_fastapi/elasticsearch/version.py
+++ b/stac_fastapi/elasticsearch/version.py
@@ -1,2 +1,2 @@
 """library version."""
-__version__ = "5.0.0a1"
+__version__ = "6.1.0"
--- a/stac_fastapi_elasticsearch-5.0.0a1.dist-info/METADATA
+++ b/stac_fastapi_elasticsearch-6.1.0.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: stac-fastapi-elasticsearch
-Version: 5.0.0a1
+Version: 6.1.0
 Summary: An implementation of STAC API based on the FastAPI framework with both Elasticsearch and Opensearch.
 Home-page: https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch
 License: MIT
@@ -15,8 +15,8 @@ Classifier: Programming Language :: Python :: 3.13
 Classifier: License :: OSI Approved :: MIT License
 Requires-Python: >=3.9
 Description-Content-Type: text/markdown
-Requires-Dist: stac-fastapi-core==5.0.0a1
-Requires-Dist: sfeos-helpers==5.0.0a1
+Requires-Dist: stac-fastapi-core==6.1.0
+Requires-Dist: sfeos-helpers==6.1.0
 Requires-Dist: elasticsearch[async]~=8.18.0
 Requires-Dist: uvicorn~=0.23.0
 Requires-Dist: starlette<0.36.0,>=0.35.0
@@ -25,7 +25,6 @@ Requires-Dist: pytest~=7.0.0; extra == "dev"
 Requires-Dist: pytest-cov~=4.0.0; extra == "dev"
 Requires-Dist: pytest-asyncio~=0.21.0; extra == "dev"
 Requires-Dist: pre-commit~=3.0.0; extra == "dev"
-Requires-Dist: requests<3.0.0,>=2.32.0; extra == "dev"
 Requires-Dist: ciso8601~=2.3.0; extra == "dev"
 Requires-Dist: httpx<0.28.0,>=0.24.0; extra == "dev"
 Provides-Extra: docs
@@ -52,7 +51,7 @@ Requires-Dist: uvicorn[standard]~=0.23.0; extra == "server"
 [![GitHub forks](https://img.shields.io/github/forks/stac-utils/stac-fastapi-elasticsearch-opensearch.svg?color=blue)](https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/network/members)
 [![PyPI version](https://img.shields.io/pypi/v/stac-fastapi-elasticsearch.svg?color=blue)](https://pypi.org/project/stac-fastapi-elasticsearch/)
 [![STAC](https://img.shields.io/badge/STAC-1.1.0-blue.svg)](https://github.com/radiantearth/stac-spec/tree/v1.1.0)
-[![stac-fastapi](https://img.shields.io/badge/stac--fastapi-5.2.0-blue.svg)](https://github.com/stac-utils/stac-fastapi)
+[![stac-fastapi](https://img.shields.io/badge/stac--fastapi-6.0.0-blue.svg)](https://github.com/stac-utils/stac-fastapi)

 ## Sponsors & Supporters

@@ -242,28 +241,30 @@ You can customize additional settings in your `.env` file:
 |------------------------------|--------------------------------------------------------------------------------------|--------------------------|----------|
 | `ES_HOST`                    | Hostname for external Elasticsearch/OpenSearch.                                       | `localhost`              | Optional |
 | `ES_PORT`                    | Port for Elasticsearch/OpenSearch.                                                    | `9200` (ES) / `9202` (OS)| Optional |
-| `ES_USE_SSL`                 | Use SSL for connecting to Elasticsearch/OpenSearch.                                   | `false`                  | Optional |
-| `ES_VERIFY_CERTS`            | Verify SSL certificates when connecting.                                              | `false`                  | Optional |
+| `ES_USE_SSL`                 | Use SSL for connecting to Elasticsearch/OpenSearch.                                   | `true`                   | Optional |
+| `ES_VERIFY_CERTS`            | Verify SSL certificates when connecting.                                              | `true`                   | Optional |
+| `ES_API_KEY`                 | API Key for external Elasticsearch/OpenSearch.                                        | N/A                      | Optional |
+| `ES_TIMEOUT`                 | Client timeout for Elasticsearch/OpenSearch.                                          | DB client default        | Optional |
 | `STAC_FASTAPI_TITLE`         | Title of the API in the documentation.                                                | `stac-fastapi-<backend>` | Optional |
 | `STAC_FASTAPI_DESCRIPTION`   | Description of the API in the documentation.                                          | N/A                      | Optional |
 | `STAC_FASTAPI_VERSION`       | API version.                                                                          | `2.1`                    | Optional |
-| `STAC_FASTAPI_LANDING_PAGE_ID` | Landing page ID                                                                      | `stac-fastapi`           | Optional |
+| `STAC_FASTAPI_LANDING_PAGE_ID` | Landing page ID                                                                     | `stac-fastapi`           | Optional |
 | `APP_HOST`                   | Server bind address.                                                                  | `0.0.0.0`                | Optional |
-| `APP_PORT`                   | Server port.                                                                          | `8080`                   | Optional |
+| `APP_PORT`                   | Server port.                                                                          | `8000`                   | Optional |
 | `ENVIRONMENT`                | Runtime environment.                                                                  | `local`                  | Optional |
 | `WEB_CONCURRENCY`            | Number of worker processes.                                                           | `10`                     | Optional |
 | `RELOAD`                     | Enable auto-reload for development.                                                   | `true`                   | Optional |
 | `STAC_FASTAPI_RATE_LIMIT`    | API rate limit per client.                                                            | `200/minute`             | Optional |
-| `BACKEND`                    | Tests-related variable                                                                | `elasticsearch` or `opensearch` based on the backend | Optional |
-| `ELASTICSEARCH_VERSION`      | Version of Elasticsearch to use.                                                      | `8.11.0`                 | Optional | |
-| `OPENSEARCH_VERSION`         | OpenSearch version                                                                    | `2.11.1`                 | Optional
-| `ENABLE_DIRECT_RESPONSE`     | Enable direct response for maximum performance (disables all FastAPI dependencies, including authentication, custom status codes, and validation) | `false` | Optional
-| `RAISE_ON_BULK_ERROR`        | Controls whether bulk insert operations raise exceptions on errors. If set to `true`, the operation will stop and raise an exception when an error occurs. If set to `false`, errors will be logged, and the operation will continue. **Note:** STAC Item and ItemCollection validation errors will always raise, regardless of this flag. | `false` Optional |
-| `DATABASE_REFRESH`           | Controls whether database operations refresh the index immediately after changes. If set to `true`, changes will be immediately searchable. If set to `false`, changes may not be immediately visible but can improve performance for bulk operations. If set to `wait_for`, changes will wait for the next refresh cycle to become visible. | `false` | Optional |
+| `BACKEND`                    | Tests-related variable                                                                | `elasticsearch` or `opensearch` based on the backend | Optional |
+| `ELASTICSEARCH_VERSION`      | Version of Elasticsearch to use.                                                      | `8.11.0`                 | Optional |
+| `OPENSEARCH_VERSION`         | OpenSearch version                                                                    | `2.11.1`                 | Optional |
+| `ENABLE_DIRECT_RESPONSE`     | Enable direct response for maximum performance (disables all FastAPI dependencies, including authentication, custom status codes, and validation) | `false` | Optional |
+| `RAISE_ON_BULK_ERROR`        | Controls whether bulk insert operations raise exceptions on errors. If set to `true`, the operation will stop and raise an exception when an error occurs. If set to `false`, errors will be logged, and the operation will continue. **Note:** STAC Item and ItemCollection validation errors will always raise, regardless of this flag. | `false` | Optional |
+| `DATABASE_REFRESH`           | Controls whether database operations refresh the index immediately after changes. If set to `true`, changes will be immediately searchable. If set to `false`, changes may not be immediately visible but can improve performance for bulk operations. If set to `wait_for`, changes will wait for the next refresh cycle to become visible. | `false` | Optional |
 | `ENABLE_TRANSACTIONS_EXTENSIONS` | Enables or disables the Transactions and Bulk Transactions API extensions. If set to `false`, the POST `/collections` route and related transaction endpoints (including bulk transaction operations) will be unavailable in the API. This is useful for deployments where mutating the catalog via the API should be prevented. | `true` | Optional |

 > [!NOTE]
-> The variables `ES_HOST`, `ES_PORT`, `ES_USE_SSL`, and `ES_VERIFY_CERTS` apply to both Elasticsearch and OpenSearch backends, so there is no need to rename the key names to `OS_` even if you're using OpenSearch.
+> The variables `ES_HOST`, `ES_PORT`, `ES_USE_SSL`, `ES_VERIFY_CERTS` and `ES_TIMEOUT` apply to both Elasticsearch and OpenSearch backends, so there is no need to rename the key names to `OS_` even if you're using OpenSearch.

 ## Interacting with the API

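With the 6.x documentation the defaults for `ES_USE_SSL` and `ES_VERIFY_CERTS` are `true`, so a local, non-TLS Elasticsearch now needs them disabled explicitly, and `ES_TIMEOUT`/`ES_API_KEY` are available for client tuning and authentication. A minimal sketch of a local development environment set from Python (values are illustrative; a `.env` file works just as well):

```python
# Minimal sketch: environment for a local, non-TLS Elasticsearch. Variable names
# come from the table above; values are illustrative for local development.
import os

os.environ.setdefault("ES_HOST", "localhost")
os.environ.setdefault("ES_PORT", "9200")
os.environ.setdefault("ES_USE_SSL", "false")       # documented default is now "true"
os.environ.setdefault("ES_VERIFY_CERTS", "false")  # documented default is now "true"
os.environ.setdefault("ES_TIMEOUT", "30")          # optional client timeout
os.environ.setdefault("STAC_FASTAPI_RATE_LIMIT", "200/minute")
```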
--- /dev/null
+++ b/stac_fastapi_elasticsearch-6.1.0.dist-info/RECORD
@@ -0,0 +1,10 @@
+stac_fastapi/elasticsearch/__init__.py,sha256=w_MZutYLreNV372sCuO46bPb0TngmPs4u8737ueS0wE,31
+stac_fastapi/elasticsearch/app.py,sha256=EK0H5qiRX-frbSHkBkZ_4Lzo0Jn8hBzJckSLCYbQlKs,5662
+stac_fastapi/elasticsearch/config.py,sha256=itvPYr4TiOg9pWQrycgGaQxQ_Vc2KKP3aHdtH0OUZvw,5322
+stac_fastapi/elasticsearch/database_logic.py,sha256=8P-eUGkDQAABOJWaYd_NTHpKpKug7o2c6BMDWYHH5m4,56201
+stac_fastapi/elasticsearch/version.py,sha256=7IrY7mbr0cGVqZsk6wmCeITxZjDgz_mPHUswrziX5ME,45
+stac_fastapi_elasticsearch-6.1.0.dist-info/METADATA,sha256=a2Z6VUi-YBORUsPZZDBJt1HuLqPeEPKtFgE2PqKEXj8,32286
+stac_fastapi_elasticsearch-6.1.0.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+stac_fastapi_elasticsearch-6.1.0.dist-info/entry_points.txt,sha256=aCKixki0LpUl64UPsPMtiNvfdyq-QsTCxVjJ54VF6Jk,82
+stac_fastapi_elasticsearch-6.1.0.dist-info/top_level.txt,sha256=vqn-D9-HsRPTTxy0Vk_KkDmTiMES4owwBQ3ydSZYb2s,13
+stac_fastapi_elasticsearch-6.1.0.dist-info/RECORD,,
--- a/stac_fastapi_elasticsearch-5.0.0a1.dist-info/RECORD
+++ /dev/null
@@ -1,10 +0,0 @@
-stac_fastapi/elasticsearch/__init__.py,sha256=w_MZutYLreNV372sCuO46bPb0TngmPs4u8737ueS0wE,31
-stac_fastapi/elasticsearch/app.py,sha256=uuBk3Vg8vdl15yIQCFCxJ8UfFtaKjMvmXXULMd8Lw0g,5080
-stac_fastapi/elasticsearch/config.py,sha256=PKSowbXmSryMj0Oq15XJduyPL2c_NlDkewXnR1DFP2o,5181
-stac_fastapi/elasticsearch/database_logic.py,sha256=-qvrOTyoVWe4Ua4lu5Fjq1NmrY_fj_3Emt1HV8i5Dsw,48248
-stac_fastapi/elasticsearch/version.py,sha256=MQ4i7PSJnhrL3abG-m_vOwI3ksvRoHcs0eiBtxywE3E,47
-stac_fastapi_elasticsearch-5.0.0a1.dist-info/METADATA,sha256=9sB79eLh3KasJb7ykxdvhHvB2w-peD1_N6Z4wltzJmA,31992
-stac_fastapi_elasticsearch-5.0.0a1.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
-stac_fastapi_elasticsearch-5.0.0a1.dist-info/entry_points.txt,sha256=aCKixki0LpUl64UPsPMtiNvfdyq-QsTCxVjJ54VF6Jk,82
-stac_fastapi_elasticsearch-5.0.0a1.dist-info/top_level.txt,sha256=vqn-D9-HsRPTTxy0Vk_KkDmTiMES4owwBQ3ydSZYb2s,13
-stac_fastapi_elasticsearch-5.0.0a1.dist-info/RECORD,,