stac-fastapi-opensearch 5.0.0a1__py3-none-any.whl → 6.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -25,12 +25,14 @@ from stac_fastapi.core.session import Session
25
25
  from stac_fastapi.core.utilities import get_bool_env
26
26
  from stac_fastapi.extensions.core import (
27
27
  AggregationExtension,
28
+ CollectionSearchExtension,
28
29
  FilterExtension,
29
30
  FreeTextExtension,
30
31
  SortExtension,
31
32
  TokenPaginationExtension,
32
33
  TransactionExtension,
33
34
  )
35
+ from stac_fastapi.extensions.core.filter import FilterConformanceClasses
34
36
  from stac_fastapi.extensions.third_party import BulkTransactionExtension
35
37
  from stac_fastapi.opensearch.config import OpensearchSettings
36
38
  from stac_fastapi.opensearch.database_logic import (
@@ -56,7 +58,15 @@ filter_extension = FilterExtension(
56
58
  client=EsAsyncBaseFiltersClient(database=database_logic)
57
59
  )
58
60
  filter_extension.conformance_classes.append(
59
- "http://www.opengis.net/spec/cql2/1.0/conf/advanced-comparison-operators"
61
+ FilterConformanceClasses.ADVANCED_COMPARISON_OPERATORS
62
+ )
63
+
64
+ # Adding collection search extension for compatibility with stac-auth-proxy
65
+ # (https://github.com/developmentseed/stac-auth-proxy)
66
+ # The extension is not fully implemented yet but is required for collection filtering support
67
+ collection_search_extension = CollectionSearchExtension()
68
+ collection_search_extension.conformance_classes.append(
69
+ "https://api.stacspec.org/v1.0.0-rc.1/collection-search#filter"
60
70
  )
61
71
 
62
72
  aggregation_extension = AggregationExtension(
@@ -74,6 +84,7 @@ search_extensions = [
74
84
  TokenPaginationExtension(),
75
85
  filter_extension,
76
86
  FreeTextExtension(),
87
+ collection_search_extension,
77
88
  ]
78
89
 
79
90
 
@@ -104,22 +115,24 @@ database_logic.extensions = [type(ext).__name__ for ext in extensions]
104
115
 
105
116
  post_request_model = create_post_request_model(search_extensions)
106
117
 
107
- api = StacApi(
108
- title=os.getenv("STAC_FASTAPI_TITLE", "stac-fastapi-opensearch"),
109
- description=os.getenv("STAC_FASTAPI_DESCRIPTION", "stac-fastapi-opensearch"),
110
- api_version=os.getenv("STAC_FASTAPI_VERSION", "5.0.0a1"),
111
- settings=settings,
112
- extensions=extensions,
113
- client=CoreClient(
118
+ app_config = {
119
+ "title": os.getenv("STAC_FASTAPI_TITLE", "stac-fastapi-opensearch"),
120
+ "description": os.getenv("STAC_FASTAPI_DESCRIPTION", "stac-fastapi-opensearch"),
121
+ "api_version": os.getenv("STAC_FASTAPI_VERSION", "6.1.0"),
122
+ "settings": settings,
123
+ "extensions": extensions,
124
+ "client": CoreClient(
114
125
  database=database_logic,
115
126
  session=session,
116
127
  post_request_model=post_request_model,
117
128
  landing_page_id=os.getenv("STAC_FASTAPI_LANDING_PAGE_ID", "stac-fastapi"),
118
129
  ),
119
- search_get_request_model=create_get_request_model(search_extensions),
120
- search_post_request_model=post_request_model,
121
- route_dependencies=get_route_dependencies(),
122
- )
130
+ "search_get_request_model": create_get_request_model(search_extensions),
131
+ "search_post_request_model": post_request_model,
132
+ "route_dependencies": get_route_dependencies(),
133
+ }
134
+
135
+ api = StacApi(**app_config)
123
136
 
124
137
 
125
138
  @asynccontextmanager
@@ -53,6 +53,10 @@ def _es_config() -> Dict[str, Any]:
53
53
 
54
54
  config["headers"] = headers
55
55
 
56
+ # Include timeout setting if set
57
+ if timeout := os.getenv("ES_TIMEOUT"):
58
+ config["timeout"] = timeout
59
+
56
60
  # Explicitly exclude SSL settings when not using SSL
57
61
  if not use_ssl:
58
62
  return config
@@ -1,13 +1,14 @@
1
1
  """Database logic."""
2
2
 
3
3
  import asyncio
4
- import json
5
4
  import logging
6
5
  from base64 import urlsafe_b64decode, urlsafe_b64encode
7
6
  from copy import deepcopy
8
7
  from typing import Any, Dict, Iterable, List, Optional, Tuple, Type, Union
9
8
 
10
9
  import attr
10
+ import orjson
11
+ from fastapi import HTTPException
11
12
  from opensearchpy import exceptions, helpers
12
13
  from opensearchpy.helpers.query import Q
13
14
  from opensearchpy.helpers.search import Search
@@ -16,6 +17,11 @@ from starlette.requests import Request
16
17
  from stac_fastapi.core.base_database_logic import BaseDatabaseLogic
17
18
  from stac_fastapi.core.serializers import CollectionSerializer, ItemSerializer
18
19
  from stac_fastapi.core.utilities import MAX_LIMIT, bbox2polygon
20
+ from stac_fastapi.extensions.core.transaction.request import (
21
+ PartialCollection,
22
+ PartialItem,
23
+ PatchOperation,
24
+ )
19
25
  from stac_fastapi.opensearch.config import (
20
26
  AsyncOpensearchSettings as AsyncSearchSettings,
21
27
  )
@@ -36,6 +42,14 @@ from stac_fastapi.sfeos_helpers.database import (
36
42
  return_date,
37
43
  validate_refresh,
38
44
  )
45
+ from stac_fastapi.sfeos_helpers.database.query import (
46
+ ES_MAX_URL_LENGTH,
47
+ add_collections_to_body,
48
+ )
49
+ from stac_fastapi.sfeos_helpers.database.utils import (
50
+ merge_to_operations,
51
+ operations_to_script,
52
+ )
39
53
  from stac_fastapi.sfeos_helpers.mappings import (
40
54
  AGGREGATION_MAPPING,
41
55
  COLLECTIONS_INDEX,
@@ -48,6 +62,7 @@ from stac_fastapi.sfeos_helpers.mappings import (
48
62
  Geometry,
49
63
  )
50
64
  from stac_fastapi.types.errors import ConflictError, NotFoundError
65
+ from stac_fastapi.types.links import resolve_links
51
66
  from stac_fastapi.types.rfc3339 import DateTimeType
52
67
  from stac_fastapi.types.stac import Collection, Item
53
68
 
@@ -282,121 +297,97 @@ class DatabaseLogic(BaseDatabaseLogic):
282
297
  @staticmethod
283
298
  def apply_datetime_filter(
284
299
  search: Search, interval: Optional[Union[DateTimeType, str]]
285
- ):
286
- """Apply a filter to search based on datetime field, start_datetime, and end_datetime fields.
300
+ ) -> Search:
301
+ """Apply a filter to search on datetime, start_datetime, and end_datetime fields.
287
302
 
288
303
  Args:
289
- search (Search): The search object to filter.
290
- interval: Optional[Union[DateTimeType, str]]
304
+ search: The search object to filter.
305
+ interval: Optional datetime interval to filter by. Can be:
306
+ - A single datetime string (e.g., "2023-01-01T12:00:00")
307
+ - A datetime range string (e.g., "2023-01-01/2023-12-31")
308
+ - A datetime object
309
+ - A tuple of (start_datetime, end_datetime)
291
310
 
292
311
  Returns:
293
- Search: The filtered search object.
312
+ The filtered search object.
294
313
  """
314
+ if not interval:
315
+ return search
316
+
295
317
  should = []
296
- datetime_search = return_date(interval)
318
+ try:
319
+ datetime_search = return_date(interval)
320
+ except (ValueError, TypeError) as e:
321
+ # Handle invalid interval formats if return_date fails
322
+ logger.error(f"Invalid interval format: {interval}, error: {e}")
323
+ return search
297
324
 
298
- # If the request is a single datetime return
299
- # items with datetimes equal to the requested datetime OR
300
- # the requested datetime is between their start and end datetimes
301
325
  if "eq" in datetime_search:
302
- should.extend(
303
- [
304
- Q(
305
- "bool",
306
- filter=[
307
- Q(
308
- "term",
309
- properties__datetime=datetime_search["eq"],
310
- ),
311
- ],
312
- ),
313
- Q(
314
- "bool",
315
- filter=[
316
- Q(
317
- "range",
318
- properties__start_datetime={
319
- "lte": datetime_search["eq"],
320
- },
321
- ),
322
- Q(
323
- "range",
324
- properties__end_datetime={
325
- "gte": datetime_search["eq"],
326
- },
327
- ),
328
- ],
329
- ),
330
- ]
331
- )
332
-
333
- # If the request is a date range return
334
- # items with datetimes within the requested date range OR
335
- # their startdatetime ithin the requested date range OR
336
- # their enddatetime ithin the requested date range OR
337
- # the requested daterange within their start and end datetimes
326
+ # For exact matches, include:
327
+ # 1. Items with matching exact datetime
328
+ # 2. Items with datetime:null where the time falls within their range
329
+ should = [
330
+ Q(
331
+ "bool",
332
+ filter=[
333
+ Q("exists", field="properties.datetime"),
334
+ Q("term", **{"properties__datetime": datetime_search["eq"]}),
335
+ ],
336
+ ),
337
+ Q(
338
+ "bool",
339
+ must_not=[Q("exists", field="properties.datetime")],
340
+ filter=[
341
+ Q("exists", field="properties.start_datetime"),
342
+ Q("exists", field="properties.end_datetime"),
343
+ Q(
344
+ "range",
345
+ properties__start_datetime={"lte": datetime_search["eq"]},
346
+ ),
347
+ Q(
348
+ "range",
349
+ properties__end_datetime={"gte": datetime_search["eq"]},
350
+ ),
351
+ ],
352
+ ),
353
+ ]
338
354
  else:
339
- should.extend(
340
- [
341
- Q(
342
- "bool",
343
- filter=[
344
- Q(
345
- "range",
346
- properties__datetime={
347
- "gte": datetime_search["gte"],
348
- "lte": datetime_search["lte"],
349
- },
350
- ),
351
- ],
352
- ),
353
- Q(
354
- "bool",
355
- filter=[
356
- Q(
357
- "range",
358
- properties__start_datetime={
359
- "gte": datetime_search["gte"],
360
- "lte": datetime_search["lte"],
361
- },
362
- ),
363
- ],
364
- ),
365
- Q(
366
- "bool",
367
- filter=[
368
- Q(
369
- "range",
370
- properties__end_datetime={
371
- "gte": datetime_search["gte"],
372
- "lte": datetime_search["lte"],
373
- },
374
- ),
375
- ],
376
- ),
377
- Q(
378
- "bool",
379
- filter=[
380
- Q(
381
- "range",
382
- properties__start_datetime={
383
- "lte": datetime_search["gte"]
384
- },
385
- ),
386
- Q(
387
- "range",
388
- properties__end_datetime={
389
- "gte": datetime_search["lte"]
390
- },
391
- ),
392
- ],
393
- ),
394
- ]
395
- )
396
-
397
- search = search.query(Q("bool", filter=[Q("bool", should=should)]))
398
-
399
- return search
355
+ # For date ranges, include:
356
+ # 1. Items with datetime in the range
357
+ # 2. Items with datetime:null that overlap the search range
358
+ should = [
359
+ Q(
360
+ "bool",
361
+ filter=[
362
+ Q("exists", field="properties.datetime"),
363
+ Q(
364
+ "range",
365
+ properties__datetime={
366
+ "gte": datetime_search["gte"],
367
+ "lte": datetime_search["lte"],
368
+ },
369
+ ),
370
+ ],
371
+ ),
372
+ Q(
373
+ "bool",
374
+ must_not=[Q("exists", field="properties.datetime")],
375
+ filter=[
376
+ Q("exists", field="properties.start_datetime"),
377
+ Q("exists", field="properties.end_datetime"),
378
+ Q(
379
+ "range",
380
+ properties__start_datetime={"lte": datetime_search["lte"]},
381
+ ),
382
+ Q(
383
+ "range",
384
+ properties__end_datetime={"gte": datetime_search["gte"]},
385
+ ),
386
+ ],
387
+ ),
388
+ ]
389
+
390
+ return search.query(Q("bool", should=should, minimum_should_match=1))
400
391
 
401
392
  @staticmethod
402
393
  def apply_bbox_filter(search: Search, bbox: List):
@@ -545,20 +536,24 @@ class DatabaseLogic(BaseDatabaseLogic):
545
536
  """
546
537
  search_body: Dict[str, Any] = {}
547
538
  query = search.query.to_dict() if search.query else None
539
+
540
+ index_param = indices(collection_ids)
541
+ if len(index_param) > ES_MAX_URL_LENGTH - 300:
542
+ index_param = ITEM_INDICES
543
+ query = add_collections_to_body(collection_ids, query)
544
+
548
545
  if query:
549
546
  search_body["query"] = query
550
547
 
551
548
  search_after = None
552
549
 
553
550
  if token:
554
- search_after = json.loads(urlsafe_b64decode(token).decode())
551
+ search_after = orjson.loads(urlsafe_b64decode(token))
555
552
  if search_after:
556
553
  search_body["search_after"] = search_after
557
554
 
558
555
  search_body["sort"] = sort if sort else DEFAULT_SORT
559
556
 
560
- index_param = indices(collection_ids)
561
-
562
557
  max_result_window = MAX_LIMIT
563
558
 
564
559
  size_limit = min(limit + 1, max_result_window)
@@ -591,7 +586,7 @@ class DatabaseLogic(BaseDatabaseLogic):
591
586
  next_token = None
592
587
  if len(hits) > limit and limit < max_result_window:
593
588
  if hits and (sort_array := hits[limit - 1].get("sort")):
594
- next_token = urlsafe_b64encode(json.dumps(sort_array).encode()).decode()
589
+ next_token = urlsafe_b64encode(orjson.dumps(sort_array)).decode()
595
590
 
596
591
  matched = (
597
592
  es_response["hits"]["total"]["value"]
@@ -852,6 +847,135 @@ class DatabaseLogic(BaseDatabaseLogic):
852
847
  refresh=refresh,
853
848
  )
854
849
 
850
+ async def merge_patch_item(
851
+ self,
852
+ collection_id: str,
853
+ item_id: str,
854
+ item: PartialItem,
855
+ base_url: str,
856
+ refresh: bool = True,
857
+ ) -> Item:
858
+ """Database logic for merge patching an item following RFC 7396.
859
+
860
+ Args:
861
+ collection_id(str): Collection that item belongs to.
862
+ item_id(str): Id of item to be patched.
863
+ item (PartialItem): The partial item to be updated.
864
+ base_url (str): The base URL used for constructing URLs for the item.
865
+ refresh (bool, optional): Refresh the index after performing the operation. Defaults to True.
866
+
867
+ Returns:
868
+ patched item.
869
+ """
870
+ operations = merge_to_operations(item.model_dump())
871
+
872
+ return await self.json_patch_item(
873
+ collection_id=collection_id,
874
+ item_id=item_id,
875
+ operations=operations,
876
+ base_url=base_url,
877
+ refresh=refresh,
878
+ )
879
+
880
+ async def json_patch_item(
881
+ self,
882
+ collection_id: str,
883
+ item_id: str,
884
+ operations: List[PatchOperation],
885
+ base_url: str,
886
+ refresh: bool = True,
887
+ ) -> Item:
888
+ """Database logic for json patching an item following RFC 6902.
889
+
890
+ Args:
891
+ collection_id(str): Collection that item belongs to.
892
+ item_id(str): Id of item to be patched.
893
+ operations (list): List of operations to run.
894
+ base_url (str): The base URL used for constructing URLs for the item.
895
+ refresh (bool, optional): Refresh the index after performing the operation. Defaults to True.
896
+
897
+ Returns:
898
+ patched item.
899
+ """
900
+ new_item_id = None
901
+ new_collection_id = None
902
+ script_operations = []
903
+
904
+ for operation in operations:
905
+ if operation.path in ["collection", "id"] and operation.op in [
906
+ "add",
907
+ "replace",
908
+ ]:
909
+
910
+ if operation.path == "collection" and collection_id != operation.value:
911
+ await self.check_collection_exists(collection_id=operation.value)
912
+ new_collection_id = operation.value
913
+
914
+ if operation.path == "id" and item_id != operation.value:
915
+ new_item_id = operation.value
916
+
917
+ else:
918
+ script_operations.append(operation)
919
+
920
+ script = operations_to_script(script_operations)
921
+
922
+ try:
923
+ await self.client.update(
924
+ index=index_alias_by_collection_id(collection_id),
925
+ id=mk_item_id(item_id, collection_id),
926
+ body={"script": script},
927
+ refresh=True,
928
+ )
929
+
930
+ except exceptions.RequestError as exc:
931
+ raise HTTPException(
932
+ status_code=400, detail=exc.info["error"]["caused_by"]
933
+ ) from exc
934
+
935
+ item = await self.get_one_item(collection_id, item_id)
936
+
937
+ if new_collection_id:
938
+ await self.client.reindex(
939
+ body={
940
+ "dest": {"index": f"{ITEMS_INDEX_PREFIX}{new_collection_id}"},
941
+ "source": {
942
+ "index": f"{ITEMS_INDEX_PREFIX}{collection_id}",
943
+ "query": {"term": {"id": {"value": item_id}}},
944
+ },
945
+ "script": {
946
+ "lang": "painless",
947
+ "source": (
948
+ f"""ctx._id = ctx._id.replace('{collection_id}', '{new_collection_id}');"""
949
+ f"""ctx._source.collection = '{new_collection_id}';"""
950
+ ),
951
+ },
952
+ },
953
+ wait_for_completion=True,
954
+ refresh=True,
955
+ )
956
+
957
+ await self.delete_item(
958
+ item_id=item_id,
959
+ collection_id=collection_id,
960
+ refresh=refresh,
961
+ )
962
+
963
+ item["collection"] = new_collection_id
964
+ collection_id = new_collection_id
965
+
966
+ if new_item_id:
967
+ item["id"] = new_item_id
968
+ item = await self.async_prep_create_item(item=item, base_url=base_url)
969
+ await self.create_item(item=item, refresh=True)
970
+
971
+ await self.delete_item(
972
+ item_id=item_id,
973
+ collection_id=collection_id,
974
+ refresh=refresh,
975
+ )
976
+
977
+ return item
978
+
855
979
  async def delete_item(self, item_id: str, collection_id: str, **kwargs: Any):
856
980
  """Delete a single item from the database.
857
981
 
@@ -904,6 +1028,37 @@ class DatabaseLogic(BaseDatabaseLogic):
904
1028
  except exceptions.NotFoundError:
905
1029
  raise NotFoundError(f"Mapping for index {index_name} not found")
906
1030
 
1031
+ async def get_items_unique_values(
1032
+ self, collection_id: str, field_names: Iterable[str], *, limit: int = 100
1033
+ ) -> Dict[str, List[str]]:
1034
+ """Get the unique values for the given fields in the collection."""
1035
+ limit_plus_one = limit + 1
1036
+ index_name = index_alias_by_collection_id(collection_id)
1037
+
1038
+ query = await self.client.search(
1039
+ index=index_name,
1040
+ body={
1041
+ "size": 0,
1042
+ "aggs": {
1043
+ field: {"terms": {"field": field, "size": limit_plus_one}}
1044
+ for field in field_names
1045
+ },
1046
+ },
1047
+ )
1048
+
1049
+ result: Dict[str, List[str]] = {}
1050
+ for field, agg in query["aggregations"].items():
1051
+ if len(agg["buckets"]) > limit:
1052
+ logger.warning(
1053
+ "Skipping enum field %s: exceeds limit of %d unique values. "
1054
+ "Consider excluding this field from enumeration or increase the limit.",
1055
+ field,
1056
+ limit,
1057
+ )
1058
+ continue
1059
+ result[field] = [bucket["key"] for bucket in agg["buckets"]]
1060
+ return result
1061
+
907
1062
  async def create_collection(self, collection: Collection, **kwargs: Any):
908
1063
  """Create a single collection in the database.
909
1064
 
@@ -1028,6 +1183,95 @@ class DatabaseLogic(BaseDatabaseLogic):
1028
1183
  refresh=refresh,
1029
1184
  )
1030
1185
 
1186
+ async def merge_patch_collection(
1187
+ self,
1188
+ collection_id: str,
1189
+ collection: PartialCollection,
1190
+ base_url: str,
1191
+ refresh: bool = True,
1192
+ ) -> Collection:
1193
+ """Database logic for merge patching a collection following RFC 7396.
1194
+
1195
+ Args:
1196
+ collection_id(str): Id of collection to be patched.
1197
+ collection (PartialCollection): The partial collection to be updated.
1198
+ base_url (str): The base URL used for constructing links.
1199
+ refresh (bool, optional): Refresh the index after performing the operation. Defaults to True.
1200
+
1201
+
1202
+ Returns:
1203
+ patched collection.
1204
+ """
1205
+ operations = merge_to_operations(collection.model_dump())
1206
+
1207
+ return await self.json_patch_collection(
1208
+ collection_id=collection_id,
1209
+ operations=operations,
1210
+ base_url=base_url,
1211
+ refresh=refresh,
1212
+ )
1213
+
1214
+ async def json_patch_collection(
1215
+ self,
1216
+ collection_id: str,
1217
+ operations: List[PatchOperation],
1218
+ base_url: str,
1219
+ refresh: bool = True,
1220
+ ) -> Collection:
1221
+ """Database logic for json patching a collection following RFC 6902.
1222
+
1223
+ Args:
1224
+ collection_id(str): Id of collection to be patched.
1225
+ operations (list): List of operations to run.
1226
+ base_url (str): The base URL used for constructing links.
1227
+ refresh (bool, optional): Refresh the index after performing the operation. Defaults to True.
1228
+
1229
+ Returns:
1230
+ patched collection.
1231
+ """
1232
+ new_collection_id = None
1233
+ script_operations = []
1234
+
1235
+ for operation in operations:
1236
+ if (
1237
+ operation.op in ["add", "replace"]
1238
+ and operation.path == "collection"
1239
+ and collection_id != operation.value
1240
+ ):
1241
+ new_collection_id = operation.value
1242
+
1243
+ else:
1244
+ script_operations.append(operation)
1245
+
1246
+ script = operations_to_script(script_operations)
1247
+
1248
+ try:
1249
+ await self.client.update(
1250
+ index=COLLECTIONS_INDEX,
1251
+ id=collection_id,
1252
+ body={"script": script},
1253
+ refresh=True,
1254
+ )
1255
+
1256
+ except exceptions.RequestError as exc:
1257
+ raise HTTPException(
1258
+ status_code=400, detail=exc.info["error"]["caused_by"]
1259
+ ) from exc
1260
+
1261
+ collection = await self.find_collection(collection_id)
1262
+
1263
+ if new_collection_id:
1264
+ collection["id"] = new_collection_id
1265
+ collection["links"] = resolve_links([], base_url)
1266
+
1267
+ await self.update_collection(
1268
+ collection_id=collection_id,
1269
+ collection=collection,
1270
+ refresh=refresh,
1271
+ )
1272
+
1273
+ return collection
1274
+
1031
1275
  async def delete_collection(self, collection_id: str, **kwargs: Any):
1032
1276
  """Delete a collection from the database.
1033
1277
 
@@ -1,2 +1,2 @@
1
1
  """library version."""
2
- __version__ = "5.0.0a1"
2
+ __version__ = "6.1.0"
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: stac-fastapi-opensearch
3
- Version: 5.0.0a1
3
+ Version: 6.1.0
4
4
  Summary: Opensearch stac-fastapi backend.
5
5
  Home-page: https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch
6
6
  License: MIT
@@ -15,8 +15,8 @@ Classifier: Programming Language :: Python :: 3.13
15
15
  Classifier: License :: OSI Approved :: MIT License
16
16
  Requires-Python: >=3.9
17
17
  Description-Content-Type: text/markdown
18
- Requires-Dist: stac-fastapi-core==5.0.0a1
19
- Requires-Dist: sfeos-helpers==5.0.0a1
18
+ Requires-Dist: stac-fastapi-core==6.1.0
19
+ Requires-Dist: sfeos-helpers==6.1.0
20
20
  Requires-Dist: opensearch-py~=2.8.0
21
21
  Requires-Dist: opensearch-py[async]~=2.8.0
22
22
  Requires-Dist: uvicorn~=0.23.0
@@ -26,7 +26,6 @@ Requires-Dist: pytest~=7.0.0; extra == "dev"
26
26
  Requires-Dist: pytest-cov~=4.0.0; extra == "dev"
27
27
  Requires-Dist: pytest-asyncio~=0.21.0; extra == "dev"
28
28
  Requires-Dist: pre-commit~=3.0.0; extra == "dev"
29
- Requires-Dist: requests<3.0.0,>=2.32.0; extra == "dev"
30
29
  Requires-Dist: ciso8601~=2.3.0; extra == "dev"
31
30
  Requires-Dist: httpx<0.28.0,>=0.24.0; extra == "dev"
32
31
  Provides-Extra: docs
@@ -53,7 +52,7 @@ Requires-Dist: uvicorn[standard]~=0.23.0; extra == "server"
53
52
  [![GitHub forks](https://img.shields.io/github/forks/stac-utils/stac-fastapi-elasticsearch-opensearch.svg?color=blue)](https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/network/members)
54
53
  [![PyPI version](https://img.shields.io/pypi/v/stac-fastapi-elasticsearch.svg?color=blue)](https://pypi.org/project/stac-fastapi-elasticsearch/)
55
54
  [![STAC](https://img.shields.io/badge/STAC-1.1.0-blue.svg)](https://github.com/radiantearth/stac-spec/tree/v1.1.0)
56
- [![stac-fastapi](https://img.shields.io/badge/stac--fastapi-5.2.0-blue.svg)](https://github.com/stac-utils/stac-fastapi)
55
+ [![stac-fastapi](https://img.shields.io/badge/stac--fastapi-6.0.0-blue.svg)](https://github.com/stac-utils/stac-fastapi)
57
56
 
58
57
  ## Sponsors & Supporters
59
58
 
@@ -243,28 +242,30 @@ You can customize additional settings in your `.env` file:
243
242
  |------------------------------|--------------------------------------------------------------------------------------|--------------------------|---------------------------------------------------------------------------------------------|
244
243
  | `ES_HOST` | Hostname for external Elasticsearch/OpenSearch. | `localhost` | Optional |
245
244
  | `ES_PORT` | Port for Elasticsearch/OpenSearch. | `9200` (ES) / `9202` (OS)| Optional |
246
- | `ES_USE_SSL` | Use SSL for connecting to Elasticsearch/OpenSearch. | `false` | Optional |
247
- | `ES_VERIFY_CERTS` | Verify SSL certificates when connecting. | `false` | Optional |
245
+ | `ES_USE_SSL` | Use SSL for connecting to Elasticsearch/OpenSearch. | `true` | Optional |
246
+ | `ES_VERIFY_CERTS` | Verify SSL certificates when connecting. | `true` | Optional |
247
+ | `ES_API_KEY` | API Key for external Elasticsearch/OpenSearch. | N/A | Optional |
248
+ | `ES_TIMEOUT` | Client timeout for Elasticsearch/OpenSearch. | DB client default | Optional |
248
249
  | `STAC_FASTAPI_TITLE` | Title of the API in the documentation. | `stac-fastapi-<backend>` | Optional |
249
250
  | `STAC_FASTAPI_DESCRIPTION` | Description of the API in the documentation. | N/A | Optional |
250
251
  | `STAC_FASTAPI_VERSION` | API version. | `2.1` | Optional |
251
- | `STAC_FASTAPI_LANDING_PAGE_ID` | Landing page ID | `stac-fastapi` | Optional |
252
+ | `STAC_FASTAPI_LANDING_PAGE_ID` | Landing page ID | `stac-fastapi` | Optional |
252
253
  | `APP_HOST` | Server bind address. | `0.0.0.0` | Optional |
253
- | `APP_PORT` | Server port. | `8080` | Optional |
254
+ | `APP_PORT` | Server port. | `8000` | Optional |
254
255
  | `ENVIRONMENT` | Runtime environment. | `local` | Optional |
255
256
  | `WEB_CONCURRENCY` | Number of worker processes. | `10` | Optional |
256
257
  | `RELOAD` | Enable auto-reload for development. | `true` | Optional |
257
258
  | `STAC_FASTAPI_RATE_LIMIT` | API rate limit per client. | `200/minute` | Optional |
258
- | `BACKEND` | Tests-related variable | `elasticsearch` or `opensearch` based on the backend | Optional |
259
- | `ELASTICSEARCH_VERSION` | Version of Elasticsearch to use. | `8.11.0` | Optional | |
260
- | `OPENSEARCH_VERSION` | OpenSearch version | `2.11.1` | Optional
261
- | `ENABLE_DIRECT_RESPONSE` | Enable direct response for maximum performance (disables all FastAPI dependencies, including authentication, custom status codes, and validation) | `false` | Optional
262
- | `RAISE_ON_BULK_ERROR` | Controls whether bulk insert operations raise exceptions on errors. If set to `true`, the operation will stop and raise an exception when an error occurs. If set to `false`, errors will be logged, and the operation will continue. **Note:** STAC Item and ItemCollection validation errors will always raise, regardless of this flag. | `false` Optional |
263
- | `DATABASE_REFRESH` | Controls whether database operations refresh the index immediately after changes. If set to `true`, changes will be immediately searchable. If set to `false`, changes may not be immediately visible but can improve performance for bulk operations. If set to `wait_for`, changes will wait for the next refresh cycle to become visible. | `false` | Optional |
259
+ | `BACKEND` | Tests-related variable | `elasticsearch` or `opensearch` based on the backend | Optional |
260
+ | `ELASTICSEARCH_VERSION` | Version of Elasticsearch to use. | `8.11.0` | Optional |
261
+ | `OPENSEARCH_VERSION` | OpenSearch version | `2.11.1` | Optional |
262
+ | `ENABLE_DIRECT_RESPONSE` | Enable direct response for maximum performance (disables all FastAPI dependencies, including authentication, custom status codes, and validation) | `false` | Optional |
263
+ | `RAISE_ON_BULK_ERROR` | Controls whether bulk insert operations raise exceptions on errors. If set to `true`, the operation will stop and raise an exception when an error occurs. If set to `false`, errors will be logged, and the operation will continue. **Note:** STAC Item and ItemCollection validation errors will always raise, regardless of this flag. | `false` | Optional |
264
+ | `DATABASE_REFRESH` | Controls whether database operations refresh the index immediately after changes. If set to `true`, changes will be immediately searchable. If set to `false`, changes may not be immediately visible but can improve performance for bulk operations. If set to `wait_for`, changes will wait for the next refresh cycle to become visible. | `false` | Optional |
264
265
  | `ENABLE_TRANSACTIONS_EXTENSIONS` | Enables or disables the Transactions and Bulk Transactions API extensions. If set to `false`, the POST `/collections` route and related transaction endpoints (including bulk transaction operations) will be unavailable in the API. This is useful for deployments where mutating the catalog via the API should be prevented. | `true` | Optional |
265
266
 
266
267
  > [!NOTE]
267
- > The variables `ES_HOST`, `ES_PORT`, `ES_USE_SSL`, and `ES_VERIFY_CERTS` apply to both Elasticsearch and OpenSearch backends, so there is no need to rename the key names to `OS_` even if you're using OpenSearch.
268
+ > The variables `ES_HOST`, `ES_PORT`, `ES_USE_SSL`, `ES_VERIFY_CERTS` and `ES_TIMEOUT` apply to both Elasticsearch and OpenSearch backends, so there is no need to rename the key names to `OS_` even if you're using OpenSearch.
268
269
 
269
270
  ## Interacting with the API
270
271
 
@@ -0,0 +1,10 @@
1
+ stac_fastapi/opensearch/__init__.py,sha256=iJWMUgn7mUvmuPQSO_FlyhJ5eDdbbfmGv1qnFOX5-qk,28
2
+ stac_fastapi/opensearch/app.py,sha256=EBaN0W8-jP9Q568J6UlC_xM7uWx7PkfE4RulnxYJCYs,5642
3
+ stac_fastapi/opensearch/config.py,sha256=tR-CP3l96pte0gdbQqDHAQVZrWbL57krMrFalLKCTBc,5178
4
+ stac_fastapi/opensearch/database_logic.py,sha256=9c2UKJcFaaZ9fcXUkCYnDy06G16BHGu96kb13Clg0ow,54664
5
+ stac_fastapi/opensearch/version.py,sha256=7IrY7mbr0cGVqZsk6wmCeITxZjDgz_mPHUswrziX5ME,45
6
+ stac_fastapi_opensearch-6.1.0.dist-info/METADATA,sha256=CgFBwwx65wUV-jcw3sbSFhUKcre3GgfWUBlwEhOQRuM,32250
7
+ stac_fastapi_opensearch-6.1.0.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
8
+ stac_fastapi_opensearch-6.1.0.dist-info/entry_points.txt,sha256=zjZ0Xsr9BUNJqMkdPpl6zEIUykv1uFdJtNELFRChp0w,76
9
+ stac_fastapi_opensearch-6.1.0.dist-info/top_level.txt,sha256=vqn-D9-HsRPTTxy0Vk_KkDmTiMES4owwBQ3ydSZYb2s,13
10
+ stac_fastapi_opensearch-6.1.0.dist-info/RECORD,,
@@ -1,10 +0,0 @@
1
- stac_fastapi/opensearch/__init__.py,sha256=iJWMUgn7mUvmuPQSO_FlyhJ5eDdbbfmGv1qnFOX5-qk,28
2
- stac_fastapi/opensearch/app.py,sha256=LVXmhC2AZhiEhlGYWG29QBGfFP4y7PTYG0Pn0vKiISg,5060
3
- stac_fastapi/opensearch/config.py,sha256=HfaUvcQM2kGNjypdUYFUcrMmBUPu3pG31mvNRESeR_A,5061
4
- stac_fastapi/opensearch/database_logic.py,sha256=hhUMCzhpUMwH8HIIQIXFQsAZmbpt3_VDFWVBrigIXTM,46304
5
- stac_fastapi/opensearch/version.py,sha256=MQ4i7PSJnhrL3abG-m_vOwI3ksvRoHcs0eiBtxywE3E,47
6
- stac_fastapi_opensearch-5.0.0a1.dist-info/METADATA,sha256=rWA3nfQEeXCFy6h7AB6tM_wetrvVU_U5jf5d4HT-74g,31956
7
- stac_fastapi_opensearch-5.0.0a1.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
8
- stac_fastapi_opensearch-5.0.0a1.dist-info/entry_points.txt,sha256=zjZ0Xsr9BUNJqMkdPpl6zEIUykv1uFdJtNELFRChp0w,76
9
- stac_fastapi_opensearch-5.0.0a1.dist-info/top_level.txt,sha256=vqn-D9-HsRPTTxy0Vk_KkDmTiMES4owwBQ3ydSZYb2s,13
10
- stac_fastapi_opensearch-5.0.0a1.dist-info/RECORD,,