stac-fastapi-core 4.1.0__py3-none-any.whl → 5.0.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
@@ -1,7 +1,7 @@
1
1
  """Base database logic."""
2
2
 
3
3
  import abc
4
- from typing import Any, Dict, Iterable, Optional
4
+ from typing import Any, Dict, Iterable, List, Optional
5
5
 
6
6
 
7
7
  class BaseDatabaseLogic(abc.ABC):
@@ -36,6 +36,18 @@ class BaseDatabaseLogic(abc.ABC):
36
36
  """Delete an item from the database."""
37
37
  pass
38
38
 
39
+ @abc.abstractmethod
40
+ async def get_items_mapping(self, collection_id: str) -> Dict[str, Dict[str, Any]]:
41
+ """Get the mapping for the items in the collection."""
42
+ pass
43
+
44
+ @abc.abstractmethod
45
+ async def get_items_unique_values(
46
+ self, collection_id: str, field_names: Iterable[str], *, limit: int = ...
47
+ ) -> Dict[str, List[str]]:
48
+ """Get the unique values for the given fields in the collection."""
49
+ pass
50
+
39
51
  @abc.abstractmethod
40
52
  async def create_collection(self, collection: Dict, refresh: bool = False) -> None:
41
53
  """Create a collection in the database."""
stac_fastapi/core/core.py CHANGED
@@ -1,11 +1,10 @@
1
1
  """Core client."""
2
2
 
3
3
  import logging
4
- from collections import deque
5
4
  from datetime import datetime as datetime_type
6
5
  from datetime import timezone
7
6
  from enum import Enum
8
- from typing import Any, Dict, List, Literal, Optional, Set, Type, Union
7
+ from typing import List, Optional, Set, Type, Union
9
8
  from urllib.parse import unquote_plus, urljoin
10
9
 
11
10
  import attr
@@ -22,11 +21,11 @@ from stac_pydantic.version import STAC_VERSION
22
21
 
23
22
  from stac_fastapi.core.base_database_logic import BaseDatabaseLogic
24
23
  from stac_fastapi.core.base_settings import ApiBaseSettings
24
+ from stac_fastapi.core.datetime_utils import format_datetime_range
25
25
  from stac_fastapi.core.models.links import PagingLinks
26
26
  from stac_fastapi.core.serializers import CollectionSerializer, ItemSerializer
27
27
  from stac_fastapi.core.session import Session
28
28
  from stac_fastapi.core.utilities import filter_fields
29
- from stac_fastapi.extensions.core.filter.client import AsyncBaseFiltersClient
30
29
  from stac_fastapi.extensions.third_party.bulk_transactions import (
31
30
  BaseBulkTransactionsClient,
32
31
  BulkTransactionMethod,
@@ -37,7 +36,6 @@ from stac_fastapi.types.conformance import BASE_CONFORMANCE_CLASSES
37
36
  from stac_fastapi.types.core import AsyncBaseCoreClient, AsyncBaseTransactionsClient
38
37
  from stac_fastapi.types.extension import ApiExtension
39
38
  from stac_fastapi.types.requests import get_base_url
40
- from stac_fastapi.types.rfc3339 import DateTimeType, rfc3339_str_to_datetime
41
39
  from stac_fastapi.types.search import BaseSearchPostRequest
42
40
 
43
41
  logger = logging.getLogger(__name__)
@@ -318,9 +316,8 @@ class CoreClient(AsyncBaseCoreClient):
318
316
  )
319
317
 
320
318
  if datetime:
321
- datetime_search = self._return_date(datetime)
322
319
  search = self.database.apply_datetime_filter(
323
- search=search, datetime_search=datetime_search
320
+ search=search, interval=datetime
324
321
  )
325
322
 
326
323
  if bbox:
@@ -374,87 +371,6 @@ class CoreClient(AsyncBaseCoreClient):
374
371
  )
375
372
  return self.item_serializer.db_to_stac(item, base_url)
376
373
 
377
- @staticmethod
378
- def _return_date(
379
- interval: Optional[Union[DateTimeType, str]]
380
- ) -> Dict[str, Optional[str]]:
381
- """
382
- Convert a date interval.
383
-
384
- (which may be a datetime, a tuple of one or two datetimes, a string
385
- representing a datetime or range, or None) into a dictionary for filtering
386
- search results with Elasticsearch.
387
-
388
- This function ensures the output dictionary contains 'gte' and 'lte' keys,
389
- even if they are set to None, to prevent KeyError in the consuming logic.
390
-
391
- Args:
392
- interval (Optional[Union[DateTimeType, str]]): The date interval, which might be a single datetime,
393
- a tuple with one or two datetimes, a string, or None.
394
-
395
- Returns:
396
- dict: A dictionary representing the date interval for use in filtering search results,
397
- always containing 'gte' and 'lte' keys.
398
- """
399
- result: Dict[str, Optional[str]] = {"gte": None, "lte": None}
400
-
401
- if interval is None:
402
- return result
403
-
404
- if isinstance(interval, str):
405
- if "/" in interval:
406
- parts = interval.split("/")
407
- result["gte"] = parts[0] if parts[0] != ".." else None
408
- result["lte"] = (
409
- parts[1] if len(parts) > 1 and parts[1] != ".." else None
410
- )
411
- else:
412
- converted_time = interval if interval != ".." else None
413
- result["gte"] = result["lte"] = converted_time
414
- return result
415
-
416
- if isinstance(interval, datetime_type):
417
- datetime_iso = interval.isoformat()
418
- result["gte"] = result["lte"] = datetime_iso
419
- elif isinstance(interval, tuple):
420
- start, end = interval
421
- # Ensure datetimes are converted to UTC and formatted with 'Z'
422
- if start:
423
- result["gte"] = start.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z"
424
- if end:
425
- result["lte"] = end.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z"
426
-
427
- return result
428
-
429
- def _format_datetime_range(self, date_str: str) -> str:
430
- """
431
- Convert a datetime range string into a normalized UTC string for API requests using rfc3339_str_to_datetime.
432
-
433
- Args:
434
- date_str (str): A string containing two datetime values separated by a '/'.
435
-
436
- Returns:
437
- str: A string formatted as 'YYYY-MM-DDTHH:MM:SSZ/YYYY-MM-DDTHH:MM:SSZ', with '..' used if any element is None.
438
- """
439
-
440
- def normalize(dt):
441
- dt = dt.strip()
442
- if not dt or dt == "..":
443
- return ".."
444
- dt_obj = rfc3339_str_to_datetime(dt)
445
- dt_utc = dt_obj.astimezone(timezone.utc)
446
- return dt_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
447
-
448
- if not isinstance(date_str, str):
449
- return "../.."
450
- if "/" not in date_str:
451
- return f"{normalize(date_str)}/{normalize(date_str)}"
452
- try:
453
- start, end = date_str.split("/", 1)
454
- except Exception:
455
- return "../.."
456
- return f"{normalize(start)}/{normalize(end)}"
457
-
458
374
  async def get_search(
459
375
  self,
460
376
  request: Request,
@@ -506,7 +422,7 @@ class CoreClient(AsyncBaseCoreClient):
506
422
  }
507
423
 
508
424
  if datetime:
509
- base_args["datetime"] = self._format_datetime_range(date_str=datetime)
425
+ base_args["datetime"] = format_datetime_range(date_str=datetime)
510
426
 
511
427
  if intersects:
512
428
  base_args["intersects"] = orjson.loads(unquote_plus(intersects))
@@ -576,9 +492,8 @@ class CoreClient(AsyncBaseCoreClient):
576
492
  )
577
493
 
578
494
  if search_request.datetime:
579
- datetime_search = self._return_date(search_request.datetime)
580
495
  search = self.database.apply_datetime_filter(
581
- search=search, datetime_search=datetime_search
496
+ search=search, interval=search_request.datetime
582
497
  )
583
498
 
584
499
  if search_request.bbox:
@@ -607,7 +522,7 @@ class CoreClient(AsyncBaseCoreClient):
607
522
  if hasattr(search_request, "filter_expr"):
608
523
  cql2_filter = getattr(search_request, "filter_expr", None)
609
524
  try:
610
- search = self.database.apply_cql2_filter(search, cql2_filter)
525
+ search = await self.database.apply_cql2_filter(search, cql2_filter)
611
526
  except Exception as e:
612
527
  raise HTTPException(
613
528
  status_code=400, detail=f"Error with cql2_json filter: {e}"
@@ -712,10 +627,11 @@ class TransactionsClient(AsyncBaseTransactionsClient):
712
627
  for feature in features
713
628
  ]
714
629
  attempted = len(processed_items)
630
+
715
631
  success, errors = await self.database.bulk_async(
716
- collection_id,
717
- processed_items,
718
- refresh=kwargs.get("refresh", False),
632
+ collection_id=collection_id,
633
+ processed_items=processed_items,
634
+ **kwargs,
719
635
  )
720
636
  if errors:
721
637
  logger.error(
@@ -729,10 +645,7 @@ class TransactionsClient(AsyncBaseTransactionsClient):
729
645
 
730
646
  # Handle single item
731
647
  await self.database.create_item(
732
- item_dict,
733
- refresh=kwargs.get("refresh", False),
734
- base_url=base_url,
735
- exist_ok=False,
648
+ item_dict, base_url=base_url, exist_ok=False, **kwargs
736
649
  )
737
650
  return ItemSerializer.db_to_stac(item_dict, base_url)
738
651
 
@@ -757,11 +670,12 @@ class TransactionsClient(AsyncBaseTransactionsClient):
757
670
  """
758
671
  item = item.model_dump(mode="json")
759
672
  base_url = str(kwargs["request"].base_url)
673
+
760
674
  now = datetime_type.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
761
675
  item["properties"]["updated"] = now
762
676
 
763
677
  await self.database.create_item(
764
- item, refresh=kwargs.get("refresh", False), base_url=base_url, exist_ok=True
678
+ item, base_url=base_url, exist_ok=True, **kwargs
765
679
  )
766
680
 
767
681
  return ItemSerializer.db_to_stac(item, base_url)
@@ -777,7 +691,9 @@ class TransactionsClient(AsyncBaseTransactionsClient):
777
691
  Returns:
778
692
  None: Returns 204 No Content on successful deletion
779
693
  """
780
- await self.database.delete_item(item_id=item_id, collection_id=collection_id)
694
+ await self.database.delete_item(
695
+ item_id=item_id, collection_id=collection_id, **kwargs
696
+ )
781
697
  return None
782
698
 
783
699
  @overrides
@@ -798,8 +714,9 @@ class TransactionsClient(AsyncBaseTransactionsClient):
798
714
  """
799
715
  collection = collection.model_dump(mode="json")
800
716
  request = kwargs["request"]
717
+
801
718
  collection = self.database.collection_serializer.stac_to_db(collection, request)
802
- await self.database.create_collection(collection=collection)
719
+ await self.database.create_collection(collection=collection, **kwargs)
803
720
  return CollectionSerializer.db_to_stac(
804
721
  collection,
805
722
  request,
@@ -835,7 +752,7 @@ class TransactionsClient(AsyncBaseTransactionsClient):
835
752
 
836
753
  collection = self.database.collection_serializer.stac_to_db(collection, request)
837
754
  await self.database.update_collection(
838
- collection_id=collection_id, collection=collection
755
+ collection_id=collection_id, collection=collection, **kwargs
839
756
  )
840
757
 
841
758
  return CollectionSerializer.db_to_stac(
@@ -860,7 +777,7 @@ class TransactionsClient(AsyncBaseTransactionsClient):
860
777
  Raises:
861
778
  NotFoundError: If the collection doesn't exist
862
779
  """
863
- await self.database.delete_collection(collection_id=collection_id)
780
+ await self.database.delete_collection(collection_id=collection_id, **kwargs)
864
781
  return None
865
782
 
866
783
 
@@ -937,7 +854,7 @@ class BulkTransactionsClient(BaseBulkTransactionsClient):
937
854
  success, errors = self.database.bulk_sync(
938
855
  collection_id,
939
856
  processed_items,
940
- refresh=kwargs.get("refresh", False),
857
+ **kwargs,
941
858
  )
942
859
  if errors:
943
860
  logger.error(f"Bulk sync operation encountered errors: {errors}")
@@ -945,159 +862,3 @@ class BulkTransactionsClient(BaseBulkTransactionsClient):
945
862
  logger.info(f"Bulk sync operation succeeded with {success} actions.")
946
863
 
947
864
  return f"Successfully added/updated {success} Items. {attempted - success} errors occurred."
948
-
949
-
950
- _DEFAULT_QUERYABLES: Dict[str, Dict[str, Any]] = {
951
- "id": {
952
- "description": "ID",
953
- "$ref": "https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/item.json#/definitions/core/allOf/2/properties/id",
954
- },
955
- "collection": {
956
- "description": "Collection",
957
- "$ref": "https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/item.json#/definitions/core/allOf/2/then/properties/collection",
958
- },
959
- "geometry": {
960
- "description": "Geometry",
961
- "$ref": "https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/item.json#/definitions/core/allOf/1/oneOf/0/properties/geometry",
962
- },
963
- "datetime": {
964
- "description": "Acquisition Timestamp",
965
- "$ref": "https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/datetime.json#/properties/datetime",
966
- },
967
- "created": {
968
- "description": "Creation Timestamp",
969
- "$ref": "https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/datetime.json#/properties/created",
970
- },
971
- "updated": {
972
- "description": "Creation Timestamp",
973
- "$ref": "https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/datetime.json#/properties/updated",
974
- },
975
- "cloud_cover": {
976
- "description": "Cloud Cover",
977
- "$ref": "https://stac-extensions.github.io/eo/v1.0.0/schema.json#/definitions/fields/properties/eo:cloud_cover",
978
- },
979
- "cloud_shadow_percentage": {
980
- "title": "Cloud Shadow Percentage",
981
- "description": "Cloud Shadow Percentage",
982
- "type": "number",
983
- "minimum": 0,
984
- "maximum": 100,
985
- },
986
- "nodata_pixel_percentage": {
987
- "title": "No Data Pixel Percentage",
988
- "description": "No Data Pixel Percentage",
989
- "type": "number",
990
- "minimum": 0,
991
- "maximum": 100,
992
- },
993
- }
994
-
995
- _ES_MAPPING_TYPE_TO_JSON: Dict[
996
- str, Literal["string", "number", "boolean", "object", "array", "null"]
997
- ] = {
998
- "date": "string",
999
- "date_nanos": "string",
1000
- "keyword": "string",
1001
- "match_only_text": "string",
1002
- "text": "string",
1003
- "wildcard": "string",
1004
- "byte": "number",
1005
- "double": "number",
1006
- "float": "number",
1007
- "half_float": "number",
1008
- "long": "number",
1009
- "scaled_float": "number",
1010
- "short": "number",
1011
- "token_count": "number",
1012
- "unsigned_long": "number",
1013
- "geo_point": "object",
1014
- "geo_shape": "object",
1015
- "nested": "array",
1016
- }
1017
-
1018
-
1019
- @attr.s
1020
- class EsAsyncBaseFiltersClient(AsyncBaseFiltersClient):
1021
- """Defines a pattern for implementing the STAC filter extension."""
1022
-
1023
- database: BaseDatabaseLogic = attr.ib()
1024
-
1025
- async def get_queryables(
1026
- self, collection_id: Optional[str] = None, **kwargs
1027
- ) -> Dict[str, Any]:
1028
- """Get the queryables available for the given collection_id.
1029
-
1030
- If collection_id is None, returns the intersection of all
1031
- queryables over all collections.
1032
-
1033
- This base implementation returns a blank queryable schema. This is not allowed
1034
- under OGC CQL but it is allowed by the STAC API Filter Extension
1035
-
1036
- https://github.com/radiantearth/stac-api-spec/tree/master/fragments/filter#queryables
1037
-
1038
- Args:
1039
- collection_id (str, optional): The id of the collection to get queryables for.
1040
- **kwargs: additional keyword arguments
1041
-
1042
- Returns:
1043
- Dict[str, Any]: A dictionary containing the queryables for the given collection.
1044
- """
1045
- queryables: Dict[str, Any] = {
1046
- "$schema": "https://json-schema.org/draft/2019-09/schema",
1047
- "$id": "https://stac-api.example.com/queryables",
1048
- "type": "object",
1049
- "title": "Queryables for STAC API",
1050
- "description": "Queryable names for the STAC API Item Search filter.",
1051
- "properties": _DEFAULT_QUERYABLES,
1052
- "additionalProperties": True,
1053
- }
1054
- if not collection_id:
1055
- return queryables
1056
-
1057
- properties: Dict[str, Any] = queryables["properties"]
1058
- queryables.update(
1059
- {
1060
- "properties": properties,
1061
- "additionalProperties": False,
1062
- }
1063
- )
1064
-
1065
- mapping_data = await self.database.get_items_mapping(collection_id)
1066
- mapping_properties = next(iter(mapping_data.values()))["mappings"]["properties"]
1067
- stack = deque(mapping_properties.items())
1068
-
1069
- while stack:
1070
- field_name, field_def = stack.popleft()
1071
-
1072
- # Iterate over nested fields
1073
- field_properties = field_def.get("properties")
1074
- if field_properties:
1075
- # Fields in Item Properties should be exposed with their un-prefixed names,
1076
- # and not require expressions to prefix them with properties,
1077
- # e.g., eo:cloud_cover instead of properties.eo:cloud_cover.
1078
- if field_name == "properties":
1079
- stack.extend(field_properties.items())
1080
- else:
1081
- stack.extend(
1082
- (f"{field_name}.{k}", v) for k, v in field_properties.items()
1083
- )
1084
-
1085
- # Skip non-indexed or disabled fields
1086
- field_type = field_def.get("type")
1087
- if not field_type or not field_def.get("enabled", True):
1088
- continue
1089
-
1090
- # Generate field properties
1091
- field_result = _DEFAULT_QUERYABLES.get(field_name, {})
1092
- properties[field_name] = field_result
1093
-
1094
- field_name_human = field_name.replace("_", " ").title()
1095
- field_result.setdefault("title", field_name_human)
1096
-
1097
- field_type_json = _ES_MAPPING_TYPE_TO_JSON.get(field_type, field_type)
1098
- field_result.setdefault("type", field_type_json)
1099
-
1100
- if field_type in {"date", "date_nanos"}:
1101
- field_result.setdefault("format", "date-time")
1102
-
1103
- return queryables
@@ -1,6 +1,38 @@
1
- """A few datetime methods."""
1
+ """Utility functions to handle datetime parsing."""
2
2
  from datetime import datetime, timezone
3
3
 
4
+ from stac_fastapi.types.rfc3339 import rfc3339_str_to_datetime
5
+
6
+
7
+ def format_datetime_range(date_str: str) -> str:
8
+ """
9
+ Convert a datetime range string into a normalized UTC string for API requests using rfc3339_str_to_datetime.
10
+
11
+ Args:
12
+ date_str (str): A string containing two datetime values separated by a '/'.
13
+
14
+ Returns:
15
+ str: A string formatted as 'YYYY-MM-DDTHH:MM:SSZ/YYYY-MM-DDTHH:MM:SSZ', with '..' used if any element is None.
16
+ """
17
+
18
+ def normalize(dt):
19
+ dt = dt.strip()
20
+ if not dt or dt == "..":
21
+ return ".."
22
+ dt_obj = rfc3339_str_to_datetime(dt)
23
+ dt_utc = dt_obj.astimezone(timezone.utc)
24
+ return dt_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
25
+
26
+ if not isinstance(date_str, str):
27
+ return "../.."
28
+ if "/" not in date_str:
29
+ return f"{normalize(date_str)}/{normalize(date_str)}"
30
+ try:
31
+ start, end = date_str.split("/", 1)
32
+ except Exception:
33
+ return "../.."
34
+ return f"{normalize(start)}/{normalize(end)}"
35
+
4
36
 
5
37
  # Borrowed from pystac - https://github.com/stac-utils/pystac/blob/f5e4cf4a29b62e9ef675d4a4dac7977b09f53c8f/pystac/utils.py#L370-L394
6
38
  def datetime_to_str(dt: datetime, timespec: str = "auto") -> str: