stac-fastapi-core 6.8.1__py3-none-any.whl → 6.10.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- stac_fastapi/core/core.py +12 -10
- stac_fastapi/core/extensions/catalogs.py +297 -80
- stac_fastapi/core/extensions/collections_search.py +25 -21
- stac_fastapi/core/redis_utils.py +1 -1
- stac_fastapi/core/serializers.py +3 -0
- stac_fastapi/core/utilities.py +84 -6
- stac_fastapi/core/version.py +1 -1
- {stac_fastapi_core-6.8.1.dist-info → stac_fastapi_core-6.10.0.dist-info}/METADATA +1 -1
- {stac_fastapi_core-6.8.1.dist-info → stac_fastapi_core-6.10.0.dist-info}/RECORD +10 -10
- {stac_fastapi_core-6.8.1.dist-info → stac_fastapi_core-6.10.0.dist-info}/WHEEL +0 -0
stac_fastapi/core/core.py
CHANGED
@@ -315,7 +315,7 @@ class CoreClient(AsyncBaseCoreClient):
 
         body_limit = None
         try:
-            if request.method == "POST" and request.body():
+            if request.method == "POST" and await request.body():
                 body_data = await request.json()
                 body_limit = body_data.get("limit")
         except Exception:
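This hunk fixes a subtle bug: in Starlette, `Request.body()` is a coroutine, so the old truthiness check always passed without ever reading the body. A minimal standalone sketch of the difference (the `body` function here just stands in for Starlette's):

import asyncio


async def body() -> bytes:
    """Stand-in for starlette.requests.Request.body, which is also async."""
    return b""


async def demo() -> None:
    coro = body()
    print(bool(coro))  # True: a coroutine object is always truthy
    coro.close()  # dispose of the un-awaited coroutine cleanly
    print(bool(await body()))  # False: the actual (empty) body, as intended


asyncio.run(demo())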
@@ -546,18 +546,20 @@ class CoreClient(AsyncBaseCoreClient):
         return await self.all_collections(
             limit=search_request.limit if hasattr(search_request, "limit") else None,
             bbox=search_request.bbox if hasattr(search_request, "bbox") else None,
-            datetime=
+            datetime=(
+                search_request.datetime if hasattr(search_request, "datetime") else None
+            ),
             token=search_request.token if hasattr(search_request, "token") else None,
             fields=fields,
             sortby=sortby,
-            filter_expr=
-            filter_lang=
+            filter_expr=(
+                search_request.filter if hasattr(search_request, "filter") else None
+            ),
+            filter_lang=(
+                search_request.filter_lang
+                if hasattr(search_request, "filter_lang")
+                else None
+            ),
             query=search_request.query if hasattr(search_request, "query") else None,
             q=search_request.q if hasattr(search_request, "q") else None,
             request=request,
stac_fastapi/core/extensions/catalogs.py
CHANGED

@@ -12,9 +12,15 @@ from starlette.responses import Response
 from typing_extensions import TypedDict
 
 from stac_fastapi.core.models import Catalog
-from stac_fastapi.sfeos_helpers.
+from stac_fastapi.sfeos_helpers.database import (
+    search_children_with_pagination_shared,
+    search_collections_by_parent_id_shared,
+    search_sub_catalogs_with_pagination_shared,
+    update_catalog_in_index_shared,
+)
 from stac_fastapi.types import stac as stac_types
 from stac_fastapi.types.core import BaseCoreClient
+from stac_fastapi.types.errors import NotFoundError
 from stac_fastapi.types.extension import ApiExtension
 
 logger = logging.getLogger(__name__)
@@ -56,6 +62,7 @@ class CatalogsExtension(ApiExtension):
             settings: extension settings (unused for now).
         """
         self.settings = settings or {}
+        self.router = APIRouter()
 
         self.router.add_api_route(
             path="/catalogs",
@@ -105,6 +112,31 @@ class CatalogsExtension(ApiExtension):
             tags=["Catalogs"],
         )
 
+        # Add endpoint for getting sub-catalogs of a catalog
+        self.router.add_api_route(
+            path="/catalogs/{catalog_id}/catalogs",
+            endpoint=self.get_catalog_catalogs,
+            methods=["GET"],
+            response_model=Catalogs,
+            response_class=self.response_class,
+            summary="Get Catalog Sub-Catalogs",
+            description="Get sub-catalogs linked from a specific catalog.",
+            tags=["Catalogs"],
+        )
+
+        # Add endpoint for creating sub-catalogs in a catalog
+        self.router.add_api_route(
+            path="/catalogs/{catalog_id}/catalogs",
+            endpoint=self.create_catalog_catalog,
+            methods=["POST"],
+            response_model=Catalog,
+            response_class=self.response_class,
+            status_code=201,
+            summary="Create Catalog Sub-Catalog",
+            description="Create a new catalog and link it as a sub-catalog of a specific catalog.",
+            tags=["Catalogs"],
+        )
+
         # Add endpoint for creating collections in a catalog
         self.router.add_api_route(
             path="/catalogs/{catalog_id}/collections",
@@ -118,6 +150,18 @@ class CatalogsExtension(ApiExtension):
             tags=["Catalogs"],
         )
 
+        # Add endpoint for updating a catalog
+        self.router.add_api_route(
+            path="/catalogs/{catalog_id}",
+            endpoint=self.update_catalog,
+            methods=["PUT"],
+            response_model=Catalog,
+            response_class=self.response_class,
+            summary="Update Catalog",
+            description="Update an existing STAC catalog.",
+            tags=["Catalogs"],
+        )
+
         # Add endpoint for deleting a catalog
         self.router.add_api_route(
             path="/catalogs/{catalog_id}",
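Taken together, the new routes give catalogs sub-catalog listing and creation plus in-place updates. A hypothetical client session against a locally running instance; the host, port, and catalog IDs below are placeholders, not part of the package:

import httpx

base = "http://localhost:8080"

# Create a sub-catalog under an existing catalog (POST /catalogs/{catalog_id}/catalogs)
sub = {
    "type": "Catalog",
    "id": "landsat",
    "stac_version": "1.0.0",
    "description": "Landsat scenes",
    "links": [],
}
r = httpx.post(f"{base}/catalogs/root-cat/catalogs", json=sub)
print(r.status_code)  # 201 on creation

# Page through sub-catalogs (GET /catalogs/{catalog_id}/catalogs)
page = httpx.get(f"{base}/catalogs/root-cat/catalogs", params={"limit": 10}).json()
print(page["numberReturned"], page["numberMatched"])

# Update a catalog in place (PUT /catalogs/{catalog_id})
sub["description"] = "Landsat Collection 2 scenes"
httpx.put(f"{base}/catalogs/landsat", json=sub)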
@@ -280,6 +324,55 @@ class CatalogsExtension(ApiExtension):
         # Return the created catalog
         return catalog
 
+    async def update_catalog(
+        self, catalog_id: str, catalog: Catalog, request: Request
+    ) -> Catalog:
+        """Update an existing catalog.
+
+        Args:
+            catalog_id: The ID of the catalog to update.
+            catalog: The updated catalog data.
+            request: Request object.
+
+        Returns:
+            The updated catalog.
+
+        Raises:
+            HTTPException: If the catalog is not found.
+        """
+        try:
+            # Verify the catalog exists
+            existing_catalog_db = await self.client.database.find_catalog(catalog_id)
+
+            # Convert STAC catalog to database format
+            db_catalog = self.client.catalog_serializer.stac_to_db(catalog, request)
+            db_catalog_dict = db_catalog.model_dump()
+            db_catalog_dict["type"] = "Catalog"
+
+            # Preserve parent_ids and other internal fields from the existing catalog
+            if "parent_ids" in existing_catalog_db:
+                db_catalog_dict["parent_ids"] = existing_catalog_db["parent_ids"]
+
+            # Update the catalog in the database (upsert via create_catalog)
+            await self.client.database.create_catalog(db_catalog_dict, refresh=True)
+
+            # Return the updated catalog
+            return catalog
+
+        except HTTPException:
+            raise
+        except Exception as e:
+            error_msg = str(e)
+            if "not found" in error_msg.lower():
+                raise HTTPException(
+                    status_code=404, detail=f"Catalog {catalog_id} not found"
+                )
+            logger.error(f"Error updating catalog {catalog_id}: {e}")
+            raise HTTPException(
+                status_code=500,
+                detail=f"Failed to update catalog: {str(e)}",
+            )
+
     async def get_catalog(self, catalog_id: str, request: Request) -> Catalog:
         """Get a specific catalog by ID.
 
@@ -364,11 +457,9 @@ class CatalogsExtension(ApiExtension):
             await self.client.database.find_catalog(catalog_id)
 
             # Find all collections with this catalog in parent_ids
-                index=COLLECTIONS_INDEX, body=query_body, size=10000
+            children = await search_collections_by_parent_id_shared(
+                self.client.database.client, catalog_id
             )
-            children = [hit["_source"] for hit in search_result["hits"]["hits"]]
 
             # Safe Unlink: Remove catalog from all children's parent_ids
             # If a child becomes an orphan, adopt it to root
@@ -439,27 +530,15 @@ class CatalogsExtension(ApiExtension):
             # Verify the catalog exists
             await self.client.database.find_catalog(catalog_id)
 
-            # Query collections by parent_ids field
+            # Query collections by parent_ids field
             # This uses the parent_ids field in the collection mapping to find all
             # collections that have this catalog as a parent
-            try:
-                search_result = await self.client.database.client.search(
-                    index=COLLECTIONS_INDEX, body=query_body
-                )
-            except Exception as e:
-                logger.error(
-                    f"Error searching for collections with parent {catalog_id}: {e}"
-                )
-                search_result = {"hits": {"hits": []}}
+            collections_data = await search_collections_by_parent_id_shared(
+                self.client.database.client, catalog_id
+            )
 
-            # Extract collection IDs from
-            collection_ids = []
-            hits = search_result.get("hits", {}).get("hits", [])
-            for hit in hits:
-                collection_ids.append(hit.get("_id"))
+            # Extract collection IDs from results
+            collection_ids = [coll.get("id") for coll in collections_data]
 
             # Fetch the collections
             collections = []
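The inline Elasticsearch queries these hunks delete show what the shared helper replaces. A rough sketch of what `search_collections_by_parent_id_shared` presumably does, reconstructed from that deleted code; the real implementation lives in `stac_fastapi.sfeos_helpers.database` and may differ, and the index name here is a placeholder for `COLLECTIONS_INDEX`:

from typing import Any, Dict, List


async def search_collections_by_parent_id(
    client: Any, catalog_id: str
) -> List[Dict[str, Any]]:
    """Return collection documents whose parent_ids array contains catalog_id."""
    body = {"query": {"term": {"parent_ids": catalog_id}}, "size": 10000}
    result = await client.search(index="collections", body=body)
    return [hit["_source"] for hit in result["hits"]["hits"]]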
@@ -523,6 +602,194 @@ class CatalogsExtension(ApiExtension):
                 status_code=404, detail=f"Catalog {catalog_id} not found"
             )
 
+    async def get_catalog_catalogs(
+        self,
+        catalog_id: str,
+        request: Request,
+        limit: int = Query(10, ge=1, le=100),
+        token: Optional[str] = Query(None),
+    ) -> Catalogs:
+        """Get all sub-catalogs of a specific catalog with pagination.
+
+        Args:
+            catalog_id: The ID of the parent catalog.
+            request: Request object.
+            limit: Maximum number of results to return (default: 10, max: 100).
+            token: Pagination token for cursor-based pagination.
+
+        Returns:
+            A Catalogs response containing sub-catalogs with pagination links.
+
+        Raises:
+            HTTPException: If the catalog is not found.
+        """
+        try:
+            # Verify the catalog exists
+            await self.client.database.find_catalog(catalog_id)
+
+            # Search for sub-catalogs with pagination
+            (
+                catalogs_data,
+                total_hits,
+                next_token,
+            ) = await search_sub_catalogs_with_pagination_shared(
+                self.client.database.client, catalog_id, limit, token
+            )
+
+            # Serialize to STAC format
+            catalogs = []
+            for catalog_data in catalogs_data:
+                try:
+                    catalog = self.client.catalog_serializer.db_to_stac(
+                        catalog_data,
+                        request,
+                        extensions=[
+                            type(ext).__name__
+                            for ext in self.client.database.extensions
+                        ],
+                    )
+                    catalogs.append(catalog)
+                except Exception as e:
+                    logger.error(
+                        f"Error serializing catalog {catalog_data.get('id')}: {e}"
+                    )
+                    continue
+
+            # Generate pagination links
+            base_url = str(request.base_url)
+            links = [
+                {"rel": "root", "type": "application/json", "href": base_url},
+                {
+                    "rel": "parent",
+                    "type": "application/json",
+                    "href": f"{base_url}catalogs/{catalog_id}",
+                },
+                {
+                    "rel": "self",
+                    "type": "application/json",
+                    "href": str(request.url),
+                },
+            ]
+
+            # Add next link if more results exist
+            if next_token:
+                query_params = {"limit": limit, "token": next_token}
+                links.append(
+                    {
+                        "rel": "next",
+                        "href": f"{base_url}catalogs/{catalog_id}/catalogs?{urlencode(query_params)}",
+                        "type": "application/json",
+                        "title": "Next page",
+                    }
+                )
+
+            return {
+                "catalogs": catalogs,
+                "links": links,
+                "numberReturned": len(catalogs),
+                "numberMatched": total_hits,
+            }
+
+        except HTTPException:
+            # Re-raise HTTP exceptions as-is
+            raise
+        except Exception as e:
+            logger.error(
+                f"Error retrieving catalogs for catalog {catalog_id}: {e}",
+                exc_info=True,
+            )
+            raise HTTPException(
+                status_code=404, detail=f"Catalog {catalog_id} not found"
+            )
+
+    async def create_catalog_catalog(
+        self, catalog_id: str, catalog: Catalog, request: Request
+    ) -> Catalog:
+        """Create a new catalog or link an existing catalog as a sub-catalog.
+
+        Logic:
+        1. Verifies the parent catalog exists.
+        2. If the sub-catalog already exists: Appends the parent ID to its parent_ids
+           (enabling poly-hierarchy - a catalog can have multiple parents).
+        3. If the sub-catalog is new: Creates it with parent_ids initialized to [catalog_id].
+
+        Args:
+            catalog_id: The ID of the parent catalog.
+            catalog: The catalog to create or link.
+            request: Request object.
+
+        Returns:
+            The created or linked catalog.
+
+        Raises:
+            HTTPException: If the parent catalog is not found or operation fails.
+        """
+        try:
+            # 1. Verify the parent catalog exists
+            await self.client.database.find_catalog(catalog_id)
+
+            # 2. Check if the sub-catalog already exists
+            try:
+                existing_catalog = await self.client.database.find_catalog(catalog.id)
+
+                # --- UPDATE PATH (Existing Catalog) ---
+                # We are linking an existing catalog to a new parent (poly-hierarchy)
+
+                # Ensure parent_ids list exists
+                if "parent_ids" not in existing_catalog:
+                    existing_catalog["parent_ids"] = []
+
+                # Append if not already present
+                if catalog_id not in existing_catalog["parent_ids"]:
+                    existing_catalog["parent_ids"].append(catalog_id)
+
+                # Persist the update
+                await update_catalog_in_index_shared(
+                    self.client.database.client, catalog.id, existing_catalog
+                )
+                logger.info(
+                    f"Linked existing catalog {catalog.id} to parent {catalog_id}"
+                )
+
+                # Return the STAC object
+                return self.client.catalog_serializer.db_to_stac(
+                    existing_catalog, request
+                )
+
+            except NotFoundError:
+                # --- CREATE PATH (New Catalog) ---
+                # Catalog does not exist, so we create it
+
+                # Convert STAC catalog to database format
+                db_catalog = self.client.catalog_serializer.stac_to_db(catalog, request)
+
+                # Convert to dict
+                db_catalog_dict = db_catalog.model_dump()
+                db_catalog_dict["type"] = "Catalog"
+
+                # Initialize parent_ids
+                db_catalog_dict["parent_ids"] = [catalog_id]
+
+                # Create in DB
+                await self.client.database.create_catalog(db_catalog_dict, refresh=True)
+                logger.info(
+                    f"Created new catalog {catalog.id} with parent {catalog_id}"
+                )
+
+                return catalog
+
+        except HTTPException:
+            raise
+        except Exception as e:
+            logger.error(
+                f"Error processing sub-catalog {catalog.id} in parent {catalog_id}: {e}",
+                exc_info=True,
+            )
+            raise HTTPException(
+                status_code=500,
+                detail=f"Failed to process sub-catalog: {str(e)}",
+            )
+
     async def create_catalog_collection(
         self, catalog_id: str, collection: Collection, request: Request
     ) -> stac_types.Collection:
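Poly-hierarchy in practice, per the docstring above: POSTing an existing catalog ID under a second parent links it (its parent_ids grows) instead of failing. Hypothetical calls against a local server; URLs and IDs are placeholders:

import httpx

base = "http://localhost:8080"
cat = {
    "type": "Catalog",
    "id": "shared-cat",
    "stac_version": "1.0.0",
    "description": "Appears under two parents",
    "links": [],
}

# First POST creates the catalog with parent_ids == ["parent-a"]
httpx.post(f"{base}/catalogs/parent-a/catalogs", json=cat)
# Second POST takes the update path and appends "parent-b" to parent_ids
httpx.post(f"{base}/catalogs/parent-b/catalogs", json=cat)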
@@ -791,57 +1058,14 @@ class CatalogsExtension(ApiExtension):
             # 1. Verify the parent catalog exists
             await self.client.database.find_catalog(catalog_id)
 
-            # 2.
-            # Base filter: Parent match
-            # This finds anything where 'parent_ids' contains this catalog_id
-            filter_queries = [{"term": {"parent_ids": catalog_id}}]
-
-            # Optional filter: Type
-            if type:
-                # If user asks for ?type=Catalog, we only return Catalogs
-                filter_queries.append({"term": {"type": type}})
-
-            # 3. Calculate Pagination (Search After)
-            body = {
-                "query": {"bool": {"filter": filter_queries}},
-                "sort": [{"id": {"order": "asc"}}],  # Stable sort for pagination
-                "size": limit,
-            }
-
-            # Handle search_after token - split by '|' to get all sort values
-            search_after: Optional[List[str]] = None
-            if token:
-                try:
-                    # The token should be a pipe-separated string of sort values
-                    # e.g., "collection-1"
-                    from typing import cast
-
-                    search_after_parts = cast(List[str], token.split("|"))
-                    # If the number of sort fields doesn't match token parts, ignore the token
-                    if len(search_after_parts) != len(body["sort"]):  # type: ignore
-                        search_after = None
-                    else:
-                        search_after = search_after_parts
-                except Exception:
-                    search_after = None
-
-            if search_after is not None:
-                body["search_after"] = search_after
-
-            # 4. Execute Search
-            search_result = await self.client.database.client.search(
-                index=COLLECTIONS_INDEX, body=body
+            # 2. Search for children with pagination
+            children_data, total, next_token = await search_children_with_pagination_shared(
+                self.client.database.client, catalog_id, limit, token, type
             )
 
-            #
-            hits = search_result.get("hits", {}).get("hits", [])
-            total = search_result.get("hits", {}).get("total", {}).get("value", 0)
-
+            # 3. Serialize children based on type
             children = []
-            for
-                doc = hit["_source"]
+            for doc in children_data:
                 resource_type = doc.get(
                     "type", "Collection"
                 )  # Default to Collection if missing
@@ -855,7 +1079,7 @@ class CatalogsExtension(ApiExtension):
 
                 children.append(child)
 
-            #
+            # 4. Format Response
             # The Children extension uses a specific response format
             response = {
                 "children": children,
@@ -876,14 +1100,7 @@ class CatalogsExtension(ApiExtension):
                 "numberMatched": total,
             }
 
-            #
-            next_token = None
-            if len(hits) == limit:
-                next_token_values = hits[-1].get("sort")
-                if next_token_values:
-                    # Join all sort values with '|' to create the token
-                    next_token = "|".join(str(val) for val in next_token_values)
-
+            # 5. Generate Next Link
             if next_token:
                 # Get existing query params
                 parsed_url = urlparse(str(request.url))
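The deleted inline code documents the pagination-token scheme that the shared helper now owns: a page token is the pipe-joined `sort` values of the page's last hit, fed back as Elasticsearch's `search_after`. A self-contained sketch of that round trip, following the deleted logic:

from typing import List, Optional


def encode_token(last_sort_values: List[object]) -> str:
    """Join the last hit's sort values into an opaque page token."""
    return "|".join(str(v) for v in last_sort_values)


def decode_token(token: Optional[str], num_sort_fields: int) -> Optional[List[str]]:
    """Split a token back into search_after values; reject malformed tokens."""
    if not token:
        return None
    parts = token.split("|")
    # As in the deleted code: ignore tokens that don't match the sort clause
    return parts if len(parts) == num_sort_fields else None


assert decode_token(encode_token(["collection-1"]), 1) == ["collection-1"]
assert decode_token("a|b", 1) is None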
stac_fastapi/core/extensions/collections_search.py
CHANGED

@@ -38,7 +38,7 @@ def build_get_collections_search_doc(original_endpoint):
         query: Optional[str] = Query(
             None,
             description="Additional filtering expressed as a string (legacy support)",
+            examples=["platform=landsat AND collection_category=level2"],
         ),
         limit: int = Query(
             10,
@@ -83,14 +83,16 @@ def build_get_collections_search_doc(original_endpoint):
             description=(
                 "Structured filter expression in CQL2 JSON or CQL2-text format"
             ),
+            examples=[
+                '{"op": "=", "args": [{"property": "properties.category"}, "level2"]}'
+            ],
         ),
         filter_lang: Optional[str] = Query(
             None,
             description=(
                 "Filter language. Must be 'cql2-json' or 'cql2-text' if specified"
             ),
+            examples=["cql2-json"],
         ),
     ):
         # Delegate to original endpoint with parameters
@@ -160,24 +162,26 @@ def build_post_collections_search_doc(original_post_endpoint):
             "- `sortby`: List of sort criteria objects with 'field' and 'direction' (asc/desc)\n"
             "- `fields`: Object with 'include' and 'exclude' arrays for field selection"
         ),
-        "
-        "
+        examples=[
+            {
+                "q": "landsat",
+                "query": "platform=landsat AND collection_category=level2",
+                "filter": {
+                    "op": "=",
+                    "args": [{"property": "properties.category"}, "level2"],
+                },
+                "filter_lang": "cql2-json",
+                "limit": 10,
+                "token": "next-page-token",
+                "bbox": [-180, -90, 180, 90],
+                "datetime": "2020-01-01T00:00:00Z/2021-01-01T12:31:12Z",
+                "sortby": [{"field": "id", "direction": "asc"}],
+                "fields": {
+                    "include": ["id", "title", "description"],
+                    "exclude": ["properties"],
+                },
+            }
+        ],
     ),
 ) -> Union[Collections, Response]:
     return await original_post_endpoint(request, body)
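The POST body example the hunk adds can be exercised directly. A hypothetical request against a local server; the `/collections-search` path is an assumption based on the module name, and the URL is a placeholder:

import httpx

body = {
    "q": "landsat",
    "filter": {"op": "=", "args": [{"property": "properties.category"}, "level2"]},
    "filter_lang": "cql2-json",
    "limit": 10,
}
resp = httpx.post("http://localhost:8080/collections-search", json=body)
print(resp.json().get("numberReturned"))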
stac_fastapi/core/redis_utils.py
CHANGED
stac_fastapi/core/serializers.py
CHANGED
@@ -355,6 +355,9 @@ class CatalogSerializer(Serializer):
         # Avoid modifying the input dict in-place
         catalog = deepcopy(catalog)
 
+        # Remove internal fields (not part of STAC spec)
+        catalog.pop("parent_ids", None)
+
         # Set defaults
         catalog.setdefault("type", "Catalog")
         catalog.setdefault("stac_extensions", [])
stac_fastapi/core/utilities.py
CHANGED
@@ -6,6 +6,7 @@ such as converting bounding boxes to polygon representations.
 
 import logging
 import os
+import re
 from typing import Any, Dict, List, Optional, Set, Union
 
 from stac_fastapi.types.stac import Item
@@ -70,8 +71,6 @@ def bbox2polygon(b0: float, b1: float, b2: float, b3: float) -> List[List[List[float]]]:
     return [[[b0, b1], [b2, b1], [b2, b3], [b0, b3], [b0, b1]]]
 
 
-# copied from stac-fastapi-pgstac
-# https://github.com/stac-utils/stac-fastapi-pgstac/blob/26f6d918eb933a90833f30e69e21ba3b4e8a7151/stac_fastapi/pgstac/utils.py#L10-L116
 def filter_fields(  # noqa: C901
     item: Union[Item, Dict[str, Any]],
     include: Optional[Set[str]] = None,
@@ -87,15 +86,60 @@ def filter_fields(  # noqa: C901
     if not include and not exclude:
         return item
 
+    def match_pattern(pattern: str, key: str) -> bool:
+        """Check if a key matches a wildcard pattern."""
+        regex_pattern = "^" + re.escape(pattern).replace(r"\*", ".*") + "$"
+        return bool(re.match(regex_pattern, key))
+
+    def get_matching_keys(source: Dict[str, Any], pattern: str) -> List[str]:
+        """Get all keys that match the pattern."""
+        if not isinstance(source, dict):
+            return []
+        return [key for key in source.keys() if match_pattern(pattern, key)]
+
     def include_fields(
         source: Dict[str, Any], fields: Optional[Set[str]]
     ) -> Dict[str, Any]:
+        """Include only the specified fields from the source dictionary."""
         if not fields:
             return source
 
+        def recursive_include(
+            source: Dict[str, Any], path_parts: List[str]
+        ) -> Dict[str, Any]:
+            """Recursively include fields matching the pattern path."""
+            if not path_parts:
+                return source
+
+            if not isinstance(source, dict):
+                return {}
+
+            current_pattern = path_parts[0]
+            remaining_parts = path_parts[1:]
+
+            matching_keys = get_matching_keys(source, current_pattern)
+
+            if not matching_keys:
+                return {}
+
+            result: Dict[str, Any] = {}
+            for key in matching_keys:
+                if remaining_parts:
+                    if isinstance(source[key], dict):
+                        value = recursive_include(source[key], remaining_parts)
+                        if value:
+                            result[key] = value
+                else:
+                    result[key] = source[key]
+
+            return result
+
         clean_item: Dict[str, Any] = {}
         for key_path in fields or []:
+            if "*" in key_path:
+                value = recursive_include(source, key_path.split("."))
+                dict_deep_update(clean_item, value)
+                continue
             key_path_parts = key_path.split(".")
             key_root = key_path_parts[0]
             if key_root in source:
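The wildcard matching reduces to a small regex translation. The helper below is copied from the hunk above so its behavior can be checked standalone: escape the pattern, turn the escaped "*" back into ".*", and anchor both ends so a pattern without a wildcard is an exact match.

import re


def match_pattern(pattern: str, key: str) -> bool:
    # "eo:*" becomes the regex "^eo:.*$"; "datetime" becomes "^datetime$"
    regex_pattern = "^" + re.escape(pattern).replace(r"\*", ".*") + "$"
    return bool(re.match(regex_pattern, key))


assert match_pattern("eo:*", "eo:cloud_cover")
assert not match_pattern("eo:*", "proj:epsg")
assert match_pattern("datetime", "datetime")  # no wildcard: exact match only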
@@ -125,12 +169,46 @@ def filter_fields(  # noqa: C901
             # The key, or root key of a multi-part key, is not present in the item,
             # so it is ignored
             pass
+
         return clean_item
 
+    def exclude_fields(
+        source: Dict[str, Any],
+        fields: Optional[Set[str]],
+    ) -> None:
+        """Exclude fields from source."""
+
+        def recursive_exclude(
+            source: Dict[str, Any], path_parts: List[str], current_path: str = ""
+        ) -> None:
+            """Recursively exclude fields matching the pattern path."""
+            if not path_parts or not isinstance(source, dict):
+                return
+
+            current_pattern = path_parts[0]
+            remaining_parts = path_parts[1:]
+
+            matching_keys = get_matching_keys(source, current_pattern)
+
+            for key in list(matching_keys):
+                if key not in source:
+                    continue
+
+                # Build the full path for this key
+                full_path = f"{current_path}.{key}" if current_path else key
+
+                if remaining_parts:
+                    if isinstance(source[key], dict):
+                        recursive_exclude(source[key], remaining_parts, full_path)
+                        if not source[key]:
+                            del source[key]
+                else:
+                    source.pop(key, None)
+
         for key_path in fields or []:
+            if "*" in key_path:
+                recursive_exclude(source, key_path.split("."))
+                continue
             key_path_part = key_path.split(".")
             key_root = key_path_part[0]
             if key_root in source:
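Assuming `filter_fields` is imported from `stac_fastapi.core.utilities` as the file path suggests, a sketch of how the new wildcard paths behave on a made-up item; exact output may also include any fields the function always preserves:

from stac_fastapi.core.utilities import filter_fields

item = {
    "id": "scene-1",
    "properties": {
        "eo:cloud_cover": 5,
        "eo:snow_cover": 0,
        "datetime": "2024-01-01T00:00:00Z",
    },
}

# Keep only properties keys matching "eo:*"
included = filter_fields(item, include={"properties.eo:*"})
print(included)  # properties should contain eo:cloud_cover and eo:snow_cover

# Drop them instead
excluded = filter_fields(item, exclude={"properties.eo:*"})
print(excluded)  # properties should retain only datetime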
stac_fastapi/core/version.py
CHANGED
@@ -1,2 +1,2 @@
 """library version."""
-__version__ = "6.8.1"
+__version__ = "6.10.0"
{stac_fastapi_core-6.8.1.dist-info → stac_fastapi_core-6.10.0.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: stac_fastapi_core
-Version: 6.8.1
+Version: 6.10.0
 Summary: Core library for the Elasticsearch and Opensearch stac-fastapi backends.
 Project-URL: Homepage, https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch
 License: MIT
{stac_fastapi_core-6.8.1.dist-info → stac_fastapi_core-6.10.0.dist-info}/RECORD
CHANGED

@@ -2,26 +2,26 @@ stac_fastapi/core/__init__.py,sha256=8izV3IWRGdXmDOK1hIPQAanbWs9EI04PJCGgqG1ZGIs
 stac_fastapi/core/base_database_logic.py,sha256=JL7DRcDdqeaLbSPPGcIUMs7q6I3Gm_E5XCOwFG458Io,6053
 stac_fastapi/core/base_settings.py,sha256=R3_Sx7n5XpGMs3zAwFJD7y008WvGU_uI2xkaabm82Kg,239
 stac_fastapi/core/basic_auth.py,sha256=RhFv3RVSHF6OaqnaaU2DO4ncJ_S5nB1q8UNpnVJJsrk,2155
-stac_fastapi/core/core.py,sha256=
+stac_fastapi/core/core.py,sha256=OKgezb8kEaB10aeNpov4xxHabM7_5ABl3wD6VaVdChE,52300
 stac_fastapi/core/datetime_utils.py,sha256=QygF2mJFfI_zqCwmSIec3HYqrsVsn3nUcaRQx3CD7Zw,4683
 stac_fastapi/core/queryables.py,sha256=0gKdxlmCVaIj3ODpmyIfzLChEB1nNKXPZhA3K9ApfL0,3755
 stac_fastapi/core/rate_limit.py,sha256=Gu8dAaJReGsj1L91U6m2tflU6RahpXDRs2-AYSKoybA,1318
-stac_fastapi/core/redis_utils.py,sha256=
+stac_fastapi/core/redis_utils.py,sha256=xqZfXwrZ0Wei6EDereOygW_Aq6DBDykhQYD86Ws9P28,9706
 stac_fastapi/core/route_dependencies.py,sha256=hdtuMkv-zY1vg0YxiCz1aKP0SbBcORqDGEKDGgEazW8,5482
-stac_fastapi/core/serializers.py,sha256=
+stac_fastapi/core/serializers.py,sha256=r9BhjoAWH5Kg6ypEugPZcJ7tPaFx8vWaMJkfoPW5Fdw,13443
 stac_fastapi/core/session.py,sha256=aXqu4LXfVbAAsChMVXd9gAhczA2bZPne6HqPeklAwMY,474
-stac_fastapi/core/utilities.py,sha256=
-stac_fastapi/core/version.py,sha256=
+stac_fastapi/core/utilities.py,sha256=B-tLc_H_v92q8ZNpzk-9nKQMKe-bVHUk64HpybGqYX0,10398
+stac_fastapi/core/version.py,sha256=bQiD_D-FuZl4YJZxrz9LK-THtzxnF7e-QdjJAiGBoSY,46
 stac_fastapi/core/extensions/__init__.py,sha256=oaK-UJDQSEISdQ8VtM0ESxpsv7Hx1HbAdmMnh6MTFD4,356
 stac_fastapi/core/extensions/aggregation.py,sha256=v1hUHqlYuMqfQ554g3cTp16pUyRYucQxPERbHPAFtf8,1878
-stac_fastapi/core/extensions/catalogs.py,sha256=
-stac_fastapi/core/extensions/collections_search.py,sha256=
+stac_fastapi/core/extensions/catalogs.py,sha256=JR9ireabml5zCcWVM1FPIWZjb87Ffnk81MQm8cnUi6I,45668
+stac_fastapi/core/extensions/collections_search.py,sha256=ZxCRQ5jyhrg6zDs037qLDzCz0T3tq3a9_lXfYQ9GweQ,14546
 stac_fastapi/core/extensions/fields.py,sha256=NCT5XHvfaf297eDPNaIFsIzvJnbbUTpScqF0otdx0NA,1066
 stac_fastapi/core/extensions/filter.py,sha256=-NQGME7rR_ereuDx-LAa1M5JhEXFaKiTtkH2asraYHE,2998
 stac_fastapi/core/extensions/query.py,sha256=Xmo8pfZEZKPudZEjjozv3R0wLOP0ayjC9E67sBOXqWY,1803
 stac_fastapi/core/models/__init__.py,sha256=sUsEB7umGZVYXjT4EHqLwm8p2wevtRBdig2Ioj2ZdVQ,631
 stac_fastapi/core/models/links.py,sha256=5KEZKisFN34U4UuOzSQnDy0QdsUOT2VRuuY36vs-FGw,7074
 stac_fastapi/core/models/search.py,sha256=7SgAUyzHGXBXSqB4G6cwq9FMwoAS00momb7jvBkjyow,27
-stac_fastapi_core-6.8.1.dist-info/METADATA,sha256=
-stac_fastapi_core-6.8.1.dist-info/WHEEL,sha256=
-stac_fastapi_core-6.8.1.dist-info/RECORD,,
+stac_fastapi_core-6.10.0.dist-info/METADATA,sha256=DQkAL0txwG2gljAhY5jwcijSO-PIFXprbmD_SkwIVLU,3481
+stac_fastapi_core-6.10.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+stac_fastapi_core-6.10.0.dist-info/RECORD,,
{stac_fastapi_core-6.8.1.dist-info → stac_fastapi_core-6.10.0.dist-info}/WHEEL
File without changes