stac-fastapi-core 4.0.0a1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- stac_fastapi/core/__init__.py +1 -0
- stac_fastapi/core/base_database_logic.py +54 -0
- stac_fastapi/core/base_settings.py +12 -0
- stac_fastapi/core/basic_auth.py +61 -0
- stac_fastapi/core/core.py +1068 -0
- stac_fastapi/core/database_logic.py +226 -0
- stac_fastapi/core/datetime_utils.py +39 -0
- stac_fastapi/core/extensions/__init__.py +5 -0
- stac_fastapi/core/extensions/aggregation.py +577 -0
- stac_fastapi/core/extensions/fields.py +41 -0
- stac_fastapi/core/extensions/filter.py +202 -0
- stac_fastapi/core/extensions/query.py +79 -0
- stac_fastapi/core/models/__init__.py +1 -0
- stac_fastapi/core/models/links.py +205 -0
- stac_fastapi/core/models/search.py +1 -0
- stac_fastapi/core/rate_limit.py +44 -0
- stac_fastapi/core/route_dependencies.py +176 -0
- stac_fastapi/core/serializers.py +177 -0
- stac_fastapi/core/session.py +25 -0
- stac_fastapi/core/utilities.py +135 -0
- stac_fastapi/core/version.py +2 -0
- stac_fastapi_core-4.0.0a1.dist-info/METADATA +361 -0
- stac_fastapi_core-4.0.0a1.dist-info/RECORD +25 -0
- stac_fastapi_core-4.0.0a1.dist-info/WHEEL +5 -0
- stac_fastapi_core-4.0.0a1.dist-info/top_level.txt +1 -0
stac_fastapi/core/route_dependencies.py

@@ -0,0 +1,176 @@
"""Route Dependencies Module."""

import importlib
import inspect
import json
import logging
import os
from typing import List

from fastapi import Depends
from jsonschema import validate

_LOGGER = logging.getLogger("uvicorn.default")


route_dependencies_schema = {
    "type": "array",
    "items": {
        "type": "object",
        "properties": {
            "routes": {
                "type": "array",
                "items": {
                    "type": "object",
                    "properties": {
                        "method": {
                            "anyOf": [
                                {"$ref": "#/$defs/method"},
                                {
                                    "type": "array",
                                    "items": {"$ref": "#/$defs/method"},
                                    "uniqueItems": True,
                                },
                            ]
                        },
                        "path": {
                            "anyOf": [
                                {"$ref": "#/$defs/path"},
                                {
                                    "type": "array",
                                    "items": {"$ref": "#/$defs/path"},
                                    "uniqueItems": True,
                                },
                            ]
                        },
                        "type": {"type": "string"},
                    },
                    "required": ["method", "path"],
                    "additionalProperties": False,
                },
            },
            "dependencies": {
                "type": "array",
                "items": {
                    "type": "object",
                    "properties": {
                        "method": {"type": "string"},
                        "args": {"type": "string"},
                        "kwargs": {"type": "object"},
                    },
                    "required": ["method"],
                    "additionalProperties": False,
                },
            },
        },
        "dependencies": {
            "routes": ["dependencies"],
            "dependencies": ["routes"],
        },
        "additionalProperties": False,
    },
    "$defs": {
        "method": {
            "type": "string",
            "enum": ["*", "GET", "POST", "PUT", "PATCH", "DELETE"],
        },
        "path": {
            "type": "string",
            "pattern": r"^\*$|\/.*",
        },
    },
}


def get_route_dependencies_conf(route_dependencies_env: str) -> list:
    """Get route dependencies configuration from a file or environment variable."""
    if os.path.exists(route_dependencies_env):
        with open(route_dependencies_env, encoding="utf-8") as route_dependencies_file:
            route_dependencies_conf = json.load(route_dependencies_file)

    else:
        try:
            route_dependencies_conf = json.loads(route_dependencies_env)
        except json.JSONDecodeError as exception:
            _LOGGER.error("Invalid JSON format for route dependencies. %s", exception)
            raise

    validate(instance=route_dependencies_conf, schema=route_dependencies_schema)

    return route_dependencies_conf


def get_routes(route_dependency_conf: dict) -> list:
    """Get routes from route dependency configuration."""
    # separate out any path lists
    intermediate_routes = []
    for route in route_dependency_conf["routes"]:

        if isinstance(route["path"], list):
            for path in route["path"]:
                intermediate_routes.append({**route, "path": path})

        else:
            intermediate_routes.append(route)

    # separate out any method lists
    routes = []
    for route in intermediate_routes:

        if isinstance(route["method"], list):
            for method in route["method"]:
                routes.append({**route, "method": method})

        else:
            routes.append(route)

    return routes


def get_dependencies(route_dependency_conf: dict) -> list:
    """Get dependencies from route dependency configuration."""
    dependencies = []
    for dependency_conf in route_dependency_conf["dependencies"]:

        module_name, method_name = dependency_conf["method"].rsplit(".", 1)
        module = importlib.import_module(module_name)
        dependency = getattr(module, method_name)

        if inspect.isclass(dependency):

            dependency = dependency(
                *dependency_conf.get("args", []), **dependency_conf.get("kwargs", {})
            )

        dependencies.append(Depends(dependency))

    return dependencies


def get_route_dependencies(route_dependencies_env: str = "") -> list:
    """
    Route dependencies generator.

    Generate a set of route dependencies for authentication to the
    provided FastAPI application.
    """
    route_dependencies_env = os.environ.get(
        "STAC_FASTAPI_ROUTE_DEPENDENCIES", route_dependencies_env
    )
    route_dependencies: List[tuple] = []

    if not route_dependencies_env:
        _LOGGER.info("Authentication skipped.")
        return route_dependencies

    _LOGGER.info("Authentication enabled.")

    route_dependencies_conf = get_route_dependencies_conf(route_dependencies_env)

    for route_dependency_conf in route_dependencies_conf:

        routes = get_routes(route_dependency_conf)
        dependencies = get_dependencies(route_dependency_conf)
        route_dependencies.append((routes, dependencies))

    return route_dependencies
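As a usage sketch (not taken from the package's own docs), the configuration below is one shape that validates against route_dependencies_schema. The routes shown and the choice of FastAPI's built-in fastapi.security.HTTPBasic as the dependency are illustrative assumptions; any importable callable or class can be named in "method".

import json
import os

from stac_fastapi.core.route_dependencies import get_route_dependencies

# Hypothetical configuration: protect some write routes with HTTP Basic auth.
conf = [
    {
        "routes": [
            {"method": ["POST", "PUT", "DELETE"], "path": "/collections"},
            {"method": "*", "path": "/collections/{collection_id}/items"},
        ],
        "dependencies": [
            {"method": "fastapi.security.HTTPBasic", "kwargs": {"auto_error": True}}
        ],
    }
]

# The value may be inline JSON (as here) or a path to a JSON file on disk.
os.environ["STAC_FASTAPI_ROUTE_DEPENDENCIES"] = json.dumps(conf)

# Each config entry becomes a (routes, dependencies) tuple; method/path lists
# are expanded into one route per method/path combination.
route_dependencies = get_route_dependencies()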
stac_fastapi/core/serializers.py

@@ -0,0 +1,177 @@
"""Serializers."""
import abc
from copy import deepcopy
from typing import Any, List, Optional

import attr
from starlette.requests import Request

from stac_fastapi.core.datetime_utils import now_to_rfc3339_str
from stac_fastapi.core.models.links import CollectionLinks
from stac_fastapi.types import stac as stac_types
from stac_fastapi.types.links import ItemLinks, resolve_links


@attr.s
class Serializer(abc.ABC):
    """Defines serialization methods between the API and the data model.

    This class is meant to be subclassed and implemented by specific serializers for different STAC objects (e.g. Item, Collection).
    """

    @classmethod
    @abc.abstractmethod
    def db_to_stac(cls, item: dict, base_url: str) -> Any:
        """Transform database model to STAC object.

        Arguments:
            item (dict): A dictionary representing the database model.
            base_url (str): The base URL of the STAC API.

        Returns:
            Any: A STAC object, e.g. an `Item` or `Collection`, representing the input `item`.
        """
        ...

    @classmethod
    @abc.abstractmethod
    def stac_to_db(cls, stac_object: Any, base_url: str) -> dict:
        """Transform STAC object to database model.

        Arguments:
            stac_object (Any): A STAC object, e.g. an `Item` or `Collection`.
            base_url (str): The base URL of the STAC API.

        Returns:
            dict: A dictionary representing the database model.
        """
        ...


class ItemSerializer(Serializer):
    """Serialization methods for STAC items."""

    @classmethod
    def stac_to_db(cls, stac_data: stac_types.Item, base_url: str) -> stac_types.Item:
        """Transform STAC item to database-ready STAC item.

        Args:
            stac_data (stac_types.Item): The STAC item object to be transformed.
            base_url (str): The base URL for the STAC API.

        Returns:
            stac_types.Item: The database-ready STAC item object.
        """
        item_links = resolve_links(stac_data.get("links", []), base_url)
        stac_data["links"] = item_links

        now = now_to_rfc3339_str()
        if "created" not in stac_data["properties"]:
            stac_data["properties"]["created"] = now
        stac_data["properties"]["updated"] = now
        return stac_data

    @classmethod
    def db_to_stac(cls, item: dict, base_url: str) -> stac_types.Item:
        """Transform database-ready STAC item to STAC item.

        Args:
            item (dict): The database-ready STAC item to be transformed.
            base_url (str): The base URL for the STAC API.

        Returns:
            stac_types.Item: The STAC item object.
        """
        item_id = item["id"]
        collection_id = item["collection"]
        item_links = ItemLinks(
            collection_id=collection_id, item_id=item_id, base_url=base_url
        ).create_links()

        original_links = item.get("links", [])
        if original_links:
            item_links += resolve_links(original_links, base_url)

        return stac_types.Item(
            type="Feature",
            stac_version=item.get("stac_version", ""),
            stac_extensions=item.get("stac_extensions", []),
            id=item_id,
            collection=item.get("collection", ""),
            geometry=item.get("geometry", {}),
            bbox=item.get("bbox", []),
            properties=item.get("properties", {}),
            links=item_links,
            assets=item.get("assets", {}),
        )


class CollectionSerializer(Serializer):
    """Serialization methods for STAC collections."""

    @classmethod
    def stac_to_db(
        cls, collection: stac_types.Collection, request: Request
    ) -> stac_types.Collection:
        """
        Transform STAC Collection to database-ready STAC collection.

        Args:
            collection: the STAC Collection object to be transformed
            request (starlette.requests.Request): the API request

        Returns:
            stac_types.Collection: The database-ready STAC Collection object.
        """
        collection = deepcopy(collection)
        collection["links"] = resolve_links(
            collection.get("links", []), str(request.base_url)
        )
        return collection

    @classmethod
    def db_to_stac(
        cls, collection: dict, request: Request, extensions: Optional[List[str]] = []
    ) -> stac_types.Collection:
        """Transform database model to STAC collection.

        Args:
            collection (dict): The collection data in dictionary form, extracted from the database.
            request (starlette.requests.Request): the API request
            extensions: A list of the extension class names (`ext.__name__`) of all enabled STAC API extensions.

        Returns:
            stac_types.Collection: The STAC collection object.
        """
        # Avoid modifying the input dict in-place ... doing so breaks some tests
        collection = deepcopy(collection)

        # Set defaults
        collection_id = collection.get("id")
        collection.setdefault("type", "Collection")
        collection.setdefault("stac_extensions", [])
        collection.setdefault("stac_version", "")
        collection.setdefault("title", "")
        collection.setdefault("description", "")
        collection.setdefault("keywords", [])
        collection.setdefault("license", "")
        collection.setdefault("providers", [])
        collection.setdefault("summaries", {})
        collection.setdefault(
            "extent", {"spatial": {"bbox": []}, "temporal": {"interval": []}}
        )
        collection.setdefault("assets", {})

        # Create the collection links using CollectionLinks
        collection_links = CollectionLinks(
            collection_id=collection_id, request=request, extensions=extensions
        ).create_links()

        # Add any additional links from the collection dictionary
        original_links = collection.get("links")
        if original_links:
            collection_links += resolve_links(original_links, str(request.base_url))
        collection["links"] = collection_links

        # Return the stac_types.Collection object
        return stac_types.Collection(**collection)
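A brief usage sketch (assuming stac-fastapi-core and its stac_fastapi.types dependency are installed; the item values below are hypothetical): stac_to_db stamps created/updated timestamps before a write, and db_to_stac regenerates canonical links from the stored ids when reading back.

from stac_fastapi.core.serializers import ItemSerializer

# Hypothetical item; only the fields used by the serializer are shown.
item = {
    "type": "Feature",
    "id": "item-1",
    "collection": "collection-1",
    "geometry": {"type": "Point", "coordinates": [0.0, 0.0]},
    "bbox": [0.0, 0.0, 0.0, 0.0],
    "properties": {"datetime": "2020-01-01T00:00:00Z"},
    "assets": {},
    "links": [],
}
base_url = "http://localhost:8080/"

# "created" is set only when missing; "updated" is refreshed on every call.
db_ready = ItemSerializer.stac_to_db(item, base_url)
assert "created" in db_ready["properties"] and "updated" in db_ready["properties"]

# Links are rebuilt from the collection and item ids against the base URL.
stac_item = ItemSerializer.db_to_stac(db_ready, base_url)
assert stac_item["links"]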
stac_fastapi/core/session.py

@@ -0,0 +1,25 @@
"""database session management."""
import logging

import attr

logger = logging.getLogger(__name__)


@attr.s
class Session:
    """Database session management."""

    @classmethod
    def create_from_env(cls):
        """Create from environment."""
        ...

    @classmethod
    def create_from_settings(cls, settings):
        """Create a Session object from settings."""
        ...

    def __attrs_post_init__(self):
        """Post init handler."""
        ...
stac_fastapi/core/utilities.py

@@ -0,0 +1,135 @@
"""Module for geospatial processing functions.

This module contains functions for transforming geospatial coordinates,
such as converting bounding boxes to polygon representations.
"""
from typing import Any, Dict, List, Optional, Set, Union

from stac_fastapi.types.stac import Item

MAX_LIMIT = 10000


def bbox2polygon(b0: float, b1: float, b2: float, b3: float) -> List[List[List[float]]]:
    """Transform a bounding box represented by its four coordinates `b0`, `b1`, `b2`, and `b3` into a polygon.

    Args:
        b0 (float): The x-coordinate of the lower-left corner of the bounding box.
        b1 (float): The y-coordinate of the lower-left corner of the bounding box.
        b2 (float): The x-coordinate of the upper-right corner of the bounding box.
        b3 (float): The y-coordinate of the upper-right corner of the bounding box.

    Returns:
        List[List[List[float]]]: A polygon represented as a list of lists of coordinates.
    """
    return [[[b0, b1], [b2, b1], [b2, b3], [b0, b3], [b0, b1]]]


# copied from stac-fastapi-pgstac
# https://github.com/stac-utils/stac-fastapi-pgstac/blob/26f6d918eb933a90833f30e69e21ba3b4e8a7151/stac_fastapi/pgstac/utils.py#L10-L116
def filter_fields(  # noqa: C901
    item: Union[Item, Dict[str, Any]],
    include: Optional[Set[str]] = None,
    exclude: Optional[Set[str]] = None,
) -> Item:
    """Preserve and remove fields as indicated by the fields extension include/exclude sets.

    Returns a shallow copy of the Item with the fields filtered.

    This will not perform a deep copy; values of the original item will be referenced
    in the return item.
    """
    if not include and not exclude:
        return item

    # Build a shallow copy of included fields on an item, or a sub-tree of an item
    def include_fields(
        source: Dict[str, Any], fields: Optional[Set[str]]
    ) -> Dict[str, Any]:
        if not fields:
            return source

        clean_item: Dict[str, Any] = {}
        for key_path in fields or []:
            key_path_parts = key_path.split(".")
            key_root = key_path_parts[0]
            if key_root in source:
                if isinstance(source[key_root], dict) and len(key_path_parts) > 1:
                    # The root of this key path on the item is a dict, and the
                    # key path indicates a sub-key to be included. Walk the dict
                    # from the root key and get the full nested value to include.
                    value = include_fields(
                        source[key_root], fields={".".join(key_path_parts[1:])}
                    )

                    if isinstance(clean_item.get(key_root), dict):
                        # A previously specified key and sub-keys may have been included
                        # already, so do a deep merge update if the root key already exists.
                        dict_deep_update(clean_item[key_root], value)
                    else:
                        # The root key does not exist, so add it. Fields
                        # extension only allows nested referencing on dicts, so
                        # this won't overwrite anything.
                        clean_item[key_root] = value
                else:
                    # The item value to include is not a dict, or, it is a dict but the
                    # key path is for the whole value, not a sub-key. Include the entire
                    # value in the cleaned item.
                    clean_item[key_root] = source[key_root]
            else:
                # The key, or root key of a multi-part key, is not present in the item,
                # so it is ignored
                pass
        return clean_item

    # For an item built up for included fields, remove excluded fields. This
    # modifies `source` in place.
    def exclude_fields(source: Dict[str, Any], fields: Optional[Set[str]]) -> None:
        for key_path in fields or []:
            key_path_part = key_path.split(".")
            key_root = key_path_part[0]
            if key_root in source:
                if isinstance(source[key_root], dict) and len(key_path_part) > 1:
                    # Walk the nested path of this key to remove the leaf-key
                    exclude_fields(
                        source[key_root], fields={".".join(key_path_part[1:])}
                    )
                    # If, after removing the leaf-key, the root is now an empty
                    # dict, remove it entirely
                    if not source[key_root]:
                        del source[key_root]
                else:
                    # The key's value is not a dict, or there is no sub-key to remove. The
                    # entire key can be removed from the source.
                    source.pop(key_root, None)

    # Coalesce incoming type to a dict
    item = dict(item)

    clean_item = include_fields(item, include)

    # If, after including all the specified fields, there are no included properties,
    # return just id and collection.
    if not clean_item:
        return Item({"id": item["id"], "collection": item["collection"]})

    exclude_fields(clean_item, exclude)

    return Item(**clean_item)


def dict_deep_update(merge_to: Dict[str, Any], merge_from: Dict[str, Any]) -> None:
    """Perform a deep update of two dicts.

    merge_to is updated in-place with the values from merge_from.
    merge_from values take precedence over existing values in merge_to.
    """
    for k, v in merge_from.items():
        if (
            k in merge_to
            and isinstance(merge_to[k], dict)
            and isinstance(merge_from[k], dict)
        ):
            dict_deep_update(merge_to[k], merge_from[k])
        else:
            merge_to[k] = v