stac-fastapi-core 4.0.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,577 @@
+ """Request model for the Aggregation extension."""
+
+ from datetime import datetime
+ from datetime import datetime as datetime_type
+ from typing import Dict, List, Literal, Optional, Union
+ from urllib.parse import unquote_plus, urljoin
+
+ import attr
+ import orjson
+ from fastapi import HTTPException, Path, Request
+ from pygeofilter.backends.cql2_json import to_cql2
+ from pygeofilter.parsers.cql2_text import parse as parse_cql2_text
+ from stac_pydantic.shared import BBox
+ from typing_extensions import Annotated
+
+ from stac_fastapi.core.base_database_logic import BaseDatabaseLogic
+ from stac_fastapi.core.base_settings import ApiBaseSettings
+ from stac_fastapi.core.datetime_utils import datetime_to_str
+ from stac_fastapi.core.session import Session
+ from stac_fastapi.extensions.core.aggregation.client import AsyncBaseAggregationClient
+ from stac_fastapi.extensions.core.aggregation.request import (
+     AggregationExtensionGetRequest,
+     AggregationExtensionPostRequest,
+ )
+ from stac_fastapi.extensions.core.aggregation.types import (
+     Aggregation,
+     AggregationCollection,
+ )
+ from stac_fastapi.extensions.core.filter.request import (
+     FilterExtensionGetRequest,
+     FilterExtensionPostRequest,
+ )
+ from stac_fastapi.types.rfc3339 import DateTimeType
+
+ FilterLang = Literal["cql-json", "cql2-json", "cql2-text"]
+
+
+ @attr.s
+ class EsAggregationExtensionGetRequest(
+     AggregationExtensionGetRequest, FilterExtensionGetRequest
+ ):
+     """Implementation-specific query parameters for aggregation precision."""
+
+     collection_id: Optional[
+         Annotated[str, Path(description="Collection ID")]
+     ] = attr.ib(default=None)
+
+     centroid_geohash_grid_frequency_precision: Optional[int] = attr.ib(default=None)
+     centroid_geohex_grid_frequency_precision: Optional[int] = attr.ib(default=None)
+     centroid_geotile_grid_frequency_precision: Optional[int] = attr.ib(default=None)
+     geometry_geohash_grid_frequency_precision: Optional[int] = attr.ib(default=None)
+     geometry_geotile_grid_frequency_precision: Optional[int] = attr.ib(default=None)
+     datetime_frequency_interval: Optional[str] = attr.ib(default=None)
+
+
+ class EsAggregationExtensionPostRequest(
+     AggregationExtensionPostRequest, FilterExtensionPostRequest
+ ):
+     """Implementation-specific body parameters for aggregation precision."""
+
+     centroid_geohash_grid_frequency_precision: Optional[int] = None
+     centroid_geohex_grid_frequency_precision: Optional[int] = None
+     centroid_geotile_grid_frequency_precision: Optional[int] = None
+     geometry_geohash_grid_frequency_precision: Optional[int] = None
+     geometry_geotile_grid_frequency_precision: Optional[int] = None
+     datetime_frequency_interval: Optional[str] = None
+
+
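+ # Illustrative POST body accepted by EsAggregationExtensionPostRequest
+ # (hypothetical values; the field names come from the models above and the
+ # inherited aggregation/filter request models):
+ #
+ #     {
+ #         "collections": ["sentinel-2-l2a"],
+ #         "aggregations": ["total_count", "centroid_geohash_grid_frequency"],
+ #         "centroid_geohash_grid_frequency_precision": 5
+ #     }
+
+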
+ @attr.s
+ class EsAsyncAggregationClient(AsyncBaseAggregationClient):
+     """Defines a pattern for implementing the STAC aggregation extension."""
+
+     database: BaseDatabaseLogic = attr.ib()
+     settings: ApiBaseSettings = attr.ib()
+     session: Session = attr.ib(default=attr.Factory(Session.create_from_env))
+
+     DEFAULT_AGGREGATIONS = [
+         {"name": "total_count", "data_type": "integer"},
+         {"name": "datetime_max", "data_type": "datetime"},
+         {"name": "datetime_min", "data_type": "datetime"},
+         {
+             "name": "datetime_frequency",
+             "data_type": "frequency_distribution",
+             "frequency_distribution_data_type": "datetime",
+         },
+         {
+             "name": "collection_frequency",
+             "data_type": "frequency_distribution",
+             "frequency_distribution_data_type": "string",
+         },
+         {
+             "name": "geometry_geohash_grid_frequency",
+             "data_type": "frequency_distribution",
+             "frequency_distribution_data_type": "string",
+         },
+         {
+             "name": "geometry_geotile_grid_frequency",
+             "data_type": "frequency_distribution",
+             "frequency_distribution_data_type": "string",
+         },
+     ]
+
+     GEO_POINT_AGGREGATIONS = [
+         {
+             "name": "grid_code_frequency",
+             "data_type": "frequency_distribution",
+             "frequency_distribution_data_type": "string",
+         },
+         {
+             "name": "centroid_geohash_grid_frequency",
+             "data_type": "frequency_distribution",
+             "frequency_distribution_data_type": "string",
+         },
+         {
+             "name": "centroid_geohex_grid_frequency",
+             "data_type": "frequency_distribution",
+             "frequency_distribution_data_type": "string",
+         },
+         {
+             "name": "centroid_geotile_grid_frequency",
+             "data_type": "frequency_distribution",
+             "frequency_distribution_data_type": "string",
+         },
+     ]
+
+     MAX_GEOHASH_PRECISION = 12
+     MAX_GEOHEX_PRECISION = 15
+     MAX_GEOTILE_PRECISION = 29
+     SUPPORTED_DATETIME_INTERVAL = {"day", "month", "year"}
+     DEFAULT_DATETIME_INTERVAL = "month"
+
+     async def get_aggregations(self, collection_id: Optional[str] = None, **kwargs):
+         """Get the available aggregations for a catalog or collection defined in the STAC JSON. If none are defined, the default aggregations are used."""
+         request: Request = kwargs["request"]
+         base_url = str(request.base_url)
+         links = [{"rel": "root", "type": "application/json", "href": base_url}]
+
+         if collection_id is not None:
+             collection_endpoint = urljoin(base_url, f"collections/{collection_id}")
+             links.extend(
+                 [
+                     {
+                         "rel": "collection",
+                         "type": "application/json",
+                         "href": collection_endpoint,
+                     },
+                     {
+                         "rel": "self",
+                         "type": "application/json",
+                         "href": urljoin(collection_endpoint + "/", "aggregations"),
+                     },
+                 ]
+             )
+             # check_collection_exists returns None when the collection exists
+             if await self.database.check_collection_exists(collection_id) is None:
+                 collection = await self.database.find_collection(collection_id)
+                 aggregations = collection.get(
+                     "aggregations", self.DEFAULT_AGGREGATIONS.copy()
+                 )
+             else:
+                 raise IndexError(f"Collection {collection_id} does not exist")
+         else:
+             links.append(
+                 {
+                     "rel": "self",
+                     "type": "application/json",
+                     "href": urljoin(base_url, "aggregations"),
+                 }
+             )
+
+             aggregations = self.DEFAULT_AGGREGATIONS
+         return AggregationCollection(
+             type="AggregationCollection", aggregations=aggregations, links=links
+         )
+
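+     # Illustrative response shape (values abridged; link hrefs depend on the
+     # request's base URL):
+     #
+     #     {
+     #         "type": "AggregationCollection",
+     #         "aggregations": [{"name": "total_count", "data_type": "integer"}, ...],
+     #         "links": [{"rel": "root", ...}, {"rel": "self", ...}]
+     #     }
+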
+     def extract_precision(
+         self, precision: Union[int, None], min_value: int, max_value: int
+     ) -> Optional[int]:
+         """Ensure that the aggregation precision value is within the valid range; return the minimum value if none is given."""
+         if precision is not None:
+             if precision < min_value or precision > max_value:
+                 raise HTTPException(
+                     status_code=400,
+                     detail=f"Invalid precision. Must be a number between {min_value} and {max_value} inclusive",
+                 )
+             return precision
+         else:
+             return min_value
+
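+     # Behavior sketch (illustrative): extract_precision(None, 1, 12) -> 1;
+     # extract_precision(8, 1, 12) -> 8; extract_precision(20, 1, 12) raises
+     # an HTTPException with status 400.
+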
+     def extract_date_histogram_interval(self, value: Optional[str]) -> str:
+         """
+         Ensure that the interval for the date histogram is valid. If no value is provided, the default will be returned.
+
+         Args:
+             value: value entered by the user
+
+         Returns:
+             string containing the date histogram interval to use.
+
+         Raises:
+             HTTPException: if the supplied value is not in the supported intervals
+         """
+         if value is not None:
+             if value not in self.SUPPORTED_DATETIME_INTERVAL:
+                 raise HTTPException(
+                     status_code=400,
+                     detail=f"Invalid datetime interval. Must be one of {self.SUPPORTED_DATETIME_INTERVAL}",
+                 )
+             else:
+                 return value
+         else:
+             return self.DEFAULT_DATETIME_INTERVAL
+
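+     # Behavior sketch (illustrative): "day" -> "day"; None -> "month" (the
+     # default interval); "hour" raises an HTTPException with status 400.
+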
+     @staticmethod
+     def _return_date(
+         interval: Optional[Union[DateTimeType, str]]
+     ) -> Dict[str, Optional[str]]:
+         """
+         Convert a date interval into a dictionary for filtering search results with Elasticsearch.
+
+         The interval may be a single datetime, a tuple of one or two datetimes,
+         a string representing a datetime or range, or None.
+
+         This function ensures the output dictionary contains 'gte' and 'lte' keys,
+         even if they are set to None, to prevent KeyError in the consuming logic.
+
+         Args:
+             interval (Optional[Union[DateTimeType, str]]): The date interval, which might be a single datetime,
+                 a tuple with one or two datetimes, a string, or None.
+
+         Returns:
+             dict: A dictionary representing the date interval for use in filtering search results,
+                 always containing 'gte' and 'lte' keys.
+         """
+         result: Dict[str, Optional[str]] = {"gte": None, "lte": None}
+
+         if interval is None:
+             return result
+
+         if isinstance(interval, str):
+             if "/" in interval:
+                 parts = interval.split("/")
+                 result["gte"] = parts[0] if parts[0] != ".." else None
+                 result["lte"] = (
+                     parts[1] if len(parts) > 1 and parts[1] != ".." else None
+                 )
+             else:
+                 converted_time = interval if interval != ".." else None
+                 result["gte"] = result["lte"] = converted_time
+             return result
+
+         if isinstance(interval, datetime_type):
+             datetime_iso = interval.isoformat()
+             result["gte"] = result["lte"] = datetime_iso
+         elif isinstance(interval, tuple):
+             start, end = interval
+             # Ensure datetimes are converted to UTC and formatted with 'Z'
+             if start:
+                 result["gte"] = start.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z"
+             if end:
+                 result["lte"] = end.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z"
+
+         return result
+
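+     # Behavior sketch (illustrative):
+     #
+     #     _return_date("2023-01-01/2023-06-30") -> {"gte": "2023-01-01", "lte": "2023-06-30"}
+     #     _return_date("2023-01-01/..")         -> {"gte": "2023-01-01", "lte": None}
+     #     _return_date(None)                    -> {"gte": None, "lte": None}
+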
+     def frequency_agg(self, es_aggs, name, data_type):
+         """Format an Elasticsearch frequency distribution aggregation."""
+         buckets = []
+         for bucket in es_aggs.get(name, {}).get("buckets", []):
+             bucket_data = {
+                 "key": bucket.get("key_as_string") or bucket.get("key"),
+                 "data_type": data_type,
+                 "frequency": bucket.get("doc_count"),
+                 "to": bucket.get("to"),
+                 "from": bucket.get("from"),
+             }
+             buckets.append(bucket_data)
+         return Aggregation(
+             name=name,
+             data_type="frequency_distribution",
+             overflow=es_aggs.get(name, {}).get("sum_other_doc_count", 0),
+             buckets=buckets,
+         )
+
+     def metric_agg(self, es_aggs, name, data_type):
+         """Format an Elasticsearch metric aggregation."""
+         value = es_aggs.get(name, {}).get("value_as_string") or es_aggs.get(
+             name, {}
+         ).get("value")
+         # ES 7.x does not return datetimes with a 'value_as_string' field
+         if "datetime" in name and isinstance(value, float):
+             value = datetime_to_str(datetime.fromtimestamp(value / 1e3))
+         return Aggregation(
+             name=name,
+             data_type=data_type,
+             value=value,
+         )
+
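+     # Illustrative input/output (hypothetical Elasticsearch response fragment;
+     # fromtimestamp uses the system clock, so the value shown assumes UTC):
+     #
+     #     es_aggs = {"datetime_max": {"value": 1672531200000.0}}
+     #     metric_agg(es_aggs, "datetime_max", "datetime")
+     #     -> Aggregation(name="datetime_max", data_type="datetime",
+     #                    value="2023-01-01T00:00:00Z")
+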
+     def get_filter(self, filter, filter_lang):
+         """Normalize the filter parameter to cql2-json, accepting cql2-json or cql2-text input."""
+         if filter_lang == "cql2-text":
+             return orjson.loads(to_cql2(parse_cql2_text(filter)))
+         elif filter_lang == "cql2-json":
+             if isinstance(filter, str):
+                 return orjson.loads(unquote_plus(filter))
+             else:
+                 return filter
+         else:
+             raise HTTPException(
+                 status_code=400,
+                 detail=f"Unknown filter-lang: {filter_lang}. Only cql2-json or cql2-text are supported.",
+             )
+
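+     # Behavior sketch (illustrative): a cql2-text expression is parsed with
+     # pygeofilter and converted to a cql2-json dict; a URL-encoded cql2-json
+     # string is unquoted and deserialized; a dict passes through unchanged.
+     #
+     #     get_filter('{"op": "=", "args": [...]}', "cql2-json")  # hypothetical payload
+     #     -> {"op": "=", "args": [...]}
+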
+     def _format_datetime_range(self, date_tuple: DateTimeType) -> str:
+         """
+         Convert a tuple of datetime objects or None into a formatted string for API requests.
+
+         Args:
+             date_tuple (tuple): A tuple containing two elements, each of which can be a datetime object or None.
+
+         Returns:
+             str: A string formatted as 'YYYY-MM-DDTHH:MM:SS.sssZ/YYYY-MM-DDTHH:MM:SS.sssZ', with '..' used if any element is None.
+         """
+
+         def format_datetime(dt):
+             """Format a single datetime object to the ISO 8601 extended format with 'Z'."""
+             return dt.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z" if dt else ".."
+
+         start, end = date_tuple
+         return f"{format_datetime(start)}/{format_datetime(end)}"
+
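+     # Behavior sketch (illustrative):
+     #
+     #     _format_datetime_range((datetime(2023, 1, 1), None))
+     #     -> "2023-01-01T00:00:00.000Z/.."
+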
+     async def aggregate(
+         self,
+         aggregate_request: Optional[EsAggregationExtensionPostRequest] = None,
+         collection_id: Optional[
+             Annotated[str, Path(description="Collection ID")]
+         ] = None,
+         collections: Optional[List[str]] = [],
+         datetime: Optional[DateTimeType] = None,
+         intersects: Optional[str] = None,
+         filter_lang: Optional[str] = None,
+         filter_expr: Optional[str] = None,
+         aggregations: Optional[str] = None,
+         ids: Optional[List[str]] = None,
+         bbox: Optional[BBox] = None,
+         centroid_geohash_grid_frequency_precision: Optional[int] = None,
+         centroid_geohex_grid_frequency_precision: Optional[int] = None,
+         centroid_geotile_grid_frequency_precision: Optional[int] = None,
+         geometry_geohash_grid_frequency_precision: Optional[int] = None,
+         geometry_geotile_grid_frequency_precision: Optional[int] = None,
+         datetime_frequency_interval: Optional[str] = None,
+         **kwargs,
+     ) -> Union[Dict, Exception]:
+         """Get aggregations from the database."""
+         request: Request = kwargs["request"]
+         base_url = str(request.base_url)
+         path = request.url.path
+         search = self.database.make_search()
+
+         if aggregate_request is None:
+             base_args = {
+                 "collections": collections,
+                 "ids": ids,
+                 "bbox": bbox,
+                 "aggregations": aggregations,
+                 "centroid_geohash_grid_frequency_precision": centroid_geohash_grid_frequency_precision,
+                 "centroid_geohex_grid_frequency_precision": centroid_geohex_grid_frequency_precision,
+                 "centroid_geotile_grid_frequency_precision": centroid_geotile_grid_frequency_precision,
+                 "geometry_geohash_grid_frequency_precision": geometry_geohash_grid_frequency_precision,
+                 "geometry_geotile_grid_frequency_precision": geometry_geotile_grid_frequency_precision,
+                 "datetime_frequency_interval": datetime_frequency_interval,
+             }
+
+             if collection_id:
+                 collections = [str(collection_id)]
+                 # Rebinding 'collections' alone would not update the dict built above
+                 base_args["collections"] = collections
+
+             if intersects:
+                 base_args["intersects"] = orjson.loads(unquote_plus(intersects))
+
+             if datetime:
+                 base_args["datetime"] = self._format_datetime_range(datetime)
+
+             if filter_expr:
+                 base_args["filter"] = self.get_filter(filter_expr, filter_lang)
+             aggregate_request = EsAggregationExtensionPostRequest(**base_args)
+         else:
+             # Workaround for the optional path parameter in POST requests
+             if "collections" in path:
+                 collection_id = path.split("/")[2]
+
+             filter_lang = "cql2-json"
+             if aggregate_request.filter_expr:
+                 aggregate_request.filter_expr = self.get_filter(
+                     aggregate_request.filter_expr, filter_lang
+                 )
+
+         if collection_id:
+             if aggregate_request.collections:
+                 raise HTTPException(
+                     status_code=400,
+                     detail="Cannot query multiple collections when executing '/collections/<collection_id>/aggregate'. Use '/aggregate' and the collections field instead",
+                 )
+             else:
+                 aggregate_request.collections = [collection_id]
+
+         if (
+             aggregate_request.aggregations is None
+             or aggregate_request.aggregations == []
+         ):
+             raise HTTPException(
+                 status_code=400,
+                 detail="No 'aggregations' found. Use '/aggregations' to return available aggregations",
+             )
+
+         if aggregate_request.ids:
+             search = self.database.apply_ids_filter(
+                 search=search, item_ids=aggregate_request.ids
+             )
+
+         if aggregate_request.datetime:
+             datetime_search = self._return_date(aggregate_request.datetime)
+             search = self.database.apply_datetime_filter(
+                 search=search, datetime_search=datetime_search
+             )
+
+         if aggregate_request.bbox:
+             bbox = aggregate_request.bbox
+             if len(bbox) == 6:
+                 # Reduce a 3D bbox to 2D by dropping the min/max elevation values
+                 bbox = [bbox[0], bbox[1], bbox[3], bbox[4]]
+
+             search = self.database.apply_bbox_filter(search=search, bbox=bbox)
+
+         if aggregate_request.intersects:
+             search = self.database.apply_intersects_filter(
+                 search=search, intersects=aggregate_request.intersects
+             )
+
+         if aggregate_request.collections:
+             search = self.database.apply_collections_filter(
+                 search=search, collection_ids=aggregate_request.collections
+             )
+             # Validate that the requested aggregations are supported by every collection
+             for collection_id in aggregate_request.collections:
+                 aggs = await self.get_aggregations(
+                     collection_id=collection_id, request=request
+                 )
+                 supported_aggregations = (
+                     aggs["aggregations"] + self.DEFAULT_AGGREGATIONS
+                 )
+
+                 for agg_name in aggregate_request.aggregations:
+                     if agg_name not in {x["name"] for x in supported_aggregations}:
+                         raise HTTPException(
+                             status_code=400,
+                             detail=f"Aggregation {agg_name} not supported by collection {collection_id}",
+                         )
+         else:
+             # Validate that the requested aggregations are supported by the catalog
+             aggs = await self.get_aggregations(request=request)
+             supported_aggregations = aggs["aggregations"]
+             for agg_name in aggregate_request.aggregations:
+                 if agg_name not in [x["name"] for x in supported_aggregations]:
+                     raise HTTPException(
+                         status_code=400,
+                         detail=f"Aggregation {agg_name} not supported at catalog level",
+                     )
+
+         if aggregate_request.filter_expr:
+             try:
+                 search = self.database.apply_cql2_filter(
+                     search, aggregate_request.filter_expr
+                 )
+             except Exception as e:
+                 raise HTTPException(
+                     status_code=400, detail=f"Error with cql2 filter: {e}"
+                 )
+
+         centroid_geohash_grid_precision = self.extract_precision(
+             aggregate_request.centroid_geohash_grid_frequency_precision,
+             1,
+             self.MAX_GEOHASH_PRECISION,
+         )
+
+         centroid_geohex_grid_precision = self.extract_precision(
+             aggregate_request.centroid_geohex_grid_frequency_precision,
+             0,
+             self.MAX_GEOHEX_PRECISION,
+         )
+
+         centroid_geotile_grid_precision = self.extract_precision(
+             aggregate_request.centroid_geotile_grid_frequency_precision,
+             0,
+             self.MAX_GEOTILE_PRECISION,
+         )
+
+         geometry_geohash_grid_precision = self.extract_precision(
+             aggregate_request.geometry_geohash_grid_frequency_precision,
+             1,
+             self.MAX_GEOHASH_PRECISION,
+         )
+
+         geometry_geotile_grid_precision = self.extract_precision(
+             aggregate_request.geometry_geotile_grid_frequency_precision,
+             0,
+             self.MAX_GEOTILE_PRECISION,
+         )
+
+         datetime_frequency_interval = self.extract_date_histogram_interval(
+             aggregate_request.datetime_frequency_interval,
+         )
+
+         db_response = None
+         try:
+             db_response = await self.database.aggregate(
+                 collections,
+                 aggregate_request.aggregations,
+                 search,
+                 centroid_geohash_grid_precision,
+                 centroid_geohex_grid_precision,
+                 centroid_geotile_grid_precision,
+                 geometry_geohash_grid_precision,
+                 geometry_geotile_grid_precision,
+                 datetime_frequency_interval,
+             )
+         except Exception as error:
+             # Swallow only IndexError; re-raise anything else
+             if not isinstance(error, IndexError):
+                 raise error
+         aggs = []
+         if db_response:
+             result_aggs = db_response.get("aggregations", {})
+             # De-duplicate the aggregation definitions before formatting the response
+             for agg in {
+                 frozenset(item.items()): item
+                 for item in supported_aggregations + self.GEO_POINT_AGGREGATIONS
+             }.values():
+                 if agg["name"] in aggregate_request.aggregations:
+                     if agg["name"].endswith("_frequency"):
+                         aggs.append(
+                             self.frequency_agg(
+                                 result_aggs, agg["name"], agg["data_type"]
+                             )
+                         )
+                     else:
+                         aggs.append(
+                             self.metric_agg(result_aggs, agg["name"], agg["data_type"])
+                         )
545
+ links = [
546
+ {"rel": "root", "type": "application/json", "href": base_url},
547
+ ]
548
+
549
+ if collection_id:
550
+ collection_endpoint = urljoin(base_url, f"collections/{collection_id}")
551
+ links.extend(
552
+ [
553
+ {
554
+ "rel": "collection",
555
+ "type": "application/json",
556
+ "href": collection_endpoint,
557
+ },
558
+ {
559
+ "rel": "self",
560
+ "type": "application/json",
561
+ "href": urljoin(collection_endpoint, "aggregate"),
562
+ },
563
+ ]
564
+ )
565
+ else:
566
+ links.append(
567
+ {
568
+ "rel": "self",
569
+ "type": "application/json",
570
+ "href": urljoin(base_url, "aggregate"),
571
+ }
572
+ )
573
+ results = AggregationCollection(
574
+ type="AggregationCollection", aggregations=aggs, links=links
575
+ )
576
+
577
+ return results
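+
+
+ # Wiring sketch (illustrative, not part of the released module; the 'database',
+ # 'session', and 'settings' objects are assumed to come from a concrete backend
+ # implementing BaseDatabaseLogic):
+ #
+ #     from stac_fastapi.extensions.core import AggregationExtension
+ #
+ #     aggregation_extension = AggregationExtension(
+ #         client=EsAsyncAggregationClient(
+ #             database=database, session=session, settings=settings
+ #         )
+ #     )
+ #     aggregation_extension.POST = EsAggregationExtensionPostRequest
+ #     aggregation_extension.GET = EsAggregationExtensionGetRequest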
@@ -0,0 +1,41 @@
+ """Fields extension."""
+
+ from typing import Optional, Set
+
+ from pydantic import BaseModel, Field
+
+ from stac_fastapi.extensions.core import FieldsExtension as FieldsExtensionBase
+ from stac_fastapi.extensions.core.fields import request
+
+
+ class PostFieldsExtension(request.PostFieldsExtension):
+     """PostFieldsExtension."""
+
+     # Set defaults if needed
+     # include: Optional[Set[str]] = Field(
+     #     default_factory=lambda: {
+     #         "id",
+     #         "type",
+     #         "stac_version",
+     #         "geometry",
+     #         "bbox",
+     #         "links",
+     #         "assets",
+     #         "properties.datetime",
+     #         "collection",
+     #     }
+     # )
+     include: Optional[Set[str]] = set()
+     exclude: Optional[Set[str]] = set()
+
+
+ class FieldsExtensionPostRequest(BaseModel):
+     """Additional fields and schema for the POST request."""
+
+     fields: Optional[PostFieldsExtension] = Field(PostFieldsExtension())
+
+
+ class FieldsExtension(FieldsExtensionBase):
+     """Override the POST model."""
+
+     POST = FieldsExtensionPostRequest
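+
+
+ # Usage sketch (illustrative only): register the override so POST /search
+ # requests validate the "fields" parameter with this model; the surrounding
+ # StacApi setup is assumed and not shown in this package diff:
+ #
+ #     extensions = [FieldsExtension(), ...]
+ #     # pass 'extensions' to StacApi when constructing the application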