panther-3.8.2-py3-none-any.whl → panther-4.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. panther/__init__.py +1 -1
  2. panther/_load_configs.py +168 -171
  3. panther/_utils.py +26 -49
  4. panther/app.py +85 -105
  5. panther/authentications.py +86 -55
  6. panther/background_tasks.py +25 -14
  7. panther/base_request.py +38 -14
  8. panther/base_websocket.py +172 -94
  9. panther/caching.py +60 -25
  10. panther/cli/create_command.py +20 -10
  11. panther/cli/monitor_command.py +63 -37
  12. panther/cli/template.py +40 -20
  13. panther/cli/utils.py +32 -18
  14. panther/configs.py +65 -58
  15. panther/db/connections.py +139 -0
  16. panther/db/cursor.py +43 -0
  17. panther/db/models.py +64 -29
  18. panther/db/queries/__init__.py +1 -1
  19. panther/db/queries/base_queries.py +127 -0
  20. panther/db/queries/mongodb_queries.py +77 -38
  21. panther/db/queries/pantherdb_queries.py +59 -30
  22. panther/db/queries/queries.py +232 -117
  23. panther/db/utils.py +17 -18
  24. panther/events.py +44 -0
  25. panther/exceptions.py +26 -12
  26. panther/file_handler.py +2 -2
  27. panther/generics.py +163 -0
  28. panther/logging.py +7 -2
  29. panther/main.py +111 -188
  30. panther/middlewares/base.py +3 -0
  31. panther/monitoring.py +8 -5
  32. panther/pagination.py +48 -0
  33. panther/panel/apis.py +32 -5
  34. panther/panel/urls.py +2 -1
  35. panther/permissions.py +3 -3
  36. panther/request.py +6 -13
  37. panther/response.py +114 -34
  38. panther/routings.py +83 -66
  39. panther/serializer.py +214 -33
  40. panther/test.py +31 -21
  41. panther/utils.py +28 -16
  42. panther/websocket.py +7 -4
  43. {panther-3.8.2.dist-info → panther-4.0.0.dist-info}/METADATA +93 -71
  44. panther-4.0.0.dist-info/RECORD +57 -0
  45. {panther-3.8.2.dist-info → panther-4.0.0.dist-info}/WHEEL +1 -1
  46. panther/db/connection.py +0 -92
  47. panther/middlewares/db.py +0 -18
  48. panther/middlewares/redis.py +0 -47
  49. panther-3.8.2.dist-info/RECORD +0 -54
  50. {panther-3.8.2.dist-info → panther-4.0.0.dist-info}/LICENSE +0 -0
  51. {panther-3.8.2.dist-info → panther-4.0.0.dist-info}/entry_points.txt +0 -0
  52. {panther-3.8.2.dist-info → panther-4.0.0.dist-info}/top_level.txt +0 -0
panther/response.py CHANGED
@@ -1,11 +1,20 @@
+import asyncio
 from types import NoneType
+from typing import Generator, AsyncGenerator

 import orjson as json
 from pydantic import BaseModel as PydanticBaseModel
 from pydantic._internal._model_construction import ModelMetaclass

-ResponseDataTypes = list | tuple | set | dict | int | float | str | bool | bytes | NoneType | ModelMetaclass
-IterableDataTypes = list | tuple | set
+from panther import status
+from panther._utils import to_async_generator
+from panther.db.cursor import Cursor
+from pantherdb import Cursor as PantherDBCursor
+from panther.monitoring import Monitoring
+
+ResponseDataTypes = list | tuple | set | Cursor | PantherDBCursor | dict | int | float | str | bool | bytes | NoneType | ModelMetaclass
+IterableDataTypes = list | tuple | set | Cursor | PantherDBCursor
+StreamingDataTypes = Generator | AsyncGenerator


 class Response:
@@ -15,76 +24,96 @@ class Response:
         self,
         data: ResponseDataTypes = None,
         headers: dict | None = None,
-        status_code: int = 200,
+        status_code: int = status.HTTP_200_OK,
     ):
         """
-        :param data: should be int | float | dict | list | tuple | set | str | bool | bytes | NoneType
-            or instance of Pydantic.BaseModel
+        :param data: should be an instance of ResponseDataTypes
+        :param headers: should be dict of headers
         :param status_code: should be int
         """
-        self.data = self._clean_data_type(data)
-        self._check_status_code(status_code)
-        self._headers = headers
+        self.headers = headers or {}
+        self.data = self.prepare_data(data=data)
+        self.status_code = self.check_status_code(status_code=status_code)

     @property
     def body(self) -> bytes:
         if isinstance(self.data, bytes):
             return self.data
-        else:
-            return json.dumps(self.data)
+
+        if self.data is None:
+            return b''
+        return json.dumps(self.data)

     @property
     def headers(self) -> dict:
-        content_length = 0 if self.body == b'null' else len(self.body)
         return {
-            'content-type': self.content_type,
-            'content-length': content_length,
-            'access-control-allow-origin': '*',
-        } | (self._headers or {})
+            'Content-Type': self.content_type,
+            'Content-Length': len(self.body),
+            'Access-Control-Allow-Origin': '*',
+        } | self._headers

-    def _clean_data_type(self, data: any):
-        """Make sure the response data is only ResponseDataTypes or Iterable of ResponseDataTypes"""
-        if issubclass(type(data), PydanticBaseModel):
-            return data.model_dump()
+    @property
+    def bytes_headers(self) -> list[list[bytes]]:
+        return [[k.encode(), str(v).encode()] for k, v in (self.headers or {}).items()]

-        elif isinstance(data, IterableDataTypes):
-            return [self._clean_data_type(d) for d in data]
+    @headers.setter
+    def headers(self, headers: dict):
+        self._headers = headers
+
+    def prepare_data(self, data: any):
+        """Make sure the response data is only ResponseDataTypes or Iterable of ResponseDataTypes"""
+        if isinstance(data, (int | float | str | bool | bytes | NoneType)):
+            return data

         elif isinstance(data, dict):
-            return {key: self._clean_data_type(value) for key, value in data.items()}
+            return {key: self.prepare_data(value) for key, value in data.items()}

-        elif isinstance(data, (int | float | str | bool | bytes | NoneType)):
-            return data
+        elif issubclass(type(data), PydanticBaseModel):
+            return data.model_dump()
+
+        elif isinstance(data, IterableDataTypes):
+            return [self.prepare_data(d) for d in data]

         else:
             msg = f'Invalid Response Type: {type(data)}'
             raise TypeError(msg)

-    def _check_status_code(self, status_code: any):
+    @classmethod
+    def check_status_code(cls, status_code: any):
         if not isinstance(status_code, int):
-            error = f'Response "status_code" Should Be "int". ("{status_code}" is {type(status_code)})'
+            error = f'Response `status_code` Should Be `int`. (`{status_code}` is {type(status_code)})'
             raise TypeError(error)
-
-        self.status_code = status_code
-
-    def _clean_data_with_output_model(self, output_model: ModelMetaclass | None):
-        if self.data and output_model:
-            self.data = self._serialize_with_output_model(self.data, output_model=output_model)
+        return status_code

     @classmethod
-    def _serialize_with_output_model(cls, data: any, /, output_model: ModelMetaclass):
+    def apply_output_model(cls, data: any, /, output_model: ModelMetaclass):
+        """This method is called in API.__call__"""
         # Dict
         if isinstance(data, dict):
+            for field_name, field in output_model.model_fields.items():
+                if field.validation_alias and field_name in data:
+                    data[field.validation_alias] = data.pop(field_name)
             return output_model(**data).model_dump()

         # Iterable
         if isinstance(data, IterableDataTypes):
-            return [cls._serialize_with_output_model(d, output_model=output_model) for d in data]
+            return [cls.apply_output_model(d, output_model=output_model) for d in data]

         # Str | Bool | Bytes
         msg = 'Type of Response data is not match with `output_model`.\n*hint: You may want to remove `output_model`'
         raise TypeError(msg)

+    async def send_headers(self, send, /):
+        await send({'type': 'http.response.start', 'status': self.status_code, 'headers': self.bytes_headers})
+
+    async def send_body(self, send, receive, /):
+        await send({'type': 'http.response.body', 'body': self.body, 'more_body': False})
+
+    async def send(self, send, receive, /, monitoring: Monitoring):
+        await self.send_headers(send)
+        await self.send_body(send, receive)
+        await monitoring.after(self.status_code)
+
     def __str__(self):
         if len(data := str(self.data)) > 30:
             data = f'{data:.27}...'
@@ -93,6 +122,57 @@ class Response:
     __repr__ = __str__


+class StreamingResponse(Response):
+    content_type = 'application/octet-stream'
+
+    def __init__(self, *args, **kwargs):
+        self.connection_closed = False
+        super().__init__(*args, **kwargs)
+
+    async def listen_to_disconnection(self, receive):
+        message = await receive()
+        if message['type'] == 'http.disconnect':
+            self.connection_closed = True
+
+    def prepare_data(self, data: any) -> AsyncGenerator:
+        if isinstance(data, AsyncGenerator):
+            return data
+        elif isinstance(data, Generator):
+            return to_async_generator(data)
+        msg = f'Invalid Response Type: {type(data)}'
+        raise TypeError(msg)
+
+    @property
+    def headers(self) -> dict:
+        return {
+            'Content-Type': self.content_type,
+            'Access-Control-Allow-Origin': '*',
+        } | self._headers
+
+    @headers.setter
+    def headers(self, headers: dict):
+        self._headers = headers
+
+    @property
+    async def body(self) -> AsyncGenerator:
+        async for chunk in self.data:
+            if isinstance(chunk, bytes):
+                yield chunk
+            elif chunk is None:
+                yield b''
+            else:
+                yield json.dumps(chunk)
+
+    async def send_body(self, send, receive, /):
+        asyncio.create_task(self.listen_to_disconnection(receive))
+        async for chunk in self.body:
+            if self.connection_closed:
+                break
+            await send({'type': 'http.response.body', 'body': chunk, 'more_body': True})
+        else:
+            await send({'type': 'http.response.body', 'body': b'', 'more_body': False})
+
+
 class HTMLResponse(Response):
     content_type = 'text/html; charset=utf-8'

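Usage sketch (illustrative, not part of the wheel contents): the new StreamingResponse accepts a sync or async generator, streams each chunk as an ASGI body message with more_body=True, stops early when the client disconnects, and serializes non-bytes chunks with orjson. The endpoint and generator names below are assumptions made for the example.

    from panther.app import API
    from panther.response import StreamingResponse


    def number_lines():
        # A plain (sync) generator is accepted; StreamingResponse.prepare_data()
        # wraps it with to_async_generator(), per the diff above.
        for i in range(5):
            yield {'line': i}


    @API()
    async def stream_numbers():
        # Dict chunks go through json.dumps() (orjson) in StreamingResponse.body;
        # bytes chunks are sent as-is.
        return StreamingResponse(data=number_lines())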
panther/routings.py CHANGED
@@ -1,4 +1,3 @@
-import logging
 import re
 from collections import Counter
 from collections.abc import Callable, Mapping, MutableMapping
@@ -6,9 +5,7 @@ from copy import deepcopy
 from functools import partial, reduce

 from panther.configs import config
-
-
-logger = logging.getLogger('panther')
+from panther.exceptions import PantherError


 def flatten_urls(urls: dict) -> dict:
@@ -28,20 +25,17 @@ def _flattening_urls(data: dict | Callable, url: str = ''):
         url = url.removeprefix('/')

         # Collect it, if it doesn't have problem
-        if _is_url_endpoint_valid(url=url, endpoint=data):
-            yield url, data
+        _is_url_endpoint_valid(url=url, endpoint=data)
+        yield url, data


-def _is_url_endpoint_valid(url: str, endpoint: Callable) -> bool:
+def _is_url_endpoint_valid(url: str, endpoint: Callable):
     if endpoint is ...:
-        logger.error(f"URL Can't Point To Ellipsis. ('{url}' -> ...)")
+        raise PantherError(f"URL Can't Point To Ellipsis. ('{url}' -> ...)")
     elif endpoint is None:
-        logger.error(f"URL Can't Point To None. ('{url}' -> None)")
+        raise PantherError(f"URL Can't Point To None. ('{url}' -> None)")
     elif url and not re.match(r'^[a-zA-Z<>0-9_/-]+$', url):
-        logger.error(f"URL Is Not Valid. --> '{url}'")
-    else:
-        return True
-    return False
+        raise PantherError(f"URL Is Not Valid. --> '{url}'")


 def finalize_urls(urls: dict) -> dict:
@@ -60,7 +54,33 @@ def finalize_urls(urls: dict) -> dict:
         else:
             path = {single_path: path or endpoint}
         urls_list.append(path)
-    return _merge(*urls_list) if urls_list else {}
+    final_urls = _merge(*urls_list) if urls_list else {}
+    check_urls_path_variables(final_urls)
+    return final_urls
+
+
+def check_urls_path_variables(urls: dict, path: str = '', ) -> None:
+    middle_route_error = []
+    last_route_error = []
+    for key, value in urls.items():
+        new_path = f'{path}/{key}'
+
+        if isinstance(value, dict):
+            if key.startswith('<'):
+                middle_route_error.append(new_path)
+            check_urls_path_variables(value, path=new_path)
+        elif key.startswith('<'):
+            last_route_error.append(new_path)
+
+    if len(middle_route_error) > 1:
+        msg = '\n\t- ' + '\n\t- '.join(e for e in middle_route_error)
+        raise PantherError(
+            f"URLs can't have same-level path variables that point to a dict: {msg}")
+
+    if len(last_route_error) > 1:
+        msg = '\n\t- ' + '\n\t- '.join(e for e in last_route_error)
+        raise PantherError(
+            f"URLs can't have same-level path variables that point to an endpoint: {msg}")


 def _merge(destination: MutableMapping, *sources) -> MutableMapping:
@@ -106,67 +126,64 @@ ENDPOINT_NOT_FOUND = (None, '')


 def find_endpoint(path: str) -> tuple[Callable | None, str]:
-    urls = config['urls']
+    urls = config.URLS

-    if (location := path.find('?')) != -1:
-        path = path[:location]
-    path = path.removesuffix('/').removeprefix('/')  # 'user/list'
-    paths = path.split('/')  # ['user', 'list']
-    paths_len = len(paths)
+    # 'user/list/?name=ali' --> 'user/list/' --> 'user/list' --> ['user', 'list']
+    parts = path.split('?')[0].strip('/').split('/')
+    paths_len = len(parts)

-    found_path = ''
-    for i, split_path in enumerate(paths):
+    found_path = []
+    for i, part in enumerate(parts):
         last_path = bool((i + 1) == paths_len)
-        found = urls.get(split_path)
-
-        # `found` is callable
-        if last_path and callable(found):
-            found_path += f'{split_path}/'
-            return found, found_path
-
-        # `found` is dict
-        if isinstance(found, dict):
-            found_path += f'{split_path}/'
-            if last_path and callable(endpoint := found.get('')):
-                return endpoint, found_path
-
-            urls = found
-            continue
-
-        # `found` is None
-        for key, value in urls.items():
-            if not key.startswith('<'):
-                continue
-
-            elif last_path:
-                if callable(value):
-                    found_path += f'{key}/'
-                    return value, found_path
-                elif isinstance(value, dict) and '' in value:
-                    found_path += f'{key}/'
-                    return value[''], found_path
+        found = urls.get(part)
+
+        if last_path:
+            # `found` is callable
+            if callable(found):
+                found_path.append(part)
+                return found, '/'.join(found_path)
+
+            # `found` is dict
+            if isinstance(found, dict) and (endpoint := found.get('')):
+                if callable(endpoint):
+                    found_path.append(part)
+                    return endpoint, '/'.join(found_path)
                 else:
                     return ENDPOINT_NOT_FOUND

-            elif isinstance(value, dict):
-                urls = value
-                found_path += f'{key}/'
-                break
+            # `found` is None
+            for key, value in urls.items():
+                if key.startswith('<'):
+                    if callable(value):
+                        found_path.append(key)
+                        return value, '/'.join(found_path)

-            else:
-                return ENDPOINT_NOT_FOUND
+                    elif isinstance(value, dict) and (endpoint := value.get('')):
+                        if callable(endpoint):
+                            found_path.append(key)
+                            return endpoint, '/'.join(found_path)
+                        else:
+                            return ENDPOINT_NOT_FOUND

-        else:
             return ENDPOINT_NOT_FOUND

-    return ENDPOINT_NOT_FOUND
+        # `found` is dict
+        elif isinstance(found, dict):
+            found_path.append(part)
+            urls = found
+            continue

+        # `found` is callable
+        elif callable(found):
+            return ENDPOINT_NOT_FOUND

-def collect_path_variables(request_path: str, found_path: str) -> dict:
-    found_path = found_path.removesuffix('/').removeprefix('/')
-    request_path = request_path.removesuffix('/').removeprefix('/')
-    path_variables = {}
-    for f_path, r_path in zip(found_path.split('/'), request_path.split('/')):
-        if f_path.startswith('<'):
-            path_variables[f_path[1:-1]] = r_path
-    return path_variables
+        else:
+            # `found` is None
+            for key, value in urls.items():
+                if key.startswith('<'):
+                    if isinstance(value, dict):
+                        found_path.append(key)
+                        urls = value
+                        break
+                    else:
+                        return ENDPOINT_NOT_FOUND
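Behaviour sketch (illustrative): invalid URL targets now raise PantherError at startup instead of being logged, and the new check_urls_path_variables() walks the finalized URL tree and rejects ambiguous same-level path variables. The endpoints and config below are assumptions made for the example.

    from panther.app import API


    @API()
    async def retrieve_user(user_id: str):
        ...


    @API()
    async def retrieve_profile(username: str):
        ...


    # Two same-level path variables that each point to an endpoint are now
    # rejected while the URL tree is finalized, raising:
    #   PantherError: URLs can't have same-level path variables that point to an endpoint: ...
    url_routing = {
        'user/<user_id>/': retrieve_user,
        'user/<username>/': retrieve_profile,
    }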
panther/serializer.py CHANGED
@@ -1,47 +1,228 @@
-from pydantic import create_model
+import typing
+from typing import TypeVar, Type
+
+from pydantic import create_model, BaseModel, ConfigDict
+from pydantic.fields import FieldInfo, Field
 from pydantic_core._pydantic_core import PydanticUndefined

+from panther.db import Model
+from panther.request import Request
+
+
+class MetaModelSerializer:
+    KNOWN_CONFIGS = ['model', 'fields', 'exclude', 'required_fields', 'optional_fields']

-class ModelSerializer:
-    def __new__(cls, *args, model=None, **kwargs):
-        # Check `metaclass`
-        if len(args) == 0:
-            address = f'{cls.__module__}.{cls.__name__}'
-            msg = f"you should not inherit the 'ModelSerializer', you should use it as 'metaclass' -> {address}"
-            raise TypeError(msg)
+    def __new__(
+        cls,
+        cls_name: str,
+        bases: tuple[type[typing.Any], ...],
+        namespace: dict[str, typing.Any],
+        **kwargs
+    ):
+        if cls_name == 'ModelSerializer':
+            # Put `model` and `request` to the main class with `create_model()`
+            namespace['__annotations__'].pop('model')
+            namespace['__annotations__'].pop('request')
+            cls.model_serializer = type(cls_name, (), namespace)
+            return super().__new__(cls)

-        model_name = args[0]
-        data = args[2]
-        address = f'{data["__module__"]}.{model_name}'
+        # 1. Initial Check
+        cls.check_config(cls_name=cls_name, namespace=namespace)
+        config = namespace.pop('Config')
+
+        # 2. Collect `Fields`
+        field_definitions = cls.collect_fields(config=config, namespace=namespace)
+
+        # 3. Collect `pydantic.model_config`
+        model_config = cls.collect_model_config(config=config, namespace=namespace)
+        namespace |= {'model_config': model_config}
+
+        # 4. Create a serializer
+        return create_model(
+            __model_name=cls_name,
+            __module__=namespace['__module__'],
+            __validators__=namespace,
+            __base__=(cls.model_serializer, BaseModel),
+            model=(typing.ClassVar[type[BaseModel]], config.model),
+            request=(Request, Field(None, exclude=True)),
+            **field_definitions
+        )
+
+    @classmethod
+    def check_config(cls, cls_name: str, namespace: dict) -> None:
+        module = namespace['__module__']
+        address = f'{module}.{cls_name}'
+
+        # Check `Config`
+        if (config := namespace.get('Config')) is None:
+            msg = f'`class Config` is required in {address}.'
+            raise AttributeError(msg) from None

         # Check `model`
-        if model is None:
-            msg = f"'model' required while using 'ModelSerializer' metaclass -> {address}"
-            raise AttributeError(msg)
+        if (model := getattr(config, 'model', None)) is None:
+            msg = f'`{cls_name}.Config.model` is required.'
+            raise AttributeError(msg) from None
+
+        # Check `model` type
+        try:
+            if not issubclass(model, Model):
+                msg = f'`{cls_name}.Config.model` is not subclass of `panther.db.Model`.'
+                raise AttributeError(msg) from None
+        except TypeError:
+            msg = f'`{cls_name}.Config.model` is not subclass of `panther.db.Model`.'
+            raise AttributeError(msg) from None
+
         # Check `fields`
-        if 'fields' not in data:
-            msg = f"'fields' required while using 'ModelSerializer' metaclass. -> {address}"
+        if not hasattr(config, 'fields'):
+            msg = f'`{cls_name}.Config.fields` is required.'
             raise AttributeError(msg) from None

-        model_fields = model.model_fields
-        field_definitions = {}
+        if config.fields != '*':
+            for field_name in config.fields:
+                if field_name == '*':
+                    msg = f"`{cls_name}.Config.fields.{field_name}` is not valid. Did you mean `fields = '*'`"
+                    raise AttributeError(msg) from None
+
+                if field_name not in model.model_fields:
+                    msg = f'`{cls_name}.Config.fields.{field_name}` is not in `{model.__name__}.model_fields`'
+                    raise AttributeError(msg) from None
+
+        # Check `required_fields`
+        if not hasattr(config, 'required_fields'):
+            config.required_fields = []
+
+        if config.required_fields != '*':
+            for required in config.required_fields:
+                if required not in config.fields:
+                    msg = f'`{cls_name}.Config.required_fields.{required}` should be in `Config.fields` too.'
+                    raise AttributeError(msg) from None

-        # Collect `fields`
-        for field_name in data['fields']:
-            if field_name not in model_fields:
-                msg = f"'{field_name}' is not in '{model.__name__}' -> {address}"
+        # Check `optional_fields`
+        if not hasattr(config, 'optional_fields'):
+            config.optional_fields = []
+
+        if config.optional_fields != '*':
+            for optional in config.optional_fields:
+                if optional not in config.fields:
+                    msg = f'`{cls_name}.Config.optional_fields.{optional}` should be in `Config.fields` too.'
+                    raise AttributeError(msg) from None
+
+        # Check `required_fields` and `optional_fields` together
+        if (
+            (config.optional_fields == '*' and config.required_fields != []) or
+            (config.required_fields == '*' and config.optional_fields != [])
+        ):
+            msg = (
+                f"`{cls_name}.Config.optional_fields` and "
+                f"`{cls_name}.Config.required_fields` can't include same fields at the same time"
+            )
+            raise AttributeError(msg) from None
+        for optional in config.optional_fields:
+            for required in config.required_fields:
+                if optional == required:
+                    msg = (
+                        f"`{optional}` can't be in `{cls_name}.Config.optional_fields` and "
+                        f"`{cls_name}.Config.required_fields` at the same time"
+                    )
+                    raise AttributeError(msg) from None
+
+        # Check `exclude`
+        if not hasattr(config, 'exclude'):
+            config.exclude = []
+
+        for field_name in config.exclude:
+            if field_name not in model.model_fields:
+                msg = f'`{cls_name}.Config.exclude.{field_name}` is not valid.'
                 raise AttributeError(msg) from None
-            field_definitions[field_name] = (model_fields[field_name].annotation, model_fields[field_name])

-        # Change `required_fields
-        for required in data.get('required_fields', []):
-            if required not in field_definitions:
-                msg = f"'{required}' is in 'required_fields' but not in 'fields' -> {address}"
+            if config.fields != '*' and field_name not in config.fields:
+                msg = f'`{cls_name}.Config.exclude.{field_name}` is not defined in `Config.fields`.'
                 raise AttributeError(msg) from None
-            field_definitions[required][1].default = PydanticUndefined

-        # Create Model
-        return create_model(
-            __model_name=model_name,
-            **field_definitions
-        )
+    @classmethod
+    def collect_fields(cls, config: typing.Callable, namespace: dict) -> dict:
+        field_definitions = {}
+
+        # Define `fields`
+        if config.fields == '*':
+            for field_name, field in config.model.model_fields.items():
+                field_definitions[field_name] = (field.annotation, field)
+        else:
+            for field_name in config.fields:
+                field_definitions[field_name] = (
+                    config.model.model_fields[field_name].annotation,
+                    config.model.model_fields[field_name]
+                )
+
+        # Apply `exclude`
+        for field_name in config.exclude:
+            del field_definitions[field_name]
+
+        # Apply `required_fields`
+        if config.required_fields == '*':
+            for value in field_definitions.values():
+                value[1].default = PydanticUndefined
+        else:
+            for field_name in config.required_fields:
+                field_definitions[field_name][1].default = PydanticUndefined
+
+        # Apply `optional_fields`
+        if config.optional_fields == '*':
+            for value in field_definitions.values():
+                value[1].default = value[0]()
+        else:
+            for field_name in config.optional_fields:
+                field_definitions[field_name][1].default = field_definitions[field_name][0]()
+
+        # Collect and Override `Class Fields`
+        for key, value in namespace.pop('__annotations__', {}).items():
+            field_info = namespace.pop(key, FieldInfo(annotation=value))
+            field_definitions[key] = (value, field_info)
+
+        return field_definitions
+
+    @classmethod
+    def collect_model_config(cls, config: typing.Callable, namespace: dict) -> dict:
+        return {
+            attr: getattr(config, attr) for attr in dir(config)
+            if not attr.startswith('__') and attr not in cls.KNOWN_CONFIGS
+        } | namespace.pop('model_config', {}) | {'arbitrary_types_allowed': True}
+
+
+class ModelSerializer(metaclass=MetaModelSerializer):
+    """
+    Doc:
+        https://pantherpy.github.io/serializer/#style-2-model-serializer
+    Example:
+        class PersonSerializer(ModelSerializer):
+            class Meta:
+                model = Person
+                fields = '*'
+                exclude = ['created_date']  # Optional
+                required_fields = ['first_name', 'last_name']  # Optional
+                optional_fields = ['age']  # Optional
+    """
+    model: type[BaseModel]
+    request: Request
+
+    async def create(self, validated_data: dict) -> Model:
+        """
+        validated_data = ModelSerializer.model_dump()
+        """
+        return await self.model.insert_one(validated_data)
+
+    async def update(self, instance: Model, validated_data: dict) -> Model:
+        """
+        instance = UpdateAPI.object()
+        validated_data = ModelSerializer.model_dump()
+        """
+        await instance.update(validated_data)
+        return instance
+
+    async def partial_update(self, instance: Model, validated_data: dict) -> Model:
+        """
+        instance = UpdateAPI.object()
+        validated_data = ModelSerializer.model_dump(exclude_none=True)
+        """
+        await instance.update(validated_data)
+        return instance
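Usage sketch (illustrative): a serializer is now declared with a nested `Config` class (the name MetaModelSerializer.check_config looks up in the class namespace), and the generated pydantic model carries async create()/update()/partial_update() helpers that forward to the model's query methods. The `User` model, its fields, and the `register()` helper below are assumptions made for the example; a configured database connection is required for create() to actually insert anything.

    from panther.db import Model
    from panther.serializer import ModelSerializer


    class User(Model):
        username: str
        age: int = 0


    class UserSerializer(ModelSerializer):
        class Config:
            model = User
            fields = ['username', 'age']
            required_fields = ['username']
            optional_fields = ['age']


    async def register(data: dict) -> User:
        # Validation happens on construction; create() forwards the validated
        # data to User.insert_one(), as defined in the diff above.
        serializer = UserSerializer(**data)
        return await serializer.create(serializer.model_dump())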