methodsdk-0.0.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. method_security/__init__.py +67 -0
  2. method_security/client.py +137 -0
  3. method_security/common/__init__.py +32 -0
  4. method_security/common/types/__init__.py +32 -0
  5. method_security/common/types/environment_id.py +3 -0
  6. method_security/core/__init__.py +103 -0
  7. method_security/core/api_error.py +23 -0
  8. method_security/core/client_wrapper.py +74 -0
  9. method_security/core/datetime_utils.py +28 -0
  10. method_security/core/file.py +67 -0
  11. method_security/core/force_multipart.py +18 -0
  12. method_security/core/http_client.py +543 -0
  13. method_security/core/http_response.py +55 -0
  14. method_security/core/jsonable_encoder.py +100 -0
  15. method_security/core/pydantic_utilities.py +258 -0
  16. method_security/core/query_encoder.py +58 -0
  17. method_security/core/remove_none_from_dict.py +11 -0
  18. method_security/core/request_options.py +35 -0
  19. method_security/core/serialization.py +276 -0
  20. method_security/issues/__init__.py +40 -0
  21. method_security/issues/client.py +107 -0
  22. method_security/issues/errors/__init__.py +32 -0
  23. method_security/issues/errors/issue_does_not_exist_error.py +11 -0
  24. method_security/issues/raw_client.py +118 -0
  25. method_security/issues/types/__init__.py +42 -0
  26. method_security/issues/types/issue.py +36 -0
  27. method_security/issues/types/issue_closed_reason.py +5 -0
  28. method_security/issues/types/issue_id.py +3 -0
  29. method_security/issues/types/issue_severity.py +5 -0
  30. method_security/issues/types/issue_status.py +5 -0
  31. method_security/objects/__init__.py +32 -0
  32. method_security/objects/types/__init__.py +32 -0
  33. method_security/objects/types/object_id.py +3 -0
  34. method_security/py.typed +0 -0
  35. method_security/version.py +3 -0
  36. methodsdk-0.0.3.dist-info/METADATA +182 -0
  37. methodsdk-0.0.3.dist-info/RECORD +38 -0
  38. methodsdk-0.0.3.dist-info/WHEEL +4 -0
method_security/core/pydantic_utilities.py
@@ -0,0 +1,258 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ # nopycln: file
+ import datetime as dt
+ from collections import defaultdict
+ from typing import Any, Callable, ClassVar, Dict, List, Mapping, Optional, Set, Tuple, Type, TypeVar, Union, cast
+
+ import pydantic
+
+ IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.")
+
+ if IS_PYDANTIC_V2:
+     from pydantic.v1.datetime_parse import parse_date as parse_date
+     from pydantic.v1.datetime_parse import parse_datetime as parse_datetime
+     from pydantic.v1.fields import ModelField as ModelField
+     from pydantic.v1.json import ENCODERS_BY_TYPE as encoders_by_type  # type: ignore[attr-defined]
+     from pydantic.v1.typing import get_args as get_args
+     from pydantic.v1.typing import get_origin as get_origin
+     from pydantic.v1.typing import is_literal_type as is_literal_type
+     from pydantic.v1.typing import is_union as is_union
+ else:
+     from pydantic.datetime_parse import parse_date as parse_date  # type: ignore[no-redef]
+     from pydantic.datetime_parse import parse_datetime as parse_datetime  # type: ignore[no-redef]
+     from pydantic.fields import ModelField as ModelField  # type: ignore[attr-defined, no-redef]
+     from pydantic.json import ENCODERS_BY_TYPE as encoders_by_type  # type: ignore[no-redef]
+     from pydantic.typing import get_args as get_args  # type: ignore[no-redef]
+     from pydantic.typing import get_origin as get_origin  # type: ignore[no-redef]
+     from pydantic.typing import is_literal_type as is_literal_type  # type: ignore[no-redef]
+     from pydantic.typing import is_union as is_union  # type: ignore[no-redef]
+
+ from .datetime_utils import serialize_datetime
+ from .serialization import convert_and_respect_annotation_metadata
+ from typing_extensions import TypeAlias
+
+ T = TypeVar("T")
+ Model = TypeVar("Model", bound=pydantic.BaseModel)
+
+
+ def parse_obj_as(type_: Type[T], object_: Any) -> T:
+     dealiased_object = convert_and_respect_annotation_metadata(object_=object_, annotation=type_, direction="read")
+     if IS_PYDANTIC_V2:
+         adapter = pydantic.TypeAdapter(type_)  # type: ignore[attr-defined]
+         return adapter.validate_python(dealiased_object)
+     return pydantic.parse_obj_as(type_, dealiased_object)
+
+
+ def to_jsonable_with_fallback(obj: Any, fallback_serializer: Callable[[Any], Any]) -> Any:
+     if IS_PYDANTIC_V2:
+         from pydantic_core import to_jsonable_python
+
+         return to_jsonable_python(obj, fallback=fallback_serializer)
+     return fallback_serializer(obj)
+
+
+ class UniversalBaseModel(pydantic.BaseModel):
+     if IS_PYDANTIC_V2:
+         model_config: ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(  # type: ignore[typeddict-unknown-key]
+             # Allow fields beginning with `model_` to be used in the model
+             protected_namespaces=(),
+         )
+
+         @pydantic.model_serializer(mode="plain", when_used="json")  # type: ignore[attr-defined]
+         def serialize_model(self) -> Any:  # type: ignore[name-defined]
+             serialized = self.model_dump()  # type: ignore[attr-defined]
+             data = {k: serialize_datetime(v) if isinstance(v, dt.datetime) else v for k, v in serialized.items()}
+             return data
+
+     else:
+
+         class Config:
+             smart_union = True
+             json_encoders = {dt.datetime: serialize_datetime}
+
+     @classmethod
+     def model_construct(cls: Type["Model"], _fields_set: Optional[Set[str]] = None, **values: Any) -> "Model":
+         dealiased_object = convert_and_respect_annotation_metadata(object_=values, annotation=cls, direction="read")
+         return cls.construct(_fields_set, **dealiased_object)
+
+     @classmethod
+     def construct(cls: Type["Model"], _fields_set: Optional[Set[str]] = None, **values: Any) -> "Model":
+         dealiased_object = convert_and_respect_annotation_metadata(object_=values, annotation=cls, direction="read")
+         if IS_PYDANTIC_V2:
+             return super().model_construct(_fields_set, **dealiased_object)  # type: ignore[misc]
+         return super().construct(_fields_set, **dealiased_object)
+
+     def json(self, **kwargs: Any) -> str:
+         kwargs_with_defaults = {
+             "by_alias": True,
+             "exclude_unset": True,
+             **kwargs,
+         }
+         if IS_PYDANTIC_V2:
+             return super().model_dump_json(**kwargs_with_defaults)  # type: ignore[misc]
+         return super().json(**kwargs_with_defaults)
+
+     def dict(self, **kwargs: Any) -> Dict[str, Any]:
+         """
+         Override the default dict method to `exclude_unset` by default. This function patches
+         `exclude_unset` to work include fields within non-None default values.
+         """
+         # Note: the logic here is multiplexed given the levers exposed in Pydantic V1 vs V2
+         # Pydantic V1's .dict can be extremely slow, so we do not want to call it twice.
+         #
+         # We'd ideally do the same for Pydantic V2, but it shells out to a library to serialize models
+         # that we have less control over, and this is less intrusive than custom serializers for now.
+         if IS_PYDANTIC_V2:
+             kwargs_with_defaults_exclude_unset = {
+                 **kwargs,
+                 "by_alias": True,
+                 "exclude_unset": True,
+                 "exclude_none": False,
+             }
+             kwargs_with_defaults_exclude_none = {
+                 **kwargs,
+                 "by_alias": True,
+                 "exclude_none": True,
+                 "exclude_unset": False,
+             }
+             dict_dump = deep_union_pydantic_dicts(
+                 super().model_dump(**kwargs_with_defaults_exclude_unset),  # type: ignore[misc]
+                 super().model_dump(**kwargs_with_defaults_exclude_none),  # type: ignore[misc]
+             )
+
+         else:
+             _fields_set = self.__fields_set__.copy()
+
+             fields = _get_model_fields(self.__class__)
+             for name, field in fields.items():
+                 if name not in _fields_set:
+                     default = _get_field_default(field)
+
+                     # If the default values are non-null act like they've been set
+                     # This effectively allows exclude_unset to work like exclude_none where
+                     # the latter passes through intentionally set none values.
+                     if default is not None or ("exclude_unset" in kwargs and not kwargs["exclude_unset"]):
+                         _fields_set.add(name)
+
+                         if default is not None:
+                             self.__fields_set__.add(name)
+
+             kwargs_with_defaults_exclude_unset_include_fields = {
+                 "by_alias": True,
+                 "exclude_unset": True,
+                 "include": _fields_set,
+                 **kwargs,
+             }
+
+             dict_dump = super().dict(**kwargs_with_defaults_exclude_unset_include_fields)
+
+         return cast(
+             Dict[str, Any],
+             convert_and_respect_annotation_metadata(object_=dict_dump, annotation=self.__class__, direction="write"),
+         )
+
+
+ def _union_list_of_pydantic_dicts(source: List[Any], destination: List[Any]) -> List[Any]:
+     converted_list: List[Any] = []
+     for i, item in enumerate(source):
+         destination_value = destination[i]
+         if isinstance(item, dict):
+             converted_list.append(deep_union_pydantic_dicts(item, destination_value))
+         elif isinstance(item, list):
+             converted_list.append(_union_list_of_pydantic_dicts(item, destination_value))
+         else:
+             converted_list.append(item)
+     return converted_list
+
+
+ def deep_union_pydantic_dicts(source: Dict[str, Any], destination: Dict[str, Any]) -> Dict[str, Any]:
+     for key, value in source.items():
+         node = destination.setdefault(key, {})
+         if isinstance(value, dict):
+             deep_union_pydantic_dicts(value, node)
+         # Note: we do not do this same processing for sets given we do not have sets of models
+         # and given the sets are unordered, the processing of the set and matching objects would
+         # be non-trivial.
+         elif isinstance(value, list):
+             destination[key] = _union_list_of_pydantic_dicts(value, node)
+         else:
+             destination[key] = value
+
+     return destination
+
+
+ if IS_PYDANTIC_V2:
+
+     class V2RootModel(UniversalBaseModel, pydantic.RootModel):  # type: ignore[misc, name-defined, type-arg]
+         pass
+
+     UniversalRootModel: TypeAlias = V2RootModel  # type: ignore[misc]
+ else:
+     UniversalRootModel: TypeAlias = UniversalBaseModel  # type: ignore[misc, no-redef]
+
+
+ def encode_by_type(o: Any) -> Any:
+     encoders_by_class_tuples: Dict[Callable[[Any], Any], Tuple[Any, ...]] = defaultdict(tuple)
+     for type_, encoder in encoders_by_type.items():
+         encoders_by_class_tuples[encoder] += (type_,)
+
+     if type(o) in encoders_by_type:
+         return encoders_by_type[type(o)](o)
+     for encoder, classes_tuple in encoders_by_class_tuples.items():
+         if isinstance(o, classes_tuple):
+             return encoder(o)
+
+
+ def update_forward_refs(model: Type["Model"], **localns: Any) -> None:
+     if IS_PYDANTIC_V2:
+         model.model_rebuild(raise_errors=False)  # type: ignore[attr-defined]
+     else:
+         model.update_forward_refs(**localns)
+
+
+ # Mirrors Pydantic's internal typing
+ AnyCallable = Callable[..., Any]
+
+
+ def universal_root_validator(
+     pre: bool = False,
+ ) -> Callable[[AnyCallable], AnyCallable]:
+     def decorator(func: AnyCallable) -> AnyCallable:
+         if IS_PYDANTIC_V2:
+             return cast(AnyCallable, pydantic.model_validator(mode="before" if pre else "after")(func))  # type: ignore[attr-defined]
+         return cast(AnyCallable, pydantic.root_validator(pre=pre)(func))  # type: ignore[call-overload]
+
+     return decorator
+
+
+ def universal_field_validator(field_name: str, pre: bool = False) -> Callable[[AnyCallable], AnyCallable]:
+     def decorator(func: AnyCallable) -> AnyCallable:
+         if IS_PYDANTIC_V2:
+             return cast(AnyCallable, pydantic.field_validator(field_name, mode="before" if pre else "after")(func))  # type: ignore[attr-defined]
+         return cast(AnyCallable, pydantic.validator(field_name, pre=pre)(func))
+
+     return decorator
+
+
+ PydanticField = Union[ModelField, pydantic.fields.FieldInfo]
+
+
+ def _get_model_fields(model: Type["Model"]) -> Mapping[str, PydanticField]:
+     if IS_PYDANTIC_V2:
+         return cast(Mapping[str, PydanticField], model.model_fields)  # type: ignore[attr-defined]
+     return cast(Mapping[str, PydanticField], model.__fields__)
+
+
+ def _get_field_default(field: PydanticField) -> Any:
+     try:
+         value = field.get_default()  # type: ignore[union-attr]
+     except:
+         value = field.default
+     if IS_PYDANTIC_V2:
+         from pydantic_core import PydanticUndefined
+
+         if value == PydanticUndefined:
+             return None
+         return value
+     return value
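Reviewer note: a minimal sketch (not part of the package) of how this compatibility layer is meant to be used. The Account model below is hypothetical; only helpers defined in the hunk above are exercised.

    import datetime as dt

    from method_security.core.pydantic_utilities import UniversalBaseModel, parse_obj_as

    class Account(UniversalBaseModel):  # hypothetical example model, not shipped in the SDK
        name: str
        created_at: dt.datetime

    # parse_obj_as validates via pydantic.parse_obj_as on v1 and TypeAdapter on v2.
    account = parse_obj_as(Account, {"name": "acme", "created_at": "2024-01-01T00:00:00Z"})

    # .dict()/.json() default to by_alias=True and exclude_unset=True under both major versions.
    print(account.dict())
    print(account.json())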
method_security/core/query_encoder.py
@@ -0,0 +1,58 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ from typing import Any, Dict, List, Optional, Tuple
+
+ import pydantic
+
+
+ # Flattens dicts to be of the form {"key[subkey][subkey2]": value} where value is not a dict
+ def traverse_query_dict(dict_flat: Dict[str, Any], key_prefix: Optional[str] = None) -> List[Tuple[str, Any]]:
+     result = []
+     for k, v in dict_flat.items():
+         key = f"{key_prefix}[{k}]" if key_prefix is not None else k
+         if isinstance(v, dict):
+             result.extend(traverse_query_dict(v, key))
+         elif isinstance(v, list):
+             for arr_v in v:
+                 if isinstance(arr_v, dict):
+                     result.extend(traverse_query_dict(arr_v, key))
+                 else:
+                     result.append((key, arr_v))
+         else:
+             result.append((key, v))
+     return result
+
+
+ def single_query_encoder(query_key: str, query_value: Any) -> List[Tuple[str, Any]]:
+     if isinstance(query_value, pydantic.BaseModel) or isinstance(query_value, dict):
+         if isinstance(query_value, pydantic.BaseModel):
+             obj_dict = query_value.dict(by_alias=True)
+         else:
+             obj_dict = query_value
+         return traverse_query_dict(obj_dict, query_key)
+     elif isinstance(query_value, list):
+         encoded_values: List[Tuple[str, Any]] = []
+         for value in query_value:
+             if isinstance(value, pydantic.BaseModel) or isinstance(value, dict):
+                 if isinstance(value, pydantic.BaseModel):
+                     obj_dict = value.dict(by_alias=True)
+                 elif isinstance(value, dict):
+                     obj_dict = value
+
+                 encoded_values.extend(single_query_encoder(query_key, obj_dict))
+             else:
+                 encoded_values.append((query_key, value))
+
+         return encoded_values
+
+     return [(query_key, query_value)]
+
+
+ def encode_query(query: Optional[Dict[str, Any]]) -> Optional[List[Tuple[str, Any]]]:
+     if query is None:
+         return None
+
+     encoded_query = []
+     for k, v in query.items():
+         encoded_query.extend(single_query_encoder(k, v))
+     return encoded_query
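Reviewer note: an illustration of the deep-object flattening encode_query performs; the query values below are arbitrary examples.

    from method_security.core.query_encoder import encode_query

    params = {"filter": {"severity": "HIGH", "tags": ["aws", "iam"]}, "limit": 10}

    # Nested dicts become bracketed keys; list values repeat the same key per element.
    print(encode_query(params))
    # [('filter[severity]', 'HIGH'), ('filter[tags]', 'aws'), ('filter[tags]', 'iam'), ('limit', 10)]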
method_security/core/remove_none_from_dict.py
@@ -0,0 +1,11 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ from typing import Any, Dict, Mapping, Optional
+
+
+ def remove_none_from_dict(original: Mapping[str, Optional[Any]]) -> Dict[str, Any]:
+     new: Dict[str, Any] = {}
+     for key, value in original.items():
+         if value is not None:
+             new[key] = value
+     return new
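Reviewer note: a one-line illustration of this helper, which the HTTP layer presumably uses to drop unset values before building a request.

    from method_security.core.remove_none_from_dict import remove_none_from_dict

    print(remove_none_from_dict({"Authorization": "Bearer <token>", "X-Request-Id": None}))
    # {'Authorization': 'Bearer <token>'}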
method_security/core/request_options.py
@@ -0,0 +1,35 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import typing
+
+ try:
+     from typing import NotRequired  # type: ignore
+ except ImportError:
+     from typing_extensions import NotRequired
+
+
+ class RequestOptions(typing.TypedDict, total=False):
+     """
+     Additional options for request-specific configuration when calling APIs via the SDK.
+     This is used primarily as an optional final parameter for service functions.
+
+     Attributes:
+         - timeout_in_seconds: int. The number of seconds to await an API call before timing out.
+
+         - max_retries: int. The max number of retries to attempt if the API call fails.
+
+         - additional_headers: typing.Dict[str, typing.Any]. A dictionary containing additional parameters to spread into the request's header dict
+
+         - additional_query_parameters: typing.Dict[str, typing.Any]. A dictionary containing additional parameters to spread into the request's query parameters dict
+
+         - additional_body_parameters: typing.Dict[str, typing.Any]. A dictionary containing additional parameters to spread into the request's body parameters dict
+
+         - chunk_size: int. The size, in bytes, to process each chunk of data being streamed back within the response. This equates to leveraging `chunk_size` within `requests` or `httpx`, and is only leveraged for file downloads.
+     """
+
+     timeout_in_seconds: NotRequired[int]
+     max_retries: NotRequired[int]
+     additional_headers: NotRequired[typing.Dict[str, typing.Any]]
+     additional_query_parameters: NotRequired[typing.Dict[str, typing.Any]]
+     additional_body_parameters: NotRequired[typing.Dict[str, typing.Any]]
+     chunk_size: NotRequired[int]
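Reviewer note: a sketch of constructing per-call overrides with this TypedDict. Which generated service methods accept a request_options parameter is defined in the client files listed above and not shown in this diff.

    from method_security.core.request_options import RequestOptions

    options: RequestOptions = {
        "timeout_in_seconds": 30,
        "max_retries": 2,
        "additional_headers": {"X-Debug": "true"},  # merged into the outgoing header dict
    }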
method_security/core/serialization.py
@@ -0,0 +1,276 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import collections
+ import inspect
+ import typing
+
+ import pydantic
+ import typing_extensions
+
+
+ class FieldMetadata:
+     """
+     Metadata class used to annotate fields to provide additional information.
+
+     Example:
+     class MyDict(TypedDict):
+         field: typing.Annotated[str, FieldMetadata(alias="field_name")]
+
+     Will serialize: `{"field": "value"}`
+     To: `{"field_name": "value"}`
+     """
+
+     alias: str
+
+     def __init__(self, *, alias: str) -> None:
+         self.alias = alias
+
+
+ def convert_and_respect_annotation_metadata(
+     *,
+     object_: typing.Any,
+     annotation: typing.Any,
+     inner_type: typing.Optional[typing.Any] = None,
+     direction: typing.Literal["read", "write"],
+ ) -> typing.Any:
+     """
+     Respect the metadata annotations on a field, such as aliasing. This function effectively
+     manipulates the dict-form of an object to respect the metadata annotations. This is primarily used for
+     TypedDicts, which cannot support aliasing out of the box, and can be extended for additional
+     utilities, such as defaults.
+
+     Parameters
+     ----------
+     object_ : typing.Any
+
+     annotation : type
+         The type we're looking to apply typing annotations from
+
+     inner_type : typing.Optional[type]
+
+     Returns
+     -------
+     typing.Any
+     """
+
+     if object_ is None:
+         return None
+     if inner_type is None:
+         inner_type = annotation
+
+     clean_type = _remove_annotations(inner_type)
+     # Pydantic models
+     if (
+         inspect.isclass(clean_type)
+         and issubclass(clean_type, pydantic.BaseModel)
+         and isinstance(object_, typing.Mapping)
+     ):
+         return _convert_mapping(object_, clean_type, direction)
+     # TypedDicts
+     if typing_extensions.is_typeddict(clean_type) and isinstance(object_, typing.Mapping):
+         return _convert_mapping(object_, clean_type, direction)
+
+     if (
+         typing_extensions.get_origin(clean_type) == typing.Dict
+         or typing_extensions.get_origin(clean_type) == dict
+         or clean_type == typing.Dict
+     ) and isinstance(object_, typing.Dict):
+         key_type = typing_extensions.get_args(clean_type)[0]
+         value_type = typing_extensions.get_args(clean_type)[1]
+
+         return {
+             key: convert_and_respect_annotation_metadata(
+                 object_=value,
+                 annotation=annotation,
+                 inner_type=value_type,
+                 direction=direction,
+             )
+             for key, value in object_.items()
+         }
+
+     # If you're iterating on a string, do not bother to coerce it to a sequence.
+     if not isinstance(object_, str):
+         if (
+             typing_extensions.get_origin(clean_type) == typing.Set
+             or typing_extensions.get_origin(clean_type) == set
+             or clean_type == typing.Set
+         ) and isinstance(object_, typing.Set):
+             inner_type = typing_extensions.get_args(clean_type)[0]
+             return {
+                 convert_and_respect_annotation_metadata(
+                     object_=item,
+                     annotation=annotation,
+                     inner_type=inner_type,
+                     direction=direction,
+                 )
+                 for item in object_
+             }
+         elif (
+             (
+                 typing_extensions.get_origin(clean_type) == typing.List
+                 or typing_extensions.get_origin(clean_type) == list
+                 or clean_type == typing.List
+             )
+             and isinstance(object_, typing.List)
+         ) or (
+             (
+                 typing_extensions.get_origin(clean_type) == typing.Sequence
+                 or typing_extensions.get_origin(clean_type) == collections.abc.Sequence
+                 or clean_type == typing.Sequence
+             )
+             and isinstance(object_, typing.Sequence)
+         ):
+             inner_type = typing_extensions.get_args(clean_type)[0]
+             return [
+                 convert_and_respect_annotation_metadata(
+                     object_=item,
+                     annotation=annotation,
+                     inner_type=inner_type,
+                     direction=direction,
+                 )
+                 for item in object_
+             ]
+
+     if typing_extensions.get_origin(clean_type) == typing.Union:
+         # We should be able to ~relatively~ safely try to convert keys against all
+         # member types in the union, the edge case here is if one member aliases a field
+         # of the same name to a different name from another member
+         # Or if another member aliases a field of the same name that another member does not.
+         for member in typing_extensions.get_args(clean_type):
+             object_ = convert_and_respect_annotation_metadata(
+                 object_=object_,
+                 annotation=annotation,
+                 inner_type=member,
+                 direction=direction,
+             )
+         return object_
+
+     annotated_type = _get_annotation(annotation)
+     if annotated_type is None:
+         return object_
+
+     # If the object is not a TypedDict, a Union, or other container (list, set, sequence, etc.)
+     # Then we can safely call it on the recursive conversion.
+     return object_
+
+
+ def _convert_mapping(
+     object_: typing.Mapping[str, object],
+     expected_type: typing.Any,
+     direction: typing.Literal["read", "write"],
+ ) -> typing.Mapping[str, object]:
+     converted_object: typing.Dict[str, object] = {}
+     try:
+         annotations = typing_extensions.get_type_hints(expected_type, include_extras=True)
+     except NameError:
+         # The TypedDict contains a circular reference, so
+         # we use the __annotations__ attribute directly.
+         annotations = getattr(expected_type, "__annotations__", {})
+     aliases_to_field_names = _get_alias_to_field_name(annotations)
+     for key, value in object_.items():
+         if direction == "read" and key in aliases_to_field_names:
+             dealiased_key = aliases_to_field_names.get(key)
+             if dealiased_key is not None:
+                 type_ = annotations.get(dealiased_key)
+         else:
+             type_ = annotations.get(key)
+         # Note you can't get the annotation by the field name if you're in read mode, so you must check the aliases map
+         #
+         # So this is effectively saying if we're in write mode, and we don't have a type, or if we're in read mode and we don't have an alias
+         # then we can just pass the value through as is
+         if type_ is None:
+             converted_object[key] = value
+         elif direction == "read" and key not in aliases_to_field_names:
+             converted_object[key] = convert_and_respect_annotation_metadata(
+                 object_=value, annotation=type_, direction=direction
+             )
+         else:
+             converted_object[_alias_key(key, type_, direction, aliases_to_field_names)] = (
+                 convert_and_respect_annotation_metadata(object_=value, annotation=type_, direction=direction)
+             )
+     return converted_object
+
+
+ def _get_annotation(type_: typing.Any) -> typing.Optional[typing.Any]:
+     maybe_annotated_type = typing_extensions.get_origin(type_)
+     if maybe_annotated_type is None:
+         return None
+
+     if maybe_annotated_type == typing_extensions.NotRequired:
+         type_ = typing_extensions.get_args(type_)[0]
+         maybe_annotated_type = typing_extensions.get_origin(type_)
+
+     if maybe_annotated_type == typing_extensions.Annotated:
+         return type_
+
+     return None
+
+
+ def _remove_annotations(type_: typing.Any) -> typing.Any:
+     maybe_annotated_type = typing_extensions.get_origin(type_)
+     if maybe_annotated_type is None:
+         return type_
+
+     if maybe_annotated_type == typing_extensions.NotRequired:
+         return _remove_annotations(typing_extensions.get_args(type_)[0])
+
+     if maybe_annotated_type == typing_extensions.Annotated:
+         return _remove_annotations(typing_extensions.get_args(type_)[0])
+
+     return type_
+
+
+ def get_alias_to_field_mapping(type_: typing.Any) -> typing.Dict[str, str]:
+     annotations = typing_extensions.get_type_hints(type_, include_extras=True)
+     return _get_alias_to_field_name(annotations)
+
+
+ def get_field_to_alias_mapping(type_: typing.Any) -> typing.Dict[str, str]:
+     annotations = typing_extensions.get_type_hints(type_, include_extras=True)
+     return _get_field_to_alias_name(annotations)
+
+
+ def _get_alias_to_field_name(
+     field_to_hint: typing.Dict[str, typing.Any],
+ ) -> typing.Dict[str, str]:
+     aliases = {}
+     for field, hint in field_to_hint.items():
+         maybe_alias = _get_alias_from_type(hint)
+         if maybe_alias is not None:
+             aliases[maybe_alias] = field
+     return aliases
+
+
+ def _get_field_to_alias_name(
+     field_to_hint: typing.Dict[str, typing.Any],
+ ) -> typing.Dict[str, str]:
+     aliases = {}
+     for field, hint in field_to_hint.items():
+         maybe_alias = _get_alias_from_type(hint)
+         if maybe_alias is not None:
+             aliases[field] = maybe_alias
+     return aliases
+
+
+ def _get_alias_from_type(type_: typing.Any) -> typing.Optional[str]:
+     maybe_annotated_type = _get_annotation(type_)
+
+     if maybe_annotated_type is not None:
+         # The actual annotations are 1 onward, the first is the annotated type
+         annotations = typing_extensions.get_args(maybe_annotated_type)[1:]
+
+         for annotation in annotations:
+             if isinstance(annotation, FieldMetadata) and annotation.alias is not None:
+                 return annotation.alias
+     return None
+
+
+ def _alias_key(
+     key: str,
+     type_: typing.Any,
+     direction: typing.Literal["read", "write"],
+     aliases_to_field_names: typing.Dict[str, str],
+ ) -> str:
+     if direction == "read":
+         return aliases_to_field_names.get(key, key)
+     return _get_alias_from_type(type_=type_) or key
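Reviewer note: a sketch of the TypedDict aliasing this module implements; the IssueFilter shape below is hypothetical and only exercises FieldMetadata and convert_and_respect_annotation_metadata as defined above.

    import typing_extensions

    from method_security.core.serialization import FieldMetadata, convert_and_respect_annotation_metadata

    class IssueFilter(typing_extensions.TypedDict):  # hypothetical request shape, not part of the SDK
        issue_id: typing_extensions.Annotated[str, FieldMetadata(alias="issueId")]

    # "write" renames Python field names to their wire aliases ...
    print(convert_and_respect_annotation_metadata(
        object_={"issue_id": "iss_123"}, annotation=IssueFilter, direction="write"
    ))  # {'issueId': 'iss_123'}

    # ... and "read" maps wire aliases back to Python field names.
    print(convert_and_respect_annotation_metadata(
        object_={"issueId": "iss_123"}, annotation=IssueFilter, direction="read"
    ))  # {'issue_id': 'iss_123'}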
method_security/issues/__init__.py
@@ -0,0 +1,40 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ # isort: skip_file
+
+ import typing
+ from importlib import import_module
+
+ if typing.TYPE_CHECKING:
+     from .types import Issue, IssueClosedReason, IssueId, IssueSeverity, IssueStatus
+     from .errors import IssueDoesNotExistError
+ _dynamic_imports: typing.Dict[str, str] = {
+     "Issue": ".types",
+     "IssueClosedReason": ".types",
+     "IssueDoesNotExistError": ".errors",
+     "IssueId": ".types",
+     "IssueSeverity": ".types",
+     "IssueStatus": ".types",
+ }
+
+
+ def __getattr__(attr_name: str) -> typing.Any:
+     module_name = _dynamic_imports.get(attr_name)
+     if module_name is None:
+         raise AttributeError(f"No {attr_name} found in _dynamic_imports for module name -> {__name__}")
+     try:
+         module = import_module(module_name, __package__)
+         result = getattr(module, attr_name)
+         return result
+     except ImportError as e:
+         raise ImportError(f"Failed to import {attr_name} from {module_name}: {e}") from e
+     except AttributeError as e:
+         raise AttributeError(f"Failed to get {attr_name} from {module_name}: {e}") from e
+
+
+ def __dir__():
+     lazy_attrs = list(_dynamic_imports.keys())
+     return sorted(lazy_attrs)
+
+
+ __all__ = ["Issue", "IssueClosedReason", "IssueDoesNotExistError", "IssueId", "IssueSeverity", "IssueStatus"]
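Reviewer note: the __getattr__/_dynamic_imports pattern above defers importing the types and errors submodules until an attribute is first accessed; a short illustration of the expected behaviour.

    import method_security.issues as issues

    # Accessing an attribute triggers __getattr__, which lazily imports ".types" or ".errors".
    print(issues.Issue)
    print(dir(issues))  # the sorted keys of _dynamic_imports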